Search is not available for this dataset
content
stringlengths 60
399M
| max_stars_repo_name
stringlengths 6
110
|
---|---|
<|start_filename|>folly/experimental/crypto/LtHash.h<|end_filename|>
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <cstddef>
#include <memory>
#include <folly/Range.h>
#include <folly/experimental/crypto/Blake2xb.h>
#include <folly/io/IOBuf.h>
namespace folly {
namespace crypto {
namespace detail {
/**
* Allocates an IOBuf of the given size, aligned on a cache line boundary.
* Similar to folly::IOBuf::create(), the returned IOBuf has an initial
* capacity == size and an initial length == 0.
*/
folly::IOBuf allocateCacheAlignedIOBuf(size_t size);
/**
* Similar to allocateCacheAlignedIOBuf(), but returns a unique_ptr to an IOBuf
* instead of an IOBuf.
*/
std::unique_ptr<folly::IOBuf> allocateCacheAlignedIOBufUnique(size_t size);
/**
* Returns true if the given memory address is aligned on a cache line boundary
* and false if it isn't.
*/
bool isCacheAlignedAddress(const void* addr);
} // namespace detail
/**
* Templated homomorphic hash, using LtHash (lattice-based crypto).
* Template parameters: B = element size in bits, N = number of elements.
*
* Current constraints (checked at compile time with static asserts):
* (1) B must be 16, 20 or 32.
* (2) N must be > 999.
* (3) when B is 16, N must be divisible by 32.
* (4) when B is 20, N must be divisible by 24.
* (5) when B is 32, N must be divisible by 16.
*/
template <std::size_t B, std::size_t N>
class LtHash {
 public:
  explicit LtHash(const folly::IOBuf& initialChecksum = {});
  /**
   * Like the above constructor but takes ownership of the checksum buffer,
   * avoiding a copy if these conditions about the input buffer are met:
   * - initialChecksum->isChained() is false
   * - initialChecksum->isShared() is false
   * - detail::isCacheAlignedAddress(initialChecksum->data()) is true
   *
   * If you want to take advantage of this and need to make sure your IOBuf
   * address is aligned on a cache line boundary, you can use the
   * function detail::allocateCacheAlignedIOBufUnique() to do it.
   */
  explicit LtHash(std::unique_ptr<folly::IOBuf> initialChecksum);
  // Note: we explicitly implement copy constructor and copy assignment
  // operator to make sure the checksum_ IOBuf is deep-copied.
  LtHash(const LtHash<B, N>& that);
  LtHash<B, N>& operator=(const LtHash<B, N>& that);
  LtHash(LtHash<B, N>&& that) noexcept = default;
  LtHash<B, N>& operator=(LtHash<B, N>&& that) noexcept = default;
  ~LtHash() = default;
  /**
   * Resets the checksum in this LtHash. This puts the hash into the same
   * state as if it was just constructed with the zero-argument constructor.
   */
  void reset();
  /**
   * IMPORTANT: Unlike regular hash, the incremental hash functions operate on
   * individual objects, not a stream of data. For example, the following
   * example codes will lead to different checksum values.
   * (1) addObject("Hello"); addObject(" World");
   * (2) addObject("Hello World");
   * because addObject() calculates hashes for the two words separately, and
   * aggregate them to update checksum.
   *
   * addObject() is commutative. LtHash generates the same checksum over a
   * given set of objects regardless of the order they were added.
   * Example: H(a + b + c) = H(b + c + a)
   *
   * addObject() can be called with multiple ByteRange parameters, in which
   * case it will behave as if it was called with a single ByteRange which
   * contained the concatenation of all the input ByteRanges. This allows
   * adding an object whose hash is computed from several non-contiguous
   * ranges of data, without having to copy the data to a contiguous
   * piece of memory.
   *
   * Example: addObject(r1, r2, r3) is equivalent to
   * addObject(r4) where r4 contains the concatenation of r1 + r2 + r3.
   */
  template <typename... Args>
  LtHash<B, N>& addObject(folly::ByteRange firstRange, Args&&... moreRanges);
  /**
   * removeObject() is the inverse function of addObject(). Note that it does
   * NOT check whether the object has been actually added to LtHash. The caller
   * should ensure that the object is valid.
   *
   * Example: H(a - a + b - b + c - c) = H(a + b + c - a - b - c) = H()
   *
   * Similar to addObject(), removeObject() can be called with more than one
   * ByteRange parameter.
   */
  template <typename... Args>
  LtHash<B, N>& removeObject(folly::ByteRange firstRange, Args&&... moreRanges);
  /**
   * Because the addObject() operation in LtHash is commutative and transitive,
   * it's possible to break down a large LtHash computation (i.e. adding 100k
   * objects) into several parallel steps each of which computes a LtHash of a
   * subset of the objects, and then add the LtHash objects together.
   * Pseudocode:
   *
   *   std::vector<std::string> objects = ...;
   *   Future<LtHash<20, 1008>> h1 = computeInBackgroundThread(
   *       &objects[0], &objects[10000]);
   *   Future<LtHash<20, 1008>> h2 = computeInBackgroundThread(
   *       &objects[10001], &objects[20000]);
   *   LtHash<20, 1008> result = h1.get() + h2.get();
   */
  LtHash<B, N>& operator+=(const LtHash<B, N>& rhs);
  friend LtHash<B, N> operator+(
      const LtHash<B, N>& lhs,
      const LtHash<B, N>& rhs) {
    LtHash<B, N> result = lhs;
    result += rhs;
    return result;
  }
  friend LtHash<B, N> operator+(LtHash<B, N>&& lhs, const LtHash<B, N>& rhs) {
    LtHash<B, N> result = std::move(lhs);
    result += rhs;
    return result;
  }
  friend LtHash<B, N> operator+(const LtHash<B, N>& lhs, LtHash<B, N>&& rhs) {
    // addition is commutative so we can just swap the two arguments
    return std::move(rhs) + lhs;
  }
  friend LtHash<B, N> operator+(LtHash<B, N>&& lhs, LtHash<B, N>&& rhs) {
    LtHash<B, N> result = std::move(lhs);
    result += rhs;
    return result;
  }
  /**
   * The subtraction operator is provided for symmetry, but I'm not sure if
   * anyone will ever actually use it outside of tests.
   */
  LtHash<B, N>& operator-=(const LtHash<B, N>& rhs);
  friend LtHash<B, N> operator-(
      const LtHash<B, N>& lhs,
      const LtHash<B, N>& rhs) {
    LtHash<B, N> result = lhs;
    result -= rhs;
    return result;
  }
  friend LtHash<B, N> operator-(LtHash<B, N>&& lhs, const LtHash<B, N>& rhs) {
    LtHash<B, N> result = std::move(lhs);
    result -= rhs;
    return result;
  }
  /**
   * Equality comparison operator, implemented in a data-independent way to
   * guard against timing attacks. Always use this to check if two LtHash
   * values are equal instead of manually comparing checksum buffers.
   */
  bool operator==(const LtHash<B, N>& that) const;
  /**
   * Equality comparison operator for checksum in ByteRange, implemented in a
   * data-independent way to guard against timing attacks.
   */
  bool checksumEquals(folly::ByteRange otherChecksum) const;
  /**
   * Inequality comparison operator.
   */
  bool operator!=(const LtHash<B, N>& that) const;
  /**
   * Sets the initial checksum value to use for processing objects in the
   * xxxObject() calls.
   */
  void setChecksum(const folly::IOBuf& checksum);
  /**
   * Like the above method but takes ownership of the checksum buffer,
   * avoiding a copy if these conditions about the input buffer are met:
   * - checksum->isChained() is false
   * - checksum->isShared() is false
   * - detail::isCacheAlignedAddress(checksum->data()) is true
   *
   * If you want to take advantage of this and need to make sure your IOBuf
   * address is aligned on a cache line boundary, you can use the
   * function detail::allocateCacheAlignedIOBufUnique() to do it.
   */
  void setChecksum(std::unique_ptr<folly::IOBuf> checksum);
  /**
   * Returns the total length of the checksum (element_count * element_length)
   */
  static constexpr size_t getChecksumSizeBytes();
  /**
   * Returns the template parameter B.
   */
  static constexpr size_t getElementSizeInBits();
  /**
   * Returns the number of elements that get packed into a single uint64_t.
   */
  static constexpr size_t getElementsPerUint64();
  /**
   * Returns the template parameter N.
   */
  static constexpr size_t getElementCount();
  /**
   * Returns true if the internal checksum uses padding bits between elements.
   */
  static constexpr bool hasPaddingBits();
  /**
   * Returns a copy of the current checksum value
   */
  std::unique_ptr<folly::IOBuf> getChecksum() const;

 private:
  // Hashes one logical object (possibly spread over several ranges) into the
  // fixed-size output buffer `out` using Blake2xb.
  template <typename... Args>
  void hashObject(
      folly::MutableByteRange out,
      folly::ByteRange firstRange,
      Args&&... moreRanges);
  // Recursively feeds each input range into the digest; the zero-range
  // overload below terminates the recursion.
  template <typename... Args>
  void
  updateDigest(Blake2xb& digest, folly::ByteRange range, Args&&... moreRanges);
  void updateDigest(Blake2xb& digest);
  // current checksum
  folly::IOBuf checksum_;
};
} // namespace crypto
} // namespace folly
#include <folly/experimental/crypto/LtHash-inl.h>
namespace folly {
namespace crypto {
// This is the fastest and smallest specialization and should be
// preferred in most cases. It provides over 200 bits of security
// which should be good enough for most cases.
using LtHash16_1024 = LtHash<16, 1024>;
// These specializations are available to users who want a higher
// level of cryptographic security. They are slower and larger than
// the one above.
using LtHash20_1008 = LtHash<20, 1008>;
using LtHash32_1024 = LtHash<32, 1024>;
} // namespace crypto
} // namespace folly
<|start_filename|>folly/SingletonThreadLocal.cpp<|end_filename|>
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/SingletonThreadLocal.h>
#include <cstdlib>
#include <iostream>
#include <folly/Demangle.h>
namespace folly {
namespace detail {
SingletonThreadLocalBase::UniqueBase::UniqueBase(
    Ref type,
    Ref tag,
    Ref make,
    Ref tltag,
    Value& value) noexcept {
  // The first instantiation seen for a given (type, tag) pair records which
  // trailing template arguments (Make, TLTag) it was built with.
  if (!value.init) {
    value.init = true;
    value.make = &make;
    value.tltag = &tltag;
  }
  auto const sameMake = *value.make == make;
  auto const sameTLTag = *value.tltag == tltag;
  if (sameMake && sameTLTag) {
    return;
  }
  // Conflicting instantiation: the same (type, tag) pair was used elsewhere
  // with different trailing arguments. Report both variants and abort, since
  // they would silently refer to different thread-local singletons.
  auto const typeName = demangle(type.name());
  auto const tagName = demangle(tag.name());
  auto const makeName0 = demangle(value.make->name());
  auto const makeName1 = demangle(make.name());
  auto const tltagName0 = demangle(value.tltag->name());
  auto const tltagName1 = demangle(tltag.name());
  // Guarantee the standard streams are constructed; this may run during
  // static initialization.
  std::ios_base::Init io_init;
  std::cerr << "Overloaded folly::SingletonThreadLocal<" << typeName << ", "
            << tagName << ", ...> with differing trailing arguments:\n"
            << " folly::SingletonThreadLocal<" << typeName << ", " << tagName
            << ", " << makeName0 << ", " << tltagName0 << ">\n"
            << " folly::SingletonThreadLocal<" << typeName << ", " << tagName
            << ", " << makeName1 << ", " << tltagName1 << ">\n";
  std::abort();
}
} // namespace detail
} // namespace folly
<|start_filename|>folly/futures/test/CollectTest.cpp<|end_filename|>
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <numeric>
#include <boost/thread/barrier.hpp>
#include <folly/Random.h>
#include <folly/futures/Future.h>
#include <folly/portability/GTest.h>
#include <folly/small_vector.h>
using namespace folly;
// Shared exception type/instance thrown into promises throughout the tests.
typedef FutureException eggs_t;
static eggs_t eggs("eggs");
// RNG used to randomize the order in which promises are fulfilled below.
auto rng = std::mt19937(folly::randomNumberSeed());
TEST(Collect, collectAll) {
  // returns a vector variant
  {
    std::vector<Promise<int>> promises(10);
    std::vector<Future<int>> futures;
    for (auto& p : promises) {
      futures.push_back(p.getFuture());
    }
    auto allf = collectAll(futures);
    // Fulfill in random order; the aggregate becomes ready only after the
    // last input future completes.
    std::shuffle(promises.begin(), promises.end(), rng);
    for (auto& p : promises) {
      EXPECT_FALSE(allf.isReady());
      p.setValue(42);
    }
    EXPECT_TRUE(allf.isReady());
    auto& results = allf.value();
    for (auto& t : results) {
      EXPECT_EQ(42, t.value());
    }
  }
  // check error semantics
  {
    std::vector<Promise<int>> promises(4);
    std::vector<Future<int>> futures;
    for (auto& p : promises) {
      futures.push_back(p.getFuture());
    }
    auto allf = collectAll(futures);
    promises[0].setValue(42);
    promises[1].setException(eggs);
    EXPECT_FALSE(allf.isReady());
    promises[2].setValue(42);
    EXPECT_FALSE(allf.isReady());
    promises[3].setException(eggs);
    EXPECT_TRUE(allf.isReady());
    // collectAll() itself never fails; per-future errors surface in the
    // individual Try results instead.
    EXPECT_FALSE(allf.getTry().hasException());
    auto& results = allf.value();
    EXPECT_EQ(42, results[0].value());
    EXPECT_TRUE(results[1].hasException());
    EXPECT_EQ(42, results[2].value());
    EXPECT_TRUE(results[3].hasException());
  }
  // check that futures are ready in thenValue()
  {
    std::vector<Promise<Unit>> promises(10);
    std::vector<Future<Unit>> futures;
    for (auto& p : promises) {
      futures.push_back(p.getFuture());
    }
    auto allf = collectAllSemiFuture(futures).toUnsafeFuture().thenTry(
        [](Try<std::vector<Try<Unit>>>&& ts) {
          // value() would throw if any input were not complete.
          for (auto& f : ts.value()) {
            f.value();
          }
        });
    std::shuffle(promises.begin(), promises.end(), rng);
    for (auto& p : promises) {
      p.setValue();
    }
    EXPECT_TRUE(allf.isReady());
  }
}
TEST(Collect, collect) {
  // success case
  {
    std::vector<Promise<int>> promises(10);
    std::vector<Future<int>> futures;
    for (auto& p : promises) {
      futures.push_back(p.getFuture());
    }
    auto allf = collect(futures);
    std::shuffle(promises.begin(), promises.end(), rng);
    for (auto& p : promises) {
      EXPECT_FALSE(allf.isReady());
      p.setValue(42);
    }
    EXPECT_TRUE(allf.isReady());
    for (auto i : allf.value()) {
      EXPECT_EQ(42, i);
    }
  }
  // failure case: unlike collectAll(), collect() short-circuits on the
  // first exception.
  {
    std::vector<Promise<int>> promises(10);
    std::vector<Future<int>> futures;
    for (auto& p : promises) {
      futures.push_back(p.getFuture());
    }
    auto allf = collect(futures);
    std::shuffle(promises.begin(), promises.end(), rng);
    for (int i = 0; i < 10; i++) {
      if (i < 5) {
        // everything goes well so far...
        EXPECT_FALSE(allf.isReady());
        promises[i].setValue(42);
      } else if (i == 5) {
        // short circuit with an exception
        EXPECT_FALSE(allf.isReady());
        promises[i].setException(eggs);
        EXPECT_TRUE(allf.isReady());
      } else if (i < 8) {
        // don't blow up on further values
        EXPECT_TRUE(allf.isReady());
        promises[i].setValue(42);
      } else {
        // don't blow up on further exceptions
        EXPECT_TRUE(allf.isReady());
        promises[i].setException(eggs);
      }
    }
    EXPECT_THROW(allf.value(), eggs_t);
  }
  // void futures success case
  {
    std::vector<Promise<Unit>> promises(10);
    std::vector<Future<Unit>> futures;
    for (auto& p : promises) {
      futures.push_back(p.getFuture());
    }
    auto allf = collect(futures);
    std::shuffle(promises.begin(), promises.end(), rng);
    for (auto& p : promises) {
      EXPECT_FALSE(allf.isReady());
      p.setValue();
    }
    EXPECT_TRUE(allf.isReady());
  }
  // void futures failure case
  {
    std::vector<Promise<Unit>> promises(10);
    std::vector<Future<Unit>> futures;
    for (auto& p : promises) {
      futures.push_back(p.getFuture());
    }
    auto allf = collect(futures);
    std::shuffle(promises.begin(), promises.end(), rng);
    for (int i = 0; i < 10; i++) {
      if (i < 5) {
        // everything goes well so far...
        EXPECT_FALSE(allf.isReady());
        promises[i].setValue();
      } else if (i == 5) {
        // short circuit with an exception
        EXPECT_FALSE(allf.isReady());
        promises[i].setException(eggs);
        EXPECT_TRUE(allf.isReady());
      } else if (i < 8) {
        // don't blow up on further values
        EXPECT_TRUE(allf.isReady());
        promises[i].setValue();
      } else {
        // don't blow up on further exceptions
        EXPECT_TRUE(allf.isReady());
        promises[i].setException(eggs);
      }
    }
    EXPECT_THROW(allf.value(), eggs_t);
  }
  // move only compiles
  {
    std::vector<Promise<std::unique_ptr<int>>> promises(10);
    std::vector<Future<std::unique_ptr<int>>> futures;
    for (auto& p : promises) {
      futures.push_back(p.getFuture());
    }
    collect(futures);
  }
}
// Helper type with no default constructor; exercises collect()'s
// specialization for non-default-constructible result types.
struct NotDefaultConstructible {
  NotDefaultConstructible() = delete;
  explicit NotDefaultConstructible(int value) : i(value) {}
  int i;
};
// We have a specialized implementation for non-default-constructible objects
// Ensure that it works and preserves order
TEST(Collect, collectNotDefaultConstructible) {
  std::vector<Promise<NotDefaultConstructible>> promises(10);
  std::vector<Future<NotDefaultConstructible>> futures;
  // Random fulfillment order, driven by a shuffled index list so each
  // promise still receives its own index as its payload.
  std::vector<int> indices(10);
  std::iota(indices.begin(), indices.end(), 0);
  std::shuffle(indices.begin(), indices.end(), rng);
  for (auto& p : promises) {
    futures.push_back(p.getFuture());
  }
  auto allf = collect(futures);
  for (auto i : indices) {
    EXPECT_FALSE(allf.isReady());
    promises[i].setValue(NotDefaultConstructible(i));
  }
  EXPECT_TRUE(allf.isReady());
  // Results must come back in input order, not completion order.
  int i = 0;
  for (auto val : allf.value()) {
    EXPECT_EQ(i, val.i);
    i++;
  }
}
TEST(Collect, collectAny) {
  {
    std::vector<Promise<int>> promises(10);
    std::vector<Future<int>> futures;
    for (auto& p : promises) {
      futures.push_back(p.getFuture());
    }
    for (auto& f : futures) {
      EXPECT_FALSE(f.isReady());
    }
    auto anyf = collectAny(futures);
    /* futures were moved in, so these are invalid now */
    EXPECT_FALSE(anyf.isReady());
    promises[7].setValue(42);
    EXPECT_TRUE(anyf.isReady());
    // The result pairs the index of the first completed future with its Try.
    auto& idx_fut = anyf.value();
    auto i = idx_fut.first;
    EXPECT_EQ(7, i);
    auto& f = idx_fut.second;
    EXPECT_EQ(42, f.value());
  }
  // error: an exceptional completion still counts as "first ready"
  {
    std::vector<Promise<Unit>> promises(10);
    std::vector<Future<Unit>> futures;
    for (auto& p : promises) {
      futures.push_back(p.getFuture());
    }
    for (auto& f : futures) {
      EXPECT_FALSE(f.isReady());
    }
    auto anyf = collectAny(futures);
    EXPECT_FALSE(anyf.isReady());
    promises[3].setException(eggs);
    EXPECT_TRUE(anyf.isReady());
    EXPECT_TRUE(anyf.value().second.hasException());
  }
  // thenValue()
  {
    std::vector<Promise<int>> promises(10);
    std::vector<Future<int>> futures;
    for (auto& p : promises) {
      futures.push_back(p.getFuture());
    }
    auto anyf = collectAny(futures).thenValue(
        [](std::pair<size_t, Try<int>> p) { EXPECT_EQ(42, p.second.value()); });
    promises[3].setValue(42);
    EXPECT_TRUE(anyf.isReady());
  }
}
TEST(Collect, collectAnyWithoutException) {
  auto& executor = folly::InlineExecutor::instance();
  {
    std::vector<Promise<int>> promises(10);
    std::vector<Future<int>> futures;
    for (auto& p : promises) {
      futures.push_back(p.getFuture());
    }
    auto onef = collectAnyWithoutException(futures).via(&executor);
    /* futures were moved in, so these are invalid now */
    EXPECT_FALSE(onef.isReady());
    promises[7].setValue(42);
    EXPECT_TRUE(onef.isReady());
    auto& idx_fut = onef.value();
    EXPECT_EQ(7, idx_fut.first);
    EXPECT_EQ(42, idx_fut.second);
  }
  // some exception before ready: exceptional completions are skipped until
  // a future succeeds.
  {
    std::vector<Promise<int>> promises(10);
    std::vector<Future<int>> futures;
    for (auto& p : promises) {
      futures.push_back(p.getFuture());
    }
    auto onef = collectAnyWithoutException(futures).via(&executor);
    EXPECT_FALSE(onef.isReady());
    promises[3].setException(eggs);
    EXPECT_FALSE(onef.isReady());
    promises[4].setException(eggs);
    EXPECT_FALSE(onef.isReady());
    promises[0].setValue(99);
    EXPECT_TRUE(onef.isReady());
    auto& idx_fut = onef.value();
    EXPECT_EQ(0, idx_fut.first);
    EXPECT_EQ(99, idx_fut.second);
  }
  // all exceptions: only once every input has failed does the result fail.
  {
    std::vector<Promise<int>> promises(10);
    std::vector<Future<int>> futures;
    for (auto& p : promises) {
      futures.push_back(p.getFuture());
    }
    auto onef = collectAnyWithoutException(futures).via(&executor);
    EXPECT_FALSE(onef.isReady());
    for (int i = 0; i < 9; ++i) {
      promises[i].setException(eggs);
    }
    EXPECT_FALSE(onef.isReady());
    promises[9].setException(eggs);
    EXPECT_TRUE(onef.isReady());
    EXPECT_TRUE(onef.hasException());
  }
  // Deferred work
  {
    std::vector<Promise<int>> promises(10);
    auto onef = [&] {
      std::vector<SemiFuture<int>> futures;
      for (auto& p : promises) {
        futures.push_back(
            p.getSemiFuture().deferValue([](auto v) { return v; }));
      }
      return collectAnyWithoutException(futures);
    }();
    /* futures were moved in, so these are invalid now */
    promises[7].setValue(42);
    auto idx_fut = std::move(onef).get();
    EXPECT_EQ(7, idx_fut.first);
    EXPECT_EQ(42, idx_fut.second);
  }
}
TEST(Collect, alreadyCompleted) {
  // Collecting futures that are already fulfilled must still invoke the
  // continuations (inline, since everything is ready).
  {
    std::vector<Future<Unit>> fs;
    for (int i = 0; i < 10; i++) {
      fs.push_back(makeFuture());
    }
    collectAllSemiFuture(fs).toUnsafeFuture().thenValue(
        [&](std::vector<Try<Unit>> ts) { EXPECT_EQ(fs.size(), ts.size()); });
  }
  {
    std::vector<Future<int>> fs;
    for (int i = 0; i < 10; i++) {
      fs.push_back(makeFuture(i));
    }
    // With all futures complete, collectAny picks index 0 (value 0).
    collectAny(fs).thenValue([&](std::pair<size_t, Try<int>> p) {
      EXPECT_EQ(p.first, p.second.value());
    });
  }
}
// The next four tests fulfill promises from concurrent threads (released
// together by a barrier) to exercise collect()/collectAll() thread safety.
TEST(Collect, parallel) {
  std::vector<Promise<int>> ps(10);
  std::vector<Future<int>> fs;
  for (size_t i = 0; i < ps.size(); i++) {
    fs.emplace_back(ps[i].getFuture());
  }
  auto f = collect(fs);
  std::vector<std::thread> ts;
  // +1 so the main thread participates in the rendezvous.
  boost::barrier barrier(ps.size() + 1);
  for (size_t i = 0; i < ps.size(); i++) {
    ts.emplace_back([&ps, &barrier, i]() {
      barrier.wait();
      ps[i].setValue(i);
    });
  }
  barrier.wait();
  for (size_t i = 0; i < ps.size(); i++) {
    ts[i].join();
  }
  EXPECT_TRUE(f.isReady());
  for (size_t i = 0; i < ps.size(); i++) {
    EXPECT_EQ(i, f.value()[i]);
  }
}
TEST(Collect, parallelWithError) {
  std::vector<Promise<int>> ps(10);
  std::vector<Future<int>> fs;
  for (size_t i = 0; i < ps.size(); i++) {
    fs.emplace_back(ps[i].getFuture());
  }
  auto f = collect(fs);
  std::vector<std::thread> ts;
  boost::barrier barrier(ps.size() + 1);
  for (size_t i = 0; i < ps.size(); i++) {
    ts.emplace_back([&ps, &barrier, i]() {
      barrier.wait();
      // One thread fails its promise; collect() must propagate the error.
      if (i == (ps.size() / 2)) {
        ps[i].setException(eggs);
      } else {
        ps[i].setValue(i);
      }
    });
  }
  barrier.wait();
  for (size_t i = 0; i < ps.size(); i++) {
    ts[i].join();
  }
  EXPECT_TRUE(f.isReady());
  EXPECT_THROW(f.value(), eggs_t);
}
TEST(Collect, allParallel) {
  std::vector<Promise<int>> ps(10);
  std::vector<Future<int>> fs;
  for (size_t i = 0; i < ps.size(); i++) {
    fs.emplace_back(ps[i].getFuture());
  }
  auto f = collectAll(fs);
  std::vector<std::thread> ts;
  boost::barrier barrier(ps.size() + 1);
  for (size_t i = 0; i < ps.size(); i++) {
    ts.emplace_back([&ps, &barrier, i]() {
      barrier.wait();
      ps[i].setValue(i);
    });
  }
  barrier.wait();
  for (size_t i = 0; i < ps.size(); i++) {
    ts[i].join();
  }
  EXPECT_TRUE(f.isReady());
  for (size_t i = 0; i < ps.size(); i++) {
    EXPECT_TRUE(f.value()[i].hasValue());
    EXPECT_EQ(i, f.value()[i].value());
  }
}
TEST(Collect, allParallelWithError) {
  std::vector<Promise<int>> ps(10);
  std::vector<Future<int>> fs;
  for (size_t i = 0; i < ps.size(); i++) {
    fs.emplace_back(ps[i].getFuture());
  }
  auto f = collectAll(fs);
  std::vector<std::thread> ts;
  boost::barrier barrier(ps.size() + 1);
  for (size_t i = 0; i < ps.size(); i++) {
    ts.emplace_back([&ps, &barrier, i]() {
      barrier.wait();
      if (i == (ps.size() / 2)) {
        ps[i].setException(eggs);
      } else {
        ps[i].setValue(i);
      }
    });
  }
  barrier.wait();
  for (size_t i = 0; i < ps.size(); i++) {
    ts[i].join();
  }
  EXPECT_TRUE(f.isReady());
  // collectAll() keeps per-future results: one Try holds the exception,
  // the others hold their values.
  for (size_t i = 0; i < ps.size(); i++) {
    if (i == (ps.size() / 2)) {
      EXPECT_THROW(f.value()[i].value(), eggs_t);
    } else {
      EXPECT_TRUE(f.value()[i].hasValue());
      EXPECT_EQ(i, f.value()[i].value());
    }
  }
}
TEST(Collect, collectN) {
  std::vector<Promise<Unit>> promises(10);
  std::vector<Future<Unit>> futures;
  for (auto& p : promises) {
    futures.push_back(p.getFuture());
  }
  bool flag = false;
  // The aggregate completes as soon as the first n inputs complete.
  size_t n = 3;
  collectN(futures, n)
      .via(&InlineExecutor::instance())
      .thenValue([&](std::vector<std::pair<size_t, Try<Unit>>> v) {
        flag = true;
        EXPECT_EQ(n, v.size());
        for (auto& tt : v) {
          EXPECT_TRUE(tt.second.hasValue());
        }
      });
  promises[0].setValue();
  EXPECT_FALSE(flag);
  promises[1].setValue();
  EXPECT_FALSE(flag);
  promises[2].setValue();
  EXPECT_TRUE(flag);
}
TEST(Collect, collectNParallel) {
  std::vector<Promise<Unit>> ps(100);
  std::vector<Future<Unit>> futures;
  for (auto& p : ps) {
    futures.push_back(p.getFuture());
  }
  bool flag = false;
  size_t n = 90;
  collectN(futures, n)
      .via(&InlineExecutor::instance())
      .thenValue([&](std::vector<std::pair<size_t, Try<Unit>>> v) {
        flag = true;
        EXPECT_EQ(n, v.size());
        for (auto& tt : v) {
          EXPECT_TRUE(tt.second.hasValue());
        }
      });
  // Fulfill all 100 promises concurrently; the callback fires once the
  // 90th completes.
  std::vector<std::thread> ts;
  boost::barrier barrier(ps.size() + 1);
  for (size_t i = 0; i < ps.size(); i++) {
    ts.emplace_back([&ps, &barrier, i]() {
      barrier.wait();
      ps[i].setValue();
    });
  }
  barrier.wait();
  for (size_t i = 0; i < ps.size(); i++) {
    ts[i].join();
  }
  EXPECT_TRUE(flag);
}
/// Ensure that we can compile collectAll/Any with folly::small_vector
TEST(Collect, smallVector) {
  // small_vector takes a distinct code path for non-trivially-copyable
  // element types; these asserts document why Future qualifies.
  static_assert(
      !folly::is_trivially_copyable<Future<Unit>>::value,
      "Futures should not be trivially copyable");
  static_assert(
      !folly::is_trivially_copyable<Future<int>>::value,
      "Futures should not be trivially copyable");
  {
    folly::small_vector<Future<Unit>> futures;
    for (int i = 0; i < 10; i++) {
      futures.push_back(makeFuture());
    }
    auto anyf = collectAny(futures);
  }
  {
    folly::small_vector<Future<Unit>> futures;
    for (int i = 0; i < 10; i++) {
      futures.push_back(makeFuture());
    }
    auto allf = collectAll(futures);
  }
}
// Variadic overloads: heterogeneous futures collected into a tuple of Trys.
TEST(Collect, collectAllVariadic) {
  Promise<bool> pb;
  Promise<int> pi;
  Future<bool> fb = pb.getFuture();
  Future<int> fi = pi.getFuture();
  bool flag = false;
  collectAllSemiFuture(std::move(fb), std::move(fi))
      .toUnsafeFuture()
      .thenValue([&](std::tuple<Try<bool>, Try<int>> tup) {
        flag = true;
        EXPECT_TRUE(std::get<0>(tup).hasValue());
        EXPECT_EQ(std::get<0>(tup).value(), true);
        EXPECT_TRUE(std::get<1>(tup).hasValue());
        EXPECT_EQ(std::get<1>(tup).value(), 42);
      });
  pb.setValue(true);
  EXPECT_FALSE(flag);
  pi.setValue(42);
  EXPECT_TRUE(flag);
}
TEST(Collect, collectAllVariadicReferences) {
  // Same as above, but passing the futures as lvalue references.
  Promise<bool> pb;
  Promise<int> pi;
  Future<bool> fb = pb.getFuture();
  Future<int> fi = pi.getFuture();
  bool flag = false;
  collectAllSemiFuture(fb, fi).toUnsafeFuture().thenValue(
      [&](std::tuple<Try<bool>, Try<int>> tup) {
        flag = true;
        EXPECT_TRUE(std::get<0>(tup).hasValue());
        EXPECT_EQ(std::get<0>(tup).value(), true);
        EXPECT_TRUE(std::get<1>(tup).hasValue());
        EXPECT_EQ(std::get<1>(tup).value(), 42);
      });
  pb.setValue(true);
  EXPECT_FALSE(flag);
  pi.setValue(42);
  EXPECT_TRUE(flag);
}
TEST(Collect, collectAllVariadicWithException) {
  Promise<bool> pb;
  Promise<int> pi;
  Future<bool> fb = pb.getFuture();
  Future<int> fi = pi.getFuture();
  bool flag = false;
  collectAllSemiFuture(std::move(fb), std::move(fi))
      .toUnsafeFuture()
      .thenValue([&](std::tuple<Try<bool>, Try<int>> tup) {
        flag = true;
        EXPECT_TRUE(std::get<0>(tup).hasValue());
        EXPECT_EQ(std::get<0>(tup).value(), true);
        // The failed future's Try carries the exception.
        EXPECT_TRUE(std::get<1>(tup).hasException());
        EXPECT_THROW(std::get<1>(tup).value(), eggs_t);
      });
  pb.setValue(true);
  EXPECT_FALSE(flag);
  pi.setException(eggs);
  EXPECT_TRUE(flag);
}
TEST(Collect, collectVariadic) {
  Promise<bool> pb;
  Promise<int> pi;
  Future<bool> fb = pb.getFuture();
  Future<int> fi = pi.getFuture();
  bool flag = false;
  // collect() (vs collectAll()) unwraps the Trys into plain values.
  collect(std::move(fb), std::move(fi))
      .thenValue([&](std::tuple<bool, int> tup) {
        flag = true;
        EXPECT_EQ(std::get<0>(tup), true);
        EXPECT_EQ(std::get<1>(tup), 42);
      });
  pb.setValue(true);
  EXPECT_FALSE(flag);
  pi.setValue(42);
  EXPECT_TRUE(flag);
}
TEST(Collect, collectVariadicWithException) {
  Promise<bool> pb;
  Promise<int> pi;
  Future<bool> fb = pb.getFuture();
  Future<int> fi = pi.getFuture();
  auto f = collect(std::move(fb), std::move(fi));
  pb.setValue(true);
  EXPECT_FALSE(f.isReady());
  pi.setException(eggs);
  EXPECT_TRUE(f.isReady());
  EXPECT_TRUE(f.getTry().hasException());
  EXPECT_THROW(std::move(f).get(), eggs_t);
}
TEST(Collect, collectAllNone) {
  // Collecting an empty set of futures is immediately ready.
  std::vector<Future<int>> fs;
  auto f = collectAll(fs);
  EXPECT_TRUE(f.isReady());
}
TEST(Collect, noDefaultConstructor) {
  // Compile-only check: variadic collect() with a non-default-constructible
  // result type.
  struct A {
    explicit A(size_t /* x */) {}
  };
  auto f1 = makeFuture(A(1));
  auto f2 = makeFuture(A(2));
  auto f = collect(std::move(f1), std::move(f2));
}
<|start_filename|>folly/net/NetOps.cpp<|end_filename|>
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/net/NetOps.h>
#include <errno.h>
#include <fcntl.h>

#include <cstddef>
#include <cstring>

#include <folly/Portability.h>
#include <folly/net/detail/SocketFileDescriptorMap.h>
#if _WIN32
#include <event2/util.h> // @manual
#include <MSWSock.h> // @manual
#include <folly/ScopeGuard.h>
#endif
namespace folly {
namespace netops {
namespace {
#if _WIN32
// WSA has to be explicitly initialized.
static struct WinSockInit {
  WinSockInit() {
    WSADATA dat;
    WSAStartup(MAKEWORD(2, 2), &dat);
  }
  ~WinSockInit() {
    WSACleanup();
  }
} winsockInit;

// Maps a Winsock error code to the POSIX errno value that callers of these
// shims expect. Only WSAEWOULDBLOCK needs remapping; everything else is
// passed through unchanged.
int translate_wsa_error(int wsaErr) {
  switch (wsaErr) {
    case WSAEWOULDBLOCK:
      return EAGAIN;
    default:
      return wsaErr;
  }
}
#endif

// Invokes native socket function `f` with the socket's native handle plus
// `args`, and (on Windows) mirrors the Winsock error state into errno so
// callers can use POSIX-style error handling.
template <class R, class F, class... Args>
static R wrapSocketFunction(F f, NetworkSocket s, Args... args) {
  R ret = f(s.data, args...);
#if _WIN32
  errno = translate_wsa_error(WSAGetLastError());
#endif
  return ret;
}
} // namespace
// Shim for ::accept() operating on NetworkSocket handles.
NetworkSocket accept(NetworkSocket s, sockaddr* addr, socklen_t* addrlen) {
  return NetworkSocket(wrapSocketFunction<NetworkSocket::native_handle_type>(
      ::accept, s, addr, addrlen));
}
int bind(NetworkSocket s, const sockaddr* name, socklen_t namelen) {
  if (kIsWindows && name->sa_family == AF_UNIX) {
    // Windows added support for AF_UNIX sockets, but didn't add
    // support for autobind sockets, so detect requests for autobind
    // sockets and treat them as invalid. (otherwise they don't trigger
    // an error, but also don't actually work)
    // An empty path (leading NUL in sun_path, which overlays sa_data)
    // is the autobind convention being rejected here.
    if (name->sa_data[0] == '\0') {
      errno = EINVAL;
      return -1;
    }
  }
  return wrapSocketFunction<int>(::bind, s, name, namelen);
}
// Closes the socket via the fd<->SOCKET map so that, on Windows, any file
// descriptor paired with the socket handle is released along with it.
int close(NetworkSocket s) {
  return netops::detail::SocketFileDescriptorMap::close(s.data);
}
int connect(NetworkSocket s, const sockaddr* name, socklen_t namelen) {
  auto r = wrapSocketFunction<int>(::connect, s, name, namelen);
#if _WIN32
  // Winsock reports a non-blocking connect in progress as WSAEWOULDBLOCK;
  // POSIX callers expect EINPROGRESS for that case.
  if (r == -1 && WSAGetLastError() == WSAEWOULDBLOCK) {
    errno = EINPROGRESS;
  }
#endif
  return r;
}
// Shim for ::getpeername() operating on NetworkSocket handles.
int getpeername(NetworkSocket s, sockaddr* name, socklen_t* namelen) {
  return wrapSocketFunction<int>(::getpeername, s, name, namelen);
}

// Shim for ::getsockname() operating on NetworkSocket handles.
int getsockname(NetworkSocket s, sockaddr* name, socklen_t* namelen) {
  return wrapSocketFunction<int>(::getsockname, s, name, namelen);
}
int getsockopt(
    NetworkSocket s,
    int level,
    int optname,
    void* optval,
    socklen_t* optlen) {
  auto ret = wrapSocketFunction<int>(
      ::getsockopt, s, level, optname, (char*)optval, optlen);
#if _WIN32
  if (optname == TCP_NODELAY && *optlen == 1) {
    // Windows is weird about this value, and documents it as a
    // BOOL (ie. int) but expects the variable to be bool (1-byte),
    // so we get to adapt the interface to work that way.
    // Widen the 1-byte result into the int the caller provided.
    *(int*)optval = *(uint8_t*)optval;
    *optlen = sizeof(int);
  }
#endif
  return ret;
}
/**
 * Shim for inet_aton() built on top of inet_addr(), for platforms that lack
 * the former. Parses the IPv4 address string `cp` into `inp` (network byte
 * order) and returns nonzero on success, 0 on failure.
 */
int inet_aton(const char* cp, in_addr* inp) {
  inp->s_addr = inet_addr(cp);
  if (inp->s_addr == INADDR_NONE) {
    // inet_addr() overloads INADDR_NONE (0xffffffff) as both its error value
    // and the parse of the valid limited-broadcast address, so accept the
    // canonical spelling of that address explicitly instead of reporting a
    // spurious failure for it.
    return std::strcmp(cp, "255.255.255.255") == 0 ? 1 : 0;
  }
  return 1;
}
// Shim for ::listen() operating on NetworkSocket handles.
int listen(NetworkSocket s, int backlog) {
  return wrapSocketFunction<int>(::listen, s, backlog);
}
int poll(PollDescriptor fds[], nfds_t nfds, int timeout) {
  // Make sure that PollDescriptor is byte-for-byte identical to pollfd,
  // so we don't need extra allocations just for the safety of this shim.
  static_assert(
      alignof(PollDescriptor) == alignof(pollfd),
      "PollDescriptor is misaligned");
  static_assert(
      sizeof(PollDescriptor) == sizeof(pollfd),
      "PollDescriptor is the wrong size");
  static_assert(
      offsetof(PollDescriptor, fd) == offsetof(pollfd, fd),
      "PollDescriptor.fd is at the wrong place");
  static_assert(
      sizeof(decltype(PollDescriptor().fd)) == sizeof(decltype(pollfd().fd)),
      "PollDescriptor.fd is the wrong size");
  static_assert(
      offsetof(PollDescriptor, events) == offsetof(pollfd, events),
      "PollDescriptor.events is at the wrong place");
  static_assert(
      sizeof(decltype(PollDescriptor().events)) ==
          sizeof(decltype(pollfd().events)),
      "PollDescriptor.events is the wrong size");
  static_assert(
      offsetof(PollDescriptor, revents) == offsetof(pollfd, revents),
      "PollDescriptor.revents is at the wrong place");
  static_assert(
      sizeof(decltype(PollDescriptor().revents)) ==
          sizeof(decltype(pollfd().revents)),
      "PollDescriptor.revents is the wrong size");
  // Pun it through (layout compatibility proven by the asserts above).
  pollfd* files = reinterpret_cast<pollfd*>(reinterpret_cast<void*>(fds));
#if _WIN32
  return ::WSAPoll(files, (ULONG)nfds, timeout);
#else
  return ::poll(files, nfds, timeout);
#endif
}
// Receive data from a connected socket (POSIX recv semantics).
// Windows has no MSG_DONTWAIT, so it is emulated there: probe readiness
// with a zero-timeout select() and fail with EWOULDBLOCK when no data is
// pending, then issue a normal (possibly blocking) recv.
ssize_t recv(NetworkSocket s, void* buf, size_t len, int flags) {
#if _WIN32
  if ((flags & MSG_DONTWAIT) == MSG_DONTWAIT) {
    // Strip the flag before the real call; WSA recv would reject it.
    flags &= ~MSG_DONTWAIT;

    // The FIONREAD ioctl is issued only to detect a dead/invalid socket
    // early; the readiness decision itself comes from select() below.
    u_long pendingRead = 0;
    if (ioctlsocket(s.data, FIONREAD, &pendingRead)) {
      errno = translate_wsa_error(WSAGetLastError());
      return -1;
    }

    fd_set readSet;
    FD_ZERO(&readSet);
    FD_SET(s.data, &readSet);
    timeval timeout{0, 0};
    // First select() argument (nfds) is ignored by Winsock.
    auto ret = select(1, &readSet, nullptr, nullptr, &timeout);
    if (ret == 0) {
      errno = EWOULDBLOCK;
      return -1;
    }
  }
  return wrapSocketFunction<ssize_t>(::recv, s, (char*)buf, (int)len, flags);
#else
  return wrapSocketFunction<ssize_t>(::recv, s, buf, len, flags);
#endif
}
// Receive a datagram and its source address (POSIX recvfrom semantics).
//
// Windows' recvfrom does not support MSG_TRUNC, so when it is requested
// we emulate it via WSARecvMsg, which reports truncation through
// MSG_TRUNC in WSAMSG::dwFlags; a truncated read returns a value larger
// than the buffer, matching the POSIX contract closely enough for
// callers that only test for truncation.
ssize_t recvfrom(
    NetworkSocket s,
    void* buf,
    size_t len,
    int flags,
    sockaddr* from,
    socklen_t* fromlen) {
#if _WIN32
  if ((flags & MSG_TRUNC) == MSG_TRUNC) {
    SOCKET h = s.data;

    WSABUF wBuf{};
    wBuf.buf = (CHAR*)buf;
    wBuf.len = (ULONG)len;
    WSAMSG wMsg{};
    wMsg.dwBufferCount = 1;
    wMsg.lpBuffers = &wBuf;
    wMsg.name = from;
    if (fromlen != nullptr) {
      wMsg.namelen = *fromlen;
    }

    // WSARecvMsg is an extension, so we don't get
    // the convenience of being able to call it directly, even though
    // WSASendMsg is part of the normal API -_-...
    LPFN_WSARECVMSG WSARecvMsg;
    GUID WSARecgMsg_GUID = WSAID_WSARECVMSG;
    DWORD recMsgBytes;
    WSAIoctl(
        h,
        SIO_GET_EXTENSION_FUNCTION_POINTER,
        &WSARecgMsg_GUID,
        sizeof(WSARecgMsg_GUID),
        &WSARecvMsg,
        sizeof(WSARecvMsg),
        &recMsgBytes,
        nullptr,
        nullptr);

    DWORD bytesReceived;
    int res = WSARecvMsg(h, &wMsg, &bytesReceived, nullptr, nullptr);
    // Report the (possibly updated) source-address length on every path.
    // Previously this was done only after the success return below, so a
    // successful receive never propagated the address length back.
    if (fromlen != nullptr) {
      *fromlen = wMsg.namelen;
    }
    if (res == 0) {
      return bytesReceived;
    }
    // Only translate the WSA error on failure, so a successful call does
    // not clobber errno with a stale value.
    errno = translate_wsa_error(WSAGetLastError());
    if ((wMsg.dwFlags & MSG_TRUNC) == MSG_TRUNC) {
      return wBuf.len + 1;
    }
    return -1;
  }
  return wrapSocketFunction<ssize_t>(
      ::recvfrom, s, (char*)buf, (int)len, flags, from, fromlen);
#else
  return wrapSocketFunction<ssize_t>(
      ::recvfrom, s, buf, len, flags, from, fromlen);
#endif
}
// Receive a message into scattered buffers (POSIX recvmsg semantics).
// On Windows this is emulated with WSARecvMsg; address translation
// (msg_name) is not supported there and such calls fail with -1.
ssize_t recvmsg(NetworkSocket s, msghdr* message, int flags) {
#if _WIN32
  (void)flags;
  SOCKET h = s.data;

  // Don't currently support the name translation.
  if (message->msg_name != nullptr || message->msg_namelen != 0) {
    return (ssize_t)-1;
  }
  WSAMSG msg;
  msg.name = nullptr;
  msg.namelen = 0;
  msg.Control.buf = (CHAR*)message->msg_control;
  msg.Control.len = (ULONG)message->msg_controllen;
  msg.dwFlags = 0;
  msg.dwBufferCount = (DWORD)message->msg_iovlen;
  // Translate the iovec array into WSABUFs; freed on every exit path.
  msg.lpBuffers = new WSABUF[message->msg_iovlen];
  SCOPE_EXIT {
    delete[] msg.lpBuffers;
  };
  for (size_t i = 0; i < message->msg_iovlen; i++) {
    msg.lpBuffers[i].buf = (CHAR*)message->msg_iov[i].iov_base;
    msg.lpBuffers[i].len = (ULONG)message->msg_iov[i].iov_len;
  }

  // WSARecvMsg is an extension, so we don't get
  // the convenience of being able to call it directly, even though
  // WSASendMsg is part of the normal API -_-...
  LPFN_WSARECVMSG WSARecvMsg;
  GUID WSARecgMsg_GUID = WSAID_WSARECVMSG;
  DWORD recMsgBytes;
  // NOTE(review): the WSAIoctl result is unchecked; if the extension
  // lookup ever failed, WSARecvMsg below would be uninitialized — confirm
  // whether that can happen on supported Windows versions.
  WSAIoctl(
      h,
      SIO_GET_EXTENSION_FUNCTION_POINTER,
      &WSARecgMsg_GUID,
      sizeof(WSARecgMsg_GUID),
      &WSARecvMsg,
      sizeof(WSARecvMsg),
      &recMsgBytes,
      nullptr,
      nullptr);

  DWORD bytesReceived;
  int res = WSARecvMsg(h, &msg, &bytesReceived, nullptr, nullptr);
  errno = translate_wsa_error(WSAGetLastError());
  return res == 0 ? (ssize_t)bytesReceived : -1;
#else
  return wrapSocketFunction<ssize_t>(::recvmsg, s, message, flags);
#endif
}
// Send data on a connected socket (POSIX send semantics). The Windows
// overload of ::send takes (const char*, int), hence the casts there.
ssize_t send(NetworkSocket s, const void* buf, size_t len, int flags) {
#if _WIN32
  return wrapSocketFunction<ssize_t>(
      ::send, s, (const char*)buf, (int)len, flags);
#else
  return wrapSocketFunction<ssize_t>(::send, s, buf, len, flags);
#endif
}
// Send a message from scattered buffers (POSIX sendmsg semantics).
// On Windows WSASendMsg only works on SOCK_DGRAM/SOCK_RAW sockets, so
// the call is emulated by sending each iovec in turn with send/sendto;
// a short or failed write after some progress returns the partial count
// (for EWOULDBLOCK) or -1, mirroring partial-send behavior.
ssize_t sendmsg(NetworkSocket socket, const msghdr* message, int flags) {
#if _WIN32
  (void)flags;
  SOCKET h = socket.data;

  // Unfortunately, WSASendMsg requires the socket to have been opened
  // as either SOCK_DGRAM or SOCK_RAW, but sendmsg has no such requirement,
  // so we have to implement it based on send instead :(
  ssize_t bytesSent = 0;
  for (size_t i = 0; i < message->msg_iovlen; i++) {
    int r = -1;
    // Use sendto when a destination address was supplied.
    if (message->msg_name != nullptr) {
      r = ::sendto(
          h,
          (const char*)message->msg_iov[i].iov_base,
          (int)message->msg_iov[i].iov_len,
          message->msg_flags,
          (const sockaddr*)message->msg_name,
          (int)message->msg_namelen);
    } else {
      r = ::send(
          h,
          (const char*)message->msg_iov[i].iov_base,
          (int)message->msg_iov[i].iov_len,
          message->msg_flags);
    }
    if (r == -1 || size_t(r) != message->msg_iov[i].iov_len) {
      errno = translate_wsa_error(WSAGetLastError());
      // Report partial progress for would-block, like a partial send.
      if (WSAGetLastError() == WSAEWOULDBLOCK && bytesSent > 0) {
        return bytesSent;
      }
      return -1;
    }
    bytesSent += r;
  }
  return bytesSent;
#else
  return wrapSocketFunction<ssize_t>(::sendmsg, socket, message, flags);
#endif
}
// Send multiple messages in one call (Linux sendmmsg semantics).
// Where the kernel call is unavailable, it is emulated by looping over
// sendmsg; on error mid-batch the number of messages already sent is
// returned (if any), otherwise the error itself.
int sendmmsg(
    NetworkSocket socket,
    mmsghdr* msgvec,
    unsigned int vlen,
    int flags) {
#if FOLLY_HAVE_SENDMMSG
  return wrapSocketFunction<int>(::sendmmsg, socket, msgvec, vlen, flags);
#else
  // implement via sendmsg
  for (unsigned int i = 0; i < vlen; i++) {
    ssize_t ret = sendmsg(socket, &msgvec[i].msg_hdr, flags);
    // in case of an error
    // we return the number of msgs sent if > 0
    // or an error if no msg was sent
    if (ret < 0) {
      if (i) {
        return static_cast<int>(i);
      }

      return static_cast<int>(ret);
    }
  }

  return static_cast<int>(vlen);
#endif
}
// Send a datagram to the given destination (POSIX sendto semantics).
// The Windows overload takes (const char*, int, ..., int), hence casts.
ssize_t sendto(
    NetworkSocket s,
    const void* buf,
    size_t len,
    int flags,
    const sockaddr* to,
    socklen_t tolen) {
#if _WIN32
  return wrapSocketFunction<ssize_t>(
      ::sendto, s, (const char*)buf, (int)len, flags, to, (int)tolen);
#else
  return wrapSocketFunction<ssize_t>(::sendto, s, buf, len, flags, to, tolen);
#endif
}
// Set a socket option (POSIX setsockopt semantics). On Windows,
// SO_REUSEADDR is silently ignored and SO_REUSEPORT is rejected, because
// Windows' SO_REUSEADDR has different (more permissive) semantics than
// the POSIX option of the same name; see the inline comments.
int setsockopt(
    NetworkSocket s,
    int level,
    int optname,
    const void* optval,
    socklen_t optlen) {
#if _WIN32
  if (optname == SO_REUSEADDR) {
    // We don't have an equivalent to the Linux & OSX meaning of this
    // on Windows, so ignore it.
    return 0;
  } else if (optname == SO_REUSEPORT) {
    // Windows's SO_REUSEADDR option is closer to SO_REUSEPORT than
    // it is to the Linux & OSX meaning of SO_REUSEADDR.
    return -1;
  }
  return wrapSocketFunction<int>(
      ::setsockopt, s, level, optname, (char*)optval, optlen);
#else
  return wrapSocketFunction<int>(
      ::setsockopt, s, level, optname, optval, optlen);
#endif
}
// Shut down part or all of a full-duplex connection
// (POSIX shutdown semantics: SHUT_RD / SHUT_WR / SHUT_RDWR).
int shutdown(NetworkSocket s, int how) {
  return wrapSocketFunction<int>(::shutdown, s, how);
}
// Create a new socket and wrap its native handle in a NetworkSocket.
NetworkSocket socket(int af, int type, int protocol) {
  return NetworkSocket(::socket(af, type, protocol));
}
// Create a pair of connected sockets (POSIX socketpair semantics).
// Windows has no native socketpair; only (PF_UNIX, SOCK_STREAM, 0) is
// accepted there, and it is emulated with a loopback AF_INET pair via
// libevent's evutil_socketpair.
int socketpair(int domain, int type, int protocol, NetworkSocket sv[2]) {
#if _WIN32
  if (domain != PF_UNIX || type != SOCK_STREAM || protocol != 0) {
    return -1;
  }
  intptr_t pair[2];
  auto r = evutil_socketpair(AF_INET, type, protocol, pair);
  if (r == -1) {
    return r;
  }
  sv[0] = NetworkSocket(static_cast<SOCKET>(pair[0]));
  sv[1] = NetworkSocket(static_cast<SOCKET>(pair[1]));
  return r;
#else
  int pair[2];
  auto r = ::socketpair(domain, type, protocol, pair);
  if (r == -1) {
    return r;
  }
  sv[0] = NetworkSocket(pair[0]);
  sv[1] = NetworkSocket(pair[1]);
  return r;
#endif
}
// Put the socket into non-blocking mode. Uses FIONBIO on Windows and
// fcntl(O_NONBLOCK) elsewhere. Returns 0 on success, -1 on failure.
int set_socket_non_blocking(NetworkSocket s) {
#if _WIN32
  u_long nonBlockingEnabled = 1;
  return ioctlsocket(s.data, FIONBIO, &nonBlockingEnabled);
#else
  // Preserve the other file status flags when setting O_NONBLOCK.
  int flags = fcntl(s.data, F_GETFL, 0);
  if (flags == -1) {
    return -1;
  }
  return fcntl(s.data, F_SETFL, flags | O_NONBLOCK);
#endif
}
// Prevent the socket from being inherited across exec (FD_CLOEXEC) /
// process creation (non-inheritable handle on Windows).
// Returns 0 on success, -1 on failure.
int set_socket_close_on_exec(NetworkSocket s) {
#if _WIN32
  if (SetHandleInformation((HANDLE)s.data, HANDLE_FLAG_INHERIT, 0)) {
    return 0;
  }
  return -1;
#else
  return fcntl(s.data, F_SETFD, FD_CLOEXEC);
#endif
}
} // namespace netops
} // namespace folly
<|start_filename|>folly/experimental/symbolizer/test/SignalHandlerTest.cpp<|end_filename|>
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/experimental/symbolizer/test/SignalHandlerTest.h>
#include <folly/experimental/symbolizer/SignalHandler.h>
#include <folly/CPortability.h>
#include <folly/FileUtil.h>
#include <folly/Range.h>
#include <folly/portability/GTest.h>
namespace folly {
namespace symbolizer {
namespace test {
namespace {
// Write a message straight to stderr with writeFull — no allocation or
// stdio buffering, as needed from inside a fatal-signal callback.
void print(StringPiece sp) {
  writeFull(STDERR_FILENO, sp.data(), sp.size());
}

// Fatal-signal callbacks whose output the test below matches, verifying
// that registered callbacks run in registration order.
void callback1() {
  print("Callback1\n");
}

void callback2() {
  print("Callback2\n");
}
} // namespace
// End-to-end check of the fatal signal handler: the child process dies
// with SIGSEGV (via failHard()), and EXPECT_DEATH matches the handler's
// stderr output — header line, signal description, a symbolized stack
// trace containing this test body and main, then both callbacks in
// registration order.
TEST(SignalHandler, Simple) {
  addFatalSignalCallback(callback1);
  addFatalSignalCallback(callback2);
  installFatalSignalHandler();
  installFatalSignalCallbacks();
  EXPECT_DEATH(
      failHard(),
      "^\\*\\*\\* Aborted at [0-9]+ \\(Unix time, try 'date -d @[0-9]+'\\) "
      "\\*\\*\\*\n"
      "\\*\\*\\* Signal 11 \\(SIGSEGV\\) \\(0x2a\\) received by PID [0-9]+ "
      "\\(pthread TID 0x[0-9a-f]+\\) \\(linux TID [0-9]+\\) "
      "\\(code: address not mapped to object\\), "
      "stack trace: \\*\\*\\*\n"
      ".*\n"
      ".* @ [0-9a-f]+.* folly::symbolizer::test::SignalHandler_Simple_Test"
      "::TestBody\\(\\).*\n"
      ".*\n"
      ".* @ [0-9a-f]+.* main.*\n"
      ".*\n"
      "Callback1\n"
      "Callback2\n"
      ".*");
}
} // namespace test
} // namespace symbolizer
} // namespace folly
// Can't use initFacebookLight since that would install its own signal handlers
// Can't use initFacebookNoSignals since we cannot depend on common
// Minimal gtest entry point: no folly init helpers here, so nothing else
// installs signal handlers before the test does.
int main(int argc, char** argv) {
  ::testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}
<|start_filename|>folly/detail/SocketFastOpen.h<|end_filename|>
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <sys/types.h>
#include <folly/net/NetworkSocket.h>
#include <folly/portability/Sockets.h>
#if !defined(FOLLY_ALLOW_TFO)
#if defined(__linux__) || defined(__APPLE__)
// enabled by default on Linux and macOS; define FOLLY_ALLOW_TFO yourself
// to force it on (or off) elsewhere
#define FOLLY_ALLOW_TFO 1
#endif
#endif
namespace folly {
namespace detail {
/**
 * tfo_sendmsg has the same semantics as sendmsg, but is used to
 * send with TFO data.
 */
ssize_t tfo_sendmsg(NetworkSocket sockfd, const struct msghdr* msg, int flags);
/**
* Enable TFO on a listening socket.
*/
int tfo_enable(NetworkSocket sockfd, size_t max_queue_size);
/**
* Check if TFO succeeded in being used.
*/
bool tfo_succeeded(NetworkSocket sockfd);
} // namespace detail
} // namespace folly
<|start_filename|>folly/ConcurrentSkipList-inl.h<|end_filename|>
/*
* Copyright 2011-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// @author: <NAME> <<EMAIL>>
#pragma once
#include <algorithm>
#include <atomic>
#include <climits>
#include <cmath>
#include <memory>
#include <mutex>
#include <type_traits>
#include <vector>
#include <boost/random.hpp>
#include <glog/logging.h>
#include <folly/Memory.h>
#include <folly/ThreadLocal.h>
#include <folly/synchronization/MicroSpinLock.h>
namespace folly {
namespace detail {
template <typename ValT, typename NodeT>
class csl_iterator;
/**
 * A single node of the concurrent skip list.
 *
 * The node is allocated with a variable-length tail (skip_[]) holding one
 * atomic forward pointer per level, so nodes must be created/destroyed
 * through create()/destroy() rather than plain new/delete. Node state
 * (head / marked-for-removal / fully-linked) is packed into an atomic
 * flags word; per-node mutation is serialized by a MicroSpinLock.
 */
template <typename T>
class SkipListNode {
  // Bit flags stored in flags_.
  enum : uint16_t {
    IS_HEAD_NODE = 1,
    MARKED_FOR_REMOVAL = (1 << 1),
    FULLY_LINKED = (1 << 2),
  };

 public:
  typedef T value_type;

  // Not copyable: nodes own a variable-length atomic-pointer tail.
  SkipListNode(const SkipListNode&) = delete;
  SkipListNode& operator=(const SkipListNode&) = delete;

  // Allocate and placement-construct a node of the given height.
  // The allocation covers the fixed part plus `height` atomic pointers.
  template <
      typename NodeAlloc,
      typename U,
      typename =
          typename std::enable_if<std::is_convertible<U, T>::value>::type>
  static SkipListNode*
  create(NodeAlloc& alloc, int height, U&& data, bool isHead = false) {
    DCHECK(height >= 1 && height < 64) << height;

    size_t size =
        sizeof(SkipListNode) + height * sizeof(std::atomic<SkipListNode*>);
    auto storage = std::allocator_traits<NodeAlloc>::allocate(alloc, size);
    // do placement new
    return new (storage)
        SkipListNode(uint8_t(height), std::forward<U>(data), isHead);
  }

  // Destroy and deallocate a node created by create(); the size passed to
  // deallocate must match the variable-length allocation above.
  template <typename NodeAlloc>
  static void destroy(NodeAlloc& alloc, SkipListNode* node) {
    size_t size = sizeof(SkipListNode) +
        node->height_ * sizeof(std::atomic<SkipListNode*>);
    node->~SkipListNode();
    std::allocator_traits<NodeAlloc>::deallocate(alloc, node, size);
  }

  // True when destroy() is a no-op for this allocator (e.g. an arena
  // whose deallocate is trivial and T is trivially destructible); used to
  // select the no-op NodeRecycler specialization.
  template <typename NodeAlloc>
  struct DestroyIsNoOp : StrictConjunction<
                             AllocatorHasTrivialDeallocate<NodeAlloc>,
                             std::is_trivially_destructible<SkipListNode>> {};

  // copy the head node to a new head node assuming lock acquired
  SkipListNode* copyHead(SkipListNode* node) {
    DCHECK(node != nullptr && height_ > node->height_);
    setFlags(node->getFlags());
    for (uint8_t i = 0; i < node->height_; ++i) {
      setSkip(i, node->skip(i));
    }
    return this;
  }

  // Forward pointer at the given level (consume-ordered load).
  inline SkipListNode* skip(int layer) const {
    DCHECK_LT(layer, height_);
    return skip_[layer].load(std::memory_order_consume);
  }

  // next valid node as in the linked list
  SkipListNode* next() {
    SkipListNode* node;
    // Skip over nodes already marked for removal at the bottom level.
    for (node = skip(0); (node != nullptr && node->markedForRemoval());
         node = node->skip(0)) {
    }
    return node;
  }

  // Publish a forward pointer at level h (release store).
  void setSkip(uint8_t h, SkipListNode* next) {
    DCHECK_LT(h, height_);
    skip_[h].store(next, std::memory_order_release);
  }

  value_type& data() {
    return data_;
  }
  const value_type& data() const {
    return data_;
  }
  // Highest level index (0-based) this node participates in.
  int maxLayer() const {
    return height_ - 1;
  }
  int height() const {
    return height_;
  }

  // Lock this node for structural mutation; released by the guard.
  std::unique_lock<MicroSpinLock> acquireGuard() {
    return std::unique_lock<MicroSpinLock>(spinLock_);
  }

  bool fullyLinked() const {
    return getFlags() & FULLY_LINKED;
  }
  bool markedForRemoval() const {
    return getFlags() & MARKED_FOR_REMOVAL;
  }
  bool isHeadNode() const {
    return getFlags() & IS_HEAD_NODE;
  }

  void setIsHeadNode() {
    setFlags(uint16_t(getFlags() | IS_HEAD_NODE));
  }
  void setFullyLinked() {
    setFlags(uint16_t(getFlags() | FULLY_LINKED));
  }
  void setMarkedForRemoval() {
    setFlags(uint16_t(getFlags() | MARKED_FOR_REMOVAL));
  }

 private:
  // Note! this can only be called from create() as a placement new.
  template <typename U>
  SkipListNode(uint8_t height, U&& data, bool isHead)
      : height_(height), data_(std::forward<U>(data)) {
    spinLock_.init();
    setFlags(0);
    if (isHead) {
      setIsHeadNode();
    }
    // need to explicitly init the dynamic atomic pointer array
    for (uint8_t i = 0; i < height_; ++i) {
      new (&skip_[i]) std::atomic<SkipListNode*>(nullptr);
    }
  }

  // Placement-constructed members in the flexible tail need explicit
  // destruction too.
  ~SkipListNode() {
    for (uint8_t i = 0; i < height_; ++i) {
      skip_[i].~atomic();
    }
  }

  uint16_t getFlags() const {
    return flags_.load(std::memory_order_consume);
  }
  void setFlags(uint16_t flags) {
    flags_.store(flags, std::memory_order_release);
  }

  // TODO(xliu): on x86_64, it's possible to squeeze these into
  // skip_[0] to maybe save 8 bytes depending on the data alignments.
  // NOTE: currently this is x86_64 only anyway, due to the
  // MicroSpinLock.
  std::atomic<uint16_t> flags_;
  const uint8_t height_;
  MicroSpinLock spinLock_;
  value_type data_;

  // Flexible array of per-level forward pointers; actual length is
  // height_, allocated by create().
  std::atomic<SkipListNode*> skip_[0];
};
/**
 * Singleton that draws random tower heights for skip list nodes with a
 * per-level "keep going" probability of 1/e, using precomputed CDF and
 * size-limit tables so each draw is a single uniform sample plus a scan.
 */
class SkipListRandomHeight {
  enum { kMaxHeight = 64 };

 public:
  // make it a singleton.
  static SkipListRandomHeight* instance() {
    static SkipListRandomHeight instance_;
    return &instance_;
  }

  // Draw a height in [1, maxHeight]; lookupTable_ holds the cumulative
  // probability of each height, so the first bucket >= p wins.
  int getHeight(int maxHeight) const {
    DCHECK_LE(maxHeight, kMaxHeight) << "max height too big!";
    double p = randomProb();
    for (int i = 0; i < maxHeight; ++i) {
      if (p < lookupTable_[i]) {
        return i + 1;
      }
    }
    return maxHeight;
  }

  // Recommended maximum list size before growing to height+1.
  size_t getSizeLimit(int height) const {
    DCHECK_LT(height, kMaxHeight);
    return sizeLimitTable_[height];
  }

 private:
  SkipListRandomHeight() {
    initLookupTable();
  }

  void initLookupTable() {
    // set skip prob = 1/E
    static const double kProbInv = exp(1);
    static const double kProb = 1.0 / kProbInv;
    static const size_t kMaxSizeLimit = std::numeric_limits<size_t>::max();

    double sizeLimit = 1;
    double p = lookupTable_[0] = (1 - kProb);
    sizeLimitTable_[0] = 1;
    // lookupTable_[i] = P(height <= i+1); sizeLimitTable_[i] = e^i,
    // saturated at kMaxSizeLimit once the double exceeds it.
    for (int i = 1; i < kMaxHeight - 1; ++i) {
      p *= kProb;
      sizeLimit *= kProbInv;
      lookupTable_[i] = lookupTable_[i - 1] + p;
      sizeLimitTable_[i] = sizeLimit > kMaxSizeLimit
          ? kMaxSizeLimit
          : static_cast<size_t>(sizeLimit);
    }
    lookupTable_[kMaxHeight - 1] = 1;
    sizeLimitTable_[kMaxHeight - 1] = kMaxSizeLimit;
  }

  // Uniform sample in [0, 1) from a thread-local generator (no locking).
  static double randomProb() {
    static ThreadLocal<boost::lagged_fibonacci2281> rng_;
    return (*rng_)();
  }

  double lookupTable_[kMaxHeight];
  size_t sizeLimitTable_[kMaxHeight];
};
template <typename NodeType, typename NodeAlloc, typename = void>
class NodeRecycler;
/**
 * NodeRecycler specialization for allocators where node destruction is a
 * real operation. Removed nodes are parked here until no accessor is
 * left (refs_ drops to 1 for the releasing thread), at which point they
 * are destroyed in bulk — a simple quiescent-state reclamation scheme
 * that keeps concurrent readers from touching freed memory.
 */
template <typename NodeType, typename NodeAlloc>
class NodeRecycler<
    NodeType,
    NodeAlloc,
    typename std::enable_if<
        !NodeType::template DestroyIsNoOp<NodeAlloc>::value>::type> {
 public:
  explicit NodeRecycler(const NodeAlloc& alloc)
      : refs_(0), dirty_(false), alloc_(alloc) {
    lock_.init();
  }

  explicit NodeRecycler() : refs_(0), dirty_(false) {
    lock_.init();
  }

  // All accessors must be gone by now; destroy anything still parked.
  ~NodeRecycler() {
    CHECK_EQ(refs(), 0);
    if (nodes_) {
      for (auto& node : *nodes_) {
        NodeType::destroy(alloc_, node);
      }
    }
  }

  // Park a logically-removed node for deferred destruction. The caller
  // must hold a reference (addRef), hence the DCHECK.
  void add(NodeType* node) {
    std::lock_guard<MicroSpinLock> g(lock_);
    if (nodes_.get() == nullptr) {
      nodes_ = std::make_unique<std::vector<NodeType*>>(1, node);
    } else {
      nodes_->push_back(node);
    }
    DCHECK_GT(refs(), 0);
    dirty_.store(true, std::memory_order_relaxed);
  }

  // Register an accessor; returns the previous reference count.
  int addRef() {
    return refs_.fetch_add(1, std::memory_order_relaxed);
  }

  // Drop an accessor reference, destroying parked nodes when this was
  // the last accessor and there is something to reclaim.
  int releaseRef() {
    // We don't expect to clean the recycler immediately everytime it is OK
    // to do so. Here, it is possible that multiple accessors all release at
    // the same time but nobody would clean the recycler here. If this
    // happens, the recycler will usually still get cleaned when
    // such a race doesn't happen. The worst case is the recycler will
    // eventually get deleted along with the skiplist.
    if (LIKELY(!dirty_.load(std::memory_order_relaxed) || refs() > 1)) {
      return refs_.fetch_add(-1, std::memory_order_relaxed);
    }

    std::unique_ptr<std::vector<NodeType*>> newNodes;
    {
      std::lock_guard<MicroSpinLock> g(lock_);
      if (nodes_.get() == nullptr || refs() > 1) {
        return refs_.fetch_add(-1, std::memory_order_relaxed);
      }
      // once refs_ reaches 1 and there is no other accessor, it is safe to
      // remove all the current nodes in the recycler, as we already acquired
      // the lock here so no more new nodes can be added, even though new
      // accessors may be added after that.
      newNodes.swap(nodes_);
      dirty_.store(false, std::memory_order_relaxed);
    }

    // TODO(xliu) should we spawn a thread to do this when there are large
    // number of nodes in the recycler?
    for (auto& node : *newNodes) {
      NodeType::destroy(alloc_, node);
    }

    // decrease the ref count at the very end, to minimize the
    // chance of other threads acquiring lock_ to clear the deleted
    // nodes again.
    return refs_.fetch_add(-1, std::memory_order_relaxed);
  }

  NodeAlloc& alloc() {
    return alloc_;
  }

 private:
  int refs() const {
    return refs_.load(std::memory_order_relaxed);
  }

  std::unique_ptr<std::vector<NodeType*>> nodes_;
  std::atomic<int32_t> refs_; // current number of visitors to the list
  std::atomic<bool> dirty_; // whether *nodes_ is non-empty
  MicroSpinLock lock_; // protects access to *nodes_
  NodeAlloc alloc_;
};
// In case of arena allocator, no recycling is necessary, and it's possible
// to save on ConcurrentSkipList size.
// In case of arena allocator, no recycling is necessary, and it's possible
// to save on ConcurrentSkipList size.
// All operations are no-ops except holding the allocator; note that
// addRef/releaseRef intentionally return void here (callers ignore the
// counting specialization's return value).
template <typename NodeType, typename NodeAlloc>
class NodeRecycler<
    NodeType,
    NodeAlloc,
    typename std::enable_if<
        NodeType::template DestroyIsNoOp<NodeAlloc>::value>::type> {
 public:
  explicit NodeRecycler(const NodeAlloc& alloc) : alloc_(alloc) {}

  void addRef() {}
  void releaseRef() {}

  void add(NodeType* /* node */) {}

  NodeAlloc& alloc() {
    return alloc_;
  }

 private:
  NodeAlloc alloc_;
};
} // namespace detail
} // namespace folly
<|start_filename|>folly/executors/TimekeeperScheduledExecutor.cpp<|end_filename|>
/*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/executors/TimekeeperScheduledExecutor.h>
#include <folly/futures/Future.h>
namespace folly {
// Construct a TimekeeperScheduledExecutor wrapping `parent` and hand it
// back as a KeepAlive; lifetime is managed purely by the keep-alive
// refcount (see keepAliveRelease below), never by direct delete.
/* static */ Executor::KeepAlive<TimekeeperScheduledExecutor>
TimekeeperScheduledExecutor::create(
    Executor::KeepAlive<> parent,
    Function<std::shared_ptr<Timekeeper>()> getTimekeeper) {
  return makeKeepAlive<TimekeeperScheduledExecutor>(
      new TimekeeperScheduledExecutor(
          std::move(parent), std::move(getTimekeeper)));
}
// Invoke a task, logging (rather than propagating) anything it throws so
// one failing task can't take down the executor thread.
void TimekeeperScheduledExecutor::run(Func func) {
  try {
    func();
  } catch (std::exception const& ex) {
    LOG(ERROR) << "func threw unhandled exception " << folly::exceptionStr(ex);
  } catch (...) {
    LOG(ERROR) << "func threw unhandled non-exception object";
  }
}
// Forward a task to the parent executor; the captured keep-alive keeps
// this executor alive until the task has run.
void TimekeeperScheduledExecutor::add(Func func) {
  parent_->add(
      [keepAlive = getKeepAliveToken(this), f = std::move(func)]() mutable {
        keepAlive->run(std::move(f));
      });
}
// Schedule `func` to run at (approximately) time point `t`. A deadline
// still in the future is delegated to the Timekeeper; one already past
// runs as an ordinary add(). Throws
// TimekeeperScheduledExecutorNoTimekeeper if no timekeeper is available.
void TimekeeperScheduledExecutor::scheduleAt(
    Func&& func,
    ScheduledExecutor::TimePoint const& t) {
  auto delay = std::chrono::duration_cast<folly::Duration>(
      t - std::chrono::steady_clock::now());
  if (delay.count() > 0) {
    auto tk = getTimekeeper_();
    if (UNLIKELY(!tk)) {
      throw TimekeeperScheduledExecutorNoTimekeeper();
    }
    // After the delay elapses, hop back onto the parent executor and run
    // the task there; the keep-alive pins this executor until then.
    tk->after(delay)
        .via(parent_.copy())
        .thenValue([keepAlive = getKeepAliveToken(this), f = std::move(func)](
                       auto&&) mutable { keepAlive->run(std::move(f)); });
  } else {
    add(std::move(func));
  }
}
// Take a keep-alive reference. Relaxed ordering suffices for the
// increment; the count must already be positive (the creating reference).
bool TimekeeperScheduledExecutor::keepAliveAcquire() {
  auto keepAliveCounter =
      keepAliveCounter_.fetch_add(1, std::memory_order_relaxed);
  DCHECK(keepAliveCounter > 0);
  return true;
}

// Drop a keep-alive reference; the executor deletes itself when the last
// one goes away (acq_rel makes prior work visible to the deleter).
void TimekeeperScheduledExecutor::keepAliveRelease() {
  auto keepAliveCounter =
      keepAliveCounter_.fetch_sub(1, std::memory_order_acq_rel);
  DCHECK(keepAliveCounter > 0);
  if (keepAliveCounter == 1) {
    delete this;
  }
}
} // namespace folly
<|start_filename|>folly/synchronization/detail/test/ProxyLockableTest.cpp<|end_filename|>
/*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/synchronization/detail/ProxyLockable.h>
#include <folly/Benchmark.h>
#include <folly/Random.h>
#include <folly/portability/GTest.h>
#include <folly/synchronization/DistributedMutex.h>
#include <atomic>
#include <chrono>
#include <mutex>
#include <thread>
#include <tuple>
#include <vector>
using namespace std::literals;
namespace folly {
namespace detail {
namespace {
DEFINE_int64(stress_test_seconds, 2, "Duration for stress tests");
// Minimal proxy-lockable mutex for the tests: lock() returns a proxy
// value (the int 1) that must be handed back to unlock(), mirroring the
// ProxyLockable protocol, and locked_ tracks the net lock count so the
// tests can assert on it.
class MockMutex {
 public:
  int lock() {
    ++locked_;
    return 1;
  }
  void unlock(int integer) {
    --locked_;
    // The proxy handed out by lock() must round-trip unchanged.
    EXPECT_EQ(integer, 1);
  }
  int try_lock() {
    // Zero is the "no proxy acquired" value.
    if (!locked_) {
      return lock();
    }
    return 0;
  }
  // Timed variants never actually wait; they just try once.
  template <typename Duration>
  int try_lock_for(const Duration&) {
    return try_lock();
  }
  template <typename TimePoint>
  int try_lock_until(const TimePoint&) {
    return try_lock();
  }

  // counts the number of times the mutex has been locked
  int locked_{0};
};
} // namespace
// Test fixture (no shared state; exists for the TEST_F grouping).
class ProxyLockableTest : public ::testing::Test {};

// Constructing a unique_lock over a mutex acquires it.
TEST_F(ProxyLockableTest, UniqueLockBasic) {
  auto mutex = MockMutex{};
  auto lck = ProxyLockableUniqueLock<MockMutex>{mutex};
  std::ignore = lck;

  EXPECT_EQ(mutex.locked_, 1);
}

// A default-constructed lock holds no mutex, no proxy, and is not owned.
TEST_F(ProxyLockableTest, UniqueLockDefaultConstruct) {
  auto lck = ProxyLockableUniqueLock<MockMutex>{};
  EXPECT_FALSE(lck.mutex());
  EXPECT_FALSE(lck.proxy());
  EXPECT_FALSE(lck.owns_lock());
  EXPECT_FALSE(lck.operator bool());
}

// The locking constructor records both the mutex and the lock proxy.
TEST_F(ProxyLockableTest, UniqueLockLockOnConstruct) {
  auto mutex = MockMutex{};
  auto lck = ProxyLockableUniqueLock<MockMutex>{mutex};
  EXPECT_TRUE(lck.mutex());
  EXPECT_TRUE(lck.proxy());

  EXPECT_EQ(mutex.locked_, 1);
}
// Moving a lock transfers both the mutex pointer and the proxy; the
// moved-from lock is left empty, and the mutex stays locked exactly once
// throughout the chain of moves.
TEST_F(ProxyLockableTest, UniqueLockConstructMoveConstructAssign) {
  auto mutex = MockMutex{};
  auto one = ProxyLockableUniqueLock<MockMutex>{mutex};
  EXPECT_TRUE(one.mutex());
  EXPECT_TRUE(one.proxy());

  auto two = std::move(one);
  EXPECT_FALSE(one.mutex());
  EXPECT_FALSE(one.proxy());
  EXPECT_FALSE(one.owns_lock());
  EXPECT_FALSE(one.operator bool());
  EXPECT_TRUE(two.mutex());
  EXPECT_TRUE(two.proxy());

  // Moving from an already-moved-from lock yields another empty lock.
  // (These four assertions previously checked mutex() twice each; the
  // duplicated calls were a copy-paste slip — each pair is meant to
  // cover mutex() and proxy().)
  auto three = std::move(one);
  EXPECT_FALSE(one.mutex());
  EXPECT_FALSE(one.proxy());
  EXPECT_FALSE(three.mutex());
  EXPECT_FALSE(three.proxy());

  auto four = std::move(two);
  EXPECT_TRUE(four.mutex());
  EXPECT_TRUE(four.proxy());
  // `two` gave up ownership to `four` (was mistakenly re-checking `one`).
  EXPECT_FALSE(two.mutex());
  EXPECT_FALSE(two.proxy());

  EXPECT_EQ(mutex.locked_, 1);
}
// std::defer_lock skips acquisition at construction; an explicit lock()
// acquires later.
TEST_F(ProxyLockableTest, UniqueLockDeferLock) {
  auto mutex = MockMutex{};
  auto lck = ProxyLockableUniqueLock<MockMutex>{mutex, std::defer_lock};
  EXPECT_EQ(mutex.locked_, 0);

  lck.lock();
  EXPECT_EQ(mutex.locked_, 1);
}
namespace {
// Shared scenario for the try/timed acquisition constructors: `make`
// builds a lock over a mutex. First an uncontended acquisition must
// succeed (and release on scope exit); then, with the mutex pre-locked,
// the attempt must fail, leaving a lock that knows its mutex but holds
// no proxy.
template <typename Make>
void testTryToLock(Make make) {
  auto mutex = MockMutex{};
  {
    auto lck = make(mutex);
    EXPECT_TRUE(lck.mutex());
    EXPECT_TRUE(lck.proxy());

    EXPECT_EQ(mutex.locked_, 1);
  }
  EXPECT_EQ(mutex.locked_, 0);

  mutex.lock();
  auto lck = make(mutex);
  EXPECT_EQ(mutex.locked_, 1);
  EXPECT_TRUE(lck.mutex());
  EXPECT_FALSE(lck.proxy());
}
} // namespace

// std::try_to_lock constructor.
TEST_F(ProxyLockableTest, UniqueLockTryToLock) {
  testTryToLock([](auto& mutex) {
    using Mutex = std::decay_t<decltype(mutex)>;
    return ProxyLockableUniqueLock<Mutex>{mutex, std::try_to_lock};
  });
}

// Duration-based timed constructor (try_lock_for).
TEST_F(ProxyLockableTest, UniqueLockTimedLockDuration) {
  testTryToLock([](auto& mutex) {
    using Mutex = std::decay_t<decltype(mutex)>;
    return ProxyLockableUniqueLock<Mutex>{mutex, 1s};
  });
}

// Deadline-based timed constructor (try_lock_until).
TEST_F(ProxyLockableTest, UniqueLockTimedLockWithTime) {
  testTryToLock([](auto& mutex) {
    using Mutex = std::decay_t<decltype(mutex)>;
    return ProxyLockableUniqueLock<Mutex>{
        mutex, std::chrono::steady_clock::now() + 1s};
  });
}
// After a deferred construction, lock() acquires the proxy.
TEST_F(ProxyLockableTest, UniqueLockLockExplicitLockAfterDefer) {
  auto mutex = MockMutex{};
  auto lck = ProxyLockableUniqueLock<MockMutex>{mutex, std::defer_lock};
  EXPECT_TRUE(lck.mutex());
  EXPECT_FALSE(lck.proxy());

  lck.lock();

  EXPECT_TRUE(lck.mutex());
  EXPECT_TRUE(lck.proxy());
  EXPECT_EQ(mutex.locked_, 1);
}

// ... and a subsequent unlock() releases the mutex again.
TEST_F(ProxyLockableTest, UniqueLockLockExplicitUnlockAfterDefer) {
  auto mutex = MockMutex{};
  auto lck = ProxyLockableUniqueLock<MockMutex>{mutex, std::defer_lock};
  EXPECT_TRUE(lck.mutex());
  EXPECT_FALSE(lck.proxy());

  lck.lock();

  EXPECT_TRUE(lck.mutex());
  EXPECT_TRUE(lck.proxy());
  EXPECT_EQ(mutex.locked_, 1);

  lck.unlock();
  EXPECT_EQ(mutex.locked_, 0);
}

// try_lock() on a deferred lock succeeds when the mutex is free.
TEST_F(ProxyLockableTest, UniqueLockLockExplicitTryLockAfterDefer) {
  auto mutex = MockMutex{};
  auto lck = ProxyLockableUniqueLock<MockMutex>{mutex, std::defer_lock};
  EXPECT_TRUE(lck.mutex());
  EXPECT_FALSE(lck.proxy());

  EXPECT_TRUE(lck.try_lock());

  EXPECT_TRUE(lck.mutex());
  EXPECT_TRUE(lck.proxy());
  EXPECT_EQ(mutex.locked_, 1);

  lck.unlock();
  EXPECT_EQ(mutex.locked_, 0);
}
// Misuse (locking an empty lock, double-locking, double-unlocking,
// try_lock while owned) throws std::system_error — but only in debug
// builds, hence the kIsDebug guards.
TEST_F(ProxyLockableTest, UniqueLockExceptionOnLock) {
  {
    // lock() with no associated mutex.
    auto lck = ProxyLockableUniqueLock<MockMutex>{};
    if (kIsDebug) {
      EXPECT_THROW(lck.lock(), std::system_error);
    }
  }

  {
    // lock() when already owned.
    auto mutex = MockMutex{};
    auto lck = ProxyLockableUniqueLock<MockMutex>{mutex};
    if (kIsDebug) {
      EXPECT_THROW(lck.lock(), std::system_error);
    }
  }
}

TEST_F(ProxyLockableTest, UniqueLockExceptionOnUnlock) {
  {
    // unlock() with no associated mutex.
    auto lck = ProxyLockableUniqueLock<MockMutex>{};
    if (kIsDebug) {
      EXPECT_THROW(lck.unlock(), std::system_error);
    }
  }

  {
    // double unlock().
    auto mutex = MockMutex{};
    auto lck = ProxyLockableUniqueLock<MockMutex>{mutex};
    lck.unlock();
    if (kIsDebug) {
      EXPECT_THROW(lck.unlock(), std::system_error);
    }
  }
}

TEST_F(ProxyLockableTest, UniqueLockExceptionOnTryLock) {
  {
    // try_lock() with no associated mutex.
    auto lck = ProxyLockableUniqueLock<MockMutex>{};
    if (kIsDebug) {
      EXPECT_THROW(lck.try_lock(), std::system_error);
    }
  }

  {
    // try_lock() when already owned.
    auto mutex = MockMutex{};
    auto lck = ProxyLockableUniqueLock<MockMutex>{mutex};
    if (kIsDebug) {
      EXPECT_THROW(lck.try_lock(), std::system_error);
    }
  }
}
namespace {
// Adapter giving std::mutex the proxy-lockable shape (lock() returns a
// token that unlock() verifies).
class StdMutexWrapper {
 public:
  int lock() {
    mutex_.lock();
    return 1;
  }
  void unlock(int value) {
    EXPECT_EQ(value, 1);
    mutex_.unlock();
  }

  std::mutex mutex_{};
};

// Hammer ProxyLockableUniqueLock from hardware_concurrency() threads for
// FLAGS_stress_test_seconds: each iteration takes the lock and asserts
// mutual exclusion via the 0->1->0 pattern on a shared counter.
template <typename Mutex>
void stressTest() {
  const auto&& kNumThreads = std::thread::hardware_concurrency();

  auto&& mutex = Mutex{};
  auto&& threads = std::vector<std::thread>{};
  auto&& atomic = std::atomic<std::uint64_t>{0};
  auto&& stop = std::atomic<bool>{false};

  // try and randomize thread scheduling
  auto&& randomize = [] {
    if (folly::Random::oneIn(100)) {
      /* sleep override */
      std::this_thread::sleep_for(500us);
    }
  };

  for (auto i = std::size_t{0}; i < kNumThreads; ++i) {
    threads.emplace_back([&] {
      while (!stop.load()) {
        auto lck = ProxyLockableUniqueLock<Mutex>{mutex};
        // If the lock excludes properly, no other thread can have bumped
        // the counter between these two operations.
        EXPECT_EQ(atomic.fetch_add(1, std::memory_order_relaxed), 0);
        randomize();
        EXPECT_EQ(atomic.fetch_sub(1, std::memory_order_relaxed), 1);
      }
    });
  }

  /* sleep override */
  std::this_thread::sleep_for(std::chrono::seconds{FLAGS_stress_test_seconds});
  stop.store(true);
  for (auto& thread : threads) {
    thread.join();
  }
}
} // namespace
// Run the stress scenario over both a std::mutex wrapper and folly's
// DistributedMutex (a native proxy-lockable).
TEST_F(ProxyLockableTest, StressLockOnConstructionStdMutex) {
  stressTest<StdMutexWrapper>();
}

TEST_F(ProxyLockableTest, StressLockOnConstructionFollyDistributedMutex) {
  stressTest<folly::DistributedMutex>();
}

// The scoped (non-movable) guard variant also locks on construction.
TEST_F(ProxyLockableTest, LockGuardBasic) {
  auto mutex = MockMutex{};
  auto&& lck = ProxyLockableLockGuard<MockMutex>{mutex};
  std::ignore = lck;

  EXPECT_TRUE(mutex.locked_);
}
} // namespace detail
} // namespace folly
<|start_filename|>folly/experimental/crypto/detail/LtHashInternal.h<|end_filename|>
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <folly/Range.h>
namespace folly {
namespace crypto {
namespace detail {
// As of 2019, most (or all?) modern Intel CPUs have 64-byte L1 cache lines,
// and aligning data buffers on cache line boundaries on such CPUs
// noticeably benefits performance (up to 10% difference).
//
// If you change this, code that depends on it in MathOperation_*.cpp may
// break and could need fixing.
constexpr size_t kCacheLineSize = 64;
// Invariants about kCacheLineSize that other logic depends on: it must be
// a power of 2 and cannot be zero.
static_assert(kCacheLineSize > 0, "kCacheLineSize cannot be 0");
static_assert(
    (kCacheLineSize & (kCacheLineSize - 1)) == 0,
    "kCacheLineSize must be a power of 2");
/**
 * Defines available math engines that we can use to perform element-wise
 * modular addition or subtraction of element vectors.
 * - AUTO: pick the best available, from best to worst: AVX2, SSE2, SIMPLE
 * - SIMPLE: perform addition/subtraction using uint64_t values
 * - SSE2: perform addition/subtraction using 128-bit __m128i values.
 *   Intel only, requires SSE2 instruction support.
 * - AVX2: perform addition/subtraction using 256-bit __m256i values.
 *   Intel only, requires AVX2 instruction support.
 * Whether an engine can actually be used is queried at runtime through
 * MathOperation<E>::isAvailable() / isImplemented() below.
 */
enum class MathEngine { AUTO, SIMPLE, SSE2, AVX2 };
/**
 * This actually implements the bulk addition/subtraction operations.
 * All members are static; the definitions are provided by explicit template
 * instantiations (one specialization per engine) in MathOperation_*.cpp.
 */
template <MathEngine E>
struct MathOperation {
  /**
   * Returns true if the math engine E is supported by the CPU and OS and is
   * implemented.
   */
  static bool isAvailable();
  /**
   * Returns true if the math engine E is implemented.
   */
  static bool isImplemented();
  /**
   * Performs element-wise modular addition of 2 vectors of elements packed
   * into the buffers b1 and b2. Writes the output into the buffer out. The
   * output buffer may be the same as one of the input buffers. The dataMask
   * parameter should be Bits<B>::kDataMask() where B is the element size
   * in bits.
   */
  static void add(
      uint64_t dataMask,
      size_t bitsPerElement,
      ByteRange b1,
      ByteRange b2,
      MutableByteRange out);
  /**
   * Performs element-wise modular subtraction of 2 groups of elements packed
   * into the buffers b1 and b2. Note that (a - b) % M == (a + (M - b)) % M,
   * which is how we actually implement it to avoid underflow issues. The
   * dataMask parameter should be Bits<B>::kDataMask() where B is the element
   * size in bits.
   */
  static void sub(
      uint64_t dataMask,
      size_t bitsPerElement,
      ByteRange b1,
      ByteRange b2,
      MutableByteRange out);
  /**
   * Clears the padding bits of the given buffer according to the given
   * data mask: for each uint64_t in the input buffer, all 0 bits in the
   * data mask are cleared, and all 1 bits in the data mask are preserved.
   */
  static void clearPaddingBits(uint64_t dataMask, MutableByteRange buf);
  /**
   * Returns true if the given checksum buffer contains 0 bits at the padding
   * bit positions, according to the given data mask.
   */
  static bool checkPaddingBits(uint64_t dataMask, ByteRange buf);
};
// These forward declarations of explicit template instantiations seem to be
// required to get things to compile. I tried to get things to work without it,
// but the compiler complained when I had any AVX2 types in this header, so I
// think they need to be hidden in the .cpp file for some reason.
//
// For each engine E, the macro forward-declares the specialization of every
// MathOperation<E> member, then declares the explicit instantiation itself as
// `extern template` so that its definition stays in the corresponding
// MathOperation_*.cpp translation unit.
#define FORWARD_DECLARE_EXTERN_TEMPLATE(E)                                   \
  template <>                                                                \
  bool MathOperation<E>::isAvailable();                                      \
  template <>                                                                \
  bool MathOperation<E>::isImplemented();                                    \
  template <>                                                                \
  void MathOperation<E>::add(                                                \
      uint64_t dataMask,                                                     \
      size_t bitsPerElement,                                                 \
      ByteRange b1,                                                          \
      ByteRange b2,                                                          \
      MutableByteRange out);                                                 \
  template <>                                                                \
  void MathOperation<E>::sub(                                                \
      uint64_t dataMask,                                                     \
      size_t bitsPerElement,                                                 \
      ByteRange b1,                                                          \
      ByteRange b2,                                                          \
      MutableByteRange out);                                                 \
  template <>                                                                \
  void MathOperation<E>::clearPaddingBits(                                   \
      uint64_t dataMask, MutableByteRange buf);                              \
  template <>                                                                \
  bool MathOperation<E>::checkPaddingBits(uint64_t dataMask, ByteRange buf); \
  extern template struct MathOperation<E>
FORWARD_DECLARE_EXTERN_TEMPLATE(MathEngine::AUTO);
FORWARD_DECLARE_EXTERN_TEMPLATE(MathEngine::SIMPLE);
FORWARD_DECLARE_EXTERN_TEMPLATE(MathEngine::SSE2);
FORWARD_DECLARE_EXTERN_TEMPLATE(MathEngine::AVX2);
#undef FORWARD_DECLARE_EXTERN_TEMPLATE
} // namespace detail
} // namespace crypto
} // namespace folly
<|start_filename|>folly/experimental/crypto/test/LtHashBenchmark.cpp<|end_filename|>
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/Benchmark.h>
#include <folly/Random.h>
#include <folly/experimental/crypto/LtHash.h>
#include <folly/init/Init.h>
#include <folly/io/IOBuf.h>
#include <glog/logging.h>
#include <sodium.h>
using namespace ::folly::crypto;
namespace {
// Number of distinct random objects pre-generated for hashing.
constexpr size_t kObjectCount = 1000;
// Size in bytes of each pre-generated object.
constexpr size_t kObjectSize = 150;
// Pool of pre-generated random objects; filled by main() before benchmarks
// run, then cycled through by every benchmark below.
std::vector<std::unique_ptr<const folly::IOBuf>> kObjects;
} // namespace
// Creates a cache-line-aligned IOBuf of `length` bytes filled with random
// bytes from libsodium.
std::unique_ptr<folly::IOBuf> makeRandomData(size_t length) {
  auto buf = std::make_unique<folly::IOBuf>(
      folly::crypto::detail::allocateCacheAlignedIOBuf(length));
  buf->append(length);
  randombytes_buf(buf->writableData(), buf->length());
  return buf;
}
template <std::size_t B, std::size_t N>
void runBenchmark(size_t n) {
LtHash<B, N> ltHash;
for (size_t i = 0; i < static_cast<size_t>(n); ++i) {
const folly::IOBuf& obj = *(kObjects[i % kObjects.size()]);
ltHash.addObject({obj.data(), obj.length()});
}
}
// Baseline: one plain blake2b hash per object; the BENCHMARK_RELATIVE LtHash
// benchmarks below are reported relative to this.
BENCHMARK(single_blake2b, n) {
  std::array<unsigned char, crypto_generichash_blake2b_BYTES_MAX> result;
  for (size_t i = 0; i < static_cast<size_t>(n); ++i) {
    const folly::IOBuf& obj = *(kObjects[i % kObjects.size()]);
    int res = crypto_generichash_blake2b(
        result.data(), sizeof(result), obj.data(), obj.length(), nullptr, 0);
    if (res != 0) {
      throw std::runtime_error("blake2b hash failed");
    }
  }
}
// Per-addObject cost for several (element count N, element size-in-bits B)
// LtHash configurations, relative to the single_blake2b baseline above.
BENCHMARK_RELATIVE(LtHash_element_count_1024_length_16, n) {
  runBenchmark<16, 1024>(static_cast<size_t>(n));
}
BENCHMARK_RELATIVE(LtHash_element_count_1008_length_20, n) {
  runBenchmark<20, 1008>(static_cast<size_t>(n));
}
BENCHMARK_RELATIVE(LtHash_element_count_1024_length_32, n) {
  runBenchmark<32, 1024>(static_cast<size_t>(n));
}
BENCHMARK_RELATIVE(LtHash_element_count_2048_length_32, n) {
  runBenchmark<32, 2048>(static_cast<size_t>(n));
}
// Fixed-size runs: add 100K objects to each LtHash configuration. The
// B20/N1008 variant is the baseline for the two _RELATIVE variants below.
BENCHMARK(calculateChecksumFor100KObjects_B20_N1008) {
  LtHash<20, 1008> ltHash;
  for (auto i = 0; i < 100000; ++i) {
    const folly::IOBuf& obj = *(kObjects[i % kObjects.size()]);
    ltHash.addObject({obj.data(), obj.length()});
  }
}
BENCHMARK_RELATIVE(calculateChecksumFor100KObjects_B16_N1024) {
  LtHash<16, 1024> ltHash;
  for (auto i = 0; i < 100000; ++i) {
    const folly::IOBuf& obj = *(kObjects[i % kObjects.size()]);
    ltHash.addObject({obj.data(), obj.length()});
  }
}
BENCHMARK_RELATIVE(calculateChecksumFor100KObjects_B32_N1024) {
  LtHash<32, 1024> ltHash;
  for (auto i = 0; i < 100000; ++i) {
    const folly::IOBuf& obj = *(kObjects[i % kObjects.size()]);
    ltHash.addObject({obj.data(), obj.length()});
  }
}
// Mirror of the benchmarks above, but removing 100K objects (modular
// subtraction path); B20/N1008 is again the baseline for the _RELATIVE runs.
BENCHMARK(subtractChecksumFor100KObjects_B20_N1008) {
  LtHash<20, 1008> ltHash;
  for (auto i = 0; i < 100000; ++i) {
    const folly::IOBuf& obj = *(kObjects[i % kObjects.size()]);
    ltHash.removeObject({obj.data(), obj.length()});
  }
}
BENCHMARK_RELATIVE(subtractChecksumFor100KObjects_B16_N1024) {
  LtHash<16, 1024> ltHash;
  for (auto i = 0; i < 100000; ++i) {
    const folly::IOBuf& obj = *(kObjects[i % kObjects.size()]);
    ltHash.removeObject({obj.data(), obj.length()});
  }
}
BENCHMARK_RELATIVE(subtractChecksumFor100KObjects_B32_N1024) {
  LtHash<32, 1024> ltHash;
  for (auto i = 0; i < 100000; ++i) {
    const folly::IOBuf& obj = *(kObjects[i % kObjects.size()]);
    ltHash.removeObject({obj.data(), obj.length()});
  }
}
/**
 * Benchmark driver: initializes folly and libsodium, pre-generates the pool
 * of random objects that the benchmarks hash, warms up the AUTO math-engine
 * selection (so its log lines don't pollute the output table), then runs all
 * registered benchmarks.
 */
int main(int argc, char** argv) {
  folly::init(&argc, &argv);
  if (sodium_init() < 0) {
    throw std::runtime_error("Failed to initialize libsodium");
  }
  // Pre-generate fixed-size objects filled with random bytes to hash.
  // (The original comment said "random length", but every object is
  // kObjectSize bytes; only the contents are random.)
  kObjects.reserve(kObjectCount); // single allocation instead of log(N) growths
  for (size_t i = 0; i < kObjectCount; i++) {
    kObjects.push_back(makeRandomData(kObjectSize));
  }
  // Trigger the implementation selection of AUTO math operations before
  // starting the benchmark, so log messages don't pollute the output table.
  LtHash<20, 1008> ltHash;
  ltHash.addObject(folly::range("hello world"));
  ltHash.removeObject(folly::range("hello world"));
  folly::runBenchmarks();
  return 0;
}
<|start_filename|>folly/hash/test/FarmHashTest.cpp<|end_filename|>
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/hash/FarmHash.h>
#include <folly/portability/GTest.h>
// Smoke test for every farmhash entry point: two distinct 3-byte inputs must
// produce distinct hashes/fingerprints.
TEST(farmhash, simple) {
  EXPECT_NE(
      folly::hash::farmhash::Hash("foo", 3),
      folly::hash::farmhash::Hash("bar", 3));
  EXPECT_NE(
      folly::hash::farmhash::Hash32("foo", 3),
      folly::hash::farmhash::Hash32("bar", 3));
  EXPECT_NE(
      folly::hash::farmhash::Hash64("foo", 3),
      folly::hash::farmhash::Hash64("bar", 3));
  EXPECT_NE(
      folly::hash::farmhash::Fingerprint32("foo", 3),
      folly::hash::farmhash::Fingerprint32("bar", 3));
  EXPECT_NE(
      folly::hash::farmhash::Fingerprint64("foo", 3),
      folly::hash::farmhash::Fingerprint64("bar", 3));
}
<|start_filename|>folly/MapUtil.h<|end_filename|>
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <folly/Conv.h>
#include <folly/Optional.h>
#include <folly/functional/Invoke.h>
#include <tuple>
namespace folly {
/**
 * Given a map and a key, return the value corresponding to the key in the map,
 * or a value-initialized mapped_type if the key doesn't exist in the map.
 */
template <typename Map, typename Key>
typename Map::mapped_type get_default(const Map& map, const Key& key) {
  auto found = map.find(key);
  if (found == map.end()) {
    return typename Map::mapped_type{};
  }
  return found->second;
}
// Overload of get_default taking a default value: returns a copy of the
// mapped value when the key is present, otherwise a mapped_type constructed
// from the (forwarded) default. SFINAE-disabled for invocable defaults, which
// are handled by the functor overload below.
template <
    class Map,
    typename Key = typename Map::key_type,
    typename Value = typename Map::mapped_type,
    typename std::enable_if<!is_invocable<Value>::value>::type* = nullptr>
typename Map::mapped_type
get_default(const Map& map, const Key& key, Value&& dflt) {
  using M = typename Map::mapped_type;
  auto found = map.find(key);
  if (found != map.end()) {
    return found->second;
  }
  return M(std::forward<Value>(dflt));
}
/**
 * Given a map and a key, return the value corresponding to the key in the
 * map, or the result of invoking the given default functor if the key
 * doesn't exist in the map.
 */
template <
    class Map,
    typename Key = typename Map::key_type,
    typename Func,
    typename = typename std::enable_if<
        is_invocable_r<typename Map::mapped_type, Func>::value>::type>
typename Map::mapped_type
get_default(const Map& map, const Key& key, Func&& dflt) {
  auto found = map.find(key);
  if (found == map.end()) {
    return dflt();
  }
  return found->second;
}
/**
 * Given a map and a key, return a const reference to the value corresponding
 * to the key in the map, or throw an exception of type E (std::out_of_range
 * by default) whose message is exceptionStrPrefix followed by the key.
 */
template <
    class E = std::out_of_range,
    class Map,
    typename Key = typename Map::key_type>
const typename Map::mapped_type& get_or_throw(
    const Map& map,
    const Key& key,
    const std::string& exceptionStrPrefix = std::string()) {
  auto found = map.find(key);
  if (found == map.end()) {
    throw_exception<E>(folly::to<std::string>(exceptionStrPrefix, key));
  }
  return found->second;
}
/**
 * Non-const overload of get_or_throw; the returned reference may be used to
 * mutate the mapped value in place.
 */
template <
    class E = std::out_of_range,
    class Map,
    typename Key = typename Map::key_type>
typename Map::mapped_type& get_or_throw(
    Map& map,
    const Key& key,
    const std::string& exceptionStrPrefix = std::string()) {
  auto found = map.find(key);
  if (found == map.end()) {
    throw_exception<E>(folly::to<std::string>(exceptionStrPrefix, key));
  }
  return found->second;
}
/**
 * Given a map and a key, return an Optional<V> holding the mapped value when
 * the key exists, or folly::none when it does not.
 */
template <class Map, typename Key = typename Map::key_type>
folly::Optional<typename Map::mapped_type> get_optional(
    const Map& map,
    const Key& key) {
  auto found = map.find(key);
  if (found == map.end()) {
    return folly::none;
  }
  return folly::Optional<typename Map::mapped_type>(found->second);
}
/**
 * Given a map and a key, return a reference to the value corresponding to the
 * key in the map, or the given default reference if the key doesn't exist in
 * the map. The caller must ensure the default outlives the returned reference.
 */
template <class Map, typename Key = typename Map::key_type>
const typename Map::mapped_type& get_ref_default(
    const Map& map,
    const Key& key,
    const typename Map::mapped_type& dflt) {
  auto found = map.find(key);
  if (found == map.end()) {
    return dflt;
  }
  return found->second;
}
/**
 * Passing a temporary default value returns a dangling reference when it is
 * returned. Lifetime extension is broken by the indirection.
 * The caller must ensure that the default value outlives the reference returned
 * by get_ref_default().
 */
template <class Map, typename Key = typename Map::key_type>
const typename Map::mapped_type& get_ref_default(
    const Map& map,
    const Key& key,
    typename Map::mapped_type&& dflt) = delete;
// Also deleted for const rvalues, which would otherwise bind to the
// const-lvalue-reference overload above and dangle just the same.
template <class Map, typename Key = typename Map::key_type>
const typename Map::mapped_type& get_ref_default(
    const Map& map,
    const Key& key,
    const typename Map::mapped_type&& dflt) = delete;
/**
 * Given a map and a key, return a reference to the value corresponding to the
 * key in the map, or the reference returned by invoking the given default
 * functor if the key doesn't exist in the map.
 * The functor must return a (const) lvalue reference to Map::mapped_type; the
 * second enable_if below rejects functors returning by value, which would
 * produce a dangling reference to a temporary.
 */
template <
    class Map,
    typename Key = typename Map::key_type,
    typename Func,
    typename = typename std::enable_if<
        is_invocable_r<const typename Map::mapped_type&, Func>::value>::type,
    typename = typename std::enable_if<
        std::is_reference<invoke_result_t<Func>>::value>::type>
const typename Map::mapped_type&
get_ref_default(const Map& map, const Key& key, Func&& dflt) {
  auto pos = map.find(key);
  return (pos != map.end() ? pos->second : dflt());
}
/**
 * Given a map and a key, return a pointer to the value corresponding to the
 * key in the map, or nullptr if the key doesn't exist in the map.
 */
template <class Map, typename Key = typename Map::key_type>
const typename Map::mapped_type* get_ptr(const Map& map, const Key& key) {
  auto found = map.find(key);
  if (found == map.end()) {
    return nullptr;
  }
  return &found->second;
}
/**
 * Non-const overload of get_ptr; the returned pointer may be used to mutate
 * the mapped value in place.
 */
template <class Map, typename Key = typename Map::key_type>
typename Map::mapped_type* get_ptr(Map& map, const Key& key) {
  auto found = map.find(key);
  if (found == map.end()) {
    return nullptr;
  }
  return &found->second;
}
// TODO: Remove the return type computations when clang 3.5 and gcc 5.1 are
// the minimum supported versions.
namespace detail {
// Computes the type reached by following `pathLength` nested mapped_type
// lookups into a map-of-maps type T; e.g.
// NestedMapType<map<K1, map<K2, V>>, 2>::type is V.
template <
    class T,
    size_t pathLength,
    class = typename std::enable_if<(pathLength > 0)>::type>
struct NestedMapType {
  using type = typename NestedMapType<T, pathLength - 1>::type::mapped_type;
};
// Base case: one lookup deep is just T::mapped_type.
template <class T>
struct NestedMapType<T, 1> {
  using type = typename T::mapped_type;
};
// Yields the type of the last element of a parameter pack.
template <typename... KeysDefault>
struct DefaultType;
template <typename Default>
struct DefaultType<Default> {
  using type = Default;
};
template <typename Key, typename... KeysDefault>
struct DefaultType<Key, KeysDefault...> {
  using type = typename DefaultType<KeysDefault...>::type;
};
// Returns a const reference to the last argument, which by convention is the
// default value in a (keys..., default) argument list.
template <class... KeysDefault>
auto extract_default(const KeysDefault&... keysDefault) ->
    typename DefaultType<KeysDefault...>::type const& {
  return std::get<sizeof...(KeysDefault) - 1>(std::tie(keysDefault...));
}
} // namespace detail
/**
 * Given a map of maps and a path of keys, return an Optional<V> holding the
 * nested value if every key along the path exists, or folly::none otherwise.
 */
template <class Map, class Key1, class Key2, class... Keys>
auto get_optional(
    const Map& map,
    const Key1& key1,
    const Key2& key2,
    const Keys&... keys)
    -> folly::Optional<
        typename detail::NestedMapType<Map, 2 + sizeof...(Keys)>::type> {
  auto found = map.find(key1);
  if (found == map.end()) {
    return folly::none;
  }
  // recurse one level down; the two-argument overload is the base case
  return get_optional(found->second, key2, keys...);
}
/**
 * Given a map of maps and a path of keys, return a pointer to the nested
 * value, or nullptr if any key along the path doesn't exist in its map.
 */
template <class Map, class Key1, class Key2, class... Keys>
auto get_ptr(
    const Map& map,
    const Key1& key1,
    const Key2& key2,
    const Keys&... keys) ->
    typename detail::NestedMapType<Map, 2 + sizeof...(Keys)>::type const* {
  auto found = map.find(key1);
  if (found == map.end()) {
    return nullptr;
  }
  // recurse one level down; the two-argument overload is the base case
  return get_ptr(found->second, key2, keys...);
}
// Non-const overload of the nested get_ptr above.
template <class Map, class Key1, class Key2, class... Keys>
auto get_ptr(Map& map, const Key1& key1, const Key2& key2, const Keys&... keys)
    -> typename detail::NestedMapType<Map, 2 + sizeof...(Keys)>::type* {
  auto found = map.find(key1);
  if (found == map.end()) {
    return nullptr;
  }
  return get_ptr(found->second, key2, keys...);
}
/**
 * Given a map of maps and a path of keys, return the value corresponding to
 * the nested key path, or a copy of the given default value (the last
 * argument) if the path doesn't exist in the map.
 */
template <
    class Map,
    class Key1,
    class Key2,
    class... KeysDefault,
    typename = typename std::enable_if<sizeof...(KeysDefault) != 0>::type>
auto get_default(
    const Map& map,
    const Key1& key1,
    const Key2& key2,
    const KeysDefault&... keysDefault) ->
    typename detail::NestedMapType<Map, 1 + sizeof...(KeysDefault)>::type {
  const auto* inner = get_ptr(map, key1);
  if (inner == nullptr) {
    // keysDefault is (keys..., default); extract_default picks the last one
    return detail::extract_default(keysDefault...);
  }
  return get_default(*inner, key2, keysDefault...);
}
/**
 * Given a map of maps and a path of keys, return a reference to the value
 * corresponding to the nested key path, or the given default reference if the
 * path doesn't exist in the map.
 * The default value is the last parameter, and must be an lvalue reference;
 * the std::is_lvalue_reference constraint below rejects temporaries, which
 * would dangle (see the deleted single-level overloads above).
 */
template <
    class Map,
    class Key1,
    class Key2,
    class... KeysDefault,
    typename = typename std::enable_if<sizeof...(KeysDefault) != 0>::type,
    typename = typename std::enable_if<std::is_lvalue_reference<
        typename detail::DefaultType<KeysDefault...>::type>::value>::type>
auto get_ref_default(
    const Map& map,
    const Key1& key1,
    const Key2& key2,
    KeysDefault&&... keysDefault) ->
    typename detail::NestedMapType<Map, 1 + sizeof...(KeysDefault)>::type
    const& {
  if (const auto* ptr = get_ptr(map, key1)) {
    return get_ref_default(*ptr, key2, keysDefault...);
  }
  return detail::extract_default(keysDefault...);
}
} // namespace folly
<|start_filename|>folly/test/FBVectorBenchmark.cpp<|end_filename|>
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Author: <EMAIL>
#include <deque>
#include <list>
#include <memory>
#include <string>
#include <folly/FBVector.h>
#include <folly/Traits.h>
#include <folly/container/Foreach.h>
#include <folly/portability/GFlags.h>
#include <folly/small_vector.h>
#include <folly/test/FBVectorTestUtil.h>
using namespace std;
using namespace folly;
using namespace folly::test::detail;
// Container aliases benchmarked below; each one is stamped into a full
// benchmark suite via the VECTOR macro + FBVectorBenchmarks.cpp.h includes.
using IntVector = vector<int>;
using IntFBVector = fbvector<int>;
using IntList = list<int>;
using IntDeque = deque<int>;
using IntSmallVector = small_vector<int>;
using StringVector = vector<std::string>;
using StringFBVector = fbvector<std::string>;
using StringList = list<std::string>;
using StringDeque = deque<std::string>;
using StringSmallVector = small_vector<std::string>;
using FBStringVector = vector<folly::fbstring>;
using FBStringFBVector = fbvector<folly::fbstring>;
// Stamp out one benchmark suite per container type by redefining VECTOR and
// re-including the benchmark template. SKIP_RESERVE is defined for containers
// (list, deque) that have no reserve() member.
#define VECTOR IntVector
#include <folly/test/FBVectorBenchmarks.cpp.h> // nolint
#undef VECTOR
#define VECTOR IntFBVector
#include <folly/test/FBVectorBenchmarks.cpp.h> // nolint
#undef VECTOR
#define VECTOR IntSmallVector
#include <folly/test/FBVectorBenchmarks.cpp.h> // nolint
#undef VECTOR
#define VECTOR IntList
#define SKIP_RESERVE
#include <folly/test/FBVectorBenchmarks.cpp.h> // nolint
#undef SKIP_RESERVE
#undef VECTOR
#define VECTOR IntDeque
#define SKIP_RESERVE
#include <folly/test/FBVectorBenchmarks.cpp.h> // nolint
#undef SKIP_RESERVE
#undef VECTOR
#define VECTOR StringVector
#include <folly/test/FBVectorBenchmarks.cpp.h> // nolint
#undef VECTOR
#define VECTOR StringFBVector
#include <folly/test/FBVectorBenchmarks.cpp.h> // nolint
#undef VECTOR
#define VECTOR StringSmallVector
#include <folly/test/FBVectorBenchmarks.cpp.h> // nolint
#undef VECTOR
#define VECTOR StringList
#define SKIP_RESERVE
#include <folly/test/FBVectorBenchmarks.cpp.h> // nolint
#undef SKIP_RESERVE
#undef VECTOR
#define VECTOR StringDeque
#define SKIP_RESERVE
#include <folly/test/FBVectorBenchmarks.cpp.h> // nolint
#undef SKIP_RESERVE
#undef VECTOR
#define VECTOR FBStringVector
#include <folly/test/FBVectorBenchmarks.cpp.h> // nolint
#undef VECTOR
#define VECTOR FBStringFBVector
#include <folly/test/FBVectorBenchmarks.cpp.h> // nolint
#undef VECTOR
int main(int argc, char** argv) {
  gflags::ParseCommandLineFlags(&argc, &argv, true);
  // Seed benchmark knobs with sensible defaults, but only when the user did
  // not pass the flag explicitly (SET_FLAG_IF_DEFAULT).
  gflags::SetCommandLineOptionWithMode(
      "bm_max_iters", "1000000", gflags::SET_FLAG_IF_DEFAULT);
  gflags::SetCommandLineOptionWithMode(
      "bm_min_iters", "100000", gflags::SET_FLAG_IF_DEFAULT);
  gflags::SetCommandLineOptionWithMode(
      "bm_max_secs", "1", gflags::SET_FLAG_IF_DEFAULT);
  folly::runBenchmarks();
  return 0;
}
// clang-format off
/*
============================================================================
buck-out/opt/gen/folly/test/fbvector_benchmark#gcc-5-glibc-2.23,private-headers/folly/test/FBVectorBenchmarks.cpp.hrelative time/iter iters/s
============================================================================
BM_zzInitRNG_IntVector 1.05us 951.24K
BM_defaultCtor_IntVector 1.31ns 765.93M
BM_sizeCtor_IntVector(16) 19.33ns 51.73M
BM_sizeCtor_IntVector(128) 42.11ns 23.75M
BM_sizeCtor_IntVector(1024) 60.90ns 16.42M
BM_fillCtor_IntVector(16) 30.67ns 32.61M
BM_fillCtor_IntVector(128) 41.22ns 24.26M
BM_fillCtor_IntVector(1024) 133.70ns 7.48M
BM_reserve_IntVector(16) 40.27ns 24.83M
BM_reserve_IntVector(128) 40.20ns 24.88M
BM_reserve_IntVector(1024) 40.17ns 24.90M
BM_insertFront_IntVector(16) 7.90us 126.52K
BM_insertFront_IntVector(128) 8.12us 123.09K
BM_insertFront_IntVector(1024) 8.30us 120.46K
BM_insertFront_IntVector(10240) 10.14us 98.67K
BM_insertFront_IntVector(102400) 30.71us 32.56K
BM_insertFront_IntVector(1024000) 220.69us 4.53K
BM_pushBack_IntVector(16) 776.38ps 1.29G
BM_pushBack_IntVector(128) 775.89ps 1.29G
BM_pushBack_IntVector(1024) 742.50ps 1.35G
BM_pushBack_IntVector(10240) 787.75ps 1.27G
BM_pushBack_IntVector(102400) 714.07ps 1.40G
BM_pushBack_IntVector(1024000) 3.15ns 317.26M
BM_zzInitRNG_IntFBVector 1.17us 853.35K
BM_defaultCtor_IntFBVector 989.76ps 1.01G
BM_sizeCtor_IntFBVector(16) 27.19ns 36.78M
BM_sizeCtor_IntFBVector(128) 46.73ns 21.40M
BM_sizeCtor_IntFBVector(1024) 69.03ns 14.49M
BM_fillCtor_IntFBVector(16) 35.97ns 27.80M
BM_fillCtor_IntFBVector(128) 55.11ns 18.15M
BM_fillCtor_IntFBVector(1024) 147.89ns 6.76M
BM_reserve_IntFBVector(16) 54.18ns 18.46M
BM_reserve_IntFBVector(128) 54.24ns 18.44M
BM_reserve_IntFBVector(1024) 54.24ns 18.44M
BM_insertFront_IntFBVector(16) 8.41us 118.86K
BM_insertFront_IntFBVector(128) 8.45us 118.41K
BM_insertFront_IntFBVector(1024) 8.56us 116.80K
BM_insertFront_IntFBVector(10240) 10.72us 93.32K
BM_insertFront_IntFBVector(102400) 30.83us 32.43K
BM_insertFront_IntFBVector(1024000) 217.31us 4.60K
BM_pushBack_IntFBVector(16) 2.05ns 488.26M
BM_pushBack_IntFBVector(128) 1.99ns 503.65M
BM_pushBack_IntFBVector(1024) 2.16ns 462.50M
BM_pushBack_IntFBVector(10240) 2.13ns 468.48M
BM_pushBack_IntFBVector(102400) 1.93ns 517.23M
BM_pushBack_IntFBVector(1024000) 1.89ns 529.29M
BM_zzInitRNG_IntSmallVector 1.17us 855.04K
BM_defaultCtor_IntSmallVector 698.82ps 1.43G
BM_sizeCtor_IntSmallVector(16) 37.59ns 26.60M
BM_sizeCtor_IntSmallVector(128) 85.90ns 11.64M
BM_sizeCtor_IntSmallVector(1024) 401.37ns 2.49M
BM_fillCtor_IntSmallVector(16) 48.22ns 20.74M
BM_fillCtor_IntSmallVector(128) 99.99ns 10.00M
BM_fillCtor_IntSmallVector(1024) 458.71ns 2.18M
BM_reserve_IntSmallVector(16) 44.30ns 22.57M
BM_reserve_IntSmallVector(128) 44.29ns 22.58M
BM_reserve_IntSmallVector(1024) 45.15ns 22.15M
BM_insertFront_IntSmallVector(16) 8.40us 119.11K
BM_insertFront_IntSmallVector(128) 7.74us 129.25K
BM_insertFront_IntSmallVector(1024) 8.17us 122.47K
BM_insertFront_IntSmallVector(10240) 10.17us 98.34K
BM_insertFront_IntSmallVector(102400) 29.60us 33.79K
BM_insertFront_IntSmallVector(1024000) 208.82us 4.79K
BM_pushBack_IntSmallVector(16) 2.92ns 342.66M
BM_pushBack_IntSmallVector(128) 2.91ns 343.36M
BM_pushBack_IntSmallVector(1024) 2.76ns 362.74M
BM_pushBack_IntSmallVector(10240) 2.71ns 369.18M
BM_pushBack_IntSmallVector(102400) 3.04ns 329.36M
BM_pushBack_IntSmallVector(1024000) 4.90ns 204.21M
BM_zzInitRNG_IntList 1.04us 958.67K
BM_defaultCtor_IntList 911.25ps 1.10G
BM_sizeCtor_IntList(16) 264.10ns 3.79M
BM_sizeCtor_IntList(128) 2.08us 481.87K
BM_sizeCtor_IntList(1024) 35.52us 28.15K
BM_fillCtor_IntList(16) 269.86ns 3.71M
BM_fillCtor_IntList(128) 2.12us 470.70K
BM_fillCtor_IntList(1024) 46.59us 21.47K
BM_insertFront_IntList(16) 18.88ns 52.95M
BM_insertFront_IntList(128) 19.67ns 50.85M
BM_insertFront_IntList(1024) 18.79ns 53.22M
BM_insertFront_IntList(10240) 20.47ns 48.85M
BM_insertFront_IntList(102400) 17.43ns 57.37M
BM_insertFront_IntList(1024000) 17.65ns 56.65M
BM_pushBack_IntList(16) 20.45ns 48.89M
BM_pushBack_IntList(128) 21.54ns 46.42M
BM_pushBack_IntList(1024) 20.14ns 49.64M
BM_pushBack_IntList(10240) 21.21ns 47.15M
BM_pushBack_IntList(102400) 18.53ns 53.98M
BM_pushBack_IntList(1024000) 22.16ns 45.12M
BM_zzInitRNG_IntDeque 1.14us 879.33K
BM_defaultCtor_IntDeque 33.14ns 30.18M
BM_sizeCtor_IntDeque(16) 44.34ns 22.56M
BM_sizeCtor_IntDeque(128) 81.28ns 12.30M
BM_sizeCtor_IntDeque(1024) 338.93ns 2.95M
BM_fillCtor_IntDeque(16) 52.18ns 19.16M
BM_fillCtor_IntDeque(128) 76.01ns 13.16M
BM_fillCtor_IntDeque(1024) 329.99ns 3.03M
BM_insertFront_IntDeque(16) 2.56ns 390.51M
BM_insertFront_IntDeque(128) 2.48ns 403.57M
BM_insertFront_IntDeque(1024) 2.31ns 432.60M
BM_insertFront_IntDeque(10240) 2.30ns 434.90M
BM_insertFront_IntDeque(102400) 2.32ns 431.00M
BM_insertFront_IntDeque(1024000) 2.36ns 423.26M
BM_pushBack_IntDeque(16) 935.50ps 1.07G
BM_pushBack_IntDeque(128) 935.72ps 1.07G
BM_pushBack_IntDeque(1024) 942.23ps 1.06G
BM_pushBack_IntDeque(10240) 934.27ps 1.07G
BM_pushBack_IntDeque(102400) 947.61ps 1.06G
BM_pushBack_IntDeque(1024000) 993.47ps 1.01G
BM_zzInitRNG_StringVector 1.03us 966.54K
BM_defaultCtor_StringVector 911.27ps 1.10G
BM_sizeCtor_StringVector(16) 35.94ns 27.83M
BM_sizeCtor_StringVector(128) 233.07ns 4.29M
BM_sizeCtor_StringVector(1024) 1.83us 546.61K
BM_fillCtor_StringVector(16) 10.30us 97.07K
BM_fillCtor_StringVector(128) 21.56us 46.37K
BM_fillCtor_StringVector(1024) 128.63us 7.77K
BM_reserve_StringVector(16) 45.76ns 21.85M
BM_reserve_StringVector(128) 60.52ns 16.52M
BM_reserve_StringVector(1024) 59.59ns 16.78M
BM_insertFront_StringVector(16) 124.99us 8.00K
BM_insertFront_StringVector(128) 120.57us 8.29K
BM_insertFront_StringVector(1024) 126.47us 7.91K
BM_insertFront_StringVector(10240) 153.43us 6.52K
BM_insertFront_StringVector(102400) 380.73us 2.63K
BM_insertFront_StringVector(1024000) 3.96ms 252.31
BM_pushBack_StringVector(16) 40.16ns 24.90M
BM_pushBack_StringVector(128) 41.94ns 23.85M
BM_pushBack_StringVector(1024) 36.92ns 27.08M
BM_pushBack_StringVector(10240) 18.19ns 54.99M
BM_pushBack_StringVector(102400) 41.21ns 24.27M
BM_pushBack_StringVector(1024000) 234.95ns 4.26M
BM_zzInitRNG_StringFBVector 1.05us 956.06K
BM_defaultCtor_StringFBVector 911.25ps 1.10G
BM_sizeCtor_StringFBVector(16) 38.40ns 26.04M
BM_sizeCtor_StringFBVector(128) 202.10ns 4.95M
BM_sizeCtor_StringFBVector(1024) 1.68us 593.56K
BM_fillCtor_StringFBVector(16) 6.65us 150.29K
BM_fillCtor_StringFBVector(128) 14.76us 67.76K
BM_fillCtor_StringFBVector(1024) 117.60us 8.50K
BM_reserve_StringFBVector(16) 60.40ns 16.56M
BM_reserve_StringFBVector(128) 62.28ns 16.06M
BM_reserve_StringFBVector(1024) 66.76ns 14.98M
BM_insertFront_StringFBVector(16) 126.51us 7.90K
BM_insertFront_StringFBVector(128) 121.29us 8.24K
BM_insertFront_StringFBVector(1024) 129.81us 7.70K
BM_insertFront_StringFBVector(10240) 148.77us 6.72K
BM_insertFront_StringFBVector(102400) 380.46us 2.63K
BM_insertFront_StringFBVector(1024000) 3.73ms 268.02
BM_pushBack_StringFBVector(16) 11.89ns 84.13M
BM_pushBack_StringFBVector(128) 20.32ns 49.20M
BM_pushBack_StringFBVector(1024) 47.91ns 20.87M
BM_pushBack_StringFBVector(10240) 39.74ns 25.16M
BM_pushBack_StringFBVector(102400) 36.86ns 27.13M
BM_pushBack_StringFBVector(1024000) 285.22ns 3.51M
BM_zzInitRNG_StringSmallVector 1.04us 965.73K
BM_defaultCtor_StringSmallVector 607.54ps 1.65G
BM_sizeCtor_StringSmallVector(16) 44.30ns 22.57M
BM_sizeCtor_StringSmallVector(128) 234.40ns 4.27M
BM_sizeCtor_StringSmallVector(1024) 1.96us 510.33K
BM_fillCtor_StringSmallVector(16) 6.12us 163.46K
BM_fillCtor_StringSmallVector(128) 18.65us 53.63K
BM_fillCtor_StringSmallVector(1024) 132.36us 7.56K
BM_reserve_StringSmallVector(16) 43.86ns 22.80M
BM_reserve_StringSmallVector(128) 51.03ns 19.60M
BM_reserve_StringSmallVector(1024) 48.61ns 20.57M
BM_insertFront_StringSmallVector(16) 127.32us 7.85K
BM_insertFront_StringSmallVector(128) 118.93us 8.41K
BM_insertFront_StringSmallVector(1024) 130.04us 7.69K
BM_insertFront_StringSmallVector(10240) 143.89us 6.95K
BM_insertFront_StringSmallVector(102400) 386.40us 2.59K
BM_insertFront_StringSmallVector(1024000) 3.74ms 267.73
BM_pushBack_StringSmallVector(16) 50.77ns 19.70M
BM_pushBack_StringSmallVector(128) 44.12ns 22.67M
BM_pushBack_StringSmallVector(1024) 45.62ns 21.92M
BM_pushBack_StringSmallVector(10240) 69.06ns 14.48M
BM_pushBack_StringSmallVector(102400) 139.62ns 7.16M
BM_pushBack_StringSmallVector(1024000) 445.65ns 2.24M
BM_zzInitRNG_StringList 1.17us 854.00K
BM_defaultCtor_StringList 911.39ps 1.10G
BM_sizeCtor_StringList(16) 309.90ns 3.23M
BM_sizeCtor_StringList(128) 3.18us 314.57K
BM_sizeCtor_StringList(1024) 41.72us 23.97K
BM_fillCtor_StringList(16) 7.12us 140.54K
BM_fillCtor_StringList(128) 19.22us 52.04K
BM_fillCtor_StringList(1024) 160.20us 6.24K
BM_insertFront_StringList(16) 27.71ns 36.09M
BM_insertFront_StringList(128) 51.34ns 19.48M
BM_insertFront_StringList(1024) 55.53ns 18.01M
BM_insertFront_StringList(10240) 24.62ns 40.62M
BM_insertFront_StringList(102400) 25.63ns 39.02M
BM_insertFront_StringList(1024000) 341.85ns 2.93M
BM_pushBack_StringList(16) 28.69ns 34.85M
BM_pushBack_StringList(128) 29.11ns 34.36M
BM_pushBack_StringList(1024) 33.28ns 30.05M
BM_pushBack_StringList(10240) 26.47ns 37.78M
BM_pushBack_StringList(102400) 48.51ns 20.62M
BM_pushBack_StringList(1024000) 75.97ns 13.16M
BM_zzInitRNG_StringDeque 1.17us 852.21K
BM_defaultCtor_StringDeque 39.44ns 25.36M
BM_sizeCtor_StringDeque(16) 88.29ns 11.33M
BM_sizeCtor_StringDeque(128) 444.53ns 2.25M
BM_sizeCtor_StringDeque(1024) 6.20us 161.17K
BM_fillCtor_StringDeque(16) 6.82us 146.73K
BM_fillCtor_StringDeque(128) 16.95us 58.99K
BM_fillCtor_StringDeque(1024) 121.97us 8.20K
BM_insertFront_StringDeque(16) 10.75ns 92.98M
BM_insertFront_StringDeque(128) 40.83ns 24.49M
BM_insertFront_StringDeque(1024) 10.26ns 97.43M
BM_insertFront_StringDeque(10240) 37.85ns 26.42M
BM_insertFront_StringDeque(102400) 34.75ns 28.78M
BM_insertFront_StringDeque(1024000) 39.31ns 25.44M
BM_pushBack_StringDeque(16) 11.32ns 88.31M
BM_pushBack_StringDeque(128) 11.93ns 83.80M
BM_pushBack_StringDeque(1024) 10.41ns 96.02M
BM_pushBack_StringDeque(10240) 9.83ns 101.72M
BM_pushBack_StringDeque(102400) 64.98ns 15.39M
BM_pushBack_StringDeque(1024000) 33.45ns 29.89M
BM_zzInitRNG_FBStringVector 1.17us 855.50K
BM_defaultCtor_FBStringVector 989.77ps 1.01G
BM_sizeCtor_FBStringVector(16) 35.38ns 28.26M
BM_sizeCtor_FBStringVector(128) 180.30ns 5.55M
BM_sizeCtor_FBStringVector(1024) 1.21us 823.15K
BM_fillCtor_FBStringVector(16) 6.42us 155.85K
BM_fillCtor_FBStringVector(128) 8.90us 112.32K
BM_fillCtor_FBStringVector(1024) 36.57us 27.35K
BM_reserve_FBStringVector(16) 50.12ns 19.95M
BM_reserve_FBStringVector(128) 50.09ns 19.96M
BM_reserve_FBStringVector(1024) 53.58ns 18.66M
BM_insertFront_FBStringVector(16) 105.90us 9.44K
BM_insertFront_FBStringVector(128) 102.06us 9.80K
BM_insertFront_FBStringVector(1024) 103.67us 9.65K
BM_insertFront_FBStringVector(10240) 122.63us 8.15K
BM_insertFront_FBStringVector(102400) 312.48us 3.20K
BM_insertFront_FBStringVector(1024000) 2.30ms 434.80
BM_pushBack_FBStringVector(16) 10.18ns 98.26M
BM_pushBack_FBStringVector(128) 10.13ns 98.75M
BM_pushBack_FBStringVector(1024) 10.14ns 98.62M
BM_pushBack_FBStringVector(10240) 11.60ns 86.19M
BM_pushBack_FBStringVector(102400) 8.47ns 118.02M
BM_pushBack_FBStringVector(1024000) 88.01ns 11.36M
BM_zzInitRNG_FBStringFBVector 1.03us 971.03K
BM_defaultCtor_FBStringFBVector 911.25ps 1.10G
BM_sizeCtor_FBStringFBVector(16) 33.53ns 29.82M
BM_sizeCtor_FBStringFBVector(128) 135.17ns 7.40M
BM_sizeCtor_FBStringFBVector(1024) 951.05ns 1.05M
BM_fillCtor_FBStringFBVector(16) 5.71us 175.27K
BM_fillCtor_FBStringFBVector(128) 8.11us 123.37K
BM_fillCtor_FBStringFBVector(1024) 37.95us 26.35K
BM_reserve_FBStringFBVector(16) 54.53ns 18.34M
BM_reserve_FBStringFBVector(128) 51.41ns 19.45M
BM_reserve_FBStringFBVector(1024) 55.52ns 18.01M
BM_insertFront_FBStringFBVector(16) 58.80us 17.01K
BM_insertFront_FBStringFBVector(128) 58.45us 17.11K
BM_insertFront_FBStringFBVector(1024) 59.08us 16.93K
BM_insertFront_FBStringFBVector(10240) 69.85us 14.32K
BM_insertFront_FBStringFBVector(102400) 176.99us 5.65K
BM_insertFront_FBStringFBVector(1024000) 4.07ms 245.84
BM_pushBack_FBStringFBVector(16) 4.19ns 238.39M
BM_pushBack_FBStringFBVector(128) 3.76ns 265.90M
BM_pushBack_FBStringFBVector(1024) 4.68ns 213.66M
BM_pushBack_FBStringFBVector(10240) 3.24ns 309.08M
BM_pushBack_FBStringFBVector(102400) 3.17ns 315.07M
BM_pushBack_FBStringFBVector(1024000) 25.88ns 38.65M
============================================================================
*/
// clang-format on
<|start_filename|>folly/test/DeterministicScheduleTest.cpp<|end_filename|>
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/test/DeterministicSchedule.h>
#include <folly/portability/GFlags.h>
#include <folly/portability/GTest.h>
using namespace folly::test;
// Verifies that DeterministicSchedule::uniform(0) yields a roughly uniform
// distribution over [0, 10): with 100k draws, each bucket should receive
// close to 10k hits (> 9000 allows for statistical noise).
TEST(DeterministicSchedule, uniform) {
  auto dist = DeterministicSchedule::uniform(0);
  int counts[10] = {};
  for (int iter = 0; iter < 100000; ++iter) {
    ++counts[dist(10)];
  }
  for (int bucket = 0; bucket < 10; ++bucket) {
    EXPECT_TRUE(counts[bucket] > 9000);
  }
}
// uniformSubset(0, 3, 100) should draw from a subset of 3 values that is
// re-chosen every 100 calls, while remaining roughly uniform over all 10
// values across many periods.
TEST(DeterministicSchedule, uniformSubset) {
  auto ps = DeterministicSchedule::uniformSubset(0, 3, 100);
  int buckets[10] = {};
  std::set<int> seen;
  for (int i = 0; i < 100000; ++i) {
    if (i > 0 && (i % 100) == 0) {
      // A full period elapsed: exactly 3 distinct values must have been
      // produced; reset tracking for the next period's subset.
      EXPECT_EQ(seen.size(), 3);
      seen.clear();
    }
    int x = ps(10);
    seen.insert(x);
    // Within a period, never more than 3 distinct values.
    EXPECT_TRUE(seen.size() <= 3);
    buckets[x]++;
  }
  // Over many re-chosen subsets every value should be drawn often.
  for (int i = 0; i < 10; ++i) {
    EXPECT_TRUE(buckets[i] > 9000);
  }
}
// Runs a mutex-protected concurrent increment both correctly and with a
// deliberately injected race (unlock/relock mid-update) and checks that only
// the buggy variant can lose updates.
TEST(DeterministicSchedule, buggyAdd) {
  for (bool bug : {false, true}) {
    DeterministicSchedule sched(DeterministicSchedule::uniform(0));
    if (bug) {
      FOLLY_TEST_DSCHED_VLOG("Test with race condition");
    } else {
      FOLLY_TEST_DSCHED_VLOG("Test without race condition");
    }
    DeterministicMutex m;
    // The use of DeterministicAtomic is not needed here, but it makes
    // it easier to understand the sequence of events in logs.
    DeterministicAtomic<int> test{0};
    DeterministicAtomic<int> baseline{0};
    int numThreads = 10;
    std::vector<std::thread> threads(numThreads);
    for (int t = 0; t < numThreads; ++t) {
      threads[t] = DeterministicSchedule::thread([&, t] {
        baseline.fetch_add(1);
        // Atomic increment of test protected by mutex m
        do {
          // Some threads use lock() others use try_lock()
          if ((t & 1) == 0) {
            m.lock();
          } else {
            if (!m.try_lock()) {
              continue;
            }
          }
          int newval = test.load() + 1;
          if (bug) {
            // Break the atomicity of the increment operation
            m.unlock();
            m.lock();
          }
          test.store(newval);
          m.unlock();
          break;
        } while (true);
      }); // thread lambda
    } // for t
    DeterministicSchedule::joinAll(threads);
    if (!bug) {
      // Without the race, every thread's increment must be observed.
      EXPECT_EQ(test.load(), baseline.load());
    } else {
      // The schedule does not guarantee the race manifests on every run,
      // so log the outcome rather than asserting it.
      if (test.load() == baseline.load()) {
        FOLLY_TEST_DSCHED_VLOG("Didn't catch the bug");
      } else {
        FOLLY_TEST_DSCHED_VLOG("Caught the bug");
      }
    }
  } // for bug
}
/*
* Test DSched support for auxiliary data and global invariants
*
* How to use DSched support for auxiliary data and global invariants
* (Let Foo<T, Atom> be the template to be tested):
* 1. Add friend AnnotatedFoo<T> to Foo<T,Atom> (Typically, in Foo.h).
* 2. Define a class AuxData for whatever auxiliary data is needed
* to maintain global knowledge of shared and private state.
* 3. Define:
* static AuxData* aux_;
* static FOLLY_TLS uint32_t tid_;
* 4. (Optional) Define gflags for command line options. E.g.:
* DEFINE_int64(seed, 0, "Seed for random number generators");
 * 5. (Optional) Define macros for management of auxiliary data. E.g.,
* #define AUX_THR(x) (aux_->t_[tid_]->x)
* 6. (Optional) Define macro for creating auxiliary actions. E.g.,
* #define AUX_ACT(act) \
* { \
* AUX_THR(func_) = __func__; \
* AUX_THR(line_) = __LINE__; \
* AuxAct auxact([&](bool success) { if (success); act}); \
* DeterministicSchedule::setAuxAct(auxact); \
* }
* [Note: Auxiliary actions must not contain any standard shared
* accesses, or else deadlock will occur. Use the load_direct()
* member function of DeterministicAtomic instead.]
* 7. Define AnnotatedFoo<T> derived from Foo<T,DeterministicAtomic>.
* 8. Define member functions in AnnotatedFoo to manage DSched::auxChk.
 * 9. Define member functions for logging and checking global invariants.
* 10. Define member functions for direct access to data members of Foo.
* 11. (Optional) Add a member function dummyStep() to update
 * auxiliary data race-free when the next step is unknown or
* not conveniently accessible (e.g., in a different
 * library). The function adds a dummy shared step to force
 * DSched to invoke the auxiliary action at a known point. This
* is needed for now because DSched allows threads to run in
* parallel between shared accesses. Hence, concurrent updates
* of shared auxiliary data can be racy if executed outside
* auxiliary actions. This may be obviated in the future if
 * DSched supports fully serialized execution.
* void dummyStep() {
* DeterministicSchedule::beforeSharedAccess();
* DeterministicSchedule::afterSharedAccess(true);
* }
* 12. Override member functions of Foo as needed in order to
* annotate the code with auxiliary actions. [Note: There may be
* a lot of duplication of Foo's code. Alternatively, Foo can be
* annotated directly.]
* 13. Define TEST using instances of AuxData and AnnotatedFoo.
* 14. For debugging, iteratively add (as needed) auxiliary data,
* global invariants, logging details, command line flags as
* needed and selectively generate relevant logs to detect the
* race condition shortly after it occurs.
*
* In the following example Foo = AtomicCounter
*/
using DSched = DeterministicSchedule;
/** Forward declaration of annotated template */
template <typename T>
struct AnnotatedAtomicCounter;
/** Original template to be tested: a counter with a correct atomic
 *  increment (inc) and a deliberately non-atomic one (incBug) whose
 *  read-modify-write can lose updates under contention. */
template <typename T, template <typename> class Atom = std::atomic>
class AtomicCounter {
  /** Friend declaration to allow full access */
  friend struct AnnotatedAtomicCounter<T>;

 public:
  explicit AtomicCounter(T val) : counter_(val) {}
  /** Correct increment: a single atomic read-modify-write. */
  void inc() {
    counter_.fetch_add(1);
  }
  /** Buggy increment: separate load and store, so it races. */
  void incBug() {
    counter_.store(counter_.load() + 1);
  }
  /** Current counter value. */
  T load() {
    return counter_.load();
  }

 private:
  Atom<T> counter_ = {0};
};
/** auxiliary data: global bookkeeping shared by all test threads, used by
 *  the annotated counter to log progress and check invariants. */
struct AuxData {
  using T = int;
  /* General */
  // Most recent shared step observed by auxLog().
  uint64_t step_ = {0};
  // Step at which aux state last changed; drives the liveness check.
  uint64_t lastUpdate_ = {0};
  struct PerThread {
    /* General */
    // Function name / line of the thread's most recent annotated action.
    std::string func_;
    int line_;
    /* Custom */
    // Number of increments this thread believes it has applied.
    T count_ = {0};
  };
  // One record per logical thread, indexed by tid_.
  std::vector<PerThread> t_;
  explicit AuxData(int nthr) : t_(nthr) {}
};
// Global auxiliary state; allocated/freed per test repetition.
static AuxData* aux_;
// Per-thread logical id, used to index aux_->t_.
static FOLLY_TLS uint32_t tid_;
/* Command line flags */
DEFINE_int64(seed, 0, "Seed for random number generators");
DEFINE_int64(max_steps, 1000000, "Max. number of shared steps for the test");
DEFINE_int64(num_reps, 1, "Number of test repetitions");
DEFINE_int64(num_ops, 1000, "Number of increments per repetition");
DEFINE_int64(liveness_thresh, 1000000, "Liveness threshold");
DEFINE_int64(log_begin, 0, "Step number to start logging. No logging if <= 0");
DEFINE_int64(log_length, 1000, "Length of step by step log (if log_begin > 0)");
DEFINE_int64(log_freq, 100000, "Log every so many steps");
DEFINE_int32(num_threads, 1, "Number of producers");
DEFINE_bool(bug, false, "Introduce bug");
/** Aux macros */
// Accessor for the calling thread's per-thread aux record field.
#define AUX_THR(x) (aux_->t_[tid_].x)
// Marks aux state as refreshed at the upcoming step (liveness bookkeeping).
#define AUX_UPDATE() (aux_->lastUpdate_ = aux_->step_ + 1)
/** Macro for inline definition of auxiliary actions */
// Registers `act` to run at the next shared step, tagging the record with
// the current function name and line for logging.
#define AUX_ACT(act)                         \
  do {                                       \
    AUX_THR(func_) = __func__;               \
    AUX_THR(line_) = __LINE__;               \
    AuxAct auxfn([&](bool success) {         \
      if (success) {                         \
      }                                      \
      if (true) {                            \
        act                                  \
      }                                      \
    });                                      \
    DeterministicSchedule::setAuxAct(auxfn); \
  } while (0)
/** Alias for original class */
template <typename T>
using Base = AtomicCounter<T, DeterministicAtomic>;
/** Annotated shared class: wraps AtomicCounter and maintains aux data so
 *  DSched can log progress and check the global sum invariant. */
template <typename T>
struct AnnotatedAtomicCounter : public Base<T> {
  /** Manage DSched auxChk */
  void setAuxChk() {
    AuxChk auxfn([&](uint64_t step) {
      auxLog(step);
      auxCheck();
    });
    DeterministicSchedule::setAuxChk(auxfn);
  }
  void clearAuxChk() {
    DeterministicSchedule::clearAuxChk();
  }
  /** Aux log function: records the step, aborts past max_steps, and emits a
   *  log line either inside the configured [log_begin, log_begin+log_length]
   *  window or every log_freq steps. */
  void auxLog(uint64_t step) {
    if (aux_->step_ == 0) {
      aux_->lastUpdate_ = step;
    }
    aux_->step_ = step;
    if (step > (uint64_t)FLAGS_max_steps) {
      exit(0);
    }
    bool doLog =
        (((FLAGS_log_begin > 0) && (step >= (uint64_t)FLAGS_log_begin) &&
          (step <= (uint64_t)FLAGS_log_begin + FLAGS_log_length)) ||
         ((step % FLAGS_log_freq) == 0));
    if (doLog) {
      doAuxLog(step);
    }
  }
  // Formats one log line with shared and per-thread state; uses load_direct()
  // to avoid generating extra DSched shared steps.
  void doAuxLog(uint64_t step) {
    std::stringstream ss;
    /* General */
    ss << step << " - " << aux_->lastUpdate_ << " --";
    /* Shared */
    ss << " counter =" << this->counter_.load_direct();
    /* Thread */
    ss << " -- t" << tid_ << " " << AUX_THR(func_) << ":" << AUX_THR(line_);
    ss << " count[" << tid_ << "] = " << AUX_THR(count_);
    /* Output */
    std::cerr << ss.str() << std::endl;
  }
  // Global invariants: progress within liveness_thresh steps, and the shared
  // counter equals the sum of per-thread counts.
  void auxCheck() {
    /* Liveness */
    CHECK_LT(aux_->step_, aux_->lastUpdate_ + FLAGS_liveness_thresh);
    /* Safety */
    int sum = {0};
    for (auto& t : aux_->t_) {
      sum += t.count_;
    }
    CHECK_EQ(this->counter_.load_direct(), sum);
  }
  /* Direct access without going through DSched */
  T loadDirect() {
    return this->counter_.load_direct();
  }
  /* Constructor -- calls original constructor */
  explicit AnnotatedAtomicCounter(int val) : Base<T>(val) {}
  /* Overloads of original member functions (as needed) */
  void inc() {
    // Bump the per-thread count at the same shared step as the fetch_add.
    AUX_ACT({ ++AUX_THR(count_); });
    this->counter_.fetch_add(1);
  }
  void incBug() {
    AUX_ACT({});
    T newval = this->counter_.load() + 1;
    AUX_ACT({ ++AUX_THR(count_); });
    this->counter_.store(newval);
  }
};
using Annotated = AnnotatedAtomicCounter<int>;
// End-to-end check of the aux-data machinery: num_threads workers increment
// a shared AnnotatedAtomicCounter while auxCheck() continuously verifies
// that the counter equals the sum of per-thread counts.
TEST(DeterministicSchedule, global_invariants) {
  CHECK_GT(FLAGS_num_threads, 0);
  DSched sched(DSched::uniform(FLAGS_seed));
  for (int i = 0; i < FLAGS_num_reps; ++i) {
    aux_ = new AuxData(FLAGS_num_threads);
    Annotated annotated(0);
    annotated.setAuxChk();
    std::vector<std::thread> threads(FLAGS_num_threads);
    for (int tid = 0; tid < FLAGS_num_threads; ++tid) {
      threads[tid] = DSched::thread([&, tid]() {
        tid_ = tid;
        // Threads stride over [0, num_ops) so the total is exactly num_ops.
        for (int j = tid; j < FLAGS_num_ops; j += FLAGS_num_threads) {
          (FLAGS_bug) ? annotated.incBug() : annotated.inc();
        }
      });
    }
    for (auto& t : threads) {
      DSched::join(t);
    }
    std::cerr << "====== rep " << i << " completed in step " << aux_->step_
              << std::endl;
    annotated.doAuxLog(aux_->step_);
    std::cerr << std::endl;
    EXPECT_EQ(annotated.loadDirect(), FLAGS_num_ops);
    annotated.clearAuxChk();
    delete aux_;
  }
}
// Test helper that constructs a DSchedTimestamp from a raw step value.
struct DSchedTimestampTest : public DSchedTimestamp {
  explicit DSchedTimestampTest(size_t v) : DSchedTimestamp(v) {}
};
// Exercises ThreadTimestamps: per-thread recency queries, setIfNotPresent
// (which does not move a timestamp backwards or forwards once set), advance,
// sync (pointwise max merge), and atLeastAsRecentAsAny comparisons.
TEST(DeterministicSchedule, thread_timestamps) {
  ThreadTimestamps tss;
  DSchedThreadId tid0(0);
  DSchedThreadId tid1(1);
  ASSERT_FALSE(tss.atLeastAsRecentAs(tid0, DSchedTimestampTest(1)));
  tss.setIfNotPresent(tid0, DSchedTimestampTest(1));
  ASSERT_TRUE(tss.atLeastAsRecentAs(tid0, DSchedTimestampTest(1)));
  ASSERT_FALSE(tss.atLeastAsRecentAs(tid0, DSchedTimestampTest(2)));
  ASSERT_FALSE(tss.atLeastAsRecentAs(tid1, DSchedTimestampTest(1)));
  // Already present: this setIfNotPresent must be a no-op.
  tss.setIfNotPresent(tid0, DSchedTimestampTest(2));
  ASSERT_FALSE(tss.atLeastAsRecentAs(tid0, DSchedTimestampTest(2)));
  auto ts = tss.advance(tid0);
  ASSERT_TRUE(ts.atLeastAsRecentAs(DSchedTimestampTest(2)));
  ASSERT_FALSE(ts.atLeastAsRecentAs(DSchedTimestampTest(3)));
  ASSERT_TRUE(tss.atLeastAsRecentAs(tid0, DSchedTimestampTest(2)));
  ASSERT_FALSE(tss.atLeastAsRecentAs(tid1, DSchedTimestampTest(1)));
  ThreadTimestamps tss2;
  tss2.setIfNotPresent(tid1, DSchedTimestampTest(3));
  ASSERT_FALSE(tss2.atLeastAsRecentAs(tid1, DSchedTimestampTest(4)));
  ASSERT_TRUE(tss2.atLeastAsRecentAs(tid1, DSchedTimestampTest(3)));
  ASSERT_FALSE(tss.atLeastAsRecentAsAny(tss2));
  // sync merges tss2 into tss (tid1 now known at timestamp 3).
  tss.sync(tss2);
  ASSERT_TRUE(tss.atLeastAsRecentAs(tid1, DSchedTimestampTest(3)));
  ASSERT_FALSE(tss.atLeastAsRecentAs(tid1, DSchedTimestampTest(4)));
  ThreadTimestamps tss3;
  tss3.setIfNotPresent(tid1, DSchedTimestampTest(4));
  ASSERT_TRUE(tss3.atLeastAsRecentAsAny(tss2));
  ASSERT_FALSE(tss2.atLeastAsRecentAsAny(tss3));
  // Disjoint thread ids: neither side dominates the other.
  ThreadTimestamps tss4, tss5;
  tss4.setIfNotPresent(DSchedThreadId(10), DSchedTimestampTest(5));
  tss5.setIfNotPresent(DSchedThreadId(11), DSchedTimestampTest(5));
  ASSERT_FALSE(tss4.atLeastAsRecentAsAny(tss5));
  ASSERT_FALSE(tss5.atLeastAsRecentAsAny(tss4));
}
// Test entry point: gtest consumes its own flags first, then gflags parses
// the remainder (seed, num_threads, bug, ...).
int main(int argc, char** argv) {
  testing::InitGoogleTest(&argc, argv);
  gflags::ParseCommandLineFlags(&argc, &argv, true);
  return RUN_ALL_TESTS();
}
<|start_filename|>folly/detail/RangeCommon.h<|end_filename|>
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <algorithm>
#include <cassert>
#include <string>
#include <folly/Likely.h>
namespace folly {
namespace detail {
/***
* The qfind_first_byte_of_* functions are declared here, before Range.h, so
* they cannot take StringPiece values. But they're there to operate on
* StringPiece values. Dependency cycles: fun.
*
* StringPieceLite is here to break that dependency cycle.
*/
// A minimal, non-owning (pointer-pair) view of a character range.
// Exists so the qfind_first_byte_of_* helpers can be declared before
// Range.h without creating a dependency cycle with StringPiece.
class StringPieceLite {
 public:
  StringPieceLite(const char* b, const char* e) : first_(b), last_(e) {}
  // Implicitly convertible from anything exposing data()/size()
  // (e.g. folly::Range, std::string).
  template <typename Range>
  /* implicit */ StringPieceLite(const Range& r)
      : StringPieceLite(r.data(), r.data() + r.size()) {}
  const char* data() const {
    return first_;
  }
  const char* begin() const {
    return first_;
  }
  const char* end() const {
    return last_;
  }
  size_t size() const {
    return size_t(last_ - first_);
  }
  bool empty() const {
    return first_ == last_;
  }
  // Unchecked in release builds; asserts i is in range in debug builds.
  const char& operator[](size_t i) const {
    assert(size() > i);
    return first_[i];
  }
  // Explicit conversion back to any range constructible from (begin, end).
  template <typename Range>
  explicit operator Range() const {
    return Range(begin(), end());
  }

 private:
  const char* first_;
  const char* last_;
};
// Portable fallback: returns the index of the first haystack byte that
// matches any needle byte, or std::string::npos if none does.
inline size_t qfind_first_byte_of_std(
    const StringPieceLite haystack,
    const StringPieceLite needles) {
  auto const match = std::find_first_of(
      haystack.begin(),
      haystack.end(),
      needles.begin(),
      needles.end(),
      [](char a, char b) { return a == b; });
  if (match == haystack.end()) {
    return std::string::npos;
  }
  return size_t(match - haystack.begin());
}
size_t qfind_first_byte_of_bitset(
const StringPieceLite haystack,
const StringPieceLite needles);
size_t qfind_first_byte_of_byteset(
const StringPieceLite haystack,
const StringPieceLite needles);
// Non-SSE dispatcher: picks between the byteset implementation and the
// std::find_first_of fallback depending on input sizes.
inline size_t qfind_first_byte_of_nosse(
    const StringPieceLite haystack,
    const StringPieceLite needles) {
  if (UNLIKELY(needles.empty() || haystack.empty())) {
    return std::string::npos;
  }
  // The thresholds below were empirically determined by benchmarking.
  // This is not an exact science since it depends on the CPU, the size of
  // needles, and the size of haystack.
  bool const useByteset = needles.size() >= 32 ||
      (needles.size() >= 16 && haystack.size() <= 64) ||
      (needles.size() >= 4 && haystack.size() <= 10);
  if (useByteset) {
    return qfind_first_byte_of_byteset(haystack, needles);
  }
  return qfind_first_byte_of_std(haystack, needles);
}
} // namespace detail
} // namespace folly
<|start_filename|>folly/executors/ExecutorWithPriority.h<|end_filename|>
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <folly/Executor.h>
#include <atomic>
namespace folly {
// Wraps another Executor together with a fixed priority. add() and the
// keep-alive hooks are defined in the corresponding .cpp; presumably add()
// forwards func to executor_ at priority_ -- confirm against the definition.
class ExecutorWithPriority : public virtual Executor {
 public:
  // Identity matters for keep-alive refcounting, so the wrapper is
  // neither copyable nor movable.
  ExecutorWithPriority(ExecutorWithPriority const&) = delete;
  ExecutorWithPriority& operator=(ExecutorWithPriority const&) = delete;
  ExecutorWithPriority(ExecutorWithPriority&&) = delete;
  ExecutorWithPriority& operator=(ExecutorWithPriority&&) = delete;
  // Factory: the only way to construct one; returns a keep-alive handle.
  static Executor::KeepAlive<ExecutorWithPriority> create(
      KeepAlive<Executor> executor,
      int8_t priority);
  void add(Func func) override;

 protected:
  bool keepAliveAcquire() override;
  void keepAliveRelease() override;

 private:
  // Private: instances are created via create() only.
  ExecutorWithPriority(KeepAlive<Executor> executor, int8_t priority)
      : executor_(std::move(executor)), priority_(priority) {}
  // Starts at 1 for the initial KeepAlive handed out by create().
  std::atomic<ssize_t> keepAliveCounter_{1};
  KeepAlive<Executor> executor_;
  int8_t priority_;
};
} // namespace folly
<|start_filename|>folly/futures/test/SelfDestructTest.cpp<|end_filename|>
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/executors/InlineExecutor.h>
#include <folly/futures/Future.h>
#include <folly/portability/GTest.h>
using namespace folly;
// The promise deletes itself from inside its own thenValue() callback;
// the chained future must still deliver the transformed value.
TEST(SelfDestruct, then) {
  auto* promise = new Promise<int>();
  auto fut = promise->getFuture().thenValue([promise](int value) {
    delete promise;
    return value + 1;
  });
  promise->setValue(123);
  EXPECT_EQ(124, std::move(fut).get());
}
// The promise deletes itself from an ensure() callback; the future must
// still deliver the value set before destruction.
TEST(SelfDestruct, ensure) {
  auto* promise = new Promise<int>();
  auto fut = promise->getFuture().ensure([promise] { delete promise; });
  promise->setValue(123);
  EXPECT_EQ(123, std::move(fut).get());
}
// Exception type thrown by ThrowingExecutor to signal a rejected task.
class ThrowingExecutorError : public std::runtime_error {
 public:
  using std::runtime_error::runtime_error;
};
// An executor whose add() always throws, for exercising error paths.
class ThrowingExecutor : public folly::Executor {
 public:
  void add(folly::Func) override {
    throw ThrowingExecutorError("ThrowingExecutor::add");
  }
};
// When the executor rejects the callback by throwing from add(), the error
// must surface through the future and be recoverable via onError -- and the
// handler may safely delete the promise.
TEST(SelfDestruct, throwingExecutor) {
  ThrowingExecutor executor;
  auto* p = new Promise<int>();
  auto future =
      p->getFuture().via(&executor).onError([p](ThrowingExecutorError const&) {
        delete p;
        return 456;
      });
  p->setValue(123);
  EXPECT_EQ(456, std::move(future).get());
}
// Same idea with an InlineExecutor: the callback itself deletes the promise
// and then throws; onError must recover the exception into a value.
TEST(SelfDestruct, throwingInlineExecutor) {
  InlineExecutor executor;
  auto* p = new Promise<int>();
  auto future = p->getFuture()
                    .via(&executor)
                    .thenValue([p](auto &&) -> int {
                      delete p;
                      throw ThrowingExecutorError("callback throws");
                    })
                    .onError([](ThrowingExecutorError const&) { return 456; });
  p->setValue(123);
  EXPECT_EQ(456, std::move(future).get());
}
<|start_filename|>folly/experimental/pushmi/examples/for_each.h<|end_filename|>
/*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <folly/experimental/pushmi/examples/bulk.h>
#include <folly/experimental/pushmi/o/just.h>
#include <folly/experimental/pushmi/o/submit.h>
namespace folly {
namespace pushmi {
/// for_each: parallel-algorithm-style iteration built on pushmi's bulk
/// operator. Applies f to every element in [begin, end) under the given
/// execution policy and blocks until completion.
PUSHMI_INLINE_VAR constexpr struct for_each_fn {
 private:
  // Adapts the user's unary function to bulk's (shared-state, cursor)
  // signature by dereferencing the cursor.
  template <class Function>
  struct fn {
    Function f_;
    template <class Cursor>
    void operator()(detail::any, Cursor cursor) const {
      f_(*cursor);
    }
  };
  // Forwards the seed value through unchanged.
  struct identity {
    template <class T>
    auto operator()(T&& t) const {
      return (T &&) t;
    }
  };
  // Produces the (unused) initial shared state for bulk.
  struct zero {
    int operator()(detail::any) const noexcept {
      return 0;
    }
  };

 public:
  template <class ExecutionPolicy, class RandomAccessIterator, class Function>
  void operator()(
      ExecutionPolicy&& policy,
      RandomAccessIterator begin,
      RandomAccessIterator end,
      Function f) const {
    // just(0) seeds the pipeline; blocking_submit() waits for all items.
    operators::just(0) |
        operators::bulk(
            fn<Function>{f}, begin, end, policy, identity{}, zero{}) |
        operators::blocking_submit();
  }
} for_each{};
} // namespace pushmi
} // namespace folly
<|start_filename|>folly/test/FixedStringTest.cpp<|end_filename|>
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Author: <EMAIL>
#include <folly/FixedString.h>
#include <folly/portability/GTest.h>
#define FS(x) ::folly::makeFixedString(x)
using namespace folly::string_literals;
// Exercises the documentation examples: compile-time concatenation plus
// runtime replace() and constexpr creplace().
TEST(FixedStringExamples, Examples) {
  // Example from the docs:
  using namespace folly;
  constexpr auto hello = makeFixedString("hello"); // a FixedString<5>
  constexpr auto world = makeFixedString("world"); // another FixedString<5>
  constexpr auto hello_world = hello + ' ' + world + '!';
  static_assert(hello_world == "hello world!", "w00t");
  EXPECT_STREQ("hello world!", hello_world.c_str());
  FixedString<10> test{"****"};
  test.replace(1, 2, "!!!!");
  EXPECT_STREQ("*!!!!*", test.c_str());
  static_assert(makeFixedString("****").creplace(1, 2, "!!!!") == "*!!!!*", "");
}
// A default-constructed FixedString is empty and NUL-terminated, and stays
// that way through a constexpr copy.
TEST(FixedStringCtorTest, Default) {
  constexpr folly::FixedString<42> s{};
  static_assert(s[0] == '\0', "");
  static_assert(s.size() == 0u, "");
  constexpr auto s2 = s;
  static_assert(s2[0] == '\0', "");
  static_assert(s2.size() == 0u, "");
}
// Construction from string literals, into both oversized and exact-size
// buffers; the terminating NUL is always present.
TEST(FixedStringCtorTest, FromLiterals) {
  constexpr folly::FixedString<42> s{"hello world"};
  static_assert(s[0] == 'h', "");
  constexpr folly::FixedString<11> s2{"hello world"};
  static_assert(s2[0] == 'h', "");
  static_assert(s2[10] == 'd', "");
  static_assert(s2[11] == '\0', "");
  // Does not compile, hurray! :-)
  // constexpr char a[1] = {'a'};
  // constexpr folly::FixedString<10> s3(a);
}
// Construction from (pointer, length) pairs; the buffer may be larger than,
// equal to, or (via truncation of the source) smaller than the literal.
TEST(FixedStringCtorTest, FromPtrAndLength) {
  constexpr folly::FixedString<11> s{"hello world", 11};
  static_assert(s[0] == 'h', "");
  static_assert(s[10] == 'd', "");
  static_assert(s[11] == '\0', "");
  static_assert(s.size() == 11u, "");
  constexpr folly::FixedString<5> s2{"hello world", 5};
  static_assert(s2[0] == 'h', "");
  static_assert(s2[4] == 'o', "");
  static_assert(s2[5] == '\0', "");
  static_assert(s2.size() == 5u, "");
  constexpr folly::FixedString<20> s3{"hello world", 5};
  // NOTE(review): the next four assertions previously re-checked s2 (an
  // apparent copy-paste slip after declaring s3); they now verify s3.
  static_assert(s3[0] == 'h', "");
  static_assert(s3[4] == 'o', "");
  static_assert(s3[5] == '\0', "");
  static_assert(s3.size() == 5u, "");
  static_assert("hello" == s3, "");
  static_assert(s3 == "hello", "");
  static_assert(s3 == s2, "");
  static_assert("hell" != s3, "");
  static_assert(s3 != "helloooo", "");
  static_assert(!(s3 != s2), "");
}
// Substring constructors: (string, offset, npos) and
// (string, offset, count), plus the initializer-list constructor.
TEST(FixedStringCtorTest, FromStringAndOffset) {
  constexpr folly::FixedString<11> s{"hello world"};
  constexpr folly::FixedString<5> s2{s, 6u, npos};
  static_assert(s2 == "world", "");
  // Offset == size yields an empty string.
  constexpr folly::FixedString<0> s3{s, 11u, npos};
  static_assert(s3 == "", "");
  // Out of bounds offset, does not compile
  // constexpr folly::FixedString<0> s4{s, 12};
}
TEST(FixedStringCtorTest, FromStringOffsetAndCount) {
  constexpr folly::FixedString<11> s{"hello world"};
  constexpr folly::FixedString<4> s2{s, 6u, 4u};
  static_assert(s2 == "worl", "");
  constexpr folly::FixedString<5> s3{s, 6u, 5u};
  static_assert(s3 == "world", "");
  // Out of bounds count, does not compile:
  // constexpr folly::FixedString<5> s4{s, 6, 6};
}
TEST(FixedStringCtorTest, FromInitializerList) {
  constexpr folly::FixedString<11> s{
      'h', 'e', 'l', 'l', 'o', ' ', 'w', 'o', 'r', 'l', 'd'};
  static_assert(s == "hello world", "");
  // Out of bounds count, does not compile:
  // constexpr folly::FixedString<10> s{
  // {'h','e','l','l','o',' ','w','o','r','l','d'}};
}
// User-defined literals: the GNU literal-operator-template form (_fs) and
// the sized suffixes (_fs8, _fs64); size() reflects the string, not the
// buffer capacity.
TEST(FixedStringCtorTest, FromUDL) {
  using namespace folly::literals;
#if defined(__GNUC__)
  constexpr auto x = "hello"_fs;
  static_assert(
      std::is_same<decltype(x), const folly::FixedString<5>>::value, "");
  static_assert(x[0] == 'h', "");
  static_assert(x[1] == 'e', "");
  static_assert(x[2] == 'l', "");
  static_assert(x[3] == 'l', "");
  static_assert(x[4] == 'o', "");
  static_assert(x[5] == '\0', "");
  static_assert(x.size() == 5u, "");
#endif
  constexpr auto y = "goodbye"_fs8;
  static_assert(
      std::is_same<decltype(y), const folly::FixedString<8>>::value, "");
  static_assert(y.size() == 7u, "");
  static_assert(y == "goodbye", "");
  constexpr auto z = "now is the time for all good llamas"_fs64;
  static_assert(
      std::is_same<decltype(z), const folly::FixedString<64>>::value, "");
  static_assert(z.size() == 35u, "");
  static_assert(z == "now is the time for all good llamas", "");
}
// Constexpr concatenation of a FixedString with a string literal.
TEST(FixedStringConcatTest, FromStringAndLiteral) {
  constexpr folly::FixedString<42> s{"hello world"};
  constexpr auto res = s + "!!!";
  static_assert(res.size() == 14u, "");
  static_assert(res == "hello world!!!", "");
}
// Constexpr concatenation of two FixedString objects. NOTE(review): the
// original body duplicated the string+literal case verbatim; per the test
// name, this now concatenates two FixedStrings.
TEST(FixedStringConcatTest, FromTwoStrings) {
  constexpr folly::FixedString<42> s{"hello world"};
  constexpr folly::FixedString<3> bang{"!!!"};
  constexpr auto res = s + bang;
  static_assert(res.size() == 14u, "");
  static_assert(res == "hello world!!!", "");
}
// swap() must work in a constexpr context; the result is checked both at
// compile time and at run time.
constexpr folly::FixedString<20> constexpr_swap_test() {
  folly::FixedString<10> tmp1{"hello"}, tmp2{"world!"};
  tmp2.swap(tmp1);
  return tmp1 + tmp2;
}
TEST(FixedStringSwapTest, ConstexprSwap) {
  static_assert(constexpr_swap_test() == "world!hello", "");
}
TEST(FixedStringSwapTest, RuntimeSwap) {
  folly::FixedString<10> tmp1{"hello"}, tmp2{"world!"};
  tmp2.swap(tmp1);
  EXPECT_STREQ((tmp1 + tmp2).c_str(), "world!hello");
}
// Constexpr helpers: each mutates a FixedString<10> via an operator= /
// assign() overload and returns the result for compile-time checking.
constexpr folly::FixedString<10> constexpr_assign_string_test_1() {
  folly::FixedString<10> tmp1, tmp2{"world!"};
  tmp1 = tmp2;
  return tmp1;
}
constexpr folly::FixedString<10> constexpr_assign_string_test_2() {
  folly::FixedString<10> tmp{"aaaaaaaaaa"};
  tmp.assign("hello"_fs8);
  return tmp;
}
// assign(str, pos, count): "goodbye"[3..5) == "db".
constexpr folly::FixedString<10> constexpr_assign_string_test_3() {
  folly::FixedString<10> tmp{"aaaaaaaaaa"};
  tmp.assign("goodbye"_fs8, 3u, 2u);
  return tmp;
}
// assign(str, pos, npos): everything from position 3 on == "dbye".
constexpr folly::FixedString<10> constexpr_assign_string_test_4() {
  folly::FixedString<10> tmp{"aaaaaaaaaa"};
  tmp.assign("goodbye"_fs8, 3u, npos);
  return tmp;
}
TEST(FixedStringAssignTest, ConstexprAssignString) {
  static_assert(constexpr_assign_string_test_1() == "world!", "");
  static_assert(constexpr_assign_string_test_2() == "hello", "");
  static_assert(constexpr_assign_string_test_3() == "db", "");
  static_assert(constexpr_assign_string_test_4() == "dbye", "");
}
// Same overloads exercised at run time.
TEST(FixedStringAssignTest, RuntimeAssignString) {
  folly::FixedString<10> tmp1, tmp2{"world!"};
  tmp1 = tmp2;
  EXPECT_STREQ(tmp1.c_str(), "world!");
  tmp1.assign("goodbye"_fs8);
  EXPECT_STREQ("goodbye", tmp1.c_str());
  tmp1.assign("goodbye"_fs8, 3u, npos);
  EXPECT_STREQ("dbye", tmp1.c_str());
  tmp1.assign("goodbye"_fs8, 3u, 3u);
  EXPECT_STREQ("dby", tmp1.c_str());
}
// Assignment from raw string literals: operator=, assign(literal), and
// assign(literal, count), each in constexpr and runtime flavors.
constexpr folly::FixedString<10> constexpr_assign_literal_test_1() {
  folly::FixedString<10> tmp{"aaaaaaaaaa"};
  tmp = "hello";
  // Not null-terminated, does not compile:
  // using C = const char[1];
  // tmp = C{'a'};
  return tmp;
}
constexpr folly::FixedString<10> constexpr_assign_literal_test_2() {
  folly::FixedString<10> tmp{"aaaaaaaaaa"};
  tmp.assign("hello");
  return tmp;
}
// assign(literal, count) keeps only the first `count` characters.
constexpr folly::FixedString<10> constexpr_assign_literal_test_3() {
  folly::FixedString<10> tmp{"aaaaaaaaaa"};
  tmp.assign("goodbye", 4u);
  return tmp;
}
TEST(FixedStringAssignTest, ConstexprAssignLiteral) {
  static_assert(constexpr_assign_literal_test_1() == "hello", "");
  static_assert(constexpr_assign_literal_test_2() == "hello", "");
  static_assert(constexpr_assign_literal_test_3() == "good", "");
}
TEST(FixedStringAssignTest, RuntimeAssignLiteral) {
  folly::FixedString<10> tmp{"aaaaaaaaaa"};
  tmp = "hello";
  EXPECT_STREQ("hello", tmp.c_str());
  tmp.assign("goodbye");
  EXPECT_STREQ("goodbye", tmp.c_str());
  tmp.assign("goodbye", 4u);
  EXPECT_STREQ("good", tmp.c_str());
}
// operator[] is unchecked (valid through the terminating NUL); at() does
// bounds checking and throws std::out_of_range past the NUL.
TEST(FixedStringIndexTest, Index) {
  constexpr folly::FixedString<11> digits{"0123456789"};
  static_assert(digits[0] == '0', "");
  static_assert(digits[1] == '1', "");
  static_assert(digits[2] == '2', "");
  static_assert(digits[9] == '9', "");
  static_assert(digits[10] == '\0', "");
#ifdef NDEBUG
  // This should be allowed and work in constexpr mode since the internal array
  // is actually big enough and op[] does no parameter validation:
  static_assert(digits[11] == '\0', "");
#endif
  static_assert(digits.at(0) == '0', "");
  static_assert(digits.at(1) == '1', "");
  static_assert(digits.at(2) == '2', "");
  static_assert(digits.at(9) == '9', "");
  static_assert(digits.at(10) == '\0', "");
  EXPECT_THROW(digits.at(11), std::out_of_range);
}
// compare() and the full set of relational operators between FixedStrings
// of different capacities, and against string literals on either side.
TEST(FixedStringCompareTest, Compare) {
  constexpr folly::FixedString<10> tmp1{"aaaaaaaaaa"};
  constexpr folly::FixedString<12> tmp2{"aaaaaaaaaba"};
  static_assert(-1 == tmp1.compare(tmp2), "");
  static_assert(1 == tmp2.compare(tmp1), "");
  static_assert(0 == tmp2.compare(tmp2), "");
  static_assert(tmp1 < tmp2, "");
  static_assert(tmp1 <= tmp2, "");
  static_assert(tmp2 > tmp1, "");
  static_assert(tmp2 >= tmp1, "");
  static_assert(tmp2 == tmp2, ""); // @nolint
  static_assert(tmp2 <= tmp2, ""); // @nolint
  static_assert(tmp2 >= tmp2, ""); // @nolint
  static_assert(!(tmp2 < tmp2), "");
  static_assert(!(tmp2 > tmp2), "");
  // A shorter string that is a prefix of a longer one compares less.
  constexpr folly::FixedString<10> tmp3{"aaa"};
  constexpr folly::FixedString<12> tmp4{"aaaa"};
  static_assert(-1 == tmp3.compare(tmp4), "");
  static_assert(1 == tmp4.compare(tmp3), "");
  static_assert(tmp3 < tmp4, "");
  static_assert(tmp3 <= tmp4, "");
  static_assert(tmp4 > tmp3, "");
  static_assert(tmp4 >= tmp3, "");
  static_assert(tmp3 < "aaaa", "");
  static_assert(tmp3 <= "aaaa", "");
  static_assert(!(tmp3 == tmp4), "");
  static_assert(tmp3 != tmp4, "");
  static_assert("aaaa" > tmp3, "");
  static_assert("aaaa" >= tmp3, "");
  static_assert("aaaa" != tmp3, "");
  static_assert("aaa" == tmp3, "");
  static_assert(tmp3 != "aaaa", "");
  static_assert(tmp3 == "aaa", "");
}
// Interop with std::string: compare() and every relational operator, in
// both operand orders.
TEST(FixedStringCompareTest, CompareStdString) {
  constexpr folly::FixedString<10> tmp1{"aaaaaaaaaa"};
  std::string const tmp2{"aaaaaaaaaba"};
  EXPECT_EQ(-1, tmp1.compare(tmp2));
  // These are specifically testing the operators, and so we can't rely
  // on whatever the implementation details of EXPECT_<OP> might be.
  EXPECT_FALSE(tmp1 == tmp2);
  EXPECT_FALSE(tmp2 == tmp1);
  EXPECT_TRUE(tmp1 != tmp2);
  EXPECT_TRUE(tmp2 != tmp1);
  EXPECT_TRUE(tmp1 < tmp2);
  EXPECT_FALSE(tmp2 < tmp1);
  EXPECT_TRUE(tmp1 <= tmp2);
  EXPECT_FALSE(tmp2 <= tmp1);
  EXPECT_FALSE(tmp1 > tmp2);
  EXPECT_TRUE(tmp2 > tmp1);
  EXPECT_FALSE(tmp1 >= tmp2);
  EXPECT_TRUE(tmp2 >= tmp1);
}
// append() overloads taking (count, char) and (string, pos, count),
// exercised in constexpr and runtime flavors.
constexpr folly::FixedString<20> constexpr_append_string_test() {
  folly::FixedString<20> a{"hello"}, b{"X world!"};
  a.append(1u, ' ');
  a.append(b, 2u, 5u);
  a.append(b, 7u, 1u);
  return a;
}
TEST(FixedStringAssignTest, ConstexprAppendString) {
  static_assert(constexpr_append_string_test() == "hello world!", "");
}
TEST(FixedStringAssignTest, RuntimeAppendString) {
  folly::FixedString<20> a{"hello"}, b{"X world!"};
  a.append(1u, ' ');
  a.append(b, 2u, 5u);
  a.append(b, 7u, 1u);
  EXPECT_STREQ("hello world!", a.c_str());
}
// Raw-pointer overloads; "X world!" + 2u points past the "X " prefix.
constexpr folly::FixedString<20> constexpr_append_literal_test() {
  folly::FixedString<20> a{"hello"};
  a.append(1u, ' ');
  a.append("X world!" + 2u, 5u);
  a.append("X world!" + 7u);
  return a;
}
TEST(FixedStringAssignTest, ConstexprAppendLiteral) {
  static_assert(constexpr_append_literal_test() == "hello world!", "");
}
TEST(FixedStringAssignTest, RuntimeAppendLiteral) {
  folly::FixedString<20> a{"hello"};
  a.append(1u, ' ');
  a.append("X world!" + 2u, 5u);
  a.append("X world!" + 7u);
  EXPECT_STREQ("hello world!", a.c_str());
}
// cappend(): the non-mutating variant that returns a new, larger
// FixedString, chainable entirely at compile time.
TEST(FixedStringCAppendTest, CAppendString) {
  constexpr folly::FixedString<10> a{"hello"}, b{"X world!"};
  constexpr auto tmp1 = a.cappend(' ');
  constexpr auto tmp2 = tmp1.cappend(b, 2u, 5u);
  constexpr auto tmp3 = tmp2.cappend(b, 7u, 1u);
  static_assert(tmp3 == "hello world!", "");
}
TEST(FixedStringCAppendTest, CAppendLiteral) {
  constexpr folly::FixedString<10> a{"hello"};
  constexpr auto tmp1 = a.cappend(' ');
  constexpr auto tmp2 = tmp1.cappend("X world!", 2u, 5u);
  constexpr auto tmp3 = tmp2.cappend("X world!", 7u, 1u);
  static_assert(tmp3 == "hello world!", "");
}
// replace(pos, count, str) may shrink the string: replacing 5 chars with 2
// turns "abcdefghij" into "aXXghij".
constexpr folly::FixedString<10> constexpr_replace_string_test() {
  folly::FixedString<10> tmp{"abcdefghij"};
  tmp.replace(1, 5, FS("XX"));
  return tmp;
}
TEST(FixedStringReplaceTest, ConstexprReplaceString) {
  static_assert(constexpr_replace_string_test().size() == 7u, "");
  static_assert(constexpr_replace_string_test() == "aXXghij", "");
}
TEST(FixedStringReplaceTest, RuntimeReplaceString) {
  folly::FixedString<10> tmp{"abcdefghij"};
  tmp.replace(1, 5, FS("XX"));
  EXPECT_EQ(7u, tmp.size());
  EXPECT_STREQ("aXXghij", tmp.c_str());
}
// erase(pos[, count]) mutates in place; erase(size()) is a no-op.
TEST(FixedStringEraseTest, RuntimeEraseTest) {
  auto x = FS("abcdefghijklmnopqrstuvwxyz"), y = x;
  x.erase(x.size());
  EXPECT_EQ(26u, x.size());
  EXPECT_STREQ(y.c_str(), x.c_str());
  x.erase(25u).erase(24u);
  EXPECT_EQ(24u, x.size());
  EXPECT_STREQ("abcdefghijklmnopqrstuvwx", x.c_str());
  x.erase(1u, x.size() - 2u);
  EXPECT_EQ(2u, x.size());
  EXPECT_STREQ("ax", x.c_str());
}
// cerase(): the non-mutating constexpr variant; cerase() with no arguments
// clears the whole string.
TEST(FixedStringEraseTest, CEraseTest) {
  constexpr auto x = FS("abcdefghijklmnopqrstuvwxyz"), y = x;
  constexpr auto tmp0 = x.cerase(x.size());
  static_assert(26u == tmp0.size(), "");
  static_assert(y == tmp0, "");
  constexpr auto tmp1 = tmp0.cerase(25u).cerase(24u);
  static_assert(24u == tmp1.size(), "");
  static_assert("abcdefghijklmnopqrstuvwx" == tmp1, "");
  constexpr auto tmp2 = tmp1.cerase(1u, tmp1.size() - 2u);
  static_assert(2u == tmp2.size(), "");
  static_assert("ax" == tmp2, "");
  constexpr auto tmp3 = tmp2.cerase();
  static_assert("" == tmp3, "");
}
// find() with FixedString needles: first match wins, a start position skips
// earlier matches, and the empty needle matches at position 0.
TEST(FixedStringFindTest, FindString) {
  constexpr folly::FixedString<10> tmp{"hijdefghij"};
  static_assert(tmp.find(FS("hij")) == 0u, "");
  static_assert(tmp.find(FS("hij"), 1u) == 7u, "");
  static_assert(tmp.find(FS("hijdefghij")) == 0u, "");
  static_assert(tmp.find(FS("")) == 0u, "");
}
// Same searches with raw string-literal needles.
TEST(FixedStringFindTest, FindLiteral) {
  constexpr folly::FixedString<10> tmp{"hijdefghij"};
  static_assert(tmp.find("hij") == 0u, "");
  static_assert(tmp.find("hij", 1u) == 7u, "");
  static_assert(tmp.find("hijdefghij") == 0u, "");
}
// NOTE(review): this case exercises forward find(char) despite living in the
// ReverseFind suite — presumably a historical misfiling; the assertions are
// correct for forward search ("This is a string": 's' at 3, 6, 10, 13... no,
// at 3 and 10 within the tested starts).
TEST(FixedStringReverseFindTest, FindChar) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.find('s') == 3u, "");
  static_assert(tmp.find('s', 9u) == 10u, "");
  static_assert(tmp.find('s', 10u) == 10u, "");
  static_assert(tmp.find('s', 11u) == tmp.npos, "");
}
// rfind(): last match wins; a pos argument caps where a match may BEGIN;
// rfind of the empty string returns size().
TEST(FixedStringReverseFindTest, ReverseFindString) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.rfind(FS("is")) == 5u, "");
  static_assert(tmp.rfind(FS("is"), 4u) == 2u, "");
  static_assert(tmp.rfind(FS("This is a string")) == 0u, "");
  static_assert(tmp.rfind(FS("This is a string!")) == tmp.npos, "");
  static_assert(tmp.rfind(FS("")) == 16u, "");
}
// Same rfind() searches with raw literal needles.
TEST(FixedStringReverseFindTest, ReverseFindLiteral) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.rfind("is") == 5u, "");
  static_assert(tmp.rfind("is", 4u) == 2u, "");
  static_assert(tmp.rfind("This is a string") == 0u, "");
  static_assert(tmp.rfind("This is a string!") == tmp.npos, "");
  static_assert(tmp.rfind("") == 16u, "");
}
// rfind(char): pos is inclusive — rfind('s', 3) can still match index 3.
TEST(FixedStringReverseFindTest, ReverseFindChar) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.rfind('s') == 10u, "");
  static_assert(tmp.rfind('s', 5u) == 3u, "");
  static_assert(tmp.rfind('s', 3u) == 3u, "");
  static_assert(tmp.rfind('s', 2u) == tmp.npos, "");
}
// find_first_of(): earliest position of ANY character from the set,
// including the empty-set and empty-haystack corner cases (all npos).
TEST(FixedStringFindFirstOfTest, FindFirstOfString) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.find_first_of(FS("hi")) == 1u, "");
  static_assert(tmp.find_first_of(FS("xi")) == 2u, "");
  static_assert(tmp.find_first_of(FS("xi"), 6u) == 13u, "");
  static_assert(tmp.find_first_of(FS("xz")) == tmp.npos, "");
  static_assert(FS("a").find_first_of(FS("cba")) == 0u, "");
  static_assert(FS("").find_first_of(FS("cba")) == tmp.npos, "");
  static_assert(FS("a").find_first_of(FS("")) == tmp.npos, "");
  static_assert(FS("").find_first_of(FS("")) == tmp.npos, "");
}
// Same checks with literal sets; the (ptr, pos, count) overload only
// considers the first `count` characters of the set ("xis", 2 -> "xi").
TEST(FixedStringFindFirstOfTest, FindFirstOfLiteral) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.find_first_of("hi") == 1u, "");
  static_assert(tmp.find_first_of("xi") == 2u, "");
  static_assert(tmp.find_first_of("xi", 6u) == 13u, "");
  static_assert(tmp.find_first_of("xis", 6u, 2u) == 13u, "");
  static_assert(tmp.find_first_of("xz") == tmp.npos, "");
  static_assert(FS("a").find_first_of("cba") == 0u, "");
  static_assert(FS("").find_first_of("cba") == tmp.npos, "");
  static_assert(FS("a").find_first_of("") == tmp.npos, "");
  static_assert(FS("").find_first_of("") == tmp.npos, "");
}
// Single-character set overload.
TEST(FixedStringFindFirstOfTest, FindFirstOfChar) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.find_first_of('h') == 1u, "");
  static_assert(tmp.find_first_of('i') == 2u, "");
  static_assert(tmp.find_first_of('i', 6u) == 13u, "");
  static_assert(tmp.find_first_of('x') == tmp.npos, "");
  static_assert(FS("a").find_first_of('a') == 0u, "");
  static_assert(FS("").find_first_of('a') == tmp.npos, "");
}
// find_first_not_of(): earliest position NOT in the set. Note the empty set
// rejects nothing, so a non-empty string matches at 0.
TEST(FixedStringFindFirstNotOfTest, FindFirstNotOfString) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.find_first_not_of(FS("Ti")) == 1u, "");
  static_assert(tmp.find_first_not_of(FS("hT")) == 2u, "");
  static_assert(tmp.find_first_not_of(FS("s atr"), 6u) == 13u, "");
  static_assert(tmp.find_first_not_of(FS("This atrng")) == tmp.npos, "");
  static_assert(FS("a").find_first_not_of(FS("X")) == 0u, "");
  static_assert(FS("").find_first_not_of(FS("cba")) == tmp.npos, "");
  static_assert(FS("a").find_first_not_of(FS("")) == 0u, "");
  static_assert(FS("").find_first_not_of(FS("")) == tmp.npos, "");
}
// Literal-set variants of the same checks.
TEST(FixedStringFindFirstNotOfTest, FindFirstNotOfLiteral) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.find_first_not_of("Ti") == 1u, "");
  static_assert(tmp.find_first_not_of("hT") == 2u, "");
  static_assert(tmp.find_first_not_of("s atr", 6u) == 13u, "");
  static_assert(tmp.find_first_not_of("This atrng") == tmp.npos, "");
  static_assert(FS("a").find_first_not_of("X") == 0u, "");
  static_assert(FS("").find_first_not_of("cba") == tmp.npos, "");
  static_assert(FS("a").find_first_not_of("") == 0u, "");
  static_assert(FS("").find_first_not_of("") == tmp.npos, "");
}
// Single-character exclusion overload.
TEST(FixedStringFindFirstNotOfTest, FindFirstNotOfChar) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.find_first_not_of('T') == 1u, "");
  static_assert(tmp.find_first_not_of('i') == 0u, "");
  static_assert(tmp.find_first_not_of('x', 6u) == 6u, "");
  static_assert(tmp.find_first_not_of('s', 6u) == 7u, "");
  static_assert(FS("a").find_first_not_of('a') == tmp.npos, "");
  static_assert(FS("").find_first_not_of('a') == tmp.npos, "");
}
// find_last_of(): latest position of ANY character from the set; pos caps
// the search (inclusive). Empty set / empty haystack yield npos.
TEST(FixedStringFindLastOfTest, FindLastOfString) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.find_last_of(FS("hi")) == 13u, "");
  static_assert(tmp.find_last_of(FS("xh")) == 1u, "");
  static_assert(tmp.find_last_of(FS("xi"), 6u) == 5u, "");
  static_assert(tmp.find_last_of(FS("xz")) == tmp.npos, "");
  static_assert(FS("a").find_last_of(FS("cba")) == 0u, "");
  static_assert(FS("").find_last_of(FS("cba")) == tmp.npos, "");
  static_assert(FS("a").find_last_of(FS("")) == tmp.npos, "");
  static_assert(FS("").find_last_of(FS("")) == tmp.npos, "");
}
// Literal variants; (ptr, pos, count) truncates the set ("xis", 2 -> "xi").
TEST(FixedStringFindLastOfTest, FindLastOfLiteral) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.find_last_of("hi") == 13u, "");
  static_assert(tmp.find_last_of("xh") == 1u, "");
  static_assert(tmp.find_last_of("xi", 6u) == 5u, "");
  static_assert(tmp.find_last_of("xis", 6u, 2u) == 5u, "");
  static_assert(tmp.find_last_of("xz") == tmp.npos, "");
  static_assert(FS("a").find_last_of("cba") == 0u, "");
  static_assert(FS("").find_last_of("cba") == tmp.npos, "");
  static_assert(FS("a").find_last_of("") == tmp.npos, "");
  static_assert(FS("").find_last_of("") == tmp.npos, "");
}
// Single-character set overload.
TEST(FixedStringFindLastOfTest, FindLastOfChar) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.find_last_of('h') == 1u, "");
  static_assert(tmp.find_last_of('i') == 13u, "");
  static_assert(tmp.find_last_of('i', 6u) == 5u, "");
  static_assert(tmp.find_last_of('x') == tmp.npos, "");
  static_assert(FS("a").find_last_of('a') == 0u, "");
  static_assert(FS("").find_last_of('a') == tmp.npos, "");
}
// find_last_not_of(): latest position NOT in the set. The empty set rejects
// nothing, so a non-empty string matches at its last index.
TEST(FixedStringFindLastNotOfTest, FindLastNotOfString) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.find_last_not_of(FS("gstrin")) == 9u, "");
  static_assert(tmp.find_last_not_of(FS("hT")) == 15u, "");
  static_assert(tmp.find_last_not_of(FS("s atr"), 6u) == 5u, "");
  static_assert(tmp.find_last_not_of(FS("This atrng")) == tmp.npos, "");
  static_assert(FS("a").find_last_not_of(FS("X")) == 0u, "");
  static_assert(FS("").find_last_not_of(FS("cba")) == tmp.npos, "");
  static_assert(FS("a").find_last_not_of(FS("")) == 0u, "");
  static_assert(FS("").find_last_not_of(FS("")) == tmp.npos, "");
}
// Literal variants; (" atrs", 6, 4) uses only " atr" as the exclusion set.
TEST(FixedStringFindLastNotOfTest, FindLastNotOfLiteral) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.find_last_not_of("gstrin") == 9u, "");
  static_assert(tmp.find_last_not_of("hT") == 15u, "");
  static_assert(tmp.find_last_not_of("s atr", 6u) == 5u, "");
  static_assert(tmp.find_last_not_of(" atrs", 6u, 4u) == 6u, "");
  static_assert(tmp.find_last_not_of("This atrng") == tmp.npos, "");
  static_assert(FS("a").find_last_not_of("X") == 0u, "");
  static_assert(FS("").find_last_not_of("cba") == tmp.npos, "");
  static_assert(FS("a").find_last_not_of("") == 0u, "");
  static_assert(FS("").find_last_not_of("") == tmp.npos, "");
}
// Single-character exclusion overload.
TEST(FixedStringFindLastNotOfTest, FindLastNotOfChar) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  static_assert(tmp.find_last_not_of('g') == 14u, "");
  static_assert(tmp.find_last_not_of('i') == 15u, "");
  static_assert(tmp.find_last_not_of('x', 6u) == 6u, "");
  static_assert(tmp.find_last_not_of('s', 6u) == 5u, "");
  static_assert(FS("a").find_last_not_of('a') == tmp.npos, "");
  static_assert(FS("").find_last_not_of('a') == tmp.npos, "");
}
// FixedString converts implicitly to std::string, both from a named constexpr
// object and from a user-defined-literal temporary (_fs16).
TEST(FixedStringConversionTest, ConversionToStdString) {
  constexpr folly::FixedString<16> tmp{"This is a string"};
  std::string str = tmp;
  EXPECT_STREQ("This is a string", str.c_str());
  str = "another string"_fs16;
  EXPECT_STREQ("another string", str.c_str());
}
// Counts ' ' characters by iterating in reverse in a C++14 constexpr context.
// The odd-looking increment clause is deliberate: it exercises every mutating
// reverse-iterator operation (pre/post ++/--, += and -=) while netting a
// single forward step per loop iteration. Do not "simplify" it.
constexpr std::size_t countSpacesReverse(folly::FixedString<50> s) {
  std::size_t count = 0u;
  auto i = s.rbegin();
  for (; i != s.rend(); ++i, --i, i++, i--, i += 1, i -= 1, i += 1) {
    if (' ' == *i) {
      ++count;
    }
  }
  return count;
}
// Drives countSpacesReverse() entirely at compile time.
TEST(FixedStringReverseIteratorTest, Cpp14ConstexprReverseIteration) {
  static_assert(3 == countSpacesReverse("This is a string"), "");
}
// Non-mutating reverse-iterator arithmetic (indexing, +, -, comparison) in a
// constexpr context. `alpha` is static; see the CWG1454 note near the end of
// this file for why a constexpr object used in constant expressions is.
TEST(FixedStringReverseIteratorTest, ConstexprReverseIteration) {
  static constexpr auto alpha = FS("abcdefghijklmnopqrstuvwxyz");
  static_assert('a' == alpha.rbegin()[25], "");
  static_assert('a' == *(alpha.rbegin() + 25), "");
  static_assert('c' == *(alpha.rbegin() + 25 - 2), "");
  static_assert((alpha.rend() - 2) == (alpha.rbegin() + 24), "");
}
namespace GCC61971 {
// FixedString runs afoul of GCC #61971 (spurious -Warray-bounds)
// in optimized builds. The following test case triggers it for gcc-4.x.
// Test that FixedString suppresses the warning correctly.
// https://gcc.gnu.org/bugzilla/show_bug.cgi?id=61971
constexpr auto xyz = folly::makeFixedString("xyz");
constexpr auto dot = folly::makeFixedString(".");
// Concatenation through a template parameter is what provoked the warning.
template <typename T1>
constexpr auto concatStuff(const T1& component) noexcept {
  return xyz + dot + component;
}
constexpr auto co = folly::makeFixedString("co");
// The conversion of the concatenated result to std::string in a member
// initializer is part of the repro; keep it as-is.
struct S {
  std::string s{concatStuff(co)};
};
} // namespace GCC61971
// Instantiating S is enough: the test passes if it compiles warning-free.
TEST(FixedStringGCC61971, GCC61971) {
  GCC61971::S s;
  (void)s;
}
#include <folly/Range.h>
// FixedString converts implicitly to folly::StringPiece; the resulting view
// aliases the FixedString's own storage (same begin()/end() pointers).
TEST(FixedStringConversionTest, ConversionToFollyRange) {
  // The following declaraction is static for compilers that haven't implemented
  // the resolution of:
  // http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_defects.html#1454
  static constexpr folly::FixedString<16> tmp{"This is a string"};
  constexpr folly::StringPiece piece = tmp;
  static_assert(tmp.begin() == piece.begin(), "");
  static_assert(tmp.end() == piece.end(), "");
}
<|start_filename|>folly/container/test/IteratorTest.cpp<|end_filename|>
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <deque>
#include <functional>
#include <map>
#include <set>
#include <tuple>
#include <type_traits>
#include <utility>
#include <vector>
#include <folly/container/Iterator.h>
#include <folly/portability/GTest.h>
namespace {
/**
 * Container type used for unit tests.
 */
template <typename T>
using Container = std::deque<T>;
// Constructor and assignment operator call counters for struct Object.
// File-scope (anonymous-namespace) state: reset with init_counters() before
// each counted section, then compared against expected values.
std::size_t gDefaultCtrCnt;
std::size_t gCopyCtrCnt;
std::size_t gMoveCtrCnt;
std::size_t gExplicitCtrCnt;
std::size_t gMultiargCtrCnt;
std::size_t gCopyOpCnt;
std::size_t gMoveOpCnt;
std::size_t gConvertOpCnt;
/**
 * Class that increases various counters to keep track of how objects have
 * been constructed or assigned to, to verify iterator behavior.
 *
 * Every special member and converting operation bumps exactly one counter,
 * so a test can assert precisely which operations an emplace iterator
 * triggered.
 */
struct Object {
  Object() {
    ++gDefaultCtrCnt;
  }
  Object(const Object&) {
    ++gCopyCtrCnt;
  }
  Object(Object&&) noexcept {
    ++gMoveCtrCnt;
  }
  // Explicit single-arg ctor: reachable via emplacement, not implicit copy.
  explicit Object(int) {
    ++gExplicitCtrCnt;
  }
  // Multi-arg ctor: reachable via emplace_args / pair / tuple unpacking.
  explicit Object(int, int) {
    ++gMultiargCtrCnt;
  }
  Object& operator=(const Object&) {
    ++gCopyOpCnt;
    return *this;
  }
  Object& operator=(Object&&) noexcept {
    ++gMoveOpCnt;
    return *this;
  }
  // Converting assignment from int.
  Object& operator=(int) noexcept {
    ++gConvertOpCnt;
    return *this;
  }
};
/**
 * Reset every constructor/assignment call counter to 0, so a test section
 * can start counting from a clean slate.
 */
void init_counters() {
  gDefaultCtrCnt = 0;
  gCopyCtrCnt = 0;
  gMoveCtrCnt = 0;
  gExplicitCtrCnt = 0;
  gMultiargCtrCnt = 0;
  gCopyOpCnt = 0;
  gMoveOpCnt = 0;
  gConvertOpCnt = 0;
}
/**
 * Test for iterator copy and move.
 *
 * Exercises every special-member operation on the iterator itself (the
 * self-assignment is deliberate — do not remove it to silence warnings),
 * then emplaces one value to prove the iterator is still usable afterwards.
 */
template <typename Iterator>
void copy_and_move_test(Container<int>& q, Iterator it) {
  assert(q.empty());
  const auto it2(it); // copy construct
  it = it2; // copy assign from const
  it = it; // self assign
  auto it3(std::move(it)); // move construct
  it = std::move(it3); // move assign
  // Make sure iterator still works.
  it = 4711; // emplace
  EXPECT_EQ(q, Container<int>{4711});
}
/**
 * Test for emplacement with perfect forwarding.
 *
 * Each `it = ...` line emplaces one element; the trailing comment records
 * which Object operations that assignment is expected to trigger. The final
 * counter expectations are the sums over all 16 emplacements, so the exact
 * statement sequence must not be reordered.
 */
template <typename Iterator>
void emplace_test(Container<Object>& q, Iterator it) {
  using folly::make_emplace_args;
  assert(q.empty());
  init_counters();
  it = Object{}; // default construct + move construct
  Object obj; // default construct
  it = obj; // copy construct
  it = std::move(obj); // move construct
  const Object obj2; // default construct
  it = obj2; // copy construct from const
  it = std::move(obj2); // copy construct (const defeats move)
  it = 0; // explicit construct
  it = make_emplace_args(0, 0); // explicit multiarg construct
  it = std::make_pair(0, 0); // implicit multiarg construct
  it = std::make_tuple(0, 0); // implicit multiarg construct
  auto args = make_emplace_args(Object{}); // default construct + move construct
  it = args; // copy construct
  it = const_cast<const decltype(args)&>(args); // copy construct from const
  it = std::move(args); // move construct
  auto args2 = std::make_tuple(Object{}); // default construct + move construct
  it = args2; // (implicit multiarg) copy construct
  it = std::move(args2); // (implicit multiarg) move construct
  auto args3 = std::make_pair(0, 0);
  it = args3; // implicit multiarg construct
  it = std::move(args3); // implicit multiarg construct
  ASSERT_EQ(q.size(), 16);
  EXPECT_EQ(gDefaultCtrCnt, 5);
  EXPECT_EQ(gCopyCtrCnt, 6);
  EXPECT_EQ(gMoveCtrCnt, 6);
  EXPECT_EQ(gExplicitCtrCnt, 1);
  EXPECT_EQ(gMultiargCtrCnt, 5);
  EXPECT_EQ(gCopyOpCnt, 0);
  EXPECT_EQ(gMoveOpCnt, 0);
  EXPECT_EQ(gConvertOpCnt, 0);
}
} // namespace
using namespace folly;
/**
 * Basic tests for folly::emplace_iterator.
 *
 * An emplacer inserts at a fixed position, so writing 0,1,2 through one
 * iterator and then 3,4 through a fresh begin() iterator yields {3,4,0,1,2}.
 */
TEST(EmplaceIterator, EmplacerTest) {
  {
    Container<int> q;
    copy_and_move_test(q, emplacer(q, q.begin()));
  }
  {
    Container<Object> q;
    emplace_test(q, emplacer(q, q.begin()));
  }
  {
    Container<int> q;
    auto it = emplacer(q, q.begin());
    it = 0;
    it = 1;
    it = 2;
    // Re-seat the iterator at the (new) front before inserting 3 and 4.
    it = emplacer(q, q.begin());
    it = 3;
    it = 4;
    EXPECT_EQ(q, Container<int>({3, 4, 0, 1, 2}));
  }
}
/**
 * Basic tests for folly::front_emplace_iterator.
 *
 * Front emplacement reverses insertion order, so 0..4 comes out {4,3,2,1,0}.
 */
TEST(EmplaceIterator, FrontEmplacerTest) {
  {
    Container<int> q;
    copy_and_move_test(q, front_emplacer(q));
  }
  {
    Container<Object> q;
    emplace_test(q, front_emplacer(q));
  }
  {
    Container<int> q;
    auto it = front_emplacer(q);
    it = 0;
    it = 1;
    it = 2;
    // Reassigning a fresh front_emplacer makes no positional difference.
    it = front_emplacer(q);
    it = 3;
    it = 4;
    EXPECT_EQ(q, Container<int>({4, 3, 2, 1, 0}));
  }
}
/**
 * Basic tests for folly::back_emplace_iterator.
 *
 * Back emplacement preserves insertion order: 0..4 comes out {0,1,2,3,4}.
 */
TEST(EmplaceIterator, BackEmplacerTest) {
  {
    Container<int> q;
    copy_and_move_test(q, back_emplacer(q));
  }
  {
    Container<Object> q;
    emplace_test(q, back_emplacer(q));
  }
  {
    Container<int> q;
    auto it = back_emplacer(q);
    it = 0;
    it = 1;
    it = 2;
    // Reassigning a fresh back_emplacer makes no positional difference.
    it = back_emplacer(q);
    it = 3;
    it = 4;
    EXPECT_EQ(q, Container<int>({0, 1, 2, 3, 4}));
  }
}
/**
 * Basic tests for folly::hint_emplace_iterator.
 *
 * First block: piecewise map emplacement, asserting exactly which Object
 * constructors run for each value expression. Second block: use as the
 * output iterator of std::set_difference into a std::set.
 */
TEST(EmplaceIterator, HintEmplacerTest) {
  {
    init_counters();
    std::map<int, Object> m;
    auto it = hint_emplacer(m, m.end());
    it = make_emplace_args(
        std::piecewise_construct,
        std::forward_as_tuple(0),
        std::forward_as_tuple(0)); // explicit Object(int)
    it = make_emplace_args(
        std::piecewise_construct,
        std::forward_as_tuple(1),
        std::forward_as_tuple(0, 0)); // explicit Object(int, int)
    it = make_emplace_args(
        std::piecewise_construct,
        std::forward_as_tuple(2),
        std::forward_as_tuple(Object{})); // default + move construct
    ASSERT_EQ(m.size(), 3);
    EXPECT_EQ(gDefaultCtrCnt, 1);
    EXPECT_EQ(gCopyCtrCnt, 0);
    EXPECT_EQ(gMoveCtrCnt, 1);
    EXPECT_EQ(gExplicitCtrCnt, 1);
    EXPECT_EQ(gMultiargCtrCnt, 1);
    EXPECT_EQ(gCopyOpCnt, 0);
    EXPECT_EQ(gMoveOpCnt, 0);
    EXPECT_EQ(gConvertOpCnt, 0);
  }
  {
    // O is constructible only explicitly from int, so std::back_inserter
    // would not compile here — hint_emplacer constructs in place.
    struct O {
      explicit O(int i_) : i(i_) {}
      bool operator<(const O& other) const {
        return i < other.i;
      }
      bool operator==(const O& other) const {
        return i == other.i;
      }
      int i;
    };
    std::vector<int> v1 = {0, 1, 2, 3, 4};
    std::vector<int> v2 = {0, 2, 4};
    std::set<O> diff;
    std::set_difference(
        v1.begin(),
        v1.end(),
        v2.begin(),
        v2.end(),
        hint_emplacer(diff, diff.end()));
    std::set<O> expected = {O(1), O(3)};
    ASSERT_EQ(diff, expected);
  }
}
/**
 * Test std::copy() with explicit conversion. This would not compile with a
 * std::back_insert_iterator, because the constructor of Object that takes a
 * single int is explicit.
 *
 * Each of the three ints is turned into an Object via exactly one call to
 * the explicit Object(int) constructor — no temporaries, no copies.
 */
TEST(EmplaceIterator, Copy) {
  init_counters();
  Container<int> in({0, 1, 2});
  Container<Object> out;
  std::copy(in.begin(), in.end(), back_emplacer(out));
  EXPECT_EQ(3, out.size());
  EXPECT_EQ(gDefaultCtrCnt, 0);
  EXPECT_EQ(gCopyCtrCnt, 0);
  EXPECT_EQ(gMoveCtrCnt, 0);
  EXPECT_EQ(gExplicitCtrCnt, 3);
  EXPECT_EQ(gMultiargCtrCnt, 0);
  EXPECT_EQ(gCopyOpCnt, 0);
  EXPECT_EQ(gMoveOpCnt, 0);
  EXPECT_EQ(gConvertOpCnt, 0);
}
/**
 * Test std::transform() with multi-argument constructors. This would require
 * a temporary Object with std::back_insert_iterator.
 *
 * The lambda returns an emplace_args pack, so each element is built by one
 * call to the multi-arg Object(int, int) constructor, in place.
 */
TEST(EmplaceIterator, Transform) {
  init_counters();
  Container<int> in({0, 1, 2});
  Container<Object> out;
  std::transform(in.begin(), in.end(), back_emplacer(out), [](int i) {
    return make_emplace_args(i, i);
  });
  EXPECT_EQ(3, out.size());
  EXPECT_EQ(gDefaultCtrCnt, 0);
  EXPECT_EQ(gCopyCtrCnt, 0);
  EXPECT_EQ(gMoveCtrCnt, 0);
  EXPECT_EQ(gExplicitCtrCnt, 0);
  EXPECT_EQ(gMultiargCtrCnt, 3);
  EXPECT_EQ(gCopyOpCnt, 0);
  EXPECT_EQ(gMoveOpCnt, 0);
  EXPECT_EQ(gConvertOpCnt, 0);
}
/**
 * Test multi-argument store and forward.
 *
 * Verifies (1) how emplace_args itself copies/moves its stored arguments,
 * (2) the storage_type chosen for each argument category (lvalues stored by
 * value, std::cref/rref wrappers preserved), and (3) the reference category
 * get_emplace_arg<I>() yields for lvalue vs rvalue emplace_args. The counter
 * expectations count Object operations only — the leading int and the
 * reference wrappers contribute nothing.
 */
TEST(EmplaceIterator, EmplaceArgs) {
  Object o1;
  const Object o2;
  Object& o3 = o1;
  const Object& o4 = o3;
  Object o5;
  {
    // Test copy construction.
    auto args = make_emplace_args(0, o1, o2, o3, o4, Object{}, std::cref(o2));
    init_counters();
    auto args2 = args;
    EXPECT_EQ(gDefaultCtrCnt, 0);
    EXPECT_EQ(gCopyCtrCnt, 5);
    EXPECT_EQ(gMoveCtrCnt, 0);
    EXPECT_EQ(gExplicitCtrCnt, 0);
    EXPECT_EQ(gMultiargCtrCnt, 0);
    EXPECT_EQ(gCopyOpCnt, 0);
    EXPECT_EQ(gMoveOpCnt, 0);
    EXPECT_EQ(gConvertOpCnt, 0);
    // Test copy assignment.
    init_counters();
    args = args2;
    EXPECT_EQ(gDefaultCtrCnt, 0);
    EXPECT_EQ(gCopyCtrCnt, 0);
    EXPECT_EQ(gMoveCtrCnt, 0);
    EXPECT_EQ(gExplicitCtrCnt, 0);
    EXPECT_EQ(gMultiargCtrCnt, 0);
    EXPECT_EQ(gCopyOpCnt, 5);
    EXPECT_EQ(gMoveOpCnt, 0);
    EXPECT_EQ(gConvertOpCnt, 0);
  }
  {
    // Test RVO.
    init_counters();
    auto args = make_emplace_args(
        0, o1, o2, o3, o4, Object{}, std::cref(o2), rref(std::move(o5)));
    EXPECT_EQ(gDefaultCtrCnt, 1);
    EXPECT_EQ(gCopyCtrCnt, 4);
    EXPECT_EQ(gMoveCtrCnt, 1);
    EXPECT_EQ(gExplicitCtrCnt, 0);
    EXPECT_EQ(gMultiargCtrCnt, 0);
    EXPECT_EQ(gCopyOpCnt, 0);
    EXPECT_EQ(gMoveOpCnt, 0);
    EXPECT_EQ(gConvertOpCnt, 0);
    // Test move construction.
    init_counters();
    auto args2 = std::move(args);
    EXPECT_EQ(gDefaultCtrCnt, 0);
    EXPECT_EQ(gCopyCtrCnt, 0);
    EXPECT_EQ(gMoveCtrCnt, 5);
    EXPECT_EQ(gExplicitCtrCnt, 0);
    EXPECT_EQ(gMultiargCtrCnt, 0);
    EXPECT_EQ(gCopyOpCnt, 0);
    EXPECT_EQ(gMoveOpCnt, 0);
    EXPECT_EQ(gConvertOpCnt, 0);
    // Test move assignment.
    init_counters();
    args = std::move(args2);
    EXPECT_EQ(gDefaultCtrCnt, 0);
    EXPECT_EQ(gCopyCtrCnt, 0);
    EXPECT_EQ(gMoveCtrCnt, 0);
    EXPECT_EQ(gExplicitCtrCnt, 0);
    EXPECT_EQ(gMultiargCtrCnt, 0);
    EXPECT_EQ(gCopyOpCnt, 0);
    EXPECT_EQ(gMoveOpCnt, 5);
    EXPECT_EQ(gConvertOpCnt, 0);
    // Make sure arguments are stored correctly. lvalues by reference, rvalues
    // by (moved) copy. Rvalues cannot be stored by reference because they may
    // refer to an expired temporary by the time they are accessed.
    static_assert(
        std::is_same<
            int,
            std::tuple_element_t<0, decltype(args)::storage_type>>::value,
        "");
    static_assert(
        std::is_same<
            Object,
            std::tuple_element_t<1, decltype(args)::storage_type>>::value,
        "");
    static_assert(
        std::is_same<
            Object,
            std::tuple_element_t<2, decltype(args)::storage_type>>::value,
        "");
    static_assert(
        std::is_same<
            Object,
            std::tuple_element_t<3, decltype(args)::storage_type>>::value,
        "");
    static_assert(
        std::is_same<
            Object,
            std::tuple_element_t<4, decltype(args)::storage_type>>::value,
        "");
    static_assert(
        std::is_same<
            Object,
            std::tuple_element_t<5, decltype(args)::storage_type>>::value,
        "");
    static_assert(
        std::is_same<
            std::reference_wrapper<const Object>,
            std::tuple_element_t<6, decltype(args)::storage_type>>::value,
        "");
    static_assert(
        std::is_same<
            rvalue_reference_wrapper<Object>,
            std::tuple_element_t<7, decltype(args)::storage_type>>::value,
        "");
    // Check whether args.get() restores the original argument type for
    // rvalue references to emplace_args.
    static_assert(
        std::is_same<int&&, decltype(get_emplace_arg<0>(std::move(args)))>::
            value,
        "");
    static_assert(
        std::is_same<Object&, decltype(get_emplace_arg<1>(std::move(args)))>::
            value,
        "");
    static_assert(
        std::is_same<
            const Object&,
            decltype(get_emplace_arg<2>(std::move(args)))>::value,
        "");
    static_assert(
        std::is_same<Object&, decltype(get_emplace_arg<3>(std::move(args)))>::
            value,
        "");
    static_assert(
        std::is_same<
            const Object&,
            decltype(get_emplace_arg<4>(std::move(args)))>::value,
        "");
    static_assert(
        std::is_same<Object&&, decltype(get_emplace_arg<5>(std::move(args)))>::
            value,
        "");
    static_assert(
        std::is_same<
            const Object&,
            decltype(get_emplace_arg<6>(std::move(args)))>::value,
        "");
    static_assert(
        std::is_same<Object&&, decltype(get_emplace_arg<7>(std::move(args)))>::
            value,
        "");
    // lvalue references to emplace_args should behave mostly like std::tuples.
    // Note that get_emplace_arg<7>(args) does not compile, because
    // folly::rvalue_reference_wrappers can only be unwrapped through an rvalue
    // reference.
    static_assert(
        std::is_same<int&, decltype(get_emplace_arg<0>(args))>::value, "");
    static_assert(
        std::is_same<Object&, decltype(get_emplace_arg<1>(args))>::value, "");
    static_assert(
        std::is_same<Object&, decltype(get_emplace_arg<2>(args))>::value, "");
    static_assert(
        std::is_same<Object&, decltype(get_emplace_arg<3>(args))>::value, "");
    static_assert(
        std::is_same<Object&, decltype(get_emplace_arg<4>(args))>::value, "");
    static_assert(
        std::is_same<Object&, decltype(get_emplace_arg<5>(args))>::value, "");
    static_assert(
        std::is_same<const Object&, decltype(get_emplace_arg<6>(args))>::value,
        "");
  }
}
/**
 * Test implicit unpacking.
 *
 * With unpacking enabled (the default), assigning a pair/tuple calls the
 * two-int constructor; with the <false> iterator variants, the pair/tuple
 * is passed through whole to the explicit pair/tuple constructors.
 */
TEST(EmplaceIterator, ImplicitUnpack) {
  static std::size_t multiCtrCnt;
  static std::size_t pairCtrCnt;
  static std::size_t tupleCtrCnt;
  struct Object2 {
    Object2(int, int) {
      ++multiCtrCnt;
    }
    explicit Object2(const std::pair<int, int>&) {
      ++pairCtrCnt;
    }
    explicit Object2(const std::tuple<int, int>&) {
      ++tupleCtrCnt;
    }
  };
  // Emplaces one pair and one tuple through `it`, then checks which of
  // Object2's constructors fired.
  auto test = [](auto&& it, bool expectUnpack) {
    multiCtrCnt = pairCtrCnt = tupleCtrCnt = 0;
    it = std::make_pair(0, 0);
    it = std::make_tuple(0, 0);
    if (expectUnpack) {
      EXPECT_EQ(multiCtrCnt, 2);
      EXPECT_EQ(pairCtrCnt, 0);
      EXPECT_EQ(tupleCtrCnt, 0);
    } else {
      EXPECT_EQ(multiCtrCnt, 0);
      EXPECT_EQ(pairCtrCnt, 1);
      EXPECT_EQ(tupleCtrCnt, 1);
    }
  };
  Container<Object2> q;
  test(emplacer(q, q.begin()), true);
  test(emplacer<false>(q, q.begin()), false);
  test(front_emplacer(q), true);
  test(front_emplacer<false>(q), false);
  test(back_emplacer(q), true);
  test(back_emplacer<false>(q), false);
}
<|start_filename|>folly/fibers/SimpleLoopController.cpp<|end_filename|>
/*
* Copyright 2004-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/fibers/SimpleLoopController.h>
#include <folly/io/async/TimeoutManager.h>
namespace folly {
namespace fibers {
/**
 * A simple version of TimeoutManager that maintains only a single AsyncTimeout
 * object that is used by HHWheelTimer in SimpleLoopController.
 */
class SimpleLoopController::SimpleTimeoutManager : public TimeoutManager {
 public:
  explicit SimpleTimeoutManager(SimpleLoopController& loopController)
      : loopController_(loopController) {}

  // Attach/detach are no-ops: the single timeout slot is managed directly
  // through scheduleTimeout()/cancelTimeout().
  void attachTimeoutManager(
      AsyncTimeout* /* unused */,
      InternalEnum /* unused */) final {}
  void detachTimeoutManager(AsyncTimeout* /* unused */) final {}

  // Records the (single) pending timeout and its absolute deadline.
  // Rescheduling the same AsyncTimeout overwrites the previous deadline.
  bool scheduleTimeout(AsyncTimeout* obj, timeout_type timeout) final {
    // Make sure that we don't try to use this manager with two timeouts.
    CHECK(!timeout_ || timeout_->first == obj);
    timeout_.emplace(obj, std::chrono::steady_clock::now() + timeout);
    return true;
  }

  // Clears the pending timeout; CHECKs that it is the one being cancelled.
  void cancelTimeout(AsyncTimeout* obj) final {
    CHECK(timeout_ && timeout_->first == obj);
    timeout_.clear();
  }

  void bumpHandlingTime() final {}

  bool isInTimeoutManagerThread() final {
    return loopController_.isInLoopThread();
  }

  // Fires the pending timeout if its deadline has passed. The slot is
  // cleared *before* invoking the callback so the callback may reschedule.
  void runTimeouts() {
    std::chrono::steady_clock::time_point tp = std::chrono::steady_clock::now();
    if (!timeout_ || tp < timeout_->second) {
      return;
    }
    auto* timeout = timeout_->first;
    timeout_.clear();
    timeout->timeoutExpired();
  }

 private:
  SimpleLoopController& loopController_;
  // The single (timeout, absolute deadline) slot; empty when none pending.
  folly::Optional<
      std::pair<AsyncTimeout*, std::chrono::steady_clock::time_point>>
      timeout_;
};
// The timeout manager must be constructed before the HHWheelTimer, which
// holds a raw pointer to it — hence the member initialization order here.
SimpleLoopController::SimpleLoopController()
    : fm_(nullptr),
      stopRequested_(false),
      loopThread_(),
      timeoutManager_(std::make_unique<SimpleTimeoutManager>(*this)),
      timer_(HHWheelTimer::newTimer(timeoutManager_.get())) {}
SimpleLoopController::~SimpleLoopController() {
  // NOTE(review): clearing scheduled_ on destruction presumably prevents a
  // pending loop callback from running against a destroyed controller —
  // confirm against the header's use of scheduled_.
  scheduled_ = false;
}
// Forwards to the private SimpleTimeoutManager; fires the pending HHWheelTimer
// timeout if its deadline has passed.
void SimpleLoopController::runTimeouts() {
  timeoutManager_->runTimeouts();
}
} // namespace fibers
} // namespace folly
<|start_filename|>folly/experimental/observer/detail/Core.h<|end_filename|>
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <folly/Function.h>
#include <folly/Synchronized.h>
#include <folly/futures/Future.h>
#include <atomic>
#include <memory>
#include <mutex>
#include <unordered_set>
#include <utility>
#include <vector>
namespace folly {
namespace observer_detail {
class ObserverManager;
/**
 * Core stores the current version of the object held by Observer. It also keeps
 * all dependencies and dependents of the Observer.
 */
class Core : public std::enable_shared_from_this<Core> {
 public:
  using Ptr = std::shared_ptr<Core>;
  using WeakPtr = std::weak_ptr<Core>;

  /**
   * Blocks until creator is successfully run by ObserverManager
   */
  static Ptr create(folly::Function<std::shared_ptr<const void>()> creator);

  /**
   * View of the observed object and its version
   */
  struct VersionedData {
    VersionedData() {}

    VersionedData(std::shared_ptr<const void> data_, size_t version_)
        : data(std::move(data_)), version(version_) {}

    // Type-erased snapshot of the observed object.
    std::shared_ptr<const void> data;
    size_t version{0};
  };

  /**
   * Gets current view of the observed object.
   * This is safe to call from any thread. If this is called from other Observer
   * functor then that Observer is marked as dependent on current Observer.
   */
  VersionedData getData();

  /**
   * Gets the version of the observed object.
   */
  size_t getVersion() const {
    return version_;
  }

  /**
   * Get the last version at which the observed object was actually changed.
   */
  size_t getVersionLastChange() {
    return versionLastChange_;
  }

  /**
   * Check if the observed object needs to be re-computed. Returns the version
   * of last change.
   *
   * This should be only called from ObserverManager thread.
   */
  size_t refresh(size_t version);

  /**
   * Force the next call to refresh to unconditionally re-compute the observed
   * object, even if dependencies didn't change.
   */
  void setForceRefresh();

  ~Core();

 private:
  explicit Core(folly::Function<std::shared_ptr<const void>()> creator);

  void addDependent(Core::WeakPtr dependent);
  void removeStaleDependents();

  // Dependents are held weakly so a dead Observer doesn't keep this alive;
  // dependencies are held strongly so what we read from stays alive.
  using Dependents = std::vector<WeakPtr>;
  using Dependencies = std::unordered_set<Ptr>;

  folly::Synchronized<Dependents> dependents_;
  folly::Synchronized<Dependencies> dependencies_;

  std::atomic<size_t> version_{0};
  std::atomic<size_t> versionLastChange_{0};

  folly::Synchronized<VersionedData> data_;

  folly::Function<std::shared_ptr<const void>()> creator_;

  // Serializes refresh(); forceRefresh_ is only touched under it.
  std::mutex refreshMutex_;

  bool forceRefresh_{false};
};
} // namespace observer_detail
} // namespace folly
<|start_filename|>folly/detail/SocketFastOpen.cpp<|end_filename|>
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/detail/SocketFastOpen.h>
#include <folly/portability/Sockets.h>
#include <cerrno>
#include <cstdio>
namespace folly {
namespace detail {
#if FOLLY_ALLOW_TFO && defined(__linux__)
// Sometimes these flags are not present in the headers,
// so define them if not present.
#if !defined(MSG_FASTOPEN)
#define MSG_FASTOPEN 0x20000000
#endif
#if !defined(TCP_FASTOPEN)
#define TCP_FASTOPEN 23
#endif
#if !defined(TCPI_OPT_SYN_DATA)
#define TCPI_OPT_SYN_DATA 32
#endif
// Send with TCP Fast Open on Linux: tagging the sendmsg() call with
// MSG_FASTOPEN performs an implicit connect and carries data in the SYN.
ssize_t tfo_sendmsg(NetworkSocket sockfd, const struct msghdr* msg, int flags) {
  return netops::sendmsg(sockfd, msg, flags | MSG_FASTOPEN);
}
// Enables server-side TCP Fast Open; max_queue_size bounds the number of
// pending TFO requests. Returns 0 on success, -1 with errno set on failure.
// NOTE(review): the optval is a size_t (8 bytes on LP64) while the kernel
// reads an int — works in practice on little-endian Linux; confirm if this
// code ever targets big-endian.
int tfo_enable(NetworkSocket sockfd, size_t max_queue_size) {
  return netops::setsockopt(
      sockfd, SOL_TCP, TCP_FASTOPEN, &max_queue_size, sizeof(max_queue_size));
}
// Returns true if the connection's SYN actually carried data (i.e. TFO was
// used), by checking TCPI_OPT_SYN_DATA in tcp_info. On failure returns false
// with errno set by getsockopt.
bool tfo_succeeded(NetworkSocket sockfd) {
  // Call getsockopt to check if TFO was used.
  struct tcp_info info;
  socklen_t info_len = sizeof(info);
  errno = 0;
  if (netops::getsockopt(sockfd, IPPROTO_TCP, TCP_INFO, &info, &info_len) !=
      0) {
    // errno is set from getsockopt
    return false;
  }
  return info.tcpi_options & TCPI_OPT_SYN_DATA;
}
#elif FOLLY_ALLOW_TFO && defined(__APPLE__)
ssize_t tfo_sendmsg(NetworkSocket sockfd, const struct msghdr* msg, int flags) {
  // Describe the connection endpoints for connectx(): no explicit source,
  // destination taken from the msghdr's address fields.
  sa_endpoints_t endpoints;
  endpoints.sae_srcif = 0;
  endpoints.sae_srcaddr = nullptr;
  endpoints.sae_srcaddrlen = 0;
  endpoints.sae_dstaddr = (struct sockaddr*)msg->msg_name;
  endpoints.sae_dstaddrlen = msg->msg_namelen;
  // CONNECT_RESUME_ON_READ_WRITE defers the handshake to the first I/O and
  // CONNECT_DATA_IDEMPOTENT allows data in the SYN — macOS's TFO mechanism.
  int ret = connectx(
      sockfd.toFd(),
      &endpoints,
      SAE_ASSOCID_ANY,
      CONNECT_RESUME_ON_READ_WRITE | CONNECT_DATA_IDEMPOTENT,
      nullptr,
      0,
      nullptr,
      nullptr);
  if (ret != 0) {
    return ret;
  }
  // connectx() itself carries no payload here; the subsequent sendmsg does.
  ret = netops::sendmsg(sockfd, msg, flags);
  return ret;
}
// Enable TCP Fast Open on the listening socket (macOS uses IPPROTO_TCP as the
// option level rather than SOL_TCP).
int tfo_enable(NetworkSocket sockfd, size_t max_queue_size) {
  return netops::setsockopt(
      sockfd,
      IPPROTO_TCP,
      TCP_FASTOPEN,
      &max_queue_size,
      sizeof(max_queue_size));
}
// macOS exposes no equivalent of Linux's TCP_INFO/TCPI_OPT_SYN_DATA, so we
// cannot tell whether TFO was actually used; report "not supported".
bool tfo_succeeded(NetworkSocket /* sockfd */) {
  errno = EOPNOTSUPP;
  return false;
}
#else
// Stubs for platforms without TCP Fast Open support (or when FOLLY_ALLOW_TFO
// is disabled): every operation fails with an informative errno.
ssize_t tfo_sendmsg(
    NetworkSocket /* sockfd */,
    const struct msghdr* /* msg */,
    int /* flags */) {
  errno = EOPNOTSUPP;
  return -1;
}
int tfo_enable(NetworkSocket /* sockfd */, size_t /* max_queue_size */) {
  // ENOPROTOOPT: the TCP_FASTOPEN socket option does not exist here.
  errno = ENOPROTOOPT;
  return -1;
}
bool tfo_succeeded(NetworkSocket /* sockfd */) {
  errno = EOPNOTSUPP;
  return false;
}
#endif
} // namespace detail
} // namespace folly
<|start_filename|>folly/dynamic.cpp<|end_filename|>
/*
* Copyright 2011-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/dynamic.h>
#include <numeric>
#include <glog/logging.h>
#include <folly/Format.h>
#include <folly/container/Enumerate.h>
#include <folly/hash/Hash.h>
#include <folly/lang/Assume.h>
#include <folly/lang/Exception.h>
namespace folly {
//////////////////////////////////////////////////////////////////////
// Out-of-line definitions for the static constexpr members of
// dynamic::TypeInfo<T>, one per type dynamic can hold (required before C++17
// inline variables made in-class definitions sufficient).
#define FOLLY_DYNAMIC_DEF_TYPEINFO(T) \
  constexpr const char* dynamic::TypeInfo<T>::name; \
  constexpr dynamic::Type dynamic::TypeInfo<T>::type; \
  //
FOLLY_DYNAMIC_DEF_TYPEINFO(std::nullptr_t)
FOLLY_DYNAMIC_DEF_TYPEINFO(bool)
FOLLY_DYNAMIC_DEF_TYPEINFO(std::string)
FOLLY_DYNAMIC_DEF_TYPEINFO(dynamic::Array)
FOLLY_DYNAMIC_DEF_TYPEINFO(double)
FOLLY_DYNAMIC_DEF_TYPEINFO(int64_t)
FOLLY_DYNAMIC_DEF_TYPEINFO(dynamic::ObjectImpl)
#undef FOLLY_DYNAMIC_DEF_TYPEINFO
// Human-readable name of this value's current runtime type (delegates to the
// static overload taking a Type).
const char* dynamic::typeName() const {
  return typeName(type_);
}
// Single-operand type mismatch: formats both the expected type name and the
// actual runtime type into the exception message.
TypeError::TypeError(const std::string& expected, dynamic::Type actual)
    : std::runtime_error(sformat(
          "TypeError: expected dynamic type `{}', but had type `{}'",
          expected,
          dynamic::typeName(actual))) {}
// Two-operand variant used by binary operations: reports both operand types.
TypeError::TypeError(
    const std::string& expected,
    dynamic::Type actual1,
    dynamic::Type actual2)
    : std::runtime_error(sformat(
          // Fixed: the closing quote after the first `{}' was missing,
          // producing an unbalanced message, inconsistent with the
          // single-type overload above.
          "TypeError: expected dynamic types `{}', but had types `{}' and `{}'",
          expected,
          dynamic::typeName(actual1),
          dynamic::typeName(actual2))) {}
// This is a higher-order preprocessor macro to aid going from runtime
// types to the compile time type system.
// Given a dynamic::Type value, invokes apply(T) with the corresponding C++
// type; unknown enum values trip CHECK and abort. (Comments cannot live
// inside the macro body: `//` would swallow the line-continuation backslash.)
#define FB_DYNAMIC_APPLY(type, apply) \
  do {                                \
    switch ((type)) {                 \
      case NULLT:                     \
        apply(std::nullptr_t);        \
        break;                        \
      case ARRAY:                     \
        apply(Array);                 \
        break;                        \
      case BOOL:                      \
        apply(bool);                  \
        break;                        \
      case DOUBLE:                    \
        apply(double);                \
        break;                        \
      case INT64:                     \
        apply(int64_t);               \
        break;                        \
      case OBJECT:                    \
        apply(ObjectImpl);            \
        break;                        \
      case STRING:                    \
        apply(std::string);           \
        break;                        \
      default:                        \
        CHECK(0);                     \
        abort();                      \
    }                                 \
  } while (0)
// Strict weak ordering over dynamics. Objects are not ordered and raise
// TypeError; values of different types order by their Type enum value.
// NOTE(review): when only `o` is the object, the error still reports this
// value's type_ — confirm that is the intended message.
bool dynamic::operator<(dynamic const& o) const {
  if (UNLIKELY(type_ == OBJECT || o.type_ == OBJECT)) {
    throw_exception<TypeError>("object", type_);
  }
  if (type_ != o.type_) {
    return type_ < o.type_;
  }
// Same type: dispatch to the typed comparator for the stored representation.
#define FB_X(T) return CompareOp<T>::comp(*getAddress<T>(), *o.getAddress<T>())
  FB_DYNAMIC_APPLY(type_, FB_X);
#undef FB_X
}
// Equality. Mixed int64/double operands compare numerically (the int is
// widened via asDouble's counterpart comparison); any other type mismatch is
// simply unequal.
bool dynamic::operator==(dynamic const& o) const {
  if (type() != o.type()) {
    if (isNumber() && o.isNumber()) {
      auto& integ = isInt() ? *this : o;
      auto& doubl = isInt() ? o : *this;
      return integ.asInt() == doubl.asDouble();
    }
    return false;
  }
// Same type: compare the underlying representations directly.
#define FB_X(T) return *getAddress<T>() == *o.getAddress<T>();
  FB_DYNAMIC_APPLY(type_, FB_X);
#undef FB_X
}
// Copy assignment. Same stored type: assign through the existing object.
// Different type: destroy current contents, then copy-construct the new
// representation in place and adopt the new type tag.
dynamic& dynamic::operator=(dynamic const& o) {
  if (&o != this) {
    if (type_ == o.type_) {
#define FB_X(T) *getAddress<T>() = *o.getAddress<T>()
      FB_DYNAMIC_APPLY(type_, FB_X);
#undef FB_X
    } else {
      destroy();
#define FB_X(T) new (getAddress<T>()) T(*o.getAddress<T>())
      FB_DYNAMIC_APPLY(o.type_, FB_X);
#undef FB_X
      type_ = o.type_;
    }
  }
  return *this;
}
// Move assignment: same structure as copy assignment, but moves from the
// source's representation. The source is left holding a moved-from value of
// its original type (it is not reset to null here).
dynamic& dynamic::operator=(dynamic&& o) noexcept {
  if (&o != this) {
    if (type_ == o.type_) {
#define FB_X(T) *getAddress<T>() = std::move(*o.getAddress<T>())
      FB_DYNAMIC_APPLY(type_, FB_X);
#undef FB_X
    } else {
      destroy();
#define FB_X(T) new (getAddress<T>()) T(std::move(*o.getAddress<T>()))
      FB_DYNAMIC_APPLY(o.type_, FB_X);
#undef FB_X
      type_ = o.type_;
    }
  }
  return *this;
}
// Checked element access shared by the at() overloads taking a dynamic key.
// Arrays require an in-range integer index; objects require an existing key.
// Throws TypeError for wrong container/index types and std::out_of_range for
// missing elements.
dynamic const& dynamic::atImpl(dynamic const& idx) const& {
  if (auto* parray = get_nothrow<Array>()) {
    if (!idx.isInt()) {
      throw_exception<TypeError>("int64", idx.type());
    }
    if (idx < 0 || idx >= parray->size()) {
      throw_exception<std::out_of_range>("out of range in dynamic array");
    }
    return (*parray)[size_t(idx.asInt())];
  } else if (auto* pobject = get_nothrow<ObjectImpl>()) {
    auto it = pobject->find(idx);
    if (it == pobject->end()) {
      throw_exception<std::out_of_range>(
          sformat("couldn't find key {} in dynamic object", idx.asString()));
    }
    return it->second;
  } else {
    throw_exception<TypeError>("object/array", type());
  }
}
// Object-only checked lookup by string key: throws TypeError if this value is
// not an object and std::out_of_range when the key is absent.
dynamic const& dynamic::at(StringPiece idx) const& {
  auto* pobject = get_nothrow<ObjectImpl>();
  if (pobject == nullptr) {
    throw_exception<TypeError>("object", type());
  }
  auto const found = pobject->find(idx);
  if (found == pobject->end()) {
    throw_exception<std::out_of_range>(
        sformat("couldn't find key {} in dynamic object", idx));
  }
  return found->second;
}
// Object subscript: inserts a null value when the key is absent and returns a
// reference to the mapped value, mirroring std::map::operator[].
dynamic& dynamic::operator[](StringPiece k) & {
  auto emplaced = get<ObjectImpl>().emplace(k, nullptr);
  return emplaced.first->second;
}
// Lookup with fallback: returns a copy of the stored value, or a copy of `v`
// when the key is absent. Throws TypeError if this value is not an object.
dynamic dynamic::getDefault(StringPiece k, const dynamic& v) const& {
  auto& obj = get<ObjectImpl>();
  auto const pos = obj.find(k);
  if (pos == obj.end()) {
    return v;
  }
  return pos->second;
}
// Lookup with rvalue fallback: copies the stored value when present,
// otherwise moves the caller's default out. (Kept as if/else rather than a
// ternary to avoid a known clang bug.)
dynamic dynamic::getDefault(StringPiece k, dynamic&& v) const& {
  auto& obj = get<ObjectImpl>();
  auto const pos = obj.find(k);
  if (pos != obj.end()) {
    return pos->second;
  }
  return std::move(v);
}
// Rvalue-this lookup: moves the stored value out of this temporary object
// when present, otherwise copies `v`. (If/else kept to avoid a clang ternary
// bug.)
dynamic dynamic::getDefault(StringPiece k, const dynamic& v) && {
  auto& obj = get<ObjectImpl>();
  auto const pos = obj.find(k);
  if (pos != obj.end()) {
    return std::move(pos->second);
  }
  return v;
}
// Rvalue-this, rvalue fallback: both branches can be moved from, since this
// object and `v` are each expiring.
dynamic dynamic::getDefault(StringPiece k, dynamic&& v) && {
  auto& obj = get<ObjectImpl>();
  auto const pos = obj.find(k);
  if (pos == obj.end()) {
    return std::move(v);
  }
  return std::move(pos->second);
}
// Non-throwing counterpart of atImpl for missing elements: returns nullptr
// for an out-of-range array index or absent object key. Still throws
// TypeError for a non-integer array index or a non-container value.
const dynamic* dynamic::get_ptrImpl(dynamic const& idx) const& {
  if (auto* parray = get_nothrow<Array>()) {
    if (!idx.isInt()) {
      throw_exception<TypeError>("int64", idx.type());
    }
    if (idx < 0 || idx >= parray->size()) {
      return nullptr;
    }
    return &(*parray)[size_t(idx.asInt())];
  } else if (auto* pobject = get_nothrow<ObjectImpl>()) {
    auto it = pobject->find(idx);
    if (it == pobject->end()) {
      return nullptr;
    }
    return &it->second;
  } else {
    throw_exception<TypeError>("object/array", type());
  }
}
// Object-only lookup by string key: returns nullptr when the key is missing
// (unlike at(), which throws); still throws TypeError for non-objects.
const dynamic* dynamic::get_ptr(StringPiece idx) const& {
  auto* pobject = get_nothrow<ObjectImpl>();
  if (pobject == nullptr) {
    throw_exception<TypeError>("object", type());
  }
  auto const pos = pobject->find(idx);
  return pos == pobject->end() ? nullptr : &pos->second;
}
// Element count for containers and strings; other types have no size and
// raise TypeError.
std::size_t dynamic::size() const {
  if (auto* arrayPtr = get_nothrow<Array>()) {
    return arrayPtr->size();
  }
  if (auto* objectPtr = get_nothrow<ObjectImpl>()) {
    return objectPtr->size();
  }
  if (auto* stringPtr = get_nothrow<std::string>()) {
    return stringPtr->size();
  }
  throw_exception<TypeError>("array/object/string", type());
}
// Erase a [first, last) range from an array. The const_iterators are
// converted to mutable iterators via their offsets from begin().
// Fixed: reuse the Array reference already obtained instead of performing a
// second, redundant get<Array>() type-check/lookup.
dynamic::iterator dynamic::erase(const_iterator first, const_iterator last) {
  auto& arr = get<Array>();
  return arr.erase(
      arr.begin() + (first - arr.begin()), arr.begin() + (last - arr.begin()));
}
// Hash of this value, dispatched on runtime type. Must stay consistent with
// operator== (note: int64 and double that compare equal hash via different
// std::hash specializations — presumably acceptable here; confirm against
// folly's documented dynamic hashing contract).
std::size_t dynamic::hash() const {
  switch (type()) {
    case NULLT:
      return 0xBAAAAAAD;
    case OBJECT: {
      // Accumulate using addition instead of using hash_range (as in the ARRAY
      // case), as we need a commutative hash operation since unordered_map's
      // iteration order is unspecified.
      auto h = std::hash<std::pair<dynamic, dynamic>>{};
      return std::accumulate(
          items().begin(),
          items().end(),
          size_t{0x0B1EC7},
          [&](auto acc, auto item) { return acc + h(item); });
    }
    case ARRAY:
      return folly::hash::hash_range(begin(), end());
    case INT64:
      return std::hash<int64_t>()(getInt());
    case DOUBLE:
      return std::hash<double>()(getDouble());
    case BOOL:
      return std::hash<bool>()(getBool());
    case STRING:
      // keep consistent with detail::DynamicHasher
      return Hash()(getString());
  }
  assume_unreachable();
}
// Maps a Type enum value to its compile-time TypeInfo<T>::name string.
char const* dynamic::typeName(Type t) {
#define FB_X(T) return TypeInfo<T>::name
  FB_DYNAMIC_APPLY(t, FB_X);
#undef FB_X
}
// Destroys the currently held representation and resets this value to null.
void dynamic::destroy() noexcept {
  // This short-circuit speeds up some microbenchmarks.
  if (type_ == NULLT) {
    return;
  }
// Run the destructor appropriate for the stored type.
#define FB_X(T) detail::Destroy::destroy(getAddress<T>())
  FB_DYNAMIC_APPLY(type_, FB_X);
#undef FB_X
  // Leave the value in a well-defined null state.
  type_ = NULLT;
  u_.nul = nullptr;
}
// Computes a JSON-merge-patch-style diff: applying the result to `source`
// yields `target`. Non-objects (or mismatched types) cannot be diffed
// structurally, so the diff is the target value itself; keys removed from
// source appear as explicit nulls.
dynamic dynamic::merge_diff(const dynamic& source, const dynamic& target) {
  if (!source.isObject() || source.type() != target.type()) {
    return target;
  }
  dynamic diff = object;
  // added/modified keys
  for (const auto& pair : target.items()) {
    auto it = source.find(pair.first);
    if (it == source.items().end()) {
      diff[pair.first] = pair.second;
    } else {
      // Reuse the values already in hand (it->second / pair.second) instead
      // of re-looking up source[pair.first] and target[pair.first].
      diff[pair.first] = merge_diff(it->second, pair.second);
    }
  }
  // removed keys
  for (const auto& pair : source.items()) {
    if (target.find(pair.first) == target.items().end()) {
      diff[pair.first] = nullptr;
    }
  }
  return diff;
}
// Resolves a JSON pointer (RFC 6901) against this value. On success returns
// the resolved value together with its parent and the last key/index
// traversed; on failure returns a structured error naming the failing token
// and its context. Array tokens must be plain decimal indices (no leading
// zeros); the special token '-' is only meaningful as the final token and is
// reported as an append request.
// clang-format off
dynamic::resolved_json_pointer<dynamic const>
// clang-format on
dynamic::try_get_ptr(json_pointer const& jsonPtr) const& {
  using err_code = json_pointer_resolution_error_code;
  using error = json_pointer_resolution_error<dynamic const>;
  auto const& tokens = jsonPtr.tokens();
  // The empty pointer resolves to the root value itself.
  if (tokens.empty()) {
    return json_pointer_resolved_value<dynamic const>{
        nullptr, this, {nullptr, nullptr}, 0};
  }
  dynamic const* curr = this;
  dynamic const* prev = nullptr;
  size_t curr_idx{0};
  StringPiece curr_key{};
  for (auto&& it : enumerate(tokens)) {
    // hit bottom but pointer not exhausted yet
    if (!curr) {
      return makeUnexpected(
          error{err_code::json_pointer_out_of_bounds, it.index, prev});
    }
    prev = curr;
    // handle lookup in array
    if (auto const* parray = curr->get_nothrow<dynamic::Array>()) {
      if (it->size() > 1 && it->at(0) == '0') {
        return makeUnexpected(
            error{err_code::index_has_leading_zero, it.index, prev});
      }
      // if last element of pointer is '-', this is an append operation
      if (it->size() == 1 && it->at(0) == '-') {
        // was '-' the last token in pointer?
        if (it.index == tokens.size() - 1) {
          return makeUnexpected(
              error{err_code::append_requested, it.index, prev});
        }
        // Cannot resolve past '-' in an array
        curr = nullptr;
        continue;
      }
      auto const idx = tryTo<size_t>(*it);
      if (!idx.hasValue()) {
        return makeUnexpected(
            error{err_code::index_not_numeric, it.index, prev});
      }
      if (idx.value() < parray->size()) {
        curr = &(*parray)[idx.value()];
        curr_idx = idx.value();
      } else {
        return makeUnexpected(
            error{err_code::index_out_of_bounds, it.index, prev});
      }
      continue;
    }
    // handle lookup in object
    if (auto const* pobject = curr->get_nothrow<dynamic::ObjectImpl>()) {
      auto const sub_it = pobject->find(*it);
      if (sub_it == pobject->end()) {
        return makeUnexpected(error{err_code::key_not_found, it.index, prev});
      }
      curr = &sub_it->second;
      curr_key = *it;
      continue;
    }
    // Current element is a scalar: cannot descend further.
    return makeUnexpected(
        error{err_code::element_not_object_or_array, it.index, prev});
  }
  return json_pointer_resolved_value<dynamic const>{
      prev, curr, curr_key, curr_idx};
}
// Throwing-on-misuse wrapper around try_get_ptr: "not found"-style failures
// (missing key, out-of-bounds index, append token, pointer past a leaf) map
// to nullptr, while malformed pointers or descending into a scalar throw.
const dynamic* dynamic::get_ptr(json_pointer const& jsonPtr) const& {
  using err_code = json_pointer_resolution_error_code;
  auto ret = try_get_ptr(jsonPtr);
  if (ret.hasValue()) {
    return ret.value().value;
  }
  auto const ctx = ret.error().context;
  auto const objType = ctx ? ctx->type() : Type::NULLT;
  switch (ret.error().error_code) {
    case err_code::key_not_found:
      return nullptr;
    case err_code::index_out_of_bounds:
      return nullptr;
    case err_code::append_requested:
      return nullptr;
    case err_code::index_not_numeric:
      throw std::invalid_argument("array index is not numeric");
    case err_code::index_has_leading_zero:
      throw std::invalid_argument(
          "leading zero not allowed when indexing arrays");
    case err_code::element_not_object_or_array:
      throw_exception<TypeError>("object/array", objType);
    case err_code::json_pointer_out_of_bounds:
      return nullptr;
    default:
      return nullptr;
  }
  // Unreachable: every enumerator above either returns or throws.
  assume_unreachable();
}
//////////////////////////////////////////////////////////////////////
} // namespace folly
<|start_filename|>folly/lang/CustomizationPoint.h<|end_filename|>
/*
* Copyright 2019-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <folly/lang/StaticConst.h>
// FOLLY_DEFINE_CPO
//
// Helper for portably defining customization-point objects (CPOs).
//
// The customization-point object must be placed in a nested namespace to avoid
// potential conflicts with customizations defined as friend-functions of types
// defined in the same namespace as the CPO.
//
// In C++17 and later the object may be defined using 'inline constexpr' to
// avoid ODR issues. However, prior to that a helper template is required to
// ensure that there is only a single instance of the CPO created and then a
// named reference in an anonymous namespace is required to avoid duplicate
// symbol definitions.
#if __cpp_inline_variables >= 201606L
// C++17 path: an inline constexpr variable gives a single, ODR-safe instance.
#define FOLLY_DEFINE_CPO(Type, Name) \
  namespace folly_cpo__ { \
  inline constexpr Type Name{}; \
  } \
  using namespace folly_cpo__;
#else
// Pre-C++17 path: bind a reference (in an anonymous namespace, so each TU
// gets its own name) to the single StaticConst<Type> instance.
#define FOLLY_DEFINE_CPO(Type, Name) \
  namespace { \
  constexpr auto& Name = ::folly::StaticConst<Type>::value; \
  }
#endif
<|start_filename|>folly/init/Init.h<|end_filename|>
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <folly/CPortability.h>
/*
* Calls common init functions in the necessary order
* Among other things, this ensures that folly::Singletons are initialized
* correctly and installs signal handlers for a superior debugging experience.
* It also initializes gflags and glog.
*
* @param argc, argv arguments to your main
* @param removeFlags if true, will update argc,argv to remove recognized
* gflags passed on the command line
*/
namespace folly {
// Performs folly's common initialization (singletons, signal handlers,
// gflags, glog); see the file-level comment above for parameter semantics.
void init(int* argc, char*** argv, bool removeFlags = true);
/*
 * An RAII object to be constructed at the beginning of main() and destructed
 * implicitly at the end of main().
 *
 * The constructor performs the same setup as folly::init(), including
 * initializing singletons managed by folly::Singleton.
 *
 * The destructor destroys all singletons managed by folly::Singleton, yielding
 * better shutdown behavior when performed at the end of main(). In particular,
 * this guarantees that all singletons managed by folly::Singleton are destroyed
 * before all Meyers singletons are destroyed.
 */
class Init {
 public:
  // Force ctor & dtor out of line for better stack traces even with LTO.
  FOLLY_NOINLINE Init(int* argc, char*** argv, bool removeFlags = true);
  FOLLY_NOINLINE ~Init();

  // Init owns process-wide state; it is neither copyable nor movable.
  Init(Init const&) = delete;
  Init(Init&&) = delete;
  Init& operator=(Init const&) = delete;
  Init& operator=(Init&&) = delete;
};
} // namespace folly
<|start_filename|>folly/executors/TimekeeperScheduledExecutor.h<|end_filename|>
/*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <glog/logging.h>
#include <atomic>
#include <folly/executors/ScheduledExecutor.h>
#include <folly/futures/Future.h>
namespace folly {
// Thrown when a TimekeeperScheduledExecutor needs a Timekeeper but its
// factory produced none.
struct FOLLY_EXPORT TimekeeperScheduledExecutorNoTimekeeper
    : public std::logic_error {
  TimekeeperScheduledExecutorNoTimekeeper()
      : std::logic_error("No Timekeeper available") {}
};
// This class turns a Executor into a ScheduledExecutor.
class TimekeeperScheduledExecutor : public ScheduledExecutor {
public:
TimekeeperScheduledExecutor(TimekeeperScheduledExecutor const&) = delete;
TimekeeperScheduledExecutor& operator=(TimekeeperScheduledExecutor const&) =
delete;
TimekeeperScheduledExecutor(TimekeeperScheduledExecutor&&) = delete;
TimekeeperScheduledExecutor& operator=(TimekeeperScheduledExecutor&&) =
delete;
static Executor::KeepAlive<TimekeeperScheduledExecutor> create(
Executor::KeepAlive<> parent,
Function<std::shared_ptr<Timekeeper>()> getTimekeeper =
detail::getTimekeeperSingleton);
virtual void add(Func func) override;
virtual void scheduleAt(Func&& func, ScheduledExecutor::TimePoint const& t)
override;
protected:
bool keepAliveAcquire() override;
void keepAliveRelease() override;
private:
TimekeeperScheduledExecutor(
KeepAlive<Executor>&& parent,
Function<std::shared_ptr<Timekeeper>()> getTimekeeper)
: parent_(std::move(parent)), getTimekeeper_(std::move(getTimekeeper)) {}
~TimekeeperScheduledExecutor() {
DCHECK(!keepAliveCounter_);
}
void run(Func);
KeepAlive<Executor> parent_;
Function<std::shared_ptr<Timekeeper>()> getTimekeeper_;
std::atomic<ssize_t> keepAliveCounter_{1};
};
} // namespace folly
<|start_filename|>folly/logging/test/helpers/FatalHelper.cpp<|end_filename|>
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/init/Init.h>
#include <folly/logging/xlog.h>
#include <folly/portability/Stdlib.h>
#include <iostream>
// Command-line flags driving the various ways this helper can crash.
// Fixed: the XCHECK_NE/XDCHECK_NE help strings were missing "(value," —
// they now match the other comparison-flag descriptions.
DEFINE_string(
    category,
    "",
    "Crash with a message to this category instead of the default");
DEFINE_bool(crash, true, "Crash with a fatal log message.");
DEFINE_bool(
    check_debug,
    false,
    "Print whether this binary was built in debug mode "
    "and then exit successfully");
DEFINE_bool(fail_fatal_xlog_if, false, "Fail an XLOG_IF(FATAL) check.");
DEFINE_bool(fail_dfatal_xlog_if, false, "Fail an XLOG_IF(DFATAL) check.");
DEFINE_bool(fail_xcheck, false, "Fail an XCHECK() test.");
DEFINE_bool(
    fail_xcheck_nomsg,
    false,
    "Fail an XCHECK() test with no additional message.");
DEFINE_bool(fail_xdcheck, false, "Fail an XDCHECK() test.");

DEFINE_int32(xcheck_eq0, 0, "Check this value using XCHECK_EQ(value, 0)");
DEFINE_int32(xcheck_ne0, 1, "Check this value using XCHECK_NE(value, 0)");
DEFINE_int32(xcheck_lt0, -1, "Check this value using XCHECK_LT(value, 0)");
DEFINE_int32(xcheck_le0, 0, "Check this value using XCHECK_LE(value, 0)");
DEFINE_int32(xcheck_gt0, 1, "Check this value using XCHECK_GT(value, 0)");
DEFINE_int32(xcheck_ge0, 0, "Check this value using XCHECK_GE(value, 0)");

DEFINE_int32(xdcheck_eq0, 0, "Check this value using XDCHECK_EQ(value, 0)");
DEFINE_int32(xdcheck_ne0, 1, "Check this value using XDCHECK_NE(value, 0)");
DEFINE_int32(xdcheck_lt0, -1, "Check this value using XDCHECK_LT(value, 0)");
DEFINE_int32(xdcheck_le0, 0, "Check this value using XDCHECK_LE(value, 0)");
DEFINE_int32(xdcheck_gt0, 1, "Check this value using XDCHECK_GT(value, 0)");
DEFINE_int32(xdcheck_ge0, 0, "Check this value using XDCHECK_GE(value, 0)");
DEFINE_bool(
    test_xcheck_eq_evalutates_once,
    false,
    "Test an XCHECK_EQ() statement where the arguments have side effects");
using folly::LogLevel;
namespace {
/**
* Helper class to optionally log a fatal message during static initialization
* or destruction.
*
* Since command line arguments have not been processed during static
* initialization, we check an environment variable.
*/
class InitChecker {
 public:
  // CRASH_DURING_INIT set to anything but "shutdown" -> crash now, during
  // static initialization.
  InitChecker() : value_{getenv("CRASH_DURING_INIT")} {
    if (value_ && strcmp(value_, "shutdown") != 0) {
      XLOG(FATAL) << "crashing during static initialization";
    }
  }
  // CRASH_DURING_INIT set (including "shutdown") -> crash during static
  // destruction instead.
  ~InitChecker() {
    if (value_) {
      XLOG(FATAL) << "crashing during static destruction";
    }
  }

  // Cached environment value; getenv result is read once at construction.
  const char* value_{nullptr};
};

// Static instance whose ctor/dtor run at program start/end respectively.
static InitChecker initChecker;
} // namespace
namespace {
// Performs the crash actions selected by --category / --crash; returns 0 only
// when no crash was requested.
int runHelper() {
  if (!FLAGS_category.empty()) {
    folly::Logger logger{FLAGS_category};
    FB_LOG(logger, FATAL, "crashing to category ", FLAGS_category);
  }

  if (!FLAGS_crash) {
    return 0;
  }

  XLOG(FATAL) << "test program crashing!";
  // Even though this function is defined to return an integer, the compiler
  // should be able to detect that XLOG(FATAL) never returns.  It shouldn't
  // complain that we don't return an integer here.
}
} // namespace
// Exercises FB_LOG(..., FATAL): always crashes, never actually returns.
std::string fbLogFatalCheck() {
  folly::Logger logger("some.category");
  FB_LOG(logger, FATAL) << "we always crash";
  // This function mostly exists to make sure the compiler does not warn
  // about a missing return statement here.
}
/*
* This is a simple helper program to exercise the LOG(FATAL) functionality.
*/
int main(int argc, char* argv[]) {
  folly::Init init(&argc, &argv);

  // --check_debug short-circuits everything else: report build mode and exit.
  if (FLAGS_check_debug) {
    std::cout << "DEBUG=" << static_cast<int>(folly::kIsDebug) << "\n";
    return 0;
  }

  // Each flag below triggers exactly one logging/check macro so the test
  // harness can verify the resulting fatal message.
  XLOG_IF(FATAL, FLAGS_fail_fatal_xlog_if) << "--fail_fatal_xlog_if specified!";
  XLOG_IF(DFATAL, FLAGS_fail_dfatal_xlog_if)
      << "--fail_dfatal_xlog_if specified!";
  XCHECK(!FLAGS_fail_xcheck) << ": --fail_xcheck specified!";
  XCHECK(!FLAGS_fail_xcheck_nomsg);
  XDCHECK(!FLAGS_fail_xdcheck) << ": --fail_xdcheck specified!";

  // Comparison checks, mixing stream-style and argument-style extra messages.
  XCHECK_EQ(FLAGS_xcheck_eq0, 0) << " extra user args";
  XCHECK_NE(FLAGS_xcheck_ne0, 0, " extra user args");
  XCHECK_LT(FLAGS_xcheck_lt0, 0, " extra ", "user", " args");
  XCHECK_LE(FLAGS_xcheck_le0, 0, " extra ", "user") << " args";
  XCHECK_GT(FLAGS_xcheck_gt0, 0) << " extra user args";
  XCHECK_GE(FLAGS_xcheck_ge0, 0) << " extra user args";
  XDCHECK_EQ(FLAGS_xdcheck_eq0, 0) << " extra user args";
  XDCHECK_NE(FLAGS_xdcheck_ne0, 0, " extra user args");
  XDCHECK_LT(FLAGS_xdcheck_lt0, 0) << " extra user args";
  XDCHECK_LE(FLAGS_xdcheck_le0, 0) << " extra user args";
  XDCHECK_GT(FLAGS_xdcheck_gt0, 0) << " extra user args";
  XDCHECK_GE(FLAGS_xdcheck_ge0, 0) << " extra user args";

  if (FLAGS_test_xcheck_eq_evalutates_once) {
    // Make sure XCHECK_EQ() only evaluates "++x" once,
    // and logs that it equals 6 and not 7.
    int x = 5;
    XCHECK_EQ(++x, 7);
  }

  // Do the remainder of the work in a separate helper function.
  //
  // The main reason for putting this in a helper function is to ensure that
  // the compiler does not warn about missing return statements on XLOG(FATAL)
  // code paths.  Unfortunately it appears like some compilers always suppress
  // this warning for main().
  return runHelper();
}
<|start_filename|>folly/synchronization/detail/test/ProxyLockableBenchmark.cpp<|end_filename|>
/*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/Benchmark.h>
#include <folly/synchronization/detail/ProxyLockable.h>
#include <mutex>
#include <tuple>
namespace folly {
namespace detail {
namespace {
// Adapts std::mutex to the ProxyLockable concept: lock() must return a proxy
// token which unlock() later accepts (the token itself is meaningless here).
class StdMutexWrapper {
 public:
  // Acquire the mutex and hand back a dummy proxy token.
  int lock() {
    mutex_.lock();
    return 1;
  }

  // Release the mutex; the token from lock() is ignored.
  void unlock(int) {
    mutex_.unlock();
  }

  std::mutex mutex_{};
};
} // namespace
// Baseline: raw lock()/unlock() on std::mutex with no RAII wrapper.
BENCHMARK(StdMutexWithoutUniqueLock, iters) {
  auto&& mutex = std::mutex{};
  for (auto i = std::size_t{0}; i < iters; ++i) {
    mutex.lock();
    mutex.unlock();
  }
}
// std::mutex held via std::unique_lock, for RAII overhead comparison.
BENCHMARK(StdMutexWithUniqueLock, iters) {
  auto&& mutex = std::mutex{};
  for (auto i = std::size_t{0}; i < iters; ++i) {
    auto&& lck = std::unique_lock<std::mutex>{mutex};
    std::ignore = lck;
  }
}
// std::mutex held via std::lock_guard.
BENCHMARK(StdMutexWithLockGuard, iters) {
  auto&& mutex = std::mutex{};
  for (auto i = std::size_t{0}; i < iters; ++i) {
    auto&& lck = std::lock_guard<std::mutex>{mutex};
    std::ignore = lck;
  }
}
// ProxyLockableUniqueLock over the wrapper, to measure the proxy machinery.
BENCHMARK(StdMutexWithProxyLockableUniqueLock, iters) {
  auto&& mutex = StdMutexWrapper{};
  for (auto i = std::size_t{0}; i < iters; ++i) {
    auto&& lck = ProxyLockableUniqueLock<StdMutexWrapper>{mutex};
    std::ignore = lck;
  }
}
// ProxyLockableLockGuard over the wrapper.
BENCHMARK(StdMutexWithProxyLockableLockGuard, iters) {
  auto&& mutex = StdMutexWrapper{};
  for (auto i = std::size_t{0}; i < iters; ++i) {
    auto&& lck = ProxyLockableLockGuard<StdMutexWrapper>{mutex};
    std::ignore = lck;
  }
}
} // namespace detail
} // namespace folly
// Parse gflags (e.g. benchmark filters) and run all registered benchmarks.
int main(int argc, char** argv) {
  gflags::ParseCommandLineFlags(&argc, &argv, true);
  folly::runBenchmarks();
}
<|start_filename|>folly/test/SingletonThreadLocalTestOverload.cpp<|end_filename|>
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/SingletonThreadLocal.h>
using namespace folly;
namespace folly {
// Alias folly's default int factory so the tag types below can derive from it.
using Make = detail::DefaultMake<int>;
} // namespace folly

// Two behaviorally identical but distinct Make types: they force two separate
// SingletonThreadLocal instantiations for the same <int, DeathTag> pair.
struct Make1 : Make {};
struct Make2 : Make {};
struct DeathTag {};

// Returns the sum of the two per-thread ints obtained via the distinct Make
// tags (helper for the overload test this file is linked into).
int stl_get_sum() {
  auto& i1 = SingletonThreadLocal<int, DeathTag, Make1>::get();
  auto& i2 = SingletonThreadLocal<int, DeathTag, Make2>::get();
  return i1 + i2;
}
<|start_filename|>folly/synchronization/DistributedMutexSpecializations.h<|end_filename|>
/*
* Copyright 2004-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <folly/synchronization/DistributedMutex.h>
#include <folly/synchronization/detail/ProxyLockable.h>
/**
* Specializations for DistributedMutex allow us to use it like a normal
* mutex. Even though it has a non-usual interface
*/
namespace std {
// std::unique_lock over DistributedMutex: inherits the proxy-aware
// implementation (DistributedMutex::lock() returns a proxy object rather
// than void, so the stock unique_lock cannot be used).
template <template <typename> class Atom, bool TimePublishing>
class unique_lock<
    ::folly::detail::distributed_mutex::DistributedMutex<Atom, TimePublishing>>
    : public ::folly::detail::ProxyLockableUniqueLock<
          ::folly::detail::distributed_mutex::
              DistributedMutex<Atom, TimePublishing>> {
 public:
  using ::folly::detail::ProxyLockableUniqueLock<
      ::folly::detail::distributed_mutex::
          DistributedMutex<Atom, TimePublishing>>::ProxyLockableUniqueLock;
};

// std::lock_guard over DistributedMutex, likewise backed by the proxy-aware
// guard implementation.
template <template <typename> class Atom, bool TimePublishing>
class lock_guard<
    ::folly::detail::distributed_mutex::DistributedMutex<Atom, TimePublishing>>
    : public ::folly::detail::ProxyLockableLockGuard<
          ::folly::detail::distributed_mutex::
              DistributedMutex<Atom, TimePublishing>> {
 public:
  using ::folly::detail::ProxyLockableLockGuard<
      ::folly::detail::distributed_mutex::
          DistributedMutex<Atom, TimePublishing>>::ProxyLockableLockGuard;
};
} // namespace std
<|start_filename|>folly/Poly-inl.h<|end_filename|>
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace folly {
namespace detail {
template <class I>
inline PolyVal<I>::PolyVal(PolyVal&& that) noexcept {
  // Dispatch eMove through the source's vtable to relocate its contents into
  // our storage, then take the vtable pointer, leaving `that` with the empty
  // vtable for I.
  that.vptr_->ops_(Op::eMove, &that, static_cast<Data*>(this));
  vptr_ = std::exchange(that.vptr_, vtable<I>());
}
template <class I>
inline PolyVal<I>::PolyVal(PolyOrNonesuch const& that) {
  // Dispatch eCopy through the source's vtable to clone its contents into our
  // storage; both objects then share the same vtable pointer.
  that.vptr_->ops_(
      Op::eCopy, const_cast<Data*>(that._data_()), PolyAccess::data(*this));
  vptr_ = that.vptr_;
}
template <class I>
inline PolyVal<I>::~PolyVal() {
  // eNuke destroys the held object, whether stored in-situ or on the heap.
  vptr_->ops_(Op::eNuke, this, nullptr);
}
template <class I>
inline Poly<I>& PolyVal<I>::operator=(PolyVal that) noexcept {
  // `that` is taken by value, so it already holds a copy/move of the source.
  // Destroy our current contents, then move `that`'s contents in and adopt
  // its vtable, resetting `that` to empty.
  vptr_->ops_(Op::eNuke, _data_(), nullptr);
  that.vptr_->ops_(Op::eMove, that._data_(), _data_());
  vptr_ = std::exchange(that.vptr_, vtable<I>());
  return static_cast<Poly<I>&>(*this);
}
// Construct from a concrete object modeling interface I. Small objects are
// stored in-situ in the internal buffer; larger ones are heap-allocated.
template <class I>
template <class T, std::enable_if_t<ModelsInterface<T, I>::value, int>>
inline PolyVal<I>::PolyVal(T&& t) {
  using U = std::decay_t<T>;
  static_assert(
      std::is_copy_constructible<U>::value || !Copyable::value,
      "This Poly<> requires copyability, and the source object is not "
      "copyable");
  // The static and dynamic types should match; otherwise, this will slice.
  assert(typeid(t) == typeid(std::decay_t<T>) ||
         !"Dynamic and static exception types don't match. Object would "
          "be sliced when storing in Poly.");
  if (inSitu<U>()) {
    auto const buff = static_cast<void*>(&_data_()->buff_);
    ::new (buff) U(static_cast<T&&>(t));
  } else {
    _data_()->pobj_ = new U(static_cast<T&&>(t));
  }
  // Point at the vtable generated for U's implementation of I.
  vptr_ = vtableFor<I, U>();
}
// Converting construction from a Poly of a value-compatible interface I2.
// An empty source stays empty (vptr_ keeps its default); otherwise the
// contents are moved in and I's sub-vtable is selected from I2's vtable.
template <class I>
template <class I2, std::enable_if_t<ValueCompatible<I, I2>::value, int>>
inline PolyVal<I>::PolyVal(Poly<I2> that) {
  static_assert(
      !Copyable::value || std::is_copy_constructible<Poly<I2>>::value,
      "This Poly<> requires copyability, and the source object is not "
      "copyable");
  auto* that_vptr = PolyAccess::vtable(that);
  if (that_vptr->state_ != State::eEmpty) {
    that_vptr->ops_(Op::eMove, PolyAccess::data(that), _data_());
    vptr_ = &select<I>(*std::exchange(that_vptr, vtable<std::decay_t<I2>>()));
  }
}
template <class I>
template <class T, std::enable_if_t<ModelsInterface<T, I>::value, int>>
inline Poly<I>& PolyVal<I>::operator=(T&& t) {
*this = PolyVal(static_cast<T&&>(t));
return static_cast<Poly<I>&>(*this);
}
template <class I>
template <class I2, std::enable_if_t<ValueCompatible<I, I2>::value, int>>
inline Poly<I>& PolyVal<I>::operator=(Poly<I2> that) {
*this = PolyVal(std::move(that));
return static_cast<Poly<I>&>(*this);
}
template <class I>
inline void PolyVal<I>::swap(Poly<I>& that) noexcept {
switch (vptr_->state_) {
case State::eEmpty:
*this = std::move(that);
break;
case State::eOnHeap:
if (State::eOnHeap == that.vptr_->state_) {
std::swap(_data_()->pobj_, that._data_()->pobj_);
std::swap(vptr_, that.vptr_);
return;
}
FOLLY_FALLTHROUGH;
case State::eInSitu:
std::swap(
*this, static_cast<PolyVal<I>&>(that)); // NOTE: qualified, not ADL
}
}
template <class I>
inline AddCvrefOf<PolyRoot<I>, I>& PolyRef<I>::_polyRoot_() const noexcept {
return const_cast<AddCvrefOf<PolyRoot<I>, I>&>(
static_cast<PolyRoot<I> const&>(*this));
}
template <class I>
constexpr RefType PolyRef<I>::refType() noexcept {
using J = std::remove_reference_t<I>;
return std::is_rvalue_reference<I>::value
? RefType::eRvalue
: std::is_const<J>::value ? RefType::eConstLvalue : RefType::eLvalue;
}
template <class I>
template <class That, class I2>
inline PolyRef<I>::PolyRef(That&& that, Type<I2>) {
auto* that_vptr = PolyAccess::vtable(PolyAccess::root(that));
detail::State const that_state = that_vptr->state_;
if (that_state == State::eEmpty) {
throw BadPolyAccess();
}
auto* that_data = PolyAccess::data(PolyAccess::root(that));
_data_()->pobj_ = that_state == State::eInSitu
? const_cast<void*>(static_cast<void const*>(&that_data->buff_))
: that_data->pobj_;
this->vptr_ = &select<std::decay_t<I>>(
*static_cast<VTable<std::decay_t<I2>> const*>(that_vptr->ops_(
Op::eRefr, nullptr, reinterpret_cast<void*>(refType()))));
}
template <class I>
inline PolyRef<I>::PolyRef(PolyRef const& that) noexcept {
_data_()->pobj_ = that._data_()->pobj_;
this->vptr_ = that.vptr_;
}
template <class I>
inline Poly<I>& PolyRef<I>::operator=(PolyRef const& that) noexcept {
_data_()->pobj_ = that._data_()->pobj_;
this->vptr_ = that.vptr_;
return static_cast<Poly<I>&>(*this);
}
template <class I>
template <class T, std::enable_if_t<ModelsInterface<T, I>::value, int>>
inline PolyRef<I>::PolyRef(T&& t) noexcept {
_data_()->pobj_ =
const_cast<void*>(static_cast<void const*>(std::addressof(t)));
this->vptr_ = vtableFor<std::decay_t<I>, AddCvrefOf<std::decay_t<T>, I>>();
}
template <class I>
template <
class I2,
std::enable_if_t<ReferenceCompatible<I, I2, I2&&>::value, int>>
inline PolyRef<I>::PolyRef(Poly<I2>&& that) noexcept(
std::is_reference<I2>::value)
: PolyRef{that, Type<I2>{}} {
static_assert(
Disjunction<std::is_reference<I2>, std::is_rvalue_reference<I>>::value,
"Attempting to construct a Poly that is a reference to a temporary. "
"This is probably a mistake.");
}
template <class I>
template <class T, std::enable_if_t<ModelsInterface<T, I>::value, int>>
inline Poly<I>& PolyRef<I>::operator=(T&& t) noexcept {
*this = PolyRef(static_cast<T&&>(t));
return static_cast<Poly<I>&>(*this);
}
template <class I>
template <
class I2,
std::enable_if_t<ReferenceCompatible<I, I2, I2&&>::value, int>>
inline Poly<I>& PolyRef<I>::operator=(Poly<I2>&& that) noexcept(
std::is_reference<I2>::value) {
*this = PolyRef(std::move(that));
return static_cast<Poly<I>&>(*this);
}
template <class I>
template <
class I2,
std::enable_if_t<ReferenceCompatible<I, I2, I2&>::value, int>>
inline Poly<I>& PolyRef<I>::operator=(Poly<I2>& that) noexcept(
std::is_reference<I2>::value) {
*this = PolyRef(that);
return static_cast<Poly<I>&>(*this);
}
template <class I>
template <
class I2,
std::enable_if_t<ReferenceCompatible<I, I2, I2 const&>::value, int>>
inline Poly<I>& PolyRef<I>::operator=(Poly<I2> const& that) noexcept(
std::is_reference<I2>::value) {
*this = PolyRef(that);
return static_cast<Poly<I>&>(*this);
}
template <class I>
inline void PolyRef<I>::swap(Poly<I>& that) noexcept {
std::swap(_data_()->pobj_, that._data_()->pobj_);
std::swap(this->vptr_, that.vptr_);
}
template <class I>
inline AddCvrefOf<PolyImpl<I>, I>& PolyRef<I>::get() const noexcept {
return const_cast<AddCvrefOf<PolyImpl<I>, I>&>(
static_cast<PolyImpl<I> const&>(*this));
}
} // namespace detail
} // namespace folly
<|start_filename|>folly/io/async/TimeoutManager.h<|end_filename|>
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <chrono>
#include <cstdint>
#include <folly/Function.h>
#include <folly/Optional.h>
namespace folly {

class AsyncTimeout;

/**
 * Base interface to be implemented by all classes expecting to manage
 * timeouts. AsyncTimeout will use implementations of this interface
 * to schedule/cancel timeouts.
 */
class TimeoutManager {
 public:
  // Timeout durations: millisecond granularity by default, microsecond
  // granularity for the high-resolution API.
  typedef std::chrono::milliseconds timeout_type;
  typedef std::chrono::microseconds timeout_type_high_res;

  using Func = folly::Function<void()>;

  // NOTE(review): INTERNAL appears to mark timeouts used internally by the
  // manager implementation and NORMAL user-scheduled ones — confirm against
  // the concrete implementation (e.g. EventBase).
  enum class InternalEnum { INTERNAL, NORMAL };

  TimeoutManager();

  virtual ~TimeoutManager();

  /**
   * Attaches/detaches TimeoutManager to AsyncTimeout
   */
  virtual void attachTimeoutManager(
      AsyncTimeout* obj,
      InternalEnum internal) = 0;
  virtual void detachTimeoutManager(AsyncTimeout* obj) = 0;

  /**
   * Schedules AsyncTimeout to fire after `timeout` milliseconds
   */
  virtual bool scheduleTimeout(AsyncTimeout* obj, timeout_type timeout) = 0;

  /**
   * Schedules AsyncTimeout to fire after `timeout` microseconds
   */
  virtual bool scheduleTimeoutHighRes(
      AsyncTimeout* obj,
      timeout_type_high_res timeout);

  /**
   * Cancels the AsyncTimeout, if scheduled
   */
  virtual void cancelTimeout(AsyncTimeout* obj) = 0;

  /**
   * This is used to mark the beginning of a new loop cycle by the
   * first handler fired within that cycle.
   */
  virtual void bumpHandlingTime() = 0;

  /**
   * Helper method to know whether we are running in the timeout manager
   * thread
   */
  virtual bool isInTimeoutManagerThread() = 0;

  /**
   * Runs the given Cob at some time after the specified number of
   * milliseconds. (No guarantees exactly when.)
   *
   * Throws a std::system_error if an error occurs.
   */
  void runAfterDelay(
      Func cob,
      uint32_t milliseconds,
      InternalEnum internal = InternalEnum::NORMAL);

  /**
   * @see tryRunAfterDelay for more details
   *
   * @return true iff the cob was successfully registered.
   */
  bool tryRunAfterDelay(
      Func cob,
      uint32_t milliseconds,
      InternalEnum internal = InternalEnum::NORMAL);

 protected:
  // Destroys all pending callback timeouts created via runAfterDelay /
  // tryRunAfterDelay.
  void clearCobTimeouts();

 private:
  // Pimpl holding the callback timeouts scheduled through runAfterDelay /
  // tryRunAfterDelay.
  struct CobTimeouts;
  std::unique_ptr<CobTimeouts> cobTimeouts_;
};

} // namespace folly
<|start_filename|>folly/io/async/ssl/BasicTransportCertificate.h<|end_filename|>
/*
* Copyright 2019-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <folly/io/async/AsyncTransportCertificate.h>
#include <memory>
namespace folly {
namespace ssl {

/**
 * A minimal, self-contained AsyncTransportCertificate that captures a
 * snapshot of another certificate's identity string and X509 handle.
 */
class BasicTransportCertificate : public folly::AsyncTransportCertificate {
 public:
  // Create a basic transport cert from an existing one. Returns nullptr
  // if cert is null.
  static std::unique_ptr<BasicTransportCertificate> create(
      const folly::AsyncTransportCertificate* cert) {
    if (cert == nullptr) {
      return nullptr;
    }
    return std::make_unique<BasicTransportCertificate>(
        cert->getIdentity(), cert->getX509());
  }

  BasicTransportCertificate(
      std::string identity,
      folly::ssl::X509UniquePtr x509)
      : identity_(std::move(identity)), x509_(std::move(x509)) {}

  std::string getIdentity() const override {
    return identity_;
  }

  // Returns a new owning handle to the held X509 (bumping its OpenSSL
  // reference count), or nullptr when no certificate is held.
  folly::ssl::X509UniquePtr getX509() const override {
    auto* raw = x509_.get();
    if (raw == nullptr) {
      return nullptr;
    }
    X509_up_ref(raw);
    return folly::ssl::X509UniquePtr(raw);
  }

 private:
  std::string identity_;
  folly::ssl::X509UniquePtr x509_;
};

} // namespace ssl
} // namespace folly
| nondejus/folly |
<|start_filename|>Source/mi-360/Program.cs<|end_filename|>
using System;
using System.Globalization;
using System.IO;
using System.Windows.Forms;
using Serilog;
namespace mi360
{
    class Program
    {
        static void Main(string[] args)
        {
            const string LoggerTemplate = "[{Timestamp:HH:mm:ss} {Level:u3}][{SourceContext}] {Message:lj}{NewLine}{Exception}";

            // Build a per-run log file in the temp directory, e.g. mi-360-2020-01-31-13-37.log.
            var startedAt = DateTime.Now.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture);
            var logFilePath = Path.Combine(Path.GetTempPath(), $"mi-360-{startedAt}.log");

            // Log from Debug level upwards, to both the console and the log file.
            Log.Logger = new LoggerConfiguration()
                .MinimumLevel.Debug()
                .WriteTo.Console(outputTemplate: LoggerTemplate)
                .WriteTo.File(path: logFilePath, outputTemplate: LoggerTemplate)
                .CreateLogger();

            // Blocks until the application's message loop exits.
            Application.Run(new Mi360Application());

            Log.CloseAndFlush();
        }
    }
}
| tangxinran2014/START_Gamepad-360 |
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/ParsedShader.cs<|end_filename|>
using Stride.Core.Mathematics;
using Stride.Core.Shaders.Ast;
using Stride.Core.Shaders.Ast.Hlsl;
using Stride.Core.Shaders.Ast.Stride;
using Stride.Graphics;
using Stride.Rendering;
using Stride.Rendering.Materials;
using Stride.Rendering.Materials.ComputeColors;
using Stride.Shaders;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using VL.Stride.Shaders.ShaderFX;
using Buffer = Stride.Graphics.Buffer;
namespace VL.Stride.Rendering
{
    /// <summary>
    /// Wraps a parsed shader AST and exposes its first class declaration,
    /// its non-stream variables, its composition inputs and any registered
    /// base shaders.
    /// </summary>
    public class ParsedShader
    {
        public readonly Shader Shader;
        public readonly ClassType ShaderClass;

        // base shaders
        public IReadOnlyList<ParsedShader> BaseShaders => baseShaders;
        private readonly List<ParsedShader> baseShaders = new List<ParsedShader>();

        // compositions
        public IReadOnlyDictionary<string, CompositionInput> Compositions => compositions;
        private readonly Dictionary<string, CompositionInput> compositions;

        // Lazily merged view of this shader's compositions plus those of the
        // base shaders; computed on first access of CompositionsWithBaseShaders.
        public IReadOnlyDictionary<string, CompositionInput> CompositionsWithBaseShaders => compositionsWithBaseShaders.Value;
        Lazy<IReadOnlyDictionary<string, CompositionInput>> compositionsWithBaseShaders;

        public readonly IReadOnlyList<Variable> Variables;
        public readonly IReadOnlyDictionary<string, Variable> VariablesByName;

        // Yields this shader's own compositions first, then the compositions
        // of each registered base shader (one level deep, in registration order).
        private IEnumerable<CompositionInput> GetCompositionsWithBaseShaders()
        {
            foreach (var comp in Compositions)
            {
                yield return comp.Value;
            }

            foreach (var baseClass in BaseShaders)
            {
                foreach (var baseComp in baseClass.Compositions)
                {
                    yield return baseComp.Value;
                }
            }
        }

        /// <summary>
        /// Indexes the given shader AST: collects non-stream variables and
        /// builds the composition table from variables qualified with "compose".
        /// </summary>
        public ParsedShader(Shader shader)
        {
            Shader = shader;
            ShaderClass = Shader.GetFirstClassDecl();
            Variables = ShaderClass.Members.OfType<Variable>().Where(v => !v.Qualifiers.Contains(StrideStorageQualifier.Stream)).ToList(); //should include parent shaders?
            VariablesByName = Variables.ToDictionary(v => v.Name.Text);
            // Keep each composition variable together with its local index in
            // the shader file.
            compositions = Variables
                .Select((v, i) => (v, i))
                .Where(v => v.v.Qualifiers.Contains(StrideStorageQualifier.Compose))
                .Select(v => new CompositionInput(v.v, v.i))
                .ToDictionary(v => v.Name);
            compositionsWithBaseShaders = new Lazy<IReadOnlyDictionary<string, CompositionInput>>(() => GetCompositionsWithBaseShaders().ToDictionary(c => c.Name));
        }

        // Registers a base shader; duplicates are ignored.
        public void AddBaseShader(ParsedShader baseShader)
        {
            if (!baseShaders.Contains(baseShader))
                baseShaders.Add(baseShader);
        }

        /// <summary>
        /// Creates a Stride ParameterKey for each variable whose declared
        /// type is recognized, keyed as "ShaderClass.VariableName". Variables
        /// of unrecognized types are skipped.
        /// </summary>
        public IEnumerable<ParameterKey> GetUniformInputs()
        {
            foreach (var v in Variables)
            {
                var type = v.Type;
                var keyName = ShaderClass.Name + "." + v.Name;

                switch (type)
                {
                    case ScalarType s when s.Name.Text == "float":
                        yield return ParameterKeys.NewValue(v.GetDefault<float>(), keyName);
                        break;
                    case ScalarType s when s.Name.Text == "int":
                        yield return ParameterKeys.NewValue(v.GetDefault<int>(), keyName);
                        break;
                    case ScalarType s when s.Name.Text == "uint":
                        yield return ParameterKeys.NewValue(v.GetDefault<uint>(), keyName);
                        break;
                    case ScalarType s when s.Name.Text == "bool":
                        yield return ParameterKeys.NewValue(v.GetDefault<bool>(), keyName);
                        break;
                    case TypeName n when n.Name.Text == "float2":
                        yield return ParameterKeys.NewValue(v.GetDefault<Vector2>(), keyName);
                        break;
                    case TypeName n when n.Name.Text == "float3":
                        yield return ParameterKeys.NewValue(v.GetDefault<Vector3>(), keyName);
                        break;
                    case TypeName n when n.Name.Text == "float4":
                        yield return ParameterKeys.NewValue(v.GetDefault<Vector4>(), keyName);
                        break;
                    // matrices get the identity as default rather than the parsed initializer
                    case TypeName m when m.Name.Text == "float4x4":
                        yield return ParameterKeys.NewValue(Matrix.Identity, keyName);
                        break;
                    case TypeName s when s.Name.Text == "int2":
                        yield return ParameterKeys.NewValue(v.GetDefault<Int2>(), keyName);
                        break;
                    case TypeName s when s.Name.Text == "int3":
                        yield return ParameterKeys.NewValue(v.GetDefault<Int3>(), keyName);
                        break;
                    case TypeName s when s.Name.Text == "int4":
                        yield return ParameterKeys.NewValue(v.GetDefault<Int4>(), keyName);
                        break;
                    case TextureType t:
                        yield return new ObjectParameterKey<Texture>(keyName);
                        break;
                    case ObjectType o when o.Name.Text == "SamplerState":
                        yield return new ObjectParameterKey<SamplerState>(keyName);
                        break;
                    // NOTE: order matters — "Buffer" is tested before "Texture"
                    // for generic types, so names containing both resolve as Buffer.
                    case GenericType b when b.Name.Text.Contains("Buffer"):
                        yield return new ObjectParameterKey<Buffer>(keyName);
                        break;
                    case GenericType t when t.Name.Text.Contains("Texture"):
                        yield return new ObjectParameterKey<Texture>(keyName);
                        break;
                    default:
                        break;
                }
            }
        }

        public override string ToString()
        {
            return ShaderClass?.ToString() ?? base.ToString();
        }
    }
    /// <summary>
    /// Mutable holder for a ParsedShader together with the stack of parent
    /// shaders encountered while walking a shader hierarchy.
    /// </summary>
    public class ParsedShaderRef
    {
        // The shader currently being referenced.
        public ParsedShader ParsedShader;
        // Parents pushed while descending into base/child shaders.
        public Stack<ParsedShader> ParentShaders = new Stack<ParsedShader>();
    }
    /// <summary>
    /// Describes a uniform shader input by name and its CLR value type.
    /// </summary>
    public class UniformInput
    {
        // Name of the uniform variable.
        public string Name;
        // CLR type of the uniform's value.
        public Type Type;
    }
    /// <summary>
    /// Describes a "compose" variable of a shader: its name, declared type,
    /// documentation attributes and the permutation key used to plug a
    /// ShaderSource into the composition slot. Also provides (and caches)
    /// the default compute node / shader source for the slot.
    /// </summary>
    public class CompositionInput
    {
        public readonly string Name;
        public readonly string TypeName;
        public readonly string Summary;
        public readonly string Remarks;
        public bool IsOptional;
        public readonly PermutationParameterKey<ShaderSource> Key;

        /// <summary>
        /// The local index of this variable in the shader file.
        /// </summary>
        public readonly int LocalIndex;

        public readonly Variable Variable;

        /// <summary>
        /// Reads name, type and the Optional/Summary/Remarks metadata
        /// attributes from the given composition variable.
        /// </summary>
        public CompositionInput(Variable v, int localIndex)
        {
            Name = v.Name.Text;

            // parse attributes
            foreach (var attr in v.Attributes.OfType<AttributeDeclaration>())
            {
                switch (attr.Name)
                {
                    case ShaderMetadata.OptionalName:
                        IsOptional = true;
                        break;
                    case ShaderMetadata.SummaryName:
                        Summary = attr.ParseString();
                        break;
                    case ShaderMetadata.RemarksName:
                        Remarks = attr.ParseString();
                        break;
                    default:
                        break;
                }
            }

            TypeName = v.Type.Name.Text;
            Key = new PermutationParameterKey<ShaderSource>(Name);
            LocalIndex = localIndex;
            Variable = v;
        }

        // cache — populated on first call to the getters below
        ShaderSource defaultShaderSource;
        IComputeNode defaultComputeNode;
        IComputeNode defaultGetter;

        /// <summary>
        /// Returns the default compute node for this composition slot. For
        /// known ShaderFX types an InputValue node is built (honoring a
        /// [Default] attribute if present); otherwise the declared type name
        /// is wrapped as a plain ShaderClassSource. Returns null on failure.
        /// When forPatch is false the value getter (if any) is preferred.
        /// </summary>
        public IComputeNode GetDefaultComputeNode(bool forPatch = false)
        {
            if (defaultComputeNode != null)
                return forPatch ? defaultComputeNode : defaultGetter ?? defaultComputeNode;

            try
            {
                if (knownShaderFXTypeInputs.TryGetValue(TypeName, out var compDefault))
                {
                    var boxedDefaultValue = compDefault.BoxedDefault;
                    // A [Default(...)] attribute on the variable overrides the
                    // type's built-in default value.
                    if (Variable.TryGetAttribute(ShaderMetadata.DefaultName, out var attribute))
                    {
                        boxedDefaultValue = attribute.ParseBoxed(compDefault.ValueType);
                    }
                    var def = compDefault.Factory(boxedDefaultValue);
                    defaultComputeNode = def.func;
                    defaultGetter = def.getter;
                    return forPatch ? defaultComputeNode : defaultGetter ?? defaultComputeNode;
                }

                defaultComputeNode = new ShaderSourceComputeNode(new ShaderClassSource(TypeName));
                return defaultComputeNode;
            }
            catch
            {
                // parsing/instantiation failed — callers treat null as "no default"
                return null;
            }
        }

        /// <summary>
        /// Returns (and caches) the default ShaderSource for this slot,
        /// generated from the default compute node when available, otherwise
        /// a bare ShaderClassSource of the declared type.
        /// </summary>
        public ShaderSource GetDefaultShaderSource(ShaderGeneratorContext context, MaterialComputeColorKeys baseKeys)
        {
            if (defaultShaderSource != null)
                return defaultShaderSource;

            var defaultNode = GetDefaultComputeNode();

            if (defaultNode != null)
            {
                defaultShaderSource = defaultNode.GenerateShaderSource(context, baseKeys);
                return defaultShaderSource;
            }
            else
            {
                defaultShaderSource = new ShaderClassSource(TypeName);
                return defaultShaderSource;
            }
        }

        // Map from ShaderFX type name to a factory for its default value node.
        static Dictionary<string, CompDefault> knownShaderFXTypeInputs = new Dictionary<string, CompDefault>()
        {
            { "ComputeVoid", new CompDefaultVoid() },
            { "ComputeFloat", new CompDefaultValue<float>() },
            { "ComputeFloat2", new CompDefaultValue<Vector2>() },
            { "ComputeFloat3", new CompDefaultValue<Vector3>() },
            { "ComputeFloat4", new CompDefaultValue<Vector4>() },
            { "ComputeMatrix", new CompDefaultValue<Matrix>() },
            { "ComputeBool", new CompDefaultValue<bool>() },
            { "ComputeInt", new CompDefaultValue<int>() },
            { "ComputeInt2", new CompDefaultValue<Int2>() },
            { "ComputeInt3", new CompDefaultValue<Int3>() },
            { "ComputeInt4", new CompDefaultValue<Int4>() },
            { "ComputeUInt", new CompDefaultValue<uint>() },
        };

        // Describes how to build the default node (and optional getter) for a
        // ShaderFX type, given a boxed default value.
        abstract class CompDefault
        {
            public readonly object BoxedDefault;
            public readonly Func<object, (IComputeNode func, IComputeNode getter)> Factory;
            public readonly Type ValueType;

            public CompDefault(object defaultValue, Func<object, (IComputeNode func, IComputeNode getter)> factory, Type valueType)
            {
                BoxedDefault = defaultValue;
                Factory = factory;
                ValueType = valueType;
            }
        }

        // Default for ComputeVoid: an empty ComputeOrder with no getter.
        class CompDefaultVoid : CompDefault
        {
            public CompDefaultVoid()
                : base(null, _ => (new ComputeOrder(), null), null)
            {
            }
        }

        // Default for value-typed ShaderFX inputs: an InputValue<T> node
        // initialized with the (possibly attribute-provided) default value.
        class CompDefaultValue<T> : CompDefault where T : struct
        {
            public CompDefaultValue(T defaultValue = default)
                : base(defaultValue, BuildInput, typeof(T))
            {
            }

            static (IComputeNode, IComputeNode) BuildInput(object boxedDefaultValue)
            {
                var input = new InputValue<T>();
                if (boxedDefaultValue is T defaultValue)
                    input.Input = defaultValue;
                // The declared variable acts as the func, the InputValue as the getter.
                return (ShaderFXUtils.DeclAndSetVar("Default", input), input);
            }
        }

        // Adapts a fixed ShaderSource to the IComputeNode interface.
        class ShaderSourceComputeNode : IComputeNode
        {
            readonly ShaderSource shaderSource;

            public ShaderSourceComputeNode(ShaderSource shader)
                => shaderSource = shader;

            public ShaderSource GenerateShaderSource(ShaderGeneratorContext context, MaterialComputeColorKeys baseKeys)
            {
                return shaderSource;
            }

            public IEnumerable<IComputeNode> GetChildren(object context = null)
            {
                return Enumerable.Empty<IComputeNode>();
            }
        }
    }
}
<|start_filename|>packages/VL.Stride.Runtime/src/Graphics/GraphicsNodes.TextureBuilder.cs<|end_filename|>
using Stride.Core;
using Stride.Engine;
using Stride.Graphics;
using System;
using VL.Core;
using VL.Lib.Basics.Resources;
namespace VL.Stride.Graphics
{
    static partial class GraphicsNodes
    {
        /// <summary>
        /// Lazily (re)creates a Texture from a TextureDescription, a
        /// TextureViewDescription and optional initial data. The texture is
        /// rebuilt on the next access of <see cref="Texture"/> whenever any
        /// input changed (or Recreate is set).
        /// </summary>
        class TextureBuilder : IDisposable
        {
            private TextureDescription description;
            private TextureViewDescription viewDescription;
            private IGraphicsDataProvider[] initalData;
            private bool needsRebuild = true;
            private Texture texture;
            internal bool Recreate;
            private readonly IResourceHandle<Game> gameHandle;

            // Returns the current texture, rebuilding it first if any input
            // changed since the last access. May be null if creation failed.
            public Texture Texture
            {
                get
                {
                    if (needsRebuild || Recreate)
                    {
                        RebuildTexture();
                        needsRebuild = false;
                    }
                    return texture;
                }

                private set => texture = value;
            }

            public TextureDescription Description
            {
                get => description;
                set
                {
                    description = value;
                    needsRebuild = true;
                }
            }

            public TextureViewDescription ViewDescription
            {
                get => viewDescription;
                set
                {
                    viewDescription = value;
                    needsRebuild = true;
                }
            }

            // Optional initial data, one provider per subresource; null
            // entries produce an empty DataBox for that subresource.
            public IGraphicsDataProvider[] InitalData
            {
                get => initalData;
                set
                {
                    initalData = value;
                    needsRebuild = true;
                }
            }

            public TextureBuilder(NodeContext nodeContext)
            {
                gameHandle = nodeContext.GetGameHandle();
            }

            public void Dispose()
            {
                texture?.Dispose();
                texture = null;
                gameHandle.Dispose();
            }

            // Reused across rebuilds to avoid per-rebuild allocations.
            PinnedGraphicsData[] pinnedGraphicsDatas = new PinnedGraphicsData[0];
            DataBox[] boxes = new DataBox[0];

            // Pins all initial-data providers, builds the DataBoxes, creates
            // the texture, and unpins the data again in the finally block.
            private void RebuildTexture()
            {
                var dataCount = 0;
                if (initalData != null)
                {
                    dataCount = initalData.Length;
                    if (pinnedGraphicsDatas.Length != dataCount)
                    {
                        pinnedGraphicsDatas = new PinnedGraphicsData[dataCount];
                        boxes = new DataBox[dataCount];
                    }
                    // NOTE(review): when initalData is null, `boxes` keeps the
                    // content/length of the previous rebuild and is still passed
                    // to Texture.New below — confirm this is intended.

                    if (dataCount > 0)
                    {
                        var pixelSize = description.Format.BlockSize();
                        var minRowSize = description.Width * pixelSize;
                        // NOTE(review): slice size is computed from Depth; a slice
                        // is usually Height * rowSize — confirm intended.
                        var minSliceSize = description.Depth * minRowSize;
                        for (int i = 0; i < dataCount; i++)
                        {
                            var id = initalData[i];
                            if (id is null)
                            {
                                pinnedGraphicsDatas[i] = PinnedGraphicsData.None;
                                boxes[i] = new DataBox(IntPtr.Zero, minRowSize, minSliceSize);
                            }
                            else
                            {
                                pinnedGraphicsDatas[i] = id.Pin();
                                // The provider may report larger pitches than the
                                // minimum implied by the description.
                                var rowSize = Math.Max(id.RowSizeInBytes, minRowSize);
                                var sliceSize = Math.Max(id.SliceSizeInBytes, minSliceSize);
                                boxes[i] = new DataBox(pinnedGraphicsDatas[i].Pointer, rowSize, sliceSize);
                            }
                        }
                    }
                }

                try
                {
                    texture?.Dispose();
                    texture = null;
                    var game = gameHandle.Resource;
                    texture = Texture.New(game.GraphicsDevice, description, viewDescription, boxes);
                }
                catch
                {
                    // creation failed — expose null rather than throwing into the patch
                    texture = null;
                }
                finally
                {
                    // Always unpin, even on failure.
                    for (int i = 0; i < dataCount; i++)
                    {
                        pinnedGraphicsDatas[i].Dispose();
                    }
                }
            }
        }

        /// <summary>
        /// Lazily (re)creates a view onto an existing Texture from a
        /// TextureViewDescription; rebuilt on next access of
        /// <see cref="TextureView"/> when any input changed.
        /// </summary>
        class TextureViewBuilder : IDisposable
        {
            private Texture texture;
            private TextureViewDescription viewDescription;
            private bool needsRebuild = true;
            private Texture textureView;
            internal bool Recreate;
            private readonly IResourceHandle<Game> gameHandle;

            // Returns the current view, rebuilding it first if any input
            // changed. May be null if the formats are incompatible or
            // creation failed.
            public Texture TextureView
            {
                get
                {
                    if (needsRebuild || Recreate)
                    {
                        RebuildTextureView();
                        needsRebuild = false;
                    }
                    return textureView;
                }

                private set => textureView = value;
            }

            public Texture Input
            {
                get => texture;
                set
                {
                    texture = value;
                    needsRebuild = true;
                }
            }

            public TextureViewDescription ViewDescription
            {
                get => viewDescription;
                set
                {
                    viewDescription = value;
                    needsRebuild = true;
                }
            }

            public TextureViewBuilder(NodeContext nodeContext)
            {
                gameHandle = nodeContext.GetGameHandle();
            }

            public void Dispose()
            {
                textureView?.Dispose();
                textureView = null;
                gameHandle.Dispose();
            }

            // Creates the view when the requested format is unset, matches the
            // source format, or reinterprets a typeless format of equal block size.
            private void RebuildTextureView()
            {
                try
                {
                    if (textureView != null)
                    {
                        textureView.Dispose();
                        textureView = null;
                    }

                    if (texture != null && (
                        viewDescription.Format == PixelFormat.None
                        || (texture.Format == viewDescription.Format)
                        || (texture.Format.IsTypeless() && (texture.Format.BlockSize() == viewDescription.Format.BlockSize()))
                        ))
                    {
                        // NOTE(review): `game` is unused here — presumably kept to
                        // touch the game resource handle; confirm whether needed.
                        var game = gameHandle.Resource;
                        textureView = texture.ToTextureView(viewDescription);
                    }
                }
                catch
                {
                    // view creation failed — expose null rather than throwing
                    textureView = null;
                }
            }
        }
    }
}
<|start_filename|>packages/VL.Stride.Windows/src/Assets/IRuntimeDatabase.cs<|end_filename|>
// Copyright (c) Stride contributors (https://xenko.com) and Silicon Studio Corp. (https://www.siliconstudio.co.jp)
// Distributed under the MIT license. See the LICENSE.md file in the project root for more information.
using System;
using System.Threading.Tasks;
using Stride.Core;
using Stride.Core.Assets;
using Stride.Core.Assets.Analysis;
namespace VL.Stride.Assets
{
    /// <summary>
    /// Abstraction over the asset build/runtime database used to compile
    /// assets and mount their results for the running game.
    /// </summary>
    public interface IRuntimeDatabase : IDisposable
    {
        // Reserves the synchronization lock without taking it yet.
        Task<ISyncLockable> ReserveSyncLock();
        // Acquires the lock; dispose the returned handle to release it.
        Task<IDisposable> LockAsync();
        // Builds the given asset item with the given dependency type.
        Task Build(AssetItem x, BuildDependencyType dependencyType);
        // Mounts the database in the current micro thread; dispose to unmount.
        Task<IDisposable> MountInCurrentMicroThread();
        // Resets the internal dependency compiler.
        void ResetDependencyCompiler();
    }
}
<|start_filename|>packages/VL.Stride.Runtime/src/Core/Assets/AssetWrapper.cs<|end_filename|>
using Stride.Core;
using Stride.Core.Serialization.Contents;
namespace VL.Stride.Assets
{
    /// <summary>
    /// Represents a reference to a runtime asset
    /// </summary>
    public abstract class AssetWrapperBase
    {
        // Number of pending load requests; see ProcessLoadRequests.
        protected int LoadRequests;

        // True while the asset is being loaded.
        public bool Loading { get; set; }

        // True when the asset exists in the content database.
        public bool Exists { get; set; }

        // Optional display name applied to the asset object on assignment.
        public string Name { get; set; }

        // Registers one more pending load request.
        public void AddLoadRequest() => LoadRequests++;

        // Stores the loaded asset object (untyped).
        public abstract void SetAssetObject(object asset);

        // Replays the pending load requests against the content manager.
        public abstract void ProcessLoadRequests(ContentManager contentManager, string url);
    }
    /// <summary>
    /// Typed reference to a runtime asset of type <typeparamref name="T"/>.
    /// </summary>
    public class AssetWrapper<T> : AssetWrapperBase where T : class
    {
        T Asset;

        public void SetAsset(T asset)
        {
            Asset = asset;
        }

        public void SetValues(T asset, bool exists)
        {
            Asset = asset;
            Exists = exists;
        }

        // Reads the current asset, existence and loading state in one call.
        public void GetValues(out T asset, out bool exists, out bool loading)
        {
            asset = Asset;
            exists = Exists;
            loading = Loading;
        }

        public override void SetAssetObject(object asset)
        {
            Asset = (T)asset;
            // Propagate the wrapper's name onto the asset object when possible.
            if (Asset is ComponentBase componentBase && !string.IsNullOrWhiteSpace(Name))
                componentBase.Name = Name;
        }

        public override void ProcessLoadRequests(ContentManager contentManager, string url)
        {
            // Loads one fewer than requested — presumably the initial load was
            // already performed by the caller; TODO confirm.
            for (int i = 0; i < LoadRequests - 1; i++)
            {
                contentManager.Load<T>(url);
            }
            LoadRequests = 0;
        }
    }
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/RenderFeatures/VLEffectRenderFeature.cs<|end_filename|>
using Stride.Rendering;
namespace VL.Stride.Rendering
{
    /// <summary>
    /// Custom render feature, that manages the VLEffectMain effect
    /// </summary>
    public class VLEffectRenderFeature : SubRenderFeature
    {
        private StaticObjectPropertyKey<RenderEffect> renderEffectKey;

        /// <inheritdoc/>
        protected override void InitializeCore()
        {
            base.InitializeCore();

            renderEffectKey = ((RootEffectRenderFeature)RootRenderFeature).RenderEffectKey;
        }

        /// <inheritdoc/>
        public override void PrepareEffectPermutations(RenderDrawContext context)
        {
            var renderEffects = RootRenderFeature.RenderData.GetData(renderEffectKey);
            int effectSlotCount = ((RootEffectRenderFeature)RootRenderFeature).EffectPermutationSlotCount;

            foreach (var renderObject in RootRenderFeature.RenderObjects)
            {
                var staticObjectNode = renderObject.StaticObjectNode;
                var renderMesh = (RenderMesh)renderObject;

                for (int i = 0; i < effectSlotCount; ++i)
                {
                    var staticEffectObjectNode = staticObjectNode * effectSlotCount + i;
                    var renderEffect = renderEffects[staticEffectObjectNode];

                    // Skip effects not used during this frame
                    if (renderEffect == null || !renderEffect.IsUsedDuringThisFrame(RenderSystem))
                        continue;

                    // Generate shader permutations from the extension parameters:
                    // material-pass level extension shader...
                    var enableBySource = renderMesh.MaterialPass.Parameters.Get(VLEffectParameters.EnableExtensionShader);
                    if (enableBySource)
                    {
                        renderEffect.EffectValidator.ValidateParameter(VLEffectParameters.EnableExtensionShader, enableBySource);
                        renderEffect.EffectValidator.ValidateParameter(VLEffectParameters.MaterialExtensionShader, renderMesh.MaterialPass.Parameters.Get(VLEffectParameters.MaterialExtensionShader));
                    }

                    // ...mesh-level extension referenced by name...
                    var enableByNameMesh = renderMesh.Mesh.Parameters.Get(VLEffectParameters.EnableExtensionNameMesh);
                    if (enableByNameMesh)
                    {
                        renderEffect.EffectValidator.ValidateParameter(VLEffectParameters.EnableExtensionNameMesh, enableByNameMesh);
                        renderEffect.EffectValidator.ValidateParameter(VLEffectParameters.MaterialExtensionNameMesh, renderMesh.Mesh.Parameters.Get(VLEffectParameters.MaterialExtensionNameMesh));
                    }

                    // ...and mesh-level extension given as shader source.
                    var enableBySourceMesh = renderMesh.Mesh.Parameters.Get(VLEffectParameters.EnableExtensionShaderMesh);
                    if (enableBySourceMesh)
                    {
                        renderEffect.EffectValidator.ValidateParameter(VLEffectParameters.EnableExtensionShaderMesh, enableBySourceMesh);
                        renderEffect.EffectValidator.ValidateParameter(VLEffectParameters.MaterialExtensionShaderMesh, renderMesh.Mesh.Parameters.Get(VLEffectParameters.MaterialExtensionShaderMesh));
                    }
                }
            }
        }
    }
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/ParserExtensions.cs<|end_filename|>
using Stride.Core.Shaders.Ast.Hlsl;
using System;
using System.Linq;
using VL.Lang;
using Stride.Core.Mathematics;
using Stride.Core.Shaders.Ast.Stride;
using Stride.Core.Shaders.Ast;
using System.Collections.Generic;
namespace VL.Stride.Rendering
{
public static class ParserExtensions
{
public static ClassType GetFirstClassDecl(this Shader shader)
{
var result = shader.Declarations.OfType<ClassType>().FirstOrDefault();
if (result == null)
{
var nameSpace = shader.Declarations.OfType<NamespaceBlock>().FirstOrDefault();
if (nameSpace != null)
{
result = nameSpace.Body.OfType<ClassType>().FirstOrDefault();
}
}
return result;
}
public static bool TryGetAttribute(this Variable v, string attrName, out AttributeDeclaration attribute)
{
attribute = v.Attributes.OfType<AttributeDeclaration>().Where(a => a.Name.Text == ShaderMetadata.DefaultName).FirstOrDefault();
return attribute != null;
}
public static string GetKeyName(this Variable v, ClassType shader)
=> shader.Name.Text + "." + v.Name.Text;
public static string ParseString(this AttributeDeclaration attr)
{
return attr.Parameters.FirstOrDefault()?.Value as string;
}
public static List<string> ParseStringList(this AttributeDeclaration attr)
{
return attr.Parameters
.Select(p => p?.Value as string)
.Where(s => !string.IsNullOrWhiteSpace(s))
.ToList();
}
public static List<string> ParseStringAsCommaSeparatedList(this AttributeDeclaration attr)
{
return attr.Parameters
.Select(p => p?.Value as string)
.SelectMany(s => s.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries))
.Where(s => !string.IsNullOrWhiteSpace(s))
.Select(s => s.Trim())
.Distinct()
.ToList();
}
public static bool ParseBool(this AttributeDeclaration attr, int index = 0)
{
if (attr.Parameters.Count > index)
{
if (bool.TryParse(attr.Parameters[index].Text, out var value))
{
return value;
}
}
return default;
}
public static float ParseFloat(this AttributeDeclaration attr, int index = 0)
{
if (attr.Parameters.Count > index)
{
if (UserInputParsing.TryParseFloat(attr.Parameters[index].Text, out var value))
{
return value;
}
}
return default;
}
public static int ParseInt(this AttributeDeclaration attr, int index = 0)
{
if (attr.Parameters.Count > index)
{
if (UserInputParsing.TryParseInt(attr.Parameters[index].Text, out var value))
{
return value;
}
}
return default;
}
public static Int2 ParseInt2(this AttributeDeclaration attr)
{
return new Int2(attr.ParseInt(0), attr.ParseInt(1));
}
public static Int3 ParseInt3(this AttributeDeclaration attr)
{
return new Int3(attr.ParseInt(0), attr.ParseInt(1), attr.ParseInt(2));
}
public static Int4 ParseInt4(this AttributeDeclaration attr)
{
return new Int4(attr.ParseInt(0), attr.ParseInt(1), attr.ParseInt(2), attr.ParseInt(3));
}
public static uint ParseUInt(this AttributeDeclaration attr, int index = 0)
{
if (attr.Parameters.Count > index)
{
if (UserInputParsing.TryParseUInt(attr.Parameters[index].Text, out var value))
{
return value;
}
}
return default;
}
public static Vector2 ParseVector2(this AttributeDeclaration attr)
{
return new Vector2(attr.ParseFloat(0), attr.ParseFloat(1));
}
public static Vector3 ParseVector3(this AttributeDeclaration attr)
{
return new Vector3(attr.ParseFloat(0), attr.ParseFloat(1), attr.ParseFloat(2));
}
public static Vector4 ParseVector4(this AttributeDeclaration attr)
{
return new Vector4(attr.ParseFloat(0), attr.ParseFloat(1), attr.ParseFloat(2), attr.ParseFloat(3));
}
public static object ParseBoxed(this AttributeDeclaration attr, Type type, object defaultVlaue = null)
{
if (type == typeof(float))
return attr.ParseFloat();
if (type == typeof(Vector2))
return attr.ParseVector2();
if (type == typeof(Vector3))
return attr.ParseVector3();
if (type == typeof(Vector4))
return attr.ParseVector4();
if (type == typeof(bool))
return attr.ParseBool();
if (type == typeof(int))
return attr.ParseInt();
if (type == typeof(Int2))
return attr.ParseInt2();
if (type == typeof(Int3))
return attr.ParseInt3();
if (type == typeof(Int4))
return attr.ParseInt4();
if (type == typeof(uint))
return attr.ParseUInt();
if (type == typeof(string))
return attr.ParseString();
return defaultVlaue ?? Activator.CreateInstance(type);
}
public static T GetDefault<T>(this Variable v)
{
var inital = v.InitialValue;
if (inital != null)
return inital.ParseDefault<T>();
return default;
}
static T ParseDefault<T>(this Expression e)
{
if (e is LiteralExpression l)
return (T)Convert.ChangeType(l.Literal?.Value, typeof(T));
if (e is MethodInvocationExpression m)
return m.ParseMethod<T>();
return default;
}
static T ParseArg<T>(this MethodInvocationExpression m, int i)
{
if (m.Arguments.Count > i)
return m.Arguments[i].ParseDefault<T>();
return default;
}
static T ParseMethod<T>(this MethodInvocationExpression m)
{
var type = typeof(T);
if (type == typeof(Vector2))
return (T)(object)new Vector2(m.ParseArg<float>(0), m.ParseArg<float>(1));
if (type == typeof(Vector3))
return (T)(object)new Vector3(m.ParseArg<float>(0), m.ParseArg<float>(1), m.ParseArg<float>(2));
if (type == typeof(Vector4))
return (T)(object)new Vector4(m.ParseArg<float>(0), m.ParseArg<float>(1), m.ParseArg<float>(2), m.ParseArg<float>(3));
if (type == typeof(Int2))
return (T)(object)new Int2(m.ParseArg<int>(0), m.ParseArg<int>(1));
if (type == typeof(Int3))
return (T)(object)new Int3(m.ParseArg<int>(0), m.ParseArg<int>(1), m.ParseArg<int>(2));
if (type == typeof(Int4))
return (T)(object)new Int4(m.ParseArg<int>(0), m.ParseArg<int>(1), m.ParseArg<int>(2), m.ParseArg<int>(3));
return default;
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Graphics/ResourceDataHelpers.cs<|end_filename|>
using Stride.Core;
using Stride.Graphics;
using System;
using System.Buffers;
using System.Runtime.InteropServices;
using VL.Lib.Basics.Imaging;
using VL.Lib.Collections;
namespace VL.Stride.Graphics
{
    /// <summary>
    /// Describes a block of data (sizes in bytes) that can be pinned in memory
    /// so it can be handed to the graphics API.
    /// </summary>
    public interface IGraphicsDataProvider
    {
        /// <summary>Total size of the data in bytes.</summary>
        int SizeInBytes { get; }
        /// <summary>Size of one element in bytes.</summary>
        int ElementSizeInBytes { get; }
        /// <summary>Size of one row in bytes (for 2D data).</summary>
        int RowSizeInBytes { get; }
        /// <summary>Size of one slice in bytes (for 3D data).</summary>
        int SliceSizeInBytes { get; }
        /// <summary>Pins the data and returns a handle that unpins it on dispose.</summary>
        PinnedGraphicsData Pin();
    }
    /// <summary>
    /// Pins a piece of managed memory so its address stays stable; Pin/Unpin must be balanced.
    /// </summary>
    public interface IMemoryPinner
    {
        /// <summary>Pins the underlying memory and returns its address.</summary>
        public IntPtr Pin();
        /// <summary>Releases the pin acquired by <see cref="Pin"/>.</summary>
        public void Unpin();
    }
    /// <summary>
    /// Pins a <see cref="ReadOnlyMemory{T}"/> so its contents can be passed to native graphics APIs.
    /// </summary>
    public class ReadonlyMemoryPinner<T> : IMemoryPinner where T : struct
    {
        // The memory to pin; assign before calling Pin().
        public ReadOnlyMemory<T> Memory;
        // Handle that keeps the memory pinned between Pin() and Unpin().
        MemoryHandle memoryHandle;

        /// <summary>Pins <see cref="Memory"/> and returns the address of its first element.</summary>
        public unsafe IntPtr Pin()
        {
            memoryHandle = Memory.Pin();
            return new IntPtr(memoryHandle.Pointer);
        }

        /// <summary>Unpins the memory by disposing the handle taken in <see cref="Pin"/>.</summary>
        public void Unpin()
        {
            memoryHandle.Dispose();
        }
    }
    /// <summary>
    /// Pins the pixel data of an <see cref="IImage"/>; acquires the image data on Pin()
    /// and releases both the pin and the image data on Unpin().
    /// </summary>
    public class ImagePinner : IMemoryPinner
    {
        // The image to pin; assign before calling Pin().
        public IImage Image;
        // Image data acquired in Pin(); disposed in Unpin().
        IImageData imageData;
        // Handle that keeps the image bytes pinned between Pin() and Unpin().
        MemoryHandle memoryHandle;

        /// <summary>Pins the image bytes and returns the address of the first byte.</summary>
        public unsafe IntPtr Pin()
        {
            imageData = Image.GetData();
            memoryHandle = imageData.Bytes.Pin();
            return new IntPtr(memoryHandle.Pointer);
        }

        /// <summary>Unpins the bytes and releases the image data acquired in <see cref="Pin"/>.</summary>
        public void Unpin()
        {
            memoryHandle.Dispose();
            imageData.Dispose();
        }
    }
public class NonePinner : IMemoryPinner
{
public IntPtr Pin()
{
return IntPtr.Zero;
}
public void Unpin()
{
}
}
public struct PinnedGraphicsData : IDisposable
{
public static readonly PinnedGraphicsData None = new PinnedGraphicsData(new NonePinner());
public readonly IntPtr Pointer;
readonly IMemoryPinner pinner;
public PinnedGraphicsData(IMemoryPinner pinner)
{
Pointer = pinner.Pin();
this.pinner = pinner;
}
public void Dispose()
{
pinner.Unpin();
}
}
    /// <summary>
    /// <see cref="IGraphicsDataProvider"/> backed either by a <see cref="ReadOnlyMemory{T}"/>
    /// or by an <see cref="IImage"/>; reuses its pinner instance across updates when possible.
    /// </summary>
    public class MemoryDataProvider : IGraphicsDataProvider
    {
        // Current pinner; replaced when the data source type changes.
        public IMemoryPinner Pinner = new NonePinner();

        /// <summary>
        /// Uses the given memory as data source. Size parameters that are 0 are derived
        /// from the memory length and element size; row/slice sizes are taken as-is.
        /// </summary>
        public void SetMemoryData<T>(ReadOnlyMemory<T> memory, int offsetInBytes = 0, int sizeInBytes = 0, int elementSizeInBytes = 0, int rowSizeInBytes = 0, int sliceSizeInBytes = 0) where T : struct
        {
            // Reuse the existing pinner if it already is a ReadonlyMemoryPinner<T>.
            var pnr = Pinner as ReadonlyMemoryPinner<T>;
            pnr ??= new ReadonlyMemoryPinner<T>();
            pnr.Memory = memory;
            Pinner = pnr;
            OffsetInBytes = offsetInBytes;
            var tSize = Utilities.SizeOf<T>();
            SizeInBytes = sizeInBytes > 0 ? sizeInBytes : memory.Length * tSize;
            ElementSizeInBytes = elementSizeInBytes > 0 ? elementSizeInBytes : tSize;
            RowSizeInBytes = rowSizeInBytes;
            SliceSizeInBytes = sliceSizeInBytes;
        }

        /// <summary>
        /// Uses the given image as data source. Size parameters that are 0 are derived
        /// from the image info (image size, pixel size, scan size, rows * height).
        /// </summary>
        public void SetImageData(IImage image, int offsetInBytes = 0, int sizeInBytes = 0, int elementSizeInBytes = 0, int rowSizeInBytes = 0, int sliceSizeInBytes = 0)
        {
            // Reuse the existing pinner if it already is an ImagePinner.
            var pnr = Pinner as ImagePinner;
            pnr ??= new ImagePinner();
            pnr.Image = image;
            Pinner = pnr;
            OffsetInBytes = offsetInBytes;
            SizeInBytes = sizeInBytes > 0 ? sizeInBytes : image.Info.ImageSize;
            ElementSizeInBytes = elementSizeInBytes > 0 ? elementSizeInBytes : image.Info.Format.GetPixelSize();
            RowSizeInBytes = rowSizeInBytes > 0 ? rowSizeInBytes : image.Info.ScanSize;
            SliceSizeInBytes = sliceSizeInBytes > 0 ? sliceSizeInBytes : RowSizeInBytes * image.Info.Height;
        }

        // NOTE(review): OffsetInBytes is stored but not applied to the pointer returned
        // by Pin() — presumably consumers apply it themselves; verify against callers.
        public int OffsetInBytes { get; set; }
        public int SizeInBytes { get; set; }
        public int ElementSizeInBytes { get; set; }
        public int RowSizeInBytes { get; set; }
        public int SliceSizeInBytes { get; set; }

        /// <summary>Pins the current data source; dispose the result to unpin.</summary>
        public PinnedGraphicsData Pin()
        {
            return new PinnedGraphicsData(Pinner);
        }
    }
    /// <summary>
    /// RAII-style pinner for an <see cref="IImage"/>: pins its bytes on construction
    /// and releases them on dispose. A null image yields a zero pointer.
    /// </summary>
    public struct VLImagePinner : IDisposable
    {
        IImageData imageData;
        MemoryHandle imageDataHandle;
        IntPtr pointer;

        public unsafe VLImagePinner(IImage image)
        {
            if (image != null)
            {
                imageData = image.GetData();
                imageDataHandle = imageData.Bytes.Pin();
                pointer = (IntPtr)imageDataHandle.Pointer;
            }
            else
            {
                // Null image: no data, no pin, zero pointer.
                imageData = null;
                imageDataHandle = new MemoryHandle();
                pointer = IntPtr.Zero;
            }
        }

        /// <summary>Address of the pinned image bytes (IntPtr.Zero for a null image).</summary>
        public IntPtr Pointer
        {
            get => pointer;
        }

        /// <summary>Total byte count of the image data. Throws if constructed with a null image.</summary>
        public int SizeInBytes
        {
            get => imageData.Bytes.Length;
        }

        /// <summary>Bytes per row of the image data. Throws if constructed with a null image.</summary>
        public int ScanSize
        {
            get => imageData.ScanSize;
        }

        public void Dispose()
        {
            // Disposing a default MemoryHandle is a no-op, so the null-image case is safe.
            imageDataHandle.Dispose();
            imageData?.Dispose();
        }
    }
public struct GCPinner : IDisposable
{
GCHandle pinnedObject;
public GCPinner(object obj)
{
if (obj != null)
pinnedObject = GCHandle.Alloc(obj, GCHandleType.Pinned);
else
pinnedObject = new GCHandle();
}
public IntPtr Pointer
{
get => pinnedObject.AddrOfPinnedObject();
}
public void Dispose()
{
pinnedObject.Free();
}
}
public static class ResourceDataHelpers
{
public static void PinSpread<T>(Spread<T> input, out IntPtr pointer, out int sizeInBytes, out int byteStride, out GCPinner pinner) where T : struct
{
pointer = IntPtr.Zero;
sizeInBytes = 0;
byteStride = 0;
var count = input.Count;
if (count > 0)
{
byteStride = Utilities.SizeOf<T>();
sizeInBytes = byteStride * count;
pinner = new GCPinner(input);
pointer = pinner.Pointer;
return;
}
pinner = new GCPinner(null);
}
public static void PinArray<T>(T[] input, out IntPtr pointer, out int sizeInBytes, out int byteStride, out GCPinner pinner) where T : struct
{
pointer = IntPtr.Zero;
sizeInBytes = 0;
byteStride = 0;
var count = input.Length;
if (count > 0)
{
input.AsMemory();
byteStride = Utilities.SizeOf<T>();
sizeInBytes = byteStride * count;
pinner = new GCPinner(input);
pointer = pinner.Pointer;
return;
}
pinner = new GCPinner(null);
}
public static void PinImage(IImage input, out IntPtr pointer, out int sizeInBytes, out int bytePerRow, out int bytesPerPixel, out VLImagePinner pinner)
{
pointer = IntPtr.Zero;
sizeInBytes = 0;
bytePerRow = 0;
bytesPerPixel = 0;
if (input != null)
{
pinner = new VLImagePinner(input);
sizeInBytes = pinner.SizeInBytes;
bytePerRow = pinner.ScanSize;
bytesPerPixel = input.Info.PixelSize;
pointer = pinner.Pointer;
return;
}
pinner = new VLImagePinner(null);
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/EffectUtils.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using VL.Core;
using Stride.Core.Mathematics;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Core.IO;
using System.IO;
using Stride.Shaders.Compiler;
using Stride.Core.Shaders.Ast;
using Stride.Shaders.Parser;
using Stride.Core.Diagnostics;
using Stride.Shaders;
using ShaderMacro = Stride.Core.Shaders.Parser.ShaderMacro;
using System.Reflection;
using System.Diagnostics;
using Stride.Core;
using Stride.Shaders.Parser.Mixins;
using VL.App;
namespace VL.Stride.Rendering
{
    /// <summary>
    /// Utilities for locating, loading and parsing Stride .sdsl shader sources,
    /// with a process-wide parse cache.
    /// </summary>
    static class EffectUtils
    {
        /// <summary>
        /// Resolves the on-disk path of a shader: first via the file provider, then via a
        /// "/path" side file in the asset database, then via an optional db provider, and
        /// finally via the locally discovered shader files.
        /// Returns null when the shader cannot be located.
        /// </summary>
        public static string GetPathOfSdslShader(string effectName, IVirtualFileProvider fileProvider, IVirtualFileProvider dbFileProvider = null)
        {
            var path = EffectCompilerBase.GetStoragePathFromShaderType(effectName);
            if (fileProvider.TryGetFileLocation(path, out var filePath, out _, out _))
            {
                if (File.Exists(filePath))
                    return filePath;
            }
            // The asset database may store the original source path in a "<url>/path" entry.
            var pathUrl = path + "/path";
            if (fileProvider.FileExists(pathUrl))
            {
                using (var pathStream = fileProvider.OpenStream(pathUrl, VirtualFileMode.Open, VirtualFileAccess.Read))
                using (var reader = new StreamReader(pathStream))
                {
                    var dbPath = reader.ReadToEnd();
                    if (File.Exists(dbPath))
                        return dbPath;
                }
            }
            if (dbFileProvider != null)
                return GetPathOfSdslShader(effectName, dbFileProvider);
            //find locally
            if (LocalShaderFilePaths.TryGetValue(effectName, out var fp))
                return fp;
            return null;
        }
        //get shader source from data base, is there a more direct way?
        /// <summary>
        /// Returns the shader's source code: from the file on disk while running in the
        /// editor (so edits are picked up), otherwise from the shader source manager.
        /// </summary>
        public static string GetShaderSourceCode(string effectName, IVirtualFileProvider fileProvider, ShaderSourceManager shaderSourceManager)
        {
            if (!AppState.IsExported) //only try to load shader source from file when in VL editor
            {
                var path = GetPathOfSdslShader(effectName, fileProvider);
                if (!string.IsNullOrWhiteSpace(path))
                {
                    try
                    {
                        return File.ReadAllText(path);
                    }
                    catch (Exception)
                    {
                        //fall through
                    }
                }
            }
            return shaderSourceManager?.LoadShaderSource(effectName).Source;
        }

        /// <summary>Creates an effect compiler for the provider and returns its source manager.</summary>
        public static ShaderSourceManager GetShaderSourceManager(this IVirtualFileProvider fileProvider)
        {
            var effectCompiler = new EffectCompiler(fileProvider)
            {
                SourceDirectories = { EffectCompilerBase.DefaultSourceShaderFolder },
            };

            return effectCompiler.GetMixinParser().SourceManager;
        }

        // Shader name -> file path map built once from the local "packs" folder.
        static readonly Dictionary<string, string> LocalShaderFilePaths = GetShaders();

        // Scans the application's packs folder for Stride asset .sdsl files.
        private static Dictionary<string, string> GetShaders()
        {
            var packsFolder = Path.Combine(PlatformFolders.ApplicationBinaryDirectory, "packs");
            if (Directory.Exists(packsFolder))
            {
                return Directory.EnumerateDirectories(packsFolder, @"*Assets", SearchOption.AllDirectories)
                    .Where(p => p.Contains(@"\stride\Assets"))
                    .SelectMany(d => Directory.EnumerateFiles(d, "*.sdsl", SearchOption.AllDirectories))
                    .ToDictionary(fp => Path.GetFileNameWithoutExtension(fp));
            }
            else
            {
                return new Dictionary<string, string>();
            }
        }

        // Matches a lower-case letter followed by an upper-case letter or digit (camelCase boundary).
        static readonly Regex FCamelCasePattern = new Regex("[a-z][A-Z0-9]", RegexOptions.Compiled);

        /// <summary>Selects from <paramref name="pins"/> the pin matching the description's name.</summary>
        public static void SelectPin<TPin>(this IVLPin[] pins, IVLPinDescription description, ref TPin pin) where TPin : Pin
        {
            pin = pins.OfType<TPin>().FirstOrDefault(p => p.Name == description.Name);
        }

        /// <summary>
        /// Builds a human-readable pin name from a parameter key by splitting camelCase into
        /// words; prefixes the shader name when the plain name was already taken.
        /// </summary>
        public static string GetPinName(this ParameterKey key, HashSet<string> usedNames)
        {
            var variableName = key.GetVariableName();
            var shaderName = key.GetShaderName();
            var camelCasedName = FCamelCasePattern.Replace(variableName, match => $"{match.Value[0]} {match.Value[1]}");
            var result = char.ToUpper(camelCasedName[0]) + camelCasedName.Substring(1);

            if (usedNames.Add(result))
                return result;

            return $"{shaderName} {result}";
        }

        /// <summary>Returns the part of the key name before the first '.', or "" if there is none.</summary>
        public static string GetShaderName(this ParameterKey key)
        {
            var name = key.Name;
            var dotIndex = name.IndexOf('.');
            if (dotIndex > 0)
                return name.Substring(0, dotIndex);
            return string.Empty;
        }

        /// <summary>Returns the part of the key name after the first '.', or the whole name.</summary>
        public static string GetVariableName(this ParameterKey key)
        {
            var name = key.Name;
            var dotIndex = name.IndexOf('.');
            if (dotIndex >= 0)
                return name.Substring(dotIndex + 1);
            return name;
        }

        /// <summary>
        /// Parses an effect (and, recursively, its base shaders) into a <see cref="ParsedShader"/>.
        /// Returns false on any parse error.
        /// </summary>
        public static bool TryParseEffect(this IVirtualFileProvider fileProvider, string effectName, ShaderSourceManager shaderSourceManager, out ParsedShader result)
        {
            result = null;

            var resultRef = new ParsedShaderRef();
            var success = TryParseEffect(effectName, fileProvider, shaderSourceManager, resultRef);
            Debug.Assert(resultRef.ParentShaders.Count == 0);
            if (success)
                result = resultRef.ParsedShader;
            return success;
        }

        // Guards parserCache; TryParseEffect can recurse while holding this lock.
        static object parserCacheLock = new object();
        // Shader name -> parsed shader, shared process-wide.
        internal static Dictionary<string, ParsedShader> parserCache = new Dictionary<string, ParsedShader>();

        /// <summary>Evicts one shader from the parse cache, or clears it entirely when no name is given.</summary>
        public static void ResetParserCache(string shaderName = null)
        {
            lock (parserCacheLock)
            {
                if (!string.IsNullOrWhiteSpace(shaderName))
                {
                    parserCache.Remove(shaderName);
                }
                else
                {
                    parserCache.Clear();
                }
            }
        }

        /// <summary>
        /// Parses a shader into <paramref name="resultRef"/>, reusing the cache when possible
        /// and recursing into base classes. Parent shaders accumulate their (transitive) bases.
        /// </summary>
        public static bool TryParseEffect(string shaderName, IVirtualFileProvider fileProvider, ShaderSourceManager shaderSourceManager, ParsedShaderRef resultRef)
        {
            lock (parserCacheLock)
            {
                if (parserCache.TryGetValue(shaderName, out var localResult))
                {
                    if (resultRef.ParsedShader == null)
                    {
                        resultRef.ParsedShader = localResult;
                    }
                    else
                    {
                        foreach (var parentShader in resultRef.ParentShaders)
                        {
                            parentShader.AddBaseShader(localResult);

                            // also add all base shaders of this base shader
                            foreach (var baseShader in localResult.BaseShaders)
                            {
                                parentShader.AddBaseShader(baseShader);
                            }
                        }
                    }

                    return true;
                }

                try
                {
                    // SDSL
                    var macros = new[]
                    {
                        new ShaderMacro("class", "shader")
                    };

                    // get source code
                    var code = GetShaderSourceCode(shaderName, fileProvider, shaderSourceManager);
                    var inputFileName = shaderName + ".sdsl";
                    var parsingResult = StrideShaderParser.TryPreProcessAndParse(code, inputFileName, macros);

                    if (parsingResult.HasErrors)
                    {
                        return false;
                    }
                    else //success
                    {
                        localResult = new ParsedShader(parsingResult.Shader);

                        foreach (var parentShader in resultRef.ParentShaders)
                        {
                            parentShader.AddBaseShader(localResult);
                        }

                        // original shader
                        if (resultRef.ParsedShader == null)
                            resultRef.ParsedShader = localResult;

                        resultRef.ParentShaders.Push(localResult);
                        try
                        {
                            // base shaders
                            var baseShaders = localResult.ShaderClass.BaseClasses;
                            foreach (var baseClass in baseShaders)
                            {
                                var baseShaderName = baseClass.Name.Text;
                                TryParseEffect(baseShaderName, fileProvider, shaderSourceManager, resultRef);
                            }
                        }
                        finally
                        {
                            resultRef.ParentShaders.Pop();
                        }

                        parserCache[shaderName] = localResult;
                        return true;
                    }
                }
                catch (Exception)
                {
                    // NOTE(review): all parse/IO errors are deliberately swallowed and
                    // reported as a simple "false" to the caller.
                    return false;
                }
            }
        }

        // Fixed compiler parameters used for parse-only analysis (no actual compilation).
        static Lazy<EffectCompilerParameters> effectCompilerParameters = new Lazy<EffectCompilerParameters>(() =>
        {
            return new EffectCompilerParameters
            {
                Platform = GraphicsPlatform.Direct3D11,
                Profile = GraphicsProfile.Level_11_0,
                Debug = true,
                OptimizationLevel = 0,
            };
        });

        /// <summary>
        /// Parses and semantically analyzes a shader through the effect compiler's mixin
        /// parser, injecting the same platform/profile macros Stride uses for compilation.
        /// Returns false on any error.
        /// </summary>
        public static bool TryParseAndAnalyze(string shaderName, IVirtualFileProvider fileProvider, EffectCompiler effectCompiler, out Shader shader)
        {
            shader = null;
            try
            {
                var effectParameters = effectCompilerParameters.Value;
                var log = new LoggerResult();

                var source = new ShaderClassSource(shaderName);
                var mixinTree = new ShaderMixinSource();
                mixinTree.Mixins.Add(source);
                var shaderMixinSource = mixinTree;
                var fullEffectName = mixinTree.Name;

                // Make a copy of shaderMixinSource. Use deep clone since shaderMixinSource can be altered during compilation (e.g. macros)
                var shaderMixinSourceCopy = new ShaderMixinSource();
                shaderMixinSourceCopy.DeepCloneFrom(shaderMixinSource);
                shaderMixinSource = shaderMixinSourceCopy;

                // Generate platform-specific macros
                switch (effectParameters.Platform)
                {
                    case GraphicsPlatform.Direct3D11:
                        shaderMixinSource.AddMacro("STRIDE_GRAPHICS_API_DIRECT3D", 1);
                        shaderMixinSource.AddMacro("STRIDE_GRAPHICS_API_DIRECT3D11", 1);
                        break;
                    case GraphicsPlatform.Direct3D12:
                        shaderMixinSource.AddMacro("STRIDE_GRAPHICS_API_DIRECT3D", 1);
                        shaderMixinSource.AddMacro("STRIDE_GRAPHICS_API_DIRECT3D12", 1);
                        break;
                    case GraphicsPlatform.OpenGL:
                        shaderMixinSource.AddMacro("STRIDE_GRAPHICS_API_OPENGL", 1);
                        shaderMixinSource.AddMacro("STRIDE_GRAPHICS_API_OPENGLCORE", 1);
                        break;
                    case GraphicsPlatform.OpenGLES:
                        shaderMixinSource.AddMacro("STRIDE_GRAPHICS_API_OPENGL", 1);
                        shaderMixinSource.AddMacro("STRIDE_GRAPHICS_API_OPENGLES", 1);
                        break;
                    case GraphicsPlatform.Vulkan:
                        shaderMixinSource.AddMacro("STRIDE_GRAPHICS_API_VULKAN", 1);
                        break;
                    default:
                        throw new NotSupportedException();
                }

                // Generate profile-specific macros
                shaderMixinSource.AddMacro("STRIDE_GRAPHICS_PROFILE", (int)effectParameters.Profile);
                shaderMixinSource.AddMacro("GRAPHICS_PROFILE_LEVEL_9_1", (int)GraphicsProfile.Level_9_1);
                shaderMixinSource.AddMacro("GRAPHICS_PROFILE_LEVEL_9_2", (int)GraphicsProfile.Level_9_2);
                shaderMixinSource.AddMacro("GRAPHICS_PROFILE_LEVEL_9_3", (int)GraphicsProfile.Level_9_3);
                shaderMixinSource.AddMacro("GRAPHICS_PROFILE_LEVEL_10_0", (int)GraphicsProfile.Level_10_0);
                shaderMixinSource.AddMacro("GRAPHICS_PROFILE_LEVEL_10_1", (int)GraphicsProfile.Level_10_1);
                shaderMixinSource.AddMacro("GRAPHICS_PROFILE_LEVEL_11_0", (int)GraphicsProfile.Level_11_0);
                shaderMixinSource.AddMacro("GRAPHICS_PROFILE_LEVEL_11_1", (int)GraphicsProfile.Level_11_1);
                shaderMixinSource.AddMacro("GRAPHICS_PROFILE_LEVEL_11_2", (int)GraphicsProfile.Level_11_2);

                // In .sdsl, class has been renamed to shader to avoid ambiguities with HLSL
                shaderMixinSource.AddMacro("class", "shader");

                var parser = effectCompiler.GetMixinParser();
                var parsingResult = parser.Parse(shaderMixinSource, shaderMixinSource.Macros.ToArray());
                shader = parsingResult.Shader;

                //parsingResult.Shader.

                // Copy log from parser results to output
                //CopyLogs(parsingResult, log);

                return true;
            }
            catch (Exception)
            {
                return false;
            }
        }

        /// <summary>
        /// Accesses the private EffectCompiler.GetMixinParser() via reflection — there is no
        /// public API to reach the mixin parser.
        /// </summary>
        public static ShaderMixinParser GetMixinParser(this EffectCompiler effectCompiler)
        {
            var getMixinParser = typeof(EffectCompiler).GetMethod("GetMixinParser", BindingFlags.NonPublic | BindingFlags.Instance);
            return (ShaderMixinParser)getMixinParser.Invoke(effectCompiler, new object[0]);
        }
    }
    /// <summary>
    /// Maps well-known Stride shader parameter keys (Global, Transformation, Camera,
    /// Texturing) to enums and sets their values on parameter collections per frame,
    /// per view and per draw.
    /// </summary>
    static class WellKnownParameters
    {
        public static readonly Dictionary<string, PerFrameParameters> PerFrameMap = BuildParameterMap<PerFrameParameters>("Global");
        public static readonly Dictionary<string, PerViewParameters> PerViewMap = BuildViewParameterMap();
        public static readonly Dictionary<string, PerDrawParameters> PerDrawMap = BuildParameterMap<PerDrawParameters>("Transformation");
        public static readonly Dictionary<string, TexturingParameters> TexturingMap = BuildParameterMap<TexturingParameters>("Texturing");

        /// <summary>Yields the enum entries for all keys of the collection present in the given map.</summary>
        public static IEnumerable<T> GetWellKnownParameters<T>(this ParameterCollection parameters, Dictionary<string, T> map)
        {
            foreach (var p in parameters.Layout.LayoutParameterKeyInfos)
            {
                if (map.TryGetValue(p.Key.Name, out T entry))
                    yield return entry;
            }
        }

        /// <summary>Yields a TexelSize entry for each TexturingKeys.TextureN key used by the collection.</summary>
        public static IEnumerable<TexturingParameters> GetTexturingParameters(this ParameterCollection parameters)
        {
            foreach (var p in parameters.Layout.LayoutParameterKeyInfos)
            {
                if (p.Key == TexturingKeys.Texture0)
                    yield return TexturingParameters.Texture0TexelSize;

                if (p.Key == TexturingKeys.Texture1)
                    yield return TexturingParameters.Texture1TexelSize;

                if (p.Key == TexturingKeys.Texture2)
                    yield return TexturingParameters.Texture2TexelSize;

                if (p.Key == TexturingKeys.Texture3)
                    yield return TexturingParameters.Texture3TexelSize;

                if (p.Key == TexturingKeys.Texture4)
                    yield return TexturingParameters.Texture4TexelSize;

                if (p.Key == TexturingKeys.Texture5)
                    yield return TexturingParameters.Texture5TexelSize;

                if (p.Key == TexturingKeys.Texture6)
                    yield return TexturingParameters.Texture6TexelSize;

                if (p.Key == TexturingKeys.Texture7)
                    yield return TexturingParameters.Texture7TexelSize;

                if (p.Key == TexturingKeys.Texture8)
                    yield return TexturingParameters.Texture8TexelSize;

                if (p.Key == TexturingKeys.Texture9)
                    yield return TexturingParameters.Texture9TexelSize;
            }
        }

        /// <summary>
        /// Computes and sets the requested per-draw transformation matrices derived from
        /// the world matrix and the render view.
        /// </summary>
        public static void SetPerDrawParameters(this ParameterCollection parameters, PerDrawParameters[] perDrawParams, RenderView renderView, ref Matrix world)
        {
            var worldInverse = world;
            worldInverse.Invert();
            Matrix.Multiply(ref world, ref renderView.View, out var worldView);
            foreach (var perDraw in perDrawParams)
            {
                switch (perDraw)
                {
                    case PerDrawParameters.World:
                        // Already handled. DON'T write it again or we introduce a feedback between render calls!
                        break;
                    case PerDrawParameters.WorldInverse:
                        parameters.Set(TransformationKeys.WorldInverse, ref worldInverse);
                        break;
                    case PerDrawParameters.WorldInverseTranspose:
                        var worldInverseTranspose = worldInverse;
                        worldInverseTranspose.Transpose();
                        parameters.Set(TransformationKeys.WorldInverseTranspose, ref worldInverseTranspose);
                        break;
                    case PerDrawParameters.WorldView:
                        parameters.Set(TransformationKeys.WorldView, ref worldView);
                        break;
                    case PerDrawParameters.WorldViewInverse:
                        var worldViewInverse = worldView;
                        worldViewInverse.Invert();
                        parameters.Set(TransformationKeys.WorldViewInverse, ref worldViewInverse);
                        break;
                    case PerDrawParameters.WorldViewProjection:
                        Matrix.Multiply(ref worldView, ref renderView.Projection, out var worldViewProjection);
                        parameters.Set(TransformationKeys.WorldViewProjection, ref worldViewProjection);
                        break;
                    case PerDrawParameters.WorldScale:
                        // Scale per axis = length of the corresponding world matrix row.
                        var worldScale = new Vector3(
                            ((Vector3)world.Row1).Length(),
                            ((Vector3)world.Row2).Length(),
                            ((Vector3)world.Row3).Length());
                        parameters.Set(TransformationKeys.WorldScale, ref worldScale);
                        break;
                    case PerDrawParameters.EyeMS:
                        // TODO: This is how Stride does it - differs from patched version
                        //var eyeMS = new Vector4(worldInverse.M41, worldInverse.M42, worldInverse.M43, 1.0f);
                        var viewInverse = renderView.View;
                        viewInverse.Invert();
                        var eyeMS = Vector4.Transform(new Vector4(0, 0, -1, 0), viewInverse);
                        parameters.Set(TransformationKeys.EyeMS, ref eyeMS);
                        break;
                    default:
                        break;
                }
            }
        }

        /// <summary>
        /// Sets the requested per-view matrices and camera values from the render view.
        /// </summary>
        public static void SetPerViewParameters(this ParameterCollection parameters, PerViewParameters[] perViewParams, RenderView renderView)
        {
            foreach (var perView in perViewParams)
            {
                switch (perView)
                {
                    case PerViewParameters.View:
                        parameters.Set(TransformationKeys.View, ref renderView.View);
                        break;
                    case PerViewParameters.ViewInverse:
                        var view = renderView.View;
                        view.Invert();
                        parameters.Set(TransformationKeys.ViewInverse, ref view);
                        break;
                    case PerViewParameters.Projection:
                        parameters.Set(TransformationKeys.Projection, ref renderView.Projection);
                        break;
                    case PerViewParameters.ProjectionInverse:
                        var projection = renderView.Projection;
                        projection.Invert();
                        parameters.Set(TransformationKeys.ProjectionInverse, ref projection);
                        break;
                    case PerViewParameters.ViewProjection:
                        parameters.Set(TransformationKeys.ViewProjection, ref renderView.ViewProjection);
                        break;
                    case PerViewParameters.ProjScreenRay:
                        var projScreenRay = new Vector2(-1.0f / renderView.Projection.M11, 1.0f / renderView.Projection.M22);
                        parameters.Set(TransformationKeys.ProjScreenRay, ref projScreenRay);
                        break;
                    case PerViewParameters.Eye:
                        var viewInverse = renderView.View;
                        viewInverse.Invert();
                        // TODO: Differs from Stride
                        //var eye = new Vector4(viewInverse.M41, viewInverse.M42, viewInverse.M43, 1.0f);
                        var eye = viewInverse.Row4;
                        parameters.Set(TransformationKeys.Eye, ref eye);
                        break;
                    case PerViewParameters.AspectRatio:
                        // Max(..., 1) avoids division by zero for degenerate view sizes.
                        parameters.Set(CameraKeys.AspectRatio, renderView.ViewSize.X / Math.Max(renderView.ViewSize.Y, 1.0f));
                        break;
                    case PerViewParameters.ViewSize:
                        parameters.Set(CameraKeys.ViewSize, ref renderView.ViewSize);
                        break;
                    //TODO:
                    //perViewCamera->NearClipPlane = view.NearClipPlane;
                    //perViewCamera->FarClipPlane = view.FarClipPlane;
                    //perViewCamera->ZProjection = CameraKeys.ZProjectionACalculate(view.NearClipPlane, view.FarClipPlane);
                    default:
                        break;
                }
            }
        }

        // Used by TextureFX, which uses an ImageEffect that already sets most parameters
        /// <summary>Sets only the camera-related per-view values (aspect ratio, view size).</summary>
        public static void SetCameraParametersOnly(this ParameterCollection parameters, PerViewParameters[] perViewParams, ref Vector2 viewSize)
        {
            foreach (var perView in perViewParams)
            {
                switch (perView)
                {
                    case PerViewParameters.AspectRatio:
                        parameters.Set(CameraKeys.AspectRatio, viewSize.X / Math.Max(viewSize.Y, 1.0f));
                        break;
                    case PerViewParameters.ViewSize:
                        parameters.Set(CameraKeys.ViewSize, ref viewSize);
                        break;
                    //TODO:
                    //perViewCamera->NearClipPlane = view.NearClipPlane;
                    //perViewCamera->FarClipPlane = view.FarClipPlane;
                    //perViewCamera->ZProjection = CameraKeys.ZProjectionACalculate(view.NearClipPlane, view.FarClipPlane);
                    default:
                        break;
                }
            }
        }

        /// <summary>Sets the per-frame time values from the render context clock.</summary>
        public static void SetPerFrameParameters(this ParameterCollection parameters, PerFrameParameters[] perFrameParams, RenderContext renderContext)
        {
            foreach (var perFrame in perFrameParams)
            {
                switch (perFrame)
                {
                    case PerFrameParameters.Time:
                        parameters.Set(GlobalKeys.Time, (float)renderContext.Time.Total.TotalSeconds);
                        break;
                    case PerFrameParameters.TimeStep:
                        parameters.Set(GlobalKeys.TimeStep, (float)renderContext.Time.Elapsed.TotalSeconds);
                        break;
                    default:
                        throw new NotImplementedException();
                }
            }
        }

        /// <summary>Sets the texel-size value for every requested TextureN slot.</summary>
        public static void SetTexturingParameters(this ParameterCollection parameters, TexturingParameters[] texturingParams)
        {
            foreach (var texturingParam in texturingParams)
            {
                switch (texturingParam)
                {
                    case TexturingParameters.Texture0TexelSize:
                        SetTexelSize(parameters, 0);
                        break;
                    case TexturingParameters.Texture1TexelSize:
                        SetTexelSize(parameters, 1);
                        break;
                    case TexturingParameters.Texture2TexelSize:
                        SetTexelSize(parameters, 2);
                        break;
                    case TexturingParameters.Texture3TexelSize:
                        SetTexelSize(parameters, 3);
                        break;
                    case TexturingParameters.Texture4TexelSize:
                        SetTexelSize(parameters, 4);
                        break;
                    case TexturingParameters.Texture5TexelSize:
                        SetTexelSize(parameters, 5);
                        break;
                    case TexturingParameters.Texture6TexelSize:
                        SetTexelSize(parameters, 6);
                        break;
                    case TexturingParameters.Texture7TexelSize:
                        SetTexelSize(parameters, 7);
                        break;
                    case TexturingParameters.Texture8TexelSize:
                        SetTexelSize(parameters, 8);
                        break;
                    case TexturingParameters.Texture9TexelSize:
                        SetTexelSize(parameters, 9);
                        break;
                    default:
                        throw new NotImplementedException();
                }
            }
        }

        // Texel size = (1/width, 1/height) of the texture bound to slot i; skipped when unbound.
        private static void SetTexelSize(ParameterCollection parameters, int i)
        {
            var tex = parameters.Get(TexturingKeys.DefaultTextures[i]);
            if (tex != null)
                parameters.Set(TexturingKeys.TexturesTexelSize[i], new Vector2(1.0f / tex.ViewWidth, 1.0f / tex.ViewHeight));
        }

        // Builds "<effectName>.<EnumEntry>" -> enum entry for every value of T.
        static Dictionary<string, T> BuildParameterMap<T>(string effectName)
        {
            var map = new Dictionary<string, T>();
            foreach (var entry in Enum.GetValues(typeof(T)))
                map.Add($"{effectName}.{entry.ToString()}", (T)entry);
            return map;
        }

        // Per-view entries are split between the Camera and Transformation shaders.
        static Dictionary<string, PerViewParameters> BuildViewParameterMap()
        {
            var map = new Dictionary<string, PerViewParameters>();
            foreach (var entry in (PerViewParameters[])Enum.GetValues(typeof(PerViewParameters)))
            {
                //camera
                if (entry == PerViewParameters.NearClipPlane
                    || entry == PerViewParameters.FarClipPlane
                    || entry == PerViewParameters.ZProjection
                    || entry == PerViewParameters.AspectRatio
                    || entry == PerViewParameters.ViewSize
                    )
                {
                    map.Add($"Camera.{entry.ToString()}", entry);
                }
                else //transformation
                {
                    map.Add($"Transformation.{entry.ToString()}", entry);
                }
            }
            return map;
        }
    }
// from Globals shader
    /// <summary>Per-frame parameters of the Globals shader.</summary>
    enum PerFrameParameters
    {
        /// <summary>Total elapsed time in seconds.</summary>
        Time,
        /// <summary>Time elapsed since the last frame in seconds.</summary>
        TimeStep,
    }
// from Transformation shader
    /// <summary>Per-view parameters of the Transformation and Camera shaders.</summary>
    enum PerViewParameters
    {
        /// <summary>
        /// View matrix. Default to Matrix.Identity.
        /// </summary>
        View,
        /// <summary>
        /// Inverse View matrix. Default to Matrix.Inverse(View)
        /// </summary>
        ViewInverse,
        /// <summary>
        /// Projection matrix. Default to Matrix.Identity.
        /// </summary>
        Projection,
        /// <summary>
        /// Projection matrix. Default to Matrix.Inverse(Projection).
        /// </summary>
        ProjectionInverse,
        /// <summary>
        /// ViewProjection matrix. Default to = View * Projection.
        /// </summary>
        ViewProjection,
        /// <summary>
        /// Screen projected ray vector. Default to = new Vector2(-1.0f / Projection.M11, 1.0f / Projection.M22);
        /// </summary>
        ProjScreenRay,
        /// <summary>
        /// Eye vector. Default to = View^-1[M41,M42,M43,1.0]
        /// </summary>
        Eye,
        /// <summary>
        /// Camera Z NearClipPlane value.
        /// </summary>
        NearClipPlane,
        /// <summary>
        /// Camera Z FarClipPlane value.
        /// </summary>
        FarClipPlane,
        /// <summary>
        /// Z Retro projection factor used retro project a non-linear 1/z depth in the range [0.0 - 1.0] to a linear-depth in view space.
        /// Remarks: ZInViewSpace = ZProjection.y / (depth - ZProjection.x)
        /// </summary>
        ZProjection,
        /// <summary>
        /// The aspect ratio of the current viewport
        /// </summary>
        AspectRatio,
        /// <summary>
        /// The size of the current viewport
        /// </summary>
        ViewSize,
    }
// from Transformation shader
    /// <summary>Per-draw parameters of the Transformation shader.</summary>
    enum PerDrawParameters
    {
        /// <summary>
        /// World matrix. Default to Matrix.Identity.
        /// </summary>
        World,
        /// <summary>
        /// Inverse World matrix. Default to Matrix.Inverse(World).
        /// </summary>
        WorldInverse,
        /// <summary>
        /// Inverse Transpose World matrix. Default to Matrix.Transpose(Matrix.Inverse(World)).
        /// </summary>
        WorldInverseTranspose,
        /// <summary>
        /// WorldView matrix. Default to = World * View.
        /// </summary>
        WorldView,
        /// <summary>
        /// Inverse WorldView matrix. Default to Matrix.Inverse(WorldView)
        /// </summary>
        WorldViewInverse,
        /// <summary>
        /// WorldViewProjection matrix. Default to = World * ViewProjection.
        /// </summary>
        WorldViewProjection,
        /// <summary>
        /// The scale of the World. Default to Vector2.One.
        /// </summary>
        WorldScale,
        /// <summary>
        /// Eye vector in model space. Default to = (World*View)^-1[M41,M42,M43,1.0]
        /// </summary>
        EyeMS
    }
    /// <summary>
    /// Texel-size parameters of the Texturing shader; each entry corresponds to
    /// TexturingKeys.TexturesTexelSize[N] for texture slot N.
    /// </summary>
    enum TexturingParameters
    {
        Texture0TexelSize,
        Texture1TexelSize,
        Texture2TexelSize,
        Texture3TexelSize,
        Texture4TexelSize,
        Texture5TexelSize,
        Texture6TexelSize,
        Texture7TexelSize,
        Texture8TexelSize,
        Texture9TexelSize
    }
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/TextureInputPinsManager.cs<|end_filename|>
using Stride.Graphics;
using Stride.Rendering;
using System;
using System.Collections.Generic;
using VL.Core;
namespace VL.Stride.Rendering
{
    /// <summary>
    /// Bridges a texture input pin to a shader texture pin: optionally strips the sRGB
    /// view format from the input and optionally generates mip maps before handing the
    /// texture to the shader. Update() runs on the main loop, Draw() on the render thread.
    /// </summary>
    class TexturePinManager : IDisposable
    {
        readonly NodeContext nodeContext;
        readonly IVLPin<Texture> texturePin;
        readonly IVLPin<Texture> shaderTexturePin;
        readonly IVLPin<bool> alwaysGeneratePin;
        // Mips are wanted only when an "always generate" pin was provided.
        readonly bool wantsMips;
        readonly bool dontUnapplySRgb;
        readonly string profilerName;

        // Input texture seen on the previous Update(); used for change detection.
        Texture lastInputTexture;
        // Effective input (possibly the non-sRGB view) handed on to the shader/mip generator.
        Texture inputTexture;
        // Non-sRGB view or copy of the input, created when dontUnapplySRgb is set.
        Texture nonSRgbView;
        MipMapGenerator generator;
        // True when the input must be copied into nonSRgbView each frame (non-typeless formats).
        bool copyToNonSRgb;
        // True when the mip generator must run in the next Draw().
        bool renderMips;

        public TexturePinManager(NodeContext nodeContext, IVLPin<Texture> texturePin, IVLPin<Texture> shaderTexturePin, IVLPin<bool> alwaysGeneratePin, bool dontUnapplySRgb, string profilerName = "Pin MipMap Generator")
        {
            this.nodeContext = nodeContext;
            this.texturePin = texturePin;
            this.shaderTexturePin = shaderTexturePin;
            this.alwaysGeneratePin = alwaysGeneratePin;
            this.wantsMips = alwaysGeneratePin != null;
            this.dontUnapplySRgb = dontUnapplySRgb;
            this.profilerName = profilerName;
        }

        /// <summary>Reacts to input changes, rebuilds the non-sRGB view and (re)wires the mip generator.</summary>
        public void Update()
        {
            var currentInputTexture = texturePin.Value;
            var inputChanged = currentInputTexture != lastInputTexture;
            lastInputTexture = currentInputTexture;

            if (inputChanged)
            {
                inputTexture = currentInputTexture;
                if (dontUnapplySRgb)
                {
                    // clear non srgb view
                    copyToNonSRgb = false;
                    nonSRgbView?.Dispose();
                    nonSRgbView = null;

                    if (inputTexture != null)
                    {
                        var viewFormat = inputTexture.ViewFormat;
                        if (viewFormat.IsSRgb())
                        {
                            var resourceFormat = inputTexture.Format;
                            if (resourceFormat.IsTypeless()) // Simple case, typeless resource with sRGB view
                            {
                                nonSRgbView = inputTexture.ToTextureView(new TextureViewDescription() { Format = viewFormat.ToNonSRgb() });
                            }
                            else // needs a copy into a non sRGB texture
                            {
                                var desc = inputTexture.Description.ToCloneableDescription();
                                desc.Format = desc.Format.ToNonSRgb();
                                nonSRgbView = Texture.New(inputTexture.GraphicsDevice, desc);
                                copyToNonSRgb = true;
                            }

                            inputTexture = nonSRgbView;
                        }
                    }
                }
            }

            // Input already has mips
            if (!wantsMips || inputTexture?.MipLevels > 1)
            {
                shaderTexturePin.Value = inputTexture;
                generator?.Dispose();
                generator = null;
                renderMips = false;
                return; //done
            }

            // Mips must be generated
            generator ??= new MipMapGenerator(nodeContext) { Name = profilerName };
            generator.InputTexture = inputTexture;
            shaderTexturePin.Value = generator.OutputTexture;
            renderMips = inputChanged || alwaysGeneratePin.Value;
        }

        /// <summary>Render-thread work: copies into the non-sRGB texture and runs the mip generator when scheduled.</summary>
        public void Draw(RenderDrawContext context)
        {
            if (copyToNonSRgb)
                context.CommandList.Copy(lastInputTexture, nonSRgbView);

            if (renderMips)
                generator.Draw(context);
        }

        public void Dispose()
        {
            nonSRgbView?.Dispose();
            nonSRgbView = null;
            generator?.Dispose();
            generator = null;
        }
    }
public class TextureInputPinsManager : IGraphicsRendererBase, IDisposable
{
readonly NodeContext nodeContext;
List<TexturePinManager> pins = new List<TexturePinManager>();
public TextureInputPinsManager(NodeContext nodeContext)
{
this.nodeContext = nodeContext;
}
public void AddInput(IVLPin<Texture> texturePin, IVLPin<Texture> shaderTexturePin, IVLPin<bool> alwaysGeneratePin, bool dontUnapplySRgb, string profilerName)
{
pins.Add(new TexturePinManager(nodeContext, texturePin, shaderTexturePin, alwaysGeneratePin, dontUnapplySRgb, profilerName));
}
public void Update()
{
for (int i = 0; i < pins.Count; i++)
{
pins[i].Update();
}
}
public void Draw(RenderDrawContext context)
{
for (int i = 0; i < pins.Count; i++)
{
pins[i].Draw(context);
}
}
public void Dispose()
{
for (int i = 0; i < pins.Count; i++)
{
pins[i].Dispose();
}
pins.Clear();
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Filters/ColorManipulation/Threshold_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
/// <summary>
/// Shader parameter keys for the Threshold texture effect.
/// Auto-generated from the associated .sdsl — do not hand-edit the key set.
/// </summary>
public static partial class Threshold_TextureFXKeys
{
    // Default: opaque black.
    public static readonly ValueParameterKey<Color4> OutputColorA = ParameterKeys.NewValue<Color4>(new Color4(0,0,0,1));
    // Default: opaque white.
    public static readonly ValueParameterKey<Color4> OutputColorB = ParameterKeys.NewValue<Color4>(new Color4(1,1,1,1));
    // Default: 0.5.
    public static readonly ValueParameterKey<float> Threshold = ParameterKeys.NewValue<float>(0.5f);
    // Default: 0.02.
    public static readonly ValueParameterKey<float> Smooth = ParameterKeys.NewValue<float>(0.02f);
    // No explicit default supplied (falls back to default(bool) == false).
    public static readonly ValueParameterKey<bool> Invert = ParameterKeys.NewValue<bool>();
    public static readonly ValueParameterKey<bool> Antialiasing = ParameterKeys.NewValue<bool>();
}
}
<|start_filename|>packages/VL.Stride.Windows/src/SpoutCSharp/TextureDesc.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Runtime.InteropServices;
using System.IO;
using Stride.Graphics;
namespace VL.Stride.Spout
{
/// <summary>
/// Plain-data descriptor for a shared texture exchanged over Spout
/// (shared handle, dimensions, DXGI format, usage and an opaque
/// description payload). Serialized to/from a memory-mapped file.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct TextureDesc
{
    // Size in bytes of the opaque Description payload.
    private const int DescriptionSize = 256;
    // Serialized size of the whole descriptor. Kept at 280 (5 * 4 + 256 = 276
    // plus 4 trailing padding bytes) for wire compatibility with existing
    // Spout senders/receivers — do not change.
    private const int SerializedSize = 280;

    public uint SharedHandle;
    public uint Width;
    public uint Height;
    public uint Format;
    public uint Usage;
    public byte[] Description;

    /// <summary>
    /// Builds a descriptor from an existing shared texture.
    /// </summary>
    /// <param name="texture">The texture whose shared handle and properties are captured.</param>
    public TextureDesc(Texture texture)
    {
        // NOTE: handles are truncated to 32 bits here; this matches the Spout
        // wire format, which only carries a 32-bit handle.
        SharedHandle = (uint)texture.SharedHandle.ToInt64();
        Width = (uint)texture.Width;
        Height = (uint)texture.Height;
        Format = (uint)texture.Format;
        /*
        working formats by requirements of NVIDIA GL/ DX interop according to leadlege:
        https://github.com/vvvv/vvvv-sdk/pull/293#issuecomment-645752662
        DXGI_FORMAT_R8G8B8A8_TYPELESS - 27
        DXGI_FORMAT_R8G8B8A8_UNORM - 28
        DXGI_FORMAT_R8G8B8A8_UNORM_SRGB - 29
        DXGI_FORMAT_R8G8B8A8_UINT - 30
        DXGI_FORMAT_R8G8B8A8_SNORM - 31
        DXGI_FORMAT_R8G8B8A8_SINT - 32
        DXGI_FORMAT_B8G8R8A8_UNORM - 87
        DXGI_FORMAT_B8G8R8X8_UNORM - 88
        DXGI_FORMAT_B8G8R8A8_TYPELESS - 90
        DXGI_FORMAT_B8G8R8A8_UNORM_SRGB - 91
        DXGI_FORMAT_B8G8R8X8_TYPELESS - 92
        DXGI_FORMAT_B8G8R8X8_UNORM_SRGB - 93
        DXGI_FORMAT_R16G16B16A16_UNORM - 11
        */
        Usage = 1;
        Description = new byte[DescriptionSize];
    }

    /// <summary>
    /// Reads a descriptor from a memory-mapped view stream positioned at the
    /// start of a serialized <see cref="TextureDesc"/>.
    /// </summary>
    /// <param name="mmvs">The stream to read from. Ownership stays with the caller.</param>
    public TextureDesc(System.IO.MemoryMappedFiles.MemoryMappedViewStream mmvs)
    {
        // Deliberately not disposed: disposing the reader would close the
        // caller-owned memory-mapped view stream.
        BinaryReader br = new BinaryReader(mmvs);
        SharedHandle = br.ReadUInt32();
        Width = br.ReadUInt32();
        Height = br.ReadUInt32();
        Format = br.ReadUInt32();
        Usage = br.ReadUInt32();
        Description = br.ReadBytes(DescriptionSize);
    }

    /// <summary>
    /// Serializes this descriptor into a fixed-size 280-byte buffer
    /// (little-endian fields followed by the description payload).
    /// </summary>
    /// <returns>A newly allocated byte array holding the serialized descriptor.</returns>
    public byte[] ToByteArray()
    {
        byte[] b = new byte[SerializedSize];
        // using-blocks make disposal exception-safe (the original disposed manually,
        // leaking the writer/stream if a Write threw).
        using (MemoryStream ms = new MemoryStream(b))
        using (BinaryWriter bw = new BinaryWriter(ms))
        {
            bw.Write(SharedHandle);
            bw.Write(Width);
            bw.Write(Height);
            bw.Write(Format);
            bw.Write(Usage);
            bw.Write(Description, 0, Description.Length);
        }
        return b;
    }
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Graphics/GraphicsNodes.BufferBuilder.cs<|end_filename|>
using Stride.Core;
using Stride.Engine;
using Stride.Graphics;
using System;
using VL.Core;
using VL.Lib.Basics.Resources;
using Buffer = Stride.Graphics.Buffer;
namespace VL.Stride.Graphics
{
static partial class GraphicsNodes
{
/// <summary>
/// Lazily (re)creates a Stride <see cref="Buffer"/> from a description, a view
/// description and optional initial data. The buffer is rebuilt on the next
/// <see cref="Buffer"/> access after any property change, or on every access
/// while <see cref="Recreate"/> is set.
/// </summary>
class BufferBuilder : IDisposable
{
    private BufferDescription description;
    private BufferViewDescription viewDescription;
    private IGraphicsDataProvider initalData;
    // Set by the property setters; consumed (and cleared) by the Buffer getter.
    private bool needsRebuild = true;
    private Buffer buffer;
    // When set, the buffer is recreated on every Buffer access.
    internal bool Recreate;
    private readonly IResourceHandle<Game> gameHandle;

    /// <summary>
    /// The built buffer. Rebuilds on demand; null if the last rebuild failed.
    /// </summary>
    public Buffer Buffer
    {
        get
        {
            if (needsRebuild || Recreate)
            {
                RebuildBuffer();
                // Cleared even when the rebuild failed, so a bad description is
                // not retried until a property changes (or Recreate forces it).
                needsRebuild = false;
            }
            return buffer;
        }
        private set => buffer = value;
    }

    /// <summary>Buffer creation parameters; setting schedules a rebuild.</summary>
    public BufferDescription Description
    {
        get => description;
        set
        {
            description = value;
            needsRebuild = true;
        }
    }

    /// <summary>View creation parameters; setting schedules a rebuild.</summary>
    public BufferViewDescription ViewDescription
    {
        get => viewDescription;
        set
        {
            viewDescription = value;
            needsRebuild = true;
        }
    }

    /// <summary>Optional initial buffer contents; setting schedules a rebuild.</summary>
    public IGraphicsDataProvider InitalData
    {
        get => initalData;
        set
        {
            initalData = value;
            needsRebuild = true;
        }
    }

    public BufferBuilder(NodeContext nodeContext)
    {
        gameHandle = nodeContext.GetGameHandle();
    }

    /// <summary>Releases the buffer and the game resource handle.</summary>
    public void Dispose()
    {
        buffer?.Dispose();
        buffer = null;
        gameHandle.Dispose();
    }

    /// <summary>
    /// Disposes any existing buffer and creates a new one from the current
    /// description/view/initial data. On failure the buffer is left null.
    /// </summary>
    private void RebuildBuffer()
    {
        // Pin the initial data (if any) so its pointer stays valid during creation;
        // unpinned in the finally block below.
        var pin = PinnedGraphicsData.None;
        if (initalData != null)
        {
            pin = initalData.Pin();
        }
        try
        {
            buffer?.Dispose();
            buffer = null;
            var game = gameHandle.Resource;
            // If no explicit view flags were given, derive them from the buffer flags.
            if (viewDescription.Flags == BufferFlags.None)
                viewDescription.Flags = description.BufferFlags;
            buffer = BufferExtensions.New(game.GraphicsDevice, description, viewDescription, pin.Pointer);
        }
        catch
        {
            // Creation failed (e.g. invalid description): expose null instead of throwing.
            buffer = null;
        }
        finally
        {
            pin.Dispose();
        }
    }
}
/// <summary>
/// Lazily (re)creates a buffer view over an existing <see cref="Buffer"/>.
/// The view is rebuilt on the next <see cref="Buffer"/> access after any
/// property change, or on every access while <see cref="Recreate"/> is set.
/// </summary>
class BufferViewBuilder : IDisposable
{
    private Buffer buffer;
    private BufferViewDescription viewDescription;
    // Set by the property setters; consumed (and cleared) by the Buffer getter.
    private bool needsRebuild = true;
    private Buffer bufferView;
    // When set, the view is recreated on every Buffer access.
    internal bool Recreate;
    private readonly IResourceHandle<Game> gameHandle;

    /// <summary>
    /// The built buffer view. Rebuilds on demand; null if the inputs are
    /// invalid or the last rebuild failed.
    /// </summary>
    public Buffer Buffer
    {
        get
        {
            if (needsRebuild || Recreate)
            {
                RebuildBufferView();
                needsRebuild = false;
            }
            return bufferView;
        }
        private set => bufferView = value;
    }

    /// <summary>The source buffer to create a view of; setting schedules a rebuild.</summary>
    public Buffer Input
    {
        get => buffer;
        set
        {
            buffer = value;
            needsRebuild = true;
        }
    }

    /// <summary>View creation parameters; setting schedules a rebuild.</summary>
    public BufferViewDescription ViewDescription
    {
        get => viewDescription;
        set
        {
            viewDescription = value;
            needsRebuild = true;
        }
    }

    public BufferViewBuilder(NodeContext nodeContext)
    {
        gameHandle = nodeContext.GetGameHandle();
    }

    /// <summary>Releases the view and the game resource handle.</summary>
    public void Dispose()
    {
        bufferView?.Dispose();
        bufferView = null;
        gameHandle.Dispose();
    }

    /// <summary>
    /// Disposes any existing view and creates a new one, provided a source
    /// buffer is set, explicit view flags are given, and the source is a raw
    /// buffer. Otherwise (or on failure) the view is left null.
    /// </summary>
    private void RebuildBufferView()
    {
        try
        {
            if (bufferView != null)
            {
                bufferView.Dispose();
                bufferView = null;
            }
            if (buffer != null
                && viewDescription.Flags != BufferFlags.None
                && ((buffer.Flags & BufferFlags.RawBuffer) != 0))
            {
                // bufferView is always null at this point, so this allocates a
                // fresh Buffer wrapper for ToBufferView to initialize.
                bufferView ??= new Buffer();
                var game = gameHandle.Resource;
                bufferView = buffer.ToBufferView(bufferView, viewDescription, game.GraphicsDevice);
            }
            else
            {
                bufferView = null;
            }
        }
        catch
        {
            // View creation failed: expose null instead of throwing.
            bufferView = null;
        }
    }
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Filters/ColorManipulation/Keying/ChannelKeying_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
/// <summary>
/// Shader parameter keys for the ChannelKeying texture effect.
/// Auto-generated from the associated .sdsl — do not hand-edit the key set.
/// </summary>
public static partial class ChannelKeying_TextureFXKeys
{
    // No explicit default supplied (falls back to default(int) == 0).
    public static readonly ValueParameterKey<int> Type = ParameterKeys.NewValue<int>();
    // Default: 0.3.
    public static readonly ValueParameterKey<float> Threshold = ParameterKeys.NewValue<float>(0.3f);
    // Default: 0.1.
    public static readonly ValueParameterKey<float> Smooth = ParameterKeys.NewValue<float>(0.1f);
    // Default: 0.0.
    public static readonly ValueParameterKey<float> AlphaBlur = ParameterKeys.NewValue<float>(0.0f);
    public static readonly ValueParameterKey<bool> Invert = ParameterKeys.NewValue<bool>(false);
    public static readonly ValueParameterKey<bool> SourceAlpha = ParameterKeys.NewValue<bool>(false);
    public static readonly ValueParameterKey<bool> Premultiply = ParameterKeys.NewValue<bool>(false);
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Images/AmbientOcclusionWithOrtho/AmbientOcclusionWithOrtho.cs<|end_filename|>
// Copyright (c) Stride contributors (https://stride3d.net) and Silicon Studio Corp. (https://www.siliconstudio.co.jp)
// Distributed under the MIT license. See the LICENSE.md file in the project root for more information.
using System;
using System.ComponentModel;
using Stride.Core;
using Stride.Core.Annotations;
using Stride.Core.Mathematics;
using Stride.Graphics;
using Stride.Rendering;
using Stride.Rendering.Images;
namespace VL.Stride.Rendering.Images
{
/// <summary>
/// Applies an ambient occlusion effect to a scene. Ambient occlusion is a technique which fakes occlusion for objects close to other opaque objects.
/// It takes as input a color-buffer where the scene was rendered, with its associated depth-buffer.
/// You also need to provide the camera configuration you used when rendering the scene.
/// Extends the stock AmbientOcclusion with support for orthographic projections.
/// </summary>
[DataContract("AmbientOcclusionWithOrtho")]
public class AmbientOcclusionWithOrtho : AmbientOcclusion
{
    private ImageEffectShader aoRawImageEffect;   // raw AO estimation pass
    private ImageEffectShader blurH;              // horizontal blur pass
    private ImageEffectShader blurV;              // vertical blur pass
    private string nameGaussianBlurH;
    private string nameGaussianBlurV;
    private float[] offsetsWeights;               // gaussian kernel weights, lazily initialized in DrawCore
    private ImageEffectShader aoApplyImageEffect; // composites AO onto the color buffer

    public AmbientOcclusionWithOrtho()
    {
        //Enabled = false;
        // Default effect parameters.
        NumberOfSamples = 13;
        ParamProjScale = 0.5f;
        ParamIntensity = 0.2f;
        ParamBias = 0.01f;
        ParamRadius = 1f;
        NumberOfBounces = 2;
        BlurScale = 1.85f;
        EdgeSharpness = 3f;
        TempSize = TemporaryBufferSize.SizeFull;
    }

    // Loads and initializes the apply, raw-AO and blur shaders.
    protected override void InitializeCore()
    {
        base.InitializeCore();
        aoApplyImageEffect = ToLoadAndUnload(new ImageEffectShader("ApplyAmbientOcclusionWithOrthoShader"));
        aoRawImageEffect = ToLoadAndUnload(new ImageEffectShader("AmbientOcclusionWithOrthoRawAOEffect"));
        aoRawImageEffect.Initialize(Context);
        blurH = ToLoadAndUnload(new ImageEffectShader("AmbientOcclusionWithOrthoBlurEffect"));
        blurV = ToLoadAndUnload(new ImageEffectShader("AmbientOcclusionWithOrthoBlurEffect", true));
        blurH.Initialize(Context);
        blurV.Initialize(Context);
        // Setup Horizontal parameters
        blurH.Parameters.Set(AmbientOcclusionWithOrthoBlurKeys.VerticalBlur, false);
        blurV.Parameters.Set(AmbientOcclusionWithOrthoBlurKeys.VerticalBlur, true);
    }

    // Renders raw AO into a temporary target, optionally blurs it for a number
    // of "bounces", then composites it onto the color buffer.
    protected override void DrawCore(RenderDrawContext context)
    {
        var originalColorBuffer = GetSafeInput(0);
        var originalDepthBuffer = GetSafeInput(1);
        var outputTexture = GetSafeOutput(0);
        var renderView = context.RenderContext.RenderView;
        //---------------------------------
        // Ambient Occlusion
        //---------------------------------
        // Temporary AO targets, possibly downscaled relative to the color buffer.
        var tempWidth = (originalColorBuffer.Width * (int)TempSize) / (int)TemporaryBufferSize.SizeFull;
        var tempHeight = (originalColorBuffer.Height * (int)TempSize) / (int)TemporaryBufferSize.SizeFull;
        var aoTexture1 = NewScopedRenderTarget2D(tempWidth, tempHeight, PixelFormat.R8_UNorm, 1);
        var aoTexture2 = NewScopedRenderTarget2D(tempWidth, tempHeight, PixelFormat.R8_UNorm, 1);
        aoRawImageEffect.Parameters.Set(AmbientOcclusionWithOrthoRawAOKeys.Count, NumberOfSamples > 0 ? NumberOfSamples : 9);
        // check whether the projection matrix is orthographic
        var isOrtho = renderView.Projection.M44 == 1;
        aoRawImageEffect.Parameters.Set(AmbientOcclusionWithOrthoRawAOKeys.IsOrtho, isOrtho);
        blurH.Parameters.Set(AmbientOcclusionWithOrthoBlurKeys.IsOrtho, isOrtho);
        blurV.Parameters.Set(AmbientOcclusionWithOrthoBlurKeys.IsOrtho, isOrtho);
        Vector2 zProj;
        if (isOrtho)
        {
            zProj = new Vector2(renderView.NearClipPlane, renderView.FarClipPlane - renderView.NearClipPlane);
        }
        else
        {
            zProj = CameraKeys.ZProjectionACalculate(renderView.NearClipPlane, renderView.FarClipPlane);
        }
        // Set Near/Far pre-calculated factors to speed up the linear depth reconstruction
        aoRawImageEffect.Parameters.Set(CameraKeys.ZProjection, ref zProj);
        Vector4 screenSize = new Vector4(originalColorBuffer.Width, originalColorBuffer.Height, 0, 0);
        screenSize.Z = screenSize.X / screenSize.Y;
        aoRawImageEffect.Parameters.Set(AmbientOcclusionWithOrthoRawAOShaderKeys.ScreenInfo, screenSize);
        Vector4 projInfo;
        if (isOrtho)
        {
            // The ortho scale to map the xy coordinates
            float scaleX = 1 / renderView.Projection.M11;
            float scaleY = 1 / renderView.Projection.M22;
            // Constant factor to map the ProjScale parameter to the ortho scale
            float projZScale = Math.Max(scaleX, scaleY) * 4;
            projInfo = new Vector4(scaleX, scaleY, projZScale, 0);
        }
        else
        {
            // Projection info used to reconstruct the View space position from linear depth
            var p00 = renderView.Projection.M11;
            var p11 = renderView.Projection.M22;
            var p02 = renderView.Projection.M13;
            var p12 = renderView.Projection.M23;
            projInfo = new Vector4(
                -2.0f / (screenSize.X * p00),
                -2.0f / (screenSize.Y * p11),
                (1.0f - p02) / p00,
                (1.0f + p12) / p11);
        }
        aoRawImageEffect.Parameters.Set(AmbientOcclusionWithOrthoRawAOShaderKeys.ProjInfo, ref projInfo);
        //**********************************
        // User parameters
        aoRawImageEffect.Parameters.Set(AmbientOcclusionWithOrthoRawAOShaderKeys.ParamProjScale, ParamProjScale);
        aoRawImageEffect.Parameters.Set(AmbientOcclusionWithOrthoRawAOShaderKeys.ParamIntensity, ParamIntensity);
        aoRawImageEffect.Parameters.Set(AmbientOcclusionWithOrthoRawAOShaderKeys.ParamBias, ParamBias);
        aoRawImageEffect.Parameters.Set(AmbientOcclusionWithOrthoRawAOShaderKeys.ParamRadius, ParamRadius);
        aoRawImageEffect.Parameters.Set(AmbientOcclusionWithOrthoRawAOShaderKeys.ParamRadiusSquared, ParamRadius * ParamRadius);
        aoRawImageEffect.SetInput(0, originalDepthBuffer);
        aoRawImageEffect.SetOutput(aoTexture1);
        aoRawImageEffect.Draw(context, "AmbientOcclusionWithOrthoRawAO");
        // Each "bounce" is one horizontal + one vertical blur pass, ping-ponging
        // between aoTexture1 and aoTexture2 (final result ends in aoTexture1).
        for (int bounces = 0; bounces < NumberOfBounces; bounces++)
        {
            if (offsetsWeights == null)
            {
                offsetsWeights = new[]
                {
                    // 0.356642f, 0.239400f, 0.072410f, 0.009869f,
                    // 0.398943f, 0.241971f, 0.053991f, 0.004432f, 0.000134f, // stddev = 1.0
                    0.153170f, 0.144893f, 0.122649f, 0.092902f, 0.062970f, // stddev = 2.0
                    // 0.111220f, 0.107798f, 0.098151f, 0.083953f, 0.067458f, 0.050920f, 0.036108f, // stddev = 3.0
                };
                nameGaussianBlurH = string.Format("AmbientOcclusionWithOrthoBlurH{0}x{0}", offsetsWeights.Length);
                nameGaussianBlurV = string.Format("AmbientOcclusionWithOrthoBlurV{0}x{0}", offsetsWeights.Length);
            }
            // Set Near/Far pre-calculated factors to speed up the linear depth reconstruction
            blurH.Parameters.Set(CameraKeys.ZProjection, ref zProj);
            blurV.Parameters.Set(CameraKeys.ZProjection, ref zProj);
            // Update permutation parameters
            blurH.Parameters.Set(AmbientOcclusionWithOrthoBlurKeys.Count, offsetsWeights.Length);
            blurH.Parameters.Set(AmbientOcclusionWithOrthoBlurKeys.BlurScale, BlurScale);
            blurH.Parameters.Set(AmbientOcclusionWithOrthoBlurKeys.EdgeSharpness, EdgeSharpness);
            blurH.EffectInstance.UpdateEffect(context.GraphicsDevice);
            blurV.Parameters.Set(AmbientOcclusionWithOrthoBlurKeys.Count, offsetsWeights.Length);
            blurV.Parameters.Set(AmbientOcclusionWithOrthoBlurKeys.BlurScale, BlurScale);
            blurV.Parameters.Set(AmbientOcclusionWithOrthoBlurKeys.EdgeSharpness, EdgeSharpness);
            blurV.EffectInstance.UpdateEffect(context.GraphicsDevice);
            // Update parameters
            blurH.Parameters.Set(AmbientOcclusionWithOrthoBlurShaderKeys.Weights, offsetsWeights);
            blurV.Parameters.Set(AmbientOcclusionWithOrthoBlurShaderKeys.Weights, offsetsWeights);
            // Horizontal pass
            blurH.SetInput(0, aoTexture1);
            blurH.SetInput(1, originalDepthBuffer);
            blurH.SetOutput(aoTexture2);
            blurH.Draw(context, nameGaussianBlurH);
            // Vertical pass
            blurV.SetInput(0, aoTexture2);
            blurV.SetInput(1, originalDepthBuffer);
            blurV.SetOutput(aoTexture1);
            blurV.Draw(context, nameGaussianBlurV);
        }
        // Composite the (blurred) AO term onto the original color buffer.
        aoApplyImageEffect.SetInput(0, originalColorBuffer);
        aoApplyImageEffect.SetInput(1, aoTexture1);
        aoApplyImageEffect.SetOutput(outputTexture);
        aoApplyImageEffect.Draw(context, "AmbientOcclusionWithOrthoApply");
    }
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Shaders/ShaderFX/Operations/BlendOperation.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Stride.Core.Mathematics;
using Stride.Rendering.Materials;
using Stride.Shaders;
namespace VL.Stride.Shaders.ShaderFX
{
/// <summary>
/// ShaderFX binary operation that blends two float4 operands with the shader
/// named by the given <see cref="BlendOperator"/>.
/// </summary>
public class BlendOperation : BinaryOperation<Vector4>
{
    public BlendOperation(BlendOperator blendOperation, IComputeValue<Vector4> left, IComputeValue<Vector4> right)
        : base(blendOperation.GetShaderSourceName(), left, right)
    {
    }

    /// <summary>
    /// Builds the shader graph: wraps both operands as colors, feeds them into
    /// the blend-operator mixin, and converts the blended color back to float4.
    /// </summary>
    public override ShaderSource GenerateShaderSource(ShaderGeneratorContext context, MaterialComputeColorKeys baseKeys)
    {
        // Wrap each float4 operand so the blend shader sees it as a color.
        var wrappedLeft = new ShaderClassSource("Float4ToColor").CreateMixin();
        wrappedLeft.AddComposition(Left, "Value", context, baseKeys);

        var wrappedRight = new ShaderClassSource("Float4ToColor").CreateMixin();
        wrappedRight.AddComposition(Right, "Value", context, baseKeys);

        // ShaderName was derived from the BlendOperator in the constructor.
        var blendMixin = new ShaderClassSource(ShaderName).CreateMixin();
        blendMixin.AddComposition("color1", wrappedLeft);
        blendMixin.AddComposition("color2", wrappedRight);

        // Convert the blended color back to a float4 result.
        var backToFloat4 = new ShaderClassSource("ColorToFloat4").CreateMixin();
        backToFloat4.AddComposition("Value", blendMixin);
        return backToFloat4;
    }
}
/// <summary>
/// Operands of the Blend node.
/// </summary>
public enum BlendOperator
{
    /// <summary>
    /// Adds the two textures.
    /// </summary>
    Add,
    /// <summary>
    /// Average of the two textures.
    /// </summary>
    Average,
    /// <summary>
    /// Color effect from the two textures.
    /// </summary>
    Color,
    /// <summary>
    /// Color burn effect from the two textures.
    /// </summary>
    ColorBurn,
    /// <summary>
    /// Color dodge effect from the two textures.
    /// </summary>
    ColorDodge,
    /// <summary>
    /// Darken effect from the two textures.
    /// </summary>
    Darken,
    /// <summary>
    /// Desaturate effect from the two textures.
    /// </summary>
    Desaturate,
    /// <summary>
    /// Difference of the two textures.
    /// </summary>
    Difference,
    /// <summary>
    /// Divides the first texture by the second one.
    /// </summary>
    Divide,
    /// <summary>
    /// Exclusion effect from the two textures.
    /// </summary>
    Exclusion,
    /// <summary>
    /// Hard light effect from the two textures.
    /// </summary>
    HardLight,
    /// <summary>
    /// Hard mix effect from the two textures.
    /// </summary>
    HardMix,
    /// <summary>
    /// Hue effect from the two textures.
    /// </summary>
    Hue,
    /// <summary>
    /// Illuminate effect from the two textures.
    /// </summary>
    Illuminate,
    /// <summary>
    /// In effect from the two textures.
    /// </summary>
    In,
    /// <summary>
    /// Inverse effect from the two textures.
    /// </summary>
    Inverse,
    /// <summary>
    /// Lighten effect from the two textures.
    /// </summary>
    Lighten,
    /// <summary>
    /// Linear burn effect from the two textures.
    /// </summary>
    LinearBurn,
    /// <summary>
    /// Linear dodge effect from the two textures.
    /// </summary>
    LinearDodge,
    /// <summary>
    /// Apply mask from second texture to the first one.
    /// </summary>
    Mask,
    /// <summary>
    /// Multiply the two textures.
    /// </summary>
    Multiply,
    /// <summary>
    /// Out effect from the two textures.
    /// </summary>
    Out,
    /// <summary>
    /// Over effect from the two textures.
    /// </summary>
    Over,
    /// <summary>
    /// Overlay effect from the two textures.
    /// </summary>
    Overlay,
    /// <summary>
    /// Pin light effect from the two textures.
    /// </summary>
    PinLight,
    /// <summary>
    /// Saturate effect from the two textures.
    /// </summary>
    Saturate,
    /// <summary>
    /// Saturation effect from the two textures.
    /// </summary>
    Saturation,
    /// <summary>
    /// Screen effect from the two textures.
    /// </summary>
    Screen,
    /// <summary>
    /// Soft light effect from the two textures.
    /// </summary>
    SoftLight,
    /// <summary>
    /// Subtract the two textures.
    /// </summary>
    Subtract,
    /// <summary>
    /// Take color from the first texture but alpha from the second.
    /// </summary>
    SubstituteAlpha,
    /// <summary>
    /// Threshold, resulting in a black-white texture for grayscale against a set threshold.
    /// </summary>
    Threshold,
    //TODO: lerp, clamp ?
}
public static class BlendOperatorExtensions
{
    /// <summary>
    /// Get the name of the ShaderClassSource corresponding to the operation.
    /// </summary>
    /// <param name="blendOperation">The operand.</param>
    /// <returns>The name of the ShaderClassSource.</returns>
    /// <exception cref="ArgumentOutOfRangeException">
    /// Thrown when <paramref name="blendOperation"/> is not a known <see cref="BlendOperator"/> value.
    /// </exception>
    public static string GetShaderSourceName(this BlendOperator blendOperation)
    {
        switch (blendOperation)
        {
            case BlendOperator.Add:
                return "ComputeColorAdd3ds"; //TODO: change this (ComputeColorAdd?)
            case BlendOperator.Average:
                return "ComputeColorAverage";
            case BlendOperator.Color:
                return "ComputeColorColor";
            case BlendOperator.ColorBurn:
                return "ComputeColorColorBurn";
            case BlendOperator.ColorDodge:
                return "ComputeColorColorDodge";
            case BlendOperator.Darken:
                return "ComputeColorDarken3ds"; //"ComputeColorDarkenMaya" //TODO: change this
            case BlendOperator.Desaturate:
                return "ComputeColorDesaturate";
            case BlendOperator.Difference:
                return "ComputeColorDifference3ds"; //"ComputeColorDifferenceMaya" //TODO: change this
            case BlendOperator.Divide:
                return "ComputeColorDivide";
            case BlendOperator.Exclusion:
                return "ComputeColorExclusion";
            case BlendOperator.HardLight:
                return "ComputeColorHardLight";
            case BlendOperator.HardMix:
                return "ComputeColorHardMix";
            case BlendOperator.Hue:
                return "ComputeColorHue";
            case BlendOperator.Illuminate:
                return "ComputeColorIlluminate";
            case BlendOperator.In:
                return "ComputeColorIn";
            case BlendOperator.Inverse:
                return "ComputeColorInverse";
            case BlendOperator.Lighten:
                return "ComputeColorLighten3ds"; //"ComputeColorLightenMaya" //TODO: change this
            case BlendOperator.LinearBurn:
                return "ComputeColorLinearBurn";
            case BlendOperator.LinearDodge:
                return "ComputeColorLinearDodge";
            case BlendOperator.Mask:
                return "ComputeColorMask";
            case BlendOperator.Multiply:
                return "ComputeColorMultiply"; //return "ComputeColorMultiply3ds"; //"ComputeColorMultiplyMaya" //TODO: change this
            case BlendOperator.Out:
                return "ComputeColorOut";
            case BlendOperator.Over:
                return "ComputeColorOver3ds"; //TODO: change this to "ComputeColorLerpAlpha"
            case BlendOperator.Overlay:
                return "ComputeColorOverlay3ds"; //"ComputeColorOverlayMaya" //TODO: change this
            case BlendOperator.PinLight:
                return "ComputeColorPinLight";
            case BlendOperator.Saturate:
                return "ComputeColorSaturate";
            case BlendOperator.Saturation:
                return "ComputeColorSaturation";
            case BlendOperator.Screen:
                return "ComputeColorScreen";
            case BlendOperator.SoftLight:
                return "ComputeColorSoftLight";
            case BlendOperator.Subtract:
                return "ComputeColorSubtract"; // "ComputeColorSubtract3ds" "ComputeColorSubtractMaya" //TODO: change this
            case BlendOperator.SubstituteAlpha:
                return "ComputeColorSubstituteAlpha";
            case BlendOperator.Threshold:
                return "ComputeColorThreshold";
            default:
                // Fix: the original threw with the non-existent parameter name "binaryOperand";
                // report the actual parameter and offending value instead.
                throw new ArgumentOutOfRangeException(nameof(blendOperation), blendOperation, null);
        }
    }
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/EffectShaderNodes.ComputeFX.cs<|end_filename|>
using Stride.Core;
using Stride.Core.Mathematics;
using Stride.Graphics;
using Stride.Rendering;
using Stride.Rendering.ComputeEffect;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reactive.Linq;
using VL.Core;
using VL.Model;
using VL.Stride.Rendering.ComputeEffect;
namespace VL.Stride.Rendering
{
static partial class EffectShaderNodes
{
/// <summary>
/// Builds a VL node description that wraps a compute shader as a ComputeFX node.
/// The node exposes a dispatcher pin, a thread-group-size pin, one pin per
/// reflected shader parameter, and an Enabled pin; its output is the effect
/// as an <see cref="IGraphicsRendererBase"/>.
/// </summary>
/// <param name="factory">The node description factory to register with.</param>
/// <param name="name">Display name (and version) of the node.</param>
/// <param name="shaderName">Name of the compute shader to wrap.</param>
/// <param name="shaderMetadata">Parsed shader metadata (tags, pin docs, defaults).</param>
/// <param name="changes">Observable that invalidates the node when the shader changes.</param>
/// <param name="openEditor">Callback that opens the shader source in an editor.</param>
/// <param name="serviceRegistry">Stride service registry.</param>
/// <param name="graphicsDevice">Device used for shader compilation and pin creation.</param>
static IVLNodeDescription NewComputeEffectShaderNode(this IVLNodeDescriptionFactory factory, NameAndVersion name, string shaderName, ShaderMetadata shaderMetadata, IObservable<object> changes, Func<bool> openEditor, IServiceRegistry serviceRegistry, GraphicsDevice graphicsDevice)
{
    return factory.NewNodeDescription(
        name: name,
        category: "Stride.Rendering.ComputeShaders",
        tags: shaderMetadata.Tags,
        fragmented: true,
        invalidated: changes,
        init: buildContext =>
        {
            // Parameters used only to compile a reflection instance of the effect
            // so the available shader parameters can be enumerated below.
            var _parameters = new ParameterCollection();
            _parameters.Set(ComputeShaderBaseKeys.ThreadGroupCountGlobal, Int3.One);
            _parameters.Set(ComputeEffectShaderKeys.ThreadNumbers, Int3.One);
            BuildBaseMixin(shaderName, shaderMetadata, graphicsDevice, out var shaderMixinSource, _parameters);
            var (_effect, _messages) = CreateEffectInstance("ComputeFXEffect", shaderMetadata, serviceRegistry, graphicsDevice, _parameters, baseShaderName: shaderName);
            // Predefined pins that every ComputeFX node gets.
            var _dispatcherInput = new PinDescription<IComputeEffectDispatcher>("Dispatcher");
            var _threadNumbersInput = new PinDescription<Int3>("Thread Group Size", Int3.One);
            var _inputs = new List<IVLPinDescription>()
            {
                _dispatcherInput,
                _threadNumbersInput
            };
            var _outputs = new List<IVLPinDescription>() { buildContext.Pin("Output", typeof(IGraphicsRendererBase)) };
            // Reserved pin names that reflected parameters must not collide with.
            var usedNames = new HashSet<string>()
            {
                "Enabled"
            };
            // One pin per reflected shader parameter.
            foreach (var parameter in GetParameters(_effect))
            {
                var key = parameter.Key;
                var name = key.Name;
                var typeInPatch = shaderMetadata.GetPinType(key, out var boxedDefaultValue);
                shaderMetadata.GetPinDocuAndVisibility(key, out var summary, out var remarks, out var isOptional);
                _inputs.Add(new ParameterPinDescription(usedNames, key, parameter.Count, defaultValue: boxedDefaultValue, typeInPatch: typeInPatch) { IsVisible = !isOptional, Summary = summary, Remarks = remarks });
            }
            IVLPinDescription _enabledInput;
            _inputs.Add(_enabledInput = new PinDescription<bool>("Enabled", defaultValue: true));
            return buildContext.NewNode(
                inputs: _inputs,
                outputs: _outputs,
                messages: _messages,
                summary: shaderMetadata.Summary,
                remarks: shaderMetadata.Remarks,
                newNode: nodeBuildContext =>
                {
                    // Per-instance state: each node gets its own effect and parameters.
                    var gameHandle = nodeBuildContext.NodeContext.GetGameHandle();
                    var renderContext = RenderContext.GetShared(gameHandle.Resource.Services);
                    var mixinParams = BuildBaseMixin(shaderName, shaderMetadata, graphicsDevice, out var shaderMixinSource);
                    var effect = new VLComputeEffectShader(renderContext, shaderName, mixinParams);
                    var inputs = new List<IVLPin>();
                    var enabledInput = default(IVLPin);
                    foreach (var _input in _inputs)
                    {
                        // Handle the predefined pins first
                        if (_input == _dispatcherInput)
                            inputs.Add(nodeBuildContext.Input<IComputeEffectDispatcher>(setter: v => effect.Dispatcher = v));
                        else if (_input == _threadNumbersInput)
                            inputs.Add(nodeBuildContext.Input<Int3>(setter: v => effect.ThreadGroupSize = v));
                        else if (_input == _enabledInput)
                            inputs.Add(enabledInput = nodeBuildContext.Input<bool>(v => effect.Enabled = v, effect.Enabled));
                        else if (_input is ParameterPinDescription parameterPinDescription)
                            inputs.Add(parameterPinDescription.CreatePin(graphicsDevice, effect.Parameters));
                    }
                    var compositionPins = inputs.OfType<ShaderFXPin>().ToList();
                    // The output pin refreshes the shader compositions before handing out the effect.
                    var effectOutput = nodeBuildContext.Output(() =>
                    {
                        UpdateCompositions(compositionPins, graphicsDevice, effect.Parameters, shaderMixinSource, effect.Subscriptions);
                        return effect;
                    });
                    return nodeBuildContext.Node(
                        inputs: inputs,
                        outputs: new[] { effectOutput },
                        update: default,
                        dispose: () =>
                        {
                            effect.Dispose();
                            gameHandle.Dispose();
                        });
                },
                openEditor: openEditor
            );
        });
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/EffectShaderNodes.TextureFX.cs<|end_filename|>
using Stride.Core;
using Stride.Core.Extensions;
using Stride.Core.Mathematics;
using Stride.Graphics;
using Stride.Rendering;
using Stride.Rendering.Images;
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Reactive.Linq;
using VL.Core;
using VL.Model;
using VL.Stride.Graphics;
using VL.Stride.Engine;
namespace VL.Stride.Rendering
{
static partial class EffectShaderNodes
{
// Default display names for the texture and sampler input pins generated below.
const string textureInputName = "Input";
const string samplerInputName = "Sampler";
/// <summary>
/// Builds a VL node description that wraps a TextureFX shader as an image
/// effect node. Texture parameters become "Input"/"Input N" pins, sampler
/// parameters are ordered right after their corresponding texture pins, and
/// predefined "Output Texture" and "Enabled" pins are appended.
/// </summary>
/// <param name="factory">The node description factory to register with.</param>
/// <param name="name">Display name (and version) of the node.</param>
/// <param name="shaderName">Name of the TextureFX shader to wrap.</param>
/// <param name="shaderMetadata">Parsed shader metadata (tags, pin docs, defaults).</param>
/// <param name="changes">Observable that invalidates the node when the shader changes.</param>
/// <param name="openEditor">Callback that opens the shader source in an editor.</param>
/// <param name="serviceRegistry">Stride service registry.</param>
/// <param name="graphicsDevice">Device used for shader compilation and pin creation.</param>
static IVLNodeDescription NewImageEffectShaderNode(this IVLNodeDescriptionFactory factory, NameAndVersion name, string shaderName, ShaderMetadata shaderMetadata, IObservable<object> changes, Func<bool> openEditor, IServiceRegistry serviceRegistry, GraphicsDevice graphicsDevice)
{
    return factory.NewNodeDescription(
        name: name,
        category: "Stride.Rendering.ImageShaders.Experimental.Advanced",
        tags: shaderMetadata.Tags,
        fragmented: true,
        invalidated: changes,
        init: buildContext =>
        {
            // Compile a reflection instance of the effect to enumerate its parameters.
            var mixinParams = BuildBaseMixin(shaderName, shaderMetadata, graphicsDevice, out var shaderMixinSource);
            var (_effect, _messages) = CreateEffectInstance("TextureFXEffect", shaderMetadata, serviceRegistry, graphicsDevice, mixinParams, baseShaderName: shaderName);
            var _inputs = new List<IVLPinDescription>();
            var _outputs = new List<IVLPinDescription>() { buildContext.Pin("Output", typeof(ImageEffectShader)) };
            // The pins as specified by https://github.com/devvvvs/vvvv/issues/5756
            var usedNames = new HashSet<string>()
            {
                "Output Size",
                "Output Format",
                "Output Texture",
                "Enabled",
                "Apply"
            };
            var _textureCount = 0;
            var _samplerCount = 0;
            // Texturing.Texture* parameters come first so they map to "Input", "Input 2", ...
            var parameters = GetParameters(_effect).OrderBy(p => p.Key.Name.StartsWith("Texturing.Texture") ? 0 : 1).ToList();
            //order sampler pins after their corresponding texture pins
            var texturingSamplerPins = new Dictionary<ParameterKeyInfo, int>();
            //find all samplers that have a corresponding texture
            int insertOffset = 0;
            foreach (var parameter in parameters)
            {
                if (parameter.Key.Name.StartsWith("Texturing.Sampler"))
                {
                    var texturePinIdx = parameters.IndexOf(p => p.Key.Name == parameter.Key.Name.Replace("Sampler", "Texture"));
                    if (texturePinIdx >= 0)
                    {
                        texturingSamplerPins.Add(parameter, texturePinIdx + insertOffset);
                        insertOffset++;
                    }
                }
            }
            //move the sampler pins after the corresponding texture pins
            foreach (var samplerPin in texturingSamplerPins)
            {
                parameters.Remove(samplerPin.Key);
                parameters.Insert(samplerPin.Value + 1, samplerPin.Key);
            }
            foreach (var parameter in parameters)
            {
                var key = parameter.Key;
                var name = key.Name;
                // Skip the matrix transform - we're drawing fullscreen
                if (key == SpriteBaseKeys.MatrixTransform)
                    continue;
                if (key.PropertyType == typeof(Texture))
                {
                    var pinName = "";
                    // Texture sources keep their parameter-derived names; filters/mixers
                    // expose "Input"/"Input N" pins for the Texturing.Texture* slots.
                    if (shaderMetadata.IsTextureSource && !key.Name.StartsWith("Texturing.Texture"))
                        pinName = key.GetPinName(usedNames);
                    else
                        pinName = ++_textureCount == 1 ? textureInputName : $"{textureInputName} {_textureCount}";
                    usedNames.Add(pinName);
                    _inputs.Add(new ParameterKeyPinDescription<Texture>(pinName, (ParameterKey<Texture>)key));
                }
                else
                {
                    var pinName = default(string); // Using null the name is based on the parameter name
                    var isOptional = false;
                    if (key.PropertyType == typeof(SamplerState) && key.Name.StartsWith("Texturing.Sampler"))
                    {
                        pinName = ++_samplerCount == 1 ? samplerInputName : $"{samplerInputName} {_samplerCount}";
                        usedNames.Add(pinName);
                        isOptional = true;
                    }
                    // also make other samplers from Texturing shader optional
                    else if (key.PropertyType == typeof(SamplerState) && key.Name.StartsWith("Texturing."))
                    {
                        isOptional = true;
                    }
                    var pinTypeInPatch = shaderMetadata.GetPinType(key, out var boxedDefaultValue);
                    shaderMetadata.GetPinDocuAndVisibility(key, out var summary, out var remarks, out var isOptionalAttr);
                    _inputs.Add(new ParameterPinDescription(usedNames, key, parameter.Count, name: pinName, defaultValue: boxedDefaultValue, typeInPatch: pinTypeInPatch) { IsVisible = !(isOptional || isOptionalAttr), Summary = summary, Remarks = remarks });
                }
            }
            IVLPinDescription _outputTextureInput, _enabledInput;
            _inputs.Add(
                _outputTextureInput = new PinDescription<Texture>("Output Texture")
                {
                    Summary = "The texture to render to. If not set, the node creates its own output texture based on the input texture.",
                    Remarks = "The provided texture must be a render target.",
                    IsVisible = false
                });
            _inputs.Add(_enabledInput = new PinDescription<bool>("Enabled", defaultValue: true));
            return buildContext.NewNode(
                inputs: _inputs,
                outputs: _outputs,
                messages: _messages,
                summary: shaderMetadata.Summary,
                remarks: shaderMetadata.Remarks,
                newNode: nodeBuildContext =>
                {
                    // Per-instance state: each node gets its own effect and parameters.
                    var gameHandle = nodeBuildContext.NodeContext.GetGameHandle();
                    var effect = new TextureFXEffect("TextureFXEffect") { Name = shaderName };
                    BuildBaseMixin(shaderName, shaderMetadata, graphicsDevice, out var textureFXEffectMixin, effect.Parameters);
                    //effect.Parameters.Set
                    var inputs = new List<IVLPin>();
                    var enabledInput = default(IVLPin);
                    var textureCount = 0;
                    foreach (var _input in _inputs)
                    {
                        // Handle the predefined pins first
                        if (_input == _outputTextureInput)
                        {
                            inputs.Add(nodeBuildContext.Input<Texture>(setter: t =>
                            {
                                if (t != null)
                                    effect.SetOutput(t);
                            }));
                        }
                        else if (_input == _enabledInput)
                            inputs.Add(enabledInput = nodeBuildContext.Input<bool>(v => effect.Enabled = v, effect.Enabled));
                        else if (_input is ParameterPinDescription parameterPinDescription)
                            inputs.Add(parameterPinDescription.CreatePin(graphicsDevice, effect.Parameters));
                        else if (_input is ParameterKeyPinDescription<Texture> textureInput)
                        {
                            if (textureInput.Key.Name.StartsWith("Texturing.Texture"))
                            {
                                // Texturing.Texture* slots are bound by index via SetInput;
                                // capture the slot number for the setter closure.
                                var slot = textureCount++;
                                inputs.Add(nodeBuildContext.Input<Texture>(setter: t =>
                                {
                                    effect.SetInput(slot, t);
                                }));
                            }
                            else
                            {
                                // Other texture parameters are set directly on the parameter collection.
                                inputs.Add(nodeBuildContext.Input<Texture>(setter: t =>
                                {
                                    effect.Parameters.SetObject(textureInput.Key, t);
                                }));
                            }
                        }
                    }
                    var compositionPins = inputs.OfType<ShaderFXPin>().ToList();
                    // The output pin refreshes the shader compositions before handing out the effect.
                    var effectOutput = ToOutput(nodeBuildContext, effect, () =>
                    {
                        UpdateCompositions(compositionPins, graphicsDevice, effect.Parameters, textureFXEffectMixin, effect.Subscriptions);
                    });
                    return nodeBuildContext.Node(
                        inputs: inputs,
                        outputs: new[] { effectOutput },
                        update: default,
                        dispose: () =>
                        {
                            effect.Dispose();
                            gameHandle.Dispose();
                        });
                },
                openEditor: openEditor
            );
        });
}
/// <summary>
/// Wraps a low-level shader node description into a user-facing TextureFX node.
/// Adds the standard "Output Size"/"Output Format"/"Render Format" pins, installs optional
/// per-texture mipmap management, and allocates (and double-buffers) the output render
/// target when the patch doesn't provide one via the "Output Texture" pin.
/// </summary>
/// <param name="factory">Factory used to create the new node description.</param>
/// <param name="shaderDescription">The underlying shader node description being wrapped.</param>
/// <param name="name">Name of the resulting node.</param>
/// <param name="shaderMetadata">Shader metadata driving pin setup (category, default size/formats, texture pins to manage).</param>
static IVLNodeDescription NewTextureFXNode(this IVLNodeDescriptionFactory factory, IVLNodeDescription shaderDescription, string name, ShaderMetadata shaderMetadata)
{
return factory.NewNodeDescription(
name: name,
category: shaderMetadata.GetCategory("Stride.Textures"),
tags: shaderMetadata.Tags,
fragmented: true,
invalidated: shaderDescription.Invalidated,
init: buildContext =>
{
const string Enabled = "Enabled";
// Start from the pins of the wrapped shader node and augment them below.
var _inputs = shaderDescription.Inputs.ToList();
var allTextureInputDescs = _inputs.OfType<ParameterKeyPinDescription<Texture>>().ToList();
var allTextureInputNames = allTextureInputDescs.Select(pd => pd.Key.GetVariableName()).ToList();
var texturePinsToManage = shaderMetadata.GetTexturePinsToManage(allTextureInputNames);
var hasTexturePinsToManage = texturePinsToManage.Count() > 0;
// A "filter or mixer" consumes input textures; a pure "source" generates one from scratch.
var isFilterOrMixer = !shaderMetadata.IsTextureSource;
shaderMetadata.GetOutputSize(out var defaultSize, out var outputSizeVisible);
shaderMetadata.GetPixelFormats(out var defaultFormat, out var defaultRenderFormat);
var _outputSize = new PinDescription<Int2>("Output Size", defaultSize) { IsVisible = outputSizeVisible };
var _outputFormat = new PinDescription<PixelFormat>("Output Format", defaultFormat) { IsVisible = false };
var _renderFormat = new PinDescription<PixelFormat>("Render Format", defaultRenderFormat) { IsVisible = false, Summary = "Allows to specify a render format that is differet to the output format" };
// mip manager pins
var wantsMips = shaderMetadata.WantsMips?.Count > 0;
if (wantsMips)
{
foreach (var textureName in shaderMetadata.WantsMips)
{
var texDesc = allTextureInputDescs.FirstOrDefault(p => p.Key.GetVariableName() == textureName);
if (texDesc != null)
{
var texIndex = _inputs.IndexOf(texDesc);
// Insert the "Always Generate Mips" toggle right after its texture pin.
_inputs.Insert(texIndex + 1, new PinDescription<bool>("Always Generate Mips for " + texDesc.Name, true)
{
Summary = "If true, mipmaps will be generated in every frame, if false only on change of the texture reference. If the texture has mipmaps, nothing will be done."
});
}
}
}
// Output size/format pins go before the trailing pins; offsets differ because a
// filter/mixer has one trailing pin (Apply) while a source has two.
if (isFilterOrMixer)
{
// Filter or Mixer
_inputs.Insert(_inputs.Count - 1, _outputSize);
_inputs.Insert(_inputs.Count - 1, _outputFormat);
_inputs.Insert(_inputs.Count - 1, _renderFormat);
// Replace Enabled with Apply
var _enabledPinIndex = _inputs.IndexOf(p => p.Name == Enabled);
if (_enabledPinIndex >= 0)
_inputs[_enabledPinIndex] = new PinDescription<bool>("Apply", defaultValue: true);
}
else
{
// Pure source
_inputs.Insert(_inputs.Count - 2, _outputSize);
_inputs.Insert(_inputs.Count - 2, _outputFormat);
_inputs.Insert(_inputs.Count - 2, _renderFormat);
}
return buildContext.NewNode(
inputs: _inputs,
outputs: new[] { buildContext.Pin("Output", typeof(Texture)) },
messages: shaderDescription.Messages,
summary: shaderMetadata.Summary,
remarks: shaderMetadata.Remarks,
newNode: nodeBuildContext =>
{
var nodeContext = nodeBuildContext.NodeContext;
// Instantiate the wrapped shader node; some of its pins are replaced below.
var shaderNode = shaderDescription.CreateInstance(nodeContext);
var inputs = shaderNode.Inputs.ToList();
var mipmapManager = new TextureInputPinsManager(nodeContext);
var shaderNodeInputs = shaderDescription.Inputs.ToList();
// install pin managers for mipmaps or inputs that should be read in sRGB space
if (hasTexturePinsToManage)
{
foreach (var textureToManage in texturePinsToManage)
{
var texDesc = allTextureInputDescs.FirstOrDefault(p => p.Key.GetVariableName() == textureToManage.textureName);
if (texDesc != null)
{
var texIndex = shaderNodeInputs.IndexOf(texDesc);
var shaderTexturePin = inputs.ElementAtOrDefault(texIndex) as IVLPin<Texture>;
if (shaderTexturePin != null)
{
var newTexturePin = nodeBuildContext.Input<Texture>();
// Replace this texture input with the new one
inputs[texIndex] = newTexturePin;
// Insert generate pin
IVLPin<bool> alwaysGeneratePin = null;
if (textureToManage.wantsMips)
{
alwaysGeneratePin = nodeBuildContext.Input(true);
inputs.Insert(texIndex + 1, alwaysGeneratePin);
shaderNodeInputs.Insert(texIndex + 1, new PinDescription<bool>("Always Generate Mips for " + texDesc.Name, true) { IsVisible = false }); //keep shader pin indices in sync
}
// Setup pin manager
mipmapManager.AddInput(newTexturePin, shaderTexturePin, alwaysGeneratePin, textureToManage.dontUnapplySRgb, profilerName: name + " " + texDesc.Name + " Mipmap Generator");
}
}
}
}
// Look up the well-known pins by name so they can be wired up below.
var textureInput = inputs.ElementAtOrDefault(shaderNodeInputs.IndexOf(p => p.Name == textureInputName));
var outputTextureInput = inputs.ElementAtOrDefault(shaderNodeInputs.IndexOf(p => p.Name == "Output Texture"));
var enabledInput = (IVLPin<bool>)inputs.ElementAt(shaderNodeInputs.IndexOf(p => p.Name == Enabled));
var outputSize = nodeBuildContext.Input(defaultSize);
var outputFormat = nodeBuildContext.Input(defaultFormat);
var renderFormat = nodeBuildContext.Input(defaultRenderFormat);
// Keep runtime pin order in sync with the pin descriptions set up in the init phase above.
if (isFilterOrMixer)
{
inputs.Insert(inputs.Count - 1, outputSize);
inputs.Insert(inputs.Count - 1, outputFormat);
inputs.Insert(inputs.Count - 1, renderFormat);
}
else
{
inputs.Insert(inputs.Count - 2, outputSize);
inputs.Insert(inputs.Count - 2, outputFormat);
inputs.Insert(inputs.Count - 2, renderFormat);
}
var gameHandle = nodeContext.GetGameHandle();
var game = gameHandle.Resource;
var scheduler = game.Services.GetService<SchedulerSystem>();
var graphicsDevice = game.GraphicsDevice;
// Remove this once FrameDelay can deal with textures properly
// Two output slots so the node can double-buffer when its own output is fed back as input.
var output1 = default(((Int2 size, PixelFormat format, PixelFormat renderFormat) desc, Texture texture, Texture view));
var output2 = default(((Int2 size, PixelFormat format, PixelFormat renderFormat) desc, Texture texture, Texture view));
var lastViewFormat = PixelFormat.None;
var usedRenderFormat = PixelFormat.None;
var mainOutput = nodeBuildContext.Output(getter: () =>
{
var inputTexture = textureInput?.Value as Texture;
if (!enabledInput.Value)
{
if (isFilterOrMixer)
return inputTexture; // By pass
else
return output1.texture; // Last result
}
var outputTexture = outputTextureInput.Value as Texture;
if (outputTexture is null)
{
// No output texture is provided, generate one
const TextureFlags textureFlags = TextureFlags.ShaderResource | TextureFlags.RenderTarget;
var desc = (size: defaultSize, format: defaultFormat, renderFormat: defaultRenderFormat);
if (inputTexture != null)
{
// Base it on the input texture
var viewFormat = inputTexture.ViewFormat;
// Figure out render format
if (!shaderMetadata.IsTextureSource && shaderMetadata.DontConvertToSRgbOnOnWrite)
{
if (viewFormat != lastViewFormat)
usedRenderFormat = viewFormat.ToNonSRgb();
lastViewFormat = viewFormat;
}
else
{
usedRenderFormat = PixelFormat.None; //same as view format
}
desc = (new Int2(inputTexture.ViewWidth, inputTexture.ViewHeight), viewFormat, usedRenderFormat);
// Watch out for feedback loops
if (inputTexture == output1.texture)
{
Utilities.Swap(ref output1, ref output2);
}
}
// Overwrite with user settings
if (outputSize.Value.X > 0)
desc.size.X = outputSize.Value.X;
if (outputSize.Value.Y > 0)
desc.size.Y = outputSize.Value.Y;
if (outputFormat.Value != PixelFormat.None)
{
desc.format = outputFormat.Value;
desc.renderFormat = outputFormat.Value;
}
if (renderFormat.Value != PixelFormat.None)
desc.renderFormat = renderFormat.Value;
// Ensure we have an output of proper size
if (desc != output1.desc)
{
output1.view?.Dispose();
output1.texture?.Dispose();
output1.desc = desc;
if (desc.format != PixelFormat.None && desc.size.X > 0 && desc.size.Y > 0)
{
// A size-compatible but different render format needs a typeless texture with
// two views: one in the output format, one in the render format.
if (desc.renderFormat != PixelFormat.None
&& desc.renderFormat != desc.format
&& desc.renderFormat.BlockSize() == desc.format.BlockSize()
&& desc.format.TryToTypeless(out var typelessFormat))
{
var td = TextureDescription.New2D(desc.size.X, desc.size.Y, typelessFormat, textureFlags);
var tvd = new TextureViewDescription() { Format = desc.format, Flags = textureFlags };
var rvd = new TextureViewDescription() { Format = desc.renderFormat, Flags = textureFlags };
output1.texture = Texture.New(graphicsDevice, td, tvd);
output1.view = output1.texture.ToTextureView(rvd);
}
else
{
output1.texture = Texture.New2D(graphicsDevice, desc.size.X, desc.size.Y, desc.format, textureFlags);
output1.view = output1.texture;
}
}
else
{
output1.texture = null;
output1.view = null;
}
}
}
else //output texture set by patch
{
output1.texture = outputTexture;
output1.view = outputTexture;
}
var effect = shaderNode.Outputs[0].Value as TextureFXEffect;
if (scheduler != null && effect != null && output1.texture != null)
{
effect.SetOutput(output1.view);
if (hasTexturePinsToManage)
{
mipmapManager.Update();
scheduler.Schedule(mipmapManager);
}
scheduler.Schedule(effect);
return output1.texture;
}
return null;
});
return nodeBuildContext.Node(
inputs: inputs,
outputs: new[] { mainOutput },
dispose: () =>
{
output1.view?.Dispose();
output1.texture?.Dispose();
output2.view?.Dispose();
output2.texture?.Dispose();
mipmapManager?.Dispose();
gameHandle.Dispose();
shaderNode.Dispose();
});
},
openEditor: () => shaderDescription.OpenEditor()
);
});
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Games/WindowExtensions.cs<|end_filename|>
using Stride.Games;
using System;
using System.Reflection;
namespace VL.Stride.Games
{
public static class WindowExtensions
{
    /// <summary>
    /// Attempts to bring the native SDL window behind the given <see cref="GameWindow"/> to the foreground.
    /// Uses reflection because Stride does not publicly expose the underlying SDL window field;
    /// silently does nothing if the type or field cannot be resolved.
    /// </summary>
    public static void BringToFront(this GameWindow window)
    {
        var windowField = Type.GetType("Stride.Games.GameWindowSDL, Stride.Games")?.GetField("window", BindingFlags.Instance | BindingFlags.NonPublic);
        if (windowField == null)
            return;
        if (windowField.GetValue(window) is GameFormSDL sdlWindow)
            sdlWindow.BringToFront();
    }
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Sources/Electricity_Internal_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
/// <summary>
/// Auto-generated parameter keys for the Electricity_Internal TextureFX shader.
/// The defaults mirror the values passed to <c>ParameterKeys.NewValue</c>; do not edit by hand,
/// re-save the associated shader file instead (see the file header).
/// </summary>
public static partial class Electricity_Internal_TextureFXKeys
{
public static readonly ValueParameterKey<float> Time = ParameterKeys.NewValue<float>();
public static readonly ObjectParameterKey<Texture> NoiseTexture = ParameterKeys.NewObject<Texture>();
public static readonly ValueParameterKey<Color4> BackgroundColor = ParameterKeys.NewValue<Color4>(new Color4(0.0f,0.0f,0.0f,1.0f));
public static readonly ValueParameterKey<Color4> ElectricityColor = ParameterKeys.NewValue<Color4>(new Color4(1.0f,1.0f,1.0f,1.0f));
public static readonly ValueParameterKey<float> Height = ParameterKeys.NewValue<float>(0.25f);
public static readonly ValueParameterKey<float> VertexNoise = ParameterKeys.NewValue<float>(0.5f);
public static readonly ValueParameterKey<float> GlowStrength = ParameterKeys.NewValue<float>(0.5f);
public static readonly ValueParameterKey<float> GlowFallOff = ParameterKeys.NewValue<float>(0.5f);
public static readonly ValueParameterKey<float> AmbientGlow = ParameterKeys.NewValue<float>(0.25f);
public static readonly ValueParameterKey<float> AmbientGlowHeightScale = ParameterKeys.NewValue<float>(0.5f);
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Materials/VLMaterialEmissiveFeature.cs<|end_filename|>
using Stride.Core.Mathematics;
using Stride.Rendering;
using Stride.Rendering.Materials;
using Stride.Shaders;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using VL.Stride.Shaders.ShaderFX;
using VL.Stride.Shaders.ShaderFX.Control;
namespace VL.Stride.Rendering.Materials
{
/// <summary>
/// An emissive material feature that can inject custom ShaderFX graphs into the material:
/// an optional vertex-stage addition, an optional pixel-stage addition and an optional
/// whole-material extension shader. Forwards to an inner <see cref="IMaterialEmissiveFeature"/> first.
/// </summary>
public class VLMaterialEmissiveFeature : IMaterialEmissiveFeature
{
/// <summary>Shader graph appended to the vertex stage (optional).</summary>
public IComputeNode VertexAddition { get; set; }
/// <summary>Shader graph appended to the pixel stage (optional).</summary>
public IComputeNode PixelAddition { get; set; }
/// <summary>Shader graph set as the material extension shader (optional).</summary>
public IComputeNode MaterialExtension { get; set; }
/// <summary>Inner emissive feature that is visited before the custom additions (optional).</summary>
public IMaterialEmissiveFeature MaterialEmissiveFeature { get; set; }
/// <summary>When false, only the inner feature is visited and no custom shaders are added.</summary>
public bool Enabled { get; set; } = true;
// NOTE(review): reports equality with MaterialEmissiveMapFeature rather than with this type —
// presumably so the material system treats this as the standard emissive shading model; confirm.
public bool Equals(IMaterialShadingModelFeature other)
{
return other is MaterialEmissiveMapFeature;
}
public void Visit(MaterialGeneratorContext context)
{
// Let the wrapped feature contribute first.
MaterialEmissiveFeature?.Visit(context);
if (Enabled && context.Step == MaterialGeneratorStep.GenerateShader)
{
AddMaterialExtension(context);
if (VertexAddition != null)
{
AddVertexAddition(MaterialShaderStage.Vertex, context);
//context.AddFinalCallback(MaterialShaderStage.Vertex, AddVertexAddition);
}
if (PixelAddition != null)
{
AddPixelAddition(MaterialShaderStage.Pixel, context);
//context.AddFinalCallback(MaterialShaderStage.Pixel, AddPixelAddition);
}
}
}
// Generates the extension shader source, wires up ShaderFX composition pins (if the
// extension is a ShaderFX node) and stores the result in the VLEffect parameters.
void AddMaterialExtension(MaterialGeneratorContext context)
{
var enableExtension = MaterialExtension != null;
if (enableExtension)
{
var ext = MaterialExtension;
if (ext is IShaderFXNode node) // check for ShaderFX node
{
// Generate the shader source of each composition input before the extension itself.
var compositionPins = node.InputPins;
var baseKeys = new MaterialComputeColorKeys(MaterialKeys.DiffuseMap, MaterialKeys.DiffuseValue, Color.White);
for (int i = 0; i < compositionPins.Count; i++)
{
var cp = compositionPins[i];
cp?.GenerateAndSetShaderSource(context, baseKeys);
}
}
var shaderSource = MaterialExtension.GenerateShaderSource(context, new MaterialComputeColorKeys(MaterialKeys.DiffuseMap, MaterialKeys.DiffuseValue, Color.White));
if (shaderSource is ShaderMixinSource mixinSource)
{
if (ext is IShaderFXNode node2) // check for ShaderFX node
{
// Attach the generated composition class codes to the mixin.
var compositionPins = node2.InputPins;
for (int i = 0; i < compositionPins.Count; i++)
{
var cp = compositionPins[i];
var shader = context.Parameters.Get(cp.Key);
if (shader is ShaderSource classCode)
mixinSource.AddComposition(cp.Key.Name, classCode);
}
}
}
context.Parameters.Set(VLEffectParameters.EnableExtensionShader, enableExtension);
context.Parameters.Set(VLEffectParameters.MaterialExtensionShader, shaderSource);
}
}
// Generates and registers the vertex-stage shader addition.
void AddVertexAddition(MaterialShaderStage stage, MaterialGeneratorContext context)
{
var shaderSource = VertexAddition.GenerateShaderSource(context, new MaterialComputeColorKeys(MaterialKeys.DiffuseMap, MaterialKeys.DiffuseValue, Color.White));
context.AddShaderSource(MaterialShaderStage.Vertex, shaderSource);
}
// Generates and registers the pixel-stage shader addition.
void AddPixelAddition(MaterialShaderStage stage, MaterialGeneratorContext context)
{
var shaderSource = PixelAddition.GenerateShaderSource(context, new MaterialComputeColorKeys(MaterialKeys.DiffuseMap, MaterialKeys.DiffuseValue, Color.White));
context.AddShaderSource(MaterialShaderStage.Pixel, shaderSource);
}
//// takes care of the composition inputs of the connected node
//private static bool UpdateCompositions(IReadOnlyList<ShaderFXPin> compositionPins, GraphicsDevice graphicsDevice, ParameterCollection parameters, ShaderMixinSource mixin, CompositeDisposable subscriptions)
//{
//    var anyChanged = false;
//    for (int i = 0; i < compositionPins.Count; i++)
//    {
//        anyChanged |= compositionPins[i].ShaderSourceChanged;
//    }
//    if (anyChanged)
//    {
//        // Disposes all current subscriptions. So for example all data bindings between the sources and our parameter collection
//        // gets removed.
//        subscriptions.Clear();
//        var context = ShaderGraph.NewShaderGeneratorContext(graphicsDevice, parameters, subscriptions);
//        var updatedMixin = new ShaderMixinSource();
//        updatedMixin.DeepCloneFrom(mixin);
//        parameters.Set(EffectNodeBaseKeys.EffectNodeBaseShader, updatedMixin);
//        return true;
//    }
//    return false;
//}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Input/InputExtensions.cs<|end_filename|>
using Stride.Core;
using Stride.Core.Mathematics;
using Stride.Input;
using Stride.Rendering;
namespace VL.Stride.Input
{
public static class InputExtensions
{
    /// <summary>
    /// A property key to get the window input source from the <see cref="ComponentBase.Tags"/>.
    /// </summary>
    public static readonly PropertyKey<IInputSource> WindowInputSource = new PropertyKey<IInputSource>("WindowInputSource", typeof(IInputSource));

    /// <summary>
    /// Stores the given input source in the tags of the render context and returns the context for chaining.
    /// </summary>
    public static RenderContext SetWindowInputSource(this RenderContext input, IInputSource inputSource)
    {
        input.Tags.Set(WindowInputSource, inputSource);
        return input;
    }

    /// <summary>
    /// Retrieves the input source previously stored on the render context.
    /// </summary>
    public static IInputSource GetWindowInputSource(this RenderContext input)
    {
        return input.Tags.Get(WindowInputSource);
    }

    /// <summary>
    /// Scans the devices of the input source and reports the mouse, keyboard and pointer device
    /// (null when absent). Each device is assigned to the first matching category only, so a mouse
    /// is not also reported as a pointer. Returns the input source itself for fluent usage.
    /// </summary>
    public static IInputSource GetDevices(this IInputSource inputSource, out IMouseDevice mouseDevice, out IKeyboardDevice keyboardDevice, out IPointerDevice pointerDevice)
    {
        mouseDevice = null;
        keyboardDevice = null;
        pointerDevice = null;
        if (inputSource == null)
            return inputSource;
        foreach (var entry in inputSource.Devices)
        {
            // Case order matters: it mirrors the original mouse > keyboard > pointer priority.
            switch (entry.Value)
            {
                case IMouseDevice mouse:
                    mouseDevice = mouse;
                    break;
                case IKeyboardDevice keyboard:
                    keyboardDevice = keyboard;
                    break;
                case IPointerDevice pointer:
                    pointerDevice = pointer;
                    break;
            }
        }
        return inputSource;
    }

    /// <summary>
    /// Propagates the given surface size to every pointer device of the input source.
    /// </summary>
    public static void UpdateSurfaceArea(this IInputSource inputSource, Vector2 size)
    {
        if (inputSource == null)
            return;
        foreach (var entry in inputSource.Devices)
        {
            if (entry.Value is IPointerDevice pointer)
                pointer.UpdateSurfaceArea(size);
        }
    }

    /// <summary>
    /// Sets the surface size of a single pointer device. Uses reflection because
    /// <c>PointerDeviceBase.SetSurfaceSize</c> is not publicly accessible in Stride.
    /// </summary>
    public static void UpdateSurfaceArea(this IPointerDevice pointer, Vector2 size)
    {
        if (pointer == null)
            return;
        var setSurfaceSize = typeof(PointerDeviceBase).GetMethod("SetSurfaceSize", System.Reflection.BindingFlags.Instance | System.Reflection.BindingFlags.NonPublic);
        setSurfaceSize.Invoke(pointer, new object[] { size });
    }
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Filters/Distortion/Bump_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
/// <summary>
/// Auto-generated parameter keys for the Bump TextureFX shader.
/// The defaults mirror the values passed to <c>ParameterKeys.NewValue</c>; do not edit by hand,
/// re-save the associated shader file instead (see the file header).
/// </summary>
public static partial class Bump_TextureFXKeys
{
public static readonly ValueParameterKey<int> Type = ParameterKeys.NewValue<int>();
public static readonly ValueParameterKey<Vector3> LightDirection = ParameterKeys.NewValue<Vector3>(new Vector3(0.25f,-0.5f,0.1f));
public static readonly ValueParameterKey<Color4> AmbientColor = ParameterKeys.NewValue<Color4>(new Color4(0.15f,0.15f,0.15f,1.0f));
public static readonly ValueParameterKey<Color4> DiffuseColor = ParameterKeys.NewValue<Color4>(new Color4(0.85f,0.85f,0.85f,1.0f));
public static readonly ValueParameterKey<Color4> SpecularColor = ParameterKeys.NewValue<Color4>(new Color4(0.35f,0.35f,0.35f,1.0f));
public static readonly ValueParameterKey<float> LightPower = ParameterKeys.NewValue<float>(25.0f);
public static readonly ValueParameterKey<float> LightRange = ParameterKeys.NewValue<float>(10.0f);
public static readonly ValueParameterKey<float> LightAttenuation0 = ParameterKeys.NewValue<float>(0.0f);
public static readonly ValueParameterKey<float> LightAttenuation1 = ParameterKeys.NewValue<float>(0.3f);
public static readonly ValueParameterKey<float> LightAttenuation2 = ParameterKeys.NewValue<float>(0.0f);
public static readonly ValueParameterKey<float> SmoothSpot = ParameterKeys.NewValue<float>(0.1f);
public static readonly ValueParameterKey<float> BumpAmount = ParameterKeys.NewValue<float>(1.0f);
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Filters/Depth/LinearDepth_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
/// <summary>
/// Auto-generated parameter keys for the LinearDepth TextureFX shader.
/// The defaults mirror the values passed to <c>ParameterKeys.NewValue</c>; do not edit by hand,
/// re-save the associated shader file instead (see the file header).
/// </summary>
public static partial class LinearDepth_TextureFXKeys
{
public static readonly ValueParameterKey<Matrix> Projection = ParameterKeys.NewValue<Matrix>();
public static readonly ValueParameterKey<float> MinDepth = ParameterKeys.NewValue<float>(0.0f);
public static readonly ValueParameterKey<float> MaxDepth = ParameterKeys.NewValue<float>(10.0f);
public static readonly ValueParameterKey<bool> Mapped = ParameterKeys.NewValue<bool>(true);
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Initialization.cs<|end_filename|>
using Stride.Graphics;
using Stride.Input;
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using VL.Core;
using VL.Core.CompilerServices;
using VL.Lib.Basics.Resources;
using VL.Stride.Engine;
using VL.Stride.Graphics;
using VL.Stride.Rendering;
using VL.Stride.Rendering.Compositing;
using VL.Stride.Rendering.Lights;
using VL.Stride.Rendering.Materials;
[assembly: AssemblyInitializer(typeof(VL.Stride.Core.Initialization))]
namespace VL.Stride.Core
{
/// <summary>
/// Assembly initializer for VL.Stride: registers the per-node-context services
/// (graphics device, graphics context, input manager) and all static node factories.
/// </summary>
public sealed class Initialization : AssemblyInitializer<Initialization>
{
protected override void RegisterServices(IVLFactory factory)
{
// Graphics device
factory.RegisterService<NodeContext, IResourceProvider<GraphicsDevice>>(nodeContext =>
{
var gameProvider = nodeContext.GetGameProvider();
return gameProvider.Bind(game => ResourceProvider.Return(game.GraphicsDevice));
});
// Graphics context
factory.RegisterService<NodeContext, IResourceProvider<GraphicsContext>>(nodeContext =>
{
var gameProvider = nodeContext.GetGameProvider();
return gameProvider.Bind(game => ResourceProvider.Return(game.GraphicsContext));
});
// Input manager
factory.RegisterService<NodeContext, IResourceProvider<InputManager>>(nodeContext =>
{
var gameProvider = nodeContext.GetGameProvider();
return gameProvider.Bind(game => ResourceProvider.Return(game.Input));
});
RegisterNodeFactories(factory);
}
void RegisterNodeFactories(IVLFactory services)
{
// Use our own static node factory cache to manage the lifetime of our factories. The cache provided by VL itself is only per compilation.
// The node factory cache will invalidate itself in case a factory or one of its nodes invalidates.
// Not doing so can cause the hotswap to exchange nodes thereby causing weird crashes when for example
// one of those nodes being re-created is the graphics compositor.
RegisterStaticNodeFactory(services, "VL.Stride.Graphics.Nodes", nodeFactory =>
{
return GraphicsNodes.GetNodeDescriptions(nodeFactory);
});
RegisterStaticNodeFactory(services, "VL.Stride.Rendering.Nodes", nodeFactory =>
{
return MaterialNodes.GetNodeDescriptions(nodeFactory)
.Concat(LightNodes.GetNodeDescriptions(nodeFactory))
.Concat(CompositingNodes.GetNodeDescriptions(nodeFactory))
.Concat(RenderingNodes.GetNodeDescriptions(nodeFactory));
});
RegisterStaticNodeFactory(services, "VL.Stride.Engine.Nodes", nodeFactory =>
{
return EngineNodes.GetNodeDescriptions(nodeFactory)
.Concat(PhysicsNodes.GetNodeDescriptions(nodeFactory))
.Concat(VRNodes.GetNodeDescriptions(nodeFactory))
;
});
RegisterStaticNodeFactory(services, "VL.Stride.Rendering.EffectShaderNodes", init: EffectShaderNodes.Init);
}
// Convenience overload: wraps a plain node description enumeration into a factory implementation.
void RegisterStaticNodeFactory(IVLFactory services, string name, Func<IVLNodeDescriptionFactory, IEnumerable<IVLNodeDescription>> init)
{
RegisterStaticNodeFactory(services, name, nodeFactory => NodeBuilding.NewFactoryImpl(init(nodeFactory).ToImmutableArray()));
}
// Creates (or reuses from the static cache) a node factory and registers it with the services.
void RegisterStaticNodeFactory(IVLFactory services, string name, Func<IVLNodeDescriptionFactory, NodeBuilding.FactoryImpl> init)
{
var cachedFactory = staticCache.GetOrAdd(name, () => NodeBuilding.NewNodeFactory(services, name, init));
services.RegisterNodeFactory(cachedFactory);
}
// Process-wide cache keeping factories alive across VL compilations (see comment in RegisterNodeFactories).
static readonly NodeFactoryCache staticCache = new NodeFactoryCache();
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Filters/ColorManipulation/Colorize_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
/// <summary>
/// Auto-generated parameter keys for the Colorize TextureFX shader.
/// The defaults mirror the values passed to <c>ParameterKeys.NewValue</c>; do not edit by hand,
/// re-save the associated shader file instead (see the file header).
/// </summary>
public static partial class Colorize_TextureFXKeys
{
public static readonly ValueParameterKey<Color4> ColorA = ParameterKeys.NewValue<Color4>(new Color4(1.0f,0.0f,0.0f,1.0f));
public static readonly ValueParameterKey<Color4> ColorB = ParameterKeys.NewValue<Color4>(new Color4(0.0f,1.0f,0.0f,1.0f));
public static readonly ValueParameterKey<Color4> ColorC = ParameterKeys.NewValue<Color4>(new Color4(1.0f,0.0f,1.0f,1.0f));
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/ComputeEffect/VLComputeEffectShader.cs<|end_filename|>
using Stride.Core;
using Stride.Core.Diagnostics;
using Stride.Core.Mathematics;
using Stride.Graphics;
using Stride.Rendering;
using Stride.Rendering.ComputeEffect;
using Stride.Shaders;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reactive.Disposables;
using System.Reflection;
using VL.Lib.Control;
using Buffer = Stride.Graphics.Buffer;
namespace VL.Stride.Rendering.ComputeEffect
{
/// <summary>
/// A compute effect allowing to customize the dispatch method through <see cref="IComputeEffectDispatcher"/>.
/// </summary>
public class VLComputeEffectShader : DrawEffect
{
// Accessors for the well-known engine parameters, resolved once in InitializeCore.
PerFrameParameters[] perFrameParams;
PerViewParameters[] perViewParams;
PerDrawParameters[] perDrawParams;
TexturingParameters[] texturingParams;
// Cache of profiling keys shared across all instances, one per effect name.
// NOTE(review): access is not synchronized — presumably instances are only created on one thread; confirm.
static Dictionary<string, ProfilingKey> profilingKeys = new Dictionary<string, ProfilingKey>();
ProfilingKey profilingKey;
private MutablePipelineState pipelineState;
private bool pipelineStateDirty = true;
private EffectBytecode previousBytecode;
// After a failed compile, further compile attempts are postponed until this point in time.
private TimeSpan FRefreshTime;
private bool FCompiled;
/// <summary>Message of the most recent compilation/update error; empty after a successful compile.</summary>
public string LastError { get; private set; }
/// <summary>
/// Creates the shader. When <paramref name="mixinParams"/> is given it replaces the default parameter collection.
/// </summary>
public VLComputeEffectShader(RenderContext context, string name, ParameterCollection mixinParams = null)
: base(name)
{
if (mixinParams != null)
Parameters = mixinParams;
// Tie the subscriptions' lifetime to this effect.
Subscriptions.DisposeBy(this);
Initialize(context);
}
internal readonly CompositeDisposable Subscriptions = new CompositeDisposable();
/// <summary>
/// The current effect instance.
/// </summary>
public DynamicEffectInstance EffectInstance { get; private set; }
/// <summary>
/// Gets or sets the number of threads per thread group.
/// This will be set as the [numthreads(X, Y, Z)] attribute in the shader.
/// </summary>
public Int3 ThreadGroupSize { get; set; }
/// <summary>
/// Gets or sets the dispatcher.
/// </summary>
public IComputeEffectDispatcher Dispatcher { get; set; }
protected override void InitializeCore()
{
base.InitializeCore();
pipelineState = new MutablePipelineState(Context.GraphicsDevice);
// Setup the effect compiler
EffectInstance = new DynamicEffectInstance("ComputeFXEffect", Parameters);
// We give ComputeEffectShader a higher priority, since they are usually executed serially and blocking
EffectInstance.EffectCompilerParameters.TaskPriority = -1;
Parameters.Set(ComputeEffectShaderKeys.ThreadNumbers, new Int3(1));
EffectInstance.Initialize(Context.Services);
EffectInstance.UpdateEffect(Context.GraphicsDevice);
// Cache the well-known parameter accessors of the compiled effect.
perFrameParams = EffectInstance.Parameters.GetWellKnownParameters(WellKnownParameters.PerFrameMap).ToArray();
perViewParams = EffectInstance.Parameters.GetWellKnownParameters(WellKnownParameters.PerViewMap).ToArray();
perDrawParams = EffectInstance.Parameters.GetWellKnownParameters(WellKnownParameters.PerDrawMap).ToArray();
texturingParams = EffectInstance.Parameters.GetTexturingParameters().ToArray();
// Reuse one profiling key per effect name.
if (!profilingKeys.TryGetValue(Name, out profilingKey))
{
profilingKey = new ProfilingKey(Name);
profilingKeys[Name] = profilingKey;
}
}
/// <summary>
/// Sets the default parameters (called at constructor time and if <see cref="DrawEffect.Reset"/> is called)
/// </summary>
protected override void SetDefaultParameters()
{
ThreadGroupSize = new Int3(1);
}
protected override void PreDrawCore(RenderDrawContext context)
{
base.PreDrawCore(context);
// Default handler for parameters
UpdateParameters(context);
}
/// <summary>
/// Updates the effect <see cref="DrawEffect.Parameters"/> from properties defined in this instance.
/// </summary>
protected virtual void UpdateParameters(RenderDrawContext context)
{
Parameters.Set(ComputeEffectShaderKeys.ThreadNumbers, ThreadGroupSize);
Parameters.SetPerFrameParameters(perFrameParams, context.RenderContext);
var renderView = context.RenderContext.RenderView;
var parentTransformation = context.RenderContext.Tags.Get(EntityRendererRenderFeature.CurrentParentTransformation);
// Combine the world matrix (if any) with the parent transformation for the per-draw parameters.
if (Parameters.ContainsKey(TransformationKeys.World))
{
var world = Parameters.Get(TransformationKeys.World);
Matrix.Multiply(ref world, ref parentTransformation, out var result);
Parameters.SetPerDrawParameters(perDrawParams, renderView, ref result);
}
else
{
Parameters.SetPerDrawParameters(perDrawParams, renderView, ref parentTransformation);
}
Parameters.SetPerViewParameters(perViewParams, renderView);
Parameters.SetTexturingParameters(texturingParams);
// Let the dispatcher contribute its own parameters.
Dispatcher?.UpdateParameters(Parameters, ThreadGroupSize);
}
// Parameter keys of potential UAV resources that must be unset after each dispatch.
List<ParameterKey> uavs = new List<ParameterKey>();
protected override void DrawCore(RenderDrawContext context)
{
// Skip while unnamed or while waiting out the retry delay after a failed compile.
if (string.IsNullOrEmpty(Name) || FRefreshTime > context.RenderContext.Time.Total)
return;
using (Profiler.Begin(profilingKey))
using (context.PushRenderTargetsAndRestore())
{
var effectUpdated = false;
try
{
effectUpdated = EffectInstance.UpdateEffect(GraphicsDevice);
FCompiled = true;
LastError = string.Empty;
}
catch (Exception e)
{
LastError = e.InnermostException().Message;
FCompiled = false;
// Back off for a few seconds before retrying the compilation.
FRefreshTime = context.RenderContext.Time.Total + TimeSpan.FromSeconds(3);
}
if (!FCompiled)
return;
try
{
if (effectUpdated || pipelineStateDirty || previousBytecode != EffectInstance.Effect.Bytecode)
{
// The EffectInstance might have been updated from outside
previousBytecode = EffectInstance.Effect.Bytecode;
pipelineState.State.SetDefaults();
pipelineState.State.RootSignature = EffectInstance.RootSignature;
pipelineState.State.EffectBytecode = EffectInstance.Effect.Bytecode;
pipelineState.Update();
pipelineStateDirty = false;
// get potential UAVs to unset
uavs.Clear();
var parameters = EffectInstance.Parameters;
if (parameters.HasLayout)
{
uavs.AddRange(parameters.Layout.LayoutParameterKeyInfos
.Where(p => typeof(Buffer).IsAssignableFrom(p.Key.PropertyType) || typeof(Texture).IsAssignableFrom(p.Key.PropertyType))
.Select(k => k.Key));
}
}
}
catch (Exception e)
{
LastError = e.InnermostException().Message;
FCompiled = false;
FRefreshTime = context.RenderContext.Time.Total + TimeSpan.FromSeconds(3);
return;
}
// Clear render targets
context.CommandList.SetRenderTargets(null);
// Apply pipeline state
context.CommandList.SetPipelineState(pipelineState.CurrentState);
// Apply the effect
EffectInstance.Apply(context.GraphicsContext);
// Dispatch
Dispatcher?.Dispatch(context);
// Unset UAV
for (int i = 0; i < uavs.Count; i++)
{
UnsetUAV(context.CommandList, EffectInstance.Parameters, uavs[i]);
}
}
}
// Cached reflection handle for CommandList.UnsetUnorderedAccessView (non-public API).
MethodInfo unsetUAV;
object[] unsetUAVArg = new object[1];
// Unbinds the given resource from the command list if it is a buffer or texture with unordered access.
void UnsetUAV (CommandList commandList, ParameterCollection parameters, ParameterKey resourceKey)
{
var gr = parameters?.GetObject(resourceKey);
GraphicsResource resource = null;
if (gr is Buffer b)
{
if ((b.ViewFlags & BufferFlags.UnorderedAccess) != 0)
resource = b;
}
else if (gr is Texture t)
{
if ((t.ViewFlags & TextureFlags.UnorderedAccess) != 0)
resource = t;
}
if (resource != null)
{
// Resolve the non-public method once, then reuse it (and the argument array) on later calls.
unsetUAV ??= typeof(CommandList).GetMethod("UnsetUnorderedAccessView", BindingFlags.NonPublic | BindingFlags.Instance);
unsetUAVArg[0] = resource;
unsetUAV.Invoke(commandList, unsetUAVArg);
}
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Filters/Patterns/Scanline_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
// Parameter keys for the Scanline TextureFX shader. This class is auto-generated
// (see the file header) — regenerate via the Stride shader mixin code generator
// instead of editing by hand.
public static partial class Scanline_TextureFXKeys
{
// Overall brightness multiplier; defaults to 1 (unchanged).
public static readonly ValueParameterKey<float> Brightness = ParameterKeys.NewValue<float>(1);
// Scanline amount per axis; defaults to (1, 1).
public static readonly ValueParameterKey<Vector2> Amount = ParameterKeys.NewValue<Vector2>(new Vector2(1,1));
// Per-channel UV shift offsets; default to zero (no shift).
public static readonly ValueParameterKey<Vector2> RedShift = ParameterKeys.NewValue<Vector2>();
public static readonly ValueParameterKey<Vector2> GreenShift = ParameterKeys.NewValue<Vector2>();
public static readonly ValueParameterKey<Vector2> BlueShift = ParameterKeys.NewValue<Vector2>();
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Graphics/TextureExtensions.cs<|end_filename|>
using System;
using System.Collections.Immutable;
using System.IO;
using System.Runtime.CompilerServices;
using VL.Lib.Basics.Imaging;
using VL.Lib.Collections;
using Stride.Graphics;
using Buffer = Stride.Graphics.Buffer;
using StridePixelFormat = Stride.Graphics.PixelFormat;
using VLPixelFormat = VL.Lib.Basics.Imaging.PixelFormat;
using Stride.Core;
namespace VL.Stride.Graphics
{
/// <summary>
/// Extension and helper methods for working with <see cref="Texture"/> and for
/// converting between Stride and VL pixel formats.
/// </summary>
public static class TextureExtensions
{
    /// <summary>
    /// Tries to map a pixel format onto its typeless equivalent
    /// (e.g. R8G8B8A8_UNorm -> R8G8B8A8_Typeless) by name.
    /// </summary>
    /// <param name="format">The format to convert.</param>
    /// <param name="typelessFormat">The typeless variant, or <paramref name="format"/> itself when no variant exists.</param>
    /// <returns>True when a typeless variant was found.</returns>
    public static bool TryToTypeless(this StridePixelFormat format, out StridePixelFormat typelessFormat)
    {
        typelessFormat = format;
        // Enum.GetName returns null for values not declared on the enum —
        // guard before any string manipulation to avoid a NullReferenceException.
        var formatString = Enum.GetName(typeof(StridePixelFormat), format);
        if (formatString != null)
        {
            var idx = formatString.IndexOf('_');
            if (idx > 0)
            {
                formatString = formatString.Remove(idx);
                formatString += "_Typeless";
                if (Enum.TryParse<StridePixelFormat>(formatString, out var newFormat))
                {
                    typelessFormat = newFormat;
                    return true;
                }
            }
        }
        return false;
    }

    /// <summary>
    /// Copies the <paramref name="fromData"/> to the given <paramref name="texture"/> on GPU memory.
    /// </summary>
    /// <typeparam name="TData">The type of the T data.</typeparam>
    /// <param name="texture"></param>
    /// <param name="commandList">The <see cref="CommandList"/>.</param>
    /// <param name="fromData">The data to copy from.</param>
    /// <param name="arraySlice"></param>
    /// <param name="mipSlice"></param>
    /// <param name="region"></param>
    /// <exception cref="ArgumentException"></exception>
    /// <remarks>
    /// See the unmanaged documentation about Map/UnMap for usage and restrictions.
    /// </remarks>
    /// <returns>The GPU buffer.</returns>
    public static unsafe Texture SetData<TData>(this Texture texture, CommandList commandList, Spread<TData> fromData, int arraySlice, int mipSlice, ResourceRegion? region) where TData : struct
    {
        // Reinterpret the spread's backing ImmutableArray as a plain array to
        // avoid a copy; relies on ImmutableArray<T> being a thin array wrapper.
        var immutableArray = fromData._array;
        var array = Unsafe.As<ImmutableArray<TData>, TData[]>(ref immutableArray);
        texture.SetData(commandList, array, arraySlice, mipSlice, region);
        return texture;
    }

    /// <summary>
    /// Uploads the pixel data of an <see cref="IImage"/> into the given texture.
    /// The image memory is pinned only for the duration of the upload.
    /// </summary>
    public static unsafe Texture SetDataFromIImage(this Texture texture, CommandList commandList, IImage image, int arraySlice, int mipSlice, ResourceRegion? region)
    {
        using (var data = image.GetData())
        using (var handle = data.Bytes.Pin())
        {
            var dp = new DataPointer(handle.Pointer, data.Bytes.Length);
            texture.SetData(commandList, dp, arraySlice, mipSlice, region);
        }
        return texture;
    }

    /// <summary>
    /// Uploads the bytes of an <see cref="IGraphicsDataProvider"/> into the given
    /// texture. No-op when either argument is null.
    /// </summary>
    public static unsafe Texture SetDataFromProvider(this Texture texture, CommandList commandList, IGraphicsDataProvider data, int arraySlice = 0, int mipSlice = 0, ResourceRegion? region = null)
    {
        if (texture != null && data != null)
        {
            using (var handle = data.Pin())
            {
                texture.SetData(commandList, new DataPointer(handle.Pointer, data.SizeInBytes), arraySlice, mipSlice, region);
            }
        }
        return texture;
    }

    /// <summary>
    /// Similiar to <see cref="Texture.Load(GraphicsDevice, Stream, TextureFlags, GraphicsResourceUsage, bool)"/> but allocates memory on unmanaged heap only.
    /// </summary>
    public static unsafe Texture Load(GraphicsDevice device, string file, TextureFlags textureFlags = TextureFlags.ShaderResource, GraphicsResourceUsage usage = GraphicsResourceUsage.Immutable, bool loadAsSRGB = false)
    {
        using var src = File.OpenRead(file);
        var ptr = Utilities.AllocateMemory((int)src.Length);
        using var dst = new UnmanagedMemoryStream((byte*)ptr.ToPointer(), 0, (int)src.Length, FileAccess.ReadWrite);
        src.CopyTo(dst);
        var dataBuffer = new DataPointer(ptr, (int)dst.Length);
        // makeACopy: false hands ownership of the unmanaged buffer to the image.
        using var image = Image.Load(dataBuffer, makeACopy: false, loadAsSRGB: loadAsSRGB);
        return Texture.New(device, image, textureFlags, usage);
    }

    /// <summary>
    /// Reads the texture back from the GPU and saves it to disk in the given format.
    /// </summary>
    public static void SaveTexture(this Texture texture, CommandList commandList, string filename, ImageFileType imageFileType = ImageFileType.Png)
    {
        using (var image = texture.GetDataAsImage(commandList))
        {
            using (var resultFileStream = File.OpenWrite(filename))
            {
                image.Save(resultFileStream, imageFileType);
            }
        }
    }

    /// <summary>
    /// Maps a VL image format to the corresponding Stride pixel format.
    /// </summary>
    /// <param name="info">The VL image info carrying the source format.</param>
    /// <param name="isSRgb">Whether to prefer the sRGB variant where one exists.</param>
    /// <exception cref="UnsupportedPixelFormatException">Thrown for formats with no Stride equivalent.</exception>
    public static StridePixelFormat GetStridePixelFormat(ImageInfo info, bool isSRgb = true)
    {
        var format = info.Format;
        switch (format)
        {
            case VLPixelFormat.Unknown:
                return StridePixelFormat.None;
            case VLPixelFormat.R8:
                return StridePixelFormat.R8_UNorm;
            case VLPixelFormat.R16:
                return StridePixelFormat.R16_UNorm;
            case VLPixelFormat.R32F:
                return StridePixelFormat.R32_Float;
            case VLPixelFormat.R8G8B8X8:
                return isSRgb ? StridePixelFormat.R8G8B8A8_UNorm_SRgb : StridePixelFormat.R8G8B8A8_UNorm;
            case VLPixelFormat.R8G8B8A8:
                return isSRgb ? StridePixelFormat.R8G8B8A8_UNorm_SRgb : StridePixelFormat.R8G8B8A8_UNorm;
            case VLPixelFormat.B8G8R8X8:
                return isSRgb ? StridePixelFormat.B8G8R8X8_UNorm_SRgb : StridePixelFormat.B8G8R8X8_UNorm;
            case VLPixelFormat.B8G8R8A8:
                return isSRgb ? StridePixelFormat.B8G8R8A8_UNorm_SRgb : StridePixelFormat.B8G8R8A8_UNorm;
            case VLPixelFormat.R32G32F:
                return StridePixelFormat.R32G32_Float;
            case VLPixelFormat.R16G16B16A16F:
                return StridePixelFormat.R16G16B16A16_Float;
            case VLPixelFormat.R32G32B32A32F:
                return StridePixelFormat.R32G32B32A32_Float;
            default:
                throw new UnsupportedPixelFormatException(format);
        }
    }

    /// <summary>
    /// Maps a texture's Stride pixel format to the corresponding VL image format.
    /// </summary>
    /// <param name="texture">The texture to inspect; null yields Unknown.</param>
    /// <param name="isSRgb">Set to true when the texture format is an sRGB variant.</param>
    public static VLPixelFormat GetVLImagePixelFormat(Texture texture, out bool isSRgb)
    {
        isSRgb = false;
        if (texture == null)
            return VLPixelFormat.Unknown;

        var format = texture.Format;
        switch (format)
        {
            case StridePixelFormat.None:
                return VLPixelFormat.Unknown;
            case StridePixelFormat.R8_UNorm:
                return VLPixelFormat.R8;
            case StridePixelFormat.R16_UNorm:
                return VLPixelFormat.R16;
            case StridePixelFormat.R32_Float:
                return VLPixelFormat.R32F;
            case StridePixelFormat.R8G8B8A8_UNorm:
                return VLPixelFormat.R8G8B8A8;
            case StridePixelFormat.R8G8B8A8_UNorm_SRgb:
                isSRgb = true;
                return VLPixelFormat.R8G8B8A8;
            case StridePixelFormat.B8G8R8X8_UNorm:
                return VLPixelFormat.B8G8R8X8;
            case StridePixelFormat.B8G8R8X8_UNorm_SRgb:
                isSRgb = true;
                return VLPixelFormat.B8G8R8X8;
            case StridePixelFormat.B8G8R8A8_UNorm:
                return VLPixelFormat.B8G8R8A8;
            case StridePixelFormat.B8G8R8A8_UNorm_SRgb:
                isSRgb = true;
                return VLPixelFormat.B8G8R8A8;
            case StridePixelFormat.R32G32_Float:
                return VLPixelFormat.R32G32F;
            case StridePixelFormat.R16G16B16A16_Float:
                return VLPixelFormat.R16G16B16A16F;
            case StridePixelFormat.R32G32B32A32_Float:
                return VLPixelFormat.R32G32B32A32F;
            default:
                // Include the offending format in the message so failures are diagnosable.
                throw new Exception($"Unsupported Pixel Format: {format}");
        }
    }
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/_NodeFactory_/CustomNode.cs<|end_filename|>
using Stride.Core.Collections;
using Stride.Engine;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reactive.Linq;
using System.Reflection;
using VL.Core;
using VL.Core.Diagnostics;
using VL.Lib.Basics.Resources;
using VL.Lib.Collections.TreePatching;
using VL.Lib.Experimental;
namespace VL.Stride
{
/// <summary>
/// Extension methods on <see cref="IVLNodeDescriptionFactory"/> for building
/// <see cref="CustomNodeDesc{T}"/> node descriptions.
/// </summary>
static class FactoryExtensions
{
/// <summary>
/// Creates a node description whose instance is produced by the given
/// constructor function. No dispose action is registered.
/// </summary>
public static CustomNodeDesc<T> NewNode<T>(this IVLNodeDescriptionFactory factory,
Func<NodeContext, T> ctor,
string name = default,
string category = default,
bool copyOnWrite = true,
bool hasStateOutput = true)
where T : class
{
return new CustomNodeDesc<T>(factory,
ctor: ctx =>
{
var instance = ctor(ctx);
return (instance, default);
},
name: name,
category: category,
copyOnWrite: copyOnWrite,
hasStateOutput: hasStateOutput);
}
/// <summary>
/// Creates a node description for a default-constructible type, with an
/// optional initialization hook applied to each new instance.
/// </summary>
public static CustomNodeDesc<T> NewNode<T>(this IVLNodeDescriptionFactory factory,
string name = default,
string category = default,
bool copyOnWrite = true,
Action<T> init = default,
bool hasStateOutput = true)
where T : class, new()
{
return new CustomNodeDesc<T>(factory,
ctor: ctx =>
{
var instance = new T();
init?.Invoke(instance);
return (instance, default);
},
name: name,
category: category,
copyOnWrite: copyOnWrite,
hasStateOutput: hasStateOutput);
}
/// <summary>
/// Creates a node description for an <see cref="EntityComponent"/>. Wires up a
/// parent manager that warns (via VL messages) when the component gets attached
/// to more than one entity, and tears everything down on dispose.
/// </summary>
public static CustomNodeDesc<TComponent> NewComponentNode<TComponent>(this IVLNodeDescriptionFactory factory, string category, Action<TComponent> init = null, string name = null)
where TComponent : EntityComponent, new()
{
return new CustomNodeDesc<TComponent>(factory,
name: name,
ctor: nodeContext =>
{
var component = new TComponent();
// Tracks which entity currently owns the component; raises ToggleWarning
// when the single-parent invariant is violated.
var manager = new TreeNodeParentManager<Entity, EntityComponent>(component, (e, c) => e.Add(c), (e, c) => e.Remove(c));
var sender = new Sender<object, object>(nodeContext, component, manager);
var cachedMessages = default(List<VL.Lang.Message>);
var subscription = manager.ToggleWarning.Subscribe(v => ToggleMessages(v));
init?.Invoke(component);
return (component, () =>
{
// Dispose order: clear any visible warnings first, then release
// the manager, sender and warning subscription.
ToggleMessages(false);
manager.Dispose();
sender.Dispose();
subscription.Dispose();
}
);
// Shows/hides the "multiple parents" warning on every element of the
// node path; messages are built lazily and cached per node instance.
void ToggleMessages(bool on)
{
var messages = cachedMessages ?? (cachedMessages = nodeContext.Path.Stack
.Select(id => new VL.Lang.Message(id, Lang.MessageSeverity.Warning, "Component should only be connected to one Entity."))
.ToList());
foreach (var m in messages)
VL.Lang.PublicAPI.Session.ToggleMessage(m, on);
}
},
category: category,
copyOnWrite: false);
}
/// <summary>
/// Adds the standard "Enabled" pin for activatable components.
/// </summary>
public static IVLNodeDescription WithEnabledPin<TComponent>(this CustomNodeDesc<TComponent> node)
where TComponent : ActivableEntityComponent
{
return node.AddCachedInput("Enabled", x => x.Enabled, (x, v) => x.Enabled = v, true);
}
}
class CustomNodeDesc<TInstance> : IVLNodeDescription, IInfo
where TInstance : class
{
// Pin descriptors in declaration order; exposed via Inputs/Outputs below.
readonly List<CustomPinDesc> inputs = new List<CustomPinDesc>();
readonly List<CustomPinDesc> outputs = new List<CustomPinDesc>();
// Produces a fresh instance plus an optional dispose action for it.
readonly Func<NodeContext, (TInstance, Action)> ctor;
/// <summary>
/// Describes a VL node backed by an instance of <typeparamref name="TInstance"/>.
/// When <paramref name="hasStateOutput"/> is true, a default "Output" pin exposing
/// the instance itself is added.
/// </summary>
public CustomNodeDesc(IVLNodeDescriptionFactory factory, Func<NodeContext, (TInstance, Action)> ctor,
string name = default,
string category = default,
bool copyOnWrite = true,
bool hasStateOutput = true)
{
Factory = factory;
this.ctor = ctor;
Name = name ?? typeof(TInstance).Name;
Category = category ?? string.Empty;
CopyOnWrite = copyOnWrite;
if (hasStateOutput)
AddOutput("Output", x => x);
}
public IVLNodeDescriptionFactory Factory { get; }
public string Name { get; }
public string Category { get; }
public bool Fragmented => true;
// When true, a changed input re-creates the instance instead of mutating it.
public bool CopyOnWrite { get; }
public IReadOnlyList<IVLPinDescription> Inputs => inputs;
public IReadOnlyList<IVLPinDescription> Outputs => outputs;
public IEnumerable<Message> Messages => Enumerable.Empty<Message>();
public string Summary => typeof(TInstance).GetSummary();
public string Remarks => typeof(TInstance).GetRemarks();
public IObservable<object> Invalidated => Observable.Empty<object>();
/// <summary>
/// Instantiates the node: creates the backing instance, materializes all pins
/// and wires up the update/dispose behavior depending on CopyOnWrite.
/// </summary>
public IVLNode CreateInstance(NodeContext context)
{
var (instance, onDispose) = ctor(context);
var node = new Node(context)
{
NodeDescription = this
};
var inputs = this.inputs.Select(p => p.CreatePin(node, instance)).ToArray();
var outputs = this.outputs.Select(p => p.CreatePin(node, instance)).ToArray();
node.Inputs = inputs;
node.Outputs = outputs;
if (CopyOnWrite)
{
node.updateAction = () =>
{
if (node.needsUpdate)
{
node.needsUpdate = false;
// TODO: Causes render pipeline to crash
//if (instance is IDisposable disposable)
//    disposable.Dispose();
// Re-create the instance, replay all input values onto it and
// point the outputs at the new instance.
instance = ctor(context).Item1;
// Copy the values
foreach (var input in inputs)
input.Update(instance);
foreach (var output in outputs)
output.Instance = instance;
}
};
node.disposeAction = () =>
{
// TODO: Causes render pipeline to crash
//if (instance is IDisposable disposable)
//    disposable.Dispose();
};
}
else
{
node.updateAction = () =>
{
if (node.needsUpdate)
{
node.needsUpdate = false;
}
};
node.disposeAction = () =>
{
if (instance is IDisposable disposable)
disposable.Dispose();
onDispose?.Invoke();
};
}
return node;
}
// Nodes of this kind have no dedicated editor window.
public bool OpenEditor()
{
return false;
}
/// <summary>
/// Adds an input pin; the instance's current value (via <paramref name="getter"/>)
/// serves as the pin's initial value.
/// </summary>
public CustomNodeDesc<TInstance> AddInput<T>(string name, Func<TInstance, T> getter, Action<TInstance, T> setter, string summary = default, string remarks = default, bool isVisible = true)
{
inputs.Add(new CustomPinDesc(name, summary, remarks)
{
Name = name.InsertSpaces(),
Type = typeof(T),
CreatePin = (node, instance) => new InputPin<T>(node, instance, getter, setter, getter(instance)),
IsVisible = isVisible
});
return this;
}
/// <summary>
/// Adds an input pin with an explicit default value.
/// </summary>
public CustomNodeDesc<TInstance> AddInput<T>(string name, Func<TInstance, T> getter, Action<TInstance, T> setter, T defaultValue, string summary = default, string remarks = default, bool isVisible = true)
{
inputs.Add(new CustomPinDesc(name, summary, remarks)
{
Name = name.InsertSpaces(),
Type = typeof(T),
DefaultValue = defaultValue,
CreatePin = (node, instance) => new InputPin<T>(node, instance, getter, setter, defaultValue),
IsVisible = isVisible
});
return this;
}
/// <summary>
/// Adds an input pin that only writes to the instance when the new value differs
/// from the last value it saw (see CachedInputPin).
/// </summary>
public CustomNodeDesc<TInstance> AddCachedInput<T>(string name, Func<TInstance, T> getter, Action<TInstance, T> setter, Func<T, T, bool> equals = default, string summary = default, string remarks = default, bool isVisible = true)
{
inputs.Add(new CustomPinDesc(name, summary, remarks)
{
Name = name.InsertSpaces(),
Type = typeof(T),
CreatePin = (node, instance) => new CachedInputPin<T>(node, instance, getter, setter, getter(instance), equals),
IsVisible = isVisible
});
return this;
}
public CustomNodeDesc<TInstance> AddCachedInput<T>(string name, Func<TInstance, T> getter, Action<TInstance, T> setter, T defaultValue, string summary = default, string remarks = default, bool isVisible = true)
{
inputs.Add(new CustomPinDesc(name, summary, remarks)
{
Name = name.InsertSpaces(),
Type = typeof(T),
DefaultValue = defaultValue,
CreatePin = (node, instance) => new CachedInputPin<T>(node, instance, getter, setter, defaultValue),
IsVisible = isVisible
});
return this;
}
/// <summary>
/// Adds an input pin that skips the write when the new value equals the
/// instance's current value (see OptimizedInputPin).
/// NOTE(review): the <paramref name="equals"/> argument is never forwarded —
/// OptimizedInputPin's constructor takes no comparer, so the default
/// EqualityComparer is always used. Confirm whether this is intended.
/// </summary>
public CustomNodeDesc<TInstance> AddOptimizedInput<T>(string name, Func<TInstance, T> getter, Action<TInstance, T> setter, Func<T, T, bool> equals = default, string summary = default, string remarks = default, bool isVisible = true)
{
inputs.Add(new CustomPinDesc(name, summary, remarks)
{
Name = name.InsertSpaces(),
Type = typeof(T),
CreatePin = (node, instance) => new OptimizedInputPin<T>(node, instance, getter, setter, getter(instance)),
IsVisible = isVisible
});
return this;
}
public CustomNodeDesc<TInstance> AddOptimizedInput<T>(string name, Func<TInstance, T> getter, Action<TInstance, T> setter, T defaultValue, string summary = default, string remarks = default, bool isVisible = true)
{
inputs.Add(new CustomPinDesc(name, summary, remarks)
{
Name = name.InsertSpaces(),
Type = typeof(T),
DefaultValue = defaultValue,
CreatePin = (node, instance) => new OptimizedInputPin<T>(node, instance, getter, setter, defaultValue),
IsVisible = isVisible
});
return this;
}
// Null-tolerant sequence equality: two nulls are equal, null never equals non-null.
static bool SequenceEqual<T>(IEnumerable<T> a, IEnumerable<T> b)
{
if (a is null)
return b is null;
if (b is null)
return false;
return a.SequenceEqual(b);
}
/// <summary>
/// Adds a cached list input backed by a mutable IList on the instance: on change,
/// the instance list is cleared and repopulated (null items are skipped).
/// </summary>
public CustomNodeDesc<TInstance> AddCachedListInput<T>(string name, Func<TInstance, IList<T>> getter, Action<TInstance> updateInstanceAfterSetter = null)
{
return AddCachedInput<IReadOnlyList<T>>(name,
getter: instance => (IReadOnlyList<T>)getter(instance),
equals: SequenceEqual,
setter: (x, v) =>
{
var currentItems = getter(x);
currentItems.Clear();
if (v != null)
{
foreach (var item in v)
{
if (item != null)
{
currentItems.Add(item);
}
}
}
updateInstanceAfterSetter?.Invoke(x);
});
}
/// <summary>
/// Adds a cached list input backed by an array property: null items are filtered
/// out before the setter receives the new array.
/// </summary>
public CustomNodeDesc<TInstance> AddCachedListInput<T>(string name, Func<TInstance, T[]> getter, Action<TInstance, T[]> setter)
{
return AddCachedInput<IReadOnlyList<T>>(name,
getter: getter,
equals: SequenceEqual,
setter: (x, v) =>
{
var newItems = v?.Where(i => i != null);
setter(x, newItems?.ToArray());
});
}
/// <summary>
/// Adds a cached list input backed by an IndexingDictionary keyed by position.
/// </summary>
public CustomNodeDesc<TInstance> AddCachedListInput<T>(string name, Func<TInstance, IndexingDictionary<T>> getter)
where T : class
{
return AddCachedInput<IReadOnlyList<T>>(name,
getter: x => getter(x).Values.ToList(),
equals: SequenceEqual,
setter: (x, v) =>
{
var currentItems = getter(x);
currentItems.Clear();
if (v != null)
{
for (int i = 0; i < v.Count; i++)
{
currentItems[i] = v[i];
}
}
});
}
/// <summary>
/// Adds an output pin that re-evaluates the getter on every read.
/// </summary>
public CustomNodeDesc<TInstance> AddOutput<T>(string name, Func<TInstance, T> getter, string summary = default, string remarks = default, bool isVisible = true)
{
outputs.Add(new CustomPinDesc(name, summary, remarks)
{
Name = name.InsertSpaces(),
Type = typeof(T),
CreatePin = (node, instance) => new OutputPin<T>(node, instance, getter),
IsVisible = isVisible
});
return this;
}
/// <summary>
/// Adds an output pin whose value is recomputed only after the node updated.
/// </summary>
public CustomNodeDesc<TInstance> AddCachedOutput<T>(string name, Func<TInstance, T> getter, string summary = default, string remarks = default, bool isVisible = true)
{
outputs.Add(new CustomPinDesc(name, summary, remarks)
{
Name = name.InsertSpaces(),
Type = typeof(T),
CreatePin = (node, instance) => new CachedOutputPin<T>(node, instance, getter),
IsVisible = isVisible
});
return this;
}
/// <summary>
/// Cached output variant whose getter also receives the node context.
/// </summary>
public CustomNodeDesc<TInstance> AddCachedOutput<T>(string name, Func<NodeContext, TInstance, T> getter, string summary = default, string remarks = default, bool isVisible = true)
{
outputs.Add(new CustomPinDesc(name, summary, remarks)
{
Name = name.InsertSpaces(),
Type = typeof(T),
CreatePin = (node, instance) => new CachedOutputPin<T>(node, instance, x => getter(node.Context, instance)),
IsVisible = isVisible
});
return this;
}
/// <summary>
/// Cached output variant whose getter is built per pin instance together with a
/// disposable that is released when the pin is disposed.
/// </summary>
public CustomNodeDesc<TInstance> AddCachedOutput<T>(string name, Func<NodeContext, (Func<TInstance, T>, IDisposable)> ctor, string summary = default, string remarks = default, bool isVisible = true)
{
outputs.Add(new CustomPinDesc(name, summary, remarks)
{
Name = name.InsertSpaces(),
Type = typeof(T),
CreatePin = (node, instance) =>
{
var (getter, disposable) = ctor(node.Context);
return new CachedOutputPin<T>(node, instance, getter, disposable);
},
IsVisible = isVisible
});
return this;
}
/// <summary>
/// Describes a single pin: display name, type, default value and the factory
/// that materializes the pin for a concrete node/instance pair. Summary and
/// remarks fall back to the instance type's member documentation.
/// </summary>
class CustomPinDesc : IVLPinDescription, IInfo, IVLPinDescriptionWithVisibility
{
readonly string memberName;
string summary;
string remarks;
public CustomPinDesc(string memberName, string summary = default, string remarks = default)
{
this.memberName = memberName;
this.summary = summary;
this.remarks = remarks;
}
public string Name { get; set; }
public Type Type { get; set; }
public object DefaultValue { get; set; }
public Func<Node, TInstance, Pin> CreatePin { get; set; }
// Lazily resolved from the member's XML docs when not supplied explicitly.
public string Summary => summary ?? (summary = typeof(TInstance).GetSummary(memberName));
public string Remarks => remarks ?? (remarks = typeof(TInstance).GetRemarks(memberName));
public bool IsVisible { get; set; } = true;
}
/// <summary>
/// Base pin: holds the owning node and the current instance the pin reads
/// from / writes to. The instance is swapped on copy-on-write updates.
/// </summary>
abstract class Pin : IVLPin
{
public readonly Node Node;
public TInstance Instance;
public Pin(Node node, TInstance instance)
{
Node = node;
Instance = instance;
}
public abstract object BoxedValue { get; set; }
// Update the pin by copying the underlying value to the new instance
public virtual void Update(TInstance instance)
{
Instance = instance;
}
object IVLPin.Value
{
get => BoxedValue;
set => BoxedValue = value;
}
}
/// <summary>
/// Typed pin base; bridges the boxed IVLPin.Value to the typed Value property.
/// </summary>
abstract class Pin<T> : Pin, IVLPin<T>
{
public Func<TInstance, T> getter;
public Action<TInstance, T> setter;
public Pin(Node node, TInstance instance) : base(node, instance)
{
}
public override sealed object BoxedValue
{
get => Value;
set => Value = (T)value;
}
public abstract T Value { get; set; }
}
/// <summary>
/// Writable pin: every set pushes the value to the instance and marks the node
/// for update. Null values are normalized to the pin's initial value.
/// </summary>
class InputPin<T> : Pin<T>, IVLPin
{
public InputPin(Node node, TInstance instance, Func<TInstance, T> getter, Action<TInstance, T> setter, T initialValue)
: base(node, instance)
{
this.getter = getter;
this.setter = setter;
this.InitialValue = initialValue;
// Push the initial value so instance and pin agree from the start.
setter(instance, initialValue);
}
public T InitialValue { get; }
public override T Value
{
get => getter(Instance);
set
{
// Normalize the value first
if (value is null)
value = InitialValue;
setter(Instance, value);
Node.needsUpdate = true;
}
}
// On copy-on-write: carry the current value over to the freshly created instance.
public override void Update(TInstance instance)
{
var currentValue = getter(Instance);
base.Update(instance);
setter(instance, currentValue);
}
}
/// <summary>
/// Input pin that only forwards a set when the value differs from the last value
/// written through this pin (compared with the supplied or default comparer).
/// </summary>
class CachedInputPin<T> : InputPin<T>, IVLPin
{
readonly Func<T, T, bool> equals;
T lastValue;
public CachedInputPin(Node node, TInstance instance, Func<TInstance, T> getter, Action<TInstance, T> setter, T initialValue, Func<T, T, bool> equals = default)
: base(node, instance, getter, setter, initialValue)
{
this.equals = equals ?? EqualityComparer<T>.Default.Equals;
lastValue = initialValue;
}
public override T Value
{
get => getter(Instance);
set
{
if (!equals(value, lastValue))
{
lastValue = value;
base.Value = value;
}
}
}
}
/// <summary>
/// Input pin that only forwards a set when the value differs from the instance's
/// current value.
/// NOTE(review): the constructor has no 'equals' parameter, so the expression
/// 'equals ?? ...' reads the (null) field and the comparer is always
/// EqualityComparer&lt;T&gt;.Default — the comparer passed to AddOptimizedInput
/// is silently dropped. Confirm whether a comparer parameter should be added.
/// </summary>
class OptimizedInputPin<T> : InputPin<T>, IVLPin
{
readonly Func<T, T, bool> equals;
public OptimizedInputPin(Node node, TInstance instance, Func<TInstance, T> getter, Action<TInstance, T> setter, T initialValue)
: base(node, instance, getter, setter, initialValue)
{
this.equals = equals ?? EqualityComparer<T>.Default.Equals;
}
public override T Value
{
get => getter(Instance);
set
{
if (!equals(value, Value))
base.Value = value;
}
}
}
/// <summary>
/// Read-only pin: flushes a pending node update before evaluating the getter,
/// so outputs always reflect the latest inputs. Writing throws.
/// </summary>
class OutputPin<T> : Pin<T>
{
public OutputPin(Node node, TInstance instance, Func<TInstance, T> getter)
: base(node, instance)
{
this.getter = getter;
}
public override T Value
{
get
{
if (Node.needsUpdate)
Node.Update();
return getter(Instance);
}
set => throw new InvalidOperationException();
}
}
/// <summary>
/// Output pin that re-evaluates the getter only after a node update and serves
/// the cached value otherwise. Owns an optional disposable from pin creation.
/// </summary>
class CachedOutputPin<T> : OutputPin<T>, IDisposable
{
readonly IDisposable disposeable;
public CachedOutputPin(Node node, TInstance instance, Func<TInstance, T> getter, IDisposable disposeable = default)
: base(node, instance, getter)
{
this.disposeable = disposeable;
}
public override T Value
{
get
{
if (Node.needsUpdate)
{
Node.Update();
cachedValue = getter(Instance);
}
return cachedValue;
}
set => throw new InvalidOperationException();
}
T cachedValue;
public void Dispose()
{
disposeable?.Dispose();
}
}
/// <summary>
/// The concrete node instance: delegates update/dispose to actions installed by
/// CreateInstance, and disposes any disposable output pins first.
/// </summary>
class Node : VLObject, IVLNode
{
public Action updateAction;
public Action disposeAction;
// Set by input pins on change; cleared by the update action.
public bool needsUpdate = true;
public Node(NodeContext nodeContext) : base(nodeContext)
{
}
public IVLNodeDescription NodeDescription { get; set; }
public IVLPin[] Inputs { get; set; }
public IVLPin[] Outputs { get; set; }
public void Dispose()
{
foreach (var p in Outputs)
if (p is IDisposable d)
d.Dispose();
disposeAction?.Invoke();
}
public void Update() => updateAction?.Invoke();
}
}
}
<|start_filename|>packages/VL.Stride.Windows/src/SpoutCSharp/SpoutSender.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO.MemoryMappedFiles;
using System.Threading;
using Stride.Graphics;
namespace VL.Stride.Spout
{
public class SpoutSender : SpoutThing
{
/// <summary>
/// Creates a Spout sender for the given shared texture. Call Initialize()
/// afterwards to register the sender in the shared memory maps.
/// </summary>
/// <param name="senderName">Name under which this sender is published.</param>
/// <param name="srcTexture">The texture whose description is shared with receivers.</param>
public SpoutSender(string senderName, Texture srcTexture)
:base()
{
SenderName = senderName;
this.frame = srcTexture;
textureDesc = new TextureDesc(Frame);
}
/// <summary>
/// Registers this sender: adds its name to the shared sender-names map, publishes
/// the texture description under the sender's name, and claims the active-sender
/// slot if no sender is active yet.
/// </summary>
public void Initialize()
{
UpdateMaxSenders();
long len = SenderNameLength * MaxSenders;
SenderNamesMap = MemoryMappedFile.CreateOrOpen(SenderNamesMMF, len);
if (AddNameToSendersList(senderName))
{
// Publish the texture description in a map named after the sender.
byte[] desc = textureDesc.ToByteArray();
SenderDescriptionMap = MemoryMappedFile.CreateOrOpen(SenderName, desc.Length);
using (var mmvs = SenderDescriptionMap.CreateViewStream())
{
mmvs.Write(desc, 0, desc.Length);
}
//If we are the first/only sender, create a new ActiveSenderName map.
//This is a separate shared memory containing just a sender name
//that receivers can use to retrieve the current active Sender.
ActiveSenderMap = MemoryMappedFile.CreateOrOpen(ActiveSenderMMF, SenderNameLength);
using (var mmvs = ActiveSenderMap.CreateViewStream())
{
// A zero first byte means the slot is empty — claim it with our name.
var firstByte = mmvs.ReadByte();
if (firstByte == 0) //no active sender yet
{
mmvs.Position = 0;
mmvs.Write(GetNameBytes(SenderName), 0, SenderNameLength);
}
}
}
}
/// <summary>
/// Adds this sender's name to the shared sender-names list under the cross-process
/// mutex. Returns false when the lock could not be acquired within
/// SpoutWaitTimeout or the name is already registered.
/// </summary>
bool AddNameToSendersList(string name)
{
    // Named mutex guarding the shared memory-mapped sender list across processes.
    var mutex = new Mutex(true, SenderNamesMMF + "_mutex", out _);
    bool acquired = false;
    bool success = false;
    try
    {
        try
        {
            acquired = mutex.WaitOne(SpoutWaitTimeout);
        }
        catch (AbandonedMutexException)
        {
            // Previous owner died while holding the mutex — we own it now.
            acquired = true;
        }

        if (acquired)
        {
            List<string> senders = GetSenderNames();
            if (!senders.Contains(this.senderName))
            {
                senders.Add(name);
                WriteSenderNamesToMMF(senders);
                success = true;
            }
        }
    }
    finally
    {
        // Only release when we actually own the mutex; releasing an unowned
        // mutex throws ApplicationException (previous code released unconditionally,
        // which failed after a WaitOne timeout).
        if (acquired)
            mutex.ReleaseMutex();
        mutex.Dispose();
    }
    return success;
}
/// <summary>
/// Removes this sender's name from the shared sender-names list under the
/// cross-process mutex. Skips the update when the lock cannot be acquired.
/// </summary>
void RemoveNameFromSendersList()
{
    var mutex = new Mutex(true, SenderNamesMMF + "_mutex", out _);
    bool acquired = false;
    try
    {
        try
        {
            acquired = mutex.WaitOne(SpoutWaitTimeout);
        }
        catch (AbandonedMutexException)
        {
            // Previous owner died while holding the mutex — we own it now.
            acquired = true;
        }

        // Previously the list was rewritten in 'finally' even when the lock was
        // never acquired — only touch shared state while holding the mutex.
        if (acquired)
        {
            List<string> senders = GetSenderNames();
            if (senders.Contains(this.senderName))
            {
                senders.Remove(senderName);
                WriteSenderNamesToMMF(senders);
            }
        }
    }
    finally
    {
        // Releasing an unowned mutex throws ApplicationException — guard it.
        if (acquired)
            mutex.ReleaseMutex();
        mutex.Dispose();
    }
}
/// <summary>
/// Rewrites the whole shared sender-names map: one fixed-size
/// (SenderNameLength) slot per possible sender, unused slots zero-filled.
/// Caller must hold the sender-names mutex.
/// </summary>
void WriteSenderNamesToMMF(List<string> senders)
{
using (var mmvs = SenderNamesMap.CreateViewStream())
{
for (int i = 0; i < MaxSenders; i++)
{
byte[] bytes;
if (i < senders.Count)
bytes = GetNameBytes(senders[i]);
else //fill with 0s
bytes = new byte[SenderNameLength];
mmvs.Write(bytes, 0, bytes.Length);
}
}
}
/// <summary>
/// Unregisters the sender name from the shared list, then releases the
/// memory-mapped files via the base class.
/// </summary>
public override void Dispose()
{
UpdateMaxSenders();
RemoveNameFromSendersList();
base.Dispose();
}
}
}
<|start_filename|>packages/VL.Stride.Windows/src/Assets/AssetBuilderService.cs<|end_filename|>
// Copyright (c) Stride contributors (https://xenko.com) and Silicon Studio Corp. (https://www.siliconstudio.co.jp)
// Distributed under the MIT license. See the LICENSE.md file in the project root for more information.
using System;
using Stride.Core.Assets.Compiler;
using Stride.Core.BuildEngine;
using Stride.Core.Annotations;
using Stride.Core.Collections;
using Stride.Core.Diagnostics;
using Stride.Core.Mathematics;
using Stride.Core.Assets;
namespace VL.Stride.Assets
{
/// <summary>
/// Runs asset build units on a background <see cref="DynamicBuilder"/>, serving
/// build steps from a priority queue and raising <see cref="AssetBuilt"/> when
/// an asset finishes building.
/// </summary>
public class AssetBuilderService : IBuildService, IDisposable
{
private const string IndexName = "AssetBuilderServiceIndex";
// Guards access to 'queue' from producer threads and the builder thread.
private readonly object queueLock = new object();
private readonly DynamicBuilder builder;
private readonly PriorityNodeQueue<AssetBuildUnit> queue = new PriorityNodeQueue<AssetBuildUnit>();
// TODO: this is temporary until we have thread local databases (and a better solution for databases used in standard tasks)
public static readonly object OutOfMicrothreadDatabaseLock = new object();
/// <summary>
/// Opens the object database at <paramref name="buildDirectory"/> and starts
/// the builder thread.
/// </summary>
/// <exception cref="ArgumentNullException">When buildDirectory is null.</exception>
public AssetBuilderService([NotNull] string buildDirectory)
{
if (buildDirectory == null) throw new ArgumentNullException(nameof(buildDirectory));
// We want at least 2 threads, since one will be used for DynamicBuildStep (which is a special blocking step)
var processorCount = Environment.ProcessorCount;
var threadCount = MathUtil.Clamp(2 * processorCount / 5, 2, processorCount - 1);
// Mount database (otherwise it will be mounted by DynamicBuilder thread, and it might happen too late)
Builder.OpenObjectDatabase(buildDirectory, IndexName);
var builderInstance = new Builder(GlobalLogger.GetLogger("AssetBuilderService"), buildDirectory, IndexName)
{
BuilderName = "AssetBuilderService Builder",
ThreadCount = threadCount,
};
builder = new DynamicBuilder(builderInstance, new AnonymousBuildStepProvider(GetNextBuildStep), "Asset Builder service thread.");
builder.Start();
}
// Raised (on the builder thread) after each asset build step completes.
public event EventHandler<AssetBuiltEventArgs> AssetBuilt;
public virtual void Dispose()
{
builder.Dispose();
}
/// <summary>
/// Called by the builder to fetch the next build step whose priority is within
/// <paramref name="maxPriority"/>; returns null when none qualifies.
/// </summary>
private BuildStep GetNextBuildStep(int maxPriority)
{
while (true)
{
AssetBuildUnit unit;
lock (queueLock)
{
if (queue.Empty)
{
return null;
}
unit = queue.Dequeue();
}
// Check that priority is good enough
if (unit.PriorityMajor > maxPriority)
return null;
var buildStep = unit.GetBuildStep();
// If this build step couldn't be built, let's find another one
if (buildStep == null)
continue;
// Forward priority to build engine (still very coarse, but should help)
buildStep.Priority = unit.PriorityMajor;
foreach (var step in buildStep.EnumerateRecursively())
{
var assetStep = step as AssetBuildStep;
if (assetStep != null)
{
assetStep.Priority = unit.PriorityMajor;
assetStep.StepProcessed += (s, e) => NotifyAssetBuilt(assetStep.AssetItem, assetStep.Logger);
}
}
return buildStep;
}
}
/// <summary>
/// Enqueues a build unit and wakes the builder. The returned node can be used
/// to remove the unit again via <see cref="RemoveBuildUnit"/>.
/// </summary>
public PriorityQueueNode<AssetBuildUnit> PushBuildUnit(AssetBuildUnit unit)
{
PriorityQueueNode<AssetBuildUnit> result;
lock (queueLock)
{
result = queue.Enqueue(unit);
}
builder.NotifyBuildStepAvailable();
return result;
}
public void RemoveBuildUnit(PriorityQueueNode<AssetBuildUnit> node)
{
lock (queueLock)
{
queue.Remove(node);
}
}
private void NotifyAssetBuilt(AssetItem assetItem, LoggerResult buildLog)
{
AssetBuilt?.Invoke(this, new AssetBuiltEventArgs(assetItem, buildLog));
}
}
}
<|start_filename|>packages/VL.Stride.Windows/src/SpoutCSharp/SpoutThing.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;
using System.IO.MemoryMappedFiles;
using Stride.Graphics;
using Microsoft.Win32; // for accessing the registry
namespace VL.Stride.Spout
{
public abstract class SpoutThing : IDisposable
{
// Names of the well-known Spout shared memory maps.
protected const string SenderNamesMMF = "SpoutSenderNames";
protected const string ActiveSenderMMF = "ActiveSenderName";
// Milliseconds to wait for the cross-process sender-list mutex.
protected const int SpoutWaitTimeout = 100;
protected const int MaxSendersDefault = 64;
// Fixed size of one sender-name slot in the shared map (ASCII, zero-padded).
public const int SenderNameLength = 256;
protected MemoryMappedFile SenderDescriptionMap;
protected MemoryMappedFile SenderNamesMap;
protected MemoryMappedFile ActiveSenderMap;
// The shared texture and its serialized description.
protected Texture frame;
protected TextureDesc textureDesc;
protected string senderName;
// Capacity of the shared sender list; refreshed from the registry by UpdateMaxSenders().
public int MaxSenders { get; private set; } = MaxSendersDefault;
public Texture Frame
{
get { return frame; }
}
public virtual string SenderName
{
get { return senderName; }
set { senderName = value; }
}
/// <summary>Releases the shared-memory maps owned by this instance.</summary>
public virtual void Dispose()
{
    SenderDescriptionMap?.Dispose();
    SenderNamesMap?.Dispose();
    ActiveSenderMap?.Dispose();
}
/// <summary>
/// Reads all registered sender names from the shared map. Each slot is
/// SenderNameLength bytes; names are ASCII and zero-terminated/zero-padded.
/// </summary>
public List<string> GetSenderNames()
{
int len = MaxSenders * SenderNameLength;
List<string> namesList = new List<string>();
StringBuilder name = new StringBuilder();
//Read the memory mapped file in to a byte array
using (var mmvs = SenderNamesMap.CreateViewStream())
{
var b = new byte[len];
mmvs.Read(b, 0, len);
//split into strings searching for the nulls
for (int i = 0; i < len; i++)
{
if (b[i] == 0)
{
if (name.Length == 0)
{
// Empty slot: jump to the start of the next fixed-size slot.
i += SenderNameLength - (i % SenderNameLength) - 1;
continue;
}
namesList.Add(name.ToString());
name.Clear();
}
else
name.Append((char)b[i]);
}
}
return namesList;
}
/// <summary>
/// Encodes a sender name into a fixed-size (SenderNameLength) ASCII buffer,
/// zero-padded at the end, as expected by the shared sender-names map.
/// </summary>
protected byte[] GetNameBytes(string name)
{
    var buffer = new byte[SenderNameLength];
    Encoding.ASCII.GetBytes(name, 0, name.Length, buffer, 0);
    return buffer;
}
// see https://github.com/vvvv/vvvv-sdk/blob/develop/vvvv45/src/nodes/plugins/System/SpoutSender.cs#L37
/// <summary>
/// Refreshes <see cref="MaxSenders"/> from the optional "MaxSenders" value in the
/// Spout registry key, falling back to MaxSendersDefault when absent.
/// </summary>
protected void UpdateMaxSenders()
{
    MaxSenders = MaxSendersDefault;
    // OpenSubKey returns null when the key is missing; GetValue returns null when
    // the value is missing — the previous direct cast crashed in that case.
    // 'using' ensures the registry key handle is released.
    using (RegistryKey subkey = Registry.CurrentUser.OpenSubKey("Software\\Leading Edge\\Spout"))
    {
        if (subkey?.GetValue("MaxSenders") is int m && m > 0)
        {
            MaxSenders = m; // Set the global max senders value
        }
    }
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Core/IO/AggregateFileProvider.cs<|end_filename|>
using Stride.Core.IO;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
namespace VL.Stride.Core.IO
{
class AggregateFileProvider : VirtualFileProviderBase
{
// Ordered list of child providers; first provider that can serve a request wins.
// All access is synchronized by locking this list instance.
private readonly List<IVirtualFileProvider> virtualFileProviders = new List<IVirtualFileProvider>();
/// <summary>
/// A virtual file provider that delegates to a mutable list of child providers.
/// </summary>
public AggregateFileProvider(params IVirtualFileProvider[] virtualFileProviders)
: base(rootPath: null)
{
this.virtualFileProviders.AddRange(virtualFileProviders);
}
public void Add(IVirtualFileProvider virtualFileProvider)
{
lock (virtualFileProviders)
{
virtualFileProviders.Add(virtualFileProvider);
}
}
public void Remove(IVirtualFileProvider virtualFileProvider)
{
lock (virtualFileProviders)
{
virtualFileProviders.Remove(virtualFileProvider);
}
}
public override bool TryGetFileLocation(string path, out string filePath, out long start, out long end)
{
lock (virtualFileProviders)
{
foreach (var provider in virtualFileProviders)
if (provider.TryGetFileLocation(path, out filePath, out start, out end) && File.Exists(filePath))
return true;
}
return base.TryGetFileLocation(path, out filePath, out start, out end);
}
public override bool FileExists(string url)
{
lock (virtualFileProviders)
{
foreach (var provider in virtualFileProviders)
if (provider.FileExists(url))
return true;
return false;
}
}
public override bool DirectoryExists(string url)
{
lock (virtualFileProviders)
{
foreach (var provider in virtualFileProviders)
if (provider.DirectoryExists(url))
return true;
return false;
}
}
public override string[] ListFiles(string url, string searchPattern, VirtualSearchOption searchOption)
{
return ListFilesInternal(url, searchPattern, searchOption).ToArray();
}
IEnumerable<string> ListFilesInternal(string url, string searchPattern, VirtualSearchOption searchOption)
{
lock (virtualFileProviders)
{
foreach (var provider in virtualFileProviders)
{
var result = new string[0];
try
{
result = provider.ListFiles(url, searchPattern, searchOption);
}
catch (Exception) { }
foreach (var filePath in result)
{
yield return filePath;
}
}
}
}
public override Stream OpenStream(string url, VirtualFileMode mode, VirtualFileAccess access, VirtualFileShare share = VirtualFileShare.Read, StreamFlags streamFlags = StreamFlags.None)
{
if (mode == VirtualFileMode.Open)
{
lock (virtualFileProviders)
{
foreach (var provider in virtualFileProviders)
{
if (provider.FileExists(url))
{
for (int i = 0; i < 10; i++)
{
try
{
return provider.OpenStream(url, mode, access, share, streamFlags);
}
catch (IOException)
{
// We sometimes get file already in use exception. Let's try again.
Thread.Sleep(10);
}
}
}
}
throw new FileNotFoundException(string.Format("Unable to find the file [{0}]", url));
}
}
throw new ArgumentException("mode");
}
}
}
<|start_filename|>packages/VL.Stride.Windows/src/SkiaRenderer.cs<|end_filename|>
using SharpDX.Direct3D11;
using SkiaSharp;
using Stride.Core.Mathematics;
using Stride.Graphics;
using Stride.Input;
using Stride.Rendering;
using System;
using System.Reactive.Disposables;
using VL.Skia;
using VL.Skia.Egl;
using VL.Stride.Input;
using CommandList = Stride.Graphics.CommandList;
using PixelFormat = Stride.Graphics.PixelFormat;
using SkiaRenderContext = VL.Skia.RenderContext;
namespace VL.Stride.Windows
{
/// <summary>
/// Renders the Skia layer into the Stride provided surface.
/// </summary>
public partial class SkiaRenderer : RendererBase
{
// Cached color spaces - avoids allocating a new SKColorSpace every draw.
private static readonly SKColorSpace srgbLinearColorspace = SKColorSpace.CreateSrgbLinear();
private static readonly SKColorSpace srgbColorspace = SKColorSpace.CreateSrgb();
// Assigning Disposable automatically disposes the previous subscription.
private readonly SerialDisposable inputSubscription = new SerialDisposable();
// Used to detect changes that require re-subscribing to input events.
private IInputSource lastInputSource;
private Int2 lastRenderTargetSize;
private readonly InViewportUpstream viewportLayer = new InViewportUpstream();
// The Skia layer to render. When null, DrawCore does nothing.
public ILayer Layer { get; set; }
// Renders the Skia layer into the currently bound Stride render target by
// wrapping it in an ANGLE (EGL/GLES-on-D3D11) surface.
protected override void DrawCore(RenderDrawContext context)
{
if (Layer is null)
return;
var commandList = context.CommandList;
var renderTarget = commandList.RenderTarget;
// Fetch the skia render context (uses ANGLE -> DirectX11)
var interopContext = GetInteropContext(context.GraphicsDevice, (int)renderTarget.MultisampleCount);
var skiaRenderContext = interopContext.SkiaRenderContext;
var eglContext = skiaRenderContext.EglContext;
// Subscribe to input events - in case we have many sinks we assume that there's only one input source active
var renderTargetSize = new Int2(renderTarget.Width, renderTarget.Height);
var inputSource = context.RenderContext.GetWindowInputSource();
if (inputSource != lastInputSource || renderTargetSize != lastRenderTargetSize)
{
lastInputSource = inputSource;
lastRenderTargetSize = renderTargetSize;
inputSubscription.Disposable = SubscribeToInputSource(inputSource, context, canvas: null, skiaRenderContext.SkiaContext);
}
// Swap in ANGLE's D3D11 device context state for the duration of the block.
using (interopContext.Scoped(commandList))
{
// Share the D3D11 render target texture with ANGLE as an EGL client buffer.
var nativeTempRenderTarget = SharpDXInterop.GetNativeResource(renderTarget) as Texture2D;
using var eglSurface = eglContext.CreateSurfaceFromClientBuffer(nativeTempRenderTarget.NativePointer);
// Make the surface current (becomes default FBO)
skiaRenderContext.MakeCurrent(eglSurface);
// Uncomment for debugging
// SimpleStupidTestRendering();
// Setup a skia surface around the currently set render target
using var surface = CreateSkSurface(skiaRenderContext.SkiaContext, renderTarget, GraphicsDevice.ColorSpace == ColorSpace.Linear);
// Render
var canvas = surface.Canvas;
var viewport = context.RenderContext.ViewportState.Viewport0;
canvas.ClipRect(SKRect.Create(viewport.X, viewport.Y, viewport.Width, viewport.Height));
viewportLayer.Update(Layer, SKRect.Create(viewport.X, viewport.Y, viewport.Width, viewport.Height), CommonSpace.PixelTopLeft, out var layer);
layer.Render(CallerInfo.InRenderer(renderTarget.Width, renderTarget.Height, canvas, skiaRenderContext.SkiaContext));
// Flush
surface.Flush();
// Ensures surface gets released
eglContext.MakeCurrent(default);
}
}
// Wraps the currently bound GL framebuffer (backed by the Stride render
// target via ANGLE) into a Skia surface - no pixel copy involved.
SKSurface CreateSkSurface(GRContext context, Texture texture, bool isLinearColorspace)
{
var colorType = GetColorType(texture.ViewFormat);
// Query the GL state describing the currently bound framebuffer.
NativeGles.glGetIntegerv(NativeGles.GL_FRAMEBUFFER_BINDING, out var framebuffer);
NativeGles.glGetIntegerv(NativeGles.GL_STENCIL_BITS, out var stencil);
NativeGles.glGetIntegerv(NativeGles.GL_SAMPLES, out var samples);
// Clamp the sample count to what Skia supports for this color type.
var maxSamples = context.GetMaxSurfaceSampleCount(colorType);
if (samples > maxSamples)
samples = maxSamples;
var glInfo = new GRGlFramebufferInfo(
fboId: (uint)framebuffer,
format: colorType.ToGlSizedFormat());
using var renderTarget = new GRBackendRenderTarget(
width: texture.Width,
height: texture.Height,
sampleCount: samples,
stencilBits: stencil,
glInfo: glInfo);
return SKSurface.Create(
context,
renderTarget,
GRSurfaceOrigin.TopLeft,
colorType,
colorspace: isLinearColorspace ? srgbLinearColorspace : srgbColorspace);
}
// Maps Stride pixel formats to Skia color types; returns Unknown for
// formats Skia cannot render into.
static SKColorType GetColorType(PixelFormat format)
{
switch (format)
{
case PixelFormat.B5G6R5_UNorm:
return SKColorType.Rgb565;
case PixelFormat.B8G8R8A8_UNorm:
case PixelFormat.B8G8R8A8_UNorm_SRgb:
return SKColorType.Bgra8888;
case PixelFormat.R8G8B8A8_UNorm:
case PixelFormat.R8G8B8A8_UNorm_SRgb:
return SKColorType.Rgba8888;
case PixelFormat.R10G10B10A2_UNorm:
return SKColorType.Rgba1010102;
case PixelFormat.R16G16B16A16_Float:
return SKColorType.RgbaF16;
case PixelFormat.R16G16B16A16_UNorm:
return SKColorType.Rgba16161616;
case PixelFormat.R32G32B32A32_Float:
return SKColorType.RgbaF32;
case PixelFormat.R16G16_Float:
return SKColorType.RgF16;
case PixelFormat.R16G16_UNorm:
return SKColorType.Rg1616;
case PixelFormat.R8G8_UNorm:
return SKColorType.Rg88;
case PixelFormat.A8_UNorm:
return SKColorType.Alpha8;
case PixelFormat.R8_UNorm:
return SKColorType.Gray8;
default:
return SKColorType.Unknown;
}
}
//static void SimpleStupidTestRendering()
//{
// if (++i % 2 == 0)
// Gles.glClearColor(1, 0, 0, 1);
// else
// Gles.glClearColor(1, 1, 0, 1);
// Gles.glClear(Gles.GL_COLOR_BUFFER_BIT);
// Gles.glFlush();
//}
//int i = 0;
// Works, also simple Gles drawing commands work but SkSurface.Flush causes device lost :(
// One interop context is created and cached per graphics device and MSAA
// sample count (see the per-key GetOrCreateSharedData call below).
static InteropContext GetInteropContext(GraphicsDevice graphicsDevice, int msaaSamples)
{
return graphicsDevice.GetOrCreateSharedData($"VL.Stride.Skia.InteropContext{msaaSamples}", gd =>
{
if (SharpDXInterop.GetNativeDevice(gd) is Device device)
{
// https://github.com/google/angle/blob/master/src/tests/egl_tests/EGLDeviceTest.cpp#L272
var angleDevice = EglDevice.FromD3D11(device.NativePointer);
var d1 = device.QueryInterface<Device1>();
var contextState = d1.CreateDeviceContextState<Device1>(
CreateDeviceContextStateFlags.None,
new[] { device.FeatureLevel },
out _);
return new InteropContext(SkiaRenderContext.New(angleDevice, msaaSamples), d1.ImmediateContext1, contextState);
}
// NOTE(review): returns null when the native device is not D3D11 -
// callers appear to assume non-null; confirm non-D3D11 backends are unsupported here.
return null;
});
}
// Bundles the Skia/ANGLE render context with the D3D11 device context state
// used to switch between Stride's and ANGLE's GPU state.
sealed class InteropContext : IDisposable
{
public readonly SkiaRenderContext SkiaRenderContext;
public readonly DeviceContext1 DeviceContext;
public readonly DeviceContextState ContextState;
public InteropContext(SkiaRenderContext skiaRenderContext, DeviceContext1 deviceContext, DeviceContextState contextState)
{
SkiaRenderContext = skiaRenderContext;
DeviceContext = deviceContext;
ContextState = contextState;
}
public ScopedDeviceContext Scoped(CommandList commandList)
{
return new ScopedDeviceContext(commandList, DeviceContext, ContextState);
}
public void Dispose()
{
ContextState.Dispose();
SkiaRenderContext.Dispose();
}
}
// Swaps the D3D11 device context state for the lifetime of the value and
// restores Stride's render targets on Dispose.
readonly struct ScopedDeviceContext : IDisposable
{
readonly CommandList commandList;
readonly DeviceContext1 deviceContext;
readonly DeviceContextState oldContextState;
public ScopedDeviceContext(CommandList commandList, DeviceContext1 deviceContext, DeviceContextState contextState)
{
this.commandList = commandList;
this.deviceContext = deviceContext;
deviceContext.SwapDeviceContextState(contextState, out oldContextState);
}
public void Dispose()
{
// Ensure no references are held to the render targets (would prevent resize)
var currentRenderTarget = commandList.RenderTarget;
var currentDepthStencil = commandList.DepthStencilBuffer;
commandList.UnsetRenderTargets();
deviceContext.SwapDeviceContextState(oldContextState, out _);
commandList.SetRenderTarget(currentDepthStencil, currentRenderTarget);
// Doesn't work - why?
//var renderTargets = deviceContext.OutputMerger.GetRenderTargets(8, out var depthStencilView);
//deviceContext.OutputMerger.ResetTargets();
//deviceContext.SwapDeviceContextState(oldContextState, out _);
//deviceContext.OutputMerger.SetTargets(depthStencilView, renderTargets);
}
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/ComputeEffect/IComputeEffectDispatcher.cs<|end_filename|>
using Stride.Core.Mathematics;
using Stride.Rendering;
namespace VL.Stride.Rendering.ComputeEffect
{
/// <summary>
/// A compute effect dispatcher is used by the compute effects to implement the shader dispatch (direct or indirect).
/// </summary>
public interface IComputeEffectDispatcher
{
/// <summary>
/// Updates the parameter collection of the shader.
/// </summary>
/// <param name="parameters">The parameter collection of the shader.</param>
/// <param name="threadGroupSize">The thread group size as defined by the shader in the [numthreads(X, Y, Z)] attribute.</param>
void UpdateParameters(ParameterCollection parameters, Int3 threadGroupSize);
/// <summary>
/// Dispatches the shader.
/// </summary>
/// <remarks>
/// NOTE(review): implementations presumably issue the Dispatch call on the
/// command list of the given context - confirm against DirectComputeEffectDispatcher.
/// </remarks>
/// <param name="context">The render draw context.</param>
void Dispatch(RenderDrawContext context);
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Sources/Mandelbrot_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
public static partial class Mandelbrot_TextureFXKeys
{
// Parameter keys for the Mandelbrot_TextureFX shader. The default values
// below were emitted by the generator and mirror the ones declared in the
// associated .sdsl source - do not edit by hand (see file header).
public static readonly ValueParameterKey<Matrix> Transform = ParameterKeys.NewValue<Matrix>();
public static readonly ValueParameterKey<Color4> BackgroundColor = ParameterKeys.NewValue<Color4>(new Color4(0.0f,0.0f,0.0f,1.0f));
public static readonly ValueParameterKey<Color4> Color = ParameterKeys.NewValue<Color4>(new Color4(1.0f,1.0f,1.0f,1.0f));
public static readonly ValueParameterKey<int> Iterations = ParameterKeys.NewValue<int>(16);
public static readonly ValueParameterKey<Vector2> Control = ParameterKeys.NewValue<Vector2>(new Vector2(0.35f,0.35f));
public static readonly ValueParameterKey<float> Zoom = ParameterKeys.NewValue<float>(-0.5f);
public static readonly ValueParameterKey<float> Morph = ParameterKeys.NewValue<float>(1.0f);
public static readonly ValueParameterKey<bool> Aspect = ParameterKeys.NewValue<bool>(true);
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Shaders/ShaderFX/GenericComputeNode.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Reactive.Disposables;
using Stride.Rendering;
using Stride.Rendering.Materials;
using Stride.Shaders;
namespace VL.Stride.Shaders.ShaderFX
{
/// <summary>
/// A compute value whose shader class is produced by a user supplied factory
/// and which can compose further nodes into named slots of that class.
/// </summary>
public class GenericComputeNode<TOut> : ComputeValue<TOut>
{
    ShaderClassCode shaderClass;

    public GenericComputeNode(
        Func<ShaderGeneratorContext, MaterialComputeColorKeys, ShaderClassCode> getShaderSource,
        IEnumerable<KeyValuePair<string, IComputeNode>> inputs)
    {
        GetShaderSource = getShaderSource;
        ParameterCollections = ImmutableArray<ParameterCollection>.Empty;
        // Keep only compositions that have both a usable key and a node.
        Inputs = inputs?
            .Where(entry => !string.IsNullOrWhiteSpace(entry.Key) && entry.Value != null)
            .ToList();
    }

    public Func<ShaderGeneratorContext, MaterialComputeColorKeys, ShaderClassCode> GetShaderSource { get; }

    public ImmutableArray<ParameterCollection> ParameterCollections { get; private set; }

    public IEnumerable<KeyValuePair<string, IComputeNode>> Inputs { get; }

    public ShaderClassCode ShaderClass => shaderClass;

    public override ShaderSource GenerateShaderSource(ShaderGeneratorContext context, MaterialComputeColorKeys baseKeys)
    {
        shaderClass = GetShaderSource(context, baseKeys);

        // TODO: Remove me - didn't know that this was already deprecated
        //store the parameters - accessed by various patches (look for InputParameterManager)
        var currentParameters = context.Parameters;
        if (context.TryGetSubscriptions(out var subscriptions))
        {
            ParameterCollections = ParameterCollections.Add(currentParameters);
            subscriptions.Add(Disposable.Create(() => ParameterCollections = ParameterCollections.Remove(currentParameters)));
        }

        // Without compositions the class source can be returned directly.
        if (Inputs == null || !Inputs.Any())
            return shaderClass;

        var mixin = shaderClass.CreateMixin();
        foreach (var entry in Inputs)
            mixin.AddComposition(entry.Value, entry.Key, context, baseKeys);
        return mixin;
    }

    public override IEnumerable<IComputeNode> GetChildren(object context = null)
    {
        if (Inputs == null)
            yield break;
        foreach (var entry in Inputs)
        {
            if (entry.Value != null)
                yield return entry.Value;
        }
    }

    public override string ToString() => shaderClass?.ToString() ?? GetType().ToString();
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Renderers/CustomRenderer.cs<|end_filename|>
using Stride.Rendering;
using Stride.Rendering.Compositing;
using System;
using VL.Core;
namespace VL.Stride.Rendering
{
/// <summary>
/// A renderer which can provide <see cref="RendererBase.Draw"/> implementation with a stateful region.
/// </summary>
public sealed class CustomRenderer<TState> : SceneRendererBase
    where TState : class
{
    // Factory invoked once to create the state on first draw.
    private Func<TState> CreateFunc;
    // Per-frame update; receives the current state, returns the next one.
    private Func<TState, RenderDrawContext, TState> UpdateFunc;
    private TState State;

    /// <summary>Sets the state factory and the per-frame update function.</summary>
    public void Update(Func<TState> create, Func<TState, RenderDrawContext, TState> update)
    {
        CreateFunc = create;
        UpdateFunc = update;
    }

    protected override void DrawCore(RenderContext context, RenderDrawContext drawContext)
    {
        // Lazily create the state on first draw.
        var state = State ?? CreateFunc?.Invoke();
        // Fixed: when UpdateFunc was null the original reset State to null,
        // re-creating (and leaking, if disposable) a fresh state every frame.
        State = UpdateFunc != null ? UpdateFunc(state, drawContext) : state;
    }

    protected override void Destroy()
    {
        // Dispose the state if it owns resources.
        if (State is IDisposable disposable)
            disposable.Dispose();
        base.Destroy();
    }
}
//public sealed class ProxyRenderer<TState> : ISceneRenderer
//{
// public ISceneRenderer Input { get; private set; }
// public ProxyRenderer(ISceneRenderer input)
// {
// Input = input;
// }
// public bool Enabled
// {
// get => Input?.Enabled ?? false;
// set
// {
// if (Input != null)
// Input.Enabled = value;
// }
// }
// public bool Initialized => Input?.Initialized ?? false;
// public void Collect(RenderContext context)
// {
// Input?.Collect(context);
// }
// public void Dispose()
// {
// Input?.Dispose();
// }
// public void Draw(RenderDrawContext context)
// {
// Input?.Draw(context);
// }
// public void Initialize(RenderContext context)
// {
// Input?.Initialize(context);
// }
//}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Scripts/ProfilerInteraction.cs<|end_filename|>
using System;
using System.Threading.Tasks;
using Stride.Core.Mathematics;
using Stride.Core.Diagnostics;
using Stride.Input;
using Stride.Engine;
using Stride.Profiling;
using Stride.Core;
using VL.Core;
namespace VL.Stride
{
public class ProfilerInteraction
{
    // Digit keys D1..D5 map directly to result pages 1..5.
    private static readonly Keys[] PageKeys = { Keys.D1, Keys.D2, Keys.D3, Keys.D4, Keys.D5 };

    public bool Enabled;

    /// <summary>
    /// The time between two refreshes of the profiling information in milliseconds.
    /// </summary>
    [Display(2, "Refresh interval (ms)")]
    public float RefreshTime { get; set; } = 500;

    /// <summary>
    /// Gets or set the sorting mode of the profiling entries
    /// </summary>
    [Display(1, "Sort by")]
    public GameProfilingSorting SortingMode { get; set; } = GameProfilingSorting.ByTime;

    /// <summary>
    /// Gets or sets the type of the profiling to display: CPU or GPU
    /// </summary>
    [Display(0, "Filter")]
    public GameProfilingResults FilteringMode { get; set; } = GameProfilingResults.Fps;

    /// <summary>
    /// Gets or sets the current profiling result page to display.
    /// </summary>
    [Display(3, "Display page")]
    public int ResultPage { get; set; } = 1;

    public IKeyboardDevice Keyboard { get; set; }

    /// <summary>
    /// Polls the keyboard and applies the profiler shortcuts:
    /// Ctrl+Shift+P toggles, F5/F6 cycle modes, F7/F8 and D1-D5 change the
    /// page, plus/minus adjust the refresh interval.
    /// </summary>
    public void Update()
    {
        var keyboard = Keyboard;
        if (keyboard == null)
            return;

        // Ctrl+Shift+P toggles the profiler on/off.
        if (keyboard.IsKeyDown(Keys.LeftShift) && keyboard.IsKeyDown(Keys.LeftCtrl) && keyboard.IsKeyReleased(Keys.P))
            Enabled = !Enabled;

        if (!Enabled)
            return;

        // F5 cycles through the filtering modes.
        if (keyboard.IsKeyPressed(Keys.F5))
            FilteringMode = (GameProfilingResults)(((int)FilteringMode + 1) % Enum.GetValues(typeof(GameProfilingResults)).Length);

        // F6 cycles through the sorting modes.
        if (keyboard.IsKeyPressed(Keys.F6))
            SortingMode = (GameProfilingSorting)(((int)SortingMode + 1) % Enum.GetValues(typeof(GameProfilingSorting)).Length);

        // F7/F8 page backwards/forwards (never below page 1).
        if (keyboard.IsKeyPressed(Keys.F7))
            ResultPage = Math.Max(1, ResultPage - 1);
        else if (keyboard.IsKeyPressed(Keys.F8))
            ++ResultPage;

        // D1..D5 jump straight to the corresponding page.
        for (var index = 0; index < PageKeys.Length; index++)
        {
            if (keyboard.IsKeyPressed(PageKeys[index]))
            {
                ResultPage = index + 1;
                break;
            }
        }

        // Minus/plus double or halve the refresh interval within [100, 10000] ms.
        if (keyboard.IsKeyPressed(Keys.Subtract) || keyboard.IsKeyPressed(Keys.OemMinus))
            RefreshTime = Math.Min(RefreshTime * 2, 10000);
        else if (keyboard.IsKeyPressed(Keys.Add) || keyboard.IsKeyPressed(Keys.OemPlus))
            RefreshTime = Math.Max(RefreshTime / 2, 100);
    }
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Models/MeshExtensions.cs<|end_filename|>
using System;
using System.Reflection;
using Stride.Core.Mathematics;
using Stride.Rendering;
namespace VL.Stride.Rendering
{
public static class MeshExtensions
{
    /// <summary>
    /// Creates a copy of the mesh that owns its own parameter collection,
    /// so parameter changes don't affect the original mesh.
    /// </summary>
    public static Mesh CloneWithNewParameters(this Mesh mesh)
    {
        return new Mesh(mesh).ReplaceParameters();
    }

    /// <summary>
    /// Replaces the mesh's parameter collection with a copy of itself.
    /// Uses reflection because the Parameters setter is not publicly accessible.
    /// </summary>
    public static Mesh ReplaceParameters(this Mesh mesh)
    {
        var parametersProperty = typeof(Mesh).GetProperty(nameof(Mesh.Parameters), BindingFlags.Public | BindingFlags.Instance);
        parametersProperty.SetValue(mesh, new ParameterCollection(mesh.Parameters));
        return mesh;
    }
}
<|start_filename|>packages/VL.Stride.Runtime/src/Shaders/ShaderGraph.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using VL.Stride.Effects.ComputeFX;
using VL.Stride.Effects.ShaderFX;
using VL.Stride.Effects.TextureFX;
using VL.Stride.Rendering;
using Stride.Core;
using Stride.Core.Mathematics;
using Stride.Graphics;
using Stride.Rendering;
using Stride.Rendering.Images;
using Stride.Rendering.Materials;
using Stride.Rendering.Materials.ComputeColors;
using Stride.Shaders;
using Buffer = Stride.Graphics.Buffer;
using static VL.Stride.Shaders.ShaderFX.ShaderFXUtils;
using System.Reactive.Disposables;
using VL.Stride.Rendering.ComputeEffect;
using VL.Stride.Effects;
namespace VL.Stride.Shaders.ShaderFX
{
public static class ShaderGraph
{
/// <summary>
/// Gives access to the <see cref="CompositeDisposable"/> whose lifetime is tied to the graph being built.
/// See https://github.com/vvvv/VL.Stride/pull/451 for details.
/// </summary>
internal static readonly PropertyKey<CompositeDisposable> GraphSubscriptions = new PropertyKey<CompositeDisposable>(nameof(GraphSubscriptions), typeof(ShaderGraph));
// Builds a small sample/test ComputeFX graph over the given buffer.
// NOTE(review): value1 is created but never used (getItem feeds var1
// instead) - looks like leftover code; confirm before removing.
public static IComputeVoid SomeComputeFXGraph(Buffer buffer)
{
var bufferDecl = new DeclBuffer();
bufferDecl.Resource = buffer;
var getItem = new GetItemBuffer<float>(bufferDecl, new ComputeValue<uint>());
var value1 = new ComputeValue<float>();
var value2 = new ComputeValue<float>();
var value3 = new ComputeValue<float>();
var var1 = DeclAndSetVar(getItem);
var var2 = DeclAndSetVar(value2);
var var3 = DeclAndSetVar(value3);
var first = BuildPlus(var1, var2);
var second = BuildPlus(first, var1);
var third = BuildPlus(second, var3);
var finalOrder = BuildFinalShaderGraph(third);
return finalOrder;
}
// Flattens the graph rooted at <paramref name="root"/> into a linear
// ComputeOrder of its IComputeVoid statements, in post-order (dependencies
// before dependents), skipping any nodes listed in excludes.
public static IComputeVoid BuildFinalShaderGraph(IComputeNode root, IEnumerable<IComputeNode> excludes = null)
{
if (root is null)
return new ComputeOrder();
var tree = root is IComputeVoid ? new[] { root } : root.GetChildren();
var visited = excludes != null ? new HashSet<IComputeNode>(excludes) : new HashSet<IComputeNode>();
var flat = tree.TraversePostOrder(n => n.GetChildren(), visited).ToList();
var statements = flat.OfType<IComputeVoid>();
var finalOrder = new ComputeOrder(statements);
return finalOrder;
}
// Builds a "PlusResult" variable assigned the sum of the two given variables.
public static SetVar<float> BuildPlus(SetVar<float> var1, SetVar<float> var2)
{
var getter1 = ShaderFXUtils.GetVarValue(var1);
var getter2 = ShaderFXUtils.GetVarValue(var2);
var plus = new BinaryOperation<float>("Plus", getter1, getter2);
return DeclAndSetVar("PlusResult", plus);
}
// Lazy post-order traversal: yields children before their parent and visits
// each node at most once (visited doubles as the exclusion set).
public static IEnumerable<T> TraversePostOrder<T>(this IEnumerable<T> e, Func<T, IEnumerable<T>> f, HashSet<T> visited) where T : IComputeNode
{
foreach (var item in e)
{
if (!visited.Contains(item))
{
visited.Add(item);
var children = f(item).TraversePostOrder(f, visited);
foreach (var child in children)
yield return child;
yield return item;
}
}
}
// Recursive flatten: all descendants first, then the elements themselves.
// NOTE(review): unlike TraversePostOrder this does not deduplicate - shared
// nodes are yielded once per reference.
public static IEnumerable<T> Flatten<T>(this IEnumerable<T> e, Func<T, IEnumerable<T>> f)
{
return e.SelectMany(c => f(c).Flatten(f)).Concat(e);
}
// Iterative pre-order (depth-first) expansion using an explicit stack of
// enumerators; disposes all enumerators even when iteration stops early.
public static IEnumerable<T> ExpandPreOrder<T>(this IEnumerable<T> source, Func<T, IEnumerable<T>> elementSelector)
{
var stack = new Stack<IEnumerator<T>>();
var e = source.GetEnumerator();
try
{
while (true)
{
while (e.MoveNext())
{
var item = e.Current;
yield return item;
var elements = elementSelector(item);
if (elements == null) continue;
stack.Push(e);
e = elements.GetEnumerator();
}
if (stack.Count == 0) break;
e.Dispose();
e = stack.Pop();
}
}
finally
{
e.Dispose();
while (stack.Count != 0) stack.Pop().Dispose();
}
}
// Wraps the graph into a "ComputeFXGraph" compute shader, composing the
// generated source into the shader's "Root" slot.
public static VLComputeEffectShader ComposeComputeShader(GraphicsDevice graphicsDevice, IServiceRegistry services, IComputeVoid root)
{
var computeEffect = new VLComputeEffectShader(RenderContext.GetShared(services), "ComputeFXGraph");
if (root != null)
{
var context = NewShaderGeneratorContext(graphicsDevice, computeEffect.Parameters, computeEffect.Subscriptions);
var key = new MaterialComputeColorKeys(MaterialKeys.DiffuseMap, MaterialKeys.DiffuseValue, Color.White);
var shaderSource = root.GenerateShaderSource(context, key);
var mixin = new ShaderClassSource("ComputeFXGraph").CreateMixin();
mixin.AddComposition("Root", shaderSource);
computeEffect.Parameters.Set(EffectNodeBaseKeys.EffectNodeBaseShader, mixin);
}
return computeEffect;
}
// Wraps the color graph into a TextureFX effect, composing it into the
// "Root" slot of the TextureFXGraph shader.
public static TextureFXEffect ComposeShader(GraphicsDevice graphicsDevice, IComputeValue<Vector4> root)
{
var effectImageShader = new TextureFXEffect("TextureFXGraphEffect");
if (root != null)
{
var context = NewShaderGeneratorContext(graphicsDevice, effectImageShader.Parameters, effectImageShader.Subscriptions);
var key = new MaterialComputeColorKeys(MaterialKeys.DiffuseMap, MaterialKeys.DiffuseValue, Color.White);
var shaderSource = root.GenerateShaderSource(context, key);
var mixin = new ShaderMixinSource();
mixin.AddComposition("Root", shaderSource);
effectImageShader.Parameters.Set(TextureFXGraphKeys.TextureFXRoot, mixin);
}
return effectImageShader;
}
// Builds a draw effect from separate vertex and pixel color graphs; both
// must be provided, otherwise the effect is returned without a composed root.
public static DynamicEffectInstance ComposeDrawShader(GraphicsDevice graphicsDevice, IComputeValue<Vector4> vertexRoot, IComputeValue<Vector4> pixelRoot)
{
var effectImageShader = new DynamicDrawEffectInstance("ShaderFXGraphEffect");
if (vertexRoot != null && pixelRoot != null)
{
var context = NewShaderGeneratorContext(graphicsDevice, effectImageShader.Parameters, effectImageShader.Subscriptions);
var key = new MaterialComputeColorKeys(MaterialKeys.DiffuseMap, MaterialKeys.DiffuseValue, Color.White);
var vertexShaderSource = vertexRoot.GenerateShaderSource(context, key);
var pixelShaderSource = pixelRoot.GenerateShaderSource(context, key);
var mixin = new ShaderMixinSource();
mixin.AddComposition("VertexRoot", vertexShaderSource);
mixin.AddComposition("PixelRoot", pixelShaderSource);
effectImageShader.Parameters.Set(ShaderFXGraphKeys.ShaderFXRoot, mixin);
}
return effectImageShader;
}
// Effect instance that owns the subscriptions collected while its shader
// graph was generated; disposing it tears them down.
class DynamicDrawEffectInstance : DynamicEffectInstance
{
internal readonly CompositeDisposable Subscriptions = new CompositeDisposable();
public DynamicDrawEffectInstance(string effectName, ParameterCollection parameters = null) : base(effectName, parameters)
{
}
protected override void Destroy()
{
Subscriptions.Dispose();
base.Destroy();
}
}
// Generates the shader source for a node graph.
// NOTE(review): any exception during generation is swallowed and null is
// returned - callers must handle null; consider at least logging here.
public static ShaderSource ComposeShaderSource(GraphicsDevice graphicsDevice, IComputeNode root, CompositeDisposable subscriptions)
{
if (root != null)
{
try
{
var context = NewShaderGeneratorContext(graphicsDevice, new ParameterCollection(), subscriptions);
var key = new MaterialComputeColorKeys(MaterialKeys.DiffuseMap, MaterialKeys.DiffuseValue, Color.White);
return root.GenerateShaderSource(context, key);
}
catch (Exception)
{
}
}
return null;
}
// Creates a generator context carrying the given parameters and the
// subscription collection (retrievable via TryGetSubscriptions).
internal static ShaderGeneratorContext NewShaderGeneratorContext(GraphicsDevice graphicsDevice, ParameterCollection parameters, CompositeDisposable subscriptions)
{
var context = new ShaderGeneratorContext(graphicsDevice)
{
Parameters = parameters
};
context.Tags.Set(GraphSubscriptions, subscriptions);
return context;
}
// Retrieves the subscription collection stored by NewShaderGeneratorContext.
internal static bool TryGetSubscriptions(this ShaderGeneratorContext context, out CompositeDisposable subscriptions)
{
return context.Tags.TryGetValue(GraphSubscriptions, out subscriptions);
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/RenderingNodes.cs<|end_filename|>
using Stride.Core.Mathematics;
using Stride.Graphics.GeometricPrimitives;
using Stride.Rendering;
using Stride.Rendering.ProceduralModels;
using System;
using System.Collections.Generic;
using System.Reactive.Disposables;
using VL.Core;
using VL.Lang.Symbols;
using VL.Lib.Basics.Resources;
using VL.Lib.Mathematics;
using VL.Stride.Rendering.ComputeEffect;
namespace VL.Stride.Rendering
{
using Model = global::Stride.Rendering.Model;
static class RenderingNodes
{
public static IEnumerable<IVLNodeDescription> GetNodeDescriptions(IVLNodeDescriptionFactory factory)
{
var renderingCategory = "Stride.Rendering";
var renderingAdvancedCategory = $"{renderingCategory}.Advanced";
yield return NewInputRenderBaseNode<WithRenderTargetAndViewPort>(factory, category: renderingCategory)
.AddInput(nameof(WithRenderTargetAndViewPort.RenderTarget), x => x.RenderTarget, (x, v) => x.RenderTarget = v)
.AddInput(nameof(WithRenderTargetAndViewPort.DepthBuffer), x => x.DepthBuffer, (x, v) => x.DepthBuffer = v)
;
yield return NewInputRenderBaseNode<RenderContextModifierRenderer>(factory, category: renderingCategory)
.AddInput(nameof(RenderContextModifierRenderer.Modifier), x => x.Modifier, (x, v) => x.Modifier = v)
;
yield return factory.NewNode<ParentTransformationModifier>(category: renderingCategory, copyOnWrite: false)
.AddInput(nameof(ParentTransformationModifier.Transformation), x => x.Transformation, (x, v) => x.Transformation = v)
.AddInput(nameof(ParentTransformationModifier.ExistingTransformUsage), x => x.ExistingTransformUsage, (x, v) => x.ExistingTransformUsage = v)
;
yield return NewInputRenderBaseNode<WithinCommonSpace>(factory, category: renderingAdvancedCategory)
.AddInput(nameof(WithinCommonSpace.CommonScreenSpace), x => x.CommonScreenSpace, (x, v) => x.CommonScreenSpace = v, CommonSpace.DIPTopLeft)
;
yield return NewInputRenderBaseNode<WithinPhysicalScreenSpace>(factory, category: renderingAdvancedCategory)
.AddInput(nameof(WithinPhysicalScreenSpace.Units), x => x.Units, (x, v) => x.Units = v, ScreenSpaceUnits.DIP)
.AddInput(nameof(WithinPhysicalScreenSpace.Anchor), x => x.Anchor, (x, v) => x.Anchor = v, Lib.Mathematics.RectangleAnchor.Center)
.AddInput(nameof(WithinPhysicalScreenSpace.Offset), x => x.Offset, (x, v) => x.Offset = v)
.AddInput(nameof(WithinPhysicalScreenSpace.Scale), x => x.Scale, (x, v) => x.Scale = v, 1f)
.AddInput(nameof(WithinPhysicalScreenSpace.IgnoreExistingView), x => x.IgnoreExistingView, (x, v) => x.IgnoreExistingView = v, true)
.AddInput(nameof(WithinPhysicalScreenSpace.IgnoreExistingProjection), x => x.IgnoreExistingProjection, (x, v) => x.IgnoreExistingProjection = v, true)
;
yield return NewInputRenderBaseNode<WithinVirtualScreenSpace>(factory, category: renderingAdvancedCategory)
.AddInput(nameof(WithinVirtualScreenSpace.Bounds), x => x.Bounds, (x, v) => x.Bounds = v, new RectangleF(-0.5f, -0.5f, 1, 1))
.AddInput(nameof(WithinVirtualScreenSpace.AspectRatioCorrectionMode), x => x.AspectRatioCorrectionMode, (x, v) => x.AspectRatioCorrectionMode = v, SizeMode.FitOut)
.AddInput(nameof(WithinVirtualScreenSpace.Anchor), x => x.Anchor, (x, v) => x.Anchor = v, Lib.Mathematics.RectangleAnchor.Center)
.AddInput(nameof(WithinVirtualScreenSpace.IgnoreExistingView), x => x.IgnoreExistingView, (x, v) => x.IgnoreExistingView = v, true)
.AddInput(nameof(WithinVirtualScreenSpace.IgnoreExistingProjection), x => x.IgnoreExistingProjection, (x, v) => x.IgnoreExistingProjection = v, true)
;
yield return NewInputRenderBaseNode<WithRenderView>(factory, category: renderingAdvancedCategory)
.AddInput(nameof(WithRenderView.RenderView), x => x.RenderView, (x, v) => x.RenderView = v)
.AddInput(nameof(WithRenderView.AspectRatioCorrectionMode), x => x.AspectRatioCorrectionMode, (x, v) => x.AspectRatioCorrectionMode = v)
;
yield return NewInputRenderBaseNode<WithWindowInputSource>(factory, category: renderingAdvancedCategory)
.AddInput(nameof(WithWindowInputSource.InputSource), x => x.InputSource, (x, v) => x.InputSource = v)
;
yield return factory.NewNode<GetWindowInputSource>(name: nameof(GetWindowInputSource), category: renderingAdvancedCategory, copyOnWrite: false)
.AddInput(nameof(RendererBase.Input), x => x.Input, (x, v) => x.Input = v)
.AddOutput(nameof(GetWindowInputSource.InputSource), x => x.InputSource)
;
// Compute effect dispatchers
var dispatchersCategory = $"{renderingAdvancedCategory}.ComputeEffect";
yield return factory.NewNode<DirectComputeEffectDispatcher>(name: "DirectDispatcher", category: renderingAdvancedCategory, copyOnWrite: false, hasStateOutput: false)
.AddCachedInput(nameof(DirectComputeEffectDispatcher.ThreadGroupCount), x => x.ThreadGroupCount, (x, v) => x.ThreadGroupCount = v, Int3.One)
.AddOutput<IComputeEffectDispatcher>("Output", x => x);
yield return factory.NewNode<IndirectComputeEffectDispatcher>(name: "IndirectDispatcher", category: renderingAdvancedCategory, copyOnWrite: false, hasStateOutput: false)
.AddCachedInput(nameof(IndirectComputeEffectDispatcher.ArgumentBuffer), x => x.ArgumentBuffer, (x, v) => x.ArgumentBuffer = v)
.AddCachedInput(nameof(IndirectComputeEffectDispatcher.OffsetInBytes), x => x.OffsetInBytes, (x, v) => x.OffsetInBytes = v)
.AddOutput<IComputeEffectDispatcher>("Output", x => x);
yield return factory.NewNode<CustomComputeEffectDispatcher>(name: "CustomDispatcher", category: renderingAdvancedCategory, copyOnWrite: false, hasStateOutput: false)
.AddCachedInput(nameof(CustomComputeEffectDispatcher.ThreadGroupCountsSelector), x => x.ThreadGroupCountsSelector, (x, v) => x.ThreadGroupCountsSelector = v)
.AddOutput<IComputeEffectDispatcher>("Output", x => x);
//yield return factory.NewNode<RenderView>(name: "RenderView", category: renderingAdvancedCategory, copyOnWrite: false)
// .AddInput(nameof(RenderView.View), x => x.View, (x, v) => x.View = v)
// .AddInput(nameof(RenderView.Projection), x => x.Projection, (x, v) => x.Projection = v)
// .AddInput(nameof(RenderView.NearClipPlane), x => x.NearClipPlane, (x, v) => x.NearClipPlane = v)
// .AddInput(nameof(RenderView.FarClipPlane), x => x.FarClipPlane, (x, v) => x.FarClipPlane = v)
// // TODO: add more
// ;
// Meshes
yield return factory.NewMeshNode((CapsuleProceduralModel x) => (x.Length, x.Radius, x.Tessellation))
.AddCachedInput(nameof(CapsuleProceduralModel.Length), x => x.Length, (x, v) => x.Length = v, 0.5f)
.AddCachedInput(nameof(CapsuleProceduralModel.Radius), x => x.Radius, (x, v) => x.Radius = v, 0.5f)
.AddCachedInput(nameof(CapsuleProceduralModel.Tessellation), x => x.Tessellation, (x, v) => x.Tessellation = v, 16)
.AddDefaultPins();
yield return factory.NewMeshNode((ConeProceduralModel x) => (x.Height, x.Radius, x.Tessellation))
.AddCachedInput(nameof(ConeProceduralModel.Height), x => x.Height, (x, v) => x.Height = v, 1f)
.AddCachedInput(nameof(ConeProceduralModel.Radius), x => x.Radius, (x, v) => x.Radius = v, 0.5f)
.AddCachedInput(nameof(ConeProceduralModel.Tessellation), x => x.Tessellation, (x, v) => x.Tessellation = v, 16)
.AddDefaultPins();
yield return factory.NewMeshNode((CubeProceduralModel x) => x.Size, name: "BoxMesh")
.AddCachedInput(nameof(CubeProceduralModel.Size), x => x.Size, (x, v) => x.Size = v, Vector3.One)
.AddDefaultPins();
yield return factory.NewMeshNode((CylinderProceduralModel x) => (x.Height, x.Radius, x.Tessellation))
.AddCachedInput(nameof(CylinderProceduralModel.Height), x => x.Height, (x, v) => x.Height = v, 1f)
.AddCachedInput(nameof(CylinderProceduralModel.Radius), x => x.Radius, (x, v) => x.Radius = v, 0.5f)
.AddCachedInput(nameof(CylinderProceduralModel.Tessellation), x => x.Tessellation, (x, v) => x.Tessellation = v, 16)
.AddDefaultPins();
yield return factory.NewMeshNode((GeoSphereProceduralModel x) => (x.Radius, x.Tessellation))
.AddCachedInput(nameof(GeoSphereProceduralModel.Radius), x => x.Radius, (x, v) => x.Radius = v, 0.5f)
.AddCachedInput(nameof(GeoSphereProceduralModel.Tessellation), x => x.Tessellation, (x, v) => x.Tessellation = VLMath.Clamp(v, 1, 5), 3)
.AddDefaultPins();
yield return factory.NewMeshNode((PlaneProceduralModel x) => (x.Size, x.Tessellation, x.Normal, x.GenerateBackFace))
.AddCachedInput(nameof(PlaneProceduralModel.Size), x => x.Size, (x, v) => x.Size = v, Vector2.One)
.AddCachedInput(nameof(PlaneProceduralModel.Tessellation), x => x.Tessellation, (x, v) => x.Tessellation = v, Int2.One)
.AddCachedInput(nameof(PlaneProceduralModel.Normal), x => x.Normal, (x, v) => x.Normal = v, NormalDirection.UpZ)
.AddCachedInput(nameof(PlaneProceduralModel.GenerateBackFace), x => x.GenerateBackFace, (x, v) => x.GenerateBackFace = v, true)
.AddDefaultPins();
yield return factory.NewMeshNode((SphereProceduralModel x) => (x.Radius, x.Tessellation))
.AddCachedInput(nameof(SphereProceduralModel.Radius), x => x.Radius, (x, v) => x.Radius = v, 0.5f)
.AddCachedInput(nameof(SphereProceduralModel.Tessellation), x => x.Tessellation, (x, v) => x.Tessellation = v, 16)
.AddDefaultPins();
yield return factory.NewMeshNode((TeapotProceduralModel x) => (x.Size, x.Tessellation))
.AddCachedInput(nameof(TeapotProceduralModel.Size), x => x.Size, (x, v) => x.Size = v, 1f)
.AddCachedInput(nameof(TeapotProceduralModel.Tessellation), x => x.Tessellation, (x, v) => x.Tessellation = v, 16)
.AddDefaultPins();
yield return factory.NewMeshNode((TorusProceduralModel x) => (x.Radius, x.Thickness, x.Tessellation))
.AddCachedInput(nameof(TorusProceduralModel.Radius), x => x.Radius, (x, v) => x.Radius = v, 0.5f)
.AddCachedInput(nameof(TorusProceduralModel.Thickness), x => x.Thickness, (x, v) => x.Thickness = v, 0.25f)
.AddCachedInput(nameof(TorusProceduralModel.Tessellation), x => x.Tessellation, (x, v) => x.Tessellation = v, 16)
.AddDefaultPins();
// TextureFX
yield return factory.NewNode(c => new MipMapGenerator(c), name: "MipMap", category: "Stride.Textures.Experimental.Utils", copyOnWrite: false, hasStateOutput: false)
.AddInput("Input", x => x.InputTexture, (x, v) => x.InputTexture = v)
.AddInput(nameof(MipMapGenerator.MaxMipMapCount), x => x.MaxMipMapCount, (x, v) => x.MaxMipMapCount = v)
.AddOutput("Output", x => { x.ScheduleForRendering(); return x.OutputTexture; });
}
/// <summary>
/// Creates a node description for a renderer and exposes the standard "Input" pin
/// through which the upstream renderer is forwarded.
/// </summary>
static CustomNodeDesc<TInputRenderBase> NewInputRenderBaseNode<TInputRenderBase>(IVLNodeDescriptionFactory factory, string category, string name = null)
    where TInputRenderBase : RendererBase, new()
{
    var nodeDesc = factory.NewNode<TInputRenderBase>(name: name, category: category, copyOnWrite: false);
    // All render-base nodes share the same "Input" pin wiring.
    return nodeDesc.AddInput(nameof(RendererBase.Input), x => x.Input, (x, v) => x.Input = v);
}
/// <summary>
/// Creates a node description for a procedural mesh. The generated mesh is pooled per
/// application and keyed on all relevant input values (common pins plus the node
/// specific ones supplied via <paramref name="getKey"/>), so identical settings share one mesh.
/// </summary>
static CustomNodeDesc<TProceduralModel> NewMeshNode<TProceduralModel, TKey>(this IVLNodeDescriptionFactory factory, Func<TProceduralModel, TKey> getKey, string name = null)
where TProceduralModel : PrimitiveProceduralModelBase, new()
{
return new CustomNodeDesc<TProceduralModel>(factory,
name: name ?? typeof(TProceduralModel).Name.Replace("ProceduralModel", "Mesh"),
category: "Stride.Models.Meshes",
copyOnWrite: false,
hasStateOutput: false,
ctor: nodeContext =>
{
var generator = new TProceduralModel();
return (generator, default);
})
.AddCachedOutput("Output", nodeContext =>
{
// Tracks the currently leased pool handle; assigning a new handle releases the previous one.
var disposable = new SerialDisposable();
Func<TProceduralModel, Mesh> getter = generator =>
{
// Pool key: model type + common pins + node specific pins.
var key = (typeof(TProceduralModel), generator.Scale, generator.UvScale, generator.LocalOffset, generator.NumberOfTextureCoordinates, getKey(generator));
var provider = ResourceProvider.NewPooledPerApp(nodeContext, key, _ =>
{
return nodeContext.GetGameProvider()
.Bind(game =>
{
var model = new Model();
generator.Generate(game.Services, model);
// Dispose the GPU buffers once the last user of the pooled mesh is gone.
return ResourceProvider.Return(model.Meshes[0], m =>
{
if (m.Draw != null)
{
m.Draw.IndexBuffer?.Buffer?.Dispose();
foreach (var b in m.Draw.VertexBuffers)
b.Buffer?.Dispose();
}
});
});
});
var meshHandle = provider.GetHandle();
disposable.Disposable = meshHandle;
return meshHandle.Resource;
};
return (getter, disposable);
});
}
/// <summary>
/// Adds the input pins shared by all procedural mesh nodes (scale, UV scale,
/// local offset and number of texture coordinates), in a fixed order.
/// </summary>
static CustomNodeDesc<TProceduralModel> AddDefaultPins<TProceduralModel>(this CustomNodeDesc<TProceduralModel> node)
    where TProceduralModel : PrimitiveProceduralModelBase, new()
{
    var desc = node.AddCachedInput(nameof(PrimitiveProceduralModelBase.Scale), x => x.Scale, (x, v) => x.Scale = v, Vector3.One);
    desc = desc.AddCachedInput(nameof(PrimitiveProceduralModelBase.UvScale), x => x.UvScale, (x, v) => x.UvScale = v, Vector2.One);
    desc = desc.AddCachedInput(nameof(PrimitiveProceduralModelBase.LocalOffset), x => x.LocalOffset, (x, v) => x.LocalOffset = v, Vector3.Zero);
    return desc.AddCachedInput(nameof(PrimitiveProceduralModelBase.NumberOfTextureCoordinates), x => x.NumberOfTextureCoordinates, (x, v) => x.NumberOfTextureCoordinates = v, 1);
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/EffectShaderNodes.DrawFX.cs<|end_filename|>
using Stride.Core;
using Stride.Graphics;
using Stride.Rendering;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reactive.Linq;
using VL.Core;
using VL.Model;
namespace VL.Stride.Rendering
{
static partial class EffectShaderNodes
{
/// <summary>
/// Builds the node description for a DrawFX shader: compiles the effect once to reflect
/// its parameters, exposes each of them as a pin and outputs a CustomDrawEffect as IEffect.
/// </summary>
static IVLNodeDescription NewDrawEffectShaderNode(this IVLNodeDescriptionFactory factory, NameAndVersion name, string shaderName, ShaderMetadata shaderMetadata, IObservable<object> changes, Func<bool> openEditor, IServiceRegistry serviceRegistry, GraphicsDevice graphicsDevice)
{
return factory.NewNodeDescription(
name: name,
category: "Stride.Rendering.DrawShaders",
tags: shaderMetadata.Tags,
fragmented: true,
invalidated: changes,
init: buildContext =>
{
// Compile once at description build time so we can reflect over the shader's parameters.
var mixinParams = BuildBaseMixin(shaderName, shaderMetadata, graphicsDevice, out var shaderMixinSource);
var (_effect, _messages) = CreateEffectInstance("DrawFXEffect", shaderMetadata, serviceRegistry, graphicsDevice, mixinParams, baseShaderName: shaderName);
var _inputs = new List<IVLPinDescription>();
var _outputs = new List<IVLPinDescription>() { buildContext.Pin("Output", typeof(IEffect)) };
// Extra pin allowing the patch to set parameters programmatically per draw.
var _parameterSetterInput = new PinDescription<Action<ParameterCollection, RenderView, RenderDrawContext>>("Parameter Setter");
var usedNames = new HashSet<string>() { _parameterSetterInput.Name };
var needsWorld = false;
foreach (var parameter in GetParameters(_effect))
{
var key = parameter.Key;
var name = key.Name;
if (WellKnownParameters.PerDrawMap.ContainsKey(name))
{
// Expose World only - all other world dependent parameters we can compute on our own
needsWorld = true;
continue;
}
var typeInPatch = shaderMetadata.GetPinType(key, out var boxedDefaultValue);
shaderMetadata.GetPinDocuAndVisibility(key, out var summary, out var remarks, out var isOptional);
_inputs.Add(new ParameterPinDescription(usedNames, key, parameter.Count, defaultValue: boxedDefaultValue, typeInPatch: typeInPatch) { IsVisible = !isOptional, Summary = summary, Remarks = remarks });
}
if (needsWorld)
_inputs.Add(new ParameterPinDescription(usedNames, TransformationKeys.World));
_inputs.Add(_parameterSetterInput);
return buildContext.NewNode(
inputs: _inputs,
outputs: _outputs,
messages: _messages,
summary: shaderMetadata.Summary,
remarks: shaderMetadata.Remarks,
newNode: nodeBuildContext =>
{
var gameHandle = nodeBuildContext.NodeContext.GetGameHandle();
var game = gameHandle.Resource;
// Each node instance owns its own effect and parameter collection.
var mixinParams = BuildBaseMixin(shaderName, shaderMetadata, graphicsDevice, out var shaderMixinSource);
var effect = new CustomDrawEffect("DrawFXEffect", game.Services, game.GraphicsDevice, mixinParams);
var inputs = new List<IVLPin>();
foreach (var _input in _inputs)
{
// Handle the predefined pins first
if (_input == _parameterSetterInput)
inputs.Add(nodeBuildContext.Input<Action<ParameterCollection, RenderView, RenderDrawContext>>(v => effect.ParameterSetter = v));
else if (_input is ParameterPinDescription parameterPinDescription)
inputs.Add(parameterPinDescription.CreatePin(game.GraphicsDevice, effect.Parameters));
}
var compositionPins = inputs.OfType<ShaderFXPin>().ToList();
var effectOutput = nodeBuildContext.Output<IEffect>(() =>
{
// Re-generate the mixin if any composition input changed since the last read.
UpdateCompositions(compositionPins, graphicsDevice, effect.Parameters, shaderMixinSource, effect.Subscriptions);
return effect;
});
return nodeBuildContext.Node(
inputs: inputs,
outputs: new[] { effectOutput },
update: default,
dispose: () =>
{
effect.Dispose();
gameHandle.Dispose();
});
},
openEditor: openEditor
);
});
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Filters/ColorManipulation/DropShadow_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
/// <summary>
/// Parameter keys (with their default values) for the DropShadow TextureFX shader.
/// NOTE(review): file is auto-generated - comments here will be lost on regeneration.
/// </summary>
public static partial class DropShadow_TextureFXKeys
{
public static readonly ValueParameterKey<float> Blur = ParameterKeys.NewValue<float>(0.5f);
public static readonly ValueParameterKey<float> Offset = ParameterKeys.NewValue<float>(0.05f);
public static readonly ValueParameterKey<float> Angle = ParameterKeys.NewValue<float>(0.9f);
public static readonly ValueParameterKey<Color4> Color = ParameterKeys.NewValue<Color4>(new Color4(0.0f,0.0f,0.0f,1.0f));
public static readonly ValueParameterKey<float> Alpha = ParameterKeys.NewValue<float>(1.0f);
public static readonly ValueParameterKey<float> Extension = ParameterKeys.NewValue<float>(0.0f);
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/EffectShaderNodes.Utils.cs<|end_filename|>
using Stride.Core;
using Stride.Core.Extensions;
using Stride.Core.IO;
using Stride.Core.Mathematics;
using Stride.Core.Serialization.Contents;
using Stride.Graphics;
using Stride.Rendering;
using Stride.Rendering.ComputeEffect;
using Stride.Rendering.Images;
using Stride.Rendering.Materials;
using Stride.Shaders;
using Stride.Shaders.Compiler;
using Stride.Shaders.Parser;
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reactive.Disposables;
using System.Reactive.Linq;
using System.Reflection;
using VL.Core;
using VL.Core.Diagnostics;
using VL.Model;
using VL.Stride.Core;
using VL.Stride.Graphics;
using VL.Stride.Effects;
using VL.Stride.Engine;
using VL.Stride.Rendering.ComputeEffect;
using VL.Stride.Shaders;
using VL.Stride.Shaders.ShaderFX;
namespace VL.Stride.Rendering
{
static partial class EffectShaderNodes
{
/// <summary>
/// True for Stride shader files (.sdsl shader classes and .sdfx effects), any casing.
/// </summary>
static bool IsShaderFile(string file)
{
    var extension = Path.GetExtension(file);
    return string.Equals(extension, ".sdsl", StringComparison.OrdinalIgnoreCase)
        || string.Equals(extension, ".sdfx", StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Derives a node name and version from an effect file name by stripping the suffix
/// and splitting on underscores, e.g. "Levels_ClampBoth_TextureFX" with suffix
/// "_TextureFX" yields name "Levels" and version "ClampBoth".
/// </summary>
static NameAndVersion GetNodeName(string effectName, string suffix)
{
    // "Levels_ClampBoth_TextureFX" -> "Levels_ClampBoth"
    var trimmed = effectName.Substring(0, effectName.Length - suffix.Length);
    var parts = trimmed.Split('_');
    if (parts.Length == 0)
        return new NameAndVersion(trimmed);
    // First segment is the name, the remaining segments form the version string.
    return new NameAndVersion(parts[0], string.Join(" ", parts.Skip(1)));
}
/// <summary>
/// Filters file system watcher events down to relevant shader file changes.
/// Editors (like VS) create lots of temporary files we must ignore.
/// </summary>
static bool IsNewOrDeletedShaderFile(FileSystemEventArgs e)
{
    switch (e.ChangeType)
    {
        case WatcherChangeTypes.Created:
        case WatcherChangeTypes.Deleted:
            return IsShaderFile(e.Name);
        case WatcherChangeTypes.Renamed:
            // Both old and new name must be shader files - ignore weird renames done by VS.
            return e is RenamedEventArgs r && IsShaderFile(e.Name) && IsShaderFile(r.OldName);
        default:
            return false;
    }
}
/// <summary>
/// Creates and initializes a DynamicEffectInstance for the given effect. If the first
/// compilation fails, the effect is compiled a second time through the effect compiler
/// directly in order to collect detailed error messages for the node UI.
/// </summary>
static (DynamicEffectInstance effect, ImmutableArray<Message> messages) CreateEffectInstance(string effectName, ShaderMetadata shaderMetadata, IServiceRegistry serviceRegistry, GraphicsDevice graphicsDevice, ParameterCollection parameters = null, string baseShaderName = null)
{
var messages = ImmutableArray<Message>.Empty;
if (baseShaderName is null)
baseShaderName = effectName;
var effect = new DynamicEffectInstance(effectName, parameters);
if (parameters is null)
parameters = effect.Parameters;
try
{
effect.Initialize(serviceRegistry);
effect.UpdateEffect(graphicsDevice);
}
catch (InvalidOperationException)
{
try
{
// Compile manually to get detailed errors
var compilerParameters = new CompilerParameters() { EffectParameters = effect.EffectCompilerParameters };
// Forward all permutation parameters (e.g. selected compositions) to the compiler.
foreach (var effectParameterKey in parameters.ParameterKeyInfos)
if (effectParameterKey.Key.Type == ParameterKeyType.Permutation)
compilerParameters.SetObject(effectParameterKey.Key, parameters.ObjectValues[effectParameterKey.BindingSlot]);
var compilerResult = serviceRegistry.GetService<EffectSystem>().Compiler.Compile(
shaderSource: GetShaderSource(effectName),
compilerParameters: compilerParameters);
messages = compilerResult.Messages.Select(m => m.ToMessage()).ToImmutableArray();
}
catch (Exception e)
{
messages = messages.Add(new Message(MessageType.Error, $"Shader compiler crashed: {e}"));
}
}
return (effect, messages);
}
/// <summary>
/// Enumerates the effect parameters that should be exposed as pins. Constant buffers,
/// compiler injected paddings, well known per-frame/per-view/texturing parameters and
/// internal ShaderFX inputs are filtered out. Yields nothing if no bytecode is available.
/// </summary>
static IEnumerable<ParameterKeyInfo> GetParameters(DynamicEffectInstance effectInstance)
{
var byteCode = effectInstance.Effect?.Bytecode;
if (byteCode is null)
yield break;
// Resource group names ("Globals", "PerDraw", ...) double as constant buffer names.
var layoutNames = byteCode.Reflection.ResourceBindings.Select(x => x.ResourceGroup ?? "Globals").Distinct().ToList();
var parameters = effectInstance.Parameters;
// Composition parameters (ShaderSource typed) are not part of the layout - add them explicitly.
var compositionParameters = parameters.ParameterKeyInfos.Where(pki => pki.Key.PropertyType == typeof(ShaderSource) && pki.Key.Name != "EffectNodeBase.EffectNodeBaseShader");
foreach (var parameter in parameters.Layout.LayoutParameterKeyInfos.Concat(compositionParameters))
{
var key = parameter.Key;
var name = key.Name;
// Skip constant buffers
if (layoutNames.Contains(name))
continue;
// Skip compiler injected paddings
if (name.Contains("_padding_"))
continue;
// Skip well known parameters
if (WellKnownParameters.PerFrameMap.ContainsKey(name)
|| WellKnownParameters.PerViewMap.ContainsKey(name)
|| WellKnownParameters.TexturingMap.ContainsKey(name))
continue;
// Skip inputs from ShaderFX graph
if (name.StartsWith("ShaderFX.Input"))
continue;
yield return parameter;
}
}
/// <summary>
/// Creates the parameter collection holding the base shader mixin for the given shader
/// and registers default shader sources for all compositions the shader declares.
/// </summary>
private static ParameterCollection BuildBaseMixin(string shaderName, ShaderMetadata shaderMetadata, GraphicsDevice graphicsDevice, out ShaderMixinSource effectInstanceMixin, ParameterCollection parameters = null)
{
effectInstanceMixin = new ShaderMixinSource();
effectInstanceMixin.Mixins.Add(new ShaderClassSource(shaderName));
var mixinParams = parameters ?? new ParameterCollection();
mixinParams.Set(EffectNodeBaseKeys.EffectNodeBaseShader, effectInstanceMixin);
var context = new ShaderGeneratorContext(graphicsDevice)
{
Parameters = mixinParams,
};
//add composition parameters to parameters
if (shaderMetadata.ParsedShader != null)
{
foreach (var compKey in shaderMetadata.ParsedShader.CompositionsWithBaseShaders)
{
var comp = compKey.Value;
var shaderSource = comp.GetDefaultShaderSource(context, baseKeys);
effectInstanceMixin.AddComposition(comp.Name, shaderSource);
mixinParams.Set(comp.Key, shaderSource);
}
}
return mixinParams;
}
// Fallback material keys used for shader source generation of compositions
// (diffuse map/value, with white as the default color).
static MaterialComputeColorKeys baseKeys = new MaterialComputeColorKeys(MaterialKeys.DiffuseMap, MaterialKeys.DiffuseValue, Color.White);
// check composition pins
/// <summary>
/// Re-generates the effect's shader mixin if any composition pin reported a changed
/// shader source. Returns true if the parameters were updated, false otherwise.
/// </summary>
private static bool UpdateCompositions(IReadOnlyList<ShaderFXPin> compositionPins, GraphicsDevice graphicsDevice, ParameterCollection parameters, ShaderMixinSource mixin, CompositeDisposable subscriptions)
{
var anyChanged = false;
for (int i = 0; i < compositionPins.Count; i++)
{
anyChanged |= compositionPins[i].ShaderSourceChanged;
}
if (anyChanged)
{
// Disposes all current subscriptions. So for example all data bindings between the sources and our parameter collection
// gets removed.
subscriptions.Clear();
var context = ShaderGraph.NewShaderGeneratorContext(graphicsDevice, parameters, subscriptions);
// Work on a copy so the original mixin stays a pristine base for the next update.
var updatedMixin = new ShaderMixinSource();
updatedMixin.DeepCloneFrom(mixin);
for (int i = 0; i < compositionPins.Count; i++)
{
var cp = compositionPins[i];
cp.GenerateAndSetShaderSource(context, baseKeys, updatedMixin);
}
parameters.Set(EffectNodeBaseKeys.EffectNodeBaseShader, updatedMixin);
return true;
}
return false;
}
/// <summary>
/// Wraps a constant value into an output pin; the getter side effect runs on every read.
/// </summary>
static IVLPin ToOutput<T>(NodeBuilding.NodeInstanceBuildContext c, T value, Action getter)
{
    T Fetch()
    {
        getter();
        return value;
    }
    return c.Output(Fetch);
}
/// <summary>
/// Returns the shader source for the given name: a mixin generator source for effects
/// registered with the mixin manager, a plain shader class source otherwise.
/// </summary>
static ShaderSource GetShaderSource(string effectName)
    => ShaderMixinManager.Contains(effectName)
        ? (ShaderSource)new ShaderMixinGeneratorSource(effectName)
        : new ShaderClassSource(effectName);
// Not used yet
/// <summary>
/// Parses an .sdfx file and collects the parameter keys it declares, grouped per effect.
/// </summary>
static Dictionary<string, Dictionary<string, ParameterKey>> GetCompilerParameters(string filePath, string sdfxEffectName)
{
// In .sdfx, shader has been renamed to effect, in order to avoid ambiguities with HLSL and .sdsl
var macros = new[]
{
new global::Stride.Core.Shaders.Parser.ShaderMacro("shader", "effect")
};
// Parse and collect
var shader = StrideShaderParser.PreProcessAndParse(filePath, macros);
var builder = new RuntimeShaderMixinBuilder(shader);
return builder.CollectParameters();
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/VLEffectMain.sdfx.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace VL.Stride.Rendering
{
/// <summary>
/// Permutation parameters controlling which VL extension shaders are mixed into the
/// main forward shading effect (see VLEffectMain).
/// NOTE(review): file is auto-generated - comments here will be lost on regeneration.
/// </summary>
[DataContract]public partial class VLEffectParameters : ShaderMixinParameters
{
// Enables mixing in MaterialExtensionShader.
public static readonly PermutationParameterKey<bool> EnableExtensionShader = ParameterKeys.NewPermutation<bool>(false);
public static readonly PermutationParameterKey<ShaderSource> MaterialExtensionShader = ParameterKeys.NewPermutation<ShaderSource>();
// Enables mixing in the effect referenced by name through MaterialExtensionNameMesh.
public static readonly PermutationParameterKey<bool> EnableExtensionNameMesh = ParameterKeys.NewPermutation<bool>(false);
public static readonly PermutationParameterKey<string> MaterialExtensionNameMesh = ParameterKeys.NewPermutation<string>();
// Enables mixing in MaterialExtensionShaderMesh.
public static readonly PermutationParameterKey<bool> EnableExtensionShaderMesh = ParameterKeys.NewPermutation<bool>(false);
public static readonly PermutationParameterKey<ShaderSource> MaterialExtensionShaderMesh = ParameterKeys.NewPermutation<ShaderSource>();
};
internal static partial class ShaderMixins
{
// Mixin builder for the "VLEffectMain" effect: starts from Stride's forward shading
// effect and mixes in optional VL extension shaders when the corresponding
// permutation flags are enabled. (Auto-generated file - edits here may be lost.)
internal partial class VLEffectMain : IShaderMixinBuilder
{
public void Generate(ShaderMixinSource mixin, ShaderMixinContext context)
{
context.Mixin(mixin, "StrideForwardShadingEffect");
// Generic (non-mesh) extension shader.
if (context.GetParam(VLEffectParameters.EnableExtensionShader))
{
context.Mixin(mixin, context.GetParam(VLEffectParameters.MaterialExtensionShader));
}
// Mesh extension referenced by effect name.
if (context.GetParam(VLEffectParameters.EnableExtensionNameMesh))
{
context.Mixin(mixin, context.GetParam(VLEffectParameters.MaterialExtensionNameMesh));
}
// Mesh extension given as a shader source.
if (context.GetParam(VLEffectParameters.EnableExtensionShaderMesh))
{
context.Mixin(mixin, context.GetParam(VLEffectParameters.MaterialExtensionShaderMesh));
}
}
// Registers this builder under "VLEffectMain" at module load.
[ModuleInitializer]
internal static void __Initialize__()
{
ShaderMixinManager.Register("VLEffectMain", new VLEffectMain());
}
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Shaders/ShaderFX/Variable/SetVar.cs<|end_filename|>
using System;
using System.Collections.Generic;
using Stride.Core;
using Stride.Rendering.Materials;
using Stride.Shaders;
using VL.Core;
using static VL.Stride.Shaders.ShaderFX.ShaderFXUtils;
namespace VL.Stride.Shaders.ShaderFX
{
/// <summary>
/// Defines a variable and assigns a value to it. Can also re-assign an existing Var.
/// </summary>
/// <typeparam name="T">The value type of the variable.</typeparam>
/// <seealso cref="IComputeVoid" />
[Monadic(typeof(GpuMonadicFactory<>))]
public class SetVar<T> : VarBase<T>, IComputeVoid
{
// Declares a new variable and assigns the given value to it.
public SetVar(IComputeValue<T> value, DeclVar<T> declaration, bool evaluateChildren = true)
: base(declaration)
{
Var = null;
Value = value;
EvaluateChildren = evaluateChildren;
}
// Re-assigns the given value to an existing variable (shares its declaration).
public SetVar(IComputeValue<T> value, SetVar<T> var, bool evaluateChildren = true)
: base(var.Declaration)
{
Var = var;
Value = value;
EvaluateChildren = evaluateChildren;
}
// The previous assignment this one re-assigns, or null if this one declares the variable.
[DataMemberIgnore]
public SetVar<T> Var { get; }
// The value being assigned; null means the declaration is a constant or an existing semantic.
public IComputeValue<T> Value { get; }
// When false, Var and Value are excluded from child traversal in GetChildren.
public bool EvaluateChildren { get; }
public override IEnumerable<IComputeNode> GetChildren(object context = null)
{
return EvaluateChildren ? ReturnIfNotNull(Var, Value) : ReturnIfNotNull();
}
public override ShaderSource GenerateShaderSource(ShaderGeneratorContext context, MaterialComputeColorKeys baseKeys)
{
if (Value is null) //nothing will be set, i.e. declaration is a constant or an existing semantic
return new ShaderClassSource("ComputeVoid");
var varName = Declaration.GetNameForContext(context);
// Semantic declarations assign from the semantic input, plain vars from the composed Value.
var shaderSource = Declaration is DeclSemantic<T> semantic
? GetShaderSourceForType<T>("AssignSemantic", varName, semantic.SemanticName)
: GetShaderSourceForType<T>("AssignVar", varName);
var mixin = shaderSource.CreateMixin();
mixin.AddComposition(Value, "Value", context, baseKeys);
return mixin;
}
public override string ToString()
{
if (Declaration is DeclConstant<T> constant)
return string.Format("Constant {0}", constant.VarName);
else if (Declaration is DeclSemantic<T> semantic)
return string.Format("Get Semantic {0}", semantic.SemanticName);
return string.Format("Assign {0} ", Declaration.VarName);
}
}
/// <summary>
/// Monadic factory that lifts a CPU value of type T into a GPU-side SetVar&lt;T&gt;.
/// Only value types are supported.
/// </summary>
public sealed class GpuMonadicFactory<T> : IMonadicFactory<T, SetVar<T>>
{
    public static readonly GpuMonadicFactory<T> Default = new GpuMonadicFactory<T>();

    public IMonadBuilder<T, SetVar<T>> GetMonadBuilder(bool isConstant)
    {
        // Reference types have no GPU representation here.
        if (!typeof(T).IsValueType)
            throw new NotImplementedException();
        var builderType = typeof(GpuValueBuilder<>).MakeGenericType(typeof(T));
        return (IMonadBuilder<T, SetVar<T>>)Activator.CreateInstance(builderType);
    }
}
/// <summary>
/// Builds a GPU variable fed by a CPU input value. The declaration is created once;
/// Return only updates the input and hands out the same SetVar instance.
/// </summary>
public sealed class GpuValueBuilder<T> : IMonadBuilder<T, SetVar<T>>
    where T : struct
{
    private readonly InputValue<T> input;
    private readonly SetVar<T> variable;

    public GpuValueBuilder()
    {
        input = new InputValue<T>();
        variable = DeclAndSetVar("Input", input);
    }

    public SetVar<T> Return(T value)
    {
        input.Input = value;
        return variable;
    }
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/EffectShaderNodes.ShaderFX.cs<|end_filename|>
using Stride.Core;
using Stride.Graphics;
using Stride.Rendering;
using Stride.Rendering.Materials;
using Stride.Shaders;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reactive.Linq;
using System.Reflection;
using VL.Core;
using VL.Model;
using VL.Stride.Shaders.ShaderFX;
namespace VL.Stride.Rendering
{
static partial class EffectShaderNodes
{
/// <summary>
/// Builds a node description for a ShaderFX shader: reflects its effect parameters and
/// uniform inputs into pins and outputs a compute graph node (e.g. SetVar&lt;T&gt;).
/// </summary>
static IVLNodeDescription NewShaderFXNode(this IVLNodeDescriptionFactory factory, NameAndVersion name, string shaderName, ShaderMetadata shaderMetadata, IObservable<object> changes, Func<bool> openEditor, IServiceRegistry serviceRegistry, GraphicsDevice graphicsDevice)
{
return factory.NewNodeDescription(
name: name,
category: "Stride.Rendering.Experimental.ShaderFX",
tags: shaderMetadata.Tags,
fragmented: true,
invalidated: changes,
init: buildContext =>
{
// Output type is e.g. SetVar<Vector3> with inner type Vector3.
var outputType = shaderMetadata.GetShaderFXOutputType(out var innerType);
var mixinParams = BuildBaseMixin(shaderName, shaderMetadata, graphicsDevice, out var shaderMixinSource);
var (_effect, _messages) = CreateEffectInstance("ShaderFXEffect", shaderMetadata, serviceRegistry, graphicsDevice, mixinParams, baseShaderName: shaderName);
var _inputs = new List<IVLPinDescription>();
var _outputs = new List<IVLPinDescription>() { buildContext.Pin("Output", outputType) };
var usedNames = new HashSet<string>();
var needsWorld = false;
foreach (var parameter in GetParameters(_effect))
{
var key = parameter.Key;
var name = key.Name;
if (WellKnownParameters.PerDrawMap.ContainsKey(name))
{
// Expose World only - all other world dependent parameters we can compute on our own
needsWorld = true;
continue;
}
var typeInPatch = shaderMetadata.GetPinType(key, out var boxedDefaultValue);
shaderMetadata.GetPinDocuAndVisibility(key, out var summary, out var remarks, out var isOptional);
_inputs.Add(new ParameterPinDescription(usedNames, key, parameter.Count, defaultValue: boxedDefaultValue, typeInPatch: typeInPatch) { IsVisible = !isOptional, Summary = summary, Remarks = remarks });
}
// local input values
foreach (var key in shaderMetadata.ParsedShader?.GetUniformInputs() ?? Enumerable.Empty<ParameterKey>())
{
var name = key.Name;
if (WellKnownParameters.PerDrawMap.ContainsKey(name))
{
// Expose World only - all other world dependent parameters we can compute on our own
needsWorld = true;
continue;
}
var typeInPatch = shaderMetadata.GetPinType(key, out var boxedDefaultValue);
if (boxedDefaultValue == null)
boxedDefaultValue = key.DefaultValueMetadata.GetDefaultValue();
shaderMetadata.GetPinDocuAndVisibility(key, out var summary, out var remarks, out var isOptional);
_inputs.Add(new ParameterPinDescription(usedNames, key, 1, defaultValue: boxedDefaultValue, typeInPatch: typeInPatch) { IsVisible = !isOptional, Summary = summary, Remarks = remarks });
}
if (needsWorld)
_inputs.Add(new ParameterPinDescription(usedNames, TransformationKeys.World));
return buildContext.NewNode(
inputs: _inputs,
outputs: _outputs,
messages: _messages,
summary: shaderMetadata.Summary,
remarks: shaderMetadata.Remarks,
newNode: nodeBuildContext =>
{
var gameHandle = nodeBuildContext.NodeContext.GetGameHandle();
var game = gameHandle.Resource;
var tempParameters = new ParameterCollection(); // only needed for pin construction - parameter updater will later take care of multiple sinks
var nodeState = new ShaderFXNodeState(shaderName);
var inputs = new List<IVLPin>();
foreach (var _input in _inputs)
{
if (_input is ParameterPinDescription parameterPinDescription)
inputs.Add(parameterPinDescription.CreatePin(game.GraphicsDevice, tempParameters));
}
// The concrete output type is only known at runtime - invoke BuildOutput<T, TInner> via reflection.
var outputMaker = typeof(EffectShaderNodes).GetMethod(nameof(BuildOutput), BindingFlags.Static | BindingFlags.NonPublic);
outputMaker = outputMaker.MakeGenericMethod(outputType, innerType);
outputMaker.Invoke(null, new object[] { nodeBuildContext, nodeState, inputs });
return nodeBuildContext.Node(
inputs: inputs,
outputs: new[] { nodeState.OutputPin },
update: default,
dispose: () =>
{
gameHandle.Dispose();
});
},
openEditor: openEditor
);
});
}
// For example T = SetVar<Vector3> and TInner = Vector3
/// <summary>
/// Creates the output pin of a ShaderFX node. The compute node (and the SetVar wrapping
/// it) is rebuilt whenever a composition input reports a changed shader source.
/// </summary>
static void BuildOutput<T, TInner>(NodeBuilding.NodeInstanceBuildContext context, ShaderFXNodeState nodeState, IReadOnlyList<IVLPin> inputPins)
{
var compositionPins = inputPins.OfType<ShaderFXPin>().ToList();
var inputs = inputPins.OfType<ParameterPin>().ToList();
Func<T> getOutput = () =>
{
//check shader fx inputs
var shaderChanged = nodeState.CurrentComputeNode == null;
for (int i = 0; i < compositionPins.Count; i++)
{
shaderChanged |= compositionPins[i].ShaderSourceChanged;
compositionPins[i].ShaderSourceChanged = false; //change seen
}
if (shaderChanged)
{
var newComputeNode = new ShaderFXNode<TInner>(
getShaderSource: (c, k) =>
{
//let the pins subscribe to the parameter collection of the sink
foreach (var pin in inputs)
pin.SubscribeTo(c);
return new ShaderClassSource(nodeState.ShaderName);
},
inputs: compositionPins);
nodeState.CurrentComputeNode = newComputeNode;
// Void shaders are returned as-is; value shaders get wrapped into a named variable.
if (typeof(TInner) == typeof(VoidOrUnknown))
nodeState.CurrentOutputValue = nodeState.CurrentComputeNode;
else
nodeState.CurrentOutputValue = ShaderFXUtils.DeclAndSetVar(nodeState.ShaderName + "Result", newComputeNode);
}
return (T)nodeState.CurrentOutputValue;
};
nodeState.OutputPin = context.Output(getOutput);
}
/// <summary>
/// Mutable per-node state shared between the construction site of a ShaderFX node and
/// its output getter closure (see BuildOutput).
/// </summary>
class ShaderFXNodeState
{
    // Name of the shader class this node wraps (fixed per node instance).
    public readonly string ShaderName;
    // The node's single output pin.
    public IVLPin OutputPin;
    // Cached output (SetVar<T> or the raw compute node for void shaders).
    public object CurrentOutputValue;
    // The ShaderFXNode<TInner> currently backing the output.
    public object CurrentComputeNode;

    public ShaderFXNodeState(string shaderName) => ShaderName = shaderName;
}
}
// Gives access to the ShaderFX input pins of a node independent of its generic type.
interface IShaderFXNode
{
IList<ShaderFXPin> InputPins { get; }
}
// Compute node whose shader source is produced lazily via a callback and whose
// compositions are taken from the node's ShaderFX input pins (keyed by pin key name).
class ShaderFXNode<T> : GenericComputeNode<T>, IShaderFXNode
{
public ShaderFXNode(Func<ShaderGeneratorContext, MaterialComputeColorKeys, ShaderClassCode> getShaderSource, IList<ShaderFXPin> inputs)
: base(getShaderSource, inputs.Select(p => new KeyValuePair<string, IComputeNode>(p.Key.Name, p.GetValueOrDefault())))
{
InputPins = inputs;
}
public IList<ShaderFXPin> InputPins { get; }
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/EffectPins.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Reflection;
using VL.Core;
using Stride.Rendering;
using Stride.Core.Mathematics;
using Stride.Graphics;
using System.Runtime.CompilerServices;
using Stride.Rendering.Materials;
using VL.Stride.Shaders.ShaderFX;
using VL.Stride.Shaders.ShaderFX.Control;
using Stride.Shaders;
namespace VL.Stride.Rendering
{
/// <summary>
/// Base class for all pin descriptions of effect nodes.
/// </summary>
abstract class EffectPinDescription : IVLPinDescription, IInfo, IVLPinDescriptionWithVisibility
{
public abstract string Name { get; }
public abstract Type Type { get; }
// Untyped default value; may be an array when the pin represents multiple elements.
public abstract object DefaultValueBoxed { get; }
public string Summary { get; set; }
public string Remarks { get; set; }
public bool IsVisible { get; set; } = true;
object IVLPinDescription.DefaultValue
{
get
{
// The Gpu<T> code path seems to use SetVar<T> here - we can't really deal with this in target code generation.
// Therefor explicitly return null here, so the target code will not try to insert the types default through the monadic builder interface.
if (DefaultValueBoxed is IComputeNode)
return null;
return DefaultValueBoxed;
}
}
// Creates the runtime pin backed by the given parameter collection.
public abstract IVLPin CreatePin(GraphicsDevice graphicsDevice, ParameterCollection parameters);
}
/// <summary>
/// Pin description backed by a plain CLR value of type <typeparamref name="T"/>.
/// Creates a simple in-memory <see cref="Pin{T}"/> that does not touch the parameter collection.
/// </summary>
class PinDescription<T> : EffectPinDescription
{
    public PinDescription(string name, T defaultValue = default(T))
    {
        Name = name;
        DefaultValue = defaultValue;
    }
    public override string Name { get; }
    public override Type Type => typeof(T);
    public override object DefaultValueBoxed => DefaultValue;
    public T DefaultValue { get; }
    public override IVLPin CreatePin(GraphicsDevice graphicsDevice, ParameterCollection parameters) => new Pin<T>(Name, DefaultValue);
}
/// <summary>
/// Currently used for texture input pins of TextureFX nodes that need access to the original ParameterKey of the shader.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <seealso cref="VL.Stride.Rendering.EffectPinDescription" />
class ParameterKeyPinDescription<T> : PinDescription<T>
{
    public ParameterKeyPinDescription(string name, ParameterKey<T> key, T defaultValue = default(T))
        : base(name, defaultValue)
    {
        Key = key;
    }

    // The original shader parameter key this pin maps to.
    public ParameterKey<T> Key { get; }
}
/// <summary>
/// Pin description derived from a Stride <see cref="ParameterKey"/>. Supports permutation
/// keys, array pins (count > 1) and a type override coming from the patch.
/// </summary>
class ParameterPinDescription : EffectPinDescription
{
    public readonly ParameterKey Key;
    public readonly int Count;
    public readonly bool IsPermutationKey;

    public ParameterPinDescription(HashSet<string> usedNames, ParameterKey key, int count = 1, object defaultValue = null, bool isPermutationKey = false, string name = null, Type typeInPatch = null)
    {
        Key = key;
        IsPermutationKey = isPermutationKey;
        Count = count;
        // Derive a unique pin name from the key unless one was given explicitly.
        Name = name ?? key.GetPinName(usedNames);
        var elementType = typeInPatch ?? key.PropertyType;
        defaultValue = defaultValue ?? key.DefaultValueMetadata?.GetDefaultValue();
        // TODO: This should be fixed in Stride
        if (key.PropertyType == typeof(Matrix))
            defaultValue = Matrix.Identity;
        if (count > 1)
        {
            // Array pin: the boxed default is an array pre-filled with the element default.
            Type = elementType.MakeArrayType();
            var arr = Array.CreateInstance(elementType, count);
            for (int i = 0; i < arr.Length; i++)
                arr.SetValue(defaultValue, i);
            DefaultValueBoxed = arr;
        }
        else
        {
            Type = elementType;
            DefaultValueBoxed = defaultValue;
        }
    }

    public override string Name { get; }
    public override Type Type { get; }
    public override object DefaultValueBoxed { get; }

    public override IVLPin CreatePin(GraphicsDevice graphicsDevice, ParameterCollection parameters) => EffectPins.CreatePin(graphicsDevice, parameters, Key, Count, IsPermutationKey, DefaultValueBoxed, Type);

    public override string ToString()
    {
        // BUG FIX: '+' binds tighter than '??', so the previous
        //   "PinDesc: " + Name ?? base.ToString()
        // evaluated as ("PinDesc: " + Name) ?? base.ToString() and the fallback was dead code.
        // Parenthesize so a null Name actually falls back to base.ToString().
        return "PinDesc: " + (Name ?? base.ToString());
    }
}
/// <summary>
/// Factory methods that create the matching <see cref="IVLPin"/> implementation for a
/// given Stride parameter key. Generic factory methods are closed over the runtime
/// types via reflection.
/// </summary>
static class EffectPins
{
    public static IVLPin CreatePin(GraphicsDevice graphicsDevice, ParameterCollection parameters, ParameterKey key, int count, bool isPermutationKey, object value, Type typeInPatch)
    {
        // Colors need the device color space for correct conversion - handle them directly.
        if (key is ValueParameterKey<Color4> colorKey)
            return new ColorParameterPin(parameters, colorKey, graphicsDevice.ColorSpace, (Color4)value);

        var argument = key.GetType().GetGenericArguments()[0];

        if (typeInPatch.IsEnum)
            return CreateViaReflection(nameof(CreateEnumPin), new[] { argument, typeInPatch }, parameters, key, value);

        if (argument == typeof(ShaderSource))
        {
            // Gpu<T> values flow through SetVar<T>, everything else through IComputeNode compositions.
            if (typeInPatch.IsGenericType && typeInPatch.GetGenericTypeDefinition() == typeof(SetVar<>))
                return CreateViaReflection(nameof(CreateGPUValueSinkPin), new[] { typeInPatch.GetGenericArguments()[0] }, parameters, key, value);

            return CreateViaReflection(nameof(CreateShaderFXPin), new[] { typeof(IComputeNode) }, parameters, key, value);
        }

        if (isPermutationKey)
            return CreateViaReflection(nameof(CreatePermutationPin), new[] { argument }, parameters, key, value);

        if (argument.IsValueType)
        {
            if (count > 1)
                return CreateViaReflection(nameof(CreateArrayPin), new[] { argument }, parameters, key, value);

            return CreateViaReflection(nameof(CreateValuePin), new[] { argument }, parameters, key, value);
        }

        // Reference types (textures, buffers, ...) - no initial value, the updater tracks them.
        return CreateViaReflection(nameof(CreateResourcePin), new[] { argument }, parameters, key);
    }

    // Closes the named public factory method below over the given type arguments and invokes it.
    // Extracted to remove six copies of the same GetMethod/MakeGenericMethod/Invoke boilerplate.
    private static IVLPin CreateViaReflection(string factoryName, Type[] typeArguments, params object[] args)
    {
        var factory = typeof(EffectPins).GetMethod(factoryName, BindingFlags.Static | BindingFlags.Public);
        return factory.MakeGenericMethod(typeArguments).Invoke(null, args) as IVLPin;
    }

    public static IVLPin CreatePermutationPin<T>(ParameterCollection parameters, PermutationParameterKey<T> key, T value)
    {
        return new PermutationParameterPin<T>(parameters, key, value);
    }

    public static IVLPin CreateEnumPin<T, TEnum>(ParameterCollection parameters, ValueParameterKey<T> key, TEnum value) where T : unmanaged where TEnum : unmanaged
    {
        return new EnumParameterPin<T, TEnum>(parameters, key, value);
    }

    public static IVLPin CreateShaderFXPin<T>(ParameterCollection parameters, PermutationParameterKey<ShaderSource> key, T value) where T : class, IComputeNode
    {
        return new ShaderFXPin<T>(key, value);
    }

    public static IVLPin CreateGPUValueSinkPin<T>(ParameterCollection parameters, PermutationParameterKey<ShaderSource> key, SetVar<T> value)
    {
        return new GPUValueSinkPin<T>(key, value);
    }

    public static IVLPin CreateValuePin<T>(ParameterCollection parameters, ValueParameterKey<T> key, T value) where T : struct
    {
        return new ValueParameterPin<T>(parameters, key, value);
    }

    public static IVLPin CreateArrayPin<T>(ParameterCollection parameters, ValueParameterKey<T> key, T[] value) where T : struct
    {
        return new ArrayValueParameterPin<T>(parameters, key, value);
    }

    public static IVLPin CreateResourcePin<T>(ParameterCollection parameters, ObjectParameterKey<T> key) where T : class
    {
        return new ResourceParameterPin<T>(parameters, key);
    }
}
/// <summary>
/// Base class for pins that need to track a <see cref="ShaderGeneratorContext"/>
/// so their value reaches the generated parameter collections.
/// </summary>
abstract class ParameterPin
{
    internal abstract void SubscribeTo(ShaderGeneratorContext c);
}
/// <summary>
/// Pin that forwards its value into a <see cref="ParameterCollection"/> through a
/// <see cref="ParameterUpdater{TValue,TKey}"/>.
/// </summary>
abstract class ParameterPin<T, TKey> : ParameterPin, IVLPin<T>
    where TKey : ParameterKey
{
    private readonly ParameterUpdater<T, TKey> valueUpdater;

    protected ParameterPin(ParameterUpdater<T, TKey> updater, T value)
    {
        valueUpdater = updater;
        valueUpdater.Value = value;
    }

    public T Value
    {
        get => valueUpdater.Value;
        set => valueUpdater.Value = value;
    }

    object IVLPin.Value
    {
        get => Value;
        set => Value = (T)value;
    }

    // Let the updater write the current value into the context's parameter collections.
    internal override sealed void SubscribeTo(ShaderGeneratorContext c) => valueUpdater.Track(c);
}
/// <summary>Pin for effect permutation parameters (changing them recompiles the effect).</summary>
sealed class PermutationParameterPin<T> : ParameterPin<T, PermutationParameterKey<T>>
{
    public PermutationParameterPin(ParameterCollection parameters, PermutationParameterKey<T> key, T value)
        : base(new PermutationParameterUpdater<T>(parameters, key), value)
    {
    }
}
/// <summary>Pin for plain value parameters (blittable constant-buffer values).</summary>
sealed class ValueParameterPin<T> : ParameterPin<T, ValueParameterKey<T>>
    where T : struct
{
    public ValueParameterPin(ParameterCollection parameters, ValueParameterKey<T> key, T value)
        : base(new ValueParameterUpdater<T>(parameters, key), value)
    {
    }
}
/// <summary>
/// Exposes an enum-typed pin on top of a value pin of the enum's underlying numeric type,
/// reinterpreting between the two with <see cref="Unsafe.As{TFrom,TTo}(ref TFrom)"/>.
/// </summary>
sealed class EnumParameterPin<T, TEnum> : IVLPin<TEnum>
    where T : unmanaged
    where TEnum : unmanaged
{
    private readonly ValueParameterPin<T> underlyingPin;

    public EnumParameterPin(ParameterCollection parameters, ValueParameterKey<T> key, TEnum value)
    {
        underlyingPin = new ValueParameterPin<T>(parameters, key, Unsafe.As<TEnum, T>(ref value));
    }

    public TEnum Value
    {
        get
        {
            var raw = underlyingPin.Value;
            return Unsafe.As<T, TEnum>(ref raw);
        }
        set => underlyingPin.Value = Unsafe.As<TEnum, T>(ref value);
    }

    object IVLPin.Value
    {
        get => Value;
        set => Value = (TEnum)value;
    }
}
/// <summary>
/// Color pin that converts values into the device color space on every set, so the
/// shader always receives the color in the <see cref="ColorSpace"/> captured at creation.
/// </summary>
sealed class ColorParameterPin : IVLPin<Color4>
{
    private readonly ValueParameterPin<Color4> pin;
    public readonly ColorSpace ColorSpace;

    public ColorParameterPin(ParameterCollection parameters, ValueParameterKey<Color4> key, ColorSpace colorSpace, Color4 value)
    {
        pin = new ValueParameterPin<Color4>(parameters, key, value.ToColorSpace(colorSpace));
        ColorSpace = colorSpace;
    }

    public Color4 Value
    {
        // Note: the getter returns the converted (device color space) value, not the
        // value originally assigned.
        get => pin.Value;
        set => pin.Value = value.ToColorSpace(ColorSpace);
    }

    object IVLPin.Value
    {
        get => Value;
        set => Value = (Color4)value;
    }
}
/// <summary>Pin for array-valued parameters (count > 1), e.g. float arrays in a constant buffer.</summary>
sealed class ArrayValueParameterPin<T> : ParameterPin<T[], ValueParameterKey<T>>
    where T : struct
{
    public ArrayValueParameterPin(ParameterCollection parameters, ValueParameterKey<T> key, T[] value)
        : base(new ArrayParameterUpdater<T>(parameters, key), value)
    {
    }
}
/// <summary>Pin for reference-typed resources (textures, buffers, samplers); starts out null.</summary>
sealed class ResourceParameterPin<T> : ParameterPin<T, ObjectParameterKey<T>>
    where T : class
{
    public ResourceParameterPin(ParameterCollection parameters, ObjectParameterKey<T> key)
        : base(new ObjectParameterUpdater<T>(parameters, key), default)
    {
    }
}
/// <summary>
/// Base class for pins that contribute a <see cref="ShaderSource"/> permutation to the
/// shader graph rather than a runtime parameter value.
/// </summary>
abstract class ShaderFXPin : ParameterPin
{
    // Permutation key under which the generated shader source is stored.
    public readonly PermutationParameterKey<ShaderSource> Key;

    public ShaderFXPin(PermutationParameterKey<ShaderSource> key)
    {
        Key = key;
    }

    // True until the current value has been turned into shader source; set again on value change.
    public bool ShaderSourceChanged { get; set; } = true;

    /// <summary>
    /// Generates the shader source, writes it to the context parameters under <see cref="Key"/>
    /// and, if given, registers it as a composition on the mixin. Clears <see cref="ShaderSourceChanged"/>.
    /// </summary>
    public void GenerateAndSetShaderSource(ShaderGeneratorContext context, MaterialComputeColorKeys baseKeys, ShaderMixinSource mixin = null)
    {
        var shaderSource = GetShaderSource(context, baseKeys);
        context.Parameters.Set(Key, shaderSource);
        if (mixin != null)
        {
            mixin.Compositions[Key.Name] = shaderSource;
        }
        ShaderSourceChanged = false; //change seen
    }

    protected abstract ShaderSource GetShaderSource(ShaderGeneratorContext context, MaterialComputeColorKeys baseKeys);

    /// <summary>Returns the current compute node or the pin's default when unset.</summary>
    public abstract IComputeNode GetValueOrDefault();

    internal override sealed void SubscribeTo(ShaderGeneratorContext c)
    {
        // We're part of the shader graph -> GetShaderSource takes care of writing the immutable shader source to the parameters
        // Should the shader source change a new graph will be generated (ShaderSourceChanged == true)
    }
}
/// <summary>
/// ShaderFX pin holding an <see cref="IComputeNode"/>-derived value; generates its shader
/// source from the current value, falling back to the default captured at construction.
/// </summary>
class ShaderFXPin<TShaderClass> : ShaderFXPin, IVLPin<TShaderClass> where TShaderClass : class, IComputeNode
{
    private TShaderClass internalValue;
    protected TShaderClass defaultValue;

    public ShaderFXPin(PermutationParameterKey<ShaderSource> key, TShaderClass value)
        : base(key)
    {
        internalValue = value;
        defaultValue = value;
    }

    public TShaderClass Value
    {
        get => internalValue;
        set
        {
            // Only flag a change on a different reference - triggers shader graph regeneration.
            if (internalValue != value)
            {
                internalValue = value;
                ShaderSourceChanged = true;
            }
        }
    }

    public override IComputeNode GetValueOrDefault()
    {
        return internalValue ?? defaultValue;
    }

    object IVLPin.Value
    {
        get => Value;
        set => Value = (TShaderClass)value;
    }

    protected override sealed ShaderSource GetShaderSource(ShaderGeneratorContext context, MaterialComputeColorKeys baseKeys)
    {
        var shaderSource = GetShaderSourceCore(context, baseKeys);
        // NOTE(review): the base GenerateAndSetShaderSource also writes Key to
        // context.Parameters after calling this method - the Set here looks redundant; confirm.
        context.Parameters.Set(Key, shaderSource);
        return shaderSource;
    }

    protected virtual ShaderSource GetShaderSourceCore(ShaderGeneratorContext context, MaterialComputeColorKeys baseKeys)
    {
        return Value?.GenerateShaderSource(context, baseKeys) ?? defaultValue?.GenerateShaderSource(context, baseKeys);
    }
}
/// <summary>
/// ShaderFX pin for Gpu&lt;T&gt; values (SetVar&lt;T&gt;): builds the final shader graph for the
/// variable's getter and wraps it in a <see cref="Do{T}"/> so the whole graph executes
/// before the value is read.
/// </summary>
sealed class GPUValueSinkPin<T> : ShaderFXPin<SetVar<T>>
{
    public GPUValueSinkPin(PermutationParameterKey<ShaderSource> key, SetVar<T> value)
        : base(key, value)
    {
    }

    protected override ShaderSource GetShaderSourceCore(ShaderGeneratorContext context, MaterialComputeColorKeys baseKeys)
    {
        var input = Value ?? defaultValue;
        var getter = input.GetVarValue();
        var graph = ShaderGraph.BuildFinalShaderGraph(getter);
        var finalVar = new Do<T>(graph, getter);
        return finalVar.GenerateShaderSource(context, baseKeys);
    }

    public override IComputeNode GetValueOrDefault()
    {
        // Expose the variable's getter (not the SetVar itself) as the compute node.
        var input = Value ?? defaultValue;
        return input.GetVarValue();
    }
}
/// <summary>Simple named pin holding a boxed value; not connected to any parameter collection.</summary>
abstract class Pin : IVLPin
{
    public Pin(string name)
    {
        Name = name;
    }

    public abstract object Value { get; set; }

    public string Name { get; }
}
/// <summary>
/// Typed in-memory pin. The value is stored in the explicit <see cref="IVLPin{T}.Value"/>
/// auto-property; the boxed <see cref="Value"/> override delegates to it.
/// </summary>
class Pin<T> : Pin, IVLPin<T>
{
    public Pin(string name, T initialValue) : base(name)
    {
        Value = initialValue;
    }

    T IVLPin<T>.Value { get; set; }

    public sealed override object Value
    {
        get => ((IVLPin<T>)this).Value;
        set => ((IVLPin<T>)this).Value = (T)value;
    }
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/TextureFXTechniqueEnums.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace VL.Stride.Effects.TextureFX
{
    // Enumerations exposed as pin values by the TextureFX nodes.
    // NOTE: member order defines each member's numeric value - presumably these map to
    // integer uniforms in the shaders, so do not reorder (confirm against the .sdsl sources).
    public enum AlphaChannel { Average, R, G, B, A };
    public enum BumpType { Directional, Point };
    public enum ChannelKeyingType { Alpha, Red, Green, Blue, Luma, Saturation };
    public enum ConvertColorType { HSVtoRGB, HSLtoRGB, RGBtoHSV, RGBtoHSL };
    public enum MapColorType { Hue, HueSaturation, HueValue, Luma, RedBlue, RGBA, SaturationValue, Tone, Value };
    public enum RampColorType { RGB, Hue, Luma, Saturation };
    public enum GlowType { Pre, Glow, Mix };
    public enum HaloType { Smooth, Linear, Spike };
    public enum LevelsClampType { None, Top, Bottom, Both };
    public enum LomographType { One, Two, Three, Four, Five, Six, Gray, Sepia };
    public enum NoiseType { Perlin, PerlinGrad, Value, ValueGrad, Simplex, SimplexGrad, WorleyFast, WorleyFastGrad };
    public enum PaletteType { HSL, HSV, Radial };
    public enum CoordinatesType { Cartesian, Polar };
    public enum TunnelType { Square, Cylinder, Fly };
    // Lanczos is intentionally disabled, see trailing comment.
    public enum ResizeInterpolationType { NearestNeighbor, Linear, CubicBSpline, CubicCatmullRom/*, Lanczos*/ };
    public enum RoundingType { Round, Floor, Ceil };
    public enum TextureChannel { Channel0, Channel1, Channel2, Channel3 };
    public enum BlurPasses { OnePass, TwoPasses, ThreePasses };
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/EffectShaderNodes.cs<|end_filename|>
using Stride.Core.IO;
using Stride.Core.Serialization.Contents;
using Stride.Graphics;
using Stride.Rendering;
using Stride.Shaders.Compiler;
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reactive.Linq;
using System.Reflection;
using VL.Core;
using VL.Model;
namespace VL.Stride.Rendering
{
/// <summary>
/// Discovers .sdsl shader files (by their _DrawFX/_TextureFX/_ComputeFX/_ShaderFX suffix)
/// and exposes them as VL node descriptions, wiring up file watchers so edited shaders
/// invalidate the corresponding nodes and factories.
/// </summary>
static partial class EffectShaderNodes
{
    /// <summary>
    /// Builds the node factory for the built-in shader database and returns a per-path
    /// factory that additionally scans a package's local "shaders" folder.
    /// </summary>
    public static NodeBuilding.FactoryImpl Init(IVLNodeDescriptionFactory factory)
    {
        ShaderMetadata.RegisterAdditionalShaderAttributes();
        var nodes = GetNodeDescriptions(factory).ToImmutableArray();
        return NodeBuilding.NewFactoryImpl(nodes, forPath: path => factory =>
        {
            // In case "shaders" directory gets added or deleted invalidate the whole factory
            var invalidated = NodeBuilding.WatchDir(path)
                .Where(e => (e.ChangeType == WatcherChangeTypes.Created || e.ChangeType == WatcherChangeTypes.Deleted || e.ChangeType == WatcherChangeTypes.Renamed) && e.Name == EffectCompilerBase.DefaultSourceShaderFolder);
            // File provider crashes if directory doesn't exist :/
            var shadersPath = Path.Combine(path, EffectCompilerBase.DefaultSourceShaderFolder);
            if (Directory.Exists(shadersPath))
            {
                try
                {
                    var nodes = GetNodeDescriptions(factory, path, shadersPath);
                    // Additionaly watch out for new/deleted/renamed files
                    invalidated = invalidated.Merge(NodeBuilding.WatchDir(shadersPath)
                        .Where(e => IsNewOrDeletedShaderFile(e)));
                    return NodeBuilding.NewFactoryImpl(nodes.ToImmutableArray(), invalidated,
                        export: c =>
                        {
                            // Copy all shaders to the project directory but do so only once per shader path relying on the assumption that the generated project
                            // containing the Assets folder will be referenced by projects further up in the dependency tree.
                            var pathExportedKey = (typeof(EffectShaderNodes), shadersPath);
                            if (c.SolutionWideStorage.TryAdd(pathExportedKey, pathExportedKey))
                            {
                                var assetsFolder = Path.Combine(c.DirectoryPath, "Assets");
                                Directory.CreateDirectory(assetsFolder);
                                foreach (var f in Directory.EnumerateFiles(shadersPath))
                                {
                                    if (string.Equals(Path.GetExtension(f), ".sdsl", StringComparison.OrdinalIgnoreCase) || string.Equals(Path.GetExtension(f), ".sdfx", StringComparison.OrdinalIgnoreCase))
                                        File.Copy(f, Path.Combine(assetsFolder, Path.GetFileName(f)), overwrite: true);
                                }
                            }
                        });
                }
                catch (UnauthorizedAccessException)
                {
                    // When deleting a folder we can run into this one
                }
            }
            // Just watch for changes
            return NodeBuilding.NewFactoryImpl(invalidated: invalidated);
        });
    }

    /// <summary>
    /// Enumerates node descriptions for all .sdsl files found either in the shader database
    /// (path == null) or in the given package path. Each recognized suffix yields the
    /// matching node kind; TextureFX additionally gets a composed node.
    /// </summary>
    static IEnumerable<IVLNodeDescription> GetNodeDescriptions(IVLNodeDescriptionFactory factory, string path = default, string shadersPath = default)
    {
        var serviceRegistry = SharedServices.GetRegistry();
        var graphicsDeviceService = serviceRegistry.GetService<IGraphicsDeviceService>();
        var graphicsDevice = graphicsDeviceService.GraphicsDevice;
        var contentManager = serviceRegistry.GetService<ContentManager>();
        var effectSystem = serviceRegistry.GetService<EffectSystem>();
        // Ensure path is visible to the effect system
        if (path != null)
            effectSystem.EnsurePathIsVisible(path);
        // Ensure the effect system tracks the same files as we do
        // (the watcher is private, so it is pulled out via reflection)
        var fieldInfo = typeof(EffectSystem).GetField("directoryWatcher", BindingFlags.NonPublic | BindingFlags.Instance);
        var directoryWatcher = fieldInfo.GetValue(effectSystem) as DirectoryWatcher;
        var modifications = Observable.FromEventPattern<FileEvent>(directoryWatcher, nameof(DirectoryWatcher.Modified))
            .Select(e => e.EventArgs)
            .Where(e => e.ChangeType == FileEventChangeType.Changed || e.ChangeType == FileEventChangeType.Renamed);
        // Effect system deals with its internal cache on update, so make sure its called.
        effectSystem.Update(default);
        const string sdslFileFilter = "*.sdsl";
        const string drawFXSuffix = "_DrawFX";
        const string computeFXSuffix = "_ComputeFX";
        const string textureFXSuffix = "_TextureFX";
        const string shaderFXSuffix = "_ShaderFX";
        // Traverse either the "shaders" folder in the database or in the given path (if present)
        IVirtualFileProvider fileProvider = default;
        var dbFileProvider = effectSystem.FileProvider; //should include current path
        var sourceManager = dbFileProvider.GetShaderSourceManager();
        if (path != null)
            fileProvider = new FileSystemProvider(null, path);
        else
            fileProvider = contentManager.FileProvider;
        EffectUtils.ResetParserCache();
        foreach (var file in fileProvider.ListFiles(EffectCompilerBase.DefaultSourceShaderFolder, sdslFileFilter, VirtualSearchOption.TopDirectoryOnly))
        {
            var effectName = Path.GetFileNameWithoutExtension(file);
            if (effectName.EndsWith(drawFXSuffix))
            {
                // Shader only for now
                var name = GetNodeName(effectName, drawFXSuffix);
                var shaderNodeName = new NameAndVersion($"{name.NamePart}Shader", name.VersionPart);
                var shaderMetadata = ShaderMetadata.CreateMetadata(effectName, dbFileProvider, sourceManager);
                yield return factory.NewDrawEffectShaderNode(
                    shaderNodeName,
                    effectName,
                    shaderMetadata,
                    TrackChanges(effectName, shaderMetadata),
                    () => OpenEditor(effectName),
                    serviceRegistry,
                    graphicsDevice);
                //DrawFX node
            }
            else if (effectName.EndsWith(textureFXSuffix))
            {
                // TextureFX yields two nodes: the raw shader node and a composed TextureFX node.
                var name = GetNodeName(effectName, textureFXSuffix);
                var shaderNodeName = new NameAndVersion($"{name.NamePart}Shader", name.VersionPart);
                var shaderMetadata = ShaderMetadata.CreateMetadata(effectName, dbFileProvider, sourceManager);
                var shaderNodeDescription = factory.NewImageEffectShaderNode(
                    shaderNodeName,
                    effectName,
                    shaderMetadata,
                    TrackChanges(effectName, shaderMetadata),
                    () => OpenEditor(effectName),
                    serviceRegistry,
                    graphicsDevice);
                yield return shaderNodeDescription;
                yield return factory.NewTextureFXNode(shaderNodeDescription, name, shaderMetadata);
            }
            else if (effectName.EndsWith(computeFXSuffix))
            {
                // Shader only for now
                var name = GetNodeName(effectName, computeFXSuffix);
                var shaderNodeName = new NameAndVersion($"{name.NamePart}Shader", name.VersionPart);
                var shaderMetadata = ShaderMetadata.CreateMetadata(effectName, dbFileProvider, sourceManager);
                yield return factory.NewComputeEffectShaderNode(
                    shaderNodeName,
                    effectName,
                    shaderMetadata,
                    TrackChanges(effectName, shaderMetadata),
                    () => OpenEditor(effectName),
                    serviceRegistry,
                    graphicsDevice);
                //ComputeFX node
            }
            else if (effectName.EndsWith(shaderFXSuffix))
            {
                // Shader only
                var name = GetNodeName(effectName, shaderFXSuffix);
                var shaderNodeName = new NameAndVersion($"{name.NamePart}", name.VersionPart);
                var shaderMetadata = ShaderMetadata.CreateMetadata(effectName, dbFileProvider, sourceManager);
                yield return factory.NewShaderFXNode(
                    shaderNodeName,
                    effectName,
                    shaderMetadata,
                    TrackChanges(effectName, shaderMetadata),
                    () => OpenEditor(effectName),
                    serviceRegistry,
                    graphicsDevice);
            }
        }

        // build an observable to track the file changes, also the files of the base shaders
        IObservable<object> TrackChanges(string shaderName, ShaderMetadata shaderMetadata)
        {
            var watchNames = new HashSet<string>() { shaderName };
            foreach (var baseClass in shaderMetadata.ParsedShader?.BaseShaders ?? Enumerable.Empty<ParsedShader>())
            {
                var baseClassPath = baseClass.Shader.Span.Location.FileSource;
                if (baseClassPath.ToLowerInvariant().Contains("/stride."))
                    continue; //in stride package folder
                watchNames.Add(Path.GetFileNameWithoutExtension(baseClassPath));
            }
            IObservable<object> invalidated = modifications.Where(e => watchNames.Contains(Path.GetFileNameWithoutExtension(e.Name)));
            // Setup our own watcher as Stride doesn't track shaders with errors
            if (path != null)
            {
                invalidated = Observable.Merge(invalidated, NodeBuilding.WatchDir(shadersPath)
                    .Where(e => watchNames.Contains(Path.GetFileNameWithoutExtension(e.Name)))
                    .Do(e =>
                    {
                        // Drop the compiler and parser caches so the next build sees the edited sources.
                        ((EffectCompilerBase)effectSystem.Compiler).ResetCache(new HashSet<string>() { Path.GetFileNameWithoutExtension(e.Name) });
                        foreach (var watchName in watchNames)
                        {
                            EffectUtils.ResetParserCache(watchName);
                        }
                    }));
            }
            return invalidated;
        }

        // Opens the .sdsl file in the system editor; returns false if that fails.
        bool OpenEditor(string effectName)
        {
            var path = EffectUtils.GetPathOfSdslShader(effectName, fileProvider);
            try
            {
                Process.Start(path);
                return true;
            }
            catch
            {
                return false;
            }
        }
    }
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Shaders/ShaderFX/Input/InputValue.cs<|end_filename|>
using Stride.Rendering;
using Stride.Rendering.Materials;
using Stride.Shaders;
using static VL.Stride.Shaders.ShaderFX.ShaderFXUtils;
namespace VL.Stride.Shaders.ShaderFX
{
/// <summary>
/// ShaderFX value that can be fed from the main loop. With no explicit key it allocates
/// a generic parameter key per context and tracks it; with a key it references the
/// existing shader input directly.
/// </summary>
public class InputValue<T> : ComputeValue<T>
    where T : struct
{
    private readonly ValueParameterUpdater<T> updater = new ValueParameterUpdater<T>();

    public InputValue(ValueParameterKey<T> key = null, string constantBufferName = null)
    {
        Key = key;
        ConstantBufferName = constantBufferName;
    }

    /// <summary>
    /// Can be updated from mainloop
    /// </summary>
    public T Input
    {
        get => updater.Value;
        set => updater.Value = value;
    }

    public ValueParameterKey<T> Key { get; }

    public string ConstantBufferName { get; private set; }

    public override ShaderSource GenerateShaderSource(ShaderGeneratorContext context, MaterialComputeColorKeys baseKeys)
    {
        // A fixed key was handed in - reference it directly from the shader.
        if (Key != null)
            return GetShaderSourceForType<T>("InputKey", Key);

        var usedKey = GetInputKey(context);

        // keep track of the parameters
        updater.Track(context, usedKey);

        // find constant buffer name (material contexts use PerMaterial, everything else PerUpdate)
        var cbufferName = ConstantBufferName;
        if (string.IsNullOrWhiteSpace(cbufferName))
            cbufferName = context is MaterialGeneratorContext ? "PerMaterial" : "PerUpdate";

        return GetShaderSourceForType<T>("Input", usedKey, cbufferName);
    }

    private ValueParameterKey<T> GetInputKey(ShaderGeneratorContext context)
    {
        return (ValueParameterKey<T>)context.GetParameterKey(Key ?? GenericValueKeys<T>.GenericValueParameter);
    }
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Filters/Patterns/Kaleidoscope_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
/// <summary>
/// Parameter keys for the Kaleidoscope_TextureFX shader. Auto-generated - keep in sync
/// with the .sdsl; the NewValue arguments are the shader-side defaults.
/// </summary>
public static partial class Kaleidoscope_TextureFXKeys
{
    public static readonly ValueParameterKey<int> Divisions = ParameterKeys.NewValue<int>(3);
    public static readonly ValueParameterKey<int> Iterations = ParameterKeys.NewValue<int>(5);
    public static readonly ValueParameterKey<float> IterationZoom = ParameterKeys.NewValue<float>(0.0f);
    public static readonly ValueParameterKey<float> Rotation = ParameterKeys.NewValue<float>(0.0f);
    public static readonly ValueParameterKey<float> Zoom = ParameterKeys.NewValue<float>(0.5f);
    public static readonly ValueParameterKey<Vector2> Center = ParameterKeys.NewValue<Vector2>(new Vector2(0.0f,0.0f));
    public static readonly ValueParameterKey<Vector2> CellOffset = ParameterKeys.NewValue<Vector2>(new Vector2(0.0f,0.0f));
    public static readonly ValueParameterKey<float> CellRotation = ParameterKeys.NewValue<float>(0.0f);
    public static readonly ValueParameterKey<Vector2> CellScale = ParameterKeys.NewValue<Vector2>(new Vector2(1.0f,1.0f));
    public static readonly ValueParameterKey<Vector4> ControlFactor = ParameterKeys.NewValue<Vector4>(new Vector4(1.0f,0.0f,0.0f,0.0f));
    public static readonly ValueParameterKey<bool> Aspect = ParameterKeys.NewValue<bool>(true);
    public static readonly ObjectParameterKey<SamplerState> s0 = ParameterKeys.NewObject<SamplerState>();
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Filters/Distortion/Tunnels_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
/// <summary>
/// Parameter keys for the Tunnels_TextureFX shader. Auto-generated - keep in sync
/// with the .sdsl; the NewValue arguments are the shader-side defaults.
/// </summary>
public static partial class Tunnels_TextureFXKeys
{
    public static readonly ValueParameterKey<int> Type = ParameterKeys.NewValue<int>();
    public static readonly ValueParameterKey<float> Time = ParameterKeys.NewValue<float>();
    public static readonly ValueParameterKey<float> Rotation = ParameterKeys.NewValue<float>(0);
    public static readonly ValueParameterKey<float> Distance = ParameterKeys.NewValue<float>(0.5f);
    public static readonly ValueParameterKey<float> Offset = ParameterKeys.NewValue<float>(0.0f);
    public static readonly ValueParameterKey<float> FogDistance = ParameterKeys.NewValue<float>(0.5f);
    public static readonly ValueParameterKey<Color4> FogColor = ParameterKeys.NewValue<Color4>(new Color4(0,0,0,1));
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Shaders/ShaderFX/RaymarcherMatcap1.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using VL.Stride.Shaders.ShaderFX.Functions;
using Stride.Core.Mathematics;
using Stride.Graphics;
using Stride.Rendering;
using Stride.Rendering.Materials;
using Stride.Shaders;
using static VL.Stride.Shaders.ShaderFX.ShaderFXUtils;
namespace VL.Stride.Shaders.ShaderFX
{
/// <summary>
/// ShaderFX function node for matcap shading in the raymarcher; tracks a texture input
/// in the parameter collection and composes its shader inputs onto a mixin.
/// </summary>
public class RaymarcherMatcap : Funk1In1Out<Vector2, Vector4>
{
    readonly ObjectParameterUpdater<Texture> updater = new ObjectParameterUpdater<Texture>();

    public RaymarcherMatcap(string functionName, IEnumerable<KeyValuePair<string, IComputeNode>> inputs)
        : base(functionName, inputs)
    {
    }

    /// <summary>
    /// Can be updated from mainloop
    /// </summary>
    public Texture Input
    {
        get => updater.Value;
        set => updater.Value = value;
    }

    public ObjectParameterKey<Texture> UsedKey { get; protected set; }

    // NOTE(review): Key is never assigned anywhere in this class, so it is always null
    // and UsedKey always falls back to TexturingKeys.Texture0 - confirm this is intended.
    public ObjectParameterKey<Texture> Key { get; }

    public override ShaderSource GenerateShaderSource(ShaderGeneratorContext context, MaterialComputeColorKeys baseKeys)
    {
        var shaderSource = new ShaderClassSource(ShaderName);
        UsedKey = Key ?? TexturingKeys.Texture0;
        //track parameter collection
        updater.Track(context, UsedKey);
        //compose if necessary
        if (Inputs != null && Inputs.Any())
        {
            var mixin = shaderSource.CreateMixin();
            foreach (var input in Inputs)
            {
                mixin.AddComposition(input.Value, input.Key, context, baseKeys);
            }
            return mixin;
        }
        return shaderSource;
    }

    public override IEnumerable<IComputeNode> GetChildren(object context = null)
    {
        // Yields only the non-null input compute nodes.
        if (Inputs != null)
        {
            foreach (var item in Inputs)
            {
                if (item.Value != null)
                    yield return item.Value;
            }
        }
    }

    public override string ToString()
    {
        return ShaderName;
    }
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Filters/ColorManipulation/Lomograph_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
/// <summary>
/// Parameter keys for the Lomograph_TextureFX shader. Auto-generated - keep in sync
/// with the .sdsl; the NewValue arguments are the shader-side defaults.
/// </summary>
public static partial class Lomograph_TextureFXKeys
{
    public static readonly ValueParameterKey<float> VignetteStart = ParameterKeys.NewValue<float>(0.1f);
    public static readonly ValueParameterKey<float> VignetteAmount = ParameterKeys.NewValue<float>(0.25f);
    public static readonly ValueParameterKey<float> VignetteDodge = ParameterKeys.NewValue<float>(0.1f);
    public static readonly ValueParameterKey<float> Color = ParameterKeys.NewValue<float>(0.6f);
    public static readonly ValueParameterKey<float> Contrast = ParameterKeys.NewValue<float>(0.5f);
    public static readonly ValueParameterKey<float> Level = ParameterKeys.NewValue<float>(0.5f);
    public static readonly ValueParameterKey<float> Effect = ParameterKeys.NewValue<float>(1.0f);
    public static readonly ValueParameterKey<uint> Type = ParameterKeys.NewValue<uint>(0);
    public static readonly ValueParameterKey<int> Iterations = ParameterKeys.NewValue<int>(4);
}
}
<|start_filename|>packages/VL.Stride.Windows/src/Assets/AssetBuilderServiceScript.cs<|end_filename|>
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading.Tasks;
using VL.Stride.Assets;
using Stride.Core.Assets;
using Stride.Engine;
using System.Linq;
using System;
namespace VL.Stride.Assets
{
/// <summary>
/// Custom vl script that sets MSBuild
/// </summary>
/// <summary>
/// Custom vl script that sets MSBuild and drains queued asset items once per frame,
/// handing them to the <see cref="ContentLoader"/> for build and reload.
/// </summary>
public class AssetBuilderServiceScript : AsyncScript
{
    public RuntimeContentLoader ContentLoader;

    // Items queued (possibly from other threads) and drained on the script thread each frame.
    ConcurrentQueue<AssetItem> workQueue = new ConcurrentQueue<AssetItem>();

    public AssetBuilderServiceScript()
    {
        try
        {
            //set msbuild
            PackageSessionPublicHelper.FindAndSetMSBuildVersion();
        }
        catch (Exception e)
        {
            Log.Warning("MSBuild not found", e);
        }
    }

    /// <summary>Enqueues several asset items for building. Thread-safe.</summary>
    public void PushWork(IEnumerable<AssetItem> items)
    {
        foreach (var item in items)
            workQueue.Enqueue(item);
    }

    /// <summary>Enqueues a single asset item for building. Thread-safe.</summary>
    public void PushWork(AssetItem item)
    {
        workQueue.Enqueue(item);
    }

    public override async Task Execute()
    {
        while (true)
        {
            await Script.NextFrame();

            if (workQueue.IsEmpty)
                continue;

            var assetList = DequeueItems().ToList();
            // BUG FIX: was 'return', which silently terminated the script forever on an
            // empty batch; keep looping instead.
            if (assetList.Count == 0)
                continue;

            // BUG FIX: 'await ContentLoader?.BuildAndReloadAssetsInternal(...)' throws
            // NullReferenceException when ContentLoader is null (awaiting a null Task),
            // so guard explicitly instead of using the null-conditional operator.
            if (ContentLoader != null)
                await ContentLoader.BuildAndReloadAssetsInternal(assetList);
        }
    }

    // Drains the queue; stops as soon as TryDequeue fails.
    private IEnumerable<AssetItem> DequeueItems()
    {
        while (workQueue.TryDequeue(out var item))
            yield return item;
    }
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Materials/MaterialExtensions.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reactive.Disposables;
using System.Text;
using System.Threading.Tasks;
using Stride.Core.Serialization;
using Stride.Core.Serialization.Contents;
using Stride.Graphics;
using Stride.Rendering;
using Stride.Rendering.Materials;
using VL.Stride.Shaders.ShaderFX;
namespace VL.Stride.Rendering
{
/// <summary>
/// Extension methods for <see cref="Material"/>.
/// </summary>
public static class MaterialExtensions
{
/// <summary>
/// Clone the <see cref="Material"/>.
/// </summary>
/// <param name="material">The material to clone.</param>
/// <returns>The cloned material.</returns>
/// <exception cref="ArgumentNullException">If <paramref name="material"/> is <see langword="null"/>.</exception>
public static Material Clone(this Material material)
{
    var source = material ?? throw new ArgumentNullException(nameof(material));
    var clone = new Material();
    CopyProperties(source, clone);
    return clone;
}
internal static void CopyProperties(Material material, Material clone)
{
foreach (var pass in material.Passes)
{
clone.Passes.Add(new MaterialPass()
{
HasTransparency = pass.HasTransparency,
BlendState = pass.BlendState,
CullMode = pass.CullMode,
IsLightDependent = pass.IsLightDependent,
TessellationMethod = pass.TessellationMethod,
PassIndex = pass.PassIndex,
Parameters = new ParameterCollection(pass.Parameters)
});
}
}
/// <summary>
/// Same as Material.New but also loading referenced content in parameter collection (like EnvironmentLightingDFG_LUT)
/// as well as setting the <see cref="ShaderGraph.GraphSubscriptions"/> on the used <see cref="MaterialGeneratorContext"/>.
/// </summary>
internal static Material New(GraphicsDevice device, MaterialDescriptor descriptor, ContentManager content, CompositeDisposable subscriptions)
{
if (descriptor == null) throw new ArgumentNullException(nameof(descriptor));
if (subscriptions == null) throw new ArgumentNullException(nameof(subscriptions));
// The descriptor is not assigned to the material because
// 1) we don't know whether it will mutate and be used to generate another material
// 2) we don't wanna hold on to memory we actually don't need
var context = new MaterialGeneratorContext(new Material(), device)
{
GraphicsProfile = device.Features.RequestedProfile,
};
// Allows nodes in the graph to tie the lifetime of services to the graph itself
context.Tags.Set(ShaderGraph.GraphSubscriptions, subscriptions);
var result = MaterialGenerator.Generate(descriptor, context, string.Format("{0}:RuntimeMaterial", descriptor.MaterialId));
if (result.HasErrors)
{
throw new InvalidOperationException(string.Format("Error when creating the material [{0}]", result.ToText()));
}
var m = result.Material;
// Attach the descriptor (not sure why Stride is not doing that on its own) as its needed for material layers
m.Descriptor = descriptor;
foreach (var pass in m.Passes)
{
//var t = pass.Parameters.Get(MaterialSpecularMicrofacetEnvironmentGGXLUTKeys.EnvironmentLightingDFG_LUT);
//if (t != null)
//{
// var reference = AttachedReferenceManager.GetAttachedReference(t);
// var realT = content.Load<Texture>(reference.Url, ContentManagerLoaderSettings.StreamingDisabled);
// pass.Parameters.Set(MaterialSpecularMicrofacetEnvironmentGGXLUTKeys.EnvironmentLightingDFG_LUT, realT);
//}
foreach (var p in pass.Parameters.ParameterKeyInfos)
{
var key = p.Key;
if (key.Type != ParameterKeyType.Object)
continue;
var value = pass.Parameters.GetObject(key);
if (value is null)
continue;
var reference = AttachedReferenceManager.GetAttachedReference(value);
if (reference is null)
continue;
if (content.Exists(reference.Url))
{
var c = content.Load(key.PropertyType, reference.Url, ContentManagerLoaderSettings.StreamingDisabled);
if (c is null)
continue;
pass.Parameters.SetObject(key, c);
}
}
}
return m;
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Shaders/ShaderFX/DeclResource.cs<|end_filename|>
using Stride.Rendering;
using Stride.Rendering.Materials;
using Stride.Shaders;
using System;
namespace VL.Stride.Shaders.ShaderFX
{
public class DeclResource<T> : ComputeNode<T>, IComputeVoid
where T : class
{
readonly ObjectParameterUpdater<T> updater = new ObjectParameterUpdater<T>();
readonly string resourceGroupName;
public DeclResource(string resourceGroupName = null)
{
this.resourceGroupName = resourceGroupName;
}
/// <summary>
/// Can be updated from mainloop
/// </summary>
public T Resource
{
get => updater.Value;
set => updater.Value = value;
}
public ObjectParameterKey<T> Key { get; private set; }
public override ShaderSource GenerateShaderSource(ShaderGeneratorContext context, MaterialComputeColorKeys baseKeys)
{
Key = context.GetKeyForContext(this, Key);
//track the parameter collection
updater.Track(context, Key);
//no shader source to create here, only the key
return new ShaderClassSource("ComputeVoid");
}
public virtual string GetResourceGroupName(ShaderGeneratorContext context)
{
if (string.IsNullOrWhiteSpace(resourceGroupName))
{
return context is MaterialGeneratorContext ? "PerMaterial" : "PerUpdate";
}
return resourceGroupName;
}
public override string ToString()
{
return $"{typeof(T).Name} {Key?.Name}";
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Utils/Swizzle_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
    /// <summary>
    /// Auto-generated parameter keys for the Swizzle_TextureFX shader.
    /// The defaults 0,1,2,3 suggest an identity channel mapping (RGBA -> RGBA) —
    /// confirm against the .sdsl source before relying on that.
    /// Do not edit: regenerated from the shader by the Stride mixin code generator.
    /// </summary>
    public static partial class Swizzle_TextureFXKeys
    {
        public static readonly ValueParameterKey<int> Channel0 = ParameterKeys.NewValue<int>(0);
        public static readonly ValueParameterKey<int> Channel1 = ParameterKeys.NewValue<int>(1);
        public static readonly ValueParameterKey<int> Channel2 = ParameterKeys.NewValue<int>(2);
        public static readonly ValueParameterKey<int> Channel3 = ParameterKeys.NewValue<int>(3);
    }
}
<|start_filename|>packages/VL.Stride.Runtime/src/Graphics/BufferViewDescription.cs<|end_filename|>
using System;
using System.Runtime.CompilerServices;
using Stride.Engine;
using Stride.Graphics;
using Stride.Core.IO;
using System.Buffers;
using System.IO;
using Buffer = Stride.Graphics.Buffer;
using VL.Core;
namespace VL.Stride.Graphics
{
public struct BufferViewDescription
{
/// <summary>
/// The flags used for the view. If <see cref="BufferFlags.None"/> then the view is using the flags from the buffer.
/// </summary>
public BufferFlags Flags;
/// <summary>
/// The format of the view, used for typed buffers, usually a 32-bit float format for e.g. Buffer<float4>. Set to <see cref="PixelFormat.None"/> when the buffer is raw or structured.
/// </summary>
public PixelFormat Format;
//used in patch
public static void Split(ref BufferViewDescription bufferViewDescription, out BufferFlags flags, out PixelFormat format)
{
flags = bufferViewDescription.Flags;
format = bufferViewDescription.Format;
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/CustomDrawEffect.cs<|end_filename|>
using Stride.Core;
using Stride.Core.Mathematics;
using Stride.Graphics;
using Stride.Rendering;
using System;
using System.Linq;
using System.Reactive.Disposables;
using System.Reactive.Linq;
using VL.Core;
namespace VL.Stride.Rendering
{
    /// <summary>
    /// An <see cref="IEffect"/> backed by a <see cref="DynamicEffectInstance"/> that uploads
    /// the well-known per-frame/per-view/per-draw/texturing parameters before each draw and
    /// lets user code inject additional parameters via <see cref="ParameterSetter"/>.
    /// </summary>
    public class CustomDrawEffect : IEffect, IDisposable
    {
        // The underlying effect instance; recompiled on demand via UpdateEffect.
        public readonly DynamicEffectInstance EffectInstance;

        // Parameter accessors for the well-known groups, resolved once in the constructor
        // so SetParameters does not have to look them up per draw.
        readonly PerFrameParameters[] perFrameParams;
        readonly PerViewParameters[] perViewParams;
        readonly PerDrawParameters[] perDrawParams;
        readonly TexturingParameters[] texturingParams;

        /// <summary>
        /// Creates the effect, initializes and compiles it, and caches the
        /// well-known parameter accessors.
        /// </summary>
        public CustomDrawEffect(string effectName, IServiceRegistry serviceRegistry, GraphicsDevice graphicsDevice, ParameterCollection parameters = default)
        {
            EffectInstance = new DynamicEffectInstance(effectName, parameters);
            EffectInstance.Initialize(serviceRegistry);
            EffectInstance.UpdateEffect(graphicsDevice);
            perFrameParams = EffectInstance.Parameters.GetWellKnownParameters(WellKnownParameters.PerFrameMap).ToArray();
            perViewParams = EffectInstance.Parameters.GetWellKnownParameters(WellKnownParameters.PerViewMap).ToArray();
            perDrawParams = EffectInstance.Parameters.GetWellKnownParameters(WellKnownParameters.PerDrawMap).ToArray();
            texturingParams = EffectInstance.Parameters.GetTexturingParameters().ToArray();
        }

        // Exposes the effect's parameter collection for external configuration.
        public ParameterCollection Parameters => EffectInstance.Parameters;

        // Disposables tied to this effect's lifetime; released in Dispose().
        internal readonly CompositeDisposable Subscriptions = new CompositeDisposable();

        // Optional user hook invoked last in SetParameters, after all built-in groups are set.
        public Action<ParameterCollection, RenderView, RenderDrawContext> ParameterSetter { get; set; }

        public void Dispose()
        {
            Subscriptions.Dispose();
            EffectInstance.Dispose();
        }

        /// <summary>
        /// Refreshes the compiled effect and uploads frame/view/draw/texturing parameters
        /// for the given view. Exceptions are reported to the runtime graph rather than
        /// propagated, so a faulty setter does not break the render loop.
        /// </summary>
        public EffectInstance SetParameters(RenderView renderView, RenderDrawContext renderDrawContext)
        {
            EffectInstance.UpdateEffect(renderDrawContext.GraphicsDevice);
            var parameters = EffectInstance.Parameters;
            try
            {
                // TODO1: PerFrame could be done in Update if we'd have access to frame time
                // TODO2: This code can be optimized by using parameter accessors and not parameter keys
                parameters.SetPerFrameParameters(perFrameParams, renderDrawContext.RenderContext);

                // Compose the entity's world matrix with the parent transformation
                // (if a world matrix is present) before uploading per-draw parameters.
                var parentTransformation = renderDrawContext.RenderContext.Tags.Get(EntityRendererRenderFeature.CurrentParentTransformation);
                if (parameters.ContainsKey(TransformationKeys.World))
                {
                    var world = parameters.Get(TransformationKeys.World);
                    Matrix.Multiply(ref world, ref parentTransformation, out var result);
                    parameters.SetPerDrawParameters(perDrawParams, renderView, ref result);
                }
                else
                {
                    parameters.SetPerDrawParameters(perDrawParams, renderView, ref parentTransformation);
                }

                parameters.SetPerViewParameters(perViewParams, renderView);

                parameters.SetTexturingParameters(texturingParams);

                ParameterSetter?.Invoke(parameters, renderView, renderDrawContext);
            }
            catch (Exception e)
            {
                RuntimeGraph.ReportException(e);
            }
            return EffectInstance;
        }
    }
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Filters/ColorManipulation/Metallica_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
    /// <summary>
    /// Auto-generated parameter keys for the Metallica_TextureFX shader, with the
    /// default values taken from the .sdsl source.
    /// Do not edit: regenerated from the shader by the Stride mixin code generator.
    /// </summary>
    public static partial class Metallica_TextureFXKeys
    {
        public static readonly ValueParameterKey<Color4> ColorA = ParameterKeys.NewValue<Color4>(new Color4(0.60f,0.99f,0.96f,1.0f));
        public static readonly ValueParameterKey<Color4> ColorB = ParameterKeys.NewValue<Color4>(new Color4(0.57f,0.41f,0.07f,1.0f));
        public static readonly ValueParameterKey<Vector2> Angle = ParameterKeys.NewValue<Vector2>(new Vector2(0.0f,0.0f));
        public static readonly ValueParameterKey<Vector2> BumpAmount = ParameterKeys.NewValue<Vector2>(new Vector2(1.0f,1.0f));
        public static readonly ValueParameterKey<Vector2> BumpGamma = ParameterKeys.NewValue<Vector2>(new Vector2(0.0f,0.0f));
        public static readonly ValueParameterKey<Vector2> Brightness = ParameterKeys.NewValue<Vector2>(new Vector2(0.0f,0.0f));
        public static readonly ValueParameterKey<float> Emboss = ParameterKeys.NewValue<float>(1.0f);
        public static readonly ValueParameterKey<float> Shape = ParameterKeys.NewValue<float>(0.0f);
        public static readonly ValueParameterKey<float> MaxRadius = ParameterKeys.NewValue<float>(1.0f);
    }
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/TextureFXEffect.cs<|end_filename|>
using System;
using System.Linq;
using System.Reactive.Disposables;
using Stride.Core;
using Stride.Core.Mathematics;
using Stride.Rendering;
using Stride.Rendering.Images;
namespace VL.Stride.Rendering
{
    /// <summary>
    /// An <see cref="ImageEffectShader"/> for TextureFX that only draws when an output is
    /// assigned, keeps per-frame/per-view parameters updated, and backs off for a few
    /// seconds after a draw exception instead of failing every frame.
    /// </summary>
    public class TextureFXEffect : ImageEffectShader
    {
        // Time of the last draw exception; while within 'retryTime' of it, DrawCore is skipped.
        private TimeSpan? lastExceptionTime;
        private TimeSpan retryTime = TimeSpan.FromSeconds(3);

        // Parameter accessors for the well-known groups, resolved once in InitializeCore.
        PerFrameParameters[] perFrameParams;
        PerViewParameters[] perViewParams;

        public TextureFXEffect(string effectName = null, bool delaySetRenderTargets = false)
            : base(effectName, delaySetRenderTargets)
        {
            // Tie the subscriptions' lifetime to this effect's disposal.
            Subscriptions.DisposeBy(this);
        }

        // Disposables tied to this effect's lifetime.
        internal readonly CompositeDisposable Subscriptions = new CompositeDisposable();

        protected override void InitializeCore()
        {
            base.InitializeCore();

            // Compile once so the parameter accessors below resolve against the real effect.
            EffectInstance.UpdateEffect(GraphicsDevice);
            perFrameParams = EffectInstance.Parameters.GetWellKnownParameters(WellKnownParameters.PerFrameMap).ToArray();
            perViewParams = EffectInstance.Parameters.GetWellKnownParameters(WellKnownParameters.PerViewMap).ToArray();
        }

        // True when at least one output render target is set; all draw phases are
        // gated on this so the effect is a no-op without an output.
        public bool IsOutputAssigned => OutputCount > 0 && GetOutput(0) != null;

        protected override void PreDrawCore(RenderDrawContext context)
        {
            if (IsOutputAssigned)
            {
                base.PreDrawCore(context);
                Parameters.SetPerFrameParameters(perFrameParams, context.RenderContext);
            }
        }

        // Cached output size so per-view parameters are only re-uploaded on change.
        int lastViewWidth;
        int lastViewHeight;

        protected override void UpdateParameters()
        {
            base.UpdateParameters();

            var output0 = GetOutput(0); //safe because it will only be called from base.PreDrawCore when IsOutputAssigned = true
            var w = output0.ViewWidth;
            var h = output0.ViewHeight;

            if (w != lastViewWidth || h != lastViewHeight) //rarely changes
            {
                var viewSize = new Vector2(w, h);
                Parameters.SetCameraParametersOnly(perViewParams, ref viewSize);
                lastViewWidth = w;
                lastViewHeight = h;
            }
        }

        protected override void DrawCore(RenderDrawContext context)
        {
            var time = context.RenderContext.Time;

            // Back off: skip drawing while still inside the retry window after a failure.
            if (time != null && lastExceptionTime.HasValue && (time.Total - lastExceptionTime) < retryTime)
                return;

            if (IsOutputAssigned)
            {
                try
                {
                    base.DrawCore(context);
                }
                catch (Exception e)
                {
                    // Log and remember the failure time so subsequent frames back off
                    // instead of re-throwing every frame.
                    Console.WriteLine(e);
                    if (time != null)
                        lastExceptionTime = time.Total;
                }
            }
        }

        protected override void PostDrawCore(RenderDrawContext context)
        {
            if (IsOutputAssigned)
                base.PostDrawCore(context);
        }
    }
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Filters/Patterns/Hatch_Line_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
    /// <summary>
    /// Auto-generated parameter keys for the Hatch_Line_TextureFX shader.
    /// Do not edit: regenerated from the shader by the Stride mixin code generator.
    /// Note: "Threshhold" is misspelled but must match the shader variable name exactly.
    /// </summary>
    public static partial class Hatch_Line_TextureFXKeys
    {
        public static readonly ValueParameterKey<Color4> BackgroundColor = ParameterKeys.NewValue<Color4>(new Color4(0.0f,0.0f,0.0f,1.0f));
        public static readonly ValueParameterKey<Color4> HatchColor = ParameterKeys.NewValue<Color4>(new Color4(1.0f,1.0f,1.0f,1.0f));
        public static readonly ValueParameterKey<float> Threshhold = ParameterKeys.NewValue<float>(5.0f);
        public static readonly ValueParameterKey<float> Thickness = ParameterKeys.NewValue<float>(4.0f);
        public static readonly ValueParameterKey<float> Rotation = ParameterKeys.NewValue<float>(0.36f);
    }
}
<|start_filename|>packages/VL.Stride.Runtime/src/Graphics/BufferExtensions.cs<|end_filename|>
using System;
using System.Runtime.CompilerServices;
using Stride.Engine;
using Stride.Graphics;
using Stride.Core.IO;
using System.Buffers;
using System.IO;
using Buffer = Stride.Graphics.Buffer;
using VL.Core;
using System.Reflection;
using Stride.Core;
using System.Diagnostics;
using MapMode = Stride.Graphics.MapMode;
namespace VL.Stride.Graphics
{
public static class BufferExtensions
{
public static Buffer New(GraphicsDevice graphicsDevice, BufferDescription description, BufferViewDescription viewDescription, IntPtr intialData)
{
var buffer = BufferCtor(graphicsDevice);
return BufferInit(buffer, description, viewDescription, intialData);
}
const BindingFlags NonPunblicInst = BindingFlags.NonPublic | BindingFlags.Instance;
static Buffer BufferCtor(GraphicsDevice graphicsDevice)
{
var ctor = typeof(Buffer).GetConstructor(NonPunblicInst, null, new[] { typeof(GraphicsDevice) }, null);
return (Buffer)ctor.Invoke(new[] { graphicsDevice });
}
static Buffer BufferInit(Buffer buffer, BufferDescription description, BufferViewDescription viewDescription, IntPtr intialData)
{
var init = typeof(Buffer).GetMethod("InitializeFromImpl", NonPunblicInst, null, new[] { typeof(BufferDescription), typeof(BufferFlags), typeof(PixelFormat), typeof(IntPtr) }, null);
return (Buffer)init.Invoke(buffer, new object[] { description, viewDescription.Flags, viewDescription.Format, intialData});
}
internal static readonly PropertyKey<Buffer> ParentBuffer = new PropertyKey<Buffer>(nameof(ParentBuffer), typeof(Buffer));
public static Buffer ToBufferView(this Buffer parentBuffer, Buffer bufferView, BufferViewDescription viewDescription, GraphicsDevice graphicsDevice)
{
SetGraphicsDevice(bufferView, graphicsDevice);
//bufferDescription = description;
SetField(bufferView, "bufferDescription", parentBuffer.Description);
//nativeDescription = ConvertToNativeDescription(Description);
SetField(bufferView, "nativeDescription", ConvertToNativeDescription(parentBuffer.Description));
//ViewFlags = viewFlags;
SetProp(bufferView, "ViewFlags", viewDescription.Flags);
//InitCountAndViewFormat(out this.elementCount, ref viewFormat);
InitCountAndViewFormat(bufferView, out var count, ref viewDescription.Format);
SetField(bufferView, "elementCount", count);
//ViewFormat = viewFormat;
SetProp(bufferView, "ViewFormat", viewDescription.Format);
//NativeDeviceChild = new SharpDX.Direct3D11.Buffer(GraphicsDevice.NativeDevice, dataPointer, nativeDescription);
SetNativeChild(bufferView, GetNativeChild(parentBuffer));
//if (nativeDescription.Usage != ResourceUsage.Staging)
// this.InitializeViews();
InitializeViews(bufferView);
if (parentBuffer is IReferencable referencable)
{
referencable.AddReference();
bufferView.Destroyed += (e, s) => referencable.Release();
}
return bufferView;
}
static SharpDX.Direct3D11.DeviceChild GetNativeChild(GraphicsResourceBase graphicsResource)
{
var prop = typeof(GraphicsResourceBase).GetProperty("NativeDeviceChild", NonPunblicInst);
return (SharpDX.Direct3D11.DeviceChild)prop.GetValue(graphicsResource);
}
static void SetNativeChild(GraphicsResourceBase graphicsResource, SharpDX.Direct3D11.DeviceChild deviceChild)
{
var iUnknownObject = deviceChild as SharpDX.IUnknown;
if (iUnknownObject != null)
{
var refCountResult = iUnknownObject.AddReference();
Debug.Assert(refCountResult > 1);
}
var prop = typeof(GraphicsResourceBase).GetProperty("NativeDeviceChild", NonPunblicInst);
prop.SetValue(graphicsResource, deviceChild);
}
static SharpDX.Direct3D11.BufferDescription ConvertToNativeDescription(BufferDescription description)
{
var method = typeof(Buffer).GetMethod("ConvertToNativeDescription", BindingFlags.Static | BindingFlags.NonPublic, null, new[] { typeof(BufferDescription) }, null);
return (SharpDX.Direct3D11.BufferDescription)method.Invoke(null, new object[] { description });
}
static void SetField(Buffer buffer, string name, object arg)
{
var field = typeof(Buffer).GetField(name, NonPunblicInst);
field.SetValue(buffer, arg);
}
static void SetGraphicsDevice(Buffer buffer, object arg)
{
var prop = typeof(GraphicsResourceBase).GetProperty("GraphicsDevice", BindingFlags.Public | BindingFlags.Instance);
prop.SetValue(buffer, arg);
}
static void SetProp(Buffer buffer, string name, object arg)
{
var prop = typeof(Buffer).GetProperty(name, BindingFlags.Public | BindingFlags.Instance);
prop.SetValue(buffer, arg);
}
static void InitCountAndViewFormat(Buffer buffer, out int count, ref PixelFormat viewFormat)
{
var method = typeof(Buffer).GetMethod("InitCountAndViewFormat", NonPunblicInst);
var args = new object[] { 0, viewFormat };
method.Invoke(buffer, args);
count = (int)args[0];
}
static void InitializeViews(Buffer buffer)
{
var method = typeof(Buffer).GetMethod("InitializeViews", NonPunblicInst);
method.Invoke(buffer, null);
}
/// <summary>
/// Copies the <paramref name="fromData"/> to the given <paramref name="buffer"/> on GPU memory.
/// </summary>
/// <typeparam name="TData">The type of the T data.</typeparam>
/// <param name="buffer">The <see cref="Buffer"/>.</param>
/// <param name="commandList">The <see cref="CommandList"/>.</param>
/// <param name="fromData">The data to copy from.</param>
/// <param name="offsetInBytes">The offset in bytes to write to.</param>
/// <exception cref="ArgumentException"></exception>
/// <remarks>
/// See the unmanaged documentation about Map/UnMap for usage and restrictions.
/// </remarks>
/// <returns>The GPU buffer.</returns>
public static unsafe Buffer SetData<TData>(this Buffer buffer, CommandList commandList, IHasMemory<TData> fromData, int offsetInBytes = 0) where TData : struct
{
if (fromData.TryGetMemory(out ReadOnlyMemory<TData> memory))
return buffer.SetData(commandList, memory, offsetInBytes);
return buffer;
}
/// <summary>
/// Copies the <paramref name="memory"/> to the given <paramref name="buffer"/> on GPU memory.
/// </summary>
/// <typeparam name="TData">The type of the T data.</typeparam>
/// <param name="buffer">The <see cref="Buffer"/>.</param>
/// <param name="commandList">The <see cref="CommandList"/>.</param>
/// <param name="memory">The memory to copy from.</param>
/// <param name="offsetInBytes">The offset in bytes to write to.</param>
/// <exception cref="ArgumentException"></exception>
/// <remarks>
/// See the unmanaged documentation about Map/UnMap for usage and restrictions.
/// </remarks>
/// <returns>The GPU buffer.</returns>
public static unsafe Buffer SetData<TData>(this Buffer buffer, CommandList commandList, ReadOnlyMemory<TData> memory, int offsetInBytes = 0) where TData : struct
{
using (var handle = memory.Pin())
{
var elementSize = Unsafe.SizeOf<TData>();
var dataPointer = new DataPointer(handle.Pointer, memory.Length * elementSize);
buffer.SetData(commandList, dataPointer, offsetInBytes);
return buffer;
}
}
public static Buffer SetDataFromProvider(this Buffer buffer, CommandList commandList, IGraphicsDataProvider data, int offsetInBytes = 0)
{
if (buffer != null && data != null)
{
using (var handle = data.Pin())
{
buffer.SetData(commandList, new DataPointer(handle.Pointer, data.SizeInBytes), offsetInBytes);
}
}
return buffer;
}
/// <summary>
/// Creates a new <see cref="Buffer"/> initialized with a copy of the given data.
/// </summary>
/// <typeparam name="TData">The element type.</typeparam>
/// <param name="device">The graphics device.</param>
/// <param name="fromData">The data to use to initialize the buffer.</param>
/// <param name="bufferFlags">The buffer flags.</param>
/// <param name="usage">The buffer usage.</param>
/// <exception cref="ArgumentException">If retrieval of read-only memory failed.</exception>
/// <returns>The newly created buffer.</returns>
public static unsafe Buffer New<TData>(GraphicsDevice device, IHasMemory<TData> fromData, BufferFlags bufferFlags, GraphicsResourceUsage usage) where TData : struct
{
if (fromData.TryGetMemory(out ReadOnlyMemory<TData> memory))
return New(device, memory, bufferFlags, usage);
throw new ArgumentException($"Failed to create buffer because retrieval of read-only memory failed.", nameof(fromData));
}
/// <summary>
/// Creates a new <see cref="Buffer"/> initialized with a copy of the given data.
/// </summary>
/// <typeparam name="TData">The element type.</typeparam>
/// <param name="device">The graphics device.</param>
/// <param name="memory">The data to use to initialize the buffer.</param>
/// <param name="bufferFlags">The buffer flags.</param>
/// <param name="usage">The buffer usage.</param>
/// <exception cref="ArgumentException">If retrieval of read-only memory failed.</exception>
/// <returns>The newly created buffer.</returns>
public static unsafe Buffer New<TData>(GraphicsDevice device, ReadOnlyMemory<TData> memory, BufferFlags bufferFlags, GraphicsResourceUsage usage) where TData : struct
{
using (var handle = memory.Pin())
{
var elementSize = Unsafe.SizeOf<TData>();
var dataPointer = new DataPointer(handle.Pointer, memory.Length * elementSize);
return Buffer.New(device, dataPointer, elementSize, bufferFlags, usage);
}
}
// public static unsafe void WriteToDisk(this Buffer buffer, string filepath)
// {
// }
// public static unsafe void WriteToDisk(this Buffer buffer, Stream stream)
// {
// buffer.GetData()
// var pool = ArrayPool<byte>.Shared;
// var chunk = pool.Rent(Math.Min(buffer.SizeInBytes, 0x10000));
// try
// {
// fixed (byte* chunkPtr = chunk)
// {
// var offset = 0;
// while (stream.CanRead)
// {
// var bytesRead = stream.Read(chunk, 0, chunk.Length);
// if (bytesRead > 0)
// {
// var dp = new DataPointer(chunkPtr, bytesRead);
// buffer.SetData(commandList, dp, offset);
// offset += bytesRead;
// }
// }
// }
// }
// finally
// {
// pool.Return(chunk);
// }
// }
//}
public static unsafe Buffer SetDataFromFile(this Buffer buffer, CommandList commandList, string filepath)
{
using (var stream = File.Open(filepath, FileMode.Open, FileAccess.Read, FileShare.Read))
{
buffer.SetDataFromStream(commandList, stream);
}
return buffer;
}
public static unsafe Buffer SetDataFromXenkoAssetURL(this Buffer buffer, CommandList commandList, Game game, string url)
{
using (var stream = game.Content.OpenAsStream(url, StreamFlags.None))
{
buffer.SetDataFromStream(commandList, stream);
}
return buffer;
}
public static unsafe Buffer SetDataFromStream(this Buffer buffer, CommandList commandList, Stream stream)
{
var pool = ArrayPool<byte>.Shared;
var chunk = pool.Rent(Math.Min(buffer.SizeInBytes, 0x10000));
try
{
fixed (byte* chunkPtr = chunk)
{
var offset = 0;
while (stream.CanRead)
{
var bytesRead = stream.Read(chunk, 0, chunk.Length);
if (bytesRead > 0)
{
var dp = new DataPointer(chunkPtr, bytesRead);
buffer.SetData(commandList, dp, offset);
offset += bytesRead;
}
}
}
}
finally
{
pool.Return(chunk);
}
return buffer;
}
/// <summary>
/// Calculates the expected element count of a buffer using a specified type.
/// </summary>
/// <typeparam name="TData">The type of the T pixel data.</typeparam>
/// <returns>The expected width</returns>
/// <exception cref="System.ArgumentException">If the size is invalid</exception>
public static int CalculateElementCount<TData>(this Buffer input) where TData : struct
{
var dataStrideInBytes = Utilities.SizeOf<TData>();
return input.SizeInBytes / dataStrideInBytes;
}
/// <summary>
/// Copies the content of this buffer to an array of data.
/// </summary>
/// <typeparam name="TData">The type of the T data.</typeparam>
/// <param name="thisBuffer"></param>
/// <param name="commandList">The command list.</param>
/// <param name="toData">The destination array to receive a copy of the buffer datas.</param>
/// <param name="doNotWait">if set to <c>true</c> this method will return immediately if the resource is still being used by the GPU for writing. Default is false</param>
/// <param name="offsetInBytes"></param>
/// <param name="lengthInBytes"></param>
/// <returns><c>true</c> if data was correctly retrieved, <c>false</c> if <see cref="doNotWait"/> flag was true and the resource is still being used by the GPU for writing.</returns>
/// <remarks>
/// This method is only working when called from the main thread that is accessing the main <see cref="GraphicsDevice"/>.
/// This method creates internally a stagging resource if this buffer is not already a stagging resouce, copies to it and map it to memory. Use method with explicit staging resource
/// for optimal performances.</remarks>
public static bool GetData<TData>(this Buffer thisBuffer, CommandList commandList, TData[] toData, bool doNotWait = false, int offsetInBytes = 0, int lengthInBytes = 0) where TData : struct
{
// Get data from this resource
if (thisBuffer.Usage == GraphicsResourceUsage.Staging)
{
// Directly if this is a staging resource
return thisBuffer.GetData(commandList, thisBuffer, toData, doNotWait, offsetInBytes, lengthInBytes);
}
else
{
// Unefficient way to use the Copy method using dynamic staging texture
using (var throughStaging = thisBuffer.ToStaging())
return thisBuffer.GetData(commandList, throughStaging, toData, doNotWait, offsetInBytes, lengthInBytes);
}
}
/// <summary>
/// Copies the content of this buffer from GPU memory to a CPU memory using a specific staging resource.
/// </summary>
/// <param name="thisBuffer"></param>
/// <param name="commandList"></param>
/// <param name="staginBuffer">The staging buffer used to transfer the buffer.</param>
/// <param name="toData">To data pointer.</param>
/// <param name="doNotWait"></param>
/// <param name="offsetInBytes"></param>
/// <param name="lengthInBytes"></param>
/// <exception cref="System.ArgumentException">When strides is different from optimal strides, and TData is not the same size as the pixel format, or Width * Height != toData.Length</exception>
/// <remarks>
/// This method is only working when called from the main thread that is accessing the main <see cref="GraphicsDevice"/>.
/// </remarks>
public static bool GetData<TData>(this Buffer thisBuffer, CommandList commandList, Buffer staginBuffer, TData[] toData, bool doNotWait = false, int offsetInBytes = 0, int lengthInBytes = 0) where TData : struct
{
using (var pinner = new GCPinner(toData))
return thisBuffer.GetData(commandList, staginBuffer, new DataPointer(pinner.Pointer, toData.Length * Utilities.SizeOf<TData>()), doNotWait, offsetInBytes, lengthInBytes);
}
/// <summary>
/// Copies the content of this buffer to an array of data.
/// </summary>
/// <typeparam name="TData">The type of the T data.</typeparam>
/// <param name="thisBuffer"></param>
/// <param name="commandList">The command list.</param>
/// <param name="toData">The destination array to receive a copy of the buffer datas.</param>
/// <param name="doNotWait">if set to <c>true</c> this method will return immediately if the resource is still being used by the GPU for writing. Default is false</param>
/// <param name="offsetInBytes"></param>
/// <param name="lengthInBytes"></param>
/// <returns><c>true</c> if data was correctly retrieved, <c>false</c> if <see cref="doNotWait"/> flag was true and the resource is still being used by the GPU for writing.</returns>
/// <remarks>
/// This method is only working when called from the main thread that is accessing the main <see cref="GraphicsDevice"/>.
/// This method creates internally a stagging resource if this buffer is not already a stagging resouce, copies to it and map it to memory. Use method with explicit staging resource
/// for optimal performances.</remarks>
public static bool GetData(this Buffer thisBuffer, CommandList commandList, DataPointer toData, bool doNotWait = false, int offsetInBytes = 0, int lengthInBytes = 0)
{
// Get data from this resource
if (thisBuffer.Usage == GraphicsResourceUsage.Staging)
{
// Directly if this is a staging resource
return thisBuffer.GetData(commandList, thisBuffer, toData, doNotWait, offsetInBytes, lengthInBytes);
}
else
{
// Unefficient way to use the Copy method using dynamic staging texture
using (var throughStaging = thisBuffer.ToStaging())
return thisBuffer.GetData(commandList, throughStaging, toData, doNotWait, offsetInBytes, lengthInBytes);
}
}
/// <summary>
/// Copies the content of this buffer from GPU memory to a CPU memory using a specific staging resource.
/// </summary>
/// <param name="thisBuffer"></param>
/// <param name="commandList"></param>
/// <param name="stagingBuffer">The staging buffer used to transfer the buffer.</param>
/// <param name="toData">To data pointer.</param>
/// <param name="doNotWait"></param>
/// <param name="offsetInBytes"></param>
/// <param name="lengthInBytes"></param>
/// <exception cref="System.ArgumentException">When strides is different from optimal strides, and TData is not the same size as the pixel format, or Width * Height != toData.Length</exception>
/// <remarks>
/// This method is only working when called from the main thread that is accessing the main <see cref="GraphicsDevice"/>.
/// </remarks>
public static bool GetData(this Buffer thisBuffer, CommandList commandList, Buffer stagingBuffer, DataPointer toData, bool doNotWait = false, int offsetInBytes = 0, int lengthInBytes = 0)
{
    // Check size validity of data to copy to.
    // NOTE(review): the destination must cover the *whole* buffer (toData.Size == SizeInBytes)
    // even when a sub-range is requested via offsetInBytes/lengthInBytes — confirm intended.
    if (toData.Pointer == IntPtr.Zero || toData.Size != thisBuffer.SizeInBytes)
        return false;
    // Copy the texture to a staging resource (skipped when this buffer IS the staging buffer).
    if (!ReferenceEquals(thisBuffer, stagingBuffer))
        commandList.Copy(thisBuffer, stagingBuffer);
    // Map for CPU read; with doNotWait the map does not block on the GPU and may
    // instead come back with a null data pointer (handled below by returning false).
    var mappedResource = commandList.MapSubresource(stagingBuffer, 0, MapMode.Read, doNotWait, offsetInBytes, lengthInBytes);
    try
    {
        if (mappedResource.DataBox.DataPointer != IntPtr.Zero)
        {
            // NOTE(review): copies toData.Size bytes regardless of lengthInBytes — verify.
            Utilities.CopyMemory(toData.Pointer, mappedResource.DataBox.DataPointer, toData.Size);
        }
        else
        {
            // Data not ready yet (doNotWait) or the map failed.
            return false;
        }
    }
    finally
    {
        // Make sure that we unmap the resource in case of an exception
        commandList.UnmapSubresource(mappedResource);
    }
    return true;
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Shaders/ParameterUpdater.cs<|end_filename|>
using Stride.Rendering;
using Stride.Rendering.Materials;
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Reactive.Disposables;
namespace VL.Stride.Shaders.ShaderFX
{
/// <summary>
/// Helper class to easily track parameter collections and update one of its parameters.
/// </summary>
/// <typeparam name="TValue">The type of the parameter value</typeparam>
/// <typeparam name="TKey">The type of the parameter key</typeparam>
abstract class ParameterUpdater<TValue, TKey>
    where TKey : ParameterKey
{
    private static readonly EqualityComparer<TValue> comparer = EqualityComparer<TValue>.Default;

    // In most of the cases the parameter collection is known from the start and no other will come into play (pin and effect are in the same node)
    private readonly ParameterCollection parameters;

    // In case we end up in a shader graph multiple parameter collections could pop up (one for every effect) we need to keep track of
    private Dictionary<(ParameterCollection, TKey), RefCountDisposable> trackedCollections;

    // Last value set through Value; re-uploaded to every tracked collection on change.
    private TValue value;
    private TKey key;

    public ParameterUpdater(ParameterCollection parameters = default, TKey key = default)
    {
        this.parameters = parameters;
        this.key = key;
    }

    public TValue Value
    {
        // In the getter, 'value' resolves to the backing field (no implicit parameter here).
        get => value;
        set
        {
            // Here 'value' is the implicit setter argument while 'Value' reads the field,
            // so this compares the incoming value against the currently stored one.
            if (!comparer.Equals(value, Value))
            {
                this.value = value;

                // Push to the primary collection given at construction time (if any) ...
                if (parameters != null)
                {
                    Upload(parameters, key, ref value);
                }

                // ... and to every (collection, key) pair registered via Subscribe/Track.
                if (trackedCollections != null)
                {
                    foreach (var (parameters, key) in trackedCollections.Keys)
                        Upload(parameters, key, ref value);
                }
            }
        }
    }

    // Returns a snapshot of all currently tracked parameter collections.
    public ImmutableArray<ParameterCollection> GetTrackedCollections()
    {
        if (trackedCollections is null)
            return ImmutableArray<ParameterCollection>.Empty;

        var result = ImmutableArray.CreateBuilder<ParameterCollection>(trackedCollections.Count);
        foreach (var (parameters, _) in trackedCollections.Keys)
            result.Add(parameters);
        return result.ToImmutable();
    }

    public void Track(ShaderGeneratorContext context)
    {
        Track(context, key);
    }

    // Ties the subscription's lifetime to the context's subscription list (if it has one).
    public void Track(ShaderGeneratorContext context, TKey key)
    {
        if (context.TryGetSubscriptions(out var s))
            s.Add(Subscribe(context.Parameters, key));
    }

    // Registers a (collection, key) pair. Entries are ref-counted: repeated subscriptions
    // for the same pair share one entry, which is removed again when the last inner
    // disposable is disposed. The current value is uploaded immediately on first
    // subscription — note it may still be default/null at that point, so Upload
    // implementations must tolerate the default value.
    public IDisposable Subscribe(ParameterCollection parameters, TKey key)
    {
        var x = (parameters, key);
        var trackedCollections = this.trackedCollections ??= new Dictionary<(ParameterCollection, TKey), RefCountDisposable>();
        if (trackedCollections.TryGetValue(x, out var disposable))
            return disposable.GetDisposable();

        disposable = new RefCountDisposable(Disposable.Create(() => trackedCollections.Remove(x)));
        trackedCollections.Add(x, disposable);
        Upload(parameters, key, ref value);
        return disposable;
    }

    // Writes the value for the given key into the given collection.
    protected abstract void Upload(ParameterCollection parameters, TKey key, ref TValue value);
}
/// <summary>
/// Updater for single-value shader parameters (<see cref="ValueParameterKey{T}"/>).
/// </summary>
sealed class ValueParameterUpdater<T> : ParameterUpdater<T, ValueParameterKey<T>>
    where T : struct
{
    public ValueParameterUpdater(ParameterCollection parameters = null, ValueParameterKey<T> key = null)
        : base(parameters, key) { }

    // Writes the struct by reference to avoid an extra copy.
    protected override void Upload(ParameterCollection parameters, ValueParameterKey<T> key, ref T value)
        => parameters.Set(key, ref value);
}
/// <summary>
/// Updater for array-valued shader parameters (a <see cref="ValueParameterKey{T}"/> holding
/// multiple elements).
/// </summary>
sealed class ArrayParameterUpdater<T> : ParameterUpdater<T[], ValueParameterKey<T>>
    where T : struct
{
    public ArrayParameterUpdater(ParameterCollection parameters = null, ValueParameterKey<T> key = null) : base(parameters, key)
    {
    }

    protected override void Upload(ParameterCollection parameters, ValueParameterKey<T> key, ref T[] value)
    {
        // Null guard: the base class uploads the current value from Subscribe/Track before
        // any value has been assigned; for arrays the default is null, so dereferencing
        // value.Length here would throw a NullReferenceException. Empty arrays are also
        // skipped — there is nothing to write.
        if (value != null && value.Length > 0)
            parameters.Set(key, value);
    }
}
/// <summary>
/// Updater for object (resource) shader parameters (<see cref="ObjectParameterKey{T}"/>),
/// e.g. textures and buffers.
/// </summary>
sealed class ObjectParameterUpdater<T> : ParameterUpdater<T, ObjectParameterKey<T>>
    where T : class
{
    public ObjectParameterUpdater(ParameterCollection parameters = null, ObjectParameterKey<T> key = null)
        : base(parameters, key) { }

    protected override void Upload(ParameterCollection parameters, ObjectParameterKey<T> key, ref T value)
        => parameters.Set(key, value);
}
/// <summary>
/// Updater for permutation shader parameters (<see cref="PermutationParameterKey{T}"/>) —
/// values that select a different effect permutation when changed.
/// </summary>
sealed class PermutationParameterUpdater<T> : ParameterUpdater<T, PermutationParameterKey<T>>
{
    public PermutationParameterUpdater(ParameterCollection parameters = null, PermutationParameterKey<T> key = null)
        : base(parameters, key) { }

    protected override void Upload(ParameterCollection parameters, PermutationParameterKey<T> key, ref T value)
        => parameters.Set(key, value);
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Effects/TextureFX/Filters/Distortion/Undistort_TextureFX.sdsl.cs<|end_filename|>
// <auto-generated>
// Do not edit this file yourself!
//
// This code was generated by Stride Shader Mixin Code Generator.
// To generate it yourself, please install Stride.VisualStudio.Package .vsix
// and re-save the associated .sdfx.
// </auto-generated>
using System;
using Stride.Core;
using Stride.Rendering;
using Stride.Graphics;
using Stride.Shaders;
using Stride.Core.Mathematics;
using Buffer = Stride.Graphics.Buffer;
namespace Stride.Rendering
{
// Parameter keys for the Undistort_TextureFX shader (auto-generated; see file header).
public static partial class Undistort_TextureFXKeys
{
    // Presumably the camera intrinsics of a lens-undistortion model (focal length,
    // principal point, distortion coefficients) — verify against the .sdsl source.
    public static readonly ValueParameterKey<Vector2> FocalLength = ParameterKeys.NewValue<Vector2>();
    public static readonly ValueParameterKey<Vector2> PrincipalPoint = ParameterKeys.NewValue<Vector2>();
    public static readonly ValueParameterKey<Vector4> Distortion = ParameterKeys.NewValue<Vector4>();
    public static readonly ValueParameterKey<Vector2> Resolution = ParameterKeys.NewValue<Vector2>();
}
<|start_filename|>packages/VL.Stride.Runtime/src/Graphics/GraphicsNodes.cs<|end_filename|>
using Stride.Core.Mathematics;
using Stride.Graphics;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Reactive.Disposables;
using VL.Core;
using VL.Lib.Basics.Resources;
namespace VL.Stride.Graphics
{
static partial class GraphicsNodes
{
/// <summary>
/// Yields one VL node description per advanced graphics type: pipeline state, the various
/// state/description structs, and builder nodes for textures and buffers. Each description
/// struct is wrapped in a StructRef so the fluent pin setters can mutate it in place.
/// </summary>
public static IEnumerable<IVLNodeDescription> GetNodeDescriptions(IVLNodeDescriptionFactory factory)
{
    var graphicsCategory = "Stride.Graphics.Advanced";

    // PipelineState: exposes every PipelineStateDescription field as a cached pin;
    // the output pin compiles (Update) and returns the current pipeline state.
    yield return new CustomNodeDesc<MutablePipelineState>(factory,
        ctor: nodeContext =>
        {
            var deviceHandle = nodeContext.GetDeviceHandle();
            return (CreateInitialPipelineState(deviceHandle), () => deviceHandle.Dispose());
        },
        name: "PipelineState",
        category: graphicsCategory,
        copyOnWrite: false,
        hasStateOutput: false)
        .AddCachedInput(nameof(PipelineStateDescription.RootSignature), x => x.State.RootSignature, (x, v) => x.State.RootSignature = v)
        .AddCachedInput(nameof(PipelineStateDescription.EffectBytecode), x => x.State.EffectBytecode, (x, v) => x.State.EffectBytecode = v)
        .AddCachedInput(nameof(PipelineStateDescription.BlendState), x => x.State.BlendState, (x, v) => x.State.BlendState = v)
        .AddCachedInput(nameof(PipelineStateDescription.SampleMask), x => x.State.SampleMask, (x, v) => x.State.SampleMask = v, 0xFFFFFFFF)
        .AddCachedInput(nameof(PipelineStateDescription.RasterizerState), x => x.State.RasterizerState, (x, v) => x.State.RasterizerState = v)
        .AddCachedInput(nameof(PipelineStateDescription.DepthStencilState), x => x.State.DepthStencilState, (x, v) => x.State.DepthStencilState = v)
        .AddCachedListInput(nameof(PipelineStateDescription.InputElements), x => x.State.InputElements, (x, v) => x.State.InputElements = v)
        .AddCachedInput(nameof(PipelineStateDescription.PrimitiveType), x => x.State.PrimitiveType, (x, v) => x.State.PrimitiveType = v)
        .AddCachedInput(nameof(PipelineStateDescription.Output), x => x.State.Output, (x, v) => x.State.Output = v)
        .AddCachedOutput("Output", x =>
        {
            x.Update();
            return x.CurrentState;
        });

    // InputElementDescription: one vertex-stream element of an input layout.
    yield return factory.NewDescriptionNode(graphicsCategory, new InputElementDescription())
        .AddCachedInput(nameof(InputElementDescription.SemanticName), x => x.v.SemanticName, (x, v) => x.v.SemanticName = v)
        .AddCachedInput(nameof(InputElementDescription.SemanticIndex), x => x.v.SemanticIndex, (x, v) => x.v.SemanticIndex = v)
        .AddCachedInput(nameof(InputElementDescription.Format), x => x.v.Format, (x, v) => x.v.Format = v)
        .AddCachedInput(nameof(InputElementDescription.InputSlot), x => x.v.InputSlot, (x, v) => x.v.InputSlot = v)
        .AddCachedInput(nameof(InputElementDescription.AlignedByteOffset), x => x.v.AlignedByteOffset, (x, v) => x.v.AlignedByteOffset = v)
        .AddCachedInput(nameof(InputElementDescription.InputSlotClass), x => x.v.InputSlotClass, (x, v) => x.v.InputSlotClass = v)
        .AddCachedInput(nameof(InputElementDescription.InstanceDataStepRate), x => x.v.InstanceDataStepRate, (x, v) => x.v.InstanceDataStepRate = v)
        .AddStateOutput();

    // RenderOutputDescription captured live from a CommandList; equals always returns
    // false so the state is re-captured every frame.
    yield return factory.NewDescriptionNode(graphicsCategory, new RenderOutputDescription())
        .AddCachedInput<CommandList>("Input", x => default, (x, v) => { if (v != null) { x.v.CaptureState(v); } }, equals: (a, b) => false /* Always need to capture */)
        .AddStateOutput();

    // RenderOutputDescription built field-by-field instead of captured.
    yield return factory.NewDescriptionNode(name: "RenderOutputDescription (Manually)", category: graphicsCategory, initial: new RenderOutputDescription())
        .AddCachedInput(nameof(RenderOutputDescription.RenderTargetCount), x => x.v.RenderTargetCount, (x, v) => x.v.RenderTargetCount = v)
        .AddCachedInput(nameof(RenderOutputDescription.RenderTargetFormat0), x => x.v.RenderTargetFormat0, (x, v) => x.v.RenderTargetFormat0 = v)
        .AddCachedInput(nameof(RenderOutputDescription.RenderTargetFormat1), x => x.v.RenderTargetFormat1, (x, v) => x.v.RenderTargetFormat1 = v)
        .AddCachedInput(nameof(RenderOutputDescription.RenderTargetFormat2), x => x.v.RenderTargetFormat2, (x, v) => x.v.RenderTargetFormat2 = v)
        .AddCachedInput(nameof(RenderOutputDescription.RenderTargetFormat3), x => x.v.RenderTargetFormat3, (x, v) => x.v.RenderTargetFormat3 = v)
        .AddCachedInput(nameof(RenderOutputDescription.RenderTargetFormat4), x => x.v.RenderTargetFormat4, (x, v) => x.v.RenderTargetFormat4 = v)
        .AddCachedInput(nameof(RenderOutputDescription.RenderTargetFormat5), x => x.v.RenderTargetFormat5, (x, v) => x.v.RenderTargetFormat5 = v)
        .AddCachedInput(nameof(RenderOutputDescription.RenderTargetFormat6), x => x.v.RenderTargetFormat6, (x, v) => x.v.RenderTargetFormat6 = v)
        .AddCachedInput(nameof(RenderOutputDescription.RenderTargetFormat7), x => x.v.RenderTargetFormat7, (x, v) => x.v.RenderTargetFormat7 = v)
        .AddCachedInput(nameof(RenderOutputDescription.DepthStencilFormat), x => x.v.DepthStencilFormat, (x, v) => x.v.DepthStencilFormat = v)
        .AddCachedInput(nameof(RenderOutputDescription.MultisampleCount), x => x.v.MultisampleCount, (x, v) => x.v.MultisampleCount = v)
        .AddCachedInput(nameof(RenderOutputDescription.ScissorTestEnable), x => x.v.ScissorTestEnable, (x, v) => x.v.ScissorTestEnable = v)
        .AddStateOutput();

    // SamplerState: description pins plus an output that creates the GPU sampler object;
    // the SerialDisposable disposes the previous sampler whenever a new one is created.
    yield return factory.NewDescriptionNode(name: "SamplerState", category: graphicsCategory, initial: SamplerStateDescription.Default)
        .AddCachedInput(nameof(SamplerStateDescription.Filter), x => x.v.Filter, (x, v) => x.v.Filter = v, TextureFilter.Linear)
        .AddCachedInput(nameof(SamplerStateDescription.AddressU), x => x.v.AddressU, (x, v) => x.v.AddressU = v, TextureAddressMode.Clamp)
        .AddCachedInput(nameof(SamplerStateDescription.AddressV), x => x.v.AddressV, (x, v) => x.v.AddressV = v, TextureAddressMode.Clamp)
        .AddCachedInput(nameof(SamplerStateDescription.AddressW), x => x.v.AddressW, (x, v) => x.v.AddressW = v, TextureAddressMode.Clamp)
        .AddCachedInput(nameof(SamplerStateDescription.BorderColor), x => x.v.BorderColor, (x, v) => x.v.BorderColor = v, Color4.Black)
        .AddCachedInput(nameof(SamplerStateDescription.MaxAnisotropy), x => x.v.MaxAnisotropy, (x, v) => x.v.MaxAnisotropy = v, 16)
        .AddCachedInput(nameof(SamplerStateDescription.MinMipLevel), x => x.v.MinMipLevel, (x, v) => x.v.MinMipLevel = v, 0f)
        .AddCachedInput(nameof(SamplerStateDescription.MaxMipLevel), x => x.v.MaxMipLevel, (x, v) => x.v.MaxMipLevel = v, float.MaxValue)
        .AddCachedInput(nameof(SamplerStateDescription.MipMapLevelOfDetailBias), x => x.v.MipMapLevelOfDetailBias, (x, v) => x.v.MipMapLevelOfDetailBias = v, 0f)
        .AddCachedInput(nameof(SamplerStateDescription.CompareFunction), x => x.v.CompareFunction, (x, v) => x.v.CompareFunction = v, CompareFunction.Never)
        .AddCachedOutput("Output", nodeContext =>
        {
            var disposable = new SerialDisposable();
            Func<StructRef<SamplerStateDescription>, SamplerState> getter = generator =>
            {
                var gdh = nodeContext.GetDeviceHandle();
                var st = SamplerState.New(gdh.Resource, generator.v);
                gdh.Dispose();
                disposable.Disposable = st;
                return st;
            };
            return (getter, disposable);
        });
    ; // NOTE(review): stray empty statement left over from an edit — harmless.

    // BlendState: overall blend description plus eight per-render-target slots.
    yield return factory.NewDescriptionNode(graphicsCategory, BlendStates.Default)
        .AddCachedInput(nameof(BlendStateDescription.AlphaToCoverageEnable), x => x.v.AlphaToCoverageEnable, (x, v) => x.v.AlphaToCoverageEnable = v, false)
        .AddCachedInput(nameof(BlendStateDescription.IndependentBlendEnable), x => x.v.IndependentBlendEnable, (x, v) => x.v.IndependentBlendEnable = v, false)
        .AddCachedInput(nameof(BlendStateDescription.RenderTarget0), x => x.v.RenderTarget0, (x, v) => x.v.RenderTarget0 = v)
        .AddCachedInput(nameof(BlendStateDescription.RenderTarget1), x => x.v.RenderTarget1, (x, v) => x.v.RenderTarget1 = v)
        .AddCachedInput(nameof(BlendStateDescription.RenderTarget2), x => x.v.RenderTarget2, (x, v) => x.v.RenderTarget2 = v)
        .AddCachedInput(nameof(BlendStateDescription.RenderTarget3), x => x.v.RenderTarget3, (x, v) => x.v.RenderTarget3 = v)
        .AddCachedInput(nameof(BlendStateDescription.RenderTarget4), x => x.v.RenderTarget4, (x, v) => x.v.RenderTarget4 = v)
        .AddCachedInput(nameof(BlendStateDescription.RenderTarget5), x => x.v.RenderTarget5, (x, v) => x.v.RenderTarget5 = v)
        .AddCachedInput(nameof(BlendStateDescription.RenderTarget6), x => x.v.RenderTarget6, (x, v) => x.v.RenderTarget6 = v)
        .AddCachedInput(nameof(BlendStateDescription.RenderTarget7), x => x.v.RenderTarget7, (x, v) => x.v.RenderTarget7 = v)
        .AddStateOutput();

    // Per-render-target blend description (initialized to opaque/no-blend defaults).
    yield return factory.NewDescriptionNode(graphicsCategory,
        new BlendStateRenderTargetDescription()
        {
            ColorSourceBlend = Blend.One,
            ColorDestinationBlend = Blend.Zero,
            ColorBlendFunction = BlendFunction.Add,
            AlphaSourceBlend = Blend.One,
            AlphaDestinationBlend = Blend.Zero,
            AlphaBlendFunction = BlendFunction.Add,
            ColorWriteChannels = ColorWriteChannels.All
        })
        .AddCachedInput(nameof(BlendStateRenderTargetDescription.BlendEnable), x => x.v.BlendEnable, (x, v) => x.v.BlendEnable = v, false)
        .AddCachedInput(nameof(BlendStateRenderTargetDescription.ColorSourceBlend), x => x.v.ColorSourceBlend, (x, v) => x.v.ColorSourceBlend = v, Blend.One)
        .AddCachedInput(nameof(BlendStateRenderTargetDescription.ColorDestinationBlend), x => x.v.ColorDestinationBlend, (x, v) => x.v.ColorDestinationBlend = v, Blend.Zero)
        .AddCachedInput(nameof(BlendStateRenderTargetDescription.ColorBlendFunction), x => x.v.ColorBlendFunction, (x, v) => x.v.ColorBlendFunction = v, BlendFunction.Add)
        .AddCachedInput(nameof(BlendStateRenderTargetDescription.AlphaSourceBlend), x => x.v.AlphaSourceBlend, (x, v) => x.v.AlphaSourceBlend = v, Blend.One)
        .AddCachedInput(nameof(BlendStateRenderTargetDescription.AlphaDestinationBlend), x => x.v.AlphaDestinationBlend, (x, v) => x.v.AlphaDestinationBlend = v, Blend.Zero)
        .AddCachedInput(nameof(BlendStateRenderTargetDescription.AlphaBlendFunction), x => x.v.AlphaBlendFunction, (x, v) => x.v.AlphaBlendFunction = v, BlendFunction.Add)
        .AddCachedInput(nameof(BlendStateRenderTargetDescription.ColorWriteChannels), x => x.v.ColorWriteChannels, (x, v) => x.v.ColorWriteChannels = v, ColorWriteChannels.All)
        .AddStateOutput();

    // RasterizerState description.
    yield return factory.NewDescriptionNode(graphicsCategory, RasterizerStateDescription.Default)
        .AddCachedInput(nameof(RasterizerStateDescription.FillMode), x => x.v.FillMode, (x, v) => x.v.FillMode = v, FillMode.Solid)
        .AddCachedInput(nameof(RasterizerStateDescription.CullMode), x => x.v.CullMode, (x, v) => x.v.CullMode = v, CullMode.Back)
        .AddCachedInput(nameof(RasterizerStateDescription.DepthClipEnable), x => x.v.DepthClipEnable, (x, v) => x.v.DepthClipEnable = v, true)
        .AddCachedInput(nameof(RasterizerStateDescription.FrontFaceCounterClockwise), x => x.v.FrontFaceCounterClockwise, (x, v) => x.v.FrontFaceCounterClockwise = v, false)
        .AddCachedInput(nameof(RasterizerStateDescription.ScissorTestEnable), x => x.v.ScissorTestEnable, (x, v) => x.v.ScissorTestEnable = v, false)
        .AddCachedInput(nameof(RasterizerStateDescription.MultisampleCount), x => x.v.MultisampleCount, (x, v) => x.v.MultisampleCount = v, MultisampleCount.X8)
        .AddCachedInput(nameof(RasterizerStateDescription.MultisampleAntiAliasLine), x => x.v.MultisampleAntiAliasLine, (x, v) => x.v.MultisampleAntiAliasLine = v, true)
        .AddCachedInput(nameof(RasterizerStateDescription.DepthBias), x => x.v.DepthBias, (x, v) => x.v.DepthBias = v, 0)
        .AddCachedInput(nameof(RasterizerStateDescription.DepthBiasClamp), x => x.v.DepthBiasClamp, (x, v) => x.v.DepthBiasClamp = v, 0f)
        .AddCachedInput(nameof(RasterizerStateDescription.SlopeScaleDepthBias), x => x.v.SlopeScaleDepthBias, (x, v) => x.v.SlopeScaleDepthBias = v, 0f)
        .AddStateOutput();

    // DepthStencilState description.
    yield return factory.NewDescriptionNode(graphicsCategory, DepthStencilStates.Default)
        .AddCachedInput(nameof(DepthStencilStateDescription.DepthBufferEnable), x => x.v.DepthBufferEnable, (x, v) => x.v.DepthBufferEnable = v, true)
        .AddCachedInput(nameof(DepthStencilStateDescription.DepthBufferWriteEnable), x => x.v.DepthBufferWriteEnable, (x, v) => x.v.DepthBufferWriteEnable = v, true)
        .AddCachedInput(nameof(DepthStencilStateDescription.DepthBufferFunction), x => x.v.DepthBufferFunction, (x, v) => x.v.DepthBufferFunction = v, CompareFunction.LessEqual)
        .AddCachedInput(nameof(DepthStencilStateDescription.StencilEnable), x => x.v.StencilEnable, (x, v) => x.v.StencilEnable = v, false)
        .AddCachedInput(nameof(DepthStencilStateDescription.FrontFace), x => x.v.FrontFace, (x, v) => x.v.FrontFace = v)
        .AddCachedInput(nameof(DepthStencilStateDescription.BackFace), x => x.v.BackFace, (x, v) => x.v.BackFace = v)
        .AddCachedInput(nameof(DepthStencilStateDescription.StencilMask), x => x.v.StencilMask, (x, v) => x.v.StencilMask = v, byte.MaxValue)
        .AddCachedInput(nameof(DepthStencilStateDescription.StencilWriteMask), x => x.v.StencilWriteMask, (x, v) => x.v.StencilWriteMask = v, byte.MaxValue)
        .AddStateOutput();

    // TextureDescription (defaults to a 512x512 2D sRGB shader resource).
    yield return factory.NewDescriptionNode(graphicsCategory, TextureDescription.New2D(512, 512, PixelFormat.R8G8B8A8_UNorm_SRgb))
        .AddCachedInput(nameof(TextureDescription.Dimension), x => x.v.Dimension, (x, v) => x.v.Dimension = v, TextureDimension.Texture2D)
        .AddCachedInput(nameof(TextureDescription.Width), x => x.v.Width, (x, v) => x.v.Width = v, 512)
        .AddCachedInput(nameof(TextureDescription.Height), x => x.v.Height, (x, v) => x.v.Height = v, 512)
        .AddCachedInput(nameof(TextureDescription.Depth), x => x.v.Depth, (x, v) => x.v.Depth = v, 1)
        .AddCachedInput(nameof(TextureDescription.ArraySize), x => x.v.ArraySize, (x, v) => x.v.ArraySize = v, 1)
        .AddCachedInput(nameof(TextureDescription.MipLevels), x => x.v.MipLevels, (x, v) => x.v.MipLevels = v, 1)
        .AddCachedInput(nameof(TextureDescription.Format), x => x.v.Format, (x, v) => x.v.Format = v, PixelFormat.R8G8B8A8_UNorm_SRgb)
        .AddCachedInput(nameof(TextureDescription.MultisampleCount), x => x.v.MultisampleCount, (x, v) => x.v.MultisampleCount = v, MultisampleCount.None)
        .AddCachedInput(nameof(TextureDescription.Usage), x => x.v.Usage, (x, v) => x.v.Usage = v, GraphicsResourceUsage.Default)
        .AddCachedInput(nameof(TextureDescription.Flags), x => x.v.Flags, (x, v) => x.v.Flags = v, TextureFlags.ShaderResource)
        .AddCachedInput(nameof(TextureDescription.Options), x => x.v.Options, (x, v) => x.v.Options = v, TextureOptions.None)
        .AddStateOutput();

    // TextureViewDescription.
    yield return factory.NewDescriptionNode(graphicsCategory, new TextureViewDescription() { Format = PixelFormat.None, Flags = TextureFlags.None, Type = ViewType.Full })
        .AddCachedInput(nameof(TextureViewDescription.Format), x => x.v.Format, (x, v) => x.v.Format = v, PixelFormat.None)
        .AddCachedInput(nameof(TextureViewDescription.MipLevel), x => x.v.MipLevel, (x, v) => x.v.MipLevel = v, 0)
        .AddCachedInput(nameof(TextureViewDescription.ArraySlice), x => x.v.ArraySlice, (x, v) => x.v.ArraySlice = v, 0)
        .AddCachedInput(nameof(TextureViewDescription.Flags), x => x.v.Flags, (x, v) => x.v.Flags = v, TextureFlags.None)
        .AddCachedInput(nameof(TextureViewDescription.Type), x => x.v.Type, (x, v) => x.v.Type = v, ViewType.Full)
        .AddStateOutput();

    // Texture builder node: creates the GPU texture from description + optional initial data.
    yield return factory.NewNode(
        name: "Texture",
        category: graphicsCategory,
        ctor: ctx => new TextureBuilder(ctx),
        copyOnWrite: false,
        hasStateOutput: false)
        .AddCachedInput(nameof(TextureBuilder.Description), x => x.Description, (x, v) => x.Description = v)
        .AddCachedInput(nameof(TextureBuilder.ViewDescription), x => x.ViewDescription, (x, v) => x.ViewDescription = v)
        .AddCachedInput(nameof(TextureBuilder.InitalData), x => x.InitalData, (x, v) => x.InitalData = v)
        .AddInput(nameof(TextureBuilder.Recreate), x => x.Recreate, (x, v) => x.Recreate = v)
        .AddOutput("Output", x => x.Texture);

    // TextureView builder node: creates a view on an existing texture.
    yield return factory.NewNode(
        name: "TextureView",
        category: graphicsCategory,
        ctor: ctx => new TextureViewBuilder(ctx),
        copyOnWrite: false,
        hasStateOutput: false)
        .AddCachedInput(nameof(TextureViewBuilder.Input), x => x.Input, (x, v) => x.Input = v)
        .AddCachedInput(nameof(TextureViewBuilder.ViewDescription), x => x.ViewDescription, (x, v) => x.ViewDescription = v)
        .AddInput(nameof(TextureViewBuilder.Recreate), x => x.Recreate, (x, v) => x.Recreate = v)
        .AddOutput("Output", x => x.TextureView);

    // BufferDescription.
    yield return factory.NewDescriptionNode(graphicsCategory, new BufferDescription(64, BufferFlags.ShaderResource, GraphicsResourceUsage.Default))
        .AddCachedInput(nameof(BufferDescription.SizeInBytes), x => x.v.SizeInBytes, (x, v) => x.v.SizeInBytes = v, 64)
        .AddCachedInput(nameof(BufferDescription.StructureByteStride), x => x.v.StructureByteStride, (x, v) => x.v.StructureByteStride = v, 0)
        .AddCachedInput(nameof(BufferDescription.Usage), x => x.v.Usage, (x, v) => x.v.Usage = v, GraphicsResourceUsage.Default)
        .AddCachedInput(nameof(BufferDescription.BufferFlags), x => x.v.BufferFlags, (x, v) => x.v.BufferFlags = v, BufferFlags.ShaderResource)
        .AddStateOutput();

    // BufferViewDescription.
    yield return factory.NewDescriptionNode(graphicsCategory, new BufferViewDescription() { Flags = BufferFlags.ShaderResource, Format = PixelFormat.None })
        .AddCachedInput(nameof(BufferViewDescription.Flags), x => x.v.Flags, (x, v) => x.v.Flags = v, BufferFlags.ShaderResource)
        .AddCachedInput(nameof(BufferViewDescription.Format), x => x.v.Format, (x, v) => x.v.Format = v, PixelFormat.None)
        .AddStateOutput();

    // Buffer builder node.
    yield return factory.NewNode(
        name: "Buffer",
        category: graphicsCategory,
        ctor: ctx => new BufferBuilder(ctx),
        copyOnWrite: false,
        hasStateOutput: false)
        .AddCachedInput(nameof(BufferBuilder.Description), x => x.Description, (x, v) => x.Description = v)
        .AddCachedInput(nameof(BufferBuilder.ViewDescription), x => x.ViewDescription, (x, v) => x.ViewDescription = v)
        .AddCachedInput(nameof(BufferBuilder.InitalData), x => x.InitalData, (x, v) => x.InitalData = v)
        .AddInput(nameof(BufferBuilder.Recreate), x => x.Recreate, (x, v) => x.Recreate = v)
        .AddOutput("Output", x => x.Buffer);

    // BufferView builder node.
    yield return factory.NewNode(
        name: "BufferView",
        category: graphicsCategory,
        ctor: ctx => new BufferViewBuilder(ctx),
        copyOnWrite: false,
        hasStateOutput: false)
        .AddCachedInput(nameof(BufferViewBuilder.Input), x => x.Input, (x, v) => x.Input = v)
        .AddCachedInput(nameof(BufferViewBuilder.ViewDescription), x => x.ViewDescription, (x, v) => x.ViewDescription = v)
        .AddInput(nameof(BufferViewBuilder.Recreate), x => x.Recreate, (x, v) => x.Recreate = v)
        .AddOutput("Output", x => x.Buffer);

    // Stencil operation description (front/back face of DepthStencilState).
    yield return factory.NewDescriptionNode(graphicsCategory,
        new DepthStencilStencilOpDescription()
        {
            StencilFunction = CompareFunction.Always,
            StencilPass = StencilOperation.Keep,
            StencilFail = StencilOperation.Keep,
            StencilDepthBufferFail = StencilOperation.Keep
        })
        .AddCachedInput(nameof(DepthStencilStencilOpDescription.StencilFunction), x => x.v.StencilFunction, (x, v) => x.v.StencilFunction = v, CompareFunction.Always)
        .AddCachedInput(nameof(DepthStencilStencilOpDescription.StencilPass), x => x.v.StencilPass, (x, v) => x.v.StencilPass = v, StencilOperation.Keep)
        .AddCachedInput(nameof(DepthStencilStencilOpDescription.StencilFail), x => x.v.StencilFail, (x, v) => x.v.StencilFail = v, StencilOperation.Keep)
        .AddCachedInput(nameof(DepthStencilStencilOpDescription.StencilDepthBufferFail), x => x.v.StencilDepthBufferFail, (x, v) => x.v.StencilDepthBufferFail = v, StencilOperation.Keep)
        .AddStateOutput();
}
// Builds the pipeline state handed to a fresh PipelineState node, with the same
// multisampling defaults the RasterizerState description node uses (X8, AA lines).
private static MutablePipelineState CreateInitialPipelineState(IResourceHandle<GraphicsDevice> deviceHandle)
{
    var pipelineState = new MutablePipelineState(deviceHandle.Resource);
    pipelineState.State.RasterizerState.MultisampleAntiAliasLine = true;
    pipelineState.State.RasterizerState.MultisampleCount = MultisampleCount.X8;
    return pipelineState;
}
// Creates a node description whose state is a boxed struct of type T, named after T
// unless an explicit name is given.
static CustomNodeDesc<StructRef<T>> NewDescriptionNode<T>(this IVLNodeDescriptionFactory factory, string category, T initial, string name = default) where T : struct
{
    var nodeName = name ?? typeof(T).Name;
    return factory.NewNode(name: nodeName, category: category, copyOnWrite: false, hasStateOutput: false, ctor: _ => S(initial));
}
// Adds the standard "Output" pin exposing the boxed struct value itself.
static CustomNodeDesc<StructRef<T>> AddStateOutput<T>(this CustomNodeDesc<StructRef<T>> node) where T : struct
    => node.AddOutput("Output", x => x.v);
// Shorthand: boxes a struct value into a mutable StructRef.
static StructRef<T> S<T>(T value) where T : struct
{
    return new StructRef<T>(value);
}
// Minimal mutable box for a struct value, so the fluent pin setters can
// modify the description in place instead of copying it around.
class StructRef<T> where T : struct
{
    public T v;

    public StructRef(T value) => v = value;
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/Effects/ShaderMetadata.cs<|end_filename|>
using System;
using Stride.Graphics;
using Stride.Core.Shaders.Ast;
using System.Linq;
using Stride.Core.Shaders.Ast.Hlsl;
using System.Collections.Generic;
using Stride.Core.IO;
using Stride.Rendering;
using Stride.Core.Shaders.Ast.Stride;
using Stride.Shaders;
using VL.Stride.Shaders.ShaderFX;
using Stride.Core.Mathematics;
using Stride.Rendering.Materials;
using System.ComponentModel;
using Stride.Shaders.Parser.Mixins;
namespace VL.Stride.Rendering
{
public class ShaderMetadata
{
// Pixel format of the output texture (PixelFormat.None = not specified by the shader).
public PixelFormat OutputFormat { get; private set; } = PixelFormat.None;
// Output texture size requested by the shader (Int2.Zero = not specified).
public Int2 OutputSize { get; private set; }
// Format used while rendering; may differ from OutputFormat (see GetPixelFormats).
public PixelFormat RenderFormat { get; private set; } = PixelFormat.None;
// Node documentation parsed from shader doc attributes.
public string Category { get; private set; }
public string Summary { get; private set; }
public string Remarks { get; private set; }
public string Tags { get; private set; }
// Parsed AST of the shader this metadata belongs to.
public ParsedShader ParsedShader { get; private set; }
// True when the shader generates pixels (texture source) rather than filtering an input.
public bool IsTextureSource { get; private set; }
// Texture input pins that want mip-mapped inputs.
public List<string> WantsMips { get; private set; }
// Texture input pins whose reads must not be converted from sRGB to linear;
// non-null but empty means "applies to all texture inputs" (see GetTexturePinsToManageInternal).
public List<string> DontConvertToLinearOnRead{ get; private set; }
// When true, writes must not be converted to sRGB on output.
public bool DontConvertToSRgbOnOnWrite { get; private set; }
/// <summary>
/// Resolves the output and render pixel formats, applying texture-source defaults.
/// </summary>
public void GetPixelFormats(out PixelFormat outputFormat, out PixelFormat renderFormat)
{
    if (!IsTextureSource)
    {
        // Filters keep whatever the shader declared (possibly None).
        outputFormat = OutputFormat;
        renderFormat = RenderFormat;
        return;
    }

    // Texture sources default to an sRGB color format when the shader didn't specify one.
    outputFormat = OutputFormat == PixelFormat.None ? PixelFormat.R8G8B8A8_UNorm_SRgb : OutputFormat;

    // If sRGB conversion on write is disabled and no render format was given explicitly,
    // render into the non-sRGB twin of the output format.
    renderFormat = DontConvertToSRgbOnOnWrite && RenderFormat == PixelFormat.None
        ? outputFormat.ToNonSRgb()
        : RenderFormat;
}
/// <summary>
/// Resolves the output texture size and whether the size pin should be shown.
/// </summary>
public void GetOutputSize(out Int2 outputSize, out bool outputSizeVisible)
{
    // A size pinned in the shader wins; otherwise sources default to 512x512, filters to zero.
    var sizeSetInShader = OutputSize != Int2.Zero;
    outputSize = sizeSetInShader
        ? OutputSize
        : (IsTextureSource ? new Int2(512, 512) : Int2.Zero);

    // The pin is visible for texture sources and whenever the shader pinned a size.
    outputSizeVisible = IsTextureSource || sizeSetInShader;
}
/// <summary>
/// Returns the node category rooted under the given prefix.
/// </summary>
public string GetCategory(string prefix)
{
    // No category in the shader: just use the prefix.
    if (string.IsNullOrWhiteSpace(Category))
        return prefix;

    // Nest the shader's category under the prefix unless it is already rooted there.
    return Category.StartsWith(prefix) ? Category : prefix + "." + Category;
}
// Pin name -> enum type override (plus default value) parsed from shader attributes.
Dictionary<string, EnumMetadata> pinEnumTypes = new Dictionary<string, EnumMetadata>();
// Names of pins flagged as optional in the shader.
HashSet<string> optionalPins = new HashSet<string>();
/// <summary>
/// Registers an enum type override for a pin, resolving the CLR type from the attribute's
/// type name and an optional literal default value from the shader.
/// </summary>
private void AddEnumTypePinAttribute(string name, string enumTypeName, Expression initialValue)
{
    // Unknown or non-enum type names are silently ignored.
    var enumType = Type.GetType(enumTypeName);
    if (enumType == null || !enumType.IsEnum)
        return;

    // Default to the enum's zero value.
    object defaultValue = Activator.CreateInstance(enumType);

    // A literal initializer overrides the default; it is parsed through the
    // enum's underlying integral type so e.g. "2" maps onto the right member.
    if (initialValue is LiteralExpression literal)
    {
        var defaultText = literal.Text;
        var converter = TypeDescriptor.GetConverter(Enum.GetUnderlyingType(enumType));
        if (converter != null && converter.IsValid(defaultText))
            defaultValue = Enum.ToObject(enumType, converter.ConvertFromString(defaultText));
    }

    pinEnumTypes[name] = new EnumMetadata(enumType, defaultValue);
}
// Marks the named pin as optional.
private void AddOptionalPinAttribute(string name) => optionalPins.Add(name);
// Per-pin documentation and asset links collected from shader attributes (pin key name -> text).
Dictionary<string, string> pinSummaries = new Dictionary<string, string>();
Dictionary<string, string> pinRemarks = new Dictionary<string, string>();
Dictionary<string, string> pinAssets = new Dictionary<string, string>();
// Records the shader-provided summary for a pin.
private void AddPinSummary(string pinKeyName, string summary) => pinSummaries[pinKeyName] = summary;

// Records the shader-provided remarks for a pin.
private void AddPinRemarks(string pinKeyName, string remarks) => pinRemarks[pinKeyName] = remarks;

// Records an asset URL associated with a pin.
private void AddPinAsset(string pinKeyName, string assetURL) => pinAssets[pinKeyName] = assetURL;
/// <summary>
/// Collects the summary, remarks and optional flag for a pin. Composition pins
/// (ShaderSource) take their docs from the parsed composition; all others from the
/// attribute dictionaries. The shader-side type name is appended to the summary for
/// non-primitive types.
/// </summary>
public void GetPinDocuAndVisibility(ParameterKey key, out string summary, out string remarks, out bool isOptional)
{
    var name = key.Name;
    summary = "";
    remarks = "";
    isOptional = false;

    // Composition pins carry their docs on the parsed composition ...
    if (key.PropertyType == typeof(ShaderSource) && ParsedShader != null)
    {
        if (ParsedShader.CompositionsWithBaseShaders.TryGetValue(key.GetVariableName(), out var composition))
        {
            if (!string.IsNullOrWhiteSpace(composition.Summary))
                summary = composition.Summary;
            if (!string.IsNullOrWhiteSpace(composition.Remarks))
                remarks = composition.Remarks;
            isOptional = composition.IsOptional;
        }
    }
    else
    {
        // ... all other pins use the docs collected from shader attributes.
        if (pinSummaries.TryGetValue(name, out var sum))
            summary = sum;
        if (pinRemarks.TryGetValue(name, out var rem))
            remarks = rem;
        isOptional = optionalPins.Contains(name);
    }

    // add type in shader to pin summary, if not float, int or bool type
    var varName = key.GetVariableName();
    if (ParsedShader != null && ParsedShader.VariablesByName.TryGetValue(varName, out var variable))
    {
        var varType = variable.Type.ToString();
        if (!(varType.StartsWith("float", StringComparison.OrdinalIgnoreCase)
            || varType.StartsWith("int", StringComparison.OrdinalIgnoreCase)
            || varType.StartsWith("bool", StringComparison.OrdinalIgnoreCase)
            || varType.StartsWith("uint", StringComparison.OrdinalIgnoreCase)
            || varType.StartsWith("Sampler", StringComparison.OrdinalIgnoreCase)
            || varType.StartsWith("ComputeFloat", StringComparison.OrdinalIgnoreCase)
            || varType.StartsWith("ComputeInt", StringComparison.OrdinalIgnoreCase)
            || varType.StartsWith("ComputeBool", StringComparison.OrdinalIgnoreCase)
            || varType.StartsWith("ComputeUInt", StringComparison.OrdinalIgnoreCase)
            || varType.StartsWith("ComputeMatrix", StringComparison.OrdinalIgnoreCase)
            || varType.StartsWith("matrix", StringComparison.OrdinalIgnoreCase)
            ))
        {
            // Append on a new line when a summary already exists.
            summary += (string.IsNullOrWhiteSpace(summary) ? "" : Environment.NewLine) + varType;
        }
    }
}
/// <summary>
/// Returns the texture input pins that need special handling (mip generation and/or
/// raw sRGB reads). Materialized so callers can enumerate the result repeatedly.
/// </summary>
public IEnumerable<(string textureName, bool wantsMips, bool dontUnapplySRgb)> GetTexturePinsToManage(IEnumerable<string> allTextureInputNames)
{
    return GetTexturePinsToManageInternal(allTextureInputNames).ToList();
}

IEnumerable<(string textureName, bool wantsMips, bool dontUnapplySRgb)> GetTexturePinsToManageInternal(IEnumerable<string> allTextureInputNames)
{
    // Pins listed in WantsMips need mip-mapped input textures.
    IEnumerable<string> mipPins = WantsMips?.Count > 0 ? WantsMips : Enumerable.Empty<string>();

    // A non-null DontConvertToLinearOnRead means sRGB should be read raw — either for
    // the listed pins, or for every texture input when the list is empty.
    IEnumerable<string> srgbPins;
    if (DontConvertToLinearOnRead == null)
        srgbPins = Enumerable.Empty<string>();
    else if (DontConvertToLinearOnRead.Count > 0)
        srgbPins = DontConvertToLinearOnRead;
    else
        srgbPins = allTextureInputNames;

    foreach (var textureName in allTextureInputNames)
    {
        var needsMips = mipPins.Contains(textureName);
        var readsRawSRgb = srgbPins.Contains(textureName);
        if (needsMips || readsRawSRgb)
            yield return (textureName, needsMips, readsRawSRgb);
    }
}
/// <summary>
/// Gets the type of the pin, if overwritten by an attribute, e.g. int -> enum.
/// Returns null when the key's own type should be used.
/// </summary>
public Type GetPinType(ParameterKey key, out object boxedDefaultValue)
{
    boxedDefaultValue = null;

    // Enum override declared via shader attribute?
    if (pinEnumTypes.TryGetValue(key.Name, out var enumMetadata))
    {
        boxedDefaultValue = enumMetadata.defaultValue;
        return enumMetadata.typeName;
    }

    // Composition pins surface as compute-node pins.
    if (key.PropertyType == typeof(ShaderSource) && ParsedShader != null)
    {
        if (ParsedShader.CompositionsWithBaseShaders.TryGetValue(key.GetVariableName(), out var composition))
        {
            boxedDefaultValue = composition.GetDefaultComputeNode(forPatch: true);
            if (knownShaderFXTypes.TryGetValue(composition.TypeName, out var shaderFXType))
                return shaderFXType;
        }
        return typeof(IComputeNode);
    }

    return null;
}
/// <summary>
/// Determines the ShaderFX output type by walking the base shaders of the parsed shader.
/// </summary>
/// <param name="innerType">Receives the generic argument of the output type (e.g. float for SetVar&lt;float&gt;), or VoidOrUnknown.</param>
/// <returns>The first recognized ShaderFX type among the base shaders, otherwise IComputeNode.</returns>
public Type GetShaderFXOutputType(out Type innerType)
{
    innerType = typeof(VoidOrUnknown);

    var baseShaders = ParsedShader?.BaseShaders ?? Enumerable.Empty<ParsedShader>();
    foreach (var baseShader in baseShaders)
    {
        var shaderName = baseShader?.ShaderClass?.Name;
        if (string.IsNullOrWhiteSpace(shaderName))
            continue;

        if (knownShaderFXTypes.TryGetValue(shaderName, out var fxType))
        {
            // For generic wrappers like SetVar<T>, surface T as the inner type.
            if (fxType.IsGenericType)
                innerType = fxType.GetGenericArguments()[0];
            return fxType;
        }
    }

    // No known base shader found: treat the output as a generic compute node.
    return typeof(IComputeNode);
}
// Maps ShaderFX base shader class names to their corresponding node types.
// Used by GetPinType and GetShaderFXOutputType to resolve compositions.
static Dictionary<string, Type> knownShaderFXTypes = new Dictionary<string, Type>()
{
    { "ComputeVoid", typeof(ComputeVoid) },
    { "ComputeFloat", typeof(SetVar<float>) },
    { "ComputeFloat2", typeof(SetVar<Vector2>) },
    { "ComputeFloat3", typeof(SetVar<Vector3>) },
    { "ComputeFloat4", typeof(SetVar<Vector4>) },
    { "ComputeMatrix", typeof(SetVar<Matrix>) },
    { "ComputeBool", typeof(SetVar<bool>) },
    { "ComputeInt", typeof(SetVar<int>) },
    { "ComputeInt2", typeof(SetVar<Int2>) },
    { "ComputeInt3", typeof(SetVar<Int3>) },
    { "ComputeInt4", typeof(SetVar<Int4>) },
    { "ComputeUInt", typeof(SetVar<uint>) },
};
/// <summary>
/// Determines whether the specified pin with the given key is optional.
/// </summary>
public bool IsOptional(ParameterKey key) => optionalPins.Contains(key.Name);
// Shader-level attribute names recognized in CreateMetadata.
public const string CategoryName = "Category";
public const string SummaryName = "Summary";
public const string RemarksName = "Remarks";
public const string TagsName = "Tags";
public const string OutputFormatName = "OutputFormat";
public const string OutputSizeName = "OutputSize";
public const string RenderFormatName = "RenderFormat";
public const string TextureSourceName = "TextureSource";
public const string WantsMipsName = "WantsMips";
public const string DontConvertToLinearOnReadName = "DontConvertToLinearOnRead";
// NOTE(review): constant name omits the trailing "Write" of its value; kept as-is for compatibility.
public const string DontConvertToSRgbOnName = "DontConvertToSRgbOnWrite";
// Pin-level attribute names recognized in CreateMetadata.
public const string EnumTypeName = "EnumType";
public const string OptionalName = "Optional";
public const string DefaultName = "Default";
public const string AssetName = "Asset";
/// <summary>
/// Registers the additional stride variable attributes. Avoids writing them to the final shader, which would create an error in the native platform compiler.
/// </summary>
public static void RegisterAdditionalShaderAttributes()
{
    // Only pin attributes need to be registered.
    var pinAttributeNames = new[] { EnumTypeName, OptionalName, DefaultName, SummaryName, RemarksName, AssetName };
    foreach (var attributeName in pinAttributeNames)
        StrideAttributes.AvailableAttributes.Add(attributeName);
}
/// <summary>
/// Builds a <see cref="ShaderMetadata"/> for the given effect by parsing the shader source
/// and reading the shader-level and pin-level attributes declared on it.
/// </summary>
/// <param name="effectName">Name of the effect/shader to parse.</param>
/// <param name="fileProvider">Virtual file provider used to locate the shader source.</param>
/// <param name="shaderSourceManager">Source manager used by the parser.</param>
/// <returns>Metadata populated from the shader attributes, or defaults when parsing fails.</returns>
public static ShaderMetadata CreateMetadata(string effectName, IVirtualFileProvider fileProvider, ShaderSourceManager shaderSourceManager)
{
    // create metadata with default values
    var shaderMetadata = new ShaderMetadata();
    // try to populate metadata with information from the shader
    if (fileProvider.TryParseEffect(effectName, shaderSourceManager, out var result))
    {
        shaderMetadata.ParsedShader = result;
        var shaderDecl = result.ShaderClass;
        if (shaderDecl != null)
        {
            // shader-level attributes (category, summary, formats, texture handling, ...)
            foreach (var attr in shaderDecl.Attributes.OfType<AttributeDeclaration>())
            {
                switch (attr.Name)
                {
                    case CategoryName:
                        shaderMetadata.Category = attr.ParseString();
                        break;
                    case SummaryName:
                        shaderMetadata.Summary = attr.ParseString();
                        break;
                    case RemarksName:
                        shaderMetadata.Remarks = attr.ParseString();
                        break;
                    case TagsName:
                        shaderMetadata.Tags = attr.ParseString();
                        break;
                    case OutputFormatName:
                        // attribute value must match a PixelFormat enum member (case-insensitive)
                        if (Enum.TryParse<PixelFormat>(attr.ParseString(), true, out var outputFormat))
                            shaderMetadata.OutputFormat = outputFormat;
                        break;
                    case RenderFormatName:
                        if (Enum.TryParse<PixelFormat>(attr.ParseString(), true, out var renderFormat))
                            shaderMetadata.RenderFormat = renderFormat;
                        break;
                    case OutputSizeName:
                        shaderMetadata.OutputSize = attr.ParseInt2();
                        break;
                    case TextureSourceName:
                        // marker attribute, presence alone sets the flag
                        shaderMetadata.IsTextureSource = true;
                        break;
                    case WantsMipsName:
                        shaderMetadata.WantsMips = attr.ParseStringAsCommaSeparatedList();
                        break;
                    case DontConvertToLinearOnReadName:
                        shaderMetadata.DontConvertToLinearOnRead = attr.ParseStringAsCommaSeparatedList();
                        break;
                    case DontConvertToSRgbOnName:
                        shaderMetadata.DontConvertToSRgbOnOnWrite = true;
                        break;
                    default:
                        // unknown attributes are ignored
                        break;
                }
            }
            // pin-level attributes; compositions and streams are not pins
            var pinDecls = shaderDecl.Members.OfType<Variable>().Where(v => !v.Qualifiers.Contains(StrideStorageQualifier.Compose) && !v.Qualifiers.Contains(StrideStorageQualifier.Stream));
            foreach (var pinDecl in pinDecls)
            {
                foreach (var attr in pinDecl.Attributes.OfType<AttributeDeclaration>())
                {
                    switch (attr.Name)
                    {
                        case EnumTypeName:
                            shaderMetadata.AddEnumTypePinAttribute(pinDecl.GetKeyName(shaderDecl), attr.ParseString(), pinDecl.InitialValue);
                            break;
                        case OptionalName:
                            shaderMetadata.AddOptionalPinAttribute(pinDecl.GetKeyName(shaderDecl));
                            break;
                        case SummaryName:
                            shaderMetadata.AddPinSummary(pinDecl.GetKeyName(shaderDecl), attr.ParseString());
                            break;
                        case RemarksName:
                            shaderMetadata.AddPinRemarks(pinDecl.GetKeyName(shaderDecl), attr.ParseString());
                            break;
                        case DefaultName:
                            // handled in composition parsing in ParseShader.cs
                            break;
                        case AssetName:
                            shaderMetadata.AddPinAsset(pinDecl.GetKeyName(shaderDecl), attr.ParseString());
                            break;
                        default:
                            break;
                    }
                }
            }
        }
    }
    return shaderMetadata;
}
// Holds the resolved enum type and boxed default value for a pin carrying an [EnumType] attribute.
class EnumMetadata
{
    // NOTE(review): despite the name, this field holds the resolved Type, not a string name.
    public readonly Type typeName;
    // Boxed default value for the enum pin.
    public readonly object defaultValue;

    public EnumMetadata(Type enumType, object boxedDefaultValue)
    {
        typeName = enumType;
        defaultValue = boxedDefaultValue;
    }
}
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Rendering/RenderFeatures/EntityRendererRenderFeature.cs<|end_filename|>
using Stride.Core;
using Stride.Core.Diagnostics;
using Stride.Core.Mathematics;
using Stride.Graphics;
using Stride.Rendering;
using System;
using System.Collections.Generic;
using VL.Core;
namespace VL.Stride.Rendering
{
/// <summary>
/// The render feature redirects low level rendering calls to the <see cref="IGraphicsRendererBase"/>
/// </summary>
public class EntityRendererRenderFeature : RootRenderFeature
{
    /// <summary>
    /// A property key to get the current parent transformation from the <see cref="RenderContext.Tags"/>.
    /// </summary>
    public static readonly PropertyKey<Matrix> CurrentParentTransformation = new PropertyKey<Matrix>("EntityRendererRenderFeature.CurrentParentTransformation", typeof(Matrix), DefaultValueMetadata.Static(Matrix.Identity, keepDefaultValue: true));

    // Renderers that should only be invoked once per frame (rebuilt each Draw call).
    private readonly List<RenderRenderer> singleCallRenderers = new List<RenderRenderer>();
    // Renderers that may be invoked multiple times per frame (e.g. once per eye in VR).
    private readonly List<RenderRenderer> renderers = new List<RenderRenderer>();
    // Frame counter of the last frame in which single-call renderers were invoked.
    private int lastFrameNr;
    // Lazily resolved VL runtime; used to skip rendering when the patch is paused.
    private IVLRuntime runtime;

    public RenderStage HelpersRenderStage { get; set; } // TODO: shouldn't be a pin
    public IGraphicsRendererBase HelpersRenderer { get; set; }

    public EntityRendererRenderFeature()
    {
        // Pre adjust render priority, low numer is early, high number is late (advantage of backbuffer culling)
        SortKey = 190;
    }

    public override void Prepare(RenderDrawContext context)
    {
        base.Prepare(context);

        // Resolve the runtime service once; it may not be available on the first frames.
        runtime ??= context.RenderContext.Services.GetService<IVLRuntime>();
    }

    public override Type SupportedRenderObjectType => typeof(RenderRenderer);

    /// <summary>
    /// Draws the render view stage and, when it matches the helpers stage, the helpers renderer afterwards.
    /// </summary>
    public override void Draw(RenderDrawContext context, RenderView renderView, RenderViewStage renderViewStage)
    {
        base.Draw(context, renderView, renderViewStage);

        if (HelpersRenderStage != null && HelpersRenderer != null && renderViewStage.Index == HelpersRenderStage.Index)
        {
            HelpersRenderer.Draw(context);
        }
    }

    /// <summary>
    /// Invokes the user-provided renderers for the sorted render nodes in [startIndex, endIndex).
    /// Single-call renderers run at most once per frame; all others run for every view (e.g. per eye).
    /// </summary>
    public override void Draw(RenderDrawContext context, RenderView renderView, RenderViewStage renderViewStage, int startIndex, int endIndex)
    {
        //CPU and GPU profiling
        using (Profiler.Begin(VLProfilerKeys.InSceneRenderProfilingKey))
        using (context.QueryManager.BeginProfile(Color.Green, VLProfilerKeys.InSceneRenderProfilingKey))
        {
            // Do not call into VL if not running
            if (runtime != null && !runtime.IsRunning)
                return;

            // Build the list of renderers to render
            singleCallRenderers.Clear();
            renderers.Clear();
            for (var index = startIndex; index < endIndex; index++)
            {
                var renderNodeReference = renderViewStage.SortedRenderNodes[index].RenderNode;
                var renderNode = GetRenderNode(renderNodeReference);
                var renderRenderer = (RenderRenderer)renderNode.RenderObject;
                if (renderRenderer.Enabled)
                {
                    if (renderRenderer.SingleCallPerFrame)
                        singleCallRenderers.Add(renderRenderer);
                    else
                        renderers.Add(renderRenderer);
                }
            }

            if (singleCallRenderers.Count == 0 && renderers.Count == 0)
                return;

            using (context.RenderContext.PushRenderViewAndRestore(renderView))
            {
                // Call renderers which want to get invoked only once per frame first
                var currentFrameNr = context.RenderContext.Time.FrameCount;
                if (lastFrameNr != currentFrameNr)
                {
                    lastFrameNr = currentFrameNr;

                    foreach (var renderer in singleCallRenderers)
                    {
                        try
                        {
                            // Expose the parent transformation to the renderer via the context tags.
                            using (context.RenderContext.PushTagAndRestore(CurrentParentTransformation, renderer.ParentTransformation))
                            {
                                renderer.Renderer?.Draw(context);
                            }
                        }
                        catch (Exception e)
                        {
                            // A faulty renderer must not break the whole frame; report and continue.
                            RuntimeGraph.ReportException(e);
                        }
                    }
                }

                // Call renderers which can get invoked twice per frame (for each eye)
                foreach (var renderer in renderers)
                {
                    try
                    {
                        using (context.RenderContext.PushTagAndRestore(CurrentParentTransformation, renderer.ParentTransformation))
                        {
                            renderer.Renderer?.Draw(context);
                        }
                    }
                    catch (Exception e)
                    {
                        RuntimeGraph.ReportException(e);
                    }
                }
            }
        }
    }
}
}
<|start_filename|>packages/VL.Stride.Runtime/src/Games/GameWindowRenderer.cs<|end_filename|>
// Modified version of Stride.Games.GameWindowRenderer using GameWindowRendererManager.
// This class should be kept internal
using System;
using Stride.Core;
using Stride.Core.Mathematics;
using Stride.Games;
using Stride.Graphics;
namespace VL.Stride.Games
{
/// <summary>
/// A GameSystem that allows to draw to another window or control. Currently only valid on desktop with Windows.Forms.
/// </summary>
public class GameWindowRenderer : GameSystemBase
{
    // Presenter of the main device, saved in BeginDraw and restored in EndDraw.
    private GraphicsPresenter savedPresenter;
    // True while a frame started by BeginDraw has not yet been presented.
    private bool beginDrawOk;

    /// <summary>
    /// Initializes a new instance of the <see cref="GameWindowRenderer" /> class.
    /// </summary>
    /// <param name="registry">The registry.</param>
    /// <param name="gameContext">The window context.</param>
    public GameWindowRenderer(IServiceRegistry registry, GameContext gameContext)
        : base(registry)
    {
        GameContext = gameContext;
        WindowManager = new GameWindowRendererManager();
    }

    /// <summary>
    /// Gets the underlying native window.
    /// </summary>
    /// <value>The underlying native window.</value>
    public GameContext GameContext { get; private set; }

    /// <summary>
    /// Gets the window.
    /// </summary>
    /// <value>The window.</value>
    public GameWindow Window { get; private set; }

    /// <summary>
    /// Gets or sets the presenter.
    /// </summary>
    /// <value>The presenter.</value>
    public GraphicsPresenter Presenter { get; protected set; }

    /// <summary>
    /// Gets the window manager.
    /// </summary>
    /// <value>
    /// The window manager.
    /// </value>
    public GameWindowRendererManager WindowManager { get; private set; }

    public override void Initialize()
    {
        var gamePlatform = Services.GetService<IGamePlatform>();
        //GameContext.RequestedWidth = WindowManager.PreferredBackBufferWidth;
        //GameContext.RequestedHeight = WindowManager.PreferredBackBufferHeight;
        Window = gamePlatform.CreateWindow(GameContext);
        Window.SetSize(new Int2(WindowManager.PreferredBackBufferWidth, WindowManager.PreferredBackBufferHeight));
        Window.Visible = true;

        base.Initialize();
    }

    protected override void Destroy()
    {
        if (Presenter != null)
        {
            // Make sure that the Presenter is reverted to window before shuting down
            // otherwise the Direct3D11.Device will generate an exception on Dispose()
            Presenter.IsFullScreen = false;
            Presenter.Dispose();
        }
        Presenter = null;

        WindowManager?.Dispose();
        WindowManager = null;
        Window?.Dispose();
        Window = null;

        base.Destroy();
    }

    /// <summary>
    /// Creates the swap chain presenter on first use and initializes the window manager.
    /// </summary>
    protected virtual void EnsurePresenter()
    {
        if (Presenter == null)
        {
            var presentationParameters = new PresentationParameters(
                WindowManager.PreferredBackBufferWidth,
                WindowManager.PreferredBackBufferHeight,
                Window.NativeWindow,
                WindowManager.PreferredBackBufferFormat)
            {
                DepthStencilFormat = WindowManager.PreferredDepthStencilFormat,
                PresentationInterval = PresentInterval.Immediate,
                MultisampleCount = WindowManager.PreferredMultisampleCount,
            };

#if STRIDE_GRAPHICS_API_DIRECT3D11 && STRIDE_PLATFORM_UWP
            if (Game.Context is GameContextUWPCoreWindow context && context.IsWindowsMixedReality)
            {
                Presenter = new WindowsMixedRealityGraphicsPresenter(GraphicsDevice, presentationParameters);
            }
            else
#endif
            {
                Presenter = new SwapChainGraphicsPresenter(GraphicsDevice, presentationParameters);
            }

            WindowManager.Initialize(this, GraphicsDevice, Services.GetService<IGraphicsDeviceFactory>());
        }
    }

    public override bool BeginDraw()
    {
        if (GraphicsDevice != null && Window.Visible)
        {
            // Temporarily swap in our presenter; the previous one is restored in EndDraw.
            savedPresenter = GraphicsDevice.Presenter;

            EnsurePresenter();

            GraphicsDevice.Presenter = Presenter;

            // Perform begin of frame presenter operations
            var commandList = Game.GraphicsContext.CommandList;
            if (Presenter.DepthStencilBuffer != null)
                commandList.ResourceBarrierTransition(Presenter.DepthStencilBuffer, GraphicsResourceState.DepthWrite);

            if (Presenter.BackBuffer != null)
                commandList.ResourceBarrierTransition(Presenter.BackBuffer, GraphicsResourceState.RenderTarget);

            Presenter.BeginDraw(commandList);

            beginDrawOk = true;
            return true;
        }

        beginDrawOk = false;
        return false;
    }

    public override void EndDraw()
    {
        if (beginDrawOk)
        {
            // We'd like to call Present() here like in the original code, however that would be too early
            // in case other game systems want to draw into our backbuffer (like GameProfilingSystem).
            // Present();
            var game = (VLGame)Game;
            game.PendingPresentCalls.Add(this);

            if (savedPresenter != null)
            {
                GraphicsDevice.Presenter = savedPresenter;
            }
        }
    }

    // Called by VLGame at the very end to ensure that other game systems (like the GameProfilingSystem) were able to post their draw commands
    internal void Present()
    {
        if (beginDrawOk)
        {
            // Perform end of frame presenter operations
            var commandList = Game.GraphicsContext.CommandList;
            Presenter.EndDraw(commandList, present: true);

            if (Presenter.BackBuffer != null)
                commandList.ResourceBarrierTransition(Presenter.BackBuffer, GraphicsResourceState.Present);

            try
            {
                Presenter.Present();
            }
            catch (GraphicsException ex)
            {
                // Device removed/reset is handled by the engine's device recreation path; rethrow anything else.
                if (ex.Status != GraphicsDeviceStatus.Removed && ex.Status != GraphicsDeviceStatus.Reset)
                {
                    throw;
                }
            }

            beginDrawOk = false;
        }
    }
}
}
| vvvv/VL.Stride |
<|start_filename|>build-utils.js<|end_filename|>
/* eslint-env node */
// TODO: Rewrite in TypeScript
import n from 'nunjucks';
import fs from 'fs';
// Build-time environment flags. BUILD_ENV explicitly selects a config;
// otherwise the config is derived from NODE_ENV (production vs development).
const { BUILD_ENV, NODE_ENV } = process.env;
export const production = NODE_ENV === 'production';
const buildTarget = BUILD_ENV ? BUILD_ENV : production ? 'production' : 'development';
// Resolves the JSON config file for the current build target and verifies it exists.
const getConfigPath = () => {
  const configPath = `./config/${buildTarget}.json`;
  if (!fs.existsSync(configPath)) {
    // Fail the build loudly rather than continuing with missing settings.
    throw new Error(`
      ERROR: Config path '${configPath}' does not exists.
      Please, use production|development.json files or add a configuration file at '${configPath}'.
    `);
  }
  console.log(`File path ${configPath} selected as config...`);
  return configPath;
};
// Merges all settings sources into a single data object; later files win on key conflicts.
const getData = () => {
  const settingsFiles = ['./data/resources.json', './data/settings.json', getConfigPath()];
  let merged = { NODE_ENV };
  for (const file of settingsFiles) {
    merged = { ...merged, ...require(file) };
  }
  return merged;
};
// Settings are computed once at module load.
const data = getData();
// Use {$ ... $} as variable delimiters so templates don't clash with other
// tooling that uses nunjucks' default {{ ... }} syntax.
const nunjucks = n.configure({
  tags: {
    variableStart: '{$',
    variableEnd: '$}',
  },
});
// Renders a template string against the merged settings data.
export function compileTemplate(template) {
  return nunjucks.renderString(template, data);
}

// Convenience wrapper for Buffer inputs (e.g. file contents from a stream).
export function compileBufferTemplate(body) {
  return compileTemplate(body.toString());
}
<|start_filename|>workbox-config.js<|end_filename|>
/* eslint-env node */
// TODO: Rewrite in TypeScript
// Cache lifetime for runtime-cached images, in seconds.
const ONE_WEEK = 60 * 60 * 24 * 7;
// Matches Firebase's reserved /__/ URLs (auth helpers etc.); these must never be served from the SPA fallback.
const FIREBASE_RESERVED_URLS = /\/__\/.*/;

export const workboxConfig = {
  swDest: 'dist/service-worker.js',
  // SPA fallback: unmatched navigations are served index.html.
  navigateFallback: '/index.html',
  navigateFallbackDenylist: [
    FIREBASE_RESERVED_URLS, // Private Firebase URLs
  ],
  // Activate the new service worker immediately instead of waiting for tabs to close.
  skipWaiting: true,
  offlineGoogleAnalytics: true,
  globDirectory: 'dist',
  globPatterns: ['**/*.{html,js,css,json,svg,md}'],
  // All runtime caches use NetworkFirst: fresh when online, cached when offline.
  runtimeCaching: [
    {
      urlPattern: /\/images\/.*/,
      handler: 'NetworkFirst',
      options: {
        cacheName: 'images-cache',
        expiration: {
          maxAgeSeconds: ONE_WEEK,
          maxEntries: 200,
        },
      },
    },
    {
      urlPattern: /https:\/\/maps\.googleapis\.com\/maps.*/,
      handler: 'NetworkFirst',
      options: {
        cacheName: 'google-maps-cache',
      },
    },
    {
      urlPattern: FIREBASE_RESERVED_URLS,
      handler: 'NetworkFirst',
      options: {
        cacheName: 'firebase-cache',
      },
    },
    {
      urlPattern: /https:\/\/firebasestorage\.googleapis\.com\/.*/,
      handler: 'NetworkFirst',
      options: {
        cacheName: 'firebase-storage-cache',
      },
    },
    {
      urlPattern: /https:\/\/storage\.googleapis\.com\/.*/,
      handler: 'NetworkFirst',
      options: {
        cacheName: 'google-storage-cache',
        // Google Storage responses can be opaque (status 0); cache those too.
        cacheableResponse: {
          statuses: [0, 200],
        },
      },
    },
  ],
};
| gdg-x/hoverboard |
<|start_filename|>src/main/java/yahoofinance/exchanges/ExchangeTimeZone.java<|end_filename|>
package yahoofinance.exchanges;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Static lookup tables mapping Yahoo Finance exchange suffixes and index symbols
 * to the time zone of the exchange they trade on.
 *
 * @author <NAME>
 */
public class ExchangeTimeZone {

    private static final Logger log = LoggerFactory.getLogger(ExchangeTimeZone.class);

    /** Maps ticker suffixes (the part after the last '.') to exchange time zones. */
    public static final Map<String, TimeZone> SUFFIX_TIMEZONES = new HashMap<>();
    /** Maps well-known index symbols (e.g. "^FTSE") to their exchange time zones. */
    public static final Map<String, TimeZone> INDEX_TIMEZONES = new HashMap<>();

    static {
        // Empty suffix = US exchanges (NYSE/NASDAQ), which are the default.
        SUFFIX_TIMEZONES.put("", TimeZone.getTimeZone("America/New_York"));
        SUFFIX_TIMEZONES.put("CBT", TimeZone.getTimeZone("America/New_York"));
        SUFFIX_TIMEZONES.put("CME", TimeZone.getTimeZone("America/New_York"));
        SUFFIX_TIMEZONES.put("NYB", TimeZone.getTimeZone("America/New_York"));
        SUFFIX_TIMEZONES.put("CMX", TimeZone.getTimeZone("America/New_York"));
        SUFFIX_TIMEZONES.put("NYM", TimeZone.getTimeZone("America/New_York"));
        SUFFIX_TIMEZONES.put("OB", TimeZone.getTimeZone("America/New_York"));
        SUFFIX_TIMEZONES.put("PK", TimeZone.getTimeZone("America/New_York"));
        SUFFIX_TIMEZONES.put("BA", TimeZone.getTimeZone("America/Buenos_Aires"));
        SUFFIX_TIMEZONES.put("VI", TimeZone.getTimeZone("Europe/Vienna"));
        SUFFIX_TIMEZONES.put("AX", TimeZone.getTimeZone("Australia/ACT"));
        SUFFIX_TIMEZONES.put("SA", TimeZone.getTimeZone("America/Sao_Paulo"));
        SUFFIX_TIMEZONES.put("TO", TimeZone.getTimeZone("America/Toronto"));
        SUFFIX_TIMEZONES.put("V", TimeZone.getTimeZone("America/Toronto"));
        SUFFIX_TIMEZONES.put("SN", TimeZone.getTimeZone("America/Santiago"));
        SUFFIX_TIMEZONES.put("SS", TimeZone.getTimeZone("Asia/Shanghai"));
        SUFFIX_TIMEZONES.put("SZ", TimeZone.getTimeZone("Asia/Shanghai"));
        SUFFIX_TIMEZONES.put("CO", TimeZone.getTimeZone("Europe/Copenhagen"));
        SUFFIX_TIMEZONES.put("NX", TimeZone.getTimeZone("Europe/Paris"));
        SUFFIX_TIMEZONES.put("PA", TimeZone.getTimeZone("Europe/Paris"));
        SUFFIX_TIMEZONES.put("BE", TimeZone.getTimeZone("Europe/Berlin"));
        SUFFIX_TIMEZONES.put("BM", TimeZone.getTimeZone("Europe/Berlin"));
        SUFFIX_TIMEZONES.put("DU", TimeZone.getTimeZone("Europe/Berlin"));
        SUFFIX_TIMEZONES.put("F", TimeZone.getTimeZone("Europe/Berlin"));
        SUFFIX_TIMEZONES.put("HM", TimeZone.getTimeZone("Europe/Berlin"));
        SUFFIX_TIMEZONES.put("HA", TimeZone.getTimeZone("Europe/Berlin"));
        SUFFIX_TIMEZONES.put("MU", TimeZone.getTimeZone("Europe/Berlin"));
        SUFFIX_TIMEZONES.put("SG", TimeZone.getTimeZone("Europe/Berlin"));
        SUFFIX_TIMEZONES.put("DE", TimeZone.getTimeZone("Europe/Berlin"));
        SUFFIX_TIMEZONES.put("IR", TimeZone.getTimeZone("Europe/Dublin"));
        SUFFIX_TIMEZONES.put("BR", TimeZone.getTimeZone("Europe/Brussels"));
        SUFFIX_TIMEZONES.put("HE", TimeZone.getTimeZone("Europe/Helsinki"));
        SUFFIX_TIMEZONES.put("HK", TimeZone.getTimeZone("Asia/Hong_Kong"));
        SUFFIX_TIMEZONES.put("BO", TimeZone.getTimeZone("Asia/Kolkata"));
        SUFFIX_TIMEZONES.put("NS", TimeZone.getTimeZone("Asia/Kolkata"));
        SUFFIX_TIMEZONES.put("JK", TimeZone.getTimeZone("Asia/Jakarta"));
        SUFFIX_TIMEZONES.put("TA", TimeZone.getTimeZone("Asia/Tel_Aviv"));
        SUFFIX_TIMEZONES.put("MI", TimeZone.getTimeZone("Europe/Rome"));
        SUFFIX_TIMEZONES.put("MX", TimeZone.getTimeZone("America/Mexico_City"));
        SUFFIX_TIMEZONES.put("AS", TimeZone.getTimeZone("Europe/Amsterdam"));
        SUFFIX_TIMEZONES.put("NZ", TimeZone.getTimeZone("Pacific/Auckland"));
        SUFFIX_TIMEZONES.put("OL", TimeZone.getTimeZone("Europe/Oslo"));
        SUFFIX_TIMEZONES.put("SI", TimeZone.getTimeZone("Asia/Singapore"));
        SUFFIX_TIMEZONES.put("KS", TimeZone.getTimeZone("Asia/Seoul"));
        SUFFIX_TIMEZONES.put("KQ", TimeZone.getTimeZone("Asia/Seoul"));
        SUFFIX_TIMEZONES.put("KL", TimeZone.getTimeZone("Asia/Kuala_Lumpur"));
        SUFFIX_TIMEZONES.put("BC", TimeZone.getTimeZone("Europe/Madrid"));
        SUFFIX_TIMEZONES.put("BI", TimeZone.getTimeZone("Europe/Madrid"));
        SUFFIX_TIMEZONES.put("MF", TimeZone.getTimeZone("Europe/Madrid"));
        SUFFIX_TIMEZONES.put("MC", TimeZone.getTimeZone("Europe/Madrid"));
        SUFFIX_TIMEZONES.put("MA", TimeZone.getTimeZone("Europe/Madrid"));
        SUFFIX_TIMEZONES.put("ST", TimeZone.getTimeZone("Europe/Stockholm"));
        SUFFIX_TIMEZONES.put("SW", TimeZone.getTimeZone("Europe/Zurich"));
        SUFFIX_TIMEZONES.put("Z", TimeZone.getTimeZone("Europe/Zurich"));
        SUFFIX_TIMEZONES.put("VX", TimeZone.getTimeZone("Europe/Zurich"));
        SUFFIX_TIMEZONES.put("TWO", TimeZone.getTimeZone("Asia/Taipei"));
        SUFFIX_TIMEZONES.put("TW", TimeZone.getTimeZone("Asia/Taipei"));
        SUFFIX_TIMEZONES.put("L", TimeZone.getTimeZone("Europe/London"));
        SUFFIX_TIMEZONES.put("PR", TimeZone.getTimeZone("Europe/Prague"));
        SUFFIX_TIMEZONES.put("ME", TimeZone.getTimeZone("Europe/Moscow"));
        SUFFIX_TIMEZONES.put("AT", TimeZone.getTimeZone("Europe/Athens"));
        SUFFIX_TIMEZONES.put("LS", TimeZone.getTimeZone("Europe/Lisbon"));

        INDEX_TIMEZONES.put("^FTSE", TimeZone.getTimeZone("Europe/London"));
        INDEX_TIMEZONES.put("^GDAXI", TimeZone.getTimeZone("Europe/Berlin"));
        INDEX_TIMEZONES.put("^FCHI", TimeZone.getTimeZone("Europe/Paris"));
        INDEX_TIMEZONES.put("^IBEX", TimeZone.getTimeZone("Europe/Madrid"));
        INDEX_TIMEZONES.put("^OMX", TimeZone.getTimeZone("Europe/Stockholm"));
        INDEX_TIMEZONES.put("^OSEAX", TimeZone.getTimeZone("Europe/Oslo"));
        INDEX_TIMEZONES.put("ATX", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^SSMI", TimeZone.getTimeZone("Europe/Zurich"));
        INDEX_TIMEZONES.put("^BFX", TimeZone.getTimeZone("Europe/Brussels"));
        INDEX_TIMEZONES.put("^DJI", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^OEX", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^NDX", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^BATSK", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^N225", TimeZone.getTimeZone("Asia/Tokyo"));
        INDEX_TIMEZONES.put("^HSI", TimeZone.getTimeZone("Asia/Hong_Kong"));
        INDEX_TIMEZONES.put("^STI", TimeZone.getTimeZone("Asia/Singapore"));
        INDEX_TIMEZONES.put("^AORD", TimeZone.getTimeZone("Australia/ACT"));
        INDEX_TIMEZONES.put("^BSESN", TimeZone.getTimeZone("Asia/Kolkata"));
        INDEX_TIMEZONES.put("^JKSE", TimeZone.getTimeZone("Asia/Jakarta"));
        INDEX_TIMEZONES.put("^KLSE", TimeZone.getTimeZone("Asia/Kuala_Lumpur"));
        INDEX_TIMEZONES.put("^NZ50", TimeZone.getTimeZone("Pacific/Auckland"));
        INDEX_TIMEZONES.put("^NSEI", TimeZone.getTimeZone("Asia/Kolkata"));
        INDEX_TIMEZONES.put("^KS11", TimeZone.getTimeZone("Asia/Seoul"));
        INDEX_TIMEZONES.put("^TWII", TimeZone.getTimeZone("Asia/Taipei"));
        INDEX_TIMEZONES.put("^MERV", TimeZone.getTimeZone("America/Buenos_Aires"));
        INDEX_TIMEZONES.put("^BVSP", TimeZone.getTimeZone("America/Sao_Paulo"));
        INDEX_TIMEZONES.put("^GSPTSE", TimeZone.getTimeZone("America/Toronto"));
        INDEX_TIMEZONES.put("^MXX", TimeZone.getTimeZone("America/Mexico_City"));
        INDEX_TIMEZONES.put("^GSPC", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^CCSI", TimeZone.getTimeZone("Africa/Cairo"));
        INDEX_TIMEZONES.put("^TA100", TimeZone.getTimeZone("Asia/Tel_Aviv"));
        INDEX_TIMEZONES.put("^FTMC", TimeZone.getTimeZone("Europe/London"));
        INDEX_TIMEZONES.put("^FTLC", TimeZone.getTimeZone("Europe/London"));
        INDEX_TIMEZONES.put("^FTAI", TimeZone.getTimeZone("Europe/London"));
        INDEX_TIMEZONES.put("^FTAS", TimeZone.getTimeZone("Europe/London"));
        INDEX_TIMEZONES.put("^FTSC", TimeZone.getTimeZone("Europe/London"));
        INDEX_TIMEZONES.put("^FTT1X", TimeZone.getTimeZone("Europe/London"));
        INDEX_TIMEZONES.put("^MID", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^SP600", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^SPSUPX", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^VIX", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^DJC", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^XAU", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^DJT", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^DJU", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^DJA", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^DWCF", TimeZone.getTimeZone("America/New_York"));
        // NOTE(review): duplicate entry for "^DJU" (already added above); harmless but could be removed.
        INDEX_TIMEZONES.put("^DJU", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^IXIC", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^BANK", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^NBI", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^IXCO", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^IXF", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^INDS", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^INSR", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^OFIN", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^IXTC", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^TRAN", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^NYA", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^NYE", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^NYK", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^NYP", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^NYY", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^NYI", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^NY", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^NYL", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^XMI", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^XAX", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^BATSK", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^RUI", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^RUT", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^RUA", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^SOX", TimeZone.getTimeZone("America/New_York"));
        INDEX_TIMEZONES.put("^BKX", TimeZone.getTimeZone("America/New_York"));
    }

    /**
     * Get the time zone for a specific exchange suffix
     *
     * @param suffix suffix for the exchange in YahooFinance
     * @return time zone of the exchange
     */
    public static TimeZone get(String suffix) {
        if(SUFFIX_TIMEZONES.containsKey(suffix)) {
            return SUFFIX_TIMEZONES.get(suffix);
        }
        // Unknown suffix: warn and fall back to the US default.
        log.warn("Cannot find time zone for exchange suffix: '{}'. Using default: America/New_York", suffix);
        return SUFFIX_TIMEZONES.get("");
    }

    /**
     * Get the time zone for a specific stock or index.
     * For stocks, the exchange suffix is extracted from the stock symbol to retrieve the time zone.
     *
     * @param symbol stock symbol in YahooFinance
     * @return time zone of the exchange on which this stock is traded
     */
    public static TimeZone getStockTimeZone(String symbol) {
        // First check if it's a known stock index
        if(INDEX_TIMEZONES.containsKey(symbol)) {
            return INDEX_TIMEZONES.get(symbol);
        }

        // No suffix: US exchange default.
        if(!symbol.contains(".")) {
            return ExchangeTimeZone.get("");
        }
        // Use the text after the last '.' as the exchange suffix.
        String[] split = symbol.split("\\.");
        return ExchangeTimeZone.get(split[split.length - 1]);
    }

}
<|start_filename|>src/main/java/yahoofinance/quotes/query1v7/QuotesRequest.java<|end_filename|>
package yahoofinance.quotes.query1v7;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import yahoofinance.Utils;
import yahoofinance.YahooFinance;
import yahoofinance.util.RedirectableRequest;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
 * Base class for Yahoo Finance query1/v7 quote requests: builds the request URL,
 * follows redirects, and delegates per-entry JSON parsing to subclasses.
 *
 * @author <NAME>
 * @param <T> Type of object that can contain the retrieved information from a
 * quotes request
 */
public abstract class QuotesRequest<T> {

    private static final Logger log = LoggerFactory.getLogger(QuotesRequest.class);
    private static final ObjectMapper objectMapper = new ObjectMapper();

    /** Comma-separated list of ticker symbols to request. */
    protected final String symbols;

    public QuotesRequest(String symbols) {
        this.symbols = symbols;
    }

    public String getSymbols() {
        return symbols;
    }

    /**
     * Parses a single result entry from the "quoteResponse.result" array.
     *
     * @param node JSON node for one quote entry
     * @return the parsed object
     */
    protected abstract T parseJson(JsonNode node);

    /**
     * Convenience accessor returning only the first result, or null when the
     * response contained no results.
     *
     * @return the first parsed result or null
     * @throws IOException when there's a connection problem or the request is incorrect
     */
    public T getSingleResult() throws IOException {
        List<T> results = this.getResult();
        if (!results.isEmpty()) {
            return results.get(0);
        }
        return null;
    }

    /**
     * Sends the request to Yahoo Finance and parses the result
     *
     * @return List of parsed objects resulting from the Yahoo Finance request
     * @throws IOException when there's a connection problem or the request is incorrect
     */
    public List<T> getResult() throws IOException {
        List<T> result = new ArrayList<>();

        Map<String, String> params = new LinkedHashMap<>();
        params.put("symbols", this.symbols);

        String url = YahooFinance.QUOTES_QUERY1V7_BASE_URL + "?" + Utils.getURLParameters(params);

        // Get JSON from Yahoo; use parameterized logging to avoid eager string concatenation.
        log.info("Sending request: {}", url);

        URL request = new URL(url);
        RedirectableRequest redirectableRequest = new RedirectableRequest(request, 5);
        redirectableRequest.setConnectTimeout(YahooFinance.CONNECTION_TIMEOUT);
        redirectableRequest.setReadTimeout(YahooFinance.CONNECTION_TIMEOUT);
        URLConnection connection = redirectableRequest.openConnection();

        // try-with-resources guarantees the stream is closed even if parsing throws.
        try (InputStreamReader is = new InputStreamReader(connection.getInputStream())) {
            JsonNode node = objectMapper.readTree(is);
            if (node.has("quoteResponse") && node.get("quoteResponse").has("result")) {
                node = node.get("quoteResponse").get("result");
                for (int i = 0; i < node.size(); i++) {
                    result.add(this.parseJson(node.get(i)));
                }
            } else {
                throw new IOException("Invalid response");
            }
        }

        return result;
    }

}
<|start_filename|>src/test/java/yahoofinance/HistoricalQuoteRequestTest.java<|end_filename|>
package yahoofinance;
import org.junit.Before;
import org.junit.Test;
import yahoofinance.histquotes.HistoricalQuote;
import yahoofinance.histquotes.Interval;
import yahoofinance.mock.MockedServersTest;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.*;
/**
*
* @author <NAME>
*/
public class HistoricalQuoteRequestTest extends MockedServersTest {
private Calendar today;
private Calendar from;
@Before
public void setup() {
    // Fixed "today" of 2016-09-11 so the mocked responses line up with the expectations.
    today = Calendar.getInstance();
    today.set(2016, Calendar.SEPTEMBER, 11);

    // Default query window: one year back from "today".
    from = (Calendar) today.clone();
    from.add(Calendar.YEAR, -1);
}
@Test
public void historicalQuoteTest() throws IOException {
    // One year of monthly-ish data from the mocked server: 13 entries expected.
    Stock goog = YahooFinance.get("GOOG", from, today);
    assertNotNull(goog.getHistory());
    assertEquals(13, goog.getHistory().size());

    // Sanity checks that apply to every quote regardless of fixture values.
    for(HistoricalQuote histQuote : goog.getHistory()) {
        assertEquals("GOOG", histQuote.getSymbol());
        assertTrue(histQuote.getAdjClose().compareTo(BigDecimal.ZERO) > 0);
        assertTrue(histQuote.getClose().compareTo(BigDecimal.ZERO) > 0);
        assertTrue(histQuote.getHigh().compareTo(BigDecimal.ZERO) > 0);
        assertTrue(histQuote.getLow().compareTo(BigDecimal.ZERO) > 0);
        assertTrue(histQuote.getOpen().compareTo(BigDecimal.ZERO) > 0);
        assertTrue(histQuote.getVolume() > 0);
        assertNotNull(histQuote.getDate());
    }

    // Spot-check one entry (2016-04-01) against the mock fixture values.
    HistoricalQuote histQuote = goog.getHistory().get(5);

    assertEquals(new BigDecimal("693.01001"), histQuote.getAdjClose());
    assertEquals(new BigDecimal("693.01001"), histQuote.getClose());
    assertEquals(new BigDecimal("769.900024"), histQuote.getHigh());
    assertEquals(new BigDecimal("689.00"), histQuote.getLow());
    assertEquals(new BigDecimal("738.599976"), histQuote.getOpen());
    assertEquals(Long.valueOf(2125700), histQuote.getVolume());
    // Calendar months are 0-based: 3 == April.
    assertEquals(3, histQuote.getDate().get(Calendar.MONTH));
    assertEquals(1, histQuote.getDate().get(Calendar.DATE));
    assertEquals(2016, histQuote.getDate().get(Calendar.YEAR));
}
@Test
public void intervalTest() throws IOException {
Stock tsla = YahooFinance.get("TSLA", from, today, Interval.DAILY);
Stock scty = YahooFinance.get("SCTY", from, today, Interval.WEEKLY);
Stock goog = YahooFinance.get("GOOG", from, today, Interval.MONTHLY);
assertEquals(252, tsla.getHistory().size());
assertEquals(53, scty.getHistory().size());
assertEquals(13, goog.getHistory().size());
}
@Test
public void multiYearTest() throws IOException {
Calendar from = (Calendar) today.clone();
Calendar to = (Calendar) today.clone();
from.add(Calendar.YEAR, -5); // from 5 years ago
Stock goog = YahooFinance.get("GOOG", from, to, Interval.WEEKLY);
assertEquals(261, goog.getHistory().size());
HistoricalQuote histQuote = goog.getHistory().get(0);
assertEquals(8, histQuote.getDate().get(Calendar.MONTH));
assertEquals(6, histQuote.getDate().get(Calendar.DATE));
assertEquals(2016, histQuote.getDate().get(Calendar.YEAR));
histQuote = goog.getHistory().get(260);
assertEquals(8, histQuote.getDate().get(Calendar.MONTH));
assertEquals(12, histQuote.getDate().get(Calendar.DATE));
assertEquals(2011, histQuote.getDate().get(Calendar.YEAR));
}
@Test
public void multiStockTest() throws IOException {
String[] symbols = new String[] {"INTC", "AIR.PA"};
Map<String, Stock> stocks = YahooFinance.get(symbols, from, today);
Stock intel = stocks.get("INTC");
Stock airbus = stocks.get("AIR.PA");
assertEquals(13, intel.getHistory().size());
assertEquals(13, airbus.getHistory().size());
assertEquals("INTC", intel.getHistory().get(3).getSymbol());
assertEquals("AIR.PA", airbus.getHistory().get(5).getSymbol());
}
@Test
public void historicalFlowTest() throws IOException {
Stock goog = YahooFinance.get("GOOG");
int requestCount = MockedServersTest.histQuotesServer.getRequestCount();
assertNotNull(goog.getHistory(from, today));
requestCount += 1;
assertEquals(requestCount, MockedServersTest.histQuotesServer.getRequestCount());
assertEquals(13, goog.getHistory().size());
assertEquals(requestCount, MockedServersTest.histQuotesServer.getRequestCount());
Calendar from = (Calendar) today.clone();
Calendar to = (Calendar) today.clone();
from.add(Calendar.YEAR, -5); // from 5 years ago
assertNotNull(goog.getHistory(from, to, Interval.WEEKLY));
requestCount += 1;
assertEquals(requestCount, MockedServersTest.histQuotesServer.getRequestCount());
assertEquals(261, goog.getHistory().size());
}
@Test
public void impossibleRequestTest() throws IOException {
Calendar from = Calendar.getInstance();
Calendar to = Calendar.getInstance();
from.add(Calendar.DATE, 2); // from > to
Exception reqEx = null;
Stock goog = YahooFinance.get("GOOG");
List<HistoricalQuote> histQuotes = null;
int requestCount = MockedServersTest.histQuotesServer.getRequestCount();
try {
histQuotes = goog.getHistory(from, to);
} catch (IOException ex) {
reqEx = ex;
}
// Didn't send any requests since the problem was detected
assertEquals(requestCount, MockedServersTest.histQuotesServer.getRequestCount());
assertNull(reqEx);
assertEquals(0, histQuotes.size());
}
}
<|start_filename|>src/main/java/yahoofinance/quotes/csv/StockQuotesRequest.java<|end_filename|>
package yahoofinance.quotes.csv;
import java.util.ArrayList;
import java.util.List;
/**
 * CSV quotes request that fetches the full set of stock quote properties
 * and parses each response line into a {@code StockQuotesData}.
 *
 * @author <NAME>
 */
public class StockQuotesRequest extends QuotesRequest<StockQuotesData> {

    /**
     * Yahoo Finance is responding with formatted numbers in some cases. Because
     * of this, those number may contain commas. This will screw up the CSV
     * file.
     *
     * It's not possible to choose a different delimiter for the CSV or to
     * disable the number formatting
     *
     * To work around this, we surround the vulnerable values by the stock
     * symbol. This forces us to do manual parsing of the CSV lines instead of
     * using the easy String.split
     *
     */
    public static final List<QuotesProperty> DEFAULT_PROPERTIES = new ArrayList<>();

    static {
        // Always keep the name and symbol in first and second place respectively!
        DEFAULT_PROPERTIES.add(QuotesProperty.Name);
        DEFAULT_PROPERTIES.add(QuotesProperty.Symbol);
        DEFAULT_PROPERTIES.add(QuotesProperty.Currency);
        DEFAULT_PROPERTIES.add(QuotesProperty.StockExchange);
        DEFAULT_PROPERTIES.add(QuotesProperty.Ask);
        DEFAULT_PROPERTIES.add(QuotesProperty.AskRealtime);
        // The extra Symbol entries below are the sentinels described above:
        // they bracket values that may contain commas (sizes, volumes, ...).
        DEFAULT_PROPERTIES.add(QuotesProperty.Symbol);
        DEFAULT_PROPERTIES.add(QuotesProperty.AskSize);
        DEFAULT_PROPERTIES.add(QuotesProperty.Symbol);
        DEFAULT_PROPERTIES.add(QuotesProperty.Bid);
        DEFAULT_PROPERTIES.add(QuotesProperty.BidRealtime);
        DEFAULT_PROPERTIES.add(QuotesProperty.Symbol);
        DEFAULT_PROPERTIES.add(QuotesProperty.BidSize);
        DEFAULT_PROPERTIES.add(QuotesProperty.Symbol);
        DEFAULT_PROPERTIES.add(QuotesProperty.LastTradePriceOnly);
        DEFAULT_PROPERTIES.add(QuotesProperty.Symbol);
        DEFAULT_PROPERTIES.add(QuotesProperty.LastTradeSize);
        DEFAULT_PROPERTIES.add(QuotesProperty.Symbol);
        DEFAULT_PROPERTIES.add(QuotesProperty.LastTradeDate);
        DEFAULT_PROPERTIES.add(QuotesProperty.LastTradeTime);
        DEFAULT_PROPERTIES.add(QuotesProperty.Open);
        DEFAULT_PROPERTIES.add(QuotesProperty.PreviousClose);
        DEFAULT_PROPERTIES.add(QuotesProperty.DaysLow);
        DEFAULT_PROPERTIES.add(QuotesProperty.DaysHigh);
        DEFAULT_PROPERTIES.add(QuotesProperty.Volume);
        DEFAULT_PROPERTIES.add(QuotesProperty.AverageDailyVolume);
        DEFAULT_PROPERTIES.add(QuotesProperty.YearHigh);
        DEFAULT_PROPERTIES.add(QuotesProperty.YearLow);
        DEFAULT_PROPERTIES.add(QuotesProperty.FiftydayMovingAverage);
        DEFAULT_PROPERTIES.add(QuotesProperty.TwoHundreddayMovingAverage);
        DEFAULT_PROPERTIES.add(QuotesProperty.Symbol);
        DEFAULT_PROPERTIES.add(QuotesProperty.SharesOutstanding);
        DEFAULT_PROPERTIES.add(QuotesProperty.Symbol);
        DEFAULT_PROPERTIES.add(QuotesProperty.Symbol);
        DEFAULT_PROPERTIES.add(QuotesProperty.SharesOwned);
        DEFAULT_PROPERTIES.add(QuotesProperty.Symbol);
        DEFAULT_PROPERTIES.add(QuotesProperty.MarketCapitalization);
        DEFAULT_PROPERTIES.add(QuotesProperty.Symbol);
        DEFAULT_PROPERTIES.add(QuotesProperty.SharesFloat);
        DEFAULT_PROPERTIES.add(QuotesProperty.Symbol);
        DEFAULT_PROPERTIES.add(QuotesProperty.DividendPayDate);
        DEFAULT_PROPERTIES.add(QuotesProperty.ExDividendDate);
        DEFAULT_PROPERTIES.add(QuotesProperty.TrailingAnnualDividendYield);
        DEFAULT_PROPERTIES.add(QuotesProperty.TrailingAnnualDividendYieldInPercent);
        DEFAULT_PROPERTIES.add(QuotesProperty.DilutedEPS);
        DEFAULT_PROPERTIES.add(QuotesProperty.EPSEstimateCurrentYear);
        DEFAULT_PROPERTIES.add(QuotesProperty.EPSEstimateNextQuarter);
        DEFAULT_PROPERTIES.add(QuotesProperty.EPSEstimateNextYear);
        DEFAULT_PROPERTIES.add(QuotesProperty.PERatio);
        DEFAULT_PROPERTIES.add(QuotesProperty.PEGRatio);
        DEFAULT_PROPERTIES.add(QuotesProperty.PriceBook);
        DEFAULT_PROPERTIES.add(QuotesProperty.PriceSales);
        DEFAULT_PROPERTIES.add(QuotesProperty.BookValuePerShare);
        DEFAULT_PROPERTIES.add(QuotesProperty.Revenue);
        DEFAULT_PROPERTIES.add(QuotesProperty.EBITDA);
        DEFAULT_PROPERTIES.add(QuotesProperty.OneyrTargetPrice);
        DEFAULT_PROPERTIES.add(QuotesProperty.ShortRatio);
    }

    /**
     * Creates a request for the given query (symbol list) using the full
     * default property set.
     */
    public StockQuotesRequest(String query) {
        super(query, StockQuotesRequest.DEFAULT_PROPERTIES);
    }

    /**
     * Parses one CSV response line, honoring the symbol-sentinel scheme
     * documented on {@link #DEFAULT_PROPERTIES}: comma-vulnerable values are
     * wrapped between two occurrences of the quoted symbol.
     */
    @Override
    protected StockQuotesData parseCSVLine(String line) {
        List<String> parsedLine = new ArrayList<>();
        // first get company name, symbol, currency and exchange
        // because we need the symbol and currency or exchange might be the same as the symbol!
        // pretty ugly code due to the bad format of the csv
        int pos1 = 0;
        int pos2;
        int skip = 2;
        // Company name: a quoted field, or everything up to the first quoted
        // field (which is the symbol).
        if(line.startsWith("\"")) {
            pos1 = 1; // skip first \"
            pos2 = line.indexOf('\"', 1);
        } else {
            pos2 = line.indexOf(",\""); // last comma before the first symbol (hopefully)
            skip = 1;
        }
        String name = line.substring(pos1, pos2);
        pos1 = pos2 + skip; // skip \",
        pos2 = line.indexOf('\"', pos1 + 1);
        skip = 2;
        // fullSymbol keeps its surrounding quotes: it doubles as the sentinel
        // marker searched for in the loop below.
        String fullSymbol = line.substring(pos1, pos2 + 1);
        String symbol = fullSymbol.substring(1, fullSymbol.length() - 1);
        pos1 = pos2 + skip;
        // Currency: quoted or bare field.
        if (line.charAt(pos1) == '\"') {
            pos1 += 1;
            pos2 = line.indexOf('\"', pos1);
            skip = 2;
        } else {
            pos2 = line.indexOf(',', pos1);
            skip = 1;
        }
        String currency = line.substring(pos1, pos2);
        pos1 = pos2 + skip;
        // Exchange: quoted or bare field.
        if (line.charAt(pos1) == '\"') {
            pos1 += 1;
            pos2 = line.indexOf('\"', pos1);
            skip = 2;
        } else {
            pos2 = line.indexOf(',', pos1);
            skip = 1;
        }
        String exchange = line.substring(pos1, pos2);
        parsedLine.add(name);
        parsedLine.add(symbol);
        parsedLine.add(currency);
        parsedLine.add(exchange);
        pos1 = pos2 + skip; // skip \",
        // Remaining fields: sentinel-wrapped values, quoted values, or bare values.
        for (; pos1 < line.length(); pos1++) {
            if (line.startsWith(fullSymbol, pos1)) {
                // Sentinel: the value between the two symbol occurrences may
                // contain commas, so it is taken verbatim.
                parsedLine.add(symbol);
                pos1 = pos1 + fullSymbol.length() + 1; // immediately skip the , as well
                pos2 = line.indexOf(fullSymbol, pos1) - 1; // don't include last ,
                parsedLine.add(line.substring(pos1, pos2));
                parsedLine.add(symbol);
                pos1 = pos2 + fullSymbol.length() + 1;
            } else if (line.charAt(pos1) == '\"') {
                // Quoted field.
                pos1 += 1;
                pos2 = line.indexOf('\"', pos1);
                parsedLine.add(line.substring(pos1, pos2));
                pos1 = pos2 + 1;
            } else if (line.charAt(pos1) != ',') {
                // Bare field: runs until the next comma (or end of line).
                pos2 = line.indexOf(',', pos1);
                if (pos2 <= pos1) {
                    pos2 = line.length();
                }
                parsedLine.add(line.substring(pos1, pos2));
                pos1 = pos2;
            }
        }
        return new StockQuotesData(parsedLine.toArray(new String[this.properties.size()]));
    }
}
| sfuhrm/yahoofinance-api |
<|start_filename|>examples/mqtt/client-b.js<|end_filename|>
const mqtt = require('mqtt');
const rawr = require('../../');

// Connect to the local broker. This peer listens on topic 'client-b' and
// publishes to 'client-a' (the mirror image of client-a.js).
const connection = mqtt.connect('mqtt://localhost');

// RPC method exposed to the remote peer.
const getRandom = () => Math.random();

const transport = rawr.transports.mqtt({
  connection,
  pubTopic: 'client-a',
  subTopic: 'client-b',
});

// Peer with a 1-second call timeout.
const rawrPeer = rawr({
  transport,
  handlers: { getRandom },
  timeout: 1000,
});

// Periodically invoke the remote add() method exposed by client-a.
setInterval(async () => {
  try {
    const val = await rawrPeer.methods.add(1, 3);
    console.log('result sent from client-a', val);
  } catch (error) {
    console.log('error calling client-a', error.message);
  }
}, 1000);
<|start_filename|>examples/mqtt/client-a.js<|end_filename|>
const mqtt = require('mqtt');
const rawr = require('../../');

// Connect to the local broker. This peer listens on topic 'client-a' and
// publishes to 'client-b' (the mirror image of client-b.js).
const connection = mqtt.connect('mqtt://localhost');

// RPC method exposed to the remote peer.
const add = (x, y) => x + y;

const transport = rawr.transports.mqtt({
  connection,
  pubTopic: 'client-b',
  subTopic: 'client-a',
});

// Peer with a 1-second call timeout.
const rawrPeer = rawr({
  transport,
  handlers: { add },
  timeout: 1000,
});

// Periodically invoke the remote getRandom() method exposed by client-b.
setInterval(async () => {
  try {
    const val = await rawrPeer.methods.getRandom();
    console.log('result sent from client-b', val);
  } catch (error) {
    console.log('error calling client-b', error.message);
  }
}, 1000);
<|start_filename|>package.json<|end_filename|>
{
"name": "rawr",
"version": "0.15.0",
"description": "JSON-RPC over simple event emitters",
"dependencies": {},
"devDependencies": {
"b64id": "^1.0.0",
"browserify": "^16.2.3",
"chai": "^4.2.0",
"coveralls": "^3.0.3",
"eslint": "^5.15.3",
"eslint-config-airbnb": "^17.1.0",
"eslint-plugin-import": "^2.16.0",
"eslint-plugin-jsx-a11y": "^6.2.1",
"eslint-plugin-react": "^7.12.4",
"istanbul": "^0.4.5",
"mocha": "^6.0.2"
},
"main": "index.js",
"scripts": {
"lint": "eslint ./index.js --ext .js",
"test": "npm run lint && istanbul cover _mocha && npm run check-coverage",
"mocha": "_mocha",
"build": "browserify index.js -o dist/bundle.js",
"check-coverage": "istanbul check-coverage --statements 100 --branches 75 --lines 100 --functions 100",
"coveralls": "cat ./coverage/lcov.info | ./node_modules/.bin/coveralls"
},
"repository": {
"type": "git",
"url": "git://github.com/iceddev/rawr"
},
"keywords": [
"rpc",
"json-rpc",
"promises",
"websocket",
"ws",
"promise",
"mqtt"
],
"author": "<NAME> <<EMAIL>> (http://iceddev.com/)",
"license": "MIT",
"readmeFilename": "README.md",
"bugs": {
"url": "https://github.com/iceddev/rawr/issues"
}
}
<|start_filename|>index.js<|end_filename|>
const { EventEmitter } = require('events');
const transports = require('./transports');
function rawr({ transport, timeout = 0, handlers = {}, methods, idGenerator }) {
let callId = 0;
// eslint-disable-next-line no-param-reassign
methods = methods || handlers; // backwards compat
const pendingCalls = {};
const methodHandlers = {};
const notificationEvents = new EventEmitter();
notificationEvents.on = notificationEvents.on.bind(notificationEvents);
transport.on('rpc', (msg) => {
if (msg.id) {
// handle an RPC request
if (msg.params && methodHandlers[msg.method]) {
methodHandlers[msg.method](msg);
return;
}
// handle an RPC result
const promise = pendingCalls[msg.id];
if (promise) {
if (promise.timeoutId) {
clearTimeout(promise.timeoutId);
}
delete pendingCalls[msg.id];
if (msg.error) {
promise.reject(msg.error);
}
return promise.resolve(msg.result);
}
return;
}
// handle a notification
msg.params.unshift(msg.method);
notificationEvents.emit(...msg.params);
});
function addHandler(methodName, handler) {
methodHandlers[methodName] = (msg) => {
Promise.resolve()
.then(() => {
return handler.apply(this, msg.params);
})
.then((result) => {
transport.send({
id: msg.id,
result
});
})
.catch((error) => {
const serializedError = { message: error.message };
if (error.code) {
serializedError.code = error.code;
}
transport.send({
id: msg.id,
error: serializedError
});
});
};
}
Object.keys(methods).forEach((m) => {
addHandler(m, methods[m]);
});
const methodsProxy = new Proxy({}, {
get: (target, name) => {
return (...args) => {
const id = idGenerator ? idGenerator() : ++callId;
const msg = {
jsonrpc: '2.0',
method: name,
params: args,
id
};
let timeoutId;
if (timeout) {
timeoutId = setTimeout(() => {
if (pendingCalls[id]) {
const err = new Error('RPC timeout');
err.code = 504;
pendingCalls[id].reject(err);
delete pendingCalls[id];
}
}, timeout);
}
const response = new Promise((resolve, reject) => {
pendingCalls[id] = { resolve, reject, timeoutId };
});
transport.send(msg);
return response;
};
}
});
const notifiers = new Proxy({}, {
get: (target, name) => {
return (...args) => {
const msg = {
jsonrpc: '2.0',
method: name,
params: args
};
transport.send(msg);
};
}
});
const notifications = new Proxy({}, {
get: (target, name) => {
return (callback) => {
notificationEvents.on(name.substring(2), (...args) => {
return callback.apply(callback, args);
});
};
}
});
return {
methods: methodsProxy,
addHandler,
notifications,
notifiers,
transport,
};
}
// Expose the bundled transport factories and export the peer factory.
rawr.transports = transports;
module.exports = rawr;
<|start_filename|>examples/webworker/package.json<|end_filename|>
{
"name": "rawr-webworker-example",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"build": "browserify main.js -o public/main-bundle.js && browserify worker.js -o public/worker-bundle.js",
"start": "npm run build && ecstatic ./public --port 8081"
},
"author": "",
"license": "ISC",
"dependencies": {
"browserify": "^14.4.0",
"ecstatic": "^3.3.1"
}
}
<|start_filename|>examples/webworker/worker.js<|end_filename|>
const rawr = require('../../');

// RPC method exposed to the DOM side.
const add = (x, y) => x + y;

// Worker-side peer: calling transports.worker() with no argument selects the
// worker end of the message channel.
const rawrPeer = rawr({ transport: rawr.transports.worker(), handlers: { add } });

// Periodically call back into the DOM peer.
setInterval(async () => {
  const val = await rawrPeer.methods.getRandom();
  console.log('random from DOM', val);
}, 1000);
<|start_filename|>test/websocket_transport.js<|end_filename|>
const chai = require('chai');
const { EventEmitter } = require('events');
const rawr = require('../');
// Enable chai's should-style assertions (patches Object.prototype).
chai.should();
// Builds two in-memory "sockets" wired back-to-back and wraps each in the
// websocket transport. Messages are delivered as { data } events, mirroring
// the browser WebSocket API (addEventListener + send).
function mockTransports() {
  const a = new EventEmitter();
  const b = new EventEmitter();
  a.addEventListener = (topic, cb) => a.on(topic, cb);
  b.addEventListener = (topic, cb) => b.on(topic, cb);
  a.send = (msg) => {
    b.emit('message', { data: msg });
  };
  b.send = (msg) => {
    a.emit('message', { data: msg });
  };
  return {
    transportA: rawr.transports.websocket(a),
    transportB: rawr.transports.websocket(b),
  };
}
// Test handler: resolves "hello, <name>" after 100ms, or rejects immediately
// with an error carrying code 9000 when name === 'bad'.
function helloTest(name) {
  return new Promise((resolve, reject) => {
    if (name === 'bad') {
      const error = new Error('bad name !');
      error.code = 9000;
      reject(error);
      return;
    }
    setTimeout(() => resolve(`hello, ${name}`), 100);
  });
}
// Test RPC handler: sum of two numbers.
function add(a, b) {
  const sum = a + b;
  return sum;
}
// Test RPC handler: difference of two numbers.
function subtract(a, b) {
  const diff = a - b;
  return diff;
}
// End-to-end peer tests over the mocked websocket transports.
describe('websocket', () => {
  it('should make a client', (done) => {
    const { transportA } = mockTransports();
    const client = rawr({ transport: transportA });
    client.should.be.a('object');
    client.addHandler.should.be.a('function');
    done();
  });
  // Both peers expose a handler and call each other across the socket pair.
  it('client should make a successful rpc call to another peer', async () => {
    const { transportA, transportB } = mockTransports();
    const clientA = rawr({ transport: transportA, handlers: { add } });
    const clientB = rawr({ transport: transportB, handlers: { subtract } });
    const resultA = await clientA.methods.subtract(7, 2);
    const resultB = await clientB.methods.add(1, 2);
    resultA.should.equal(5);
    resultB.should.equal(3);
  });
  // A handler rejection propagates its error code back to the caller.
  it('client should make an unsuccessful rpc call to a peer', async () => {
    const { transportA, transportB } = mockTransports();
    const clientA = rawr({ transport: transportA, handlers: { helloTest } });
    const clientB = rawr({ transport: transportB });
    clientA.should.be.an('object');
    try {
      await clientB.methods.helloTest('bad');
    } catch (error) {
      error.code.should.equal(9000);
    }
  });
  // helloTest resolves in ~100ms, well within the 1000ms budget.
  it('client handle an rpc under a specified timeout', async () => {
    const { transportA, transportB } = mockTransports();
    const clientA = rawr({ transport: transportA, handlers: { helloTest } });
    const clientB = rawr({ transport: transportB, timeout: 1000 });
    clientA.should.be.an('object');
    const result = await clientB.methods.helloTest('luis');
    result.should.equal('hello, luis');
  });
  // With a 10ms budget the timeout fires first and rejects with code 504.
  it('client handle an rpc timeout', async () => {
    const { transportA, transportB } = mockTransports();
    const clientA = rawr({ transport: transportA, handlers: { helloTest } });
    const clientB = rawr({ transport: transportB, timeout: 10 });
    clientA.should.be.an('object');
    try {
      await clientB.methods.helloTest('luis');
    } catch (error) {
      error.code.should.equal(504);
    }
  });
  // Notifications are fire-and-forget and surface via the notifications proxy.
  it('client should be able to send a notification to a server', (done) => {
    const { transportA, transportB } = mockTransports();
    const clientA = rawr({ transport: transportA });
    const clientB = rawr({ transport: transportB });
    clientA.notifications.ondoSomething((someData) => {
      someData.should.equal('testing_notification');
      done();
    });
    clientB.notifiers.doSomething('testing_notification');
  });
});
<|start_filename|>transports/mqtt/index.js<|end_filename|>
const { EventEmitter } = require('events');
function transport({ connection, subTopic, pubTopic, subscribe = true }) {
const emitter = new EventEmitter();
if (subscribe) {
connection.subscribe(subTopic);
}
connection.on('message', (topic, message) => {
if (topic === subTopic) {
try {
const msg = JSON.parse(message.toString());
if (msg.method || (msg.id && ('result' in msg || 'error' in msg))) {
emitter.emit('rpc', msg);
}
} catch (err) {
console.error(err);
}
}
});
emitter.send = (msg) => {
connection.publish(pubTopic, JSON.stringify(msg));
};
return emitter;
}
module.exports = transport;
<|start_filename|>test/index.js<|end_filename|>
const chai = require('chai');
const { EventEmitter } = require('events');
const b64id = require('b64id');
const rawr = require('../');
// Enable chai's should-style assertions (patches Object.prototype).
chai.should();
function mockTransports() {
const a = new EventEmitter();
const b = new EventEmitter();
a.on('message', (msg) => {
a.emit('rpc', msg);
});
a.send = (msg) => {
b.emit('message', msg);
};
b.on('message', (msg) => {
b.emit('rpc', msg);
});
b.send = (msg) => {
a.emit('message', msg);
};
return { a, b };
}
// Test handler: resolves "hello, <name>" after 100ms, or rejects immediately
// with an error carrying code 9000 when name === 'bad'.
function helloTest(name) {
  return new Promise((resolve, reject) => {
    if (name === 'bad') {
      const error = new Error('bad name !');
      error.code = 9000;
      reject(error);
      return;
    }
    setTimeout(() => resolve(`hello, ${name}`), 100);
  });
}
// Test RPC handler: sum of two numbers.
function add(a, b) {
  const sum = a + b;
  return sum;
}
// Test RPC handler: difference of two numbers.
function subtract(a, b) {
  const diff = a - b;
  return diff;
}
// Core peer tests over the bare loopback transports.
describe('rawr', () => {
  it('should make a client', (done) => {
    const client = rawr({ transport: mockTransports().a });
    client.should.be.a('object');
    client.addHandler.should.be.a('function');
    done();
  });
  // Both peers expose a handler and call each other.
  it('client should make a successful rpc call to another peer', async () => {
    const { a, b } = mockTransports();
    const clientA = rawr({ transport: a, handlers: { add } });
    const clientB = rawr({ transport: b, handlers: { subtract } });
    const resultA = await clientA.methods.subtract(7, 2);
    const resultB = await clientB.methods.add(1, 2);
    resultA.should.equal(5);
    resultB.should.equal(3);
  });
  // Same round-trip with b64id-generated call ids instead of integers.
  // NOTE(review): clientB passes idGenerator inside `handlers` rather than as
  // a top-level option — likely a mistake; confirm and fix the test intent.
  it('client should make a successful rpc call to another peer with custom id generators', async () => {
    const { a, b } = mockTransports();
    const clientA = rawr({ transport: a, handlers: { add }, idGenerator: b64id.generateId });
    const clientB = rawr({ transport: b, handlers: { subtract, idGenerator: b64id.generateId } });
    const resultA = await clientA.methods.subtract(7, 2);
    const resultB = await clientB.methods.add(1, 2);
    resultA.should.equal(5);
    resultB.should.equal(3);
  });
  // A handler rejection propagates its error code back to the caller.
  it('client should make an unsuccessful rpc call to a peer', async () => {
    const { a, b } = mockTransports();
    const clientA = rawr({ transport: a, handlers: { helloTest } });
    const clientB = rawr({ transport: b });
    clientA.should.be.an('object');
    try {
      await clientB.methods.helloTest('bad');
    } catch (error) {
      error.code.should.equal(9000);
    }
  });
  // helloTest resolves in ~100ms, well within the 1000ms budget.
  it('client handle an rpc under a specified timeout', async () => {
    const { a, b } = mockTransports();
    const clientA = rawr({ transport: a, handlers: { helloTest } });
    const clientB = rawr({ transport: b, timeout: 1000 });
    clientA.should.be.an('object');
    const result = await clientB.methods.helloTest('luis');
    result.should.equal('hello, luis');
  });
  // With a 10ms budget the timeout fires first and rejects with code 504.
  it('client handle an rpc timeout', async () => {
    const { a, b } = mockTransports();
    const clientA = rawr({ transport: a, handlers: { helloTest } });
    const clientB = rawr({ transport: b, timeout: 10 });
    clientA.should.be.an('object');
    try {
      await clientB.methods.helloTest('luis');
    } catch (error) {
      error.code.should.equal(504);
    }
  });
  // Notifications are fire-and-forget and surface via the notifications proxy.
  it('client should be able to send a notification to a server', (done) => {
    const { a, b } = mockTransports();
    const clientA = rawr({ transport: a });
    const clientB = rawr({ transport: b });
    clientA.notifications.ondoSomething((someData) => {
      someData.should.equal('testing_notification');
      done();
    });
    clientB.notifiers.doSomething('testing_notification');
  });
});
<|start_filename|>transports/worker/index.js<|end_filename|>
const { EventEmitter } = require('events');
function dom(webWorker) {
const emitter = new EventEmitter();
webWorker.addEventListener('message', (msg) => {
const { data } = msg;
if (data && (data.method || (data.id && ('result' in data || 'error' in data)))) {
emitter.emit('rpc', data);
}
});
emitter.send = (msg) => {
webWorker.postMessage(msg);
};
return emitter;
}
// Worker-side transport: uses the worker global scope (self) message channel.
function worker() {
  const emitter = new EventEmitter();
  self.onmessage = ({ data }) => {
    const looksLikeRpc = data && (data.method || (data.id && ('result' in data || 'error' in data)));
    if (looksLikeRpc) {
      emitter.emit('rpc', data);
    }
  };
  emitter.send = (msg) => {
    self.postMessage(msg);
  };
  return emitter;
}
// Factory: with a Worker instance returns the DOM-side transport; with no
// argument assumes we are running inside the worker itself.
function transport(webWorker) {
  return webWorker ? dom(webWorker) : worker();
}

// backwards compat
transport.dom = dom;
transport.worker = worker;

module.exports = transport;
<|start_filename|>transports/index.js<|end_filename|>
// Registry of the bundled transport factories.
const mqtt = require('./mqtt');
const socketio = require('./socketio');
const websocket = require('./websocket');
const worker = require('./worker');
module.exports = {
  mqtt,
  socketio,
  websocket,
  worker
};
<|start_filename|>transports/socketio/index.js<|end_filename|>
const { EventEmitter } = require('events');
function transport({ connection, subTopic, pubTopic }) {
const emitter = new EventEmitter();
connection.on(subTopic, (msg) => {
if (msg.method || (msg.id && ('result' in msg || 'error' in msg))) {
emitter.emit('rpc', msg);
}
});
emitter.send = (msg) => {
connection.emit(pubTopic, msg);
};
return emitter;
}
module.exports = transport;
<|start_filename|>examples/mqtt/server.js<|end_filename|>
// Minimal MQTT broker (aedes over raw TCP) for the client-a/client-b examples.
const aedes = require('aedes')();
const server = require('net').createServer(aedes.handle);
const port = 1883; // standard MQTT port
server.listen(port, () => {
  console.log('mqtt server listening on port', port);
});
<|start_filename|>examples/websocket/server.js<|end_filename|>
// Express + ws example server: exposes an `add` RPC to each connected
// browser and periodically calls getRandom() back on the client.
const express = require('express');
const http = require('http');
const WebSocket = require('ws');
const rawr = require('../../');
const app = express();
app.use(express.static('public'));
const server = http.createServer(app);
const wss = new WebSocket.Server({ server });
// RPC handler exposed to clients.
function add(x, y) {
  return x + y;
}
wss.on('connection', (socket) => {
  // One rawr peer per websocket connection.
  const rawrPeer = rawr({ transport: rawr.transports.websocket(socket) });
  rawrPeer.addHandler('add', add);
  // make RPC calls to the client
  const intervalId = setInterval(async () => {
    const val = await rawrPeer.methods.getRandom();
    console.log('random from client', val);
  }, 1000);
  // cleanup: stop polling when the client disconnects
  socket.on('close', () => {
    console.log('disconnected');
    clearInterval(intervalId);
  });
});
server.listen(8080, () => {
  console.log('Listening on %d', server.address().port);
});
<|start_filename|>global.js<|end_filename|>
// Browser-bundle entry point: exposes rawr as a global as well as exporting it.
const rawr = require('./');
global.rawr = rawr;
module.exports = rawr;
<|start_filename|>examples/webworker/main.js<|end_filename|>
const rawr = require('../../');
// Spawn the worker that hosts the `add` RPC handler.
const myWorker = new Worker('/worker-bundle.js');
// create the rawr peer (DOM side of the worker message channel)
const rawPeer = rawr({ transport: rawr.transports.worker(myWorker) });
// handle requests from the webworker
rawPeer.addHandler('getRandom', () => Math.random());
// On click, ask the worker to add the two input numbers and show the result.
document.getElementById('addBtn').addEventListener('click', async () => {
  const num1 = parseFloat(document.getElementById('number1').value);
  const num2 = parseFloat(document.getElementById('number2').value);
  const result = await rawPeer.methods.add(num1, num2);
  document.getElementById('result').innerHTML = result;
}, false);
<|start_filename|>transports/websocket/index.js<|end_filename|>
const { EventEmitter } = require('events');
function transport(socket, allowBinary = false) {
const emitter = new EventEmitter();
socket.addEventListener('message', async (evt) => {
let { data } = evt;
if (allowBinary && data instanceof Blob) {
data = await (new Response(data)).text().catch(() => null);
}
if (typeof evt.data === 'string') {
try {
const msg = JSON.parse(evt.data);
if (msg.method || (msg.id && ('result' in msg || 'error' in msg))) {
emitter.emit('rpc', msg);
}
} catch (err) {
// wasn't a JSON message
}
}
});
emitter.send = (msg) => {
socket.send(JSON.stringify(msg));
};
return emitter;
}
module.exports = transport;
<|start_filename|>test/mqtt_transport.js<|end_filename|>
const chai = require('chai');
const { EventEmitter } = require('events');
const rawr = require('../');
// Enable chai's should-style assertions (patches Object.prototype).
chai.should();
// Fake MQTT connections wired back-to-back. Note the fake publish() ignores
// its topic argument and always delivers on 'aPub'/'bPub' respectively, which
// is what makes the "bad topic" transport below time out.
function mockTransports() {
  const a = new EventEmitter();
  const b = new EventEmitter();
  // a publishes -> b receives on 'aPub'
  a.publish = (topic, msg) => {
    b.emit('message', 'aPub', msg);
  };
  a.subscribe = () => {};
  // b publishes -> a receives on 'bPub'
  b.publish = (topic, msg) => {
    a.emit('message', 'bPub', msg);
  };
  b.subscribe = () => {};
  const transportA = rawr.transports.mqtt({
    connection: a,
    pubTopic: 'aPub',
    subTopic: 'bPub'
  });
  // Raw connection exposed so tests can inject malformed traffic.
  transportA.a = a;
  const transportB = rawr.transports.mqtt({
    connection: b,
    pubTopic: 'bPub',
    subTopic: 'aPub'
  });
  transportB.b = b;
  // Shares connection `a` but skips subscribing (already-subscribed case).
  const transportDontSub = rawr.transports.mqtt({
    connection: a,
    pubTopic: 'aPub',
    subTopic: 'bPub',
    subscribe: false,
  });
  // Pub/sub on a topic the fakes never deliver to: all calls will time out.
  const transportBadTopic = rawr.transports.mqtt({
    connection: a,
    pubTopic: 'somethingElse',
    subTopic: 'somethingElse',
  });
  return { transportA, transportB, transportDontSub, transportBadTopic };
}
// Test handler: resolves "hello, <name>" after 100ms, or rejects immediately
// with an error carrying code 9000 when name === 'bad'.
function helloTest(name) {
  return new Promise((resolve, reject) => {
    if (name === 'bad') {
      const error = new Error('bad name !');
      error.code = 9000;
      reject(error);
      return;
    }
    setTimeout(() => resolve(`hello, ${name}`), 100);
  });
}
// Test RPC handler: sum of two numbers.
function add(a, b) {
  const sum = a + b;
  return sum;
}
// Test RPC handler: difference of two numbers.
function subtract(a, b) {
  const diff = a - b;
  return diff;
}
// Peer tests over the fake MQTT transports from mockTransports().
describe('mqtt', () => {
  // Non-JSON traffic on the subscribed topic must not break client creation.
  it('should make a client', (done) => {
    const { transportA, transportB } = mockTransports();
    transportB.b.publish('bPub', 'check bad json');
    const client = rawr({ transport: transportA });
    client.should.be.a('object');
    client.addHandler.should.be.a('function');
    done();
  });
  // subscribe: false path — transport relies on an existing subscription.
  it('should make a client with an already subscribed transport', (done) => {
    const { transportDontSub, transportB } = mockTransports();
    transportB.b.publish('bPub', 'check bad json');
    const client = rawr({ transport: transportDontSub });
    client.should.be.a('object');
    client.addHandler.should.be.a('function');
    done();
  });
  // Both peers expose a handler and call each other.
  it('client should make a successful rpc call to another peer', async () => {
    const { transportA, transportB } = mockTransports();
    const clientA = rawr({ transport: transportA, handlers: { add } });
    const clientB = rawr({ transport: transportB, handlers: { subtract } });
    const resultA = await clientA.methods.subtract(7, 2);
    const resultB = await clientB.methods.add(1, 2);
    resultA.should.equal(5);
    resultB.should.equal(3);
  });
  // A JSON object that is not RPC-shaped is ignored; the real call still works.
  it('client should handle bad messages on topic', async () => {
    const { transportA, transportB } = mockTransports();
    const clientA = rawr({ transport: transportA });
    const clientB = rawr({ transport: transportB, handlers: { subtract } });
    transportA.a.publish('aPub', `{"something": "bad"}`);
    const resultA = await clientA.methods.subtract(7, 2);
    resultA.should.equal(5);
  });
  // A handler rejection propagates its error code back to the caller.
  it('client should make an unsuccessful rpc call to a peer', async () => {
    const { transportA, transportB } = mockTransports();
    const clientA = rawr({ transport: transportA, handlers: { helloTest } });
    const clientB = rawr({ transport: transportB });
    clientA.should.be.an('object');
    try {
      await clientB.methods.helloTest('bad');
    } catch (error) {
      error.code.should.equal(9000);
    }
  });
  // helloTest resolves in ~100ms, well within the 1000ms budget.
  it('client handle an rpc under a specified timeout', async () => {
    const { transportA, transportB } = mockTransports();
    const clientA = rawr({ transport: transportA, handlers: { helloTest } });
    const clientB = rawr({ transport: transportB, timeout: 1000 });
    clientA.should.be.an('object');
    const result = await clientB.methods.helloTest('luis');
    result.should.equal('hello, luis');
  });
  // With a 10ms budget the timeout fires first and rejects with code 504.
  it('client handle an rpc timeout', async () => {
    const { transportA, transportB } = mockTransports();
    const clientA = rawr({ transport: transportA, handlers: { helloTest } });
    const clientB = rawr({ transport: transportB, timeout: 10 });
    clientA.should.be.an('object');
    try {
      await clientB.methods.helloTest('luis');
    } catch (error) {
      error.code.should.equal(504);
    }
  });
  // The bad-topic transport never receives a response, so the call times out.
  it('client handle an rpc timeout becuase topic didnt match', async () => {
    const { transportA, transportBadTopic } = mockTransports();
    const clientA = rawr({ transport: transportA, handlers: { helloTest } });
    const clientB = rawr({ transport: transportBadTopic, timeout: 10 });
    clientA.should.be.an('object');
    try {
      await clientB.methods.helloTest('luis');
    } catch (error) {
      error.code.should.equal(504);
    }
  });
  // Notifications are fire-and-forget and surface via the notifications proxy.
  it('client should be able to send a notification to a server', (done) => {
    const { transportA, transportB } = mockTransports();
    const clientA = rawr({ transport: transportA });
    const clientB = rawr({ transport: transportB });
    clientA.notifications.ondoSomething((someData) => {
      someData.should.equal('testing_notification');
      done();
    });
    clientB.notifiers.doSomething('testing_notification');
  });
});
<|start_filename|>examples/websocket/client.js<|end_filename|>
const rawr = require('../../');
const ws = new WebSocket('ws://localhost:8080');
ws.onopen = () => {
  // Create the rawr peer over the freshly opened websocket.
  const rawPeer = rawr({ transport: rawr.transports.websocket(ws) });
  // Serve "getRandom" requests coming from the websocket server.
  rawPeer.addHandler('getRandom', () => Math.random());
  // On button click, RPC into the server's "add" method and show the result.
  const onAddClick = async () => {
    const num1 = parseFloat(document.getElementById('number1').value);
    const num2 = parseFloat(document.getElementById('number2').value);
    const sum = await rawPeer.methods.add(num1, num2);
    document.getElementById('result').innerHTML = sum;
  };
  document.getElementById('addBtn').addEventListener('click', onAddClick, false);
};
| iceddev/rawr |
<|start_filename|>database/models/inventory.js<|end_filename|>
const { model, Schema } = require("mongoose")
// Mongoose model for a per-guild, per-user inventory record.
// `Inventory` is a free-form object; its shape is not constrained by the schema.
const inventorySchema = new Schema({
  Guild: String,
  User: String,
  Inventory: Object,
});

module.exports = model('inventory', inventorySchema);
<|start_filename|>src/main/java/com/qxw/web/WebController.java<|end_filename|>
package com.qxw.web;
import com.alibaba.fastjson.JSONObject;
import com.mongodb.BasicDBObject;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.MongoIterable;
import com.mongodb.client.model.Sorts;
import com.qxw.mongodb.MongoSdkBase;
import com.qxw.utils.ByteConvKbUtils;
import com.qxw.utils.JsonFormatTool;
import com.qxw.utils.Res;
import org.bson.Document;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.OutputStream;
import java.util.*;
/**
* mongodb web
* @author qxw
* @data 2018年11月20日下午1:03:32
*/
@Controller
@RequestMapping("mongo")
public class WebController {

    private Logger logger = LoggerFactory.getLogger(getClass());

    @Value("${login.username}")
    private String username;

    @Value("${login.password}")
    private String password;

    @Autowired
    private MongoSdkBase mongoSdkBase;

    /**
     * System collections hidden from the per-database table listing.
     * (Renamed from the original typo "TAVLEARR".)
     */
    private final static String[] EXCLUDED_TABLES = {"system.indexes"};

    /**
     * Simple form login backed by the two configured properties.
     *
     * @param uname   submitted user name
     * @param pwd     submitted password
     * @param request used to store the user name in the HTTP session on success
     * @return Res.ok() on success, Res.error(...) with a reason otherwise
     */
    @ResponseBody
    @RequestMapping("/login")
    public Res login(String uname, String pwd, HttpServletRequest request) {
        if (StringUtils.isEmpty(uname) || StringUtils.isEmpty(pwd)) {
            return Res.error("账号密码不能为空");
        }
        if (!username.equals(uname)) {
            return Res.error("账号不存在");
        }
        if (!password.equals(pwd)) {
            return Res.error("密码错误");
        }
        request.getSession().setAttribute("username", username);
        return Res.ok();
    }

    /**
     * Lists all database names, hiding MongoDB's internal databases.
     */
    @ResponseBody
    @RequestMapping("/index")
    public Res index() {
        List<String> listNames = mongoSdkBase.getDbList();
        // Hide MongoDB's internal databases.
        listNames.remove("admin");
        listNames.remove("local");
        logger.info(listNames.toString());
        return Res.ok().put("listNames", listNames);
    }

    /**
     * Lists the collections of one database together with their storage size.
     *
     * @param dbName database name
     */
    @ResponseBody
    @RequestMapping("/db")
    public Res db(String dbName) {
        if (StringUtils.isEmpty(dbName)) {
            return Res.error("dbName参数不能为空");
        }
        if ("undefined".equals(dbName)) {
            return Res.error("请关闭所有的iframe后在执行F5");
        }
        MongoDatabase database = mongoSdkBase.getMongoDb(dbName);
        // Iterate all collection names, filtering out system collections.
        MongoIterable<String> collectionNames = database.listCollectionNames();
        MongoCursor<String> it = collectionNames.iterator();
        List<JSONObject> listNames = new ArrayList<>();
        while (it.hasNext()) {
            String tableName = it.next();
            if (Arrays.asList(EXCLUDED_TABLES).contains(tableName)) {
                continue;
            }
            JSONObject t = new JSONObject();
            t.put("tableName", tableName);
            BasicDBObject stats = mongoSdkBase.getStats(dbName, tableName);
            t.put("size", ByteConvKbUtils.getPrintSize(stats.getInt("size")));
            listNames.add(t);
        }
        return Res.ok().put("listNames", listNames);
    }

    /***
     * Pages through the documents of one collection.
     *
     * @param pageNum  1-based page number (request parameter "p")
     * @param pageSize page size (request parameter "s")
     * @param dbName    database name
     * @param tableName collection name
     * @param parame    optional JSON filter; a "$group" key switches to an
     *                  aggregation (group) query
     */
    @ResponseBody
    @RequestMapping("/getCollection")
    public Res getCollection(@RequestParam(value = "p", defaultValue = "1") int pageNum,
                             @RequestParam(value = "s", defaultValue = "10") int pageSize,
                             String dbName, String tableName, String parame) {
        if (StringUtils.isEmpty(dbName) || StringUtils.isEmpty(tableName)) {
            return Res.error("dbName,tableName参数不能为空");
        }
        BasicDBObject query = new BasicDBObject();
        BasicDBObject group = new BasicDBObject();
        if (!StringUtils.isEmpty(parame)) {
            JSONObject obj = JSONObject.parseObject(parame);
            // Split the caller's JSON into a plain filter and an optional $group stage.
            obj.keySet().forEach(key -> {
                if ("$group".equals(key)) {
                    group.put(key, obj.get(key));
                } else {
                    query.put(key, obj.get(key));
                }
            });
        }
        MongoCollection<Document> table = mongoSdkBase.getColl(dbName, tableName);
        JSONObject data;
        if (group.size() == 0) {
            data = mongoSdkBase.getPage(table, query, Sorts.descending("_id"), pageNum, pageSize);
        } else {
            data = mongoSdkBase.getGroupPage(table, query, group, Sorts.descending("_id"), pageNum, pageSize);
        }
        // Expose the key set of one sample document so the UI can render columns.
        Document first = mongoSdkBase.getColl(dbName, tableName).find().first();
        Map<String, Object> m = new HashMap<>(16);
        m.put("data", data);
        if (first != null) {
            m.put("keys", first.keySet());
        }
        return Res.ok(m);
    }

    /**
     * Deletes one document by primary key.
     *
     * @param dbName    database name
     * @param tableName collection name
     * @param id        document _id
     */
    @ResponseBody
    @RequestMapping("/deleteCollection")
    public Res deleteCollection(String dbName, String tableName, String id) {
        if (StringUtils.isEmpty(dbName) || StringUtils.isEmpty(tableName) || StringUtils.isEmpty(id)) {
            return Res.error("dbName,tableName,id,参数不能为空");
        }
        int count = mongoSdkBase.deleteOne(mongoSdkBase.getColl(dbName, tableName), id);
        return count > 0 ? Res.ok() : Res.error("删除失败");
    }

    /**
     * Updates one document; the payload must contain its "_id".
     *
     * @param parame JSON string of the full document
     */
    @ResponseBody
    @RequestMapping("/updateCollection")
    public Res updateCollection(String dbName, String tableName, String parame) {
        if (StringUtils.isEmpty(dbName) || StringUtils.isEmpty(tableName) || StringUtils.isEmpty(parame)) {
            return Res.error("dbName,tableName,parame,参数不能为空");
        }
        JSONObject info = JSONObject.parseObject(parame);
        String id = info.getString("_id");
        boolean updated = mongoSdkBase.updateOne(mongoSdkBase.getColl(dbName, tableName), id, info);
        return updated ? Res.ok() : Res.error("更新失败");
    }

    /**
     * Inserts one document.
     *
     * @param parame JSON string of the document to insert
     */
    @ResponseBody
    @RequestMapping("/saveCollection")
    public Res saveCollection(String dbName, String tableName, String parame) {
        if (StringUtils.isEmpty(dbName) || StringUtils.isEmpty(tableName) || StringUtils.isEmpty(parame)) {
            return Res.error("dbName,tableName,parame,参数不能为空");
        }
        JSONObject info = JSONObject.parseObject(parame);
        String id = mongoSdkBase.insertOne(mongoSdkBase.getColl(dbName, tableName), info);
        return StringUtils.isEmpty(id) ? Res.error("添加失败") : Res.ok();
    }

    /**
     * Fetches one document by primary key.
     */
    @ResponseBody
    @RequestMapping("/findOne")
    public Res findOne(String dbName, String tableName, String id) {
        if (StringUtils.isEmpty(dbName) || StringUtils.isEmpty(tableName) || StringUtils.isEmpty(id)) {
            return Res.error("dbName,tableName,id,参数不能为空");
        }
        String result = mongoSdkBase.seleteOne(mongoSdkBase.getColl(dbName, tableName), id);
        return Res.ok().put("data", JSONObject.parseObject(result));
    }

    /**
     * Exports the (optionally filtered) collection as a downloadable JSON file.
     */
    @RequestMapping("/exportList")
    public void exportList(HttpServletRequest request, HttpServletResponse response,
                           String dbName, String tableName, String parame) {
        // Charset declared to the browser; the body below must be written with
        // the very same encoding.
        String csvEncoding = "UTF-8";
        response.setCharacterEncoding(csvEncoding);
        response.setContentType("application/json; charset=" + csvEncoding);
        response.setHeader("Pragma", "public");
        response.setHeader("Cache-Control", "max-age=30");
        try {
            BasicDBObject query = new BasicDBObject();
            if (!StringUtils.isEmpty(parame)) {
                JSONObject obj = JSONObject.parseObject(parame);
                obj.keySet().forEach(key -> query.put(key, obj.get(key)));
            }
            List<JSONObject> list = mongoSdkBase.getAll(
                    mongoSdkBase.getColl(dbName, tableName), query, Sorts.descending("_id"));
            response.setHeader("Content-Disposition", "attachment; filename=\"" + tableName + ".json" + "\"");
            // try-with-resources closes the stream even when writing fails.
            try (OutputStream os = response.getOutputStream()) {
                // FIX: the payload used to be written as GBK while the headers
                // above declare UTF-8, producing mojibake in the download.
                os.write(JsonFormatTool.formatJson(list.toString()).getBytes(csvEncoding));
                os.flush();
            }
        } catch (Exception e) {
            logger.error("导出异常:tableName:{},{}", tableName, e);
        }
    }
}
<|start_filename|>src/main/java/com/qxw/utils/CsvUtils.java<|end_filename|>
package com.qxw.utils;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.URLEncoder;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
*
* @author Levan
* @time 2017年11月20日上午11:25:41
* @describe 导出csv并弹出下载框提示~
*/
public class CsvUtils {

    /** CSV column separator. */
    public static final String CSV_COLUMN_SEPARATOR = ",";

    /** CSV row terminator. */
    public static final String CSV_RN = "\r\n";

    public final static Logger logger = LoggerFactory.getLogger(CsvUtils.class);

    /** User-agent fragments used to pick the download file-name encoding. */
    public final static String MSIE = "MSIE";
    public final static String MOZILLA = "Mozilla";

    /**
     * Builds the CSV payload from query results.
     *
     * @param data            rows from the database
     * @param displayColNames comma-separated CSV header labels
     * @param matchColNames   comma-separated keys into each row map; prefix a
     *                        key with "-" (e.g. "-num") to force the value to be
     *                        written as text so spreadsheet programs keep
     *                        leading zeros such as "001"
     * @return the CSV content
     */
    public static String formatCsvData(List<Map<String, Object>> data, String displayColNames, String matchColNames) {
        // StringBuilder: local, single-threaded — no need for StringBuffer locking.
        StringBuilder buf = new StringBuilder();
        String[] headers = displayColNames.split(",");
        String[] keys = matchColNames.split(",");
        // Header row.
        for (int i = 0; i < headers.length; i++) {
            buf.append(headers[i]).append(CSV_COLUMN_SEPARATOR);
        }
        buf.append(CSV_RN);
        if (data != null) {
            for (int i = 0; i < data.size(); i++) {
                for (int j = 0; j < keys.length; j++) {
                    // A prefixed key ("-num") is not present in the row itself;
                    // fall back to the key without its first character.
                    Object value = data.get(i).get(keys[j]);
                    if (value == null) {
                        value = data.get(i).get(keys[j].substring(1));
                    }
                    if (value == null) {
                        buf.append(CSV_COLUMN_SEPARATOR);
                    } else if (keys[j].startsWith("-")) {
                        // Leading tab keeps the value textual in Excel.
                        buf.append("\t" + value.toString()).append(CSV_COLUMN_SEPARATOR);
                    } else {
                        buf.append(value).append(CSV_COLUMN_SEPARATOR);
                    }
                }
                buf.append(CSV_RN);
            }
        }
        logger.info("csv file Initialize successfully");
        return buf.toString();
    }

    /**
     * Streams CSV content to the browser as a file download.
     *
     * @param fileName download file name
     * @param content  CSV body, typically produced by {@link #formatCsvData}
     */
    public static void exportCsv(String fileName, String content, HttpServletRequest request, HttpServletResponse response) {
        // Charset declared to the browser; the body below must be written with
        // the same encoding.
        String csvEncoding = "UTF-8";
        response.setCharacterEncoding(csvEncoding);
        response.setContentType("text/csv; charset=" + csvEncoding);
        response.setHeader("Pragma", "public");
        response.setHeader("Cache-Control", "max-age=30");
        final String userAgent = request.getHeader("USER-AGENT");
        try {
            if (StringUtils.contains(userAgent, MSIE)) {
                // IE expects a URL-encoded file name.
                fileName = URLEncoder.encode(fileName, "UTF8");
            } else if (StringUtils.contains(userAgent, MOZILLA)) {
                // Chrome/Firefox: raw bytes re-interpreted as ISO-8859-1.
                fileName = new String(fileName.getBytes(), "ISO8859-1");
            } else {
                // Other browsers: URL-encode as well.
                fileName = URLEncoder.encode(fileName, "UTF8");
            }
            response.setHeader("Content-Disposition", "attachment; filename=\"" + fileName + "\"");
            // try-with-resources closes the stream even when writing fails.
            try (OutputStream os = response.getOutputStream()) {
                // FIX: the body used to be written as GBK while the headers
                // above declare UTF-8, producing mojibake in the download.
                os.write(content.getBytes(csvEncoding));
                os.flush();
            }
        } catch (Exception e) {
            logger.error("exportCsv failed, fileName={}", fileName, e);
        }
    }

    /**
     * Creates a CSV file and writes the header row. The file is GBK-encoded
     * (kept as-is for Excel on zh-CN Windows; appendDate must match).
     *
     * @param filePath target directory
     * @param fileName file name
     * @param colNames header labels
     * @return the created file, or null when writing failed
     */
    public static File createFileAndColName(String filePath, String fileName, String[] colNames) {
        File csvFile = new File(filePath, fileName);
        // try-with-resources: the writer is closed even on failure.
        try (PrintWriter pw = new PrintWriter(csvFile, "GBK")) {
            StringBuilder sb = new StringBuilder();
            for (int i = 0; i < colNames.length; i++) {
                if (i < colNames.length - 1) {
                    sb.append(colNames[i] + ",");
                } else {
                    sb.append(colNames[i] + "\r\n");
                }
            }
            pw.print(sb.toString());
            pw.flush();
            return csvFile;
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Appends rows to an existing CSV file (GBK-encoded, matching
     * {@link #createFileAndColName}).
     *
     * @param csvFile target file
     * @param sb      pre-formatted rows to append
     */
    public static void appendDate(File csvFile, StringBuffer sb) {
        // FIX: the writer used to leak when write/flush threw;
        // try-with-resources always closes it.
        try (BufferedWriter bw = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(csvFile, true), "GBK"), 1024)) {
            bw.write(sb.toString());
            bw.flush();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Ad-hoc manual test (hard-coded local path). */
    public static void main(String[] args) {
        String[] colNames = {"第一列", "第二列", "第三列", "第四列"};
        File csvFile = createFileAndColName("E:\\eclipseWork\\.metadata\\.plugins\\org.eclipse.wst.server.core\\tmp1\\wtpwebapps\\shakeExternal\\impCsv", "test.csv", colNames);
        StringBuffer sb = new StringBuffer();
        sb.append("组织机构,手机号,姓名,工号");
        sb.append("\r\n");
        appendDate(csvFile, sb);
    }
}
<|start_filename|>src/main/java/com/qxw/mongodb/MongoConfig.java<|end_filename|>
package com.qxw.mongodb;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.util.StringUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* mongo配置
* @author qinxuewu
* @version 1.00
* @time 29/11/2018 下午 1:34
* @email <EMAIL>
*/
@Configuration
public class MongoConfig {

    private final Logger logger = LoggerFactory.getLogger(getClass());

    // "host1:port1;host2:port2" — one entry for a standalone server,
    // several for a replica set.
    @Value("${mongo.host.port}")
    private String HOST_PORT;

    // "user:password:database", empty when authentication is disabled.
    @Value("${mongo.user.pass.db}")
    private String USER_PASS_DB;

    /**
     * Builds the shared MongoClient bean from the two configuration
     * properties above.
     */
    @Bean
    public MongoClient mongoClient() {
        logger.debug("*********初始化mongodb*************************");
        // Credentials are optional: build a SCRAM-SHA-1 credential only when
        // "user:password:database" is configured.
        List<MongoCredential> credentials = Collections.<MongoCredential>emptyList();
        if (!StringUtils.isEmpty(USER_PASS_DB)) {
            String[] parts = USER_PASS_DB.split(":");
            credentials = new ArrayList<MongoCredential>();
            credentials.add(MongoCredential.createScramSha1Credential(parts[0], parts[2], parts[1].toCharArray()));
        }
        MongoClientOptions options =
                new MongoClientOptions.Builder().threadsAllowedToBlockForConnectionMultiplier(10).build();
        String[] hostEntries = HOST_PORT.split(";");
        MongoClient client;
        if (hostEntries.length == 1) {
            // Single standalone server.
            String[] hp = hostEntries[0].split(":");
            client = new MongoClient(new ServerAddress(hp[0], Integer.parseInt(hp[1])), credentials, options);
        } else {
            // Replica set: register every "host:port" entry.
            List<ServerAddress> addresses = new ArrayList<ServerAddress>();
            for (String entry : hostEntries) {
                String[] hp = entry.split(":");
                addresses.add(new ServerAddress(hp[0], Integer.parseInt(hp[1])));
            }
            client = new MongoClient(addresses, credentials, options);
        }
        logger.info("*********** mongoClient 偏好为=「」" + client.getReadPreference().toString());
        return client;
    }
}
<|start_filename|>src/main/java/com/qxw/mongodb/MongoSdkBase.java<|end_filename|>
package com.qxw.mongodb;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SerializeFilter;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.alibaba.fastjson.serializer.ValueFilter;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.result.DeleteResult;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.bson.types.ObjectId;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static com.mongodb.client.model.Aggregates.*;
import static com.mongodb.client.model.Filters.eq;
/**
* mongodb工具类
* @author qinxuewu
*
*/
@Component
public class MongoSdkBase {

    @Autowired
    private MongoClient mongoClient;

    /**
     * Returns a collection handle in the default "actuator" database.
     */
    public MongoCollection<Document> getColl(String tableName) {
        MongoDatabase db = mongoClient.getDatabase("actuator");
        return db.getCollection(tableName);
    }

    /**
     * Returns a collection handle in the given database.
     */
    public MongoCollection<Document> getColl(String dbName, String tableName) {
        MongoDatabase db = mongoClient.getDatabase(dbName);
        return db.getCollection(tableName);
    }

    /***
     * Inserts one Document, always generating a fresh String _id.
     *
     * @param table collection handle
     * @param doc   document to insert (its _id, if any, is replaced)
     * @return the generated _id, or null when doc is null
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public String insertOneDocument(MongoCollection table, Document doc) {
        if (doc == null) { return null; }
        // Always use a fresh String _id so lookups by id are uniform.
        doc.remove("_id");
        doc.put("_id", new ObjectId().toString());
        table.insertOne(doc);
        return doc.get("_id").toString();
    }

    /***
     * Inserts one object after serializing it to JSON.
     *
     * @param table collection handle
     * @param obj   object to insert (note: doubles may lose their exact form
     *              on the JSON round-trip)
     * @return the generated _id, or null when obj is null
     */
    public String insertOne(MongoCollection table, Object obj) {
        if (obj == null) { return null; }
        Document docine = Document.parse(diyObjectIdToJson(obj));
        docine.remove("_id");
        docine.put("_id", new ObjectId().toString());
        table.insertOne(docine);
        return docine.get("_id").toString();
    }

    /**
     * Deletes the document with the given _id.
     *
     * @return the number of deleted documents (0 or 1)
     */
    public int deleteOne(MongoCollection table, String id) {
        Bson filter = eq("_id", id);
        DeleteResult re = table.deleteOne(filter);
        return (int) re.getDeletedCount();
    }

    /**
     * $set-updates the document with the given _id.
     * NOTE: fields present in obj but not in the stored document are appended.
     */
    @SuppressWarnings("rawtypes")
    public boolean updateOne(MongoCollection table, String id, Object obj) {
        Bson filter = eq("_id", id);
        table.updateOne(filter, set(diyObjectIdToJson(obj)));
        return true;
    }

    /**
     * Finds one document by _id and returns it as a JSON string.
     */
    public String seleteOne(MongoCollection table, String id) {
        Bson filter = eq("_id", id);
        return diyObjectIdToJson(seleteOneDocument(table, filter));
    }

    /**
     * Finds one document by filter and returns it as a JSON string.
     */
    public String seleteOne(MongoCollection table, Bson filter) {
        return diyObjectIdToJson(seleteOneDocument(table, filter));
    }

    /**
     * Finds one document by filter and returns it as org.bson.Document.
     */
    public Document seleteOneDocument(MongoCollection table, Bson filter) {
        FindIterable<Document> result = table.find(filter);
        return result.first();
    }

    /**
     * Returns every matching document (no paging — caller limits the result).
     *
     * @param filter com.mongodb.client.model.Filters filter, may be null
     * @param sort   com.mongodb.client.model.Sorts sort, may be null
     */
    public List<JSONObject> getAll(MongoCollection table, Bson filter, Bson sort) {
        List<JSONObject> list = new ArrayList<>();
        FindIterable<Document> result = filter == null
                ? table.find().sort(sort)
                : table.find(filter).sort(sort);
        MongoCursor<Document> iterator = result.iterator();
        while (iterator.hasNext()) {
            list.add(JSON.parseObject(diyObjectIdToJson(iterator.next())));
        }
        return list;
    }

    public List<JSONObject> getAllbyCid(MongoCollection table, int cid) {
        return getAll(table, eq("cid", cid), null);
    }

    /**
     * Paged query.
     *
     * @param filter   com.mongodb.client.model.Filters filter, may be null
     * @param sort     com.mongodb.client.model.Sorts sort
     * @param pageNum  1-based page number
     * @param pageSize page size
     * @return {pageNum, pageSize, totalCount, totalPage, data}
     */
    public JSONObject getPage(MongoCollection table, Bson filter, Bson sort, int pageNum, int pageSize) {
        int totalCount = (int) (filter == null ? table.count() : table.count(filter));
        int totalPage = totalCount / pageSize + ((totalCount % pageSize == 0) ? 0 : 1);
        // Clamp the requested page to the last existing one.
        if (pageNum > totalPage) { pageNum = totalPage; }
        JSONObject msg = new JSONObject();
        msg.put("pageNum", pageNum);
        msg.put("pageSize", pageSize);
        msg.put("totalCount", totalCount);
        msg.put("totalPage", totalPage);
        List<JSONObject> list = new ArrayList<>();
        if (totalCount > 0) {
            int startRow = pageNum > 0 ? (pageNum - 1) * pageSize : 0;
            FindIterable<Document> result = filter == null
                    ? table.find().sort(sort).skip(startRow).limit(pageSize)
                    : table.find(filter).sort(sort).skip(startRow).limit(pageSize);
            MongoCursor<Document> iterator = result.iterator();
            while (iterator.hasNext()) {
                list.add(JSON.parseObject(diyObjectIdToJson(iterator.next())));
            }
        }
        msg.put("data", list);
        return msg;
    }

    /**
     * Paged aggregation ($match + $group) query.
     *
     * @return {pageNum, pageSize, totalCount, totalPage, data}
     */
    public JSONObject getGroupPage(MongoCollection table, Bson filter, Bson group, Bson sorts, int pageNum, int pageSize) {
        List<JSONObject> list = new ArrayList<>();
        int startRow = pageNum > 0 ? (pageNum - 1) * pageSize : 0;
        MongoCursor<Document> iterator = table.aggregate(
                Arrays.asList(match(filter), group, sort(sorts), skip(startRow), limit(pageSize))).iterator();
        while (iterator.hasNext()) {
            list.add(JSON.parseObject(diyObjectIdToJson(iterator.next())));
        }
        // FIX: totalCount used to be the size of the already-limited page
        // (never more than pageSize), so totalPage was always 0 or 1.
        // Count the complete grouped result with a second, unpaged
        // aggregation instead (costs one extra server round trip).
        int totalCount = 0;
        MongoCursor<Document> countIterator = table.aggregate(Arrays.asList(match(filter), group)).iterator();
        while (countIterator.hasNext()) {
            countIterator.next();
            totalCount++;
        }
        int totalPage = totalCount / pageSize + ((totalCount % pageSize == 0) ? 0 : 1);
        JSONObject msg = new JSONObject();
        msg.put("pageNum", pageNum);
        msg.put("pageSize", pageSize);
        msg.put("totalCount", totalCount);
        msg.put("totalPage", totalPage);
        msg.put("data", list);
        return msg;
    }

    /**
     * Aggregation ($match + group + sort) without paging.
     */
    public List<JSONObject> getAggregateList(MongoCollection table, Bson filter, DBObject group, Bson sort) {
        List<JSONObject> list = new ArrayList<>();
        MongoCursor<Document> iterator = table.aggregate(Arrays.asList(match(filter), group, sort(sort))).iterator();
        while (iterator.hasNext()) {
            list.add(JSON.parseObject(diyObjectIdToJson(iterator.next())));
        }
        return list;
    }

    /**
     * All database names (kept on the legacy driver API for compatibility).
     */
    public List<String> getDbList() {
        return mongoClient.getDatabaseNames();
    }

    /***
     * Collection storage statistics (collStats) via the legacy DB API.
     */
    public BasicDBObject getStats(String name, String collection) {
        DB db = new DB(mongoClient, name);
        return db.getCollection(collection).getStats();
    }

    public MongoDatabase getMongoDb(String databaseName) {
        return mongoClient.getDatabase(databaseName);
    }

    /**
     * Builds a $set update document from JSON, dropping the immutable _id.
     * NOTE: extra fields are appended to the stored document.
     */
    public static Document set(String json) {
        Document b = Document.parse(json);
        b.remove("_id");
        return new Document("$set", b);
    }

    /** Serializes ObjectId "_id" values as plain strings. */
    private static SerializeFilter objectIdSerializer = new ValueFilter() {
        @Override
        public Object process(Object object, String name, Object value) {
            if ("_id".equals(name) && value instanceof ObjectId) {
                return value.toString();
            }
            return value;
        }
    };

    /**
     * Post-read JSON serialization: dates formatted, null numbers/strings/maps
     * written as zero/empty defaults.
     */
    public static final String diyObjectIdToJson(Object object) {
        return JSON.toJSONString(object, objectIdSerializer,
                SerializerFeature.WriteDateUseDateFormat,
                SerializerFeature.WriteNullNumberAsZero,
                SerializerFeature.WriteMapNullValue,
                SerializerFeature.WriteNullStringAsEmpty);
    }

    /**
     * Post-read JSON serialization that skips null fields entirely.
     */
    public static final String diyObjToJson(Object object) {
        return JSON.toJSONString(object, objectIdSerializer,
                SerializerFeature.WriteDateUseDateFormat);
    }
}
<|start_filename|>src/main/java/com/qxw/handler/WebAppConfiguer.java<|end_filename|>
package com.qxw.handler;
import javax.annotation.Resource;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.Ordered;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
/**
* 拦截器注册
* @author qxw
* @data 2018年7月17日上午10:16:32
*/
@Configuration
public class WebAppConfiguer implements WebMvcConfigurer {

    @Resource
    private LoginInterceptor loginInterceptor;

    /**
     * Guards every "/mongo/**" endpoint with the login interceptor,
     * leaving only the login endpoint itself unprotected.
     */
    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        registry.addInterceptor(loginInterceptor)
                .addPathPatterns("/mongo/**")
                .excludePathPatterns("/mongo/login");
    }
}
<|start_filename|>src/main/java/com/qxw/exception/MyExceptionHandler.java<|end_filename|>
package com.qxw.exception;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import com.qxw.utils.Res;
/**
* 通用异常处理器
* @author qxw
* @data 2018年11月20日上午11:22:52
*/
@RestControllerAdvice
public class MyExceptionHandler {

    private Logger logger = LoggerFactory.getLogger(getClass());

    /**
     * Maps the application's own MyException to a Res payload carrying the
     * exception's code and message.
     */
    @ExceptionHandler(MyException.class)
    public Res handleRRException(MyException e) {
        Res r = new Res();
        r.put("code", e.getCode());
        r.put("msg", e.getMessage());
        // FIX: with a Throwable as the last argument SLF4J logs the stack
        // trace itself; the previous "{}" placeholder was never filled.
        logger.error("自定义RRException异常", e);
        return r;
    }

    /**
     * Catch-all handler: logs the error and returns the generic failure Res.
     */
    @ExceptionHandler(Exception.class)
    public Res handleException(Exception e) {
        logger.error("全局Exception异常", e);
        return Res.error();
    }
}
<|start_filename|>src/main/java/com/qxw/Application.java<|end_filename|>
package com.qxw;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * Spring Boot entry point: boots the application context and the embedded
 * servlet container.
 * @author qxw
 * @data 2018-11-21
 */
@SpringBootApplication
public class Application {
    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}
| a870439570/Mongodb-WeAdmin |
<|start_filename|>packages/core/tsconfig.json<|end_filename|>
{
"compilerOptions": {
"composite": true,
"baseUrl": ".",
"declaration": true,
"lib": ["es2015", "dom"],
"esModuleInterop": true,
"sourceMap": true,
"mapRoot": "./",
"module": "esnext",
"moduleResolution": "node",
"resolveJsonModule": true,
"outDir": "./dist/build",
"rootDir": ".",
"target": "es5",
"inlineSources": true,
"types": ["node", "jasmine"],
"typeRoots": ["./node_modules/@types", "./custom.d.ts"]
},
"exclude": [
"./src/**/*.spec.ts",
"./src/tests",
"./node_modules",
"../../node_modules"
],
"references": [{ "path": "src" }, { "path": "demo" }],
"files": []
}
<|start_filename|>packages/core/src/styles/vendor/carbon-components/es/globals/js/misc/svg-toggle-class.js<|end_filename|>
// Babel runtime helper: define `key` on `obj` as an enumerable data
// property and return `obj`.
function _defineProperty(obj, key, value) {
  if (!(key in obj)) {
    obj[key] = value;
    return obj;
  }
  Object.defineProperty(obj, key, {
    value: value,
    enumerable: true,
    configurable: true,
    writable: true,
  });
  return obj;
}

/**
 * Copyright IBM Corp. 2016, 2018
 *
 * This source code is licensed under the Apache-2.0 license found in the
 * LICENSE file in the root directory of this source tree.
 */
// Toggles `name` in the `class` attribute of an SVG element.
// NOTE(review): presumably edits the attribute directly because
// SVGElement.classList is unavailable in some target browsers — confirm.
function svgToggleClass(svg, name, forceAdd) {
  var tokens = svg.getAttribute('class').trim().split(/\s+/);
  // De-duplicate while keeping first-occurrence order.
  var unique = [];
  tokens.forEach(function (token) {
    if (unique.indexOf(token) < 0) {
      unique.push(token);
    }
  });
  var index = unique.indexOf(name);
  var present = index >= 0;
  // With no explicit flag, toggle; otherwise force add/remove.
  var shouldAdd = forceAdd === undefined ? !present : forceAdd;
  // Only touch the DOM when membership actually changes.
  if (present === !shouldAdd) {
    if (shouldAdd) {
      unique.push(name);
    } else {
      unique.splice(index, 1);
    }
    svg.setAttribute('class', unique.join(' '));
  }
}
export default svgToggleClass;
<|start_filename|>packages/core/src/styles/vendor/carbon-components/src/globals/js/misc/mixin.js<|end_filename|>
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
/**
* @param {Array} a An array.
* @returns {Array} The flattened version of the given array.
*/
function flatten(a) {
  const result = [];
  for (const item of a) {
    if (Array.isArray(item)) {
      // Recurse into nested arrays so arbitrarily deep nesting is flattened.
      result.push(...flatten(item));
    } else {
      result.push(item);
    }
  }
  return result;
}
/**
* An interface for defining mix-in classes. Used with {@link mixin}.
* @function mixinfn
* @param {Class} ToMix The class to mix.
* @returns {Class} The class mixed-in with the given ToMix class.
*/
/**
* @function mixin
* @param {...mixinfn} mixinfns The functions generating mix-ins.
* @returns {Class} The class generated with the given mix-ins.
*/
export default function mixin(...mixinfns) {
  // Start from an empty base class and let each mix-in function wrap it.
  let Mixed = class {};
  for (const mixinfn of flatten(mixinfns)) {
    Mixed = mixinfn(Mixed);
  }
  return Mixed;
}
<|start_filename|>packages/core/src/styles/vendor/carbon-components/src/globals/js/misc/on.js<|end_filename|>
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
export default function on(element, ...args) {
  // Attach immediately and hand back a disposer, enabling the idiom
  // `handle = on(...); handle = handle.release();`.
  element.addEventListener(...args);
  const handle = {
    release() {
      element.removeEventListener(...args);
      return null;
    },
  };
  return handle;
}
<|start_filename|>packages/core/src/styles/vendor/carbon-components/src/components/loading/loading.js<|end_filename|>
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
import settings from '../../globals/js/settings';
import mixin from '../../globals/js/misc/mixin';
import createComponent from '../../globals/js/mixins/create-component';
import initComponentBySearch from '../../globals/js/mixins/init-component-by-search';
import handles from '../../globals/js/mixins/handles';
import on from '../../globals/js/misc/on';
class Loading extends mixin(createComponent, initComponentBySearch, handles) {
  /**
   * Spinner indicating loading state.
   * @extends CreateComponent
   * @extends InitComponentBySearch
   * @extends Handles
   * @param {HTMLElement} element The element working as a spinner.
   * @param {object} [options] The component options.
   * @param {boolean} [options.active] `true` if this spinner should roll.
   */
  constructor(element, options) {
    super(element, options);
    this.active = this.options.active;

    // Initialize spinner
    this.set(this.active);
  }

  /**
   * Sets active/inactive state.
   * @param {boolean} active `true` if this spinner should roll.
   */
  set(active) {
    if (typeof active !== 'boolean') {
      throw new TypeError('set expects a boolean.');
    }
    this.active = active;
    this.element.classList.toggle(this.options.classLoadingStop, !this.active);

    /**
     * If overlay is the parentNode then toggle it too.
     */
    const { parentNode } = this.element;
    if (
      parentNode &&
      parentNode.classList.contains(this.options.classLoadingOverlay)
    ) {
      parentNode.classList.toggle(
        this.options.classLoadingOverlayStop,
        !this.active
      );
    }

    return this;
  }

  /**
   * Toggles active/inactive state.
   */
  toggle() {
    return this.set(!this.active);
  }

  /**
   * @returns {boolean} `true` if this spinner is rolling.
   */
  isActive() {
    return this.active;
  }

  /**
   * Sets state to inactive and deletes the loading element once the
   * stop animation has finished.
   */
  end() {
    this.set(false);
    let handleAnimationEnd = this.manage(
      on(this.element, 'animationend', evt => {
        if (handleAnimationEnd) {
          handleAnimationEnd = this.unmanage(handleAnimationEnd).release();
        }
        if (evt.animationName === 'rotate-end-p2') {
          this._deleteElement();
        }
      })
    );
  }

  /**
   * Delete component from the DOM.
   */
  _deleteElement() {
    const { parentNode } = this.element;
    parentNode.removeChild(this.element);
    // FIX: previously compared against `selectorLoadingOverlay`
    // (".bx--loading-overlay"); classList tokens never contain a leading
    // dot, so the overlay was never removed. Use the class name instead.
    if (parentNode.classList.contains(this.options.classLoadingOverlay)) {
      parentNode.remove();
    }
  }

  /**
   * The map associating DOM element and spinner instance.
   * @member Loading.components
   * @type {WeakMap}
   */
  static components /* #__PURE_CLASS_PROPERTY__ */ = new WeakMap();

  /**
   * The component options.
   * If `options` is specified in the constructor, {@linkcode Loading.create .create()}, or {@linkcode Loading.init .init()},
   * properties in this object are overriden for the instance being create and how {@linkcode Loading.init .init()} works.
   * @member Loading.options
   * @type {object}
   * @property {string} selectorInit The CSS selector to find spinners.
   */
  static get options() {
    const { prefix } = settings;
    return {
      selectorInit: '[data-loading]',
      selectorLoadingOverlay: `.${prefix}--loading-overlay`,
      classLoadingOverlay: `${prefix}--loading-overlay`,
      classLoadingStop: `${prefix}--loading--stop`,
      classLoadingOverlayStop: `${prefix}--loading-overlay--stop`,
      active: true,
    };
  }
}
<|start_filename|>packages/core/src/styles/vendor/carbon-components/es/components/search/search.js<|end_filename|>
/**
 * Babel helper: a `typeof` that reports "symbol" correctly on engines whose
 * Symbol is polyfilled. Memoizes the chosen implementation by overwriting
 * the `_typeof` binding on first call.
 */
function _typeof(obj) {
  if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") {
    // Native Symbol support: the built-in operator is already correct.
    _typeof = function _typeof(value) {
      return typeof value;
    };
  } else {
    // Polyfilled Symbol: detect symbol instances structurally.
    _typeof = function _typeof(value) {
      var isPolyfilledSymbol =
        value &&
        typeof Symbol === "function" &&
        value.constructor === Symbol &&
        value !== Symbol.prototype;
      return isPolyfilledSymbol ? "symbol" : typeof value;
    };
  }
  return _typeof(obj);
}
/**
 * Babel helper: guards a transpiled class constructor against being invoked
 * without `new`.
 */
function _classCallCheck(instance, Constructor) {
  var calledWithNew = instance instanceof Constructor;
  if (!calledWithNew) {
    throw new TypeError("Cannot call a class as a function");
  }
}
/**
 * Babel helper: installs each property descriptor from `props` onto
 * `target`. Descriptors default to non-enumerable and configurable, and
 * data properties are made writable.
 */
function _defineProperties(target, props) {
  props.forEach(function (descriptor) {
    descriptor.enumerable = descriptor.enumerable || false;
    descriptor.configurable = true;
    if ("value" in descriptor) {
      descriptor.writable = true;
    }
    Object.defineProperty(target, descriptor.key, descriptor);
  });
}
/**
 * Babel helper: attaches prototype members and static members (given as
 * property-descriptor lists) to a transpiled class constructor.
 * @returns The constructor, for chaining.
 */
function _createClass(Constructor, protoProps, staticProps) {
  if (protoProps) {
    _defineProperties(Constructor.prototype, protoProps);
  }
  if (staticProps) {
    _defineProperties(Constructor, staticProps);
  }
  return Constructor;
}
/**
 * Babel inheritance helper: returns the super() call result when it is an
 * object or function (a constructor override), otherwise falls back to
 * `self` after verifying super() has actually run.
 */
function _possibleConstructorReturn(self, call) {
  var overridesThis =
    call && (_typeof(call) === "object" || typeof call === "function");
  return overridesThis ? call : _assertThisInitialized(self);
}
/**
 * Babel inheritance helper: ensures `this` was initialised by a super()
 * call before a derived constructor uses it.
 */
function _assertThisInitialized(self) {
  if (self !== void 0) {
    return self;
  }
  throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
}
/**
 * Babel helper: resolves an object's [[Prototype]], memoizing the best
 * available implementation (native vs. __proto__ fallback) on first use.
 */
function _getPrototypeOf(o) {
  var resolver = Object.setPrototypeOf
    ? Object.getPrototypeOf
    : function _getPrototypeOf(obj) {
        return obj.__proto__ || Object.getPrototypeOf(obj);
      };
  _getPrototypeOf = resolver;
  return resolver(o);
}
/**
 * Babel inheritance helper: wires `subClass` to extend `superClass`,
 * mirroring `class ... extends ...` semantics — the prototype chain for
 * instance members plus the constructor's [[Prototype]] for statics.
 */
function _inherits(subClass, superClass) {
  var superIsUsable = typeof superClass === "function" || superClass === null;
  if (!superIsUsable) {
    throw new TypeError("Super expression must either be null or a function");
  }
  subClass.prototype = Object.create(superClass && superClass.prototype, {
    constructor: {
      value: subClass,
      writable: true,
      configurable: true
    }
  });
  if (superClass) {
    _setPrototypeOf(subClass, superClass);
  }
}
/**
 * Babel helper: sets an object's [[Prototype]], memoizing the best
 * available implementation (native vs. __proto__ assignment) on first use.
 */
function _setPrototypeOf(o, p) {
  var setter =
    Object.setPrototypeOf ||
    function _setPrototypeOf(obj, proto) {
      obj.__proto__ = proto;
      return obj;
    };
  _setPrototypeOf = setter;
  return setter(o, p);
}
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
import settings from '../../globals/js/settings';
import mixin from '../../globals/js/misc/mixin';
import createComponent from '../../globals/js/mixins/create-component';
import initComponentBySearch from '../../globals/js/mixins/init-component-by-search';
import handles from '../../globals/js/mixins/handles';
import eventMatches from '../../globals/js/misc/event-matches';
import on from '../../globals/js/misc/on';
import svgToggleClass from '../../globals/js/misc/svg-toggle-class';
/**
 * Converts an array-like collection (e.g. a NodeList) into a real Array.
 * @param {ArrayLike} arrayLike The collection to convert.
 * @returns {Array} A shallow copy as a true Array.
 */
var toArray = function toArray(arrayLike) {
  return [].slice.call(arrayLike);
};
var Search =
/*#__PURE__*/
function (_mixin) {
  _inherits(Search, _mixin);

  /**
   * Search with Options.
   * @extends CreateComponent
   * @extends InitComponentBySearch
   * @extends Handles
   * @param {HTMLElement} element The element working as the search component.
   * @param {object} [options] The component options
   * @property {string} [options.selectorInit]
   *   The selector to find search UIs with options.
   * @property {string} [options.selectorSearchView]
   *   The selector to find the search view icon containers.
   * @property {string} [options.selectorSearchInput]
   *   The selector to find the search input.
   * @property {string} [options.selectorClearIcon]
   *   The selector for the clear icon that clears the search box.
   * @property {string} [options.selectorIconContainer] The data attribute selector for the icon layout container.
   * @property {string} [options.classClearHidden] The class used to hide the clear icon.
   * @property {string} [options.classLayoutHidden] The class used to hide nonselected layout view.
   * @throws {Error} If no search input is found inside `element`.
   */
  function Search(element, options) {
    var _this;

    _classCallCheck(this, Search);

    _this = _possibleConstructorReturn(this, _getPrototypeOf(Search).call(this, element, options));

    var closeIcon = _this.element.querySelector(_this.options.selectorClearIcon);

    var input = _this.element.querySelector(_this.options.selectorSearchInput);

    if (!input) {
      throw new Error('Cannot find the search input.');
    }

    // Clicking the clear icon empties the input, refocuses it, and hides
    // the icon itself.
    if (closeIcon) {
      _this.manage(on(closeIcon, 'click', function () {
        svgToggleClass(closeIcon, _this.options.classClearHidden, true);
        input.value = '';
        input.focus();
      }));
    }

    // Clicking a layout-toggle icon container switches grid/list views.
    _this.manage(on(_this.element, 'click', function (evt) {
      var toggleItem = eventMatches(evt, _this.options.selectorIconContainer);

      if (toggleItem) _this.toggleLayout(toggleItem);
    }));

    // Show/hide the clear icon as the input's value changes.
    _this.manage(on(input, 'input', function (evt) {
      if (closeIcon) _this.showClear(evt.target.value, closeIcon);
    }));

    return _this;
  }
  /**
   * Toggles between the grid and list layout.
   * @param {HTMLElement} element The element containing the layout toggle.
   */

  _createClass(Search, [{
    key: "toggleLayout",
    value: function toggleLayout(element) {
      var _this2 = this;

      toArray(element.querySelectorAll(this.options.selectorSearchView)).forEach(function (item) {
        item.classList.toggle(_this2.options.classLayoutHidden);
      });
    }
    /**
     * Toggles the clear icon visibility
     * @param {string} value The current value of the search input (the icon is hidden when it is empty).
     * @param {HTMLElement} icon The element serving as close icon.
     */

  }, {
    key: "showClear",
    value: function showClear(value, icon) {
      svgToggleClass(icon, this.options.classClearHidden, value.length === 0);
    }
    /**
     * The component options.
     * If `options` is specified in the constructor,
     * {@linkcode Search.create .create()}, or {@linkcode Search.init .init()},
     * properties in this object are overriden for the instance being created
     * and how {@linkcode Search.init .init()} works.
     * @member Search.options
     * @type {object}
     * @property {string} [options.selectorInit]
     *   The selector to find search UIs with options.
     * @property {string} [options.selectorSearchView]
     *   The selector to find the search view icon containers.
     * @property {string} [options.selectorSearchInput]
     *   The selector to find the search input.
     * @property {string} [options.selectorClearIcon]
     *   The selector for the clear icon that clears the search box.
     * @property {string} [options.selectorIconContainer] The data attribute selector for the icon layout container.
     * @property {string} [options.classClearHidden] The class used to hide the clear icon.
     * @property {string} [options.classLayoutHidden] The class used to hide nonselected layout view.
     */

  }], [{
    key: "options",
    get: function get() {
      var prefix = settings.prefix;
      return {
        selectorInit: '[data-search]',
        selectorSearchView: '[data-search-view]',
        selectorSearchInput: ".".concat(prefix, "--search-input"),
        selectorClearIcon: ".".concat(prefix, "--search-close"),
        selectorIconContainer: ".".concat(prefix, "--search-button[data-search-toggle]"),
        classClearHidden: "".concat(prefix, "--search-close--hidden"),
        classLayoutHidden: "".concat(prefix, "--search-view--hidden")
      };
    }
    /**
     * The map associating DOM element and search instance.
     * @member Search.components
     * @type {WeakMap}
     */

  }]);

  Search.components = new WeakMap();
  return Search;
}(mixin(createComponent, initComponentBySearch, handles));
export default Search;
<|start_filename|>packages/core/src/styles/vendor/@carbon/type/src/__tests__/scale-test.js<|end_filename|>
/**
* Copyright IBM Corp. 2018, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*
* @jest-environment node
*/
import { getTypeSize, scale } from '../scale';
describe('scale', () => {
  it('should export the type scale', () => {
    // Snapshot guards against accidental changes to the generated scale values.
    expect(scale).toMatchSnapshot();
  });
  describe('getTypeSize', () => {
    it('should return the base font for steps <= 1', () => {
      // Steps at or below 1 clamp to the 12px base size.
      expect(getTypeSize(1)).toBe(12);
      expect(getTypeSize(0)).toBe(12);
    });
    it('should support steps greater than 1', () => {
      // Larger steps only need to compute without throwing here; exact
      // values are covered by the scale snapshot above.
      expect(() => {
        getTypeSize(2);
        getTypeSize(10);
        getTypeSize(20);
      }).not.toThrow();
    });
  });
});
<|start_filename|>packages/core/src/styles/vendor/@carbon/type/src/__tests__/fontWeight-test.js<|end_filename|>
/**
* Copyright IBM Corp. 2018, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*
* @jest-environment node
*/
import { fontWeights, fontWeight } from '../fontWeight';
import { print } from '../print';
describe('fontWeight', () => {
  it('should export the supported font weights', () => {
    // Snapshot guards the set of supported weight names and values.
    expect(fontWeights).toMatchSnapshot();
  });
  it('should support getting the quoted string for a font weight', () => {
    expect(fontWeight('light')).toEqual({
      fontWeight: fontWeights.light,
    });
  });
  it('should error out if trying to get a font weight that does not exist', () => {
    expect(() => {
      fontWeight('<unknown>');
    }).toThrow();
  });
  it('should be printable', () => {
    // print() renders the returned style object as a CSS declaration string.
    expect(print(fontWeight('regular'))).toMatchSnapshot();
  });
});
<|start_filename|>packages/core/src/styles/vendor/@carbon/type/src/print.js<|end_filename|>
/**
* Copyright IBM Corp. 2018, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
/**
 * Renders a style object as a newline-separated list of CSS declarations,
 * converting camelCase keys to param-case (e.g. `fontWeight` ->
 * `font-weight: <value>;`).
 *
 * @param {object} block The style object to print.
 * @returns {string} The CSS declaration list ('' for an empty object).
 */
export function print(block) {
  return (
    Object.keys(block)
      // Short-circuit on the foreign key 'breakpoints'. This is used in our
      // tokens for fluid type and should not be printed. In the future, we
      // should tie this to media query outputs.
      .filter(key => key !== 'breakpoints')
      .map(key => `${paramCase(key)}: ${block[key]};`)
      // Joining after the filter fixes a bug in the previous reduce-based
      // implementation, which emitted a leading '\n' whenever 'breakpoints'
      // happened to be the first key.
      .join('\n')
  );
}
/**
 * Converts a camelCase identifier to param-case by prefixing every
 * character that equals its own upper-case form (upper-case letters, and
 * also digits/symbols, matching the original behavior) with a hyphen and
 * lower-casing it.
 * @param {string} string The identifier to convert.
 * @returns {string} The param-case form.
 */
function paramCase(string) {
  return Array.prototype.map
    .call(string, function (character) {
      return character === character.toUpperCase()
        ? '-' + character.toLowerCase()
        : character;
    })
    .join('');
}
<|start_filename|>packages/core/src/styles/vendor/@carbon/umd/index.js<|end_filename|>
(function (global, factory) {
  // UMD wrapper: prefer CommonJS, then AMD, otherwise attach to the host
  // global object as `CarbonLayout`.
  typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
  typeof define === 'function' && define.amd ? define(['exports'], factory) :
  (factory((global.CarbonLayout = {})));
}(this, (function (exports) { 'use strict';

  /**
   * Copyright IBM Corp. 2018, 2018
   *
   * This source code is licensed under the Apache-2.0 license found in the
   * LICENSE file in the root directory of this source tree.
   */
  // Token names exposed for theming tooling; the `unstable_` prefix marks
  // this list as experimental API.
  var unstable_tokens = [// Spacing
  'spacing01', 'spacing02', 'spacing03', 'spacing04', 'spacing05', 'spacing06', 'spacing07', 'spacing08', 'spacing09', 'spacing10', 'spacing11', 'spacing12', // Fluid spacing
  'fluidSpacing01', 'fluidSpacing02', 'fluidSpacing03', 'fluidSpacing04', // Layout
  'layout01', 'layout02', 'layout03', 'layout04', 'layout05', 'layout06', 'layout07', // Containers
  'container01', 'container02', 'container03', 'container04', 'container05', // Icon sizes
  'iconSize01', 'iconSize02'];

  /**
   * Copyright IBM Corp. 2018, 2018
   *
   * This source code is licensed under the Apache-2.0 license found in the
   * LICENSE file in the root directory of this source tree.
   */
  // Default, Use with em() and rem() functions
  var baseFontSize = 16;
  /**
   * Convert a given px unit to a rem unit
   * @param {number} px
   * @returns {string}
   */

  function rem(px) {
    return "".concat(px / baseFontSize, "rem");
  }
  /**
   * Convert a given px unit to a em unit
   * @param {number} px
   * @returns {string}
   */

  function em(px) {
    return "".concat(px / baseFontSize, "em");
  }
  /**
   * Convert a given px unit to its string representation
   * @param {number} value - number of pixels
   * @returns {string}
   */

  function px(value) {
    return "".concat(value, "px");
  } // Breakpoint
  // Initial map of our breakpoints and their values

  var breakpoints = {
    sm: {
      width: rem(320),
      columns: 4,
      margin: '0'
    },
    md: {
      width: rem(672),
      columns: 8,
      margin: rem(16)
    },
    lg: {
      width: rem(1056),
      columns: 16,
      margin: rem(16)
    },
    xlg: {
      width: rem(1312),
      columns: 16,
      margin: rem(16)
    },
    max: {
      width: rem(1584),
      columns: 16,
      margin: rem(24)
    }
  };

  // Media query string for viewports at or above the named breakpoint.
  function breakpointUp(name) {
    return "@media (min-width: ".concat(breakpoints[name].width, ")");
  }

  // Media query string for viewports at or below the named breakpoint.
  function breakpointDown(name) {
    return "@media (max-width: ".concat(breakpoints[name].width, ")");
  }

  // Alias: `breakpoint(...)` forwards to breakpointUp (mobile-first default).
  function breakpoint() {
    return breakpointUp.apply(void 0, arguments);
  } // Mini-unit

  var miniUnit = 8;

  // Scales the 8px mini-unit by `count` and renders it in rem.
  function miniUnits(count) {
    return rem(miniUnit * count);
  } // Spacing

  var spacing01 = miniUnits(0.25);
  var spacing02 = miniUnits(0.5);
  var spacing03 = miniUnits(1);
  var spacing04 = miniUnits(1.5);
  var spacing05 = miniUnits(2);
  var spacing06 = miniUnits(3);
  var spacing07 = miniUnits(4);
  var spacing08 = miniUnits(5);
  var spacing09 = miniUnits(6);
  var spacing10 = miniUnits(8);
  var spacing11 = miniUnits(10);
  var spacing12 = miniUnits(12);
  var spacing = [spacing01, spacing02, spacing03, spacing04, spacing05, spacing06, spacing07, spacing08, spacing09, spacing10, spacing11, spacing12]; // Fluid spacing

  var fluidSpacing01 = 0;
  var fluidSpacing02 = '2vw';
  var fluidSpacing03 = '5vw';
  var fluidSpacing04 = '10vw';
  var fluidSpacing = [fluidSpacing01, fluidSpacing02, fluidSpacing03, fluidSpacing04]; // Layout

  var layout01 = miniUnits(2);
  var layout02 = miniUnits(3);
  var layout03 = miniUnits(4);
  var layout04 = miniUnits(6);
  var layout05 = miniUnits(8);
  var layout06 = miniUnits(12);
  var layout07 = miniUnits(20);
  var layout = [layout01, layout02, layout03, layout04, layout05, layout06, layout07]; // Container

  var container01 = miniUnits(3);
  var container02 = miniUnits(4);
  var container03 = miniUnits(5);
  var container04 = miniUnits(6);
  var container05 = miniUnits(8);
  var container = [container01, container02, container03, container04, container05]; // Icon

  var iconSize01 = '1rem';
  var iconSize02 = '1.25rem';
  var iconSize = [iconSize01, iconSize02];

  // Public API of the bundle.
  exports.unstable_tokens = unstable_tokens;
  exports.baseFontSize = baseFontSize;
  exports.rem = rem;
  exports.em = em;
  exports.px = px;
  exports.breakpoints = breakpoints;
  exports.breakpointUp = breakpointUp;
  exports.breakpointDown = breakpointDown;
  exports.breakpoint = breakpoint;
  exports.miniUnit = miniUnit;
  exports.miniUnits = miniUnits;
  exports.spacing01 = spacing01;
  exports.spacing02 = spacing02;
  exports.spacing03 = spacing03;
  exports.spacing04 = spacing04;
  exports.spacing05 = spacing05;
  exports.spacing06 = spacing06;
  exports.spacing07 = spacing07;
  exports.spacing08 = spacing08;
  exports.spacing09 = spacing09;
  exports.spacing10 = spacing10;
  exports.spacing11 = spacing11;
  exports.spacing12 = spacing12;
  exports.spacing = spacing;
  exports.fluidSpacing01 = fluidSpacing01;
  exports.fluidSpacing02 = fluidSpacing02;
  exports.fluidSpacing03 = fluidSpacing03;
  exports.fluidSpacing04 = fluidSpacing04;
  exports.fluidSpacing = fluidSpacing;
  exports.layout01 = layout01;
  exports.layout02 = layout02;
  exports.layout03 = layout03;
  exports.layout04 = layout04;
  exports.layout05 = layout05;
  exports.layout06 = layout06;
  exports.layout07 = layout07;
  exports.layout = layout;
  exports.container01 = container01;
  exports.container02 = container02;
  exports.container03 = container03;
  exports.container04 = container04;
  exports.container05 = container05;
  exports.container = container;
  exports.iconSize01 = iconSize01;
  exports.iconSize02 = iconSize02;
  exports.iconSize = iconSize;

  Object.defineProperty(exports, '__esModule', { value: true });

})));
<|start_filename|>packages/core/src/styles/vendor/carbon-components/es/components/toolbar/toolbar.js<|end_filename|>
/**
 * Babel helper: a `typeof` that reports "symbol" correctly on engines whose
 * Symbol is polyfilled. Memoizes the chosen implementation by overwriting
 * the `_typeof` binding on first call.
 */
function _typeof(obj) {
  if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") {
    // Native Symbol support: the built-in operator is already correct.
    _typeof = function _typeof(value) {
      return typeof value;
    };
  } else {
    // Polyfilled Symbol: detect symbol instances structurally.
    _typeof = function _typeof(value) {
      var isPolyfilledSymbol =
        value &&
        typeof Symbol === "function" &&
        value.constructor === Symbol &&
        value !== Symbol.prototype;
      return isPolyfilledSymbol ? "symbol" : typeof value;
    };
  }
  return _typeof(obj);
}
/**
 * Babel helper: guards a transpiled class constructor against being invoked
 * without `new`.
 */
function _classCallCheck(instance, Constructor) {
  var calledWithNew = instance instanceof Constructor;
  if (!calledWithNew) {
    throw new TypeError("Cannot call a class as a function");
  }
}
/**
 * Babel helper: installs each property descriptor from `props` onto
 * `target`. Descriptors default to non-enumerable and configurable, and
 * data properties are made writable.
 */
function _defineProperties(target, props) {
  props.forEach(function (descriptor) {
    descriptor.enumerable = descriptor.enumerable || false;
    descriptor.configurable = true;
    if ("value" in descriptor) {
      descriptor.writable = true;
    }
    Object.defineProperty(target, descriptor.key, descriptor);
  });
}
/**
 * Babel helper: attaches prototype members and static members (given as
 * property-descriptor lists) to a transpiled class constructor.
 * @returns The constructor, for chaining.
 */
function _createClass(Constructor, protoProps, staticProps) {
  if (protoProps) {
    _defineProperties(Constructor.prototype, protoProps);
  }
  if (staticProps) {
    _defineProperties(Constructor, staticProps);
  }
  return Constructor;
}
/**
 * Babel inheritance helper: returns the super() call result when it is an
 * object or function (a constructor override), otherwise falls back to
 * `self` after verifying super() has actually run.
 */
function _possibleConstructorReturn(self, call) {
  var overridesThis =
    call && (_typeof(call) === "object" || typeof call === "function");
  return overridesThis ? call : _assertThisInitialized(self);
}
/**
 * Babel inheritance helper: ensures `this` was initialised by a super()
 * call before a derived constructor uses it.
 */
function _assertThisInitialized(self) {
  if (self !== void 0) {
    return self;
  }
  throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
}
/**
 * Babel helper: resolves an object's [[Prototype]], memoizing the best
 * available implementation (native vs. __proto__ fallback) on first use.
 */
function _getPrototypeOf(o) {
  var resolver = Object.setPrototypeOf
    ? Object.getPrototypeOf
    : function _getPrototypeOf(obj) {
        return obj.__proto__ || Object.getPrototypeOf(obj);
      };
  _getPrototypeOf = resolver;
  return resolver(o);
}
/**
 * Babel inheritance helper: wires `subClass` to extend `superClass`,
 * mirroring `class ... extends ...` semantics — the prototype chain for
 * instance members plus the constructor's [[Prototype]] for statics.
 */
function _inherits(subClass, superClass) {
  var superIsUsable = typeof superClass === "function" || superClass === null;
  if (!superIsUsable) {
    throw new TypeError("Super expression must either be null or a function");
  }
  subClass.prototype = Object.create(superClass && superClass.prototype, {
    constructor: {
      value: subClass,
      writable: true,
      configurable: true
    }
  });
  if (superClass) {
    _setPrototypeOf(subClass, superClass);
  }
}
/**
 * Babel helper: sets an object's [[Prototype]], memoizing the best
 * available implementation (native vs. __proto__ assignment) on first use.
 */
function _setPrototypeOf(o, p) {
  var setter =
    Object.setPrototypeOf ||
    function _setPrototypeOf(obj, proto) {
      obj.__proto__ = proto;
      return obj;
    };
  _setPrototypeOf = setter;
  return setter(o, p);
}
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
import settings from '../../globals/js/settings';
import mixin from '../../globals/js/misc/mixin';
import createComponent from '../../globals/js/mixins/create-component';
import initComponentBySearch from '../../globals/js/mixins/init-component-by-search';
import handles from '../../globals/js/mixins/handles';
import eventMatches from '../../globals/js/misc/event-matches';
import on from '../../globals/js/misc/on';
/**
 * Converts an array-like collection (e.g. a NodeList) into a real Array.
 * @param {ArrayLike} arrayLike The collection to convert.
 * @returns {Array} A shallow copy as a true Array.
 */
var toArray = function toArray(arrayLike) {
  return [].slice.call(arrayLike);
};
var Toolbar =
/*#__PURE__*/
function (_mixin) {
  _inherits(Toolbar, _mixin);

  /**
   * Toolbar.
   * @extends CreateComponent
   * @extends InitComponentBySearch
   * @extends Handles
   * @param {HTMLElement} element The element working as a toolbar.
   * @param {object} [options] The component options.
   */
  function Toolbar(element, options) {
    var _this;

    _classCallCheck(this, Toolbar);

    _this = _possibleConstructorReturn(this, _getPrototypeOf(Toolbar).call(this, element, options));

    // The toolbar is bound to a table via the data-table-target attribute;
    // without it, row-height toggling has nothing to act on.
    if (!_this.element.dataset.tableTarget) {
      console.warn('There is no table bound to this toolbar!'); // eslint-disable-line no-console
    } else {
      var boundTable = _this.element.ownerDocument.querySelector(_this.element.dataset.tableTarget);

      var rowHeightBtns = _this.element.querySelector(_this.options.selectorRowHeight);

      if (rowHeightBtns) {
        _this.manage(on(rowHeightBtns, 'click', function (event) {
          _this._handleRowHeightChange(event, boundTable);
        })); // toArray(this.element.querySelectorAll(this.options.selectorRowHeight)).forEach((item) => {
        //   item.addEventListener('click', (event) => { this._handleRowHeightChange(event, boundTable); });
        // });
      }
    }

    // Document-level listeners: close the search input on Escape and manage
    // its open/closed state on any click.
    _this.manage(on(_this.element.ownerDocument, 'keydown', function (evt) {
      _this._handleKeyDown(evt);
    }));

    _this.manage(on(_this.element.ownerDocument, 'click', function (evt) {
      _this._handleDocumentClick(evt);
    }));

    return _this;
  }
  /**
   * Handles toggling of active state of the toolbar search input
   * @param {Event} event The event triggering this method.
   */

  _createClass(Toolbar, [{
    key: "_handleDocumentClick",
    value: function _handleDocumentClick(event) {
      var _this2 = this;

      var searchInput = eventMatches(event, this.options.selectorSearch);
      var isOfSelfSearchInput = searchInput && this.element.contains(searchInput);

      // Clicking this toolbar's own search input toggles it open/closed and
      // focuses the inner <input> when opening.
      if (isOfSelfSearchInput) {
        var shouldBeOpen = isOfSelfSearchInput && !this.element.classList.contains(this.options.classSearchActive);
        searchInput.classList.toggle(this.options.classSearchActive, shouldBeOpen);

        if (shouldBeOpen) {
          searchInput.querySelector('input').focus();
        }
      }

      // Any click outside a toolbar closes every other toolbar's search input
      // in the document.
      var targetComponentElement = eventMatches(event, this.options.selectorInit);
      toArray(this.element.ownerDocument.querySelectorAll(this.options.selectorSearch)).forEach(function (item) {
        if (!targetComponentElement || !targetComponentElement.contains(item)) {
          item.classList.remove(_this2.options.classSearchActive);
        }
      });
    }
    /**
     * Handles toggling of active state of the toolbar search input via the keyboard
     * @param {Event} event The event triggering this method.
     */

  }, {
    key: "_handleKeyDown",
    value: function _handleKeyDown(event) {
      var searchInput = eventMatches(event, this.options.selectorSearch);

      // keyCode 27 === Escape: close the search input.
      if (searchInput && event.which === 27) {
        searchInput.classList.remove(this.options.classSearchActive);
      }
    }
    /**
     * Handles toggling of the row height of the associated table
     * @param {Event} event The event triggering this method.
     * @param {HTMLElement} boundTable The table associated with the toolbar.
     */

  }, {
    key: "_handleRowHeightChange",
    value: function _handleRowHeightChange(event, boundTable) {
      // Reads the checked radio input inside the clicked row-height control.
      var _event$currentTarget$ = event.currentTarget.querySelector('input:checked'),
          value = _event$currentTarget$.value;

      if (value === 'tall') {
        boundTable.classList.add(this.options.classTallRows);
      } else {
        boundTable.classList.remove(this.options.classTallRows);
      }
    }
    /**
     * The map associating DOM element and Toolbar UI instance.
     * @type {WeakMap}
     */

  }], [{
    key: "options",

    /**
     * The component options.
     * If `options` is specified in the constructor,
     * properties in this object are overriden for the instance being created.
     * @property {string} selectorInit The CSS selector to find toolbar instances.
     * @property {string} selectorSearch The CSS selector to find search inputs in a toolbar.
     * @property {string} selectorRowHeight The CSS selector to find the row height inputs in a toolbar.
     * @property {string} classTallRows The CSS class for making table rows into tall rows.
     * @property {string} classSearchActive The CSS class the active state of the search input.
     */
    get: function get() {
      var prefix = settings.prefix;
      return {
        selectorInit: '[data-toolbar]',
        selectorSearch: '[data-toolbar-search]',
        selectorRowHeight: '[data-row-height]',
        classTallRows: "".concat(prefix, "--responsive-table--tall"),
        classSearchActive: "".concat(prefix, "--toolbar-search--active")
      };
    }
  }]);

  Toolbar.components = new WeakMap();
  return Toolbar;
}(mixin(createComponent, initComponentBySearch, handles));
export default Toolbar;
<|start_filename|>packages/core/src/styles/vendor/carbon-components/src/components/checkbox/checkbox.js<|end_filename|>
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
import settings from '../../globals/js/settings';
import mixin from '../../globals/js/misc/mixin';
import createComponent from '../../globals/js/mixins/create-component';
import initComponentBySearch from '../../globals/js/mixins/init-component-by-search';
import handles from '../../globals/js/mixins/handles';
import on from '../../globals/js/misc/on';
// Valid checkbox states accepted by Checkbox#setState(); 'mixed' represents
// the indeterminate state.
const stateChangeTypes = {
  true: 'true',
  false: 'false',
  mixed: 'mixed',
};
class Checkbox extends mixin(createComponent, initComponentBySearch, handles) {
  /**
   * Checkbox UI.
   * @extends CreateComponent
   * @extends InitComponentBySearch
   * @extends Handles
   * @param {HTMLElement} element The element working as a checkbox UI.
   * @param {object} [options] The component options.
   */
  constructor(element, options) {
    super(element, options);
    this.manage(
      on(this.element, 'click', event => {
        this._handleClick(event);
      })
    );
    this.manage(
      on(this.element, 'focus', event => {
        this._handleFocus(event);
      })
    );
    this.manage(
      on(this.element, 'blur', event => {
        this._handleBlur(event);
      })
    );
    // Sync attributes with any initial indeterminate/checked state.
    this._indeterminateCheckbox();
    this._initCheckbox();
  }

  // Mirrors the input's checked state onto its `checked`/`aria-checked`
  // attributes and, when nested inside a label, onto the label's
  // contained-checkbox-state data attribute.
  _handleClick() {
    if (this.element.checked === true) {
      this.element.setAttribute('checked', '');
      this.element.setAttribute('aria-checked', 'true');
      this.element.checked = true;
      // nested checkboxes inside labels
      if (
        this.element.parentElement.classList.contains(this.options.classLabel)
      ) {
        this.element.parentElement.setAttribute(
          this.options.attribContainedCheckboxState,
          'true'
        );
      }
    } else if (this.element.checked === false) {
      this.element.removeAttribute('checked');
      this.element.setAttribute('aria-checked', 'false');
      this.element.checked = false;
      // nested checkboxes inside labels
      if (
        this.element.parentElement.classList.contains(this.options.classLabel)
      ) {
        this.element.parentElement.setAttribute(
          this.options.attribContainedCheckboxState,
          'false'
        );
      }
    }
  }

  // Adds the focused style to the wrapping label, if any.
  _handleFocus() {
    if (
      this.element.parentElement.classList.contains(this.options.classLabel)
    ) {
      this.element.parentElement.classList.add(this.options.classLabelFocused);
    }
  }

  // Removes the focused style from the wrapping label, if any.
  _handleBlur() {
    if (
      this.element.parentElement.classList.contains(this.options.classLabel)
    ) {
      this.element.parentElement.classList.remove(
        this.options.classLabelFocused
      );
    }
  }

  /**
   * Sets the new checkbox state.
   * @param {boolean|string} [state]
   *   The new checkbox state to set. `mixed` to put checkbox in indeterminate state.
   *   If omitted, this method simply makes the style reflect `aria-checked` attribute.
   * @throws {TypeError} If `state` is not one of `true`, `false` or `mixed`.
   */
  setState(state) {
    if (state === undefined || stateChangeTypes[state] === undefined) {
      throw new TypeError('setState expects a value of true, false or mixed.');
    }
    this.element.setAttribute('aria-checked', state);
    this.element.indeterminate = state === stateChangeTypes.mixed;
    this.element.checked = state === stateChangeTypes.true;
    // Propagate the state to the closest state-preserving container, if any.
    const container = this.element.closest(
      this.options.selectorContainedCheckboxState
    );
    if (container) {
      container.setAttribute(this.options.attribContainedCheckboxState, state);
    }
  }

  /**
   * Enables/disables the checkbox and propagates the disabled state to the
   * closest disabled-state-preserving container.
   * @param {boolean} value `true` to disable the checkbox.
   * @throws {TypeError} If `value` is omitted.
   */
  setDisabled(value) {
    if (value === undefined) {
      throw new TypeError(
        'setDisabled expects a boolean value of true or false'
      );
    }
    if (value === true) {
      this.element.setAttribute('disabled', true);
    } else if (value === false) {
      this.element.removeAttribute('disabled');
    }
    const container = this.element.closest(
      this.options.selectorContainedCheckboxDisabled
    );
    if (container) {
      container.setAttribute(
        this.options.attribContainedCheckboxDisabled,
        value
      );
    }
  }

  // Reconciles the indeterminate DOM property with the aria-checked="mixed"
  // attribute (in both directions) and reflects it on a wrapping label.
  _indeterminateCheckbox() {
    if (this.element.getAttribute('aria-checked') === 'mixed') {
      this.element.indeterminate = true;
    }
    if (this.element.indeterminate === true) {
      this.element.setAttribute('aria-checked', 'mixed');
    }
    if (
      this.element.parentElement.classList.contains(this.options.classLabel) &&
      this.element.indeterminate === true
    ) {
      this.element.parentElement.setAttribute(
        this.options.attribContainedCheckboxState,
        'mixed'
      );
    }
  }

  // Seeds aria-checked and the wrapping label's contained-state/disabled
  // attributes from the input's initial checked/disabled properties.
  _initCheckbox() {
    if (this.element.checked === true) {
      this.element.setAttribute('aria-checked', 'true');
    }
    if (
      this.element.parentElement.classList.contains(this.options.classLabel) &&
      this.element.checked
    ) {
      this.element.parentElement.setAttribute(
        this.options.attribContainedCheckboxState,
        'true'
      );
    }
    if (
      this.element.parentElement.classList.contains(this.options.classLabel)
    ) {
      this.element.parentElement.setAttribute(
        this.options.attribContainedCheckboxDisabled,
        'false'
      );
    }
    if (
      this.element.parentElement.classList.contains(this.options.classLabel) &&
      this.element.disabled
    ) {
      this.element.parentElement.setAttribute(
        this.options.attribContainedCheckboxDisabled,
        'true'
      );
    }
  }

  /**
   * The map associating DOM element and checkbox UI instance.
   * @member Checkbox.components
   * @type {WeakMap}
   */
  static components /* #__PURE_CLASS_PROPERTY__ */ = new WeakMap();

  /**
   * The component options.
   * If `options` is specified in the constructor, {@linkcode Checkbox.create .create()}, or {@linkcode Checkbox.init .init()},
   * properties in this object are overriden for the instance being create and how {@linkcode Checkbox.init .init()} works.
   * @member Checkbox.options
   * @type {object}
   * @property {string} selectorInit The data attribute to find checkbox UIs.
   * @property {string} selectorContainedCheckboxState The CSS selector to find a container of checkbox preserving checked state.
   * @property {string} selectorContainedCheckboxDisabled
   *   The CSS selector to find a container of checkbox preserving disabled state.
   * @property {string} classLabel The CSS class for the label.
   * @property {string} classLabelFocused The CSS class for the focused label.
   * @property {string} attribContainedCheckboxState The attribute name for the checked state of contained checkbox.
   * @property {string} attribContainedCheckboxDisabled The attribute name for the disabled state of contained checkbox.
   */
  static get options() {
    const { prefix } = settings;
    return {
      selectorInit: `.${prefix}--checkbox`,
      selectorContainedCheckboxState: '[data-contained-checkbox-state]',
      selectorContainedCheckboxDisabled: '[data-contained-checkbox-disabled]',
      classLabel: `${prefix}--checkbox-label`,
      classLabelFocused: `${prefix}--checkbox-label__focus`,
      attribContainedCheckboxState: 'data-contained-checkbox-state',
      attribContainedCheckboxDisabled: 'data-contained-checkbox-disabled',
    };
  }

  // Exposed so callers can reference the valid setState() values.
  static stateChangeTypes /* #__PURE_CLASS_PROPERTY__ */ = stateChangeTypes;
}
export default Checkbox;
<|start_filename|>packages/core/src/styles/vendor/@carbon/src/tokens.js<|end_filename|>
/**
* Copyright IBM Corp. 2018, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
/**
 * Names of the layout tokens exported by this package. The `unstable_`
 * prefix marks the list as experimental API that may change between
 * releases.
 * @type {string[]}
 */
export const unstable_tokens = [
  // Spacing
  'spacing01',
  'spacing02',
  'spacing03',
  'spacing04',
  'spacing05',
  'spacing06',
  'spacing07',
  'spacing08',
  'spacing09',
  'spacing10',
  'spacing11',
  'spacing12',
  // Fluid spacing
  'fluidSpacing01',
  'fluidSpacing02',
  'fluidSpacing03',
  'fluidSpacing04',
  // Layout
  'layout01',
  'layout02',
  'layout03',
  'layout04',
  'layout05',
  'layout06',
  'layout07',
  // Containers
  'container01',
  'container02',
  'container03',
  'container04',
  'container05',
  // Icon sizes
  'iconSize01',
  'iconSize02',
];
<|start_filename|>packages/core/src/styles/vendor/carbon-components/es/globals/js/misc/on.js<|end_filename|>
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
/**
 * Attaches an event listener to `element` and returns a handle whose
 * `release()` detaches it again. All arguments after `element` are passed
 * straight through to addEventListener/removeEventListener.
 * @param {EventTarget} element The target to listen on.
 * @returns {{release: Function}} Handle whose release() unbinds and returns null.
 */
export default function on(element) {
  var listenerArgs = Array.prototype.slice.call(arguments, 1);
  element.addEventListener.apply(element, listenerArgs);
  return {
    release: function release() {
      element.removeEventListener.apply(element, listenerArgs);
      return null;
    }
  };
}
<|start_filename|>packages/vue/helpers/commons.js<|end_filename|>
// Produces a randomized variation of `currentVal`, clamped to stay within
// a factor of two of the original (and no less than half of it for positive
// inputs). The result is floored to an integer.
export const randomizeValue = currentVal => {
  // Same two Math.random() draws as before, in the same order.
  const jittered = currentVal * Math.random() * (Math.random() * 5);
  const firstTry = Math.max(0.5 * currentVal, jittered);
  const bounded =
    currentVal > 0
      ? Math.min(2 * currentVal, firstTry)
      : Math.max(2 * currentVal, firstTry);
  return Math.floor(bounded);
};
export const colors = [
'#6e32c9',
'#1191e6',
'#006161',
'#a11950',
'#fb4b53',
'#570408',
'#198038',
'#003d73',
'#ee538b',
'#b28600',
'#009c98',
'#002b50',
'#8a3800',
'#a66efa',
];
<|start_filename|>packages/core/src/styles/vendor/@carbon/type/src/__tests__/reset-test.js<|end_filename|>
/**
* Copyright IBM Corp. 2018, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*
* @jest-environment node
*/
import { reset } from '../reset';
import { print } from '../print';
// Verifies the shared typographic reset: rules must exist for the root
// elements, and their serialized (printed) form is snapshot-guarded against
// accidental changes.
describe('reset', () => {
  it('should set styles for `html` and `body`', () => {
    expect(reset.html).toBeDefined();
    expect(reset.body).toBeDefined();
  });
  it('should be printable', () => {
    expect(print(reset.html)).toMatchSnapshot();
    expect(print(reset.body)).toMatchSnapshot();
  });
});
<|start_filename|>packages/core/src/styles/vendor/@carbon/type/src/__tests__/fontFamily-test.js<|end_filename|>
/**
* Copyright IBM Corp. 2018, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*
* @jest-environment node
*/
import { fontFamilies, fontFamily } from '../fontFamily';
import { print } from '../print';
// Covers the fontFamily lookup helper: the exported family table, the
// happy-path lookup, the error path for unknown names, and the printed form.
describe('fontFamily', () => {
  it('should export the supported font families', () => {
    expect(fontFamilies).toMatchSnapshot();
  });
  it('should support getting the quoted string for a font family', () => {
    expect(fontFamily('mono')).toEqual({
      fontFamily: fontFamilies.mono,
    });
  });
  it('should error out if trying to get a font that does not exist', () => {
    expect(() => {
      fontFamily('<unknown>');
    }).toThrow();
  });
  it('should be printable', () => {
    expect(print(fontFamily('mono'))).toMatchSnapshot();
  });
});
<|start_filename|>packages/core/src/styles/vendor/@carbon/type/src/fontWeight.js<|end_filename|>
/**
* Copyright IBM Corp. 2018, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
// Supported IBM Plex font weights, keyed by name.
export const fontWeights = {
  light: 300,
  regular: 400,
  semibold: 600,
};

/**
 * Looks up a font weight by name.
 * @param {string} weight One of the keys of `fontWeights`.
 * @returns {{fontWeight: number}} Style object for the weight.
 * @throws {Error} When the name is not a supported weight.
 */
export function fontWeight(weight) {
  const value = fontWeights[weight];
  if (!value) {
    const supported = Object.keys(fontWeights).join(', ');
    throw new Error(
      `Unable to find font weight: \`${weight}\`. Expected one of: ` +
        `[${supported}]`
    );
  }
  return { fontWeight: value };
}
<|start_filename|>packages/core/src/styles/vendor/carbon-components/es/bundle.js<|end_filename|>
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
import './globals/js/boot';
export * from './index';
export { default as watch } from './globals/js/watch';
<|start_filename|>packages/core/src/styles/vendor/carbon-components/src/globals/js/mixins/evented-show-hide-state.js<|end_filename|>
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
import eventedState from './evented-state';
import getLaunchingDetails from '../misc/get-launching-details';
function eventedShowHideState(ToMix) {
  /**
   * Mix-in class that adds `show()`/`hide()` convenience wrappers around the
   * evented-state machinery.
   * @class EventedShowHideState
   */
  class EventedShowHideState extends ToMix {
    /**
     * Switch to 'shown' state.
     * @param [evtOrElem] The launching event or element.
     * @param {EventedState~changeStateCallback} [callback] The callback.
     */
    show(evtOrElem, callback) {
      // Supports the callback-only call signature: when the first argument
      // is absent or is itself a function, it is treated as the callback.
      const cb =
        !evtOrElem || typeof evtOrElem === 'function' ? evtOrElem : callback;
      this.changeState('shown', getLaunchingDetails(evtOrElem), cb);
    }

    /**
     * Switch to 'hidden' state.
     * @param [evtOrElem] The launching event or element.
     * @param {EventedState~changeStateCallback} [callback] The callback.
     */
    hide(evtOrElem, callback) {
      const cb =
        !evtOrElem || typeof evtOrElem === 'function' ? evtOrElem : callback;
      this.changeState('hidden', getLaunchingDetails(evtOrElem), cb);
    }
  }
  return EventedShowHideState;
}

// Consumers get the base evented-state mixin together with this one.
const exports = [eventedState, eventedShowHideState];
export default exports;
<|start_filename|>packages/core/src/styles/vendor/carbon-components/src/components/search/search.js<|end_filename|>
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
import settings from '../../globals/js/settings';
import mixin from '../../globals/js/misc/mixin';
import createComponent from '../../globals/js/mixins/create-component';
import initComponentBySearch from '../../globals/js/mixins/init-component-by-search';
import handles from '../../globals/js/mixins/handles';
import eventMatches from '../../globals/js/misc/event-matches';
import on from '../../globals/js/misc/on';
import svgToggleClass from '../../globals/js/misc/svg-toggle-class';
const toArray = arrayLike => Array.prototype.slice.call(arrayLike);
class Search extends mixin(createComponent, initComponentBySearch, handles) {
  /**
   * Search with Options.
   * @extends CreateComponent
   * @extends InitComponentBySearch
   * @extends Handles
   * @param {HTMLElement} element The element working as the search component.
   * @param {object} [options] The component options
   * @property {string} [options.selectorInit]
   *   The selector to find search UIs with options.
   * @property {string} [options.selectorSearchView]
   *   The selector to find the search view icon containers.
   * @property {string} [options.selectorSearchInput]
   *   The selector to find the search input.
   * @property {string} [options.selectorClearIcon]
   *   The selector for the clear icon that clears the search box.
   * @property {string} [options.selectorIconContainer] The data attribute selector for the icon layout container.
   * @property {string} [options.classClearHidden] The class used to hide the clear icon.
   * @property {string} [options.classLayoutHidden] The class used to hide nonselected layout view.
   */
  constructor(element, options) {
    super(element, options);
    const closeIcon = this.element.querySelector(
      this.options.selectorClearIcon
    );
    // The search input is mandatory; the clear icon is optional.
    const input = this.element.querySelector(this.options.selectorSearchInput);
    if (!input) {
      throw new Error('Cannot find the search input.');
    }
    if (closeIcon) {
      // Clicking the clear icon hides the icon, empties the input and
      // refocuses it.
      this.manage(
        on(closeIcon, 'click', () => {
          svgToggleClass(closeIcon, this.options.classClearHidden, true);
          input.value = '';
          input.focus();
        })
      );
    }
    // Delegate clicks anywhere in the component to the layout-toggle icon
    // container (grid/list view switch).
    this.manage(
      on(this.element, 'click', evt => {
        const toggleItem = eventMatches(
          evt,
          this.options.selectorIconContainer
        );
        if (toggleItem) this.toggleLayout(toggleItem);
      })
    );
    // Show or hide the clear icon as the user types.
    this.manage(
      on(input, 'input', evt => {
        if (closeIcon) this.showClear(evt.target.value, closeIcon);
      })
    );
  }
  /**
   * Toggles between the grid and list layout.
   * @param {HTMLElement} element The element containing the layout toggle.
   */
  toggleLayout(element) {
    toArray(element.querySelectorAll(this.options.selectorSearchView)).forEach(
      item => {
        item.classList.toggle(this.options.classLayoutHidden);
      }
    );
  }
  /**
   * Toggles the clear icon visibility: hidden while the input is empty.
   * @param {string} value The current value of the search input.
   * @param {HTMLElement} icon The element serving as the clear icon.
   */
  showClear(value, icon) {
    svgToggleClass(icon, this.options.classClearHidden, value.length === 0);
  }
  /**
   * The component options.
   * If `options` is specified in the constructor,
   * {@linkcode Search.create .create()}, or {@linkcode Search.init .init()},
   * properties in this object are overridden for the instance being created
   * and how {@linkcode Search.init .init()} works.
   * @member Search.options
   * @type {object}
   * @property {string} [options.selectorInit]
   *   The selector to find search UIs with options.
   * @property {string} [options.selectorSearchView]
   *   The selector to find the search view icon containers.
   * @property {string} [options.selectorSearchInput]
   *   The selector to find the search input.
   * @property {string} [options.selectorClearIcon]
   *   The selector for the clear icon that clears the search box.
   * @property {string} [options.selectorIconContainer] The data attribute selector for the icon layout container.
   * @property {string} [options.classClearHidden] The class used to hide the clear icon.
   * @property {string} [options.classLayoutHidden] The class used to hide nonselected layout view.
   */
  static get options() {
    const { prefix } = settings;
    return {
      selectorInit: '[data-search]',
      selectorSearchView: '[data-search-view]',
      selectorSearchInput: `.${prefix}--search-input`,
      selectorClearIcon: `.${prefix}--search-close`,
      selectorIconContainer: `.${prefix}--search-button[data-search-toggle]`,
      classClearHidden: `${prefix}--search-close--hidden`,
      classLayoutHidden: `${prefix}--search-view--hidden`,
    };
  }
  /**
   * The map associating DOM element and search instance.
   * @member Search.components
   * @type {WeakMap}
   */
  static components /* #__PURE_CLASS_PROPERTY__ */ = new WeakMap();
}
export default Search;
<|start_filename|>packages/core/src/styles/vendor/@carbon/type/src/reset.js<|end_filename|>
/**
* Copyright IBM Corp. 2018, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
import { baseFontSize, px } from '@carbon/layout';
import { fontFamilies } from './fontFamily';
import { fontWeights } from './fontWeight';
// Base element styles used as the application-wide typographic reset.
export const reset = {
  html: {
    // Root font size; rem-based values elsewhere derive from this.
    fontSize: px(baseFontSize),
  },
  body: {
    fontFamily: fontFamilies.sans,
    fontWeight: fontWeights.regular,
    textRendering: 'optimizeLegibility',
    // Smoother glyph rendering on WebKit and macOS Firefox.
    '-webkit-font-smoothing': 'antialiased',
    '-moz-osx-font-smoothing': 'grayscale',
  },
  strong: {
    fontWeight: fontWeights.semibold,
  },
  code: {
    fontFamily: fontFamilies.mono,
  },
};
<|start_filename|>packages/core/src/styles/vendor/carbon-components/es/index.js<|end_filename|>
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
// ====================//
// Imports and Exports //
// ====================//
// Base Elements & Components
// -------------
// - JavaScript classes for use with components and base-elements.
// - The following statements import classes from actual locations to
// be consumed from this file instead of their actual locations.
export * from './globals/js/components';
export { default as settings } from './globals/js/settings';
<|start_filename|>packages/vue/vue.config.js<|end_filename|>
module.exports = {
chainWebpack: config => config.resolve.symlinks(false),
};
<|start_filename|>packages/core/src/styles/vendor/@carbon/type/src/scale.js<|end_filename|>
/**
* Copyright IBM Corp. 2018, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
/**
* Get the type size for the given step
* @param {number} step
* @returns {number}
*/
/**
 * Get the type size in pixels for the given step of the IBM type scale.
 * Step 1 is the 12px base; each subsequent step n adds
 * {FLOOR[(n - 2) / 4] + 1} * 2 px over the previous one.
 * @param {number} step
 * @returns {number}
 */
export function getTypeSize(step) {
  let size = 12;
  for (let n = 2; n <= step; n += 1) {
    // Yn = Yn-1 + {FLOOR[(n - 2) / 4] + 1} * 2
    size += Math.floor((n - 2) / 4 + 1) * 2;
  }
  return size;
}

/**
 * The default type scale for 23 steps. Inlined as an array here through running
 * the follow step:
 *
 * > Array.from({ length: 23 }, (_, i) => getTypeSize(i + 1))
 */
export const scale = [
  12,
  14,
  16,
  18,
  20,
  24,
  28,
  32,
  36,
  42,
  48,
  54,
  60,
  68,
  76,
  84,
  92,
  102,
  112,
  122,
  132,
  144,
  156,
];
<|start_filename|>packages/core/src/styles/vendor/carbon-components/src/globals/js/mixins/handles.js<|end_filename|>
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
export default function(ToMix) {
  /**
   * Mix-in class to manage handles in component.
   * Managed handles are automatically released when the component with this class mixed in is released.
   * @class Handles
   * @implements Handle
   */
  class Handles extends ToMix {
    /**
     * The handles managed by this component.
     * Releasing this component releases the handles.
     * @type {Set<Handle>}
     */
    handles = new Set();
    /**
     * Manages the given handle.
     * @param {Handle} handle The handle to manage.
     * @returns {Handle} The given handle.
     */
    manage(handle) {
      this.handles.add(handle);
      return handle;
    }
    /**
     * Stop managing the given handle.
     * @param {Handle} handle The handle to stop managing.
     * @returns {Handle} The given handle.
     */
    unmanage(handle) {
      this.handles.delete(handle);
      return handle;
    }
    /**
     * Releases every managed handle, then delegates to the superclass's
     * release(). Each handle is removed from the set as soon as it is
     * released so no handle can be released twice.
     * @returns The value returned by the superclass release().
     */
    release() {
      this.handles.forEach(handle => {
        handle.release();
        this.handles.delete(handle);
      });
      return super.release();
    }
  }
  return Handles;
}
<|start_filename|>packages/core/src/styles/vendor/@carbon/type/src/fontFamily.js<|end_filename|>
/**
* Copyright IBM Corp. 2018, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
// Font family fallbacks for: IBM Plex Mono, IBM Plex Sans, IBM Plex Sans
// Condensed, IBM Plex Sans Hebrew, and IBM Plex Serif
// Font-family stacks for the supported IBM Plex families, keyed by name.
export const fontFamilies = {
  mono:
    "'IBM Plex Mono', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', Courier, monospace",
  sans: "'IBM Plex Sans', 'Helvetica Neue', Arial, sans-serif",
  sansCondensed:
    "'IBM Plex Sans Condensed', 'Helvetica Neue', Arial, sans-serif",
  sansHebrew:
    "'IBM Plex Sans Hebrew', 'Helvetica Hebrew', 'Arial Hebrew', sans-serif",
  serif: "'IBM Plex Serif', 'Georgia', Times, serif",
};

/**
 * Looks up the font-family stack for a supported IBM Plex family.
 * @param {string} name One of the keys of `fontFamilies`.
 * @returns {{fontFamily: string}} Style object for the family.
 * @throws {Error} When the family name is not supported.
 */
export function fontFamily(name) {
  const stack = fontFamilies[name];
  if (!stack) {
    const supported = Object.keys(fontFamilies).join(', ');
    throw new Error(
      `Unable to find font family: \`${name}\`. Expected one of: ` +
        `[${supported}]`
    );
  }
  return { fontFamily: stack };
}
| obambrough/carbon-charts |
<|start_filename|>scripts/clear-examples.js<|end_filename|>
const fs = require("fs-extra");
const glob = require("glob");

// Deletes every generated example artifact, keeping only the source spec
// (ExampleAPI.jrgen.json) in place.
const generatedPaths = glob.sync(
  __dirname + "/../examples/**/!(ExampleAPI.jrgen.json)"
);

generatedPaths.forEach((generatedPath) => {
  fs.removeSync(generatedPath);
});
<|start_filename|>src/jrgen.js<|end_filename|>
#!/usr/bin/env node
// jrgen CLI entry point: registers one subcommand per discovered blueprint
// and generates that blueprint's artifacts from a jrgen spec file.
const path = require("path");
const program = require("commander");
const version = require("../package.json").version;
const utils = require(path.join(__dirname, "utils.js"));

program
  .version(version)
  .option(
    "-o, --outdir <path>",
    "Output directory. Defaults to current working directory."
  )
  .on("--help", () => {
    // Extra usage examples appended to commander's generated help output.
    console.log("");
    console.log("");
    console.log("  Examples:");
    console.log("");
    console.log("    Create html documentation from 'API.jrgen.json':");
    console.log("    $ jrgen docs-html ~/API.jrgen.json");
    console.log("");
    console.log("    Create a postman specification from 'API.jrgen.json':");
    console.log("    $ jrgen spec-postman ~/API.jrgen.json");
    console.log("");
    console.log(
      "    Create a ts web client from 'API.jrgen.json' and write all generated files into the ./client subdirectory:"
    );
    console.log("    $ jrgen client-web-ts -o ./client ~/API.jrgen.json");
    console.log("");
    console.log("");
  });

// Each blueprint id (e.g. docs-html, client-web-ts) becomes a subcommand
// that takes the spec file path as its only argument.
const blueprints = utils.gatherBlueprints();
Object.keys(blueprints).forEach((key) => {
  program.command(key + " <specFilePath>").action(async (schemaFile, cmd) => {
    const schema = await utils.loadSchema(schemaFile);
    if (!schema) {
      // NOTE(review): this branch is also reached when the spec file exists
      // but could not be loaded/parsed, so the message below can mislead —
      // consider something like "Could not load spec file.".
      console.error("No spec file provided.");
      return;
    }
    // A blueprint module exports an async factory: spec in, templates+model out.
    const blueprint = await require(blueprints[key])(schema);
    const artifacts = utils.buildArtifacts(blueprint);
    utils.prettifyFileTree(artifacts);
    // NOTE(review): reading options directly off `program` only works with
    // older commander versions; newer ones require program.opts().outdir.
    utils.saveFileTreeTo(artifacts, program.outdir);
  });
});

program.parse(process.argv);
<|start_filename|>src/blueprints/client/web/ts/client-web-ts.jrgen.blueprint.js<|end_filename|>
const json2ts = require("json-schema-to-typescript");
const path = require("path");
const utils = require(path.join(__dirname, "../../../../", "utils.js"));
buildTypes = async (schema) => {
const json2tsOptions = {
bannerComment: "",
style: {
singleQuote: true,
},
};
let types = "";
for (const key of Object.keys(schema.methods)) {
if (schema.methods[key].params) {
types +=
(await json2ts.compile(
schema.methods[key].params,
key.replace(/\./g, "") + "RpcParams.json",
json2tsOptions
)) + "\n\n";
}
if (schema.methods[key].result) {
types +=
(await json2ts.compile(
schema.methods[key].result,
key.replace(/\./g, "") + "RpcResult.json",
json2tsOptions
)) + "\n\n";
}
}
for (const key of Object.keys(schema.definitions)) {
types +=
(await json2ts.compile(
schema.definitions[key],
key.replace(/\./g, "") + ".json",
json2tsOptions
)) + "\n\n";
}
return types;
};
module.exports = async (schema) => {
return {
templates: Object.entries(
utils.loadFileTreeFrom(path.join(__dirname, "templates"))
).reduce((acc, [templatePath, templateValue]) => {
if (templatePath.endsWith("api-client.ts.mustache")) {
templatePath = `${schema.info.title
.toLowerCase()
.replace(/\s/g, "-")}-client.ts.mustache`;
}
acc[templatePath] = templateValue;
return acc;
}, {}),
model: {
title: schema.info.title.replace(/\s/g, ""),
methods: Object.keys(schema.methods).map((key) => {
const methodSchema = schema.methods[key];
return {
functionName: key.replace(/\./g, "_"),
rpcName: key,
paramsType: methodSchema.params
? ":" + key.replace(/\./g, "") + "RpcParams"
: "?:undefined",
resultType: methodSchema.result
? key.replace(/\./g, "") + "RpcResult"
: "undefined",
};
}),
types: await buildTypes(schema),
},
};
};
<|start_filename|>src/blueprints/client/web/js/client-web-js.jrgen.blueprint.js<|end_filename|>
const path = require("path");
const utils = require(path.join(__dirname, "../../../../", "utils.js"));
module.exports = (schema) => {
return {
templates: Object.entries(
utils.loadFileTreeFrom(path.join(__dirname, "templates"))
).reduce((acc, [templatePath, templateValue]) => {
if (templatePath.endsWith("api-client.js.mustache")) {
templatePath = `${schema.info.title
.toLowerCase()
.replace(/\s/g, "-")}-client.js.mustache`;
}
acc[templatePath] = templateValue;
return acc;
}, {}),
model: {
title: schema.info.title.replace(/\s/g, ""),
methods: Object.keys(schema.methods).map((key) => {
return {
functionName: key.replace(/\./g, "_"),
rpcName: key,
};
}),
},
};
};
<|start_filename|>src/blueprints/server/nodejs/js/server-nodejs-js.jrgen.blueprint.js<|end_filename|>
const path = require("path");
const utils = require(path.join(__dirname, "../../../../", "utils.js"));
module.exports = (schema) => {
return {
templates: Object.entries(
utils.loadFileTreeFrom(path.join(__dirname, "templates"))
).reduce((acc, [templatePath, templateValue]) => {
if (templatePath.endsWith("api-server.js.mustache")) {
templatePath = `${schema.info.title
.toLowerCase()
.replace(/\s/g, "-")}-server.js.mustache`;
}
acc[templatePath] = templateValue;
return acc;
}, {}),
model: {
methods: Object.keys(schema.methods).map((key) => {
const methodSchema = schema.methods[key];
return {
rpcName: key,
result: JSON.stringify(
utils.generateExample(methodSchema.result),
null,
2
),
};
}),
},
};
};
<|start_filename|>src/blueprints/server/nodejs/js/templates/rpc-server.js<|end_filename|>
const http = require("http");
module.exports = class RpcServer {
constructor() {
this.api = {};
this.httpServer = http.createServer((request, response) => {
this.process(request, response);
});
}
listen(port, ip, callback) {
this.httpServer.listen(port, ip, callback);
}
process(request, response) {
response.statusCode = 200;
response.setHeader("Content-Type", "application/json");
response.setHeader("Access-Control-Allow-Origin", "*");
if (request.method !== "POST") {
response.statusCode = 404;
response.end();
return;
}
this.loadBody(request)
.then((body) => {
this.parseRPCMessage(body)
.then((rpc) => {
new Promise((resolve, reject) => {
this.api[rpc.method](rpc.params, resolve, reject);
})
.then((result) => {
response.end(
JSON.stringify({
id: rpc.id,
jsonrpc: "2.0",
result: result,
})
);
})
.catch((error) => {
response.end(
JSON.stringify({
id: rpc.id,
jsonrpc: "2.0",
error: error,
})
);
});
})
.catch((error) => {
response.end(
JSON.stringify({
id: null,
jsonrpc: "2.0",
error: error,
})
);
});
})
.catch((error) => {
response.end(
JSON.stringify({
id: null,
jsonrpc: "2.0",
error: error,
})
);
});
}
loadBody(request) {
return new Promise((resolve, reject) => {
var requestBody = "";
request.on("data", (chunk) => {
requestBody += chunk;
});
request.on("end", () => {
resolve(requestBody);
});
});
}
parseRPCMessage(message) {
return new Promise((resolve, reject) => {
var rpc;
try {
rpc = JSON.parse(message);
} catch (e) {
return reject({
code: -32700,
message: "Parse error",
});
}
if (typeof rpc !== "object") {
return reject({
code: -32600,
message: "Invalid Request",
data: "rpc is not of type object.",
});
}
if (rpc.jsonrpc !== "2.0") {
return reject({
code: -32600,
message: "Invalid Request",
data: "jsonrpc has not been set or has an invalid value.",
});
}
if (
rpc.id &&
!(typeof rpc.id === "number" || typeof rpc.id === "string")
) {
return reject({
code: -32600,
message: "Invalid Request",
data: "id is not of type number or string.",
});
}
if (typeof rpc.method !== "string") {
return reject({
code: -32600,
message: "Invalid Request",
data: "method is not of type string.",
});
}
if (rpc.params && typeof rpc.params !== "object") {
return reject({
code: -32600,
message: "Invalid Request",
data: "params is not of type object.",
});
}
resolve(rpc);
});
}
expose(method, handler) {
this.api[method] = handler;
}
};
<|start_filename|>scripts/create-examples.js<|end_filename|>
const child_process = require("child_process");
const path = require("path");

// Regenerates the examples/ directory by running every blueprint against the
// bundled example spec.
const ApiSpecPath = path.join(
  __dirname,
  "..",
  "examples",
  "ExampleAPI.jrgen.json"
);
const jrgenPath = path.join(__dirname, "..", "src", "jrgen.js");
const examplesPath = path.join(__dirname, "..", "examples");

// One output subdirectory per blueprint, e.g. docs-html -> examples/docs/html.
const blueprintIds = [
  "docs-html",
  "docs-md",
  "client-web-js",
  "client-web-ts",
  "server-nodejs-js",
  "spec-postman",
];

for (const generatorId of blueprintIds) {
  const outputPath = path.join(examplesPath, generatorId.replace(/-/g, "/"));
  // Quote every path argument so the command survives spaces in the
  // repository's absolute path (previously only jrgenPath was quoted).
  child_process.execSync(
    `node "${jrgenPath}" ${generatorId} -o "${outputPath}" "${ApiSpecPath}"`
  );
}
| lbergesio/jrgen |
<|start_filename|>app/src/main/java/ren/yale/android/retrofitcachetest/LogTestUtil.java<|end_filename|>
package ren.yale.android.retrofitcachetest;
import android.util.Log;
/**
* Created by yale on 2017/11/29.
*/
/**
 * Tiny logging helper that funnels the test app's debug output through a
 * single logcat tag.
 */
public class LogTestUtil {
    // final: the tag is a constant and must never be reassigned.
    private static final String TAG = "retrofitcache";

    /**
     * Logs a debug message under the shared tag.
     * @param text message to log
     */
    public static void d(String text){
        Log.d(TAG,text);
    }
}
<|start_filename|>app/src/main/java/ren/yale/android/retrofitcachetest/rx2/OKHttpUtilsRx2.java<|end_filename|>
package ren.yale.android.retrofitcachetest.rx2;
import android.content.Context;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import io.reactivex.ObservableSource;
import io.reactivex.ObservableTransformer;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.schedulers.Schedulers;
import okhttp3.Cache;
import okhttp3.Interceptor;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import ren.yale.android.retrofitcachelibrx2.CacheInterceptorListener;
import ren.yale.android.retrofitcachelibrx2.RetrofitCache;
import ren.yale.android.retrofitcachelibrx2.intercept.CacheForceInterceptorNoNet;
import ren.yale.android.retrofitcachelibrx2.intercept.CacheInterceptorOnNet;
import ren.yale.android.retrofitcachelibrx2.transformer.CacheTransformer;
import ren.yale.android.retrofitcachetest.LogTestUtil;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory;
import retrofit2.converter.gson.GsonConverterFactory;
/**
* Created by Yale on 2017/6/12.
*/
/**
 * Enum-based singleton that wires the OkHttp/Retrofit stack used by the
 * RxJava2 test screens: cache interceptors, a logging interceptor, a disk
 * cache, and a shared {@link ApiRx2} instance.
 */
public enum OKHttpUtilsRx2 {
    INSTANCE;
    private Context mContext;
    private static ApiRx2 apiRx2;

    /**
     * Initializes the shared API instance (once) and the RetrofitCache
     * library. The installed listener allows every response to be cached.
     * @param context used for the cache directory and network checks
     */
    public void init(Context context){
        mContext = context;
        if (apiRx2 ==null){
            apiRx2 = configRetrofit(ApiRx2.class,"http://gank.io/api/data/");
        }
        RetrofitCache.getInstance().init(context);
        RetrofitCache.getInstance().setCacheInterceptorListener(
                new CacheInterceptorListener() {
                    @Override
                    public boolean canCache(Request request,Response response) {
                        // Cache unconditionally in the test app.
                        return true;
                    }
                });
    }

    /**
     * Builds the OkHttp client: 20s timeouts, logging, the retrofitcache
     * application-level interceptor (serves forced cache when offline) plus
     * its network-level counterpart, and a 200MB disk cache.
     */
    public OkHttpClient getOkHttpClient(){
        OkHttpClient.Builder clientBuilder=new OkHttpClient.Builder();
        clientBuilder.readTimeout(20, TimeUnit.SECONDS);
        clientBuilder.connectTimeout(20, TimeUnit.SECONDS);
        clientBuilder.writeTimeout(20, TimeUnit.SECONDS);
        clientBuilder.addInterceptor(new LogInterceptor());
        //clientBuilder.addInterceptor(new MockInterceptor());
        clientBuilder.addInterceptor(new CacheForceInterceptorNoNet());
        clientBuilder.addNetworkInterceptor(new CacheInterceptorOnNet());
        int cacheSize = 200 * 1024 * 1024;
        File cacheDirectory = new File(mContext.getCacheDir(), "httpcache");
        Cache cache = new Cache(cacheDirectory, cacheSize);
        return clientBuilder.cache(cache).build();
    }

    /**
     * Logs a long string in chunks of at most 2000 characters, since logcat
     * truncates over-long messages.
     */
    private static void showLog(String str) {
        str = str.trim();
        int index = 0;
        int maxLength = 2000;
        String finalString="";
        while (index < str.length()) {
            if (str.length() <= index + maxLength) {
                finalString = str.substring(index);
            } else {
                finalString = str.substring(index, index+maxLength);
            }
            index += maxLength;
            LogTestUtil.d( finalString.trim());
        }
    }

    /**
     * Interceptor that logs request/response headers and the response body.
     * An OkHttp response body can only be consumed once, so after reading it
     * for logging a fresh response with an equivalent body is rebuilt.
     */
    private class LogInterceptor implements Interceptor {
        public Response intercept(Chain chain) throws IOException {
            Request request = chain.request();
            StringBuffer sb = new StringBuffer();
            Response response = chain.proceed(chain.request());
            okhttp3.MediaType mediaType = response.body().contentType();
            String content = response.body().string();
            sb.append("======== request: "+request.toString()+"\r\n ======== request headers: "+request.headers().toString()+"\r\n======= response header:"+response.headers().toString()+"\r\n---------- response body:\r\n");
            LogTestUtil.d(sb.toString());
            try {
                showLog(content);
            }catch (Exception e){
                e.printStackTrace();
            }
            // Re-create the body so downstream consumers can read it again.
            return response.newBuilder()
                    .body(okhttp3.ResponseBody.create(mediaType, content))
                    .build();
        }
    }

    /**
     * Standard stream setup: attaches the retrofitcache transformer, then
     * subscribes on IO and observes on the Android main thread.
     */
    public <T> ObservableTransformer<T, T> IoMain() {
        return new ObservableTransformer<T, T>() {
            @Override
            public ObservableSource<T> apply(io.reactivex.Observable<T> upstream) {
                return upstream.compose(CacheTransformer.<T>emptyTransformer()).
                        subscribeOn(Schedulers.io())
                        .observeOn(AndroidSchedulers.mainThread());
            }
        };
    }

    /** Returns the shared API instance (init() must have been called). */
    public ApiRx2 getApi(){
        return apiRx2;
    }

    /**
     * Builds a Retrofit instance for the given base URL and registers it with
     * RetrofitCache so per-method annotations can be resolved later.
     */
    private <T> T configRetrofit(Class<T> service,String url ) {
        Retrofit retrofit = new Retrofit.Builder()
                .baseUrl(url)
                .client(getOkHttpClient())
                .addConverterFactory(GsonConverterFactory.create())
                .addCallAdapterFactory(RxJava2CallAdapterFactory.create())
                .build();
        RetrofitCache.getInstance().addRetrofit(retrofit);
        return retrofit.create(service);
    }
}
<|start_filename|>retrofitcachelibrx2/src/main/java/ren/yale/android/retrofitcachelibrx2/transformer/CacheTransformer.java<|end_filename|>
package ren.yale.android.retrofitcachelibrx2.transformer;
import java.lang.reflect.Field;
import io.reactivex.ObservableSource;
import io.reactivex.ObservableTransformer;
import ren.yale.android.retrofitcachelibrx2.RetrofitCache;
/**
* Created by Yale on 2017/6/14.
*/
/**
 * Rx2 transformer that leaves the stream untouched but uses reflection to
 * extract Retrofit's internal service-method object and call arguments from
 * the adapter's observable chain, registering them with RetrofitCache so the
 * cache interceptors can later resolve per-method annotations.
 */
public class CacheTransformer {
    // Package-private retrofit2 rxjava2 adapter classes that wrap the
    // underlying call observable; they can only be reached by name.
    private static final String CLASS_NAME1 ="retrofit2.adapter.rxjava2.BodyObservable";
    private static final String CLASS_NAME2 ="retrofit2.adapter.rxjava2.ResultObservable";
    private static final String CLASS_NAME3 ="retrofit2.adapter.rxjava2.CallEnqueueObservable";
    private static final String CLASS_NAME4 ="retrofit2.adapter.rxjava2.CallExecuteObservable";

    /**
     * Returns a pass-through transformer. As a side effect it inspects the
     * upstream observable when it is one of the known Retrofit wrappers.
     */
    public static <T> ObservableTransformer<T, T> emptyTransformer(){
        return new ObservableTransformer<T, T>() {
            @Override
            public ObservableSource<T> apply(io.reactivex.Observable<T> upstream) {
                String name = upstream.getClass().getName();
                if (name.equals(CLASS_NAME1)||name.equals(CLASS_NAME2)){
                    observable(upstream);
                }
                // The stream itself is returned unchanged.
                return upstream;
            }
        };
    }

    /**
     * Digs through the wrapper observables via reflection:
     * Body/ResultObservable.upstream -> Call{Enqueue,Execute}Observable
     * .originalCall -> retrofit2.OkHttpCall, from which the call args and the
     * service-method object are read and handed to RetrofitCache.
     */
    private static <T> void observable(io.reactivex.Observable<T> up){
        try {
            Object serviceMethodObj = null;
            Object [] args;
            Field upstream = up.getClass().getDeclaredField("upstream");
            upstream.setAccessible(true);
            Object ov = upstream.get(up);
            Class cls = null;
            if (ov.getClass().getName().equals(CLASS_NAME3)){
                cls = Class.forName(CLASS_NAME3);
            }else if (ov.getClass().getName().equals(CLASS_NAME4)){
                cls = Class.forName(CLASS_NAME4);
            }
            if (cls == null){
                // Unknown wrapper type; nothing to extract.
                return;
            }
            Field foriginalCall = cls.getDeclaredField("originalCall");
            foriginalCall.setAccessible(true);
            Object OkhttpCallObj = foriginalCall.get(ov);
            Class clsOkhttpCall = Class.forName("retrofit2.OkHttpCall");
            Field fdArgs = clsOkhttpCall.getDeclaredField("args");
            fdArgs.setAccessible(true);
            args = (Object[]) fdArgs.get(OkhttpCallObj);
            Field fdserviceMethod = null;
            try {
                // Older Retrofit versions call this field "serviceMethod".
                fdserviceMethod= clsOkhttpCall.getDeclaredField("serviceMethod");
            }catch (Exception e){
            }
            if (fdserviceMethod == null){
                // Newer Retrofit versions presumably renamed the field to
                // "requestFactory" — hence the fallback. TODO confirm against
                // the supported Retrofit versions.
                Field filedRequestFactory= clsOkhttpCall.getDeclaredField("requestFactory");
                filedRequestFactory.setAccessible(true);
                serviceMethodObj = filedRequestFactory.get(OkhttpCallObj);
            }else{
                fdserviceMethod.setAccessible(true);
                serviceMethodObj = fdserviceMethod.get(OkhttpCallObj);
            }
            if (serviceMethodObj!=null){
                RetrofitCache.getInstance().addMethodInfo(serviceMethodObj,args);
            }
        } catch (Exception e) {
            // Reflection against Retrofit internals is best-effort; failures
            // simply disable caching metadata for this call.
            e.printStackTrace();
        }
    }
}
<|start_filename|>retrofitcachelibrx2/src/main/java/ren/yale/android/retrofitcachelibrx2/util/NetUtils.java<|end_filename|>
package ren.yale.android.retrofitcachelibrx2.util;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
/**
* Created by Yale on 2017/6/13.
*/
public class NetUtils {
    /**
     * Returns whether an active, available network connection exists.
     * Without a Context the network state cannot be queried, so the network
     * is assumed to be reachable.
     * @param context context used to query the ConnectivityManager; may be null
     * @return true if a network is available (or context is null)
     */
    public static boolean isConnectNet(Context context){
        if (context == null){
            return true;
        }
        ConnectivityManager conManager = (ConnectivityManager) context
                .getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo networkInfo = conManager.getActiveNetworkInfo();
        return networkInfo != null && networkInfo.isAvailable();
    }
}
<|start_filename|>app/src/main/java/ren/yale/android/retrofitcachetest/rx1/ApiRx1.java<|end_filename|>
package ren.yale.android.retrofitcachetest.rx1;
import java.util.concurrent.TimeUnit;
import ren.yale.android.retrofitcachelib.anno.Cache;
import ren.yale.android.retrofitcachelib.anno.Mock;
import ren.yale.android.retrofitcachetest.bean.GankAndroid;
import retrofit2.http.GET;
import retrofit2.http.Query;
import rx.Observable;
/**
* Created by Yale on 2017/6/13.
*/
/**
 * RxJava1 Retrofit service used by the test app, exercising the
 * retrofitcache annotations: response caching and the three mock flavors
 * (inline JSON, redirected URL, and an asset file).
 */
public interface ApiRx1 {
    // Cache this endpoint's response for 5 seconds.
    @Cache(time = 5,timeUnit = TimeUnit.SECONDS)
    @GET("Android/9/1")
    Observable<GankAndroid> getGankAndroid(@Query("aa") String aa);

    // Mocked inline: the response body is served from this JSON literal
    // instead of hitting the network.
    @Mock(value = "{\"error\":false,\"results\":[{\"_id\":\"5941f5f3421aa92c7be61c16\",\"createdAt\":\"2017-06-15T10:50:27.317Z\",\"desc\":\"22222222\\\\u4effNice\\\\u9996\\\\u9875\\\\u56fe\\\\u7247\\\\u5217\\\\u88689\\\\u56fe\\\\u6837\\\\u5f0f\\\\uff0c\\\\u5e76\\\\u5b9e\\\\u73b0\\\\u62d6\\\\u62fd\\\\u6548\\\\u679c\",\"images\":[\"http://img.gank.io/4f54c011-e293-436a-ada1-dc03669ffb10\"],\"publishedAt\":\"2017-06-15T13:55:57.947Z\",\"source\":\"web\",\"type\":\"Android\",\"url\":\"http://www.jianshu.com/p/0ea96b952170\",\"used\":true,\"who\":\"www的事发生飞洒地方bbb\"}]}")
    @GET("Android/10/4")
    Observable<GankAndroid> getRamMockGankAndroid();

    // Mocked by redirect: the request is served from the given URL instead
    // of the declared endpoint.
    @Mock(url = "http://gank.io/api/data/Android/10/2")
    @GET("Android/10/1")
    Observable<GankAndroid> getUrlMockGankAndroid();

    // Mocked from the app's assets: the response body is read from this file.
    @Mock(assets = "mock/mock.json")
    @GET("Android/10/5")
    Observable<GankAndroid> getAssetsMockGankAndroid();
}
<|start_filename|>retrofitcachelib/src/main/java/ren/yale/android/retrofitcachelib/CacheInterceptorListener.java<|end_filename|>
package ren.yale.android.retrofitcachelib;
import okhttp3.Request;
import okhttp3.Response;
/**
* Created by yale on 2017/10/20.
*/
/**
 * Callback that lets the host application decide, per response, whether the
 * cache interceptors may store it.
 */
public interface CacheInterceptorListener {
    /**
     * @param request the outgoing request
     * @param response the response received for {@code request}
     * @return true if the response may be cached
     */
    boolean canCache(Request request,Response response);
}
<|start_filename|>retrofitcachelibrx2/src/test/java/com/daoxuehao/android/retrofitcachelibrx2/ExampleUnitTest.java<|end_filename|>
package com.daoxuehao.android.retrofitcachelibrx2;
import org.junit.Test;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import ren.yale.android.retrofitcachelibrx2.RetrofitCache;
import static org.junit.Assert.assertEquals;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
/**
 * Host-side (JVM) unit tests. getOriginUrl() below mirrors
 * BaseInterceptor#getOriginUrl from the library so the URL-normalisation logic
 * can be exercised without Android dependencies.
 */
public class ExampleUnitTest {
// Strips every registered "ignore" query parameter from the URL so requests
// differing only in those parameters share one cache entry.
protected String getOriginUrl(String url){
Set<String> params = RetrofitCache.getInstance().getIgnoreParam();
if (params==null){
return url;
}
for (String p:params){
// NOTE(review): inside a character class '|' is a literal, so "[\\?|&]"
// matches '?', '|' or '&'. Also p is not Pattern.quote()d (assumes param
// names contain no regex metacharacters) and the greedy ".*&" can swallow
// several parameters at once. Kept as-is to stay in sync with the library.
Pattern pattern = Pattern.compile(String.format("[\\?|&]%s=.*&|[\\?|&]%s=.*",p,p));
Matcher m = pattern.matcher(url);
while (m.find()){
String rep = "";
if (m.group().startsWith("?")){
// Keep the '?' so remaining parameters stay attached to the path.
rep="?";
}
// replaceAll() resets the matcher, so the next find() starts over and
// the loop terminates after one replacement pass.
url = m.replaceAll(rep);
}
}
// Drop a dangling '?' left behind when every parameter was removed.
if (url.endsWith("?")){
return url.substring(0,url.length()-1);
}
return url;
}
@Test
public void addition_isCorrect() throws Exception {
assertEquals(4, 2 + 2);
}
// Smoke test: prints the normalised form of URLs with "test" registered as an
// ignored parameter (no assertions; output is inspected manually).
@Test
public void test_getOriginUrl()throws Exception{
RetrofitCache.getInstance().addIgnoreParam("test");
String u = getOriginUrl("http://www.baiduc.om");
System.out.println(u+"\r\n");
u = getOriginUrl("http://www.baiduc.om?a=b");
System.out.println(u+"\r\n");
u = getOriginUrl("http://www.baiduc.om?a=b&test=");
System.out.println(u+"\r\n");
u = getOriginUrl("http://www.baiduc.om?test=bb&c=d");
System.out.println(u+"\r\n");
u = getOriginUrl("http://www.baiduc.om?1test=bb&test=rer");
System.out.println(u+"\r\n");
u = getOriginUrl("http://www.baiduc.om?test=&11test=rer");
System.out.println(u+"\r\n");
u = getOriginUrl("http://www.baiduc.om?test=");
System.out.println(u+"\r\n");
}
}
<|start_filename|>retrofitcachelibrx2/src/main/java/ren/yale/android/retrofitcachelibrx2/anno/Cache.java<|end_filename|>
package ren.yale.android.retrofitcachelibrx2.anno;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.concurrent.TimeUnit;
/**
* Created by Yale on 2017/6/13.
*/
/**
 * Declares how long a successful response of the annotated endpoint method
 * stays cached. Read reflectively by RetrofitCache#getCacheTime.
 */
@Documented
@Target({ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface Cache {
// NANOSECONDS is the "not set" sentinel: RetrofitCache then falls back to its
// globally configured default unit.
TimeUnit timeUnit() default TimeUnit.NANOSECONDS;
// Cache lifetime in timeUnit units; -1 means "use the global default time".
long time() default -1;
// When true and the device is offline, responses are forced from cache.
boolean forceCacheNoNet() default true;
}
<|start_filename|>retrofitcachelib/src/main/java/ren/yale/android/retrofitcachelib/intercept/CacheInterceptorOnNet.java<|end_filename|>
package ren.yale.android.retrofitcachelib.intercept;
import android.text.TextUtils;
import java.io.IOException;
import okhttp3.Interceptor;
import okhttp3.Request;
import okhttp3.Response;
import ren.yale.android.retrofitcachelib.CacheInterceptorListener;
import ren.yale.android.retrofitcachelib.RetrofitCache;
import ren.yale.android.retrofitcachelib.bean.CacheConfig;
/**
* Created by Yale on 2017/6/13.
*/
/**
 * OkHttp NETWORK interceptor: rewrites the server's response headers so the
 * OkHttp disk cache stores the body for the max-age resolved from the
 * endpoint's @Cache annotation (looked up through RetrofitCache).
 */
public class CacheInterceptorOnNet extends BaseInterceptor implements Interceptor {
    @Override
    public Response intercept(Chain chain) throws IOException {
        Request req = chain.request();
        // Serve configured mock data without touching the network at all.
        Response mocked = mockResponse(chain);
        if (mocked != null) {
            return mocked;
        }
        // Cache-config lookup key: the URL with ignored query params stripped.
        String lookupUrl = getOriginUrl(req.url().url().toString());
        // After a mock-url redirect the original URL travels in this header;
        // prefer it so the original endpoint's cache config is used.
        String preMockUrl = req.header(KEY_HEADER_PRE_URL);
        if (!TextUtils.isEmpty(preMockUrl)) {
            lookupUrl = preMockUrl;
        }
        CacheConfig config = RetrofitCache.getInstance().getCacheTime(lookupUrl);
        Long maxAgeSeconds = config.getTime();
        Response upstream = chain.proceed(req);
        int status = upstream.code();
        // On a redirect, carry the cache config over to the target URL so the
        // followed response is cached with the same settings.
        if (status == 301 || status == 302) {
            String location = upstream.headers().get("Location");
            RetrofitCache.getInstance().addUrlArgs(location, config);
        }
        // Give the host app a chance to veto caching for this exchange.
        CacheInterceptorListener listener = RetrofitCache.getInstance().getCacheInterceptorListener();
        if (listener != null && !listener.canCache(req, upstream)) {
            return upstream;
        }
        // Force cacheability: replace the server's cache headers with our own.
        return upstream.newBuilder()
                .removeHeader("Cache-Control")
                .header("Cache-Control", "public,max-age=" + maxAgeSeconds)
                .removeHeader("Pragma")
                .build();
    }
}
<|start_filename|>app/src/main/java/ren/yale/android/retrofitcachetest/rx1/OKHttpUtilsRx1.java<|end_filename|>
package ren.yale.android.retrofitcachetest.rx1;
import android.content.Context;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import okhttp3.Cache;
import okhttp3.Interceptor;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import ren.yale.android.retrofitcachelib.CacheInterceptorListener;
import ren.yale.android.retrofitcachelib.RetrofitCache;
import ren.yale.android.retrofitcachelib.intercept.CacheForceInterceptorNoNet;
import ren.yale.android.retrofitcachelib.intercept.CacheInterceptorOnNet;
import ren.yale.android.retrofitcachelib.transformer.CacheTransformer;
import ren.yale.android.retrofitcachetest.LogTestUtil;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava.RxJavaCallAdapterFactory;
import retrofit2.converter.gson.GsonConverterFactory;
import rx.Observable;
import rx.android.schedulers.AndroidSchedulers;
import rx.functions.Func1;
import rx.schedulers.Schedulers;
/**
* Created by Yale on 2017/6/12.
*/
/**
 * Singleton (enum) holder for the RxJava1 Retrofit/OkHttp stack used by the
 * demo app: builds the client with the cache/mock interceptors and exposes
 * the ApiRx1 service plus an io->main scheduling transformer.
 */
public enum OKHttpUtilsRx1 {
INSTANCE;
private Context mContext;
private static ApiRx1 apiRx1;
// Must be called before getApi(); wires RetrofitCache and the veto listener.
public void init(Context context){
mContext = context;
if (apiRx1 ==null){
apiRx1 = configRetrofit(ApiRx1.class,"http://gank.io/api/data/");
}
RetrofitCache.getInstance().init(context);
RetrofitCache.getInstance().setCacheInterceptorListener(
new CacheInterceptorListener() {
@Override
public boolean canCache(Request request,Response response) {
// Demo: allow every response to be cached.
return true;
}
});
}
// Builds the OkHttp client: 20s timeouts, logging, the force-cache
// application interceptor, the cache-header network interceptor, and a
// 200 MB disk cache under the app cache dir.
public OkHttpClient getOkHttpClient(){
okhttp3.OkHttpClient.Builder clientBuilder=new okhttp3.OkHttpClient.Builder();
clientBuilder.readTimeout(20, TimeUnit.SECONDS);
clientBuilder.connectTimeout(20, TimeUnit.SECONDS);
clientBuilder.writeTimeout(20, TimeUnit.SECONDS);
clientBuilder.addInterceptor(new LogInterceptor());
//clientBuilder.addInterceptor(new MockInterceptor());
clientBuilder.addInterceptor(new CacheForceInterceptorNoNet());
clientBuilder.addNetworkInterceptor(new CacheInterceptorOnNet());
clientBuilder.retryOnConnectionFailure(true);
int cacheSize = 200 * 1024 * 1024;
File cacheDirectory = new File(mContext.getCacheDir(), "httpcache");
Cache cache = new Cache(cacheDirectory, cacheSize);
return clientBuilder.cache(cache).build();
}
// Logs a long string in <=2000-char chunks (logcat truncates long lines).
private static void showLog(String str) {
str = str.trim();
int index = 0;
int maxLength = 2000;
String finalString="";
while (index < str.length()) {
if (str.length() <= index + maxLength) {
finalString = str.substring(index);
} else {
finalString = str.substring(index, index+maxLength);
}
index += maxLength;
LogTestUtil.d( finalString.trim());
}
}
// Debug interceptor: logs headers, then re-wraps the (fully consumed) body so
// downstream consumers can still read it. NOTE: buffers the whole body in
// memory — fine for a demo, not for large downloads.
private class LogInterceptor implements Interceptor {
public okhttp3.Response intercept(Chain chain) throws IOException {
Request request = chain.request();
StringBuffer sb = new StringBuffer();
okhttp3.Response response = chain.proceed(chain.request());
okhttp3.MediaType mediaType = response.body().contentType();
// string() consumes the body; it is re-created below.
String content = response.body().string();
LogTestUtil.d(response.headers().toString());
// sb.append("======== request: "+request.toString()+"\r\n ======== request headers: "+request.headers().toString()+"\r\n======= response header:"+response.headers().toString()+"\r\n---------- response body:\r\n");
LogTestUtil.d(sb.toString());
try {
// showLog(content);
}catch (Exception e){
e.printStackTrace();
}
return response.newBuilder()
.body(okhttp3.ResponseBody.create(mediaType, content))
.build();
}
}
// Composition helper: registers the call with RetrofitCache (via
// CacheTransformer) and moves work to io / results to the main thread.
public static <T> Observable.Transformer<T, T> IoMain() {
return new Observable.Transformer<T, T>() {
@Override
public Observable<T> call(Observable<T> tObservable) {
return tObservable.compose(CacheTransformer.<T>emptyTransformer())
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread()).map(new Func1<T, T>() {
@Override
public Object call(Object t) {
// Identity map; kept only to preserve the original chain shape.
return t;
}
});
}
};
}
public ApiRx1 getApi(){
return apiRx1;
}
// Builds the Retrofit instance and registers it with RetrofitCache so the
// library can reflect over its serviceMethodCache.
private <T> T configRetrofit(Class<T> service,String url ) {
Retrofit retrofit = new Retrofit.Builder()
.baseUrl(url)
.client(getOkHttpClient())
.addConverterFactory(GsonConverterFactory.create())
.addCallAdapterFactory(RxJavaCallAdapterFactory.create())
.build();
RetrofitCache.getInstance().addRetrofit(retrofit);
return retrofit.create(service);
}
}
<|start_filename|>retrofitcachelib/src/main/java/ren/yale/android/retrofitcachelib/transformer/CacheTransformer.java<|end_filename|>
package ren.yale.android.retrofitcachelib.transformer;
import java.lang.reflect.Field;
import ren.yale.android.retrofitcachelib.RetrofitCache;
import ren.yale.android.retrofitcachelib.util.LogUtil;
import rx.Observable;
import rx.internal.operators.OnSubscribeLift;
/**
* Created by Yale on 2017/6/14.
*/
/**
 * RxJava1 transformer whose only job is a side effect: it digs the Retrofit
 * ServiceMethod/RequestFactory and call arguments out of the source
 * Observable via reflection and registers them with RetrofitCache, so the
 * interceptors can later map a request URL back to its @Cache/@Mock
 * annotations. The observable itself is returned unchanged.
 */
public class CacheTransformer {
public static <T> Observable.Transformer<T, T> emptyTransformer() {
return new Observable.Transformer<T, T>() {
@Override
public Observable<T> call(Observable<T> tObservable) {
Field fdOnSubscribe = null;
Object serviceMethodObj = null;
Object [] args;
try {
// Observable.onSubscribe is not public — reflection is required.
fdOnSubscribe = tObservable.getClass().getDeclaredField("onSubscribe");
fdOnSubscribe.setAccessible(true);
Object object = fdOnSubscribe.get(tObservable);
// Only lifted observables (produced by the Retrofit rx1 adapter) are handled.
if (object instanceof OnSubscribeLift){
OnSubscribeLift onSubscribe = (OnSubscribeLift) fdOnSubscribe.get(tObservable);
Field fdparent = onSubscribe.getClass().getDeclaredField("parent");
fdparent.setAccessible(true);
Object onSubscribeObj = fdparent.get(onSubscribe);
// Unwrap: CallOnSubscribe -> originalCall (retrofit2.OkHttpCall).
Class cls = Class.forName("retrofit2.adapter.rxjava.RxJavaCallAdapterFactory$CallOnSubscribe");
Field foriginalCall = cls.getDeclaredField("originalCall");
foriginalCall.setAccessible(true);
Object OkhttpCallObj = foriginalCall.get(onSubscribeObj);
Class clsOkhttpCall = Class.forName("retrofit2.OkHttpCall");
Field fdArgs = clsOkhttpCall.getDeclaredField("args");
fdArgs.setAccessible(true);
args = (Object[]) fdArgs.get(OkhttpCallObj);
Field fdserviceMethod = null;
try {
// Retrofit < 2.5 keeps a "serviceMethod" field on OkHttpCall.
fdserviceMethod= clsOkhttpCall.getDeclaredField("serviceMethod");
}catch (Exception e){
// Field absent on newer Retrofit — fall through to requestFactory.
}
if (fdserviceMethod == null){
// Retrofit >= 2.5 exposes "requestFactory" instead.
Field filedRequestFactory= clsOkhttpCall.getDeclaredField("requestFactory");
filedRequestFactory.setAccessible(true);
serviceMethodObj = filedRequestFactory.get(OkhttpCallObj);
}else{
fdserviceMethod.setAccessible(true);
serviceMethodObj = fdserviceMethod.get(OkhttpCallObj);
}
if (serviceMethodObj!=null){
RetrofitCache.getInstance().addMethodInfo(serviceMethodObj,args);
}
}
} catch (Exception e) {
// Reflection failures are non-fatal: caching metadata is simply skipped.
LogUtil.l(e);
}
return tObservable;
}
};
}
}
<|start_filename|>retrofitcachelib/src/main/java/ren/yale/android/retrofitcachelib/intercept/CacheForceInterceptorNoNet.java<|end_filename|>
package ren.yale.android.retrofitcachelib.intercept;
import java.io.IOException;
import okhttp3.CacheControl;
import okhttp3.Interceptor;
import okhttp3.Request;
import okhttp3.Response;
import ren.yale.android.retrofitcachelib.RetrofitCache;
import ren.yale.android.retrofitcachelib.util.LogUtil;
import ren.yale.android.retrofitcachelib.util.NetUtils;
/**
* Created by Yale on 2017/6/13.
*/
/**
 * OkHttp APPLICATION interceptor: serves mocks, forces cache reads when the
 * device is offline (and the endpoint allows it), performs mock-url
 * redirects, and falls back to a normal request on a 504 (the status OkHttp
 * synthesizes when FORCE_CACHE finds no cache entry).
 */
public class CacheForceInterceptorNoNet extends BaseInterceptor implements Interceptor {
@Override
public Response intercept(Chain chain) throws IOException {
Request request = chain.request();
// Inline / assets mock data short-circuits everything else.
Response mockResponse = mockResponse(chain);
if (mockResponse!=null){
return mockResponse;
}
String url = getOriginUrl(request.url().url().toString());
boolean forceCacheNoNet = RetrofitCache.getInstance().getCacheTime(url).isForceCacheNoNet();
// Offline + forceCacheNoNet: only-if-cached, never hit the network.
if (forceCacheNoNet&&!NetUtils.isConnectNet(RetrofitCache.getInstance().getContext())){
request = request.newBuilder()
.cacheControl(CacheControl.FORCE_CACHE)
.build();
}
// @Mock(url=...): swap the target URL, stash the original in a header so
// CacheInterceptorOnNet can still resolve the original cache config.
String mockUrl = mockUrl(chain);
if (mockUrl!=null){
LogUtil.d("get data from mock url: "+mockUrl);
request = request.newBuilder().url(mockUrl).header(KEY_HEADER_PRE_URL,request.url().toString())
.build();
}
Response response = chain.proceed(request);
int code = response.code();
if ( code == 504){
// FORCE_CACHE missed: retry with the original (network-capable) request.
response = chain.proceed(chain.request());
}
if(response.networkResponse()!=null){
LogUtil.d("get data from net");
} else
if (response.cacheResponse()!=null){
LogUtil.d("get data from cache");
}
return response;
}
}
<|start_filename|>retrofitcachelibrx2/src/main/java/ren/yale/android/retrofitcachelibrx2/RetrofitCache.java<|end_filename|>
package ren.yale.android.retrofitcachelibrx2;
import android.content.Context;
import android.text.TextUtils;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import java.util.concurrent.TimeUnit;
import okhttp3.Request;
import ren.yale.android.retrofitcachelibrx2.anno.Cache;
import ren.yale.android.retrofitcachelibrx2.anno.Mock;
import ren.yale.android.retrofitcachelibrx2.bean.CacheConfig;
import ren.yale.android.retrofitcachelibrx2.util.LogUtil;
import retrofit2.Retrofit;
/**
* Created by Yale on 2017/6/13.
*/
/**
 * Singleton registry backing the retrofitcache interceptors (RxJava2 flavour).
 *
 * For every registered Retrofit instance it keeps the (private)
 * serviceMethodCache map so a request URL can be traced back, via reflection,
 * to the interface method that produced it — and therefore to its
 * {@code @Cache}/{@code @Mock} annotations. Resolved cache settings are
 * memoised per URL in {@link #getUrlMap()}.
 */
public class RetrofitCache {
    private static volatile RetrofitCache mRetrofit;
    // One serviceMethodCache map (Method -> ServiceMethod/RequestFactory) per
    // Retrofit registered through addRetrofit().
    private Vector<Map> mVector;
    // Normalised request URL -> resolved cache settings (lazily created).
    private Map<String,CacheConfig> mUrlMap;
    private Context mContext;
    private Long mDefaultTime = 0L;
    private TimeUnit mDefaultTimeUnit =TimeUnit.SECONDS;
    // Normalised request URL -> args array the service method was invoked with.
    private Map mUrlAragsMap =null;
    private CacheInterceptorListener mCacheInterceptorListener;
    private boolean mMock = true;
    // Query parameter names stripped before URL lookup (see BaseInterceptor).
    private Set<String> mIgnoreParam;

    private RetrofitCache(){
        clear();
        mUrlAragsMap = new HashMap();
    }

    public Context getContext(){
        return mContext;
    }

    public void setCacheInterceptorListener(CacheInterceptorListener listener){
        mCacheInterceptorListener = listener;
    }

    public CacheInterceptorListener getCacheInterceptorListener(){
        return mCacheInterceptorListener;
    }

    /** Double-checked-locking singleton accessor. */
    public static RetrofitCache getInstance(){
        if (mRetrofit == null){
            synchronized (RetrofitCache.class){
                if (mRetrofit == null){
                    mRetrofit = new RetrofitCache();
                }
            }
        }
        return mRetrofit;
    }

    /** Stores the application context (used for asset mocks / net checks). */
    public RetrofitCache init(Context context){
        // Application context only — avoids leaking an Activity.
        mContext = context.getApplicationContext();
        return this;
    }

    /** Globally enables/disables @Mock handling. Enabled by default. */
    public RetrofitCache enableMock(boolean mock){
        mMock = mock;
        return this;
    }

    public boolean canMock(){
        return mMock;
    }

    /** Registers a query parameter name to ignore when normalising URLs. */
    public RetrofitCache addIgnoreParam(String param){
        if (mIgnoreParam==null){
            mIgnoreParam = new HashSet<>();
        }
        mIgnoreParam.add(param);
        return this;
    }

    public Set<String> getIgnoreParam(){
        return mIgnoreParam;
    }

    /** Remembers the args a service method was invoked with, keyed by its URL. */
    public void addMethodInfo(Object serviceMethod,Object[] args){
        String url = "";
        try {
            url = buildRequestUrl(serviceMethod,args);
        } catch (Exception e) {
            LogUtil.l(e);
        }
        if (!TextUtils.isEmpty(url)){
            if (!mUrlAragsMap.containsKey(url)){
                mUrlAragsMap.put(url,args);
            }
        }
    }

    /**
     * Rebuilds the okhttp Request URL for a service-method object + args via
     * reflection. Handles three Retrofit generations:
     * retrofit2.HttpServiceMethod (2.5+, unwrap requestFactory),
     * retrofit2.RequestFactory#create, retrofit2.ServiceMethod#toRequest.
     * Returns "" when the URL cannot be rebuilt.
     */
    private String buildRequestUrl(Object serviceMethod,Object[] args) throws Exception{
        String objName = serviceMethod.getClass().getName();
        Method toRequestMethod = null;
        if (objName.equals("retrofit2.HttpServiceMethod")){
            Class clsHttpServiceMethod = Class.forName("retrofit2.HttpServiceMethod");
            Field fieldRequestFactory = clsHttpServiceMethod.getDeclaredField("requestFactory");
            fieldRequestFactory.setAccessible(true);
            serviceMethod = fieldRequestFactory.get(serviceMethod);
            objName = serviceMethod.getClass().getName();
        }
        if (objName.equals("retrofit2.RequestFactory")){
            Class clsServiceMethod = Class.forName("retrofit2.RequestFactory");
            toRequestMethod = clsServiceMethod.getDeclaredMethod("create", Object[].class );
        }else{
            Class clsServiceMethod = Class.forName("retrofit2.ServiceMethod");
            toRequestMethod = clsServiceMethod.getDeclaredMethod("toRequest", Object[].class );
        }
        toRequestMethod.setAccessible(true);
        try {
            Request request = (Request) toRequestMethod.invoke(serviceMethod,new Object[]{args});
            return request.url().toString();
        }catch (Exception e){
            // Rebuilding may legitimately fail; callers treat "" as "unknown".
        }
        return "";
    }

    public RetrofitCache setDefaultTime(long time){
        mDefaultTime = time;
        return this;
    }

    public RetrofitCache setDefaultTimeUnit(TimeUnit timeUnit){
        mDefaultTimeUnit = timeUnit;
        return this;
    }

    // Name kept (typo included) for backward compatibility with existing callers.
    public long getDaultTime(){
        return mDefaultTime;
    }

    public TimeUnit getDefaultTimeUnit(){
        return mDefaultTimeUnit;
    }

    /**
     * Scans every registered Retrofit's serviceMethodCache for the interface
     * Method whose rebuilt request URL equals {@code url}; null when unknown.
     * Extracted helper: getMockObject() and getCacheTime() previously
     * duplicated this double loop.
     */
    private Method findServiceMethod(String url){
        if (mVector == null){
            // No Retrofit registered yet — previously this was an NPE.
            return null;
        }
        for (Map serviceMethodCache:mVector) {
            for (Object entry:serviceMethodCache.keySet()){
                Object serviceMethod = serviceMethodCache.get(entry);
                try {
                    if (mUrlAragsMap.containsKey(url)){
                        Object[] args = (Object[]) mUrlAragsMap.get(url);
                        String reqUrl = buildRequestUrl(serviceMethod,args);
                        if (reqUrl.equals(url)){
                            return (Method) entry;
                        }
                    }
                } catch (Exception e) {
                    LogUtil.l(e);
                }
            }
        }
        return null;
    }

    /** Returns the @Mock annotation of the method behind this URL, or null. */
    public Mock getMockObject(String url){
        Method m = findServiceMethod(url);
        if (m == null){
            return null;
        }
        return m.getAnnotation(Mock.class);
    }

    public String getMockUrl(Mock mock){
        if (mock!=null){
            if (!TextUtils.isEmpty(mock.url())){
                return mock.url();
            }
        }
        return null;
    }

    public String getMockData(Mock mock){
        if (mock!=null){
            if (!TextUtils.isEmpty(mock.value())){
                return mock.value();
            }
        }
        return null;
    }

    /** Reads an asset file fully and returns it as UTF-8 text, or null. */
    public String getMockAssetsValue(String assetsPath){
        if(mContext==null){
            return null;
        }
        try {
            InputStream inputStream = mContext.getAssets().open(assetsPath);
            // Buffer ALL bytes first and decode once. The previous version
            // decoded each 1024-byte chunk separately, which corrupted
            // multi-byte UTF-8 characters straddling a chunk boundary.
            java.io.ByteArrayOutputStream bytes = new java.io.ByteArrayOutputStream();
            byte[] buff = new byte[1024];
            int len;
            while ((len = inputStream.read(buff))>0){
                bytes.write(buff, 0, len);
            }
            inputStream.close();
            return new String(bytes.toByteArray(), Charset.forName("utf-8"));
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }

    public String getMockAssets(Mock mock){
        if (mock!=null){
            if (!TextUtils.isEmpty(mock.assets())){
                return mock.assets();
            }
        }
        return null;
    }

    /** Convenience overload: resolves the annotation for the URL first. */
    public String getMockData(String url){
        return getMockData(getMockObject(url));
    }

    /** Copies a cache config onto another URL (used for 301/302 targets). */
    public void addUrlArgs(String url ,CacheConfig cacheConfig){
        if (cacheConfig==null){
            return;
        }
        if (TextUtils.isEmpty(url)){
            return;
        }
        // Go through getUrlMap(): mUrlMap is created lazily, so touching the
        // field directly could throw a NullPointerException before the first
        // getCacheTime() call.
        if (getUrlMap().containsKey(url)){
            return;
        }
        getUrlMap().put(url,cacheConfig);
    }

    /**
     * Resolves (and memoises) the cache settings for a URL from the matching
     * service method's @Cache annotation; returns defaults when no annotation
     * or no matching method is found.
     */
    public CacheConfig getCacheTime(String url){
        if (mUrlMap!=null){
            CacheConfig config = mUrlMap.get(url);
            if (config!=null){
                return config;
            }
        }
        CacheConfig cacheConfig = new CacheConfig();
        Method m = findServiceMethod(url);
        if (m != null){
            Cache cache = m.getAnnotation(Cache.class);
            if (cache!=null){
                // NANOSECONDS / -1 are the annotation's "unset" sentinels.
                TimeUnit timeUnit = mDefaultTimeUnit;
                if (cache.timeUnit() != TimeUnit.NANOSECONDS){
                    timeUnit = cache.timeUnit();
                }
                long t = mDefaultTime;
                if (cache.time() != -1){
                    t = cache.time();
                }
                cacheConfig.setTime(timeUnit.toSeconds(t));
                cacheConfig.setForceCacheNoNet(cache.forceCacheNoNet());
            }
        }
        getUrlMap().put(url, cacheConfig);
        return cacheConfig;
    }

    private Map getUrlMap(){
        if (mUrlMap==null){
            mUrlMap = new HashMap<String, CacheConfig>();
        }
        return mUrlMap;
    }

    /** Grabs the private serviceMethodCache map of a Retrofit instance. */
    public RetrofitCache addRetrofit(Retrofit retrofit){
        try {
            Class cls = retrofit.getClass();
            Field field = cls.getDeclaredField("serviceMethodCache");
            field.setAccessible(true);
            if (mVector == null){
                mVector = new Vector<Map>();
            }
            Map m = (Map) field.get(retrofit);
            mVector.add(m);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        return this;
    }

    /** Drops all registered state; the next getInstance() builds a fresh one. */
    public void clear(){
        if (mVector!=null){
            mVector.clear();
            mVector =null;
        }
        if (mUrlMap!=null){
            mUrlMap.clear();
            mUrlMap =null;
        }
        if (mUrlAragsMap!=null){
            mUrlAragsMap.clear();
            mUrlAragsMap =null;
        }
        mRetrofit = null;
    }
}
<|start_filename|>app/src/main/java/ren/yale/android/retrofitcachetest/MainActivity.java<|end_filename|>
package ren.yale.android.retrofitcachetest;
import android.app.Activity;
import android.os.Bundle;
import android.view.View;
import android.widget.TextView;
import com.alibaba.fastjson.JSON;
import io.reactivex.Observer;
import io.reactivex.disposables.Disposable;
import io.reactivex.subjects.Subject;
import ren.yale.android.retrofitcachelib.RetrofitCache;
import ren.yale.android.retrofitcachetest.bean.GankAndroid;
import ren.yale.android.retrofitcachetest.rx1.OKHttpUtilsRx1;
import ren.yale.android.retrofitcachetest.rx2.OKHttpUtilsRx2;
import rx.Subscriber;
/**
 * Demo screen: each button triggers one request flavour (plain cached call,
 * RAM mock, URL mock, assets mock, RxJava2) and renders the response JSON in
 * the text view.
 */
public class MainActivity extends Activity {
    private TextView mTextView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        OKHttpUtilsRx1.INSTANCE.init(this.getApplication());
        OKHttpUtilsRx2.INSTANCE.init(this.getApplication());
        RetrofitCache.getInstance().init(this).enableMock(true);
        RetrofitCache.getInstance().addIgnoreParam("access_token");
        setContentView(R.layout.activity_main);
        mTextView = (TextView) findViewById(R.id.tv_content);
    }

    /**
     * Shared rx1 subscriber: clears the text view on start and renders each
     * emission as JSON. Extracted because the four testRx1* methods previously
     * duplicated this anonymous class verbatim.
     */
    private Subscriber<GankAndroid> newDisplaySubscriber() {
        return new Subscriber<GankAndroid>() {
            @Override
            public void onStart() {
                mTextView.setText("");
            }
            @Override
            public void onCompleted() {
            }
            @Override
            public void onError(Throwable e) {
                e.printStackTrace();
            }
            @Override
            public void onNext(GankAndroid gankAndroid) {
                mTextView.setText(JSON.toJSONString(gankAndroid));
            }
        };
    }

    // Plain call: cached 5s via the @Cache annotation on getGankAndroid.
    private void testRx1() {
        OKHttpUtilsRx1.INSTANCE.getApi().getGankAndroid("bb")
                .compose(OKHttpUtilsRx1.<GankAndroid>IoMain())
                .subscribe(newDisplaySubscriber());
    }

    // Served from the inline JSON in @Mock(value=...).
    private void testRx1RamMock() {
        OKHttpUtilsRx1.INSTANCE.getApi().getRamMockGankAndroid()
                .compose(OKHttpUtilsRx1.<GankAndroid>IoMain())
                .subscribe(newDisplaySubscriber());
    }

    // Redirected to the URL in @Mock(url=...).
    private void testRx1UrlMock() {
        OKHttpUtilsRx1.INSTANCE.getApi().getUrlMockGankAndroid()
                .compose(OKHttpUtilsRx1.<GankAndroid>IoMain())
                .subscribe(newDisplaySubscriber());
    }

    // Served from the asset file in @Mock(assets=...).
    private void testRx1AssetsMock() {
        OKHttpUtilsRx1.INSTANCE.getApi().getAssetsMockGankAndroid()
                .compose(OKHttpUtilsRx1.<GankAndroid>IoMain())
                .subscribe(newDisplaySubscriber());
    }

    // RxJava2 path. NOTE(review): subscribing with a bare Subject whose
    // subscribeActual() is empty is unusual — only its observer callbacks are
    // used here; kept as-is to preserve the original demo behaviour.
    private void testRx2() {
        OKHttpUtilsRx2.INSTANCE.getApi().getGankAndroid()
                .compose(OKHttpUtilsRx2.INSTANCE.<GankAndroid>IoMain())
                .subscribe(new Subject<GankAndroid>() {
                    @Override
                    public boolean hasObservers() {
                        return false;
                    }
                    @Override
                    public boolean hasThrowable() {
                        return false;
                    }
                    @Override
                    public boolean hasComplete() {
                        return false;
                    }
                    @Override
                    public Throwable getThrowable() {
                        return null;
                    }
                    @Override
                    protected void subscribeActual(Observer<? super GankAndroid> observer) {
                    }
                    @Override
                    public void onSubscribe(Disposable d) {
                    }
                    @Override
                    public void onNext(GankAndroid gankAndroid) {
                        mTextView.setText(JSON.toJSONString(gankAndroid));
                    }
                    @Override
                    public void onError(Throwable e) {
                    }
                    @Override
                    public void onComplete() {
                    }
                });
    }

    // Button handlers wired from activity_main.xml — signatures must not change.
    public void onClickRx1AssetsMock(View v) {
        testRx1AssetsMock();
    }

    public void onClickRx1(View v) {
        testRx1();
    }

    public void onClickRx1RamMock(View v) {
        testRx1RamMock();
    }

    public void onClickRx1UrlMock(View v) {
        testRx1UrlMock();
    }

    public void onClickRx2(View v) {
        // The rx2 subject has no onStart hook, so clear the view here.
        mTextView.setText("");
        testRx2();
    }
}
<|start_filename|>retrofitcachelib/src/main/java/ren/yale/android/retrofitcachelib/intercept/BaseInterceptor.java<|end_filename|>
package ren.yale.android.retrofitcachelib.intercept;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import okhttp3.Interceptor;
import okhttp3.Protocol;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.ResponseBody;
import ren.yale.android.retrofitcachelib.RetrofitCache;
import ren.yale.android.retrofitcachelib.anno.Mock;
import ren.yale.android.retrofitcachelib.util.LogUtil;
/**
* Created by Yale on 2017/7/5.
*/
/**
 * Shared helpers for the two interceptors: URL normalisation (ignored query
 * params) and @Mock resolution into synthetic responses / redirect URLs.
 */
public class BaseInterceptor {
// Header carrying the original URL across a mock-url redirect so the network
// interceptor can still resolve the original endpoint's cache config.
protected static final String KEY_HEADER_PRE_URL = "retrofictcache_mock-pre-url";
/** Returns the @Mock(url=...) target for this request, or null. */
protected String mockUrl(Interceptor.Chain chain){
if (!RetrofitCache.getInstance().canMock()){
return null;
}
Request request = chain.request();
String url = getOriginUrl(request.url().url().toString());
Mock mock = RetrofitCache.getInstance().getMockObject(url);
return RetrofitCache.getInstance().getMockUrl(mock);
}
/** Strips every registered "ignore" query parameter from the URL. */
protected String getOriginUrl(String url){
Set<String> params = RetrofitCache.getInstance().getIgnoreParam();
if (params==null){
return url;
}
for (String p:params){
// NOTE(review): '|' inside the character class is a literal ([?&|]); p is
// not Pattern.quote()d and the greedy ".*&" can swallow several params at
// once. Duplicated verbatim in the test module — keep both in sync.
Pattern pattern = Pattern.compile(String.format("[\\?|&]%s=.*&|[\\?|&]%s=.*",p,p));
Matcher m = pattern.matcher(url);
while (m.find()){
String rep = "";
if (m.group().startsWith("?")){
// Preserve the '?' so remaining params stay attached to the path.
rep="?";
}
// replaceAll resets the matcher, so the loop ends after one pass.
url = m.replaceAll(rep);
}
}
// Drop a dangling '?' left when every parameter was removed.
if (url.endsWith("?")){
return url.substring(0,url.length()-1);
}
return url;
}
/**
 * Builds a synthetic 200 response from @Mock inline JSON or an asset file;
 * null when mocking is disabled or nothing is configured for this URL.
 */
protected Response mockResponse(Interceptor.Chain chain){
if (!RetrofitCache.getInstance().canMock()){
return null;
}
Request request = chain.request();
try{
String url = getOriginUrl(request.url().url().toString());
Mock mock = RetrofitCache.getInstance().getMockObject(url);
String mockData = RetrofitCache.getInstance().getMockData(mock);
if (mockData != null){
LogUtil.d("get data from mock");
// Content type null: the converter decides from the body itself.
Response response = new Response.Builder().protocol(Protocol.HTTP_1_0)
.code(200).request(request).message("ok").body(ResponseBody.create(null,mockData))
.build();
return response;
}
String mockAssets = RetrofitCache.getInstance().getMockAssets(mock);
if (mockAssets!=null){
String md = RetrofitCache.getInstance().getMockAssetsValue(mockAssets);
if (md!=null){
LogUtil.d("get data from asset: "+mockAssets);
Response response = new Response.Builder().protocol(Protocol.HTTP_1_0)
.code(200).request(request).message("ok").body(ResponseBody.create(null,md))
.build();
return response;
}
}
}catch (Exception e){
// Best-effort: any mock failure falls through to the real request.
e.printStackTrace();
}
return null;
}
}
<|start_filename|>retrofitcachelib/src/main/java/ren/yale/android/retrofitcachelib/util/LogUtil.java<|end_filename|>
package ren.yale.android.retrofitcachelib.util;
/**
* Created by Yale on 2017/6/13.
*/
/**
 * Minimal logcat facade for the library. d()/w() always log; the DEBUG flag
 * only controls whether l() prints a full stack trace or a one-line warning.
 */
public class LogUtil {
    private static String TAG = "retrofitcache";
    private static final boolean DEBUG = false;

    /** Debug-level message. */
    public static void d(String text) {
        android.util.Log.d(TAG, text);
    }

    /** Warning-level message. */
    public static void w(String text) {
        android.util.Log.w(TAG, text);
    }

    /** Logs an exception: full trace when DEBUG, otherwise a short warning. */
    public static void l(Exception e) {
        if (DEBUG) {
            e.printStackTrace();
            return;
        }
        android.util.Log.w(TAG, e.toString());
    }
}
<|start_filename|>retrofitcachelibrx2/src/main/java/ren/yale/android/retrofitcachelibrx2/bean/CacheConfig.java<|end_filename|>
package ren.yale.android.retrofitcachelibrx2.bean;
import java.util.concurrent.TimeUnit;
/**
* Created by yale on 2018/1/23.
*/
public class CacheConfig {
private TimeUnit timeUnit = TimeUnit.NANOSECONDS;
private Long time = 0L;
private boolean forceCacheNoNet = true;
public TimeUnit getTimeUnit() {
return timeUnit;
}
public void setTimeUnit(TimeUnit timeUnit) {
this.timeUnit = timeUnit;
}
public Long getTime() {
return time;
}
public void setTime(Long time) {
this.time = time;
}
public boolean isForceCacheNoNet() {
return forceCacheNoNet;
}
public void setForceCacheNoNet(boolean forceCacheNoNet) {
this.forceCacheNoNet = forceCacheNoNet;
}
}
<|start_filename|>app/src/main/java/ren/yale/android/retrofitcachetest/rx2/ApiRx2.java<|end_filename|>
package ren.yale.android.retrofitcachetest.rx2;
import java.util.concurrent.TimeUnit;
import io.reactivex.Observable;
import ren.yale.android.retrofitcachelib.anno.Cache;
import ren.yale.android.retrofitcachelibrx2.anno.Mock;
import ren.yale.android.retrofitcachetest.bean.GankAndroid;
import retrofit2.http.GET;
/**
* Created by Yale on 2017/6/13.
*/
/**
 * RxJava2 demo API: one endpoint, mocked from assets and cached 10 seconds.
 */
public interface ApiRx2 {
// NOTE(review): per this file's imports, @Cache here is the rx1 library's
// annotation (retrofitcachelib.anno) while @Mock is the rx2 one — verify the
// rx2 RetrofitCache actually reads this @Cache type.
@Mock(assets = "mock/mock.json")
@Cache(time = 10,timeUnit = TimeUnit.SECONDS)
@GET("Android/9/1")
Observable<GankAndroid> getGankAndroid();
}
<|start_filename|>retrofitcachelib/src/main/java/ren/yale/android/retrofitcachelib/anno/Mock.java<|end_filename|>
package ren.yale.android.retrofitcachelib.anno;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Created by Yale on 2017/7/5.
*/
/**
 * Declares mock behaviour for an endpoint method. Exactly one attribute is
 * expected to be set; precedence in BaseInterceptor is value, then assets,
 * then url.
 */
@Documented
@Target({ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface Mock {
// Inline response body (RAM mock); served directly, network never hit.
String value() default "";
// Absolute URL the request is transparently redirected to.
String url() default "";
// Path of an asset file whose content is served as the response body.
String assets() default "";
}
<|start_filename|>retrofitcachelib/src/main/java/ren/yale/android/retrofitcachelib/MethodInfo.java<|end_filename|>
package ren.yale.android.retrofitcachelib;
import java.util.concurrent.TimeUnit;
/**
* Created by yale on 2017/11/2.
*/
public class MethodInfo {
private TimeUnit timeUnit;
private int time;
public TimeUnit getTimeUnit() {
return timeUnit;
}
public void setTimeUnit(TimeUnit timeUnit) {
this.timeUnit = timeUnit;
}
public int getTime() {
return time;
}
public void setTime(int time) {
this.time = time;
}
}
| yale8848/RetrofitCache |
<|start_filename|>ReadLine/Properties/AssemblyInfo.cs<|end_filename|>
[assembly:System.Runtime.CompilerServices.InternalsVisibleTo("ReadLine.Tests")] | filipw/csharp-repl-playground |
<|start_filename|>CreditCardUISample/Views/CreditCardPage.xaml.cs<|end_filename|>
using System.ComponentModel;
using CreditCardUISample.ViewModels;
using Xamarin.Forms;
namespace CreditCardUISample.Views
{
/// <summary>
/// Page hosting the credit-card entry UI; binds the view to its view model.
/// </summary>
[DesignTimeVisible(false)]
public partial class CreditCardPage : ContentPage
{
public CreditCardPage()
{
InitializeComponent();
// XAML bindings resolve against CreditCardPageViewModel's
// CardNumber/CardCvv/CardExpirationDate properties.
this.BindingContext = new CreditCardPageViewModel();
}
}
}
<|start_filename|>CreditCardUISample/ViewModels/CreditCardPageViewModel.cs<|end_filename|>
using System.ComponentModel;
namespace CreditCardUISample.ViewModels
{
/// <summary>
/// View model for the credit-card page. Fix: the original declared
/// INotifyPropertyChanged but used auto-properties that never raised
/// PropertyChanged, so XAML bindings were never notified of changes.
/// </summary>
public class CreditCardPageViewModel : INotifyPropertyChanged
{
    private string _cardNumber;
    private string _cardCvv;
    private string _cardExpirationDate;

    /// <summary>Card number as typed; raises PropertyChanged on change.</summary>
    public string CardNumber
    {
        get => _cardNumber;
        set { if (_cardNumber != value) { _cardNumber = value; OnPropertyChanged(nameof(CardNumber)); } }
    }

    /// <summary>Card verification value; raises PropertyChanged on change.</summary>
    public string CardCvv
    {
        get => _cardCvv;
        set { if (_cardCvv != value) { _cardCvv = value; OnPropertyChanged(nameof(CardCvv)); } }
    }

    /// <summary>Expiration date text; raises PropertyChanged on change.</summary>
    public string CardExpirationDate
    {
        get => _cardExpirationDate;
        set { if (_cardExpirationDate != value) { _cardExpirationDate = value; OnPropertyChanged(nameof(CardExpirationDate)); } }
    }

    public event PropertyChangedEventHandler PropertyChanged;

    /// <summary>Raises PropertyChanged for the given property name.</summary>
    protected void OnPropertyChanged(string propertyName) =>
        PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName));
}
}
| RafaelFernandez0512/PaymentUISample |
<|start_filename|>src/index.js<|end_filename|>
import React from 'react';
/**
 * Hook bridging React props to a custom element: splits `props` into
 * serializable attributes (returned as `customElementProps`) and function
 * props (attached as custom-event listeners on the element the returned `ref`
 * is bound to). `customMapping` renames prop keys to attribute/event names.
 */
const useCustomElement = (props, customMapping = {}) => {
  // FIX: must be useRef, not createRef. React.createRef() returns a NEW ref
  // object on every render, so the element bound on a previous render was
  // lost and the layout effect attached/removed listeners on a stale node.
  const ref = React.useRef(null);

  React.useLayoutEffect(() => {
    const { current } = ref;
    let fns;
    if (current) {
      // Every function prop becomes a listener for the (mapped) event name;
      // the handler unwraps CustomEvent#detail for convenience.
      fns = Object.keys(props)
        .filter(key => props[key] instanceof Function)
        .map(key => ({
          key: customMapping[key] || key,
          fn: customEvent => props[key](customEvent.detail, customEvent),
        }));
      fns.forEach(({ key, fn }) => current.addEventListener(key, fn));
    }
    return () => {
      if (current) {
        fns.forEach(({ key, fn }) => current.removeEventListener(key, fn));
      }
    };
  }, [customMapping, props, ref]);

  // Non-function props become attributes; objects/arrays are JSON-serialized
  // because custom elements only receive string attributes.
  const customElementProps = Object.keys(props)
    .filter(key => !(props[key] instanceof Function))
    .reduce((acc, key) => {
      const prop = props[key];
      const computedKey = customMapping[key] || key;
      if (prop instanceof Object || prop instanceof Array) {
        return { ...acc, [computedKey]: JSON.stringify(prop) };
      }
      return { ...acc, [computedKey]: prop };
    }, {});

  return [customElementProps, ref];
};
export default useCustomElement;
| ptaberg/use-custom-element |
<|start_filename|>src/parameterized.js<|end_filename|>
import Any from './types/any';
/**
 * Wraps a type factory: returns the type built with every parameter defaulted
 * to Any, and exposes `.of(...)` to build the type with explicit parameters.
 * Calls the type's optional static initialize() hook after construction.
 */
export default function parameterized(fn) {
  const build = (...typeParams) => {
    const Type = fn(...typeParams);
    if (Type.initialize) {
      Type.initialize();
    }
    return Type;
  };

  // fn.length = number of declared type parameters; default each to Any.
  const defaults = new Array(fn.length).fill(Any);
  const DefaultType = build(...defaults);
  DefaultType.of = (...args) => build(...args);
  return DefaultType;
}
| rkeeler/microstates.js |
<|start_filename|>example/src/BlogPage.js<|end_filename|>
import HyvorTalk from 'hyvor-talk-react'
import React from 'react';
/**
 * A single blog article page with an embedded Hyvor Talk comment section.
 */
export default class BlogPage extends React.Component {
  render() {
    // Hyvor Talk embed configuration for this article.
    const websiteId = 14;
    const pageId = 'pomodoro';

    return (
      <div className="blog-content">
        <h1>The Ultimate Guide To POMODORO Technique</h1>
        <content>The Pomodoro Technique is a time management method developed by <NAME> in the late 1980s. The technique uses a timer to break down work into intervals, traditionally 25 minutes in length, separated by short breaks.
Each interval is known as a pomodoro, from the Italian word for 'tomato', after the tomato-shaped kitchen timer that Cirillo used as a university student.</content>
        <HyvorTalk.Embed websiteId={websiteId} id={pageId} />
      </div>
    );
  }
}
<|start_filename|>example/src/App.js<|end_filename|>
import React, { Component } from 'react'
import BlogPage from './BlogPage.js'
import Listing from './Listing.js'
/**
 * Root component: toggles between the article listing and the blog page.
 */
export default class App extends Component {
  state = {
    section: 'listing'
  }

  // Bound as arrow-function class properties so they can be passed
  // directly as event handlers without wrapper closures.
  activateBlog = () => this.setState({ section: 'blog' })

  activateListing = () => this.setState({ section: 'listing' })

  render() {
    const showBlog = this.state.section === "blog";
    return (
      <div>
        <header>
          <span onClick={this.activateListing}>Listing</span>
          <span onClick={this.activateBlog}>Blog Page</span>
        </header>
        {showBlog ? <BlogPage /> : <Listing />}
      </div>
    );
  }
}
<|start_filename|>example/src/Listing.js<|end_filename|>
import React from 'react';
import HyvorTalk from 'hyvor-talk-react'
/**
 * The article listing page.
 *
 * Fix: this class in Listing.js was copy-pasted as `BlogPage`, duplicating the
 * name of the real BlogPage component and making stack traces and React
 * DevTools misleading. It is the default export, so renaming it to `Listing`
 * is invisible to callers.
 */
export default class Listing extends React.Component {
  render() {
    return (
      <div className="listing">
        <ListItem
          title="The Ultimate Guide To POMODORO Technique"
          description="The Pomodoro Technique is a time management method developed by <NAME> in the late 1980s."
          hyvorTalkId="pomodoro"
        />
        <ListItem
          title="Diffused Mode Vs Focused Mode Thinking"
          description="Focus, as we traditionally define it, is doing one task or project, while minimizing time and attention on anything else."
          hyvorTalkId="diffused"
        />
      </div>
    );
  }
}
/**
 * One entry in the article listing, with its Hyvor Talk comment count.
 *
 * @param {{title: string, description: string, hyvorTalkId: string}} props
 */
function ListItem({ title, description, hyvorTalkId }) {
  return (
    <div className="list-item">
      <h1>{title}</h1>
      <div className="description">{description}</div>
      <div> <HyvorTalk.CommentCount websiteId={14} id={hyvorTalkId} /> </div>
    </div>
  );
}
<|start_filename|>example/src/index.css<|end_filename|>
/* Base page styles: strip default spacing, set the site font. */
body {
    margin: 0;
    padding: 0;
    font-family: Segoe UI, sans-serif;
}

/* Center the article and keep line lengths readable. */
.blog-content {
    margin:auto;
    max-width:700px;
}

/* Top navigation bar. */
header {
    padding: 20px;
    background: #000;
    color:#fff;
}

/* Nav links styled as clickable spans. */
header span {
    margin-right: 20px;
    cursor: pointer;
}

/* Article listing column. */
.listing {
    margin: auto;
    width: 600px;
}

/* One article card in the listing. */
.list-item {
    padding: 30px;
    background: #fafafa;
    margin: 20px 0;
}

/* Comment-count label inside a card. */
.list-item span {
    display:block;
    margin-top: 10px;
    font-weight: 600;
}
<|start_filename|>src/lib/util.js<|end_filename|>
/**
 * Injects a <script> tag into the document body so the browser loads and
 * executes `src`. The element is tagged with `id` so removeScript can find it.
 */
export function addScript(src, id) {
  const script = Object.assign(document.createElement('script'), { src, id });
  document.body.appendChild(script);
}
/**
 * Removes a previously injected <script> tag by its id, if present.
 */
export function removeScript(id) {
  var script = document.getElementById(id)
  // Fix: guard on the element, not the id. getElementById returns null when
  // no element with this id exists, and the original `if (id)` check would
  // then throw a TypeError on `script.parentElement`.
  if (script) {
    script.parentElement.removeChild(script)
  }
}
<|start_filename|>src/index.js<|end_filename|>
import Embed from './lib/Embed.jsx'
import CommentCount from './lib/CommentCount.jsx'
// Public API of the package: the comment embed and the comment-count badge.
const HyvorTalk = { Embed, CommentCount };

export default HyvorTalk;
| HyvorTalk/hyvor-talk-react |
<|start_filename|>src/search.html<|end_filename|>
---
title: Search dart.dev
title: 搜索 Flutter & Dart 官方文档资源
short-title: Search
short-title: 搜索
description: The search page for dart.dev.
description: 搜索 flutter.cn 和 dart.cn 的网页内容。
toc: false
---
Want results from additional Dart-related sites, like api.dart.dev and flutter.dev?
<a href="/search-all">Search more sites.</a>
本页面的搜索结果为 flutter.cn 和 dart.cn 以及官方相关的资源内容,
想要同时搜索社区的内容,<a href="/search-all">点击这里搜索更多内容</a>。
<div class="d-flex searchbar">
<script>
(function() {
  // Google Programmable Search Engine id for this site.
  var cx = '017471510655331970984:x0bd16320-u';
  // Build an async <script> tag that loads the CSE widget from the mirror host.
  var gcse = document.createElement('script');
  gcse.type = 'text/javascript';
  gcse.async = true;
  gcse.src = 'https://cse.flutter-io.cn/cse.js?cx=' + cx;
  // Insert before the first existing <script> so the loader runs early.
  var s = document.getElementsByTagName('script')[0];
  s.parentNode.insertBefore(gcse, s);
})();
</script>
<gcse:search linkTarget="_self"></gcse:search>
</div>
| venshine/dart.cn |
<|start_filename|>src/Hazelcast.Net/Models/MemberInfo.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.Linq;
using Hazelcast.Core;
using Hazelcast.Exceptions;
using Hazelcast.Networking;
namespace Hazelcast.Models
{
/// <summary>
/// Represents a member of a cluster.
/// </summary>
/// <remarks>
/// <para>This class implements <see cref="IEquatable{MemberInfo}"/>; two instances are considered
/// equal if their <see cref="Id"/> and <see cref="ConnectAddress"/> are identical (the other
/// fields are not considered for equality — see <see cref="Equals(MemberInfo)"/> and
/// <see cref="GetHashCode"/>).</para>
/// </remarks>
public class MemberInfo : IEquatable<MemberInfo>
{
    // Shared empty map used when the codec reports no address map; never mutated.
    private static readonly Dictionary<EndpointQualifier, NetworkAddress> EmptyAddressMap = new Dictionary<EndpointQualifier, NetworkAddress>();

    /// <summary>
    /// Initializes a new instance of the <see cref="MemberInfo"/> class.
    /// </summary>
    /// <param name="id">The unique identifier of the member.</param>
    /// <param name="address">The network address of the member.</param>
    /// <param name="version">The version of the server running the member.</param>
    /// <param name="isLiteMember">Whether the member is a "lite" member.</param>
    /// <param name="attributes">Attributes of the member.</param>
    public MemberInfo(Guid id, NetworkAddress address, MemberVersion version, bool isLiteMember, IReadOnlyDictionary<string, string> attributes)
        : this(address, id, attributes, isLiteMember, version, false, null)
    { }

    /// <summary>
    /// Initializes a new instance of the <see cref="MemberInfo"/> class.
    /// </summary>
    /// <param name="address">The network address of the member.</param>
    /// <param name="id">The unique identifier of the member.</param>
    /// <param name="attributes">Attributes of the member.</param>
    /// <param name="isLiteMember">Whether the member is a "lite" member.</param>
    /// <param name="version">The version of the server running the member.</param>
    /// <param name="addressMapExists">Whether the address map exists.</param>
    /// <param name="addressMap">The address map.</param>
    /// <remarks>
    /// <para>That overload of the constructor is required by generated codecs.</para>
    /// </remarks>
    internal MemberInfo(NetworkAddress address, Guid id, IReadOnlyDictionary<string, string> attributes, bool isLiteMember, MemberVersion version, bool addressMapExists, IReadOnlyDictionary<EndpointQualifier, NetworkAddress> addressMap)
    {
        // yes, this constructor could be simplified, but it is used (exclusively) by the codec,
        // and must respect what the codec expects, so don't simplify it!
        Id = id;
        Address = address;
        Version = version;
        IsLiteMember = isLiteMember;
        Attributes = attributes;

        if (addressMapExists)
        {
            AddressMap = addressMap;
            // The public address is the client-protocol address qualified as "public",
            // when the server provides one.
            PublicAddress = addressMap.WherePair((qualifier, _) => qualifier.Type == ProtocolType.Client && qualifier.Identifier == "public")
                .SelectPair((_, addr) => addr)
                .FirstOrDefault();
        }
        else
        {
            AddressMap = EmptyAddressMap; // will never get modified = safe
            PublicAddress = null;
        }
    }

    /// <summary>
    /// Whether to use the public address or the internal address to connect to the member.
    /// </summary>
    /// <remarks>Determines the value of <see cref="ConnectAddress"/>.</remarks>
    internal bool UsePublicAddress { get; set; }

    /// <summary>
    /// Gets the unique identifier of the member.
    /// </summary>
    public Guid Id { get; }

    /// <summary>
    /// (for internal use only) Gets the unique identifier of the member.
    /// </summary>
    /// <remarks>
    /// <para>Generated codecs expect this naming of the property. The public version
    /// of this is <see cref="Id"/>.</para>
    /// </remarks>
    internal Guid Uuid => Id;

    /// <summary>
    /// Gets the network address of the member.
    /// </summary>
    public NetworkAddress Address { get; }

    /// <summary>
    /// Gets the public network address of the member.
    /// </summary>
    public NetworkAddress PublicAddress { get; }

    /// <summary>
    /// Gets the address to connect to.
    /// </summary>
    /// <remarks>The address to connect to is either the <see cref="PublicAddress"/> or the <see cref="Address"/>,
    /// depending on the network structure and how members can be reached by the client.</remarks>
    internal NetworkAddress ConnectAddress => UsePublicAddress ? PublicAddress : Address;

    /// <summary>
    /// Gets the version of the server running the member.
    /// </summary>
    public MemberVersion Version { get; }

    /// <summary>
    /// Determines whether the member is a "lite" member.
    /// </summary>
    /// <remarks>
    /// <para>Lite members do not own partitions.</para>
    /// </remarks>
    public bool IsLiteMember {get; }

    /// <summary>
    /// Gets the attributes of the member.
    /// </summary>
    public IReadOnlyDictionary<string, string> Attributes { get; }

    /// <summary>
    /// Gets the address map.
    /// </summary>
    internal IReadOnlyDictionary<EndpointQualifier, NetworkAddress> AddressMap { get; }

    /// <inheritdoc />
    public override bool Equals(object obj)
        => Equals(obj as MemberInfo);

    /// <summary>
    /// Determines whether this <see cref="MemberInfo"/> instance is equal to another <see cref="MemberInfo"/> instance.
    /// </summary>
    /// <param name="other">The other <see cref="MemberInfo"/> instance.</param>
    /// <returns><c>true</c> if this <see cref="MemberInfo"/> instance and the other <see cref="MemberInfo"/> instance
    /// are considered being equal; otherwise <c>false</c>.</returns>
    public bool Equals(MemberInfo other)
    {
        if (other is null) return false;
        if (ReferenceEquals(this, other)) return true;

        // compare members on what matters: the id and the connect address
        return
            Id == other.Id &&
            ConnectAddress == other.ConnectAddress;
    }

    /// <summary>
    /// Determines whether two <see cref="MemberInfo"/> instances are equal.
    /// </summary>
    /// <param name="left">The first <see cref="MemberInfo"/> instance.</param>
    /// <param name="right">The second <see cref="MemberInfo"/> instance.</param>
    /// <returns><c>true</c> if the two <see cref="MemberInfo"/> instances are considered being equal;
    /// otherwise <c>false</c>.</returns>
    public static bool operator ==(MemberInfo left, MemberInfo right)
        => left is null ? right is null : left.Equals(right);

    /// <summary>
    /// Determines whether two <see cref="MemberInfo"/> instances are not equal.
    /// </summary>
    /// <param name="left">The first <see cref="MemberInfo"/> instance.</param>
    /// <param name="right">The second <see cref="MemberInfo"/> instance.</param>
    /// <returns><c>true</c> if the two <see cref="MemberInfo"/> instances are considered being not equal;
    /// otherwise <c>false</c>.</returns>
    public static bool operator !=(MemberInfo left, MemberInfo right)
        => !(left == right);

    /// <inheritdoc />
    public override int GetHashCode() => HashCode.Combine(Id, ConnectAddress);

    /// <inheritdoc />
    public override string ToString()
    {
        return $"(Member Address = {Address}, PublicAddress = {PublicAddress}, ConnectAddress = {ConnectAddress}, Id = {Id}, IsLite = {IsLiteMember})";
    }

    /// <summary>
    /// Returns a compact string representation of the member.
    /// </summary>
    /// <param name="flagConnectAddress">Whether to append a '*' marker to the address
    /// (internal or public) that is used as the <see cref="ConnectAddress"/>.</param>
    /// <returns>A short string representing the member.</returns>
    public string ToShortString(bool flagConnectAddress)
        => $"{Id.ToShortString()} - {Address}{(!flagConnectAddress || UsePublicAddress ? "" : "*")} / {(PublicAddress == null ? "null" : PublicAddress.ToString())}{(flagConnectAddress && UsePublicAddress ? "*" : "")}";
}
}
<|start_filename|>src/Hazelcast.Net/DistributedObjects/ServiceNames.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
namespace Hazelcast.DistributedObjects
{
/// <summary>
/// Defines the service names.
/// </summary>
internal static class ServiceNames
{
    /// <summary>
    /// The name of the map service.
    /// </summary>
    public const string Map = "hz:impl:mapService";

    /// <summary>
    /// The name of the topic service.
    /// </summary>
    public const string Topic = "hz:impl:topicService";

    /// <summary>
    /// The name of the list service.
    /// </summary>
    public const string List = "hz:impl:listService";

    /// <summary>
    /// The name of the multi map service.
    /// </summary>
    public const string MultiMap = "hz:impl:multiMapService";

    /// <summary>
    /// The name of the queue service.
    /// </summary>
    public const string Queue = "hz:impl:queueService";

    /// <summary>
    /// The name of the replicated map service.
    /// </summary>
    public const string ReplicatedMap = "hz:impl:replicatedMapService";

    /// <summary>
    /// The name of the ring buffer service.
    /// </summary>
    public const string RingBuffer = "hz:impl:ringbufferService";

    /// <summary>
    /// The name of the set service.
    /// </summary>
    public const string Set = "hz:impl:setService";

    /// <summary>
    /// The name of the raft atomic long service.
    /// </summary>
    public const string AtomicLong = "hz:raft:atomicLongService";

    /// <summary>
    /// The name of the raft atomic ref service.
    /// </summary>
    public const string AtomicRef = "hz:raft:atomicRefService";

    /// <summary>
    /// The name of the Flake ID Generator service.
    /// </summary>
    public const string FlakeIdGenerator = "hz:impl:flakeIdGeneratorService";
}
}
<|start_filename|>src/Hazelcast.Net/Protocol/BuiltInCodecs/CustomTypeFactory.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using Hazelcast.Exceptions;
using Hazelcast.Models;
using Hazelcast.Networking;
using Hazelcast.Serialization;
using Hazelcast.Sql;
namespace Hazelcast.Protocol.BuiltInCodecs
{
/// <summary>
/// Factory methods invoked by generated codecs to build custom types.
/// </summary>
internal static class CustomTypeFactory
{
    /// <summary>
    /// Creates a <see cref="NetworkAddress"/> from a host and port, wrapping any
    /// resolution failure in a <see cref="HazelcastException"/>.
    /// </summary>
    public static NetworkAddress CreateAddress(string host, int port)
    {
        try
        {
            // The creation of the address uses https://docs.microsoft.com/en-us/dotnet/api/system.net.dns.gethostaddresses
            // This method may throw ArgumentException, SocketException, ArgumentOutOfRangeException, ArgumentNullException
            // Java implementation may throw https://docs.oracle.com/javase/7/docs/api/java/net/UnknownHostException.html
            return new NetworkAddress(host, port);
        }
        catch (Exception e)
        {
            throw new HazelcastException(e);
        }
    }

    /// <summary>
    /// Creates a <see cref="MapEntryStats{TKey,TValue}"/> over serialized key/value data.
    /// </summary>
    public static MapEntryStats<IData, IData> CreateSimpleEntryView(IData key, IData value, long cost, long creationTime,
        long expirationTime, long hits, long lastAccessTime, long lastStoredTime, long lastUpdateTime, long version, long ttl,
        long maxIdle)
        => new MapEntryStats<IData, IData>
        {
            Key = key,
            Value = value,
            Cost = cost,
            CreationTime = creationTime,
            ExpirationTime = expirationTime,
            Hits = hits,
            LastAccessTime = lastAccessTime,
            LastStoredTime = lastStoredTime,
            LastUpdateTime = lastUpdateTime,
            Version = version,
            Ttl = ttl,
            MaxIdle = maxIdle
        };

    /// <summary>
    /// Creates an <see cref="IndexOptions"/> from its codec-decoded parts.
    /// </summary>
    public static IndexOptions CreateIndexConfig(string name, int indexType, List<string> attributes, BitmapIndexOptions bitmapIndexOptions)
        => new IndexOptions(attributes)
        {
            Name = name,
            Type = (IndexType) indexType,
            BitmapIndexOptions = bitmapIndexOptions
        };

    /// <summary>
    /// Creates a <see cref="BitmapIndexOptions"/> from its codec-decoded parts.
    /// </summary>
    public static BitmapIndexOptions CreateBitmapIndexOptions(string uniqueKey, int uniqueKeyTransformation)
        => new BitmapIndexOptions
        {
            UniqueKey = uniqueKey,
            UniqueKeyTransformation = (UniqueKeyTransformation) uniqueKeyTransformation
        };

    /// <summary>
    /// Creates an <see cref="EndpointQualifier"/> from its codec-decoded parts.
    /// </summary>
    public static EndpointQualifier CreateEndpointQualifier(int type, string identifier)
        => new EndpointQualifier((ProtocolType) type, identifier);

    /// <summary>
    /// Creates a <see cref="SqlColumnMetadata"/>, rejecting unknown column types.
    /// </summary>
    public static SqlColumnMetadata CreateSqlColumnMetadata(string name, int type, bool isNullableExists, bool nullable)
    {
        if (!Enum.IsDefined(typeof(SqlColumnType), type))
            throw new NotSupportedException($"Column type #{type} is not supported.");

        // By default, columns are nullable
        // The column becomes non-nullable only if NOT NULL modifier applied during table creation or if an expression is selected
        var isNullable = nullable || !isNullableExists;
        return new SqlColumnMetadata(name, (SqlColumnType) type, isNullable);
    }
}
}
<|start_filename|>src/Hazelcast.Net.Tests/Configuration/HazelcastOptionsTests.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.IO;
using System.Linq;
using System.Security.Authentication;
using System.Text;
using System.Threading.Tasks;
using ExpectedObjects;
using Hazelcast.Clustering;
using Hazelcast.Clustering.LoadBalancing;
using Hazelcast.Configuration;
using Hazelcast.Configuration.Binding;
using Hazelcast.Core;
using Hazelcast.NearCaching;
using Hazelcast.Networking;
using Hazelcast.Security;
using Hazelcast.Serialization;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using NUnit.Framework;
namespace Hazelcast.Tests.Configuration
{
[TestFixture]
public class HazelcastOptionsTests
{
    // Build must reject null configuration delegates / null builders.
    [Test]
    public void BuildExceptions()
    {
        Assert.Throws<ArgumentNullException>(() => HazelcastOptions.Build((Action<IConfigurationBuilder>) null));
        Assert.Throws<ArgumentNullException>(() => HazelcastOptions.Build(null, null, "key"));
    }

    // ServiceProvider is a plain get/set passthrough.
    [Test]
    public void ServiceProvider()
    {
        var services = new ServiceCollection();
        var serviceProvider = services.BuildServiceProvider();
        var options = new HazelcastOptions { ServiceProvider = serviceProvider };
        Assert.That(options.ServiceProvider, Is.SameAs(serviceProvider));
    }

    // An empty options file binds to all-default options (cluster name "dev").
    [Test]
    public void EmptyOptionsFile()
    {
        var json = Resources.Empty;
        var stream = new MemoryStream(Encoding.UTF8.GetBytes(json));

        var builder = new ConfigurationBuilder();
        builder.AddJsonStream(stream);
        var configuration = builder.Build();

        var options = new HazelcastOptions();
        configuration.HzBind(HazelcastOptions.Hazelcast, options);

        Assert.AreEqual("dev", options.ClusterName);
    }

    // Same as above, but the JSON contains comments, which the binder must tolerate.
    [Test]
    public void EmptyOptionsFileWithComments()
    {
        var json = Resources.EmptyWithComments;
        var stream = new MemoryStream(Encoding.UTF8.GetBytes(json));

        var builder = new ConfigurationBuilder();
        builder.AddJsonStream(stream);
        var configuration = builder.Build();

        var options = new HazelcastOptions();
        configuration.HzBind(HazelcastOptions.Hazelcast, options);

        Assert.AreEqual("dev", options.ClusterName);
    }

    // Helper: binds a JSON resource string into a fresh HazelcastOptions.
    private static HazelcastOptions ReadResource(string json)
    {
        var stream = new MemoryStream(Encoding.UTF8.GetBytes(json));

        var builder = new ConfigurationBuilder();
        builder.AddJsonStream(stream);
        var configuration = builder.Build();

        var options = new HazelcastOptions();
        configuration.HzBind(HazelcastOptions.Hazelcast, options);

        return options;
    }

    // Root-level options: names, labels, subscribers, load balancer, cluster options.
    [Test]
    public void HazelcastOptionsRoot()
    {
        var options = ReadResource(Resources.HazelcastOptions);

        Assert.AreEqual("cluster", options.ClusterName);
        Assert.AreEqual("client", options.ClientName);
        Assert.AreEqual(2, options.Labels.Count);

        Assert.IsTrue(options.Labels.Contains("label_1"));
        Assert.IsTrue(options.Labels.Contains("label_2"));

        Assert.AreEqual(1, options.Subscribers.Count);
        var subscriber = options.Subscribers[0];
        Assert.IsInstanceOf<HazelcastClientEventSubscriber>(subscriber);

        // Building the subscriber must instantiate the configured TestSubscriber.
        TestSubscriber.Ctored = false;
        subscriber.Build(null);
        Assert.IsTrue(TestSubscriber.Ctored);

        var loadBalancer = options.LoadBalancer.Service;
        Assert.IsInstanceOf<RandomLoadBalancer>(loadBalancer);

        var clusterOptions = (IClusterOptions) options;
        Assert.AreEqual(1000, clusterOptions.WaitForConnectionMilliseconds);
    }

    [Test]
    public void CoreOptionsSection()
    {
        var options = ReadResource(Resources.HazelcastOptions).Core;

        Assert.AreEqual(1000, options.Clock.OffsetMilliseconds);
    }

    [Test]
    public void MessagingOptionsSection()
    {
        var options = ReadResource(Resources.HazelcastOptions).Messaging;

        // internal, cannot change
        Assert.AreEqual(5, options.MaxFastInvocationCount);

        Assert.AreEqual(1001, options.MinRetryDelayMilliseconds);
    }

    [Test]
    public void HeartbeatOptionsSection()
    {
        var options = ReadResource(Resources.HazelcastOptions).Heartbeat;

        Assert.AreEqual(1000, options.PeriodMilliseconds);
        Assert.AreEqual(1001, options.TimeoutMilliseconds);
    }

    [Test]
    public void PreviewOptionsSection()
    {
        var options = ReadResource(Resources.HazelcastOptions).Preview;

        Assert.That(options.EnableNewReconnectOptions, Is.False);
        Assert.That(options.EnableNewRetryOptions, Is.False);
    }

    // Networking section: addresses, routing flags, SSL, cloud, socket and retry options.
    [Test]
    public void NetworkingOptionsSection()
    {
        var options = ReadResource(Resources.HazelcastOptions).Networking;

        Assert.AreEqual(2, options.Addresses.Count);
        Assert.IsTrue(options.Addresses.Contains("localhost"));
        Assert.IsTrue(options.Addresses.Contains("otherhost"));
        Assert.IsFalse(options.ShuffleAddresses);
        Assert.IsFalse(options.SmartRouting);
        Assert.IsFalse(options.RedoOperations);
        Assert.AreEqual(ReconnectMode.DoNotReconnect, options.ReconnectMode);
        Assert.IsTrue(options.Reconnect);
        Assert.IsFalse(options.ShuffleAddresses);

        var sslOptions = options.Ssl;
        Assert.IsTrue(sslOptions.Enabled);
        Assert.IsFalse(sslOptions.ValidateCertificateChain);
        Assert.IsTrue(sslOptions.ValidateCertificateName);
        Assert.IsTrue(sslOptions.CheckCertificateRevocation);
        Assert.AreEqual("cert", sslOptions.CertificateName);
        Assert.AreEqual("path", sslOptions.CertificatePath);
        Assert.AreEqual("password", sslOptions.CertificatePassword);
        Assert.AreEqual(SslProtocols.Tls11, sslOptions.Protocol);
        Console.WriteLine(sslOptions.ToString());

#if NETCOREAPP
#pragma warning disable CS0618 // Type or member is obsolete
#endif
        // testing obsolete Ssl2, Default protocols
        Assert.Throws<ConfigurationException>(() => sslOptions.Protocol = SslProtocols.Ssl2);
        Assert.Throws<ConfigurationException>(() => sslOptions.Protocol = SslProtocols.Default);
#if NETCOREAPP
#pragma warning restore CS0618
#endif

        var cloudOptions = options.Cloud;
        Assert.IsTrue(cloudOptions.Enabled);
        Assert.AreEqual("token", cloudOptions.DiscoveryToken);

        // constant
        Assert.AreEqual(new Uri("https://coordinator.hazelcast.cloud/"), cloudOptions.Url);

        var socketOptions = options.Socket;
        Assert.AreEqual(1000, socketOptions.BufferSizeKiB);
        Assert.IsFalse(socketOptions.KeepAlive);
        Assert.AreEqual(1001, socketOptions.LingerSeconds);
        Assert.IsTrue(socketOptions.TcpNoDelay);

        var retryOptions = options.ConnectionRetry;
        Assert.AreEqual(1000, retryOptions.InitialBackoffMilliseconds);
        Assert.AreEqual(1001, retryOptions.MaxBackoffMilliseconds);
        Assert.AreEqual(1002, retryOptions.Multiplier);
        Assert.AreEqual(1003, retryOptions.ClusterConnectionTimeoutMilliseconds);
        Assert.AreEqual(1004, retryOptions.Jitter);
    }

    // Authentication section: the configured credentials factory is built with its args.
    [Test]
    public void AuthenticationOptionsFile()
    {
        var options = ReadResource(Resources.HazelcastOptions).Authentication;

        var credentialsFactory = options.CredentialsFactory.Service;
        Assert.IsInstanceOf<TestCredentialsFactory>(credentialsFactory);

        var testCredentialsFactory = (TestCredentialsFactory) credentialsFactory;
        Assert.AreEqual("arg", testCredentialsFactory.Arg1);
        Assert.AreEqual(1000, testCredentialsFactory.Arg2);
    }

    // Load balancer can be configured by well-known short name ("random").
    [Test]
    public void LoadBalancingOptions1()
    {
        const string json = @"{ ""hazelcast"": {
""loadBalancer"" : {
    ""typeName"": ""random""
}
}}";

        var stream = new MemoryStream(Encoding.UTF8.GetBytes(json));

        var builder = new ConfigurationBuilder();
        builder.AddJsonStream(stream);
        var configuration = builder.Build();

        var options = new HazelcastOptions();
        configuration.HzBind(HazelcastOptions.Hazelcast, options);

        Assert.IsInstanceOf<RandomLoadBalancer>(options.LoadBalancer.Service);
    }

    // Short names are case-insensitive ("ROUNDROBIN").
    [Test]
    public void LoadBalancingOptions2()
    {
        const string json = @"{ ""hazelcast"": {
""loadBalancer"" : {
    ""typeName"": ""ROUNDROBIN""
}
}}";

        var stream = new MemoryStream(Encoding.UTF8.GetBytes(json));

        var builder = new ConfigurationBuilder();
        builder.AddJsonStream(stream);
        var configuration = builder.Build();

        var options = new HazelcastOptions();
        configuration.HzBind(HazelcastOptions.Hazelcast, options);

        Assert.IsInstanceOf<RoundRobinLoadBalancer>(options.LoadBalancer.Service);
    }

    // Load balancer can also be configured by assembly-qualified type name.
    [Test]
    public void LoadBalancingOptions3()
    {
        const string json = @"{ ""hazelcast"": {
""loadBalancer"" : {
    ""typeName"": ""Hazelcast.Clustering.LoadBalancing.RandomLoadBalancer, Hazelcast.Net""
}
}}";

        var stream = new MemoryStream(Encoding.UTF8.GetBytes(json));

        var builder = new ConfigurationBuilder();
        builder.AddJsonStream(stream);
        var configuration = builder.Build();

        var options = new HazelcastOptions();
        configuration.HzBind(HazelcastOptions.Hazelcast, options);

        Assert.IsInstanceOf<RandomLoadBalancer>(options.LoadBalancer.Service);
    }

    // Clone must deep-copy every option (verified via ExpectedObjects).
    [Test]
    public void Clone()
    {
        var options = ReadResource(Resources.HazelcastOptions);

        // TODO: find a way to ensure that *everything* is non-default
        options.Networking.Addresses.Add("127.0.0.1:11001");
        options.Networking.Addresses.Add("127.0.0.1:11002");
        options.Events.SubscriptionCollectDelay = TimeSpan.FromSeconds(4);
        options.Events.SubscriptionCollectPeriod = TimeSpan.FromSeconds(5);
        options.Events.SubscriptionCollectTimeout = TimeSpan.FromSeconds(6);

        // clone
        var clone = options.Clone();

        // use the ExpectedObject to perform a complete comparison of the clone
        options.ToExpectedObject(config => config
            // factories. OwnsService is not cloned
            .IgnoreRelativePath("OwnsService"))
            .ShouldEqual(clone);
    }

    // All AddSubscriber overloads register a subscriber; nulls are rejected.
    [Test]
    public void AddSubscriber()
    {
        var options = new HazelcastOptions();

        options.AddSubscriber(new TestSubscriber());
        options.AddSubscriber("TestSubscriber");
        options.AddSubscriber(typeof (TestSubscriber));
        options.AddSubscriber<TestSubscriber>();
        options.AddSubscriber(x => x.StateChanged((sender, args) => { }));

        Assert.That(options.Subscribers.Count, Is.EqualTo(5));

        Assert.Throws<ArgumentNullException>(() => options.AddSubscriber((Type) null));
        Assert.Throws<ArgumentException>(() => options.AddSubscriber((string) null));
        Assert.Throws<ArgumentNullException>(() => options.AddSubscriber((IHazelcastClientEventSubscriber) null));
    }

    // Test double: records (via the static Ctored flag) that Build was invoked.
    public class TestSubscriber : IHazelcastClientEventSubscriber
    {
        public static bool Ctored { get; set; }

        public void Build(HazelcastClientEventHandlers events)
        {
            Ctored = true;
        }
    }

    // Test double: captures the constructor arguments passed from configuration.
    public class TestCredentialsFactory : ICredentialsFactory
    {
        public TestCredentialsFactory(string arg1, int arg2)
        {
            Arg1 = arg1;
            Arg2 = arg2;
        }

        public string Arg1 { get; }

        public int Arg2 { get; }

        public ICredentials NewCredentials()
        {
            throw new NotSupportedException();
        }

        public void Dispose()
        { }
    }

    // Serialization section: endianness, factories, global serializer, per-type serializers.
    [Test]
    public void SerializationOptionsFile()
    {
        var options = ReadResource(Resources.HazelcastOptions).Serialization;

        Assert.AreEqual(Endianness.LittleEndian, options.Endianness);
        Assert.AreEqual(1000, options.PortableVersion);
        Assert.IsFalse(options.ValidateClassDefinitions);

        Assert.AreEqual(1, options.PortableFactories.Count);
        var portableFactoryOptions = options.PortableFactories.First();
        Assert.AreEqual(1001, portableFactoryOptions.Id);
        Assert.IsInstanceOf<TestPortableFactory>(portableFactoryOptions.Service);

        Assert.AreEqual(1, options.DataSerializableFactories.Count);
        var dataSerializableFactoryOptions = options.DataSerializableFactories.First();
        Assert.AreEqual(1002, dataSerializableFactoryOptions.Id);
        Assert.IsInstanceOf<TestDataSerializableFactory>(dataSerializableFactoryOptions.Service);

        Assert.IsNotNull(options.GlobalSerializer);
        Assert.IsTrue(options.GlobalSerializer.OverrideClrSerialization);
        Assert.IsInstanceOf<TestDefaultSerializer>(options.GlobalSerializer.Service);

        Assert.AreEqual(1, options.Serializers.Count);
        var serializerOptions = options.Serializers.First();
        Assert.AreEqual(typeof(HazelcastClient), serializerOptions.SerializedType);
        Assert.IsInstanceOf<TestSerializer>(serializerOptions.Service);
    }

    // Near-cache section: the "default" and "other" caches with their eviction settings.
    [Test]
    public void NearCacheOptionsFile()
    {
        var options = ReadResource(Resources.HazelcastOptions);

        Assert.AreEqual(2, options.NearCaches.Count);

        Assert.IsTrue(options.NearCaches.TryGetValue("default", out var defaultNearCache));
        Assert.AreEqual(EvictionPolicy.Lru, defaultNearCache.EvictionPolicy);
        Assert.AreEqual(InMemoryFormat.Binary, defaultNearCache.InMemoryFormat);
        Assert.AreEqual(1000, defaultNearCache.MaxIdleSeconds);
        Assert.AreEqual(1001, defaultNearCache.MaxSize);
        Assert.AreEqual(1002, defaultNearCache.TimeToLiveSeconds);
        Assert.IsTrue(defaultNearCache.InvalidateOnChange);

        Assert.IsTrue(options.NearCaches.TryGetValue("other", out var otherNearCache));
        Assert.AreEqual(EvictionPolicy.Lfu, otherNearCache.EvictionPolicy);
        Assert.AreEqual(InMemoryFormat.Object, otherNearCache.InMemoryFormat);
        Assert.AreEqual(2000, otherNearCache.MaxIdleSeconds);
        Assert.AreEqual(2001, otherNearCache.MaxSize);
        Assert.AreEqual(2002, otherNearCache.TimeToLiveSeconds);
        Assert.IsFalse(otherNearCache.InvalidateOnChange);

        // TODO: whatever keys?
    }

    // Test double: portable factory stub (never actually invoked by these tests).
    public class TestPortableFactory : IPortableFactory
    {
        public IPortable Create(int classId)
        {
            throw new NotSupportedException();
        }
    }

    // Test double: data-serializable factory stub.
    public class TestDataSerializableFactory : IDataSerializableFactory
    {
        public IIdentifiedDataSerializable Create(int typeId)
        {
            throw new NotSupportedException();
        }
    }

    // Test double: global serializer stub.
    public class TestDefaultSerializer : ISerializer
    {
        public void Dispose()
        { }

        public int TypeId => throw new NotSupportedException();
    }

    // Test double: per-type serializer stub.
    public class TestSerializer : ISerializer
    {
        public void Dispose()
        { }

        public int TypeId => throw new NotSupportedException();
    }

    // Binding a second configuration under an alternate key overlays onto the
    // same options instance: scalars are overwritten, collections are merged.
    [Test]
    public void AltKey()
    {
        const string json1 = @"{
""hazelcast"": {
    ""clientName"": ""client"",
    ""clusterName"": ""cluster"",
    ""networking"": {
        ""addresses"": [
            ""127.0.0.1""
        ]
    }
}
}";

        const string json2 = @"{
""alt"": {
    ""clientName"": ""altClient"",
    ""networking"": {
        ""addresses"": [
            ""127.0.0.2""
        ]
    }
}
}";

        var stream1 = new MemoryStream(Encoding.UTF8.GetBytes(json1));
        var stream2 = new MemoryStream(Encoding.UTF8.GetBytes(json2));

        var builder = new ConfigurationBuilder();
        builder.AddJsonStream(stream1);
        builder.AddJsonStream(stream2);
        var configuration = builder.Build();

        var options = new HazelcastOptions();
        configuration.HzBind(HazelcastOptions.Hazelcast, options);
        configuration.HzBind("alt", options);

        Assert.AreEqual("altClient", options.ClientName);
        Assert.AreEqual("cluster", options.ClusterName);

        Assert.That(options.Networking.Addresses.Count, Is.EqualTo(2));
        Assert.That(options.Networking.Addresses, Does.Contain("127.0.0.1"));
        Assert.That(options.Networking.Addresses, Does.Contain("127.0.0.2"));

        // or, more simply (only in tests):

        stream1 = new MemoryStream(Encoding.UTF8.GetBytes(json1));
        stream2 = new MemoryStream(Encoding.UTF8.GetBytes(json2));

        options = HazelcastOptions.Build(x => x.AddJsonStream(stream1).AddJsonStream(stream2),
            null, "alt");

        Assert.AreEqual("altClient", options.ClientName);
        Assert.AreEqual("cluster", options.ClusterName);

        Assert.That(options.Networking.Addresses.Count, Is.EqualTo(2));
        Assert.That(options.Networking.Addresses, Does.Contain("127.0.0.1"));
        Assert.That(options.Networking.Addresses, Does.Contain("127.0.0.2"));
    }
}
}
<|start_filename|>src/Hazelcast.Net/Clustering/ClusterState.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Hazelcast.Core;
using Hazelcast.Exceptions;
using Hazelcast.Partitioning;
using Microsoft.Extensions.Logging;
namespace Hazelcast.Clustering
{
/// <summary>
/// Represents the state of the cluster.
/// </summary>
internal class ClusterState : IAsyncDisposable
{
    private readonly CancellationTokenSource _clusterCancellation = new CancellationTokenSource(); // general kill switch
    private readonly object _mutex = new object(); // guards ClientState transitions and the state-change queue
    private readonly StateChangeQueue _stateChangeQueue;
    private Action _shutdownRequested;
    private volatile bool _readonlyProperties;

    /// <summary>
    /// Initializes a new instance of the <see cref="ClusterState"/> class.
    /// </summary>
    /// <param name="options">The cluster options.</param>
    /// <param name="clusterName">The name of the cluster (server side).</param>
    /// <param name="clientName">The name of this client, as assigned by the client.</param>
    /// <param name="partitioner">The partitioner.</param>
    /// <param name="loggerFactory">A logger factory.</param>
    public ClusterState(IClusterOptions options, string clusterName, string clientName, Partitioner partitioner, ILoggerFactory loggerFactory)
    {
        Options = options;
        ClusterName = clusterName;
        ClientName = clientName;
        Partitioner = partitioner;
        LoggerFactory = loggerFactory;
        _stateChangeQueue = new StateChangeQueue(loggerFactory);

        HConsole.Configure(x => x.Configure<ClusterState>().SetPrefix("CLUST.STATE"));
    }

    #region Events

    /// <summary>
    /// Triggers when the state changes.
    /// </summary>
    public Func<ClientState, ValueTask> StateChanged
    {
        get => _stateChangeQueue.StateChanged;
        set
        {
            ThrowIfPropertiesAreReadOnly();
            _stateChangeQueue.StateChanged = value ?? throw new ArgumentNullException(nameof(value));
        }
    }

    /// <summary>
    /// Triggers when shutdown is requested.
    /// </summary>
    public Action ShutdownRequested
    {
        get => _shutdownRequested;
        set
        {
            ThrowIfPropertiesAreReadOnly();
            _shutdownRequested = value;
        }
    }

    #endregion

    #region Readonly Properties

    /// <summary>
    /// Throws an <see cref="InvalidOperationException"/> if properties (On...) are read-only.
    /// </summary>
    public void ThrowIfPropertiesAreReadOnly()
    {
        if (_readonlyProperties) throw new InvalidOperationException(ExceptionMessages.PropertyIsNowReadOnly);
    }

    /// <summary>
    /// Sets properties (On...) as read-only.
    /// </summary>
    public void SetPropertiesReadOnly()
    {
        _readonlyProperties = true;
    }

    #endregion

    #region Infos

    /// <summary>
    /// Gets the unique identifier of the cluster, as assigned by the client.
    /// </summary>
    public Guid ClientId { get; } = Guid.NewGuid();

    /// <summary>
    /// Gets the name of the cluster client, as assigned by the client.
    /// </summary>
    public string ClientName { get; }

    /// <summary>
    /// Gets the name of the cluster server.
    /// </summary>
    public string ClusterName { get; }

    #endregion

    #region ClientState

    // NOTE: the initial ClientState is the default value, i.e. zero
    // we don't make it ClientState.Unknown because we don't want it
    // to be publicly visible, as this is a purely internal state

    /// <summary>
    /// Gets the client state.
    /// </summary>
    public ClientState ClientState { get; private set; }

    /// <summary>
    /// Changes the state, and pushes the change to the events queue.
    /// </summary>
    /// <param name="newState">The new state.</param>
    public void ChangeState(ClientState newState)
    {
        lock (_mutex)
        {
            if (ClientState == newState)
                return;

            ClientState = newState;
            HConsole.WriteLine(this, $"{ClientName} state -> {ClientState}");
            _stateChangeQueue.Add(newState);
        }
    }

    /// <summary>
    /// Changes the state if it is as expected, and pushes the change to the events queue.
    /// </summary>
    /// <param name="newState">The new state.</param>
    /// <param name="expectedState">The expected state.</param>
    /// <returns><c>true</c> if the state was as expected, and thus changed; otherwise <c>false</c>.</returns>
    public bool ChangeState(ClientState newState, ClientState expectedState)
    {
        lock (_mutex)
        {
            if (ClientState != expectedState)
                return false;

            ClientState = newState;
            HConsole.WriteLine(this, $"{ClientName} state -> {ClientState}");
            _stateChangeQueue.Add(newState);
            return true;
        }
    }

    /// <summary>
    /// Changes the state if it is as expected, and pushes the change to the events queue.
    /// </summary>
    /// <param name="newState">The new state.</param>
    /// <param name="expectedStates">The expected states.</param>
    /// <returns><c>true</c> if the state was as expected, and thus changed; otherwise <c>false</c>.</returns>
    public bool ChangeState(ClientState newState, params ClientState[] expectedStates)
    {
        lock (_mutex)
        {
            if (!expectedStates.Contains(ClientState))
                return false;

            ClientState = newState;
            HConsole.WriteLine(this, $"{ClientName} state -> {ClientState}");
            _stateChangeQueue.Add(newState);
            return true;
        }
    }

    /// <summary>
    /// Changes the state, and pushes the change to the events queue,
    /// then waits for the event to be handled.
    /// </summary>
    /// <param name="newState">The new state.</param>
    /// <returns>A task that will complete when the state change event has been handled.</returns>
    public async Task ChangeStateAndWait(ClientState newState)
    {
        Task wait;
        lock (_mutex)
        {
            if (ClientState == newState)
                return;

            ClientState = newState;
            HConsole.WriteLine(this, $"{ClientName} state -> {ClientState}");
            wait = _stateChangeQueue.AddAndWait(newState);
        }

        // await outside the lock: the queued event handlers must not run while _mutex is held
        await wait.CfAwait();
    }

    /// <summary>
    /// Changes the state if it is as expected, and pushes the change to the events queue,
    /// then waits for the event to be handled.
    /// </summary>
    /// <param name="newState">The new state.</param>
    /// <param name="expectedState">The expected state.</param>
    /// <returns><c>true</c> if the state was as expected, and thus changed, and the corresponding
    /// event has been handled; otherwise (not changed) <c>false</c>.</returns>
    public async Task<bool> ChangeStateAndWait(ClientState newState, ClientState expectedState)
    {
        Task wait;
        lock (_mutex)
        {
            if (ClientState != expectedState)
                return false;

            ClientState = newState;
            HConsole.WriteLine(this, $"{ClientName} state -> {ClientState}");
            wait = _stateChangeQueue.AddAndWait(newState);
        }

        await wait.CfAwait();
        return true;
    }

    /// <summary>
    /// Changes the state if it is as expected, and pushes the change to the events queue,
    /// then waits for the event to be handled.
    /// </summary>
    /// <param name="newState">The new state.</param>
    /// <param name="expectedStates">The expected states.</param>
    /// <returns><c>true</c> if the state was as expected, and thus changed, and the corresponding
    /// event has been handled; otherwise (not changed) <c>false</c>.</returns>
    public async Task<bool> ChangeStateAndWait(ClientState newState, params ClientState[] expectedStates)
    {
        Task wait;
        lock (_mutex)
        {
            if (!expectedStates.Contains(ClientState))
                return false;

            ClientState = newState;
            HConsole.WriteLine(this, $"{ClientName} state -> {ClientState}"); // trace the transition, consistently with the other ChangeState* overloads
            wait = _stateChangeQueue.AddAndWait(newState);
        }

        await wait.CfAwait();
        return true;
    }

    /// <summary>
    /// Waits until connected, or it becomes impossible to connect.
    /// </summary>
    /// <param name="cancellationToken">A cancellation token.</param>
    /// <returns><c>true</c> if connected; otherwise <c>false</c> meaning it has become impossible to connect.</returns>
    public ValueTask<bool> WaitForConnectedAsync(CancellationToken cancellationToken)
    {
        // fast, non-async path: answer immediately when the state already settles the question
        lock (_mutex)
        {
            // already connected
            if (ClientState == ClientState.Connected) return new ValueTask<bool>(true);

            // never going to be connected
            if (ClientState != ClientState.Started && ClientState != ClientState.Disconnected) return new ValueTask<bool>(false);
        }

        return WaitForConnectedAsync2(cancellationToken);
    }

    // slow, async path of WaitForConnectedAsync: registers for state changes and waits
    private async ValueTask<bool> WaitForConnectedAsync2(CancellationToken cancellationToken)
    {
        TaskCompletionSource<ClientState> wait;
        CancellationTokenRegistration reg;
        lock (_mutex)
        {
            // re-check under the lock: the state may have changed since the fast path

            // already connected
            if (ClientState == ClientState.Connected) return true;

            // never going to be connected
            if (ClientState != ClientState.Started && ClientState != ClientState.Disconnected) return false;

            // must wait
            wait = new TaskCompletionSource<ClientState>();
            reg = cancellationToken.Register(() => wait.TrySetCanceled());

            // NOTE(review): this handler is added to the queue's StateChanged delegate and
            // never removed; confirm that StateChangeQueue tolerates accumulated handlers,
            // or add an unsubscription once the wait completes.
            _stateChangeQueue.StateChanged += x =>
            {
                // either connected, or never going to be connected
                if (x != ClientState.Started && x != ClientState.Disconnected)
                    wait.TrySetResult(x);

                // keep waiting
                return default;
            };
        }

        ClientState state;
        try { state = await wait.Task.CfAwait(); } catch { state = 0; } // cancellation -> state 0 -> returns false
        reg.Dispose();
        return state == ClientState.Connected;
    }

    /// <summary>
    /// Whether the cluster is connected.
    /// </summary>
    public bool IsConnected => ClientState == ClientState.Connected;

    /// <summary>
    /// Whether the cluster is active i.e. connected or connecting.
    /// </summary>
    /// <remarks>
    /// <para>When the cluster is active it is either connected, or trying to get
    /// connected. It may make sense to retry operations that fail, because they
    /// should succeed when the cluster is eventually connected.</para>
    /// </remarks>
    public bool IsActive => ClientState.IsActiveState();

    /// <summary>
    /// Throws a <see cref="ClientOfflineException"/> if the cluster is not active.
    /// </summary>
    /// <param name="innerException">An optional inner exception.</param>
    public void ThrowIfNotActive(Exception innerException = null)
    {
        if (!IsActive) throw new ClientOfflineException(innerException, ClientState);
    }

    #endregion

    /// <summary>
    /// Creates (does not throw) a <see cref="ClientOfflineException"/> matching the current state.
    /// </summary>
    /// <returns>A <see cref="ClientOfflineException"/> that the caller can throw.</returns>
    public Exception ThrowClientOfflineException()
    {
        // due to a race condition between ClusterMembers potentially removing all its connections,
        // and ClusterConnections figuring we are now disconnected and changing the state, the state
        // here could still be ClientState.Connected - fix it.
        var clientState = ClientState;
        if (clientState == ClientState.Connected) clientState = ClientState.Disconnected;
        return new ClientOfflineException(clientState);
    }

    /// <summary>
    /// Requests that the client shuts down.
    /// </summary>
    public void RequestShutdown()
    {
        _shutdownRequested?.Invoke();
    }

    /// <summary>
    /// Gets the options.
    /// </summary>
    public IClusterOptions Options { get; }

    /// <summary>
    /// Whether smart routing is enabled.
    /// </summary>
    public bool IsSmartRouting => Options.Networking.SmartRouting;

    /// <summary>
    /// Gets the partitioner.
    /// </summary>
    public Partitioner Partitioner { get; }

    /// <summary>
    /// Gets the logger factory.
    /// </summary>
    public ILoggerFactory LoggerFactory { get; }

    /// <summary>
    /// Gets the cluster instrumentation.
    /// </summary>
    public ClusterInstrumentation Instrumentation { get; } = new ClusterInstrumentation();

    /// <summary>
    /// Gets the correlation identifier sequence.
    /// </summary>
    public ISequence<long> CorrelationIdSequence { get; } = new Int64Sequence();

    /// <summary>
    /// Gets the next correlation identifier.
    /// </summary>
    /// <returns>The next correlation identifier.</returns>
    public long GetNextCorrelationId() => CorrelationIdSequence.GetNext();

    /// <inheritdoc />
    public async ValueTask DisposeAsync()
    {
        await _stateChangeQueue.DisposeAsync().CfAwait();
        _clusterCancellation.Dispose();
    }
}
}
<|start_filename|>src/Hazelcast.Net/Protocol/CustomCodecs/SqlQueryIdCodec.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// <auto-generated>
// This code was generated by a tool.
// Hazelcast Client Protocol Code Generator
// https://github.com/hazelcast/hazelcast-client-protocol
// Change to this file will be lost if the code is regenerated.
// </auto-generated>
#pragma warning disable IDE0051 // Remove unused private members
// ReSharper disable UnusedMember.Local
// ReSharper disable RedundantUsingDirective
// ReSharper disable CheckNamespace
using System;
using System.Collections.Generic;
using Hazelcast.Protocol.BuiltInCodecs;
using Hazelcast.Protocol.CustomCodecs;
using Hazelcast.Core;
using Hazelcast.Messaging;
using Hazelcast.Clustering;
using Hazelcast.Serialization;
using Microsoft.Extensions.Logging;
namespace Hazelcast.Protocol.CustomCodecs
{
// Codec for the SqlQueryId custom type. The wire layout is a struct frame containing
// four little-endian longs: memberIdHigh, memberIdLow, localIdHigh, localIdLow.
// NOTE: this file is generated by the Hazelcast client protocol code generator;
// do not hand-edit the code, it will be lost on regeneration.
internal static class SqlQueryIdCodec
{
// byte offsets of each field within the initial frame; each field is one long wide
private const int MemberIdHighFieldOffset = 0;
private const int MemberIdLowFieldOffset = MemberIdHighFieldOffset + BytesExtensions.SizeOfLong;
private const int LocalIdHighFieldOffset = MemberIdLowFieldOffset + BytesExtensions.SizeOfLong;
private const int LocalIdLowFieldOffset = LocalIdHighFieldOffset + BytesExtensions.SizeOfLong;
private const int InitialFrameSize = LocalIdLowFieldOffset + BytesExtensions.SizeOfLong;
// Encodes a SqlQueryId into the client message as begin-struct / initial frame / end-struct.
public static void Encode(ClientMessage clientMessage, Hazelcast.Sql.SqlQueryId sqlQueryId)
{
clientMessage.Append(Frame.CreateBeginStruct());
var initialFrame = new Frame(new byte[InitialFrameSize]);
initialFrame.Bytes.WriteLongL(MemberIdHighFieldOffset, sqlQueryId.MemberIdHigh);
initialFrame.Bytes.WriteLongL(MemberIdLowFieldOffset, sqlQueryId.MemberIdLow);
initialFrame.Bytes.WriteLongL(LocalIdHighFieldOffset, sqlQueryId.LocalIdHigh);
initialFrame.Bytes.WriteLongL(LocalIdLowFieldOffset, sqlQueryId.LocalIdLow);
clientMessage.Append(initialFrame);
clientMessage.Append(Frame.CreateEndStruct());
}
// Decodes a SqlQueryId from the frame iterator; consumes frames up to the struct end.
public static Hazelcast.Sql.SqlQueryId Decode(IEnumerator<Frame> iterator)
{
// begin frame
iterator.Take();
var initialFrame = iterator.Take();
var memberIdHigh = initialFrame.Bytes.ReadLongL(MemberIdHighFieldOffset);
var memberIdLow = initialFrame.Bytes.ReadLongL(MemberIdLowFieldOffset);
var localIdHigh = initialFrame.Bytes.ReadLongL(LocalIdHighFieldOffset);
var localIdLow = initialFrame.Bytes.ReadLongL(LocalIdLowFieldOffset);
// skip any frames added by future protocol versions, up to the struct end
iterator.SkipToStructEnd();
return new Hazelcast.Sql.SqlQueryId(memberIdHigh, memberIdLow, localIdHigh, localIdLow);
}
}
}
#pragma warning restore IDE0051 // Remove unused private members
<|start_filename|>src/Hazelcast.Net/HazelcastClientFactory.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Threading;
using System.Threading.Tasks;
using Hazelcast.Aggregation;
using Hazelcast.Clustering;
using Hazelcast.Core;
using Hazelcast.Partitioning.Strategies;
using Hazelcast.Projection;
using Hazelcast.Query;
using Hazelcast.Serialization;
using Hazelcast.Serialization.ConstantSerializers;
using Hazelcast.Serialization.DefaultSerializers;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
namespace Hazelcast
{
/// <summary>
/// Creates <see cref="IHazelcastClient"/> instances.
/// </summary>
public static class HazelcastClientFactory
{
    /// <summary>
    /// Starts a new <see cref="IHazelcastClient"/> instance with automatic options.
    /// </summary>
    /// <param name="cancellationToken">An optional cancellation token.</param>
    /// <returns>A new <see cref="IHazelcastClient"/> instance.</returns>
    /// <remarks>
    /// <para>Options are built via the <see cref="HazelcastOptionsBuilder.Build()"/> method.</para>
    /// <para>By default, the client connection timeout is infinite. If this method cannot establish
    /// a connection to a cluster at the configured addresses, it may appear to hang as it retries
    /// forever. You may want to configure a timeout via the options.Networking.ConnectionRetry.ClusterConnectionTimeoutMilliseconds
    /// configuration option.</para>
    /// </remarks>
    public static ValueTask<IHazelcastClient> StartNewClientAsync(CancellationToken cancellationToken = default)
        => StartNewClientAsync(new HazelcastOptionsBuilder().Build(), cancellationToken);

    /// <summary>
    /// Starts a new <see cref="IHazelcastClient"/> instance with configured options.
    /// </summary>
    /// <param name="configure">A <see cref="HazelcastOptions"/> configuration delegate.</param>
    /// <param name="cancellationToken">An optional cancellation token.</param>
    /// <returns>A new <see cref="IHazelcastClient"/> instance.</returns>
    /// <remarks>
    /// <para>Options are built via the <see cref="HazelcastOptionsBuilder.Build()"/> method and passed to the <paramref name="configure"/> method,
    /// where they can be refined and adjusted, before being used to create the client.</para>
    /// <para>By default, the client connection timeout is infinite. If this method cannot establish
    /// a connection to a cluster at the configured addresses, it may appear to hang as it retries
    /// forever. You may want to configure a timeout via the options.Networking.ConnectionRetry.ClusterConnectionTimeoutMilliseconds
    /// configuration option.</para>
    /// </remarks>
    public static ValueTask<IHazelcastClient> StartNewClientAsync(Action<HazelcastOptions> configure, CancellationToken cancellationToken = default)
        => StartNewClientAsync(GetOptions(configure ?? throw new ArgumentNullException(nameof(configure))), cancellationToken);

    /// <summary>
    /// Starts a new <see cref="IHazelcastClient"/> instance with options.
    /// </summary>
    /// <param name="options">Options.</param>
    /// <param name="cancellationToken">An optional cancellation token.</param>
    /// <returns>A new <see cref="IHazelcastClient"/> instance.</returns>
    /// <remarks>
    /// <para>By default, the client connection timeout is infinite. If this method cannot establish
    /// a connection to a cluster at the configured addresses, it may appear to hang as it retries
    /// forever. You may want to configure a timeout via the options.Networking.ConnectionRetry.ClusterConnectionTimeoutMilliseconds
    /// configuration option.</para>
    /// </remarks>
    public static ValueTask<IHazelcastClient> StartNewClientAsync(HazelcastOptions options, CancellationToken cancellationToken = default)
    {
        if (options == null) throw new ArgumentNullException(nameof(options));

        // every async operations using this client will need a proper async context
        // and, we *must* do this in a non-async method for the change to bubble up!
        AsyncContext.Ensure();

        return StartNewClientAsyncInternal(options, cancellationToken);
    }

    // implements the async part of StartNewClientAsync w/ cancellation
    // (options have already been null-checked by the public entry point)
    private static async ValueTask<IHazelcastClient> StartNewClientAsyncInternal(HazelcastOptions options, CancellationToken cancellationToken)
    {
        var client = CreateClient(options);
        await client.StartAsync(cancellationToken).CfAwait();
        return client;
    }

    /// <summary>
    /// Gets a new starting <see cref="IHazelcastClient"/> instance with automatic options.
    /// </summary>
    /// <param name="cancellationToken">An optional cancellation token.</param>
    /// <returns>A <see cref="HazelcastClientStart"/> instance which exposes the <see cref="IHazelcastClient"/> itself,
    /// along with a <see cref="Task"/> representing the start operation.</returns>
    /// <remarks>
    /// <para>The <see cref="IHazelcastClient"/> instance is starting, but not started yet. Its start operation is represented by the returned
    /// <see cref="Task"/>, which will complete when the client has started, or when starting has failed. Trying to use the client before the
    /// start <see cref="Task"/> has completed can have unspecified results, including throwing exceptions. Make sure that the start
    /// <see cref="Task"/> has actually completed before using the client.</para>
    /// <para>In any case, the start <see cref="Task"/> must be awaited, as it may fail with an exception that must be observed.</para>
    /// <para>Options are built via the <see cref="HazelcastOptionsBuilder.Build()"/> method.</para>
    /// <para>By default, the client connection timeout is infinite. If this method cannot establish
    /// a connection to a cluster at the configured addresses, it may appear to hang as it retries
    /// forever. You may want to configure a timeout via the options.Networking.ConnectionRetry.ClusterConnectionTimeoutMilliseconds
    /// configuration option.</para>
    /// </remarks>
    public static HazelcastClientStart GetNewStartingClient(CancellationToken cancellationToken = default)
        => GetNewStartingClient(new HazelcastOptionsBuilder().Build(), cancellationToken);

    /// <summary>
    /// Gets a new starting <see cref="IHazelcastClient"/> instance with configured options.
    /// </summary>
    /// <param name="configure">A <see cref="HazelcastOptions"/> configuration delegate.</param>
    /// <param name="cancellationToken">An optional cancellation token.</param>
    /// <returns>A <see cref="HazelcastClientStart"/> instance which exposes the <see cref="IHazelcastClient"/> itself,
    /// along with a <see cref="Task"/> representing the start operation.</returns>
    /// <remarks>
    /// <para>The <see cref="IHazelcastClient"/> instance is starting, but not started yet. Its start operation is represented by the returned
    /// <see cref="Task"/>, which will complete when the client has started, or when starting has failed. Trying to use the client before the
    /// start <see cref="Task"/> has completed can have unspecified results, including throwing exceptions. Make sure that the start
    /// <see cref="Task"/> has actually completed before using the client.</para>
    /// <para>In any case, the start <see cref="Task"/> must be awaited, as it may fail with an exception that must be observed.</para>
    /// <para>Options are built via the <see cref="HazelcastOptionsBuilder.Build()"/> method and passed to the <paramref name="configure"/> method,
    /// where they can be refined and adjusted, before being used to create the client.</para>
    /// <para>By default, the client connection timeout is infinite. If this method cannot establish
    /// a connection to a cluster at the configured addresses, it may appear to hang as it retries
    /// forever. You may want to configure a timeout via the options.Networking.ConnectionRetry.ClusterConnectionTimeoutMilliseconds
    /// configuration option.</para>
    /// </remarks>
    public static HazelcastClientStart GetNewStartingClient(Action<HazelcastOptions> configure, CancellationToken cancellationToken = default)
        => GetNewStartingClient(GetOptions(configure ?? throw new ArgumentNullException(nameof(configure))), cancellationToken);

    /// <summary>
    /// Gets a new starting <see cref="IHazelcastClient"/> instance with options.
    /// </summary>
    /// <param name="options">Options.</param>
    /// <param name="cancellationToken">An optional cancellation token.</param>
    /// <returns>A <see cref="HazelcastClientStart"/> instance which exposes the <see cref="IHazelcastClient"/> itself,
    /// along with a <see cref="Task"/> representing the start operation.</returns>
    /// <remarks>
    /// <para>The <see cref="IHazelcastClient"/> instance is starting, but not started yet. Its start operation is represented by the returned
    /// <see cref="Task"/>, which will complete when the client has started, or when starting has failed. Trying to use the client before the
    /// start <see cref="Task"/> has completed can have unspecified results, including throwing exceptions. Make sure that the start
    /// <see cref="Task"/> has actually completed before using the client.</para>
    /// <para>In any case, the start <see cref="Task"/> must be awaited, as it may fail with an exception that must be observed.</para>
    /// <para>By default, the client connection timeout is infinite. If this method cannot establish
    /// a connection to a cluster at the configured addresses, it may appear to hang as it retries
    /// forever. You may want to configure a timeout via the options.Networking.ConnectionRetry.ClusterConnectionTimeoutMilliseconds
    /// configuration option.</para>
    /// </remarks>
    public static HazelcastClientStart GetNewStartingClient(HazelcastOptions options, CancellationToken cancellationToken = default)
    {
        if (options == null) throw new ArgumentNullException(nameof(options));

        // every async operations using this client will need a proper async context
        // and, we *must* do this in a non-async method for the change to bubble up!
        AsyncContext.Ensure();

        var client = CreateClient(options);
        return new HazelcastClientStart(client, client.StartAsync(cancellationToken));
    }

    // builds options and applies the user-supplied configuration delegate
    private static HazelcastOptions GetOptions(Action<HazelcastOptions> configure)
    {
        return new HazelcastOptionsBuilder().With(configure).Build();
    }

    // (internal for tests only) creates the serialization service
    internal static SerializationService CreateSerializationService(SerializationOptions options, ILoggerFactory loggerFactory)
    {
        // TODO: refactor serialization service entirely
        // there should not be a 'builder'
        // it's all configuration or service
        var serializationServiceBuilder = new SerializationServiceBuilder(loggerFactory);
        serializationServiceBuilder
            .SetConfig(options)
            .SetPartitioningStrategy(new PartitionAwarePartitioningStragegy()) // TODO: should be configure-able
            .SetVersion(SerializationService.SerializerVersion) // uh? else default is wrong?
            .AddHook<PredicateDataSerializerHook>() // shouldn't they be configurable?
            .AddHook<AggregatorDataSerializerHook>()
            .AddHook<ProjectionDataSerializerHook>()
            .AddDefinitions(new ConstantSerializerDefinitions())
            .AddDefinitions(new DefaultSerializerDefinitions())
            ;

        return serializationServiceBuilder.Build();
    }

    // creates the client
    private static HazelcastClient CreateClient(HazelcastOptions options)
    {
        if (options == null) throw new ArgumentNullException(nameof(options));

        // clone the options - we don't want any change to the original options to impact this client
        options = options.Clone();

        // this ensures that the clock is correctly configured before anything else
        // happens - remember the clock is static - so we are doing it here - and
        // the clock will actually initialize once
        // TODO: make the clock non-static and pass it to the client + where it's needed
        Clock.Initialize(options.Core.Clock);

        var loggerFactory = options.LoggerFactory.Service ?? new NullLoggerFactory();
        var serializationService = CreateSerializationService(options.Serialization, loggerFactory);
        var cluster = new Cluster(options, serializationService, loggerFactory);
        var client = new HazelcastClient(options, cluster, serializationService, loggerFactory);
        return client;
    }
}
}
<|start_filename|>src/Hazelcast.Net.DependencyInjection/ServiceCollectionExtensions.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
namespace Hazelcast.DependencyInjection
{
/// <summary>
/// Provides extension methods to the <see cref="IServiceCollection"/> interface.
/// </summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Adds Hazelcast services.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configuration">The configuration.</param>
    /// <returns>The service collection.</returns>
    public static IServiceCollection AddHazelcast(this IServiceCollection services, IConfiguration configuration)
    {
        // everything Hazelcast lives under the "hazelcast" configuration section
        var hazelcastSection = configuration.GetSection("hazelcast");

        // wire the Hazelcast-specific configuration into the options infrastructure
        services.AddOptions();
        services.AddSingleton<IOptionsChangeTokenSource<HazelcastOptions>>(
            new ConfigurationChangeTokenSource<HazelcastOptions>(string.Empty, hazelcastSection));

        // register the HazelcastOptions, making sure that (1) HzBind is used to bind them,
        // and (2) the service provider is assigned so that service factories that require
        // it (see logging below) can use it
        services.AddSingleton<IConfigureOptions<HazelcastOptions>>(provider =>
            new HazelcastNamedConfigureFromConfigurationOptions(string.Empty, hazelcastSection, provider));

        // wire creators
        services.Configure<HazelcastOptions>(options =>
        {
            // assumes that the ILoggerFactory has been registered in the container
            options.LoggerFactory.ServiceProvider = options.ServiceProvider;

            // other services are deliberately not wired through dependency injection,
            // as we cannot assume users want that; the authenticator would look like:
            //options.Authentication.Authenticator.Creator = () => options.ServiceProvider.GetRequiredService<IAuthenticator>();
        });

        return services;
    }
}
}
<|start_filename|>src/Hazelcast.Net/Clustering/MemberConnectionQueue.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Hazelcast.Core;
using Hazelcast.Models;
using Microsoft.Extensions.Logging;
namespace Hazelcast.Clustering
{
/// <summary>
/// Represents the queue of members that need to be connected.
/// </summary>
internal class MemberConnectionQueue : IAsyncEnumerable<MemberConnectionRequest>, IAsyncDisposable
{
private readonly AsyncQueue<MemberConnectionRequest> _requests = new AsyncQueue<MemberConnectionRequest>();
private readonly CancellationTokenSource _cancel = new CancellationTokenSource();
private readonly SemaphoreSlim _resume = new SemaphoreSlim(0); // blocks the queue when it is suspended
private readonly SemaphoreSlim _enumerate = new SemaphoreSlim(1); // ensures there can be only 1 concurrent enumerator
private readonly object _mutex = new object();
private readonly ILogger _logger;
private volatile bool _disposed;
private MemberConnectionRequest _request;
private bool _suspended;
/// <summary>
/// Initializes a new instance of the <see cref="MemberConnectionQueue"/> class.
/// </summary>
/// <param name="loggerFactory">A logger factory.</param>
/// <exception cref="ArgumentNullException">The logger factory is <c>null</c>.</exception>
public MemberConnectionQueue(ILoggerFactory loggerFactory)
{
    _logger = (loggerFactory ?? throw new ArgumentNullException(nameof(loggerFactory)))
        .CreateLogger<MemberConnectionQueue>();

    HConsole.Configure(x => x.Configure<MemberConnectionQueue>().SetPrefix("MBRQ"));
}
// Raised with the member whose connection attempt failed
// (raised by the consuming/processing code, not visible in this chunk - confirm against the rest of the class).
public event EventHandler<MemberInfo> ConnectionFailed;
/// <summary>
/// Suspends the queue.
/// </summary>
/// <returns>A <see cref="ValueTask"/> that will be completed when the queue is suspended.</returns>
/// <remarks>
/// <para>If an item is being processed, this waits for the processing to complete.</para>
/// <para>When the queue is suspended, calls to the enumerator's MoveNextAsync() method blocks.</para>
/// </remarks>
public ValueTask SuspendAsync()
{
    lock (_mutex)
    {
        // a disposed queue has nothing to suspend, and that is not an error
        if (_disposed) return default;

        _logger.IfDebug()?.LogDebug("Suspend the members connection queue.");
        _suspended = true;

        // _request is a value type and cannot be null; when nothing is being processed its
        // Completion is the default (completed) ValueTask, otherwise awaiting it ensures
        // the in-flight request completes before the queue is actually suspended
        return _request.Completion;
    }
}
/// <summary>
/// Resumes the queue.
/// </summary>
/// <remarks>
/// <para>If <paramref name="drain"/> is <c>true</c>, de-queues and ignores all queued items.</para>
/// <para>Unblocks calls to the enumerator MoveNextAsync() method.</para>
/// </remarks>
public void Resume(bool drain = false)
{
lock (_mutex)
{
if (_disposed) return; // nothing to resume - but no need to throw about it
if (!_suspended) throw new InvalidOperationException("Not suspended.");
_logger.IfDebug()?.LogDebug($"{(drain?"Drain and resume":"Resume")} the members connection queue.");
if (drain) _requests.ForEach(request => request.Cancel());
_suspended = false;
_resume.Release();
}
}
/// <summary>
/// Adds a member to connect.
/// </summary>
/// <param name="member">The member to connect.</param>
public void Add(MemberInfo member)
{
if (_disposed) return; // no need to add - no need to throw about it
lock (_mutex)
{
if (!_requests.TryWrite(new MemberConnectionRequest(member)))
{
// that should not happen, but log to be sure
_logger.LogWarning($"Failed to add member ({member.Id.ToShortString()}).");
}
else
{
_logger.LogDebug($"Added member {member.Id.ToShortString()}");
}
}
}
// when receiving members from the cluster... if a member is gone,
// we need to remove it from the queue, no need to ever try to connect
// to it again - so it remains in the _members async queue, but we
// flag it so that when we dequeue it, we can ignore it
/// <summary>
/// Removes a member.
/// </summary>
/// <param name="memberId">The identifier of the member.</param>
public void Remove(Guid memberId)
{
// cancel all corresponding requests - see notes in AsyncQueue, this is best-effort,
// a member that we want to remove *may* end up being enumerated, and we're going to
// to to connect to it, and either fail, or drop the connection - accepted tradeoff
lock (_mutex) _requests.ForEach(m =>
{
if (m.Member.Id == memberId) m.Cancel();
});
}
/// <inheritdoc />
public IAsyncEnumerator<MemberConnectionRequest> GetAsyncEnumerator(CancellationToken cancellationToken = default)
{
if (_disposed) throw new ObjectDisposedException(nameof(MemberConnectionQueue));
// ensure that disposing this class cancels the enumeration
return new AsyncEnumerator(this, _cancel.Token, cancellationToken);
}
private class AsyncEnumerator : IAsyncEnumerator<MemberConnectionRequest>
{
private readonly MemberConnectionQueue _queue;
private readonly CancellationTokenSource _cancellation;
private IAsyncEnumerator<MemberConnectionRequest> _queueRequestsEnumerator;
public AsyncEnumerator(MemberConnectionQueue queue, CancellationToken cancellationToken1, CancellationToken cancellationToken2)
{
_queue = queue;
_cancellation = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken1, cancellationToken2);
}
public async ValueTask<bool> MoveNextAsync()
{
if (_cancellation.IsCancellationRequested) return false;
// if this is the first call, validate that we are only enumerating once at a time & create the enumerator
if (_queueRequestsEnumerator == null)
{
var acquired = await _queue._enumerate.WaitAsync(TimeSpan.Zero, default).CfAwait();
if (!acquired) throw new InvalidOperationException("Can only enumerate once at a time.");
_queueRequestsEnumerator = _queue._requests.GetAsyncEnumerator(_cancellation.Token);
}
// there is only one consumer, and the consumer *must* complete a request before picking a new one
if (_queue._request != null && !_queue._request.Completed)
{
throw new InvalidOperationException("Cannot move to next request if previous request has not completed.");
}
// loop until we have a valid request to return, because we may dequeue nulls or cancelled members
while (!_cancellation.IsCancellationRequested)
{
// dequeue a request
if (!await _queueRequestsEnumerator.MoveNextAsync().CfAwait())
return false;
while (!_cancellation.IsCancellationRequested)
{
// if not suspended, make that request the current one and return - this request is not in the queue
// anymore, it's going to be processed no matter what even if the queue is drained or the member is
// removed, and then the established connection (if any) will be dropped
lock (_queue._mutex)
{
if (!_queue._suspended)
{
var request = _queueRequestsEnumerator.Current;
if (request.Member == null || request.Cancelled) break; // that request is to be skipped
request.Failed += (r, _) => _queue.ConnectionFailed?.Invoke(_queue, ((MemberConnectionRequest)r).Member);
_queue._request = request;
return true;
}
}
// if we reach this point, we did not return nor break = the queue was suspended, wait until it is released
// and then loop within the nested while => the dequeued request will be considered again
await _queue._resume.WaitAsync(_cancellation.Token).CfAwaitCanceled();
}
}
return false;
}
/// <inheritdoc />
public MemberConnectionRequest Current => _queue._request;
public async ValueTask DisposeAsync()
{
if (_queueRequestsEnumerator != null)
{
await _queueRequestsEnumerator.DisposeAsync().CfAwait();
_queue._enumerate.Release();
}
_cancellation.Dispose();
}
}
/// <inheritdoc />
public ValueTask DisposeAsync()
{
// note: DisposeAsync should not throw (CA1065)
lock (_mutex)
{
if (_disposed) return default;
_disposed = true;
}
_requests.Complete();
_cancel.Cancel();
_cancel.Dispose();
// cannot wait until enumeration (if any) is complete,
// because that depends on the caller calling MoveNext,
// instead, we return false if the caller calls MoveNext,
// and the caller should dispose the enumerator
_resume.Dispose();
_enumerate.Dispose();
return default;
}
}
}
<|start_filename|>src/Hazelcast.Net/Clustering/ClusterEvents.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Hazelcast.Core;
using Hazelcast.Events;
using Hazelcast.Exceptions;
using Hazelcast.Messaging;
using Hazelcast.Models;
using Hazelcast.Protocol.Codecs;
using Microsoft.Extensions.Logging;
namespace Hazelcast.Clustering
{
/// <summary>
/// Provides the cluster events service for a cluster.
/// </summary>
internal partial class ClusterEvents : IAsyncDisposable
{
private readonly TerminateConnections _terminateConnections; // queues connections for termination
private readonly ClusterState _clusterState;
private readonly ClusterMessaging _clusterMessaging;
private readonly ClusterMembers _clusterMembers;
private readonly DistributedEventScheduler _scheduler;
private readonly ILogger _logger;
private readonly CancellationTokenSource _cancel = new CancellationTokenSource(); // cancels the background tasks
private readonly object _mutex = new object(); // subscriptions and connections
private Func<ValueTask> _partitionsUpdated;
private Func<MembersUpdatedEventArgs, ValueTask> _membersUpdated;

// cluster views: the single connection that supports the member/partition view events
private readonly object _clusterViewsMutex = new object();
private MemberConnection _clusterViewsConnection; // the connection which supports the view event
private long _clusterViewsCorrelationId; // the correlation id of the view event subscription
private Task _clusterViewsTask; // the task that assigns a connection to support the view event

private volatile int _disposed; // non-zero once disposed (presumably set with Interlocked - dispose code not in view)

// connections
private readonly HashSet<MemberConnection> _connections = new HashSet<MemberConnection>();
private TaskCompletionSource<object> _connectionOpened; // signals that a new connection opened (used when waiting for one)

// subscription id -> subscription
// the master subscriptions list
private readonly ConcurrentDictionary<Guid, ClusterSubscription> _subscriptions = new ConcurrentDictionary<Guid, ClusterSubscription>();

// subscribe tasks
private readonly object _subscribeTasksMutex = new object();
private Dictionary<MemberConnection, Task> _subscribeTasks = new Dictionary<MemberConnection, Task>(); // the tasks that subscribe new connections

// correlation id -> subscription
// used to match a subscription to an incoming event message
// each connection has its own correlation id, so there can be many entries per cluster subscription
private readonly ConcurrentDictionary<long, ClusterSubscription> _correlatedSubscriptions = new ConcurrentDictionary<long, ClusterSubscription>();

// ghost subscriptions, to be collected
// subscriptions that have failed to properly unsubscribe and now we need to take care of them
private readonly HashSet<MemberSubscription> _collectSubscriptions = new HashSet<MemberSubscription>();
private readonly object _collectMutex = new object();
private Task _collectTask; // the task that collects ghost subscriptions

// static ctor: registers the HConsole tracing prefix for this type
static ClusterEvents()
{
    HConsole.Configure(x => x.Configure<ClusterEvents>().SetPrefix("CLUST.EVTS"));
}
/// <summary>
/// Initializes a new instance of the <see cref="ClusterEvents"/> class.
/// </summary>
/// <param name="clusterState">The cluster state.</param>
/// <param name="clusterMessaging">The cluster messaging service.</param>
/// <param name="terminateConnections">The service that terminates failed connections.</param>
/// <param name="clusterMembers">The cluster members service.</param>
public ClusterEvents(ClusterState clusterState, ClusterMessaging clusterMessaging, TerminateConnections terminateConnections, ClusterMembers clusterMembers)
{
    _clusterState = clusterState;
    _clusterMessaging = clusterMessaging;
    _clusterMembers = clusterMembers;
    _logger = _clusterState.LoggerFactory.CreateLogger<ClusterEvents>();
    _scheduler = new DistributedEventScheduler(_clusterState.LoggerFactory);
    _terminateConnections = terminateConnections;

    // wire the built-in subscriptions (these fields and the _object*/_partitionLost
    // handlers are declared in another part of this partial class)
    _objectLifecycleEventSubscription = new ObjectLifecycleEventSubscription(_clusterState, this)
    {
        ObjectCreated = args => _objectCreated.AwaitEach(args),
        ObjectDestroyed = args => _objectDestroyed.AwaitEach(args)
    };

    _partitionLostEventSubscription = new PartitionLostEventSubscription(_clusterState, this, clusterMembers)
    {
        PartitionLost = args => _partitionLost.AwaitEach(args)
    };
}
/// <summary>
/// (internal for tests only) Gets the subscriptions.
/// </summary>
/// <remarks>Maps subscription identifiers to cluster subscriptions (the master list).</remarks>
internal ConcurrentDictionary<Guid, ClusterSubscription> Subscriptions => _subscriptions;

/// <summary>
/// (internal for tests only) Gets the correlated subscriptions.
/// </summary>
/// <remarks>Maps correlation identifiers to cluster subscriptions, one entry per member connection.</remarks>
internal ConcurrentDictionary<long, ClusterSubscription> CorrelatedSubscriptions => _correlatedSubscriptions;

/// <summary>
/// (internal for tests only) Gets the ghost subscriptions that need to be collected.
/// </summary>
internal HashSet<MemberSubscription> CollectSubscriptions => _collectSubscriptions;
#region Add/Remove Subscriptions
// _connections is the list of known member connections
// connections are added & removed by handling the ConnectionOpened and ConnectionClosed events
// note: a connection may be opened yet not correspond to any member
//
// _subscriptions is the list of known cluster subscriptions
// subscriptions are added & removed by invoking Add/RemoveSubscriptionAsync
// each subscription in _subscriptions must be added to each connection in _connections
//
// when a subscription is added,
// - (mutex): capture _connections connections, add the subscription to _subscriptions
// - for each connection
// - add a correlated subscription (before adding on server!)
// - add the subscription to the connection on server
// - fails
// - remove the correlated subscription
// - because
// - the connection is not active anymore = skip & continue with other connections
// - any other reason = queue all member connections for collection
// - fail
// - try-add a member connection to subscription
// - fails (because the subscription is not active anymore)
// - remove the correlated subscription
// - nothing else to do: the subscription has been de-activated = clean
// - fail
//
// when a connection is added
// - (mutex): capture _subscriptions subscriptions, add the connection to _connections
// - for each subscription
// - add a correlated subscription (before adding on server!)
// - add the subscription to the connection on server
// - fails
// - remove the correlated subscription
// - because
// - the connection is not active anymore = queue all created member subscriptions for collection
// - for any other reason = terminate the connection
// - exit
// - try-add the corresponding member connection to the subscription
// - fails (because the subscription is not active anymore)
// - remove the correlated subscription
// - queue the member connection for collection
// - skip & continue with other subscriptions
//
//
// when a subscription is removed
// - (mutex): remove the subscription from _subscriptions
// - de-activate the subscription (cannot add member subscriptions anymore)
// - for each member connection in the subscription,
// - clear the correlated subscription
// - remove from server
// - fails because the connection is not active anymore = consider it a success
// - fails for any other reason = queue the member subscription for collection
//
// note: meanwhile, if a connection is
// - added: it will not see the subscription, or see it de-activated
// - removed: removing from server will be considered a success
//
//
// when a connection is removed
// - (mutex): capture _subscriptions subscriptions, remove the connection from _connections
// - for each subscription
// - remove the member subscription for the removed connection (cannot remove from server, connection is down)
// - remove the corresponding correlated subscription
// - if it is the cluster views connection
// - clear
// - remove the corresponding correlated subscription
// - start assigning another connection
//
// note: meanwhile, if a subscription is
// - added: it will not see the connection
// - removed: never mind, we just have nothing to remove
/// <summary>
/// Adds a subscription.
/// </summary>
/// <param name="subscription">The subscription.</param>
/// <param name="cancellationToken">A cancellation token.</param>
/// <returns>A task that will complete when the subscription has been added.</returns>
/// <exception cref="ArgumentNullException">The subscription is null.</exception>
/// <exception cref="InvalidOperationException">A subscription with the same identifier already exists.</exception>
/// <exception cref="HazelcastException">The subscription could not be installed on a member.</exception>
public async Task AddSubscriptionAsync(ClusterSubscription subscription, CancellationToken cancellationToken = default)
{
    if (subscription == null) throw new ArgumentNullException(nameof(subscription));

    // atomically get connections and add the subscription
    List<MemberConnection> connections;
    lock (_mutex)
    {
        // capture connections
        connections = _connections.ToList();

        // failing would be a nasty internal error but better report it
        if (!_subscriptions.TryAdd(subscription.Id, subscription))
            throw new InvalidOperationException("A subscription with the same identifier already exists.");
    }

    // add the subscription to each captured connection
    // TODO: consider adding in parallel
    foreach (var connection in connections)
    {
        if (cancellationToken.IsCancellationRequested)
        {
            CollectSubscription(subscription); // undo what has been done already
            cancellationToken.ThrowIfCancellationRequested(); // and throw
        }

        // this never throws
        var attempt = await AddSubscriptionAsync(subscription, connection, cancellationToken).CfAwait();

        switch (attempt.Value)
        {
            case InstallResult.Success: // good
            case InstallResult.ConnectionNotActive: // ignore it
                continue;

            case InstallResult.SubscriptionNotActive:
                // not active = has been de-activated = what has been done already has been undone
                throw new HazelcastException("Failed to add the subscription because it was removed.");

            case InstallResult.Failed: // also if canceled
                CollectSubscription(subscription); // undo what has been done already
                throw new HazelcastException("Failed to add subscription (see inner exception).", attempt.Exception);

            default:
                throw new NotSupportedException();
        }
    }
}
/// <summary>
/// Adds a subscription on one member; this method never throws.
/// </summary>
/// <param name="subscription">The cluster subscription to install.</param>
/// <param name="connection">The member connection to install it on.</param>
/// <param name="cancellationToken">A cancellation token.</param>
/// <returns>An attempt at installing the subscription on the member.</returns>
private async ValueTask<Attempt<InstallResult>> AddSubscriptionAsync(ClusterSubscription subscription, MemberConnection connection, CancellationToken cancellationToken)
{
    // if we already know the connection is not active anymore, ignore it
    // otherwise, install on this member - may throw if the connection goes away in the meantime
    if (!connection.Active) return Attempt.Fail(InstallResult.ConnectionNotActive);

    // add correlated subscription now so it is ready when the first events come
    var correlationId = _clusterState.GetNextCorrelationId();
    _correlatedSubscriptions[correlationId] = subscription;

    // the original subscription.SubscribeRequest message may be used concurrently,
    // we need a safe clone so we can use our own correlation id in a safe way.
    var subscribeRequest = subscription.SubscribeRequest.CloneWithNewCorrelationId(correlationId);

    // talk to the server
    ClientMessage response;
    try
    {
        response = await _clusterMessaging.SendToMemberAsync(subscribeRequest, connection, correlationId, cancellationToken).CfAwait();
    }
    catch (Exception e)
    {
        // undo the correlated subscription, report failure or inactive connection
        _correlatedSubscriptions.TryRemove(correlationId, out _);
        return connection.Active
            ? Attempt.Fail(InstallResult.Failed, e) // also if canceled
            : Attempt.Fail(InstallResult.ConnectionNotActive);
    }

    // try to add the member subscription to the cluster subscription
    // fails if the cluster subscription is not active anymore
    var memberSubscription = subscription.ReadSubscriptionResponse(response, connection);
    var added = subscription.TryAddMemberSubscription(memberSubscription);
    if (added) return InstallResult.Success;

    // the subscription is not active anymore
    _correlatedSubscriptions.TryRemove(correlationId, out _);
    CollectSubscription(memberSubscription); // server-side installation must be undone later
    return Attempt.Fail(InstallResult.SubscriptionNotActive);
}
/// <summary>
/// (background) Adds the captured subscriptions on one member - when a connection is added.
/// </summary>
/// <param name="connection">The new member connection.</param>
/// <param name="subscriptions">The cluster subscriptions to install on the connection.</param>
/// <param name="cancellationToken">A cancellation token.</param>
/// <remarks>
/// <para>This runs as a background task and therefore must never throw.</para>
/// </remarks>
private async Task AddSubscriptionsAsync(MemberConnection connection, IReadOnlyCollection<ClusterSubscription> subscriptions, CancellationToken cancellationToken)
{
    foreach (var subscription in subscriptions)
    {
        // NOTE(review): returning here leaves the connection's entry in _subscribeTasks;
        // preserved as-is - confirm that disposal clears the dictionary
        if (cancellationToken.IsCancellationRequested) return;

        // this never throws
        var attempt = await AddSubscriptionAsync(subscription, connection, cancellationToken).CfAwait();

        if (attempt.Value == InstallResult.ConnectionNotActive)
        {
            // the connection is not active anymore = what has been done already has been
            // undone, and there is no point trying the remaining subscriptions on it
            // (fix: the previous switch-based code used 'break' inside the switch, which
            // only exits the switch in C#, so the loop kept going - now it actually stops)
            break;
        }

        if (attempt.Value == InstallResult.Failed)
        {
            // failed to talk to the server - this connection is not working - terminate it
            // and stop: the remaining subscriptions would fail the same way
            _terminateConnections.Add(connection);
            break;
        }

        // Success, SubscriptionNotActive (ignored) or anything else:
        // proceed with the next subscription
    }

    // we are done now
    lock (_subscribeTasksMutex) _subscribeTasks.Remove(connection);
}
/// <summary>
/// Removes a subscription.
/// </summary>
/// <param name="subscriptionId">The unique identifier of the subscription.</param>
/// <param name="cancellationToken">A cancellation token.</param>
/// <returns>Whether the subscription was removed.</returns>
/// <remarks>
/// <para>This may throw if something goes wrong. In this case, the subscription
/// is de-activated but remains in the lists, so that it is possible to try again.</para>
/// </remarks>
public async ValueTask<bool> RemoveSubscriptionAsync(Guid subscriptionId, CancellationToken cancellationToken = default)
{
    // get and remove the subscription
    ClusterSubscription subscription;
    lock (_mutex)
    {
        if (!_subscriptions.TryRemove(subscriptionId, out subscription))
            return false; // unknown subscription
    }

    await RemoveSubscriptionAsync(subscription, cancellationToken).CfAwait();
    return true;
}
/// <summary>
/// Removes a subscription: de-activates it, then removes each of its member subscriptions.
/// </summary>
/// <param name="subscription">The cluster subscription to remove.</param>
/// <param name="cancellationToken">A cancellation token (member removals run regardless of it).</param>
private async ValueTask RemoveSubscriptionAsync(ClusterSubscription subscription, CancellationToken cancellationToken)
{
    // de-activate the subscription: all further events will be ignored
    subscription.Deactivate();

    // for each member subscription
    foreach (var memberSubscription in subscription)
    {
        // runs them all regardless of cancellation

        // remove the correlated subscription
        _correlatedSubscriptions.TryRemove(memberSubscription.CorrelationId, out _);

        // remove from the server
        // and, if it fails, enqueue for collection
        if (await RemoveSubscriptionAsync(memberSubscription, cancellationToken).CfAwait())
            subscription.Remove(memberSubscription);
        else
            CollectSubscription(memberSubscription);
    }
}
/// <summary>
/// Removes a subscription from one member; returns whether the subscription can be
/// considered removed (including when the connection is already down).
/// </summary>
/// <param name="subscription">The member subscription to remove.</param>
/// <param name="cancellationToken">A cancellation token.</param>
/// <returns><c>true</c> if the subscription was removed (or the connection is down); otherwise <c>false</c>.</returns>
private async ValueTask<bool> RemoveSubscriptionAsync(MemberSubscription subscription, CancellationToken cancellationToken)
{
    // fast: if the connection is down, consider the subscription removed
    if (!subscription.Connection.Active) return true;

    try
    {
        // remove the member subscription = trigger the server-side un-subscribe
        // this *may* throw if we fail to talk to the member
        // this *may* return false for some reason
        var unsubscribeRequest = subscription.ClusterSubscription.CreateUnsubscribeRequest(subscription.ServerSubscriptionId);
        var responseMessage = await _clusterMessaging.SendToMemberAsync(unsubscribeRequest, subscription.Connection, cancellationToken).CfAwait();
        var removed = subscription.ClusterSubscription.ReadUnsubscribeResponse(responseMessage);
        return removed;
    }
    catch (Exception e)
    {
        // if the connection is down, consider the subscription removed
        if (!subscription.Connection.Active) return true;

        // otherwise something went wrong and maybe we want to try again
        _logger.LogError(e, "Caught an exception while unsubscribing to events.");
        return false;
    }
}
/// <summary>
/// Clears the subscriptions of a member whose connection is down: the server cannot be
/// reached anymore, so only the local data structures are cleaned up.
/// </summary>
/// <param name="subscriptions">The cluster subscriptions to clean.</param>
/// <param name="connection">The member connection that went down.</param>
private void ClearMemberSubscriptions(IEnumerable<ClusterSubscription> subscriptions, MemberConnection connection)
{
    foreach (var clusterSubscription in subscriptions)
    {
        // drop the member subscription for that connection, if any,
        // along with its correlated subscription entry
        if (!clusterSubscription.TryRemove(connection, out var removedSubscription)) continue;
        _correlatedSubscriptions.TryRemove(removedSubscription.CorrelationId, out _);
    }
}
#endregion
#region Cluster Members/Partitions Views
/// <summary>
/// Clears the connection currently supporting the cluster view event, if it matches the specified <paramref name="connection"/>.
/// </summary>
/// <param name="connection">A connection.</param>
/// <remarks>
/// <para>If <paramref name="connection"/> was supporting the cluster view event, and was not the last connection,
/// this starts a background task to assign another connection to support the cluster view event.</para>
/// </remarks>
private void ClearClusterViewsConnection(MemberConnection connection)
{
    // note: we do not "unsubscribe" - if we come here, the connection is gone

    lock (_clusterViewsMutex)
    {
        // if the specified client is *not* the cluster events client, ignore
        if (_clusterViewsConnection != connection)
            return;

        // otherwise, clear the connection
        _clusterViewsConnection = null;
        _correlatedSubscriptions.TryRemove(_clusterViewsCorrelationId, out _);
        _clusterViewsCorrelationId = 0;
        _logger.IfDebug()?.LogDebug($"Cleared cluster views connection (was {connection.Id.ToShortString()}).");

        // assign another connection (async) - ??= ensures a single assign task at a time
        _clusterViewsTask ??= AssignClusterViewsConnectionAsync(null, _cancel.Token);
    }
}
/// <summary>
/// Proposes a connection to support the cluster view event.
/// </summary>
/// <param name="connection">A connection.</param>
/// <remarks>
/// <para>if there is no connection currently supporting the cluster view event, then this starts a background
/// task to assign a connection to support the event, trying the supplied <paramref name="connection"/> first.</para>
/// </remarks>
private void ProposeClusterViewsConnection(MemberConnection connection)
{
    lock (_clusterViewsMutex)
    {
        // ??= ensures that only one assign task runs at a time
        if (_clusterViewsConnection == null)
            _clusterViewsTask ??= AssignClusterViewsConnectionAsync(connection, _cancel.Token);
    }
}
/// <summary>
/// Assigns a connection to support the cluster view event.
/// </summary>
/// <param name="connection">An optional candidate connection.</param>
/// <param name="cancellationToken">A cancellation token.</param>
/// <returns>A task that will complete when a connection has been assigned to handle the cluster views event.</returns>
private async Task AssignClusterViewsConnectionAsync(MemberConnection connection, CancellationToken cancellationToken)
{
    // TODO: consider throttling

    // fast path: return a random connection if one is available, else wait for one
    ValueTask<MemberConnection> WaitRandomConnection(CancellationToken token)
    {
        var c = _clusterMembers.GetRandomConnection();
        return c == null
            ? WaitRandomConnection2(token)
            : new ValueTask<MemberConnection>(c);
    }

    // slow path: loop until an active connection is available, waiting on the
    // _connectionOpened completion source which is signaled when a connection opens
    async ValueTask<MemberConnection> WaitRandomConnection2(CancellationToken token)
    {
        MemberConnection c = null;
        while (!token.IsCancellationRequested &&
               ((c = _clusterMembers.GetRandomConnection()) == null || !c.Active))
        {
            lock (_mutex) _connectionOpened = new TaskCompletionSource<object>();
            using var reg = token.Register(() => _connectionOpened.TrySetCanceled());
            await _connectionOpened.Task.CfAwait();
            lock (_mutex) _connectionOpened = null;
        }
        return c;
    }

    // this will only exit once a connection is assigned, or the task is
    // cancelled, when the cluster goes down (and never up again)
    while (!cancellationToken.IsCancellationRequested)
    {
        connection ??= await WaitRandomConnection(cancellationToken).CfAwait();

        // try to subscribe, relying on the default invocation timeout,
        // so this is not going to last forever - we know it will end
        var correlationId = _clusterState.GetNextCorrelationId();
        if (!await SubscribeToClusterViewsAsync(connection, correlationId, cancellationToken).CfAwait()) // does not throw
        {
            // failed => try another connection
            connection = null;
            continue;
        }

        // success!
        lock (_clusterViewsMutex)
        {
            if (connection.Active)
            {
                _clusterViewsConnection = connection;
                _clusterViewsCorrelationId = correlationId;
                _clusterViewsTask = null; // allows a new assign task to be started later
                HConsole.WriteLine(this, $"ClusterViews: connection {connection.Id.ToShortString()} [{correlationId}]");
                break;
            }
        }

        // if the connection was not active anymore, we have rejected it
        // if the connection was active, and we have accepted it, and it de-activates,
        // then ClearClusterViewsConnection will deal with it
    }
}
/// <summary>
/// Subscribes a connection to the cluster view event.
/// </summary>
/// <param name="connection">The connection.</param>
/// <param name="correlationId">The correlation identifier.</param>
/// <param name="cancellationToken">A cancellation token.</param>
/// <returns>A task that will complete when the subscription has been processed, and represent whether it was successful.</returns>
private async Task<bool> SubscribeToClusterViewsAsync(MemberConnection connection, long correlationId, CancellationToken cancellationToken)
{
    // aka subscribe to member/partition view events
    _logger.IfDebug()?.LogDebug($"Subscribe to cluster views on connection {connection.Id.ToShortString()}.");

    // handles the event
    ValueTask HandleEventAsync(ClientMessage message, object _)
        => ClientAddClusterViewListenerCodec.HandleEventAsync(message,
            HandleCodecMemberViewEvent,
            HandleCodecPartitionViewEvent,
            connection.Id,
            _clusterState.LoggerFactory);

    try
    {
        // register the correlated subscription *before* sending, so the very first
        // events (which may arrive before the response) can be handled
        var subscribeRequest = ClientAddClusterViewListenerCodec.EncodeRequest();
        _correlatedSubscriptions[correlationId] = new ClusterSubscription(HandleEventAsync);
        _ = await _clusterMessaging.SendToMemberAsync(subscribeRequest, connection, correlationId, cancellationToken).CfAwait();
        _logger.IfDebug()?.LogDebug($"Subscribed to cluster views on connection {connection.Id.ToShortString()}.");
        return true;
    }
    catch (TargetDisconnectedException)
    {
        _correlatedSubscriptions.TryRemove(correlationId, out _);

        // if the connection has died... and that can happen when switching members... no need to worry the
        // user with a warning, a debug message should be enough
        _logger.IfDebug()?.LogDebug($"Failed to subscribe to cluster views on connection {connection.Id.ToShortString()} (disconnected), may retry.");
        return false;
    }
    catch (Exception e)
    {
        _correlatedSubscriptions.TryRemove(correlationId, out _);
        _logger.IfWarning()?.LogWarning(e, $"Failed to subscribe to cluster views on connection {connection.Id.ToShortString()}, may retry.");
        return false;
    }
}
/// <summary>
/// Handles the 'members view' event.
/// </summary>
/// <param name="version">The version.</param>
/// <param name="members">The members.</param>
/// <param name="state">A state object.</param>
private async ValueTask HandleCodecMemberViewEvent(int version, ICollection<MemberInfo> members, object state)
{
    // let the members service process the new view; a null result means the
    // view was skipped (e.g. due to its version) and there is nothing to raise
    var args = await _clusterMembers.SetMembersAsync(version, members).CfAwait();
    if (args == null) return;

    // raise the members-updated events (On... does not throw)
    await _membersUpdated.AwaitEach(args).CfAwait();
}
/// <summary>
/// Handles the 'partitions view' event.
/// </summary>
/// <param name="version">The version.</param>
/// <param name="partitions">The partitions.</param>
/// <param name="state">A state object (the source connection identifier, boxed as a <see cref="Guid"/>).</param>
private async ValueTask HandleCodecPartitionViewEvent(int version, IList<KeyValuePair<Guid, IList<int>>> partitions, object state)
{
    var clientId = (Guid) state;

    // notify the partitioner; false means the view was not applied (nothing to raise)
    var updated = _clusterState.Partitioner.NotifyPartitionView(clientId, version, MapPartitions(partitions));
    if (!updated) return;

    // signal once
    //if (Interlocked.CompareExchange(ref _firstPartitionsViewed, 1, 0) == 0)
    //    _firstPartitionsView.Release();

    // raise event
    // On... does not throw
    await _partitionsUpdated.AwaitEach().CfAwait();
}
/// <summary>
/// Maps partitions from the event representation to our internal representation.
/// </summary>
/// <param name="partitions">The event representation of partitions (owner member to partition identifiers).</param>
/// <returns>Our internal representation of partitions (partition identifier to owner member).</returns>
private static Dictionary<int, Guid> MapPartitions(IEnumerable<KeyValuePair<Guid, IList<int>>> partitions)
{
    var map = new Dictionary<int, Guid>();
    foreach (var pair in partitions)
    {
        // invert the relation: each partition identifier maps to its owning member;
        // a later entry for the same partition overwrites an earlier one
        var ownerId = pair.Key;
        foreach (var partitionId in pair.Value)
            map[partitionId] = ownerId;
    }
    return map;
}
#endregion
#region Collect Ghosts
// queues every member subscription of the given cluster subscription for collection,
// and makes sure the background collect task is running
private void CollectSubscription(ClusterSubscription subscription)
{
    lock (_collectMutex)
    {
        foreach (var memberSubscription in subscription)
        {
            _collectSubscriptions.Add(memberSubscription);
        }

        if (_collectTask == null) _collectTask = CollectSubscriptionsAsync(_cancel.Token);
    }
}
// queues a single member subscription for collection,
// and makes sure the background collect task is running
private void CollectSubscription(MemberSubscription subscription)
{
    lock (_collectMutex)
    {
        _collectSubscriptions.Add(subscription);
        if (_collectTask == null) _collectTask = CollectSubscriptionsAsync(_cancel.Token);
    }
}
/// <summary>
/// Body of the subscription collection task: periodically tries to remove "ghost" member
/// subscriptions (those which failed to properly unsubscribe) until none remain.
/// </summary>
/// <param name="cancellationToken">A cancellation token.</param>
private async Task CollectSubscriptionsAsync(CancellationToken cancellationToken)
{
    List<MemberSubscription> removedSubscriptions = null;

    HConsole.WriteLine(this, "CollectSubscription starting");

    // initial delay before the first collection attempt
    // if canceled, will be awaited properly
    await Task.Delay(_clusterState.Options.Events.SubscriptionCollectDelay, cancellationToken).CfAwait();

    while (!cancellationToken.IsCancellationRequested)
    {
        // capture subscriptions to collect
        List<MemberSubscription> subscriptions;
        lock (_collectMutex)
        {
            subscriptions = _collectSubscriptions.ToList();
        }

        HConsole.WriteLine(this, $"CollectSubscription loop for {subscriptions.Count} member subscriptions");

        // try to remove captured subscriptions
        // if canceled, will be awaited properly
        removedSubscriptions?.Clear();
        var timeLimit = DateTime.Now - _clusterState.Options.Events.SubscriptionCollectTimeout;
        foreach (var subscription in subscriptions)
        {
            HConsole.WriteLine(this, "CollectSubscription collects");
            try
            {
                // a subscription is collected when the server confirms its removal,
                // or when it has been de-activated for longer than the collect timeout
                var removed = await RemoveSubscriptionAsync(subscription, cancellationToken).CfAwait();
                if (removed || subscription.ClusterSubscription.DeactivateTime < timeLimit)
                {
                    subscription.ClusterSubscription.Remove(subscription);
                    (removedSubscriptions ??= new List<MemberSubscription>()).Add(subscription);
                }
            }
            catch (OperationCanceledException)
            {
                return; // cancelled - stop everything
            }
            catch (Exception e)
            {
                // keep collecting the others; this one will be retried on the next pass
                _logger.LogError(e, "An error occurred while collecting subscriptions.");
            }
        }

        HConsole.WriteLine(this, $"CollectSubscription collected {removedSubscriptions?.Count ?? 0} subscriptions");

        // update subscriptions to collect
        // none remaining = exit the task
        lock (_collectMutex)
        {
            if (removedSubscriptions != null)
            {
                foreach (var subscription in removedSubscriptions)
                    _collectSubscriptions.Remove(subscription);
            }
            if (_collectSubscriptions.Count == 0)
            {
                HConsole.WriteLine(this, "CollectSubscription exits");
                _collectTask = null; // a new task will be started if more ghosts appear
                return;
            }
        }

        HConsole.WriteLine(this, "CollectSubscription waits");

        // else, wait + loop / try again
        // if canceled, will be awaited properly
        await Task.Delay(_clusterState.Options.Events.SubscriptionCollectPeriod, cancellationToken).CfAwait();
    }
}
#endregion
#region Events
/// <summary>
/// Gets or sets an action that will be executed when members have been updated.
/// </summary>
public Func<MembersUpdatedEventArgs, ValueTask> MembersUpdated
{
    get => _membersUpdated;
    set
    {
        // refuse changes once the cluster state properties have become read-only
        _clusterState.ThrowIfPropertiesAreReadOnly();
        _membersUpdated = value;
    }
}
/// <summary>
/// Gets or sets an action that will be executed when partitions have been updated.
/// </summary>
public Func<ValueTask> PartitionsUpdated
{
    get => _partitionsUpdated;
    set
    {
        // refuse changes once the cluster state properties have become read-only
        _clusterState.ThrowIfPropertiesAreReadOnly();
        _partitionsUpdated = value;
    }
}
#endregion
#region Event Handlers
/// <summary>
/// Handles a connection being created.
/// </summary>
/// <param name="connection"></param>
public void OnConnectionCreated(MemberConnection connection)
{
    // wires reception of event messages: every event message received on
    // this connection is routed to OnReceivedEvent for dispatching
    connection.ReceivedEvent += OnReceivedEvent;
}
/// <summary>
/// Handles a connection being opened.
/// </summary>
#pragma warning disable IDE0060 // Remove unused parameters
#pragma warning disable CA1801 // Review unused parameters
// unused parameters are required, this is an event handler
public ValueTask OnConnectionOpened(MemberConnection connection, bool isFirstEver, bool isFirst, bool isNewCluster)
#pragma warning restore CA1801
#pragma warning restore IDE0060
{
    // atomically add the connection and capture known subscriptions
    List<ClusterSubscription> subscriptions;
    lock (_mutex)
    {
        _connections.Add(connection);
        subscriptions = _subscriptions.Values.ToList();
        // signal any waiter that a connection is now open
        _connectionOpened?.TrySetResult(null);
    }

    // in case we don't have one already...
    ProposeClusterViewsConnection(connection);

    // for this new connection... we need to add all known subscriptions to it, and this is
    // going to happen in the background - yes, it means that the connection could be used
    // even before all subscriptions have been added and thus some events may fail to trigger,
    // we don't offer any strict guarantee on events anyways
    lock (_subscribeTasksMutex)
    {
        // _subscribeTasks is set to null when disposal starts (see DisposeAsync) -
        // don't start new background work in that case
        if (_subscribeTasks != null)
            _subscribeTasks[connection] = AddSubscriptionsAsync(connection, subscriptions, _cancel.Token);
    }

    return default;
}
/// <summary>
/// Handles a connection being closed.
/// </summary>
public ValueTask OnConnectionClosed(MemberConnection connection)
{
    // atomically remove the connection and capture known subscriptions
    List<ClusterSubscription> knownSubscriptions;
    lock (_mutex)
    {
        _connections.Remove(connection);
        knownSubscriptions = _subscriptions.Values.ToList();
    }

    // only clear the subscriptions locally: the client is not connected to that
    // member anymore, so there is no way to unsubscribe on the server side
    ClearMemberSubscriptions(knownSubscriptions, connection);

    // if that was the cluster views connection, clear it - this also starts
    // the task that will assign another one
    ClearClusterViewsConnection(connection);

    return default;
}
/// <summary>
/// Handles an event message.
/// </summary>
/// <param name="message">The event message.</param>
public void OnReceivedEvent(ClientMessage message)
{
    HConsole.WriteLine(this, "Handle event message");

    // get the matching subscription
    if (!_correlatedSubscriptions.TryGetValue(message.CorrelationId, out var subscription))
    {
        _clusterState.Instrumentation.CountMissedEvent(message);
        // use a message template (not string interpolation) so the correlation id is
        // captured as a structured logging property instead of being baked into the text
        _logger.LogWarning("No event handler for [{CorrelationId}]", message.CorrelationId);
        HConsole.WriteLine(this, $"No event handler for [{message.CorrelationId}]");
        return;
    }

    // schedule the event - will run async, but sequentially per-partition
    // (queues the event, returns immediately, does not await on handlers)
    _scheduler.Add(subscription, message);
}
#endregion
/// <inheritdoc />
public async ValueTask DisposeAsync()
{
    // ensure dispose-once semantics
    if (Interlocked.CompareExchange(ref _disposed, 1, 0) == 1)
        return;

    HConsole.WriteLine(this, "Dispose scheduler.");
    await _scheduler.DisposeAsync().CfAwait();

    HConsole.WriteLine(this, "Dispose subscriptions.");
    await _objectLifecycleEventSubscription.DisposeAsync().CfAwait();
    await _partitionLostEventSubscription.DisposeAsync().CfAwait();

    // cancel the background tasks, then await them (cancellation is swallowed, not rethrown)
    _cancel.Cancel();

    HConsole.WriteLine(this, "Await cluster views task.");
    await _clusterViewsTask.MaybeNull().CfAwaitCanceled();

    HConsole.WriteLine(this, "Dispose collect task.");
    await _collectTask.MaybeNull().CfAwaitCanceled();

    HConsole.WriteLine(this, "Await subscribe tasks.");
    Task[] tasks;
    lock (_subscribeTasksMutex)
    {
        // NOTE(review): assumes _subscribeTasks is never null before disposal - confirm
        tasks = _subscribeTasks.Values.ToArray();
        // null here prevents OnConnectionOpened from adding more work from now on
        _subscribeTasks = null;
    }
    await Task.WhenAll(tasks).CfAwait();

    _cancel.Dispose();

    // connection is going down
    // it will be disposed as well as all other connections
    // and subscriptions will terminate
    _clusterViewsConnection = null;

    HConsole.WriteLine(this, "Down.");
}
}
}
<|start_filename|>src/Hazelcast.Net.Testing/RemoteTestBase.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Threading.Tasks;
using Hazelcast.Configuration;
using Hazelcast.Core;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
namespace Hazelcast.Testing
{
/// <summary>
/// Provides a base class for Hazelcast tests that require a remote environment.
/// </summary>
public abstract partial class RemoteTestBase : HazelcastTestBase
{
    /// <summary>
    /// Creates the Hazelcast options used by the tests.
    /// </summary>
    protected virtual HazelcastOptions CreateHazelcastOptions()
    {
        var options = HazelcastOptions.Build(builder =>
        {
            builder.AddHazelcastAndDefaults(null);
            builder.AddUserSecrets(GetType().Assembly, true);
        }, null, ConfigurationSecretsKey);

        // target the default local member and plug the test logger factory in
        options.Networking.Addresses.Clear();
        options.Networking.Addresses.Add("127.0.0.1:5701");
        options.LoggerFactory.Creator = () => LoggerFactory;

        return options;
    }

    /// <summary>
    /// Gets the configuration secrets key.
    /// </summary>
    /// <remarks>
    /// <para>By default this is "hazelcast", which means that any secrets configuration
    /// options named "hazelcast:something" will be merged into configuration, but a
    /// different value e.g. "hazelcast-tests-something" can be specified to select
    /// different groups of secrets for different tests.</para>
    /// </remarks>
    protected virtual string ConfigurationSecretsKey { get; } = HazelcastOptions.Hazelcast;

    /// <summary>
    /// Creates and starts a client with default options.
    /// </summary>
    /// <returns>A client.</returns>
    protected virtual async ValueTask<IHazelcastClient> CreateAndStartClientAsync()
    {
        Logger.LogInformation("Create new client");
        return await HazelcastClientFactory.StartNewClientAsync(CreateHazelcastOptions()).CfAwait();
    }

    /// <summary>
    /// Creates and starts a client with configured options.
    /// </summary>
    /// <returns>A client.</returns>
    protected virtual async ValueTask<IHazelcastClient> CreateAndStartClientAsync(Action<HazelcastOptions> configure)
    {
        Logger.LogInformation("Create new client");
        var options = CreateHazelcastOptions();
        configure(options);
        return await HazelcastClientFactory.StartNewClientAsync(options).CfAwait();
    }

    /// <summary>
    /// Generates a random key that maps to a partition.
    /// </summary>
    /// <param name="client">The client.</param>
    /// <param name="partitionId">The identifier of the partition.</param>
    /// <returns>A random key that maps to the specified partition.</returns>
    protected object GenerateKeyForPartition(IHazelcastClient client, int partitionId)
    {
        var clientInternal = (HazelcastClient) client;

        // draw random keys until one hashes into the requested partition
        for (; ; )
        {
            var candidate = TestUtils.RandomString();
            var candidateData = clientInternal.SerializationService.ToData(candidate);
            if (clientInternal.Cluster.Partitioner.GetPartitionId(candidateData.PartitionHash) == partitionId)
                return candidate;
        }
    }
}
}
<|start_filename|>src/Hazelcast.Net/Sql/SqlService.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Hazelcast.Clustering;
using Hazelcast.Core;
using Hazelcast.Protocol.Codecs;
using Hazelcast.Serialization;
namespace Hazelcast.Sql
{
internal class SqlService : ISqlService
{
    private readonly Cluster _cluster;
    private readonly SerializationService _serializationService;

    internal SqlService(Cluster cluster, SerializationService serializationService)
    {
        _cluster = cluster;
        _serializationService = serializationService;
    }

    /// <inheritdoc/>
    public async Task<ISqlQueryResult> ExecuteQueryAsync(string sql, object[] parameters = null, SqlStatementOptions options = null, CancellationToken cancellationToken = default)
    {
        parameters ??= Array.Empty<object>();
        options ??= SqlStatementOptions.Default;

        var queryId = SqlQueryId.FromMemberId(_cluster.ClientId);

        SqlRowMetadata metadata;
        SqlPage firstPage;
        try
        {
            (metadata, firstPage) = await FetchFirstPageAsync(queryId, sql, parameters, options, cancellationToken).CfAwait();
        }
        catch (OperationCanceledException)
        {
            // catch the base OperationCanceledException, not only the derived
            // TaskCanceledException, so that a cancellation detected before the request
            // was sent (ThrowIfCancellationRequested) is handled on this path too:
            // maybe, the server is running the query, so better notify it
            // for any other exception: assume that the query did not start
            await CloseAsync(queryId).CfAwaitNoThrow(); // swallow the exception, nothing we can do really
            throw;
        }

        return new SqlQueryResult(_serializationService, metadata, firstPage, options.CursorBufferSize, FetchNextPageAsync, queryId, CloseAsync, cancellationToken);
    }

    /// <inheritdoc/>
    public async Task<long> ExecuteCommandAsync(string sql, object[] parameters = null, SqlStatementOptions options = null, CancellationToken cancellationToken = default)
    {
        parameters ??= Array.Empty<object>();
        options ??= SqlStatementOptions.Default;

        var queryId = SqlQueryId.FromMemberId(_cluster.ClientId);

        // commands self-close when returning = no need to close anything
        // and... in case token is cancelled, it's pretty much the same
        return await FetchUpdateCountAsync(queryId, sql, parameters, options, cancellationToken).CfAwait();
    }

    // sends the SQL execute request and validates the server response,
    // throwing HazelcastSqlException when no connection is available or the server reports an error
    private async Task<SqlExecuteCodec.ResponseParameters> FetchAndValidateResponseAsync(SqlQueryId queryId,
        string sql, object[] parameters, SqlStatementOptions options, SqlResultType resultType,
        CancellationToken cancellationToken = default)
    {
        cancellationToken.ThrowIfCancellationRequested();

        var connection = _cluster.Members.GetConnectionForSql();
        if (connection == null)
        {
            throw new HazelcastSqlException(_cluster.ClientId, SqlErrorCode.ConnectionProblem,
                "Client is not currently connected to the cluster."
            );
        }

        var serializedParameters = parameters
            .Select(p => _serializationService.ToData(p))
            .ToList(parameters.Length);

        var requestMessage = SqlExecuteCodec.EncodeRequest(
            sql,
            serializedParameters,
            (long)options.Timeout.TotalMilliseconds,
            options.CursorBufferSize,
            options.Schema,
            (byte)resultType,
            queryId
        );

        var responseMessage = await _cluster.Messaging.SendAsync(requestMessage, cancellationToken).CfAwait();
        var response = SqlExecuteCodec.DecodeResponse(responseMessage);

        if (response.Error != null) throw new HazelcastSqlException(_cluster.ClientId, response.Error);

        return response;
    }

    // fetches the first page of a query, expecting a row set (not an update count)
    private async Task<(SqlRowMetadata rowMetadata, SqlPage page)> FetchFirstPageAsync(SqlQueryId queryId, string sql, object[] parameters, SqlStatementOptions options, CancellationToken cancellationToken)
    {
        var result = await FetchAndValidateResponseAsync(queryId, sql, parameters, options, SqlResultType.Rows, cancellationToken).CfAwait();

        if (result.RowMetadata == null)
            throw new HazelcastSqlException(_cluster.ClientId, SqlErrorCode.Generic, "Expected row set in the response but got update count.");

        return (new SqlRowMetadata(result.RowMetadata), result.RowPage);
    }

    // fetches a subsequent page of an already-running query
    private async Task<SqlPage> FetchNextPageAsync(SqlQueryId queryId, int cursorBufferSize, CancellationToken cancellationToken)
    {
        var requestMessage = SqlFetchCodec.EncodeRequest(queryId, cursorBufferSize);
        var responseMessage = await _cluster.Messaging.SendAsync(requestMessage, cancellationToken).CfAwait();
        var response = SqlFetchCodec.DecodeResponse(responseMessage);

        if (response.Error != null) throw new HazelcastSqlException(_cluster.ClientId, response.Error);

        return response.RowPage;
    }

    // executes a command, expecting an update count (not a row set)
    private async Task<long> FetchUpdateCountAsync(SqlQueryId queryId, string sql, object[] parameters, SqlStatementOptions options, CancellationToken cancellationToken = default)
    {
        var result = await FetchAndValidateResponseAsync(queryId, sql, parameters, options, SqlResultType.UpdateCount, cancellationToken).CfAwait();

        if (result.RowMetadata != null)
            throw new HazelcastSqlException(_cluster.ClientId, SqlErrorCode.Generic, "Expected update count in the response but got row set.");

        return result.UpdateCount;
    }

    // notifies the server that the query can be closed, discarding the response
    private async Task CloseAsync(SqlQueryId queryId)
    {
        var requestMessage = SqlCloseCodec.EncodeRequest(queryId);
        var responseMessage = await _cluster.Messaging.SendAsync(requestMessage).CfAwait();
        _ = SqlCloseCodec.DecodeResponse(responseMessage);
    }
}
}
<|start_filename|>src/Hazelcast.Net/Clustering/ClusterMembers.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Hazelcast.Clustering.LoadBalancing;
using Hazelcast.Core;
using Hazelcast.Events;
using Hazelcast.Exceptions;
using Hazelcast.Models;
using Hazelcast.Networking;
using Microsoft.Extensions.Logging;
namespace Hazelcast.Clustering
{
/// <summary>
/// Provides the members management services of a cluster.
/// </summary>
internal class ClusterMembers : IAsyncDisposable
{
// number of times GetConnectionForSql retries picking a random member connection
private const int SqlConnectionRandomAttempts = 10;

// guards _connections, _members and _connected
private readonly object _mutex = new object();
private readonly ClusterState _clusterState;
private readonly ILogger _logger;
private readonly ILoadBalancer _loadBalancer;
private readonly TerminateConnections _terminateConnections;
// queue of members to connect; null when smart routing is disabled (see ctor)
private readonly MemberConnectionQueue _memberConnectionQueue;
// current members view; replaced as a whole on updates
private MemberTable _members;
// whether at least one actual member is connected
private bool _connected;
// whether members are reached through their public addresses (decided on first members view)
private bool _usePublicAddresses;

// flag + semaphore to wait for the first "partitions view" event
//private volatile int _firstPartitionsViewed;
//private SemaphoreSlim _firstPartitionsView = new SemaphoreSlim(0, 1);

// member id -> connection
// not concurrent, always managed through the mutex
private readonly Dictionary<Guid, MemberConnection> _connections = new Dictionary<Guid, MemberConnection>();
/// <summary>
/// Initializes a new instance of the <see cref="ClusterMembers"/> class.
/// </summary>
/// <param name="clusterState">The cluster state.</param>
/// <param name="terminateConnections">The terminate connections task.</param>
public ClusterMembers(ClusterState clusterState, TerminateConnections terminateConnections)
{
    HConsole.Configure(x => x.Configure<ClusterMembers>().SetPrefix("CLUST.MBRS"));

    _clusterState = clusterState;
    _terminateConnections = terminateConnections;
    // fall back to a random load balancer when none is configured
    _loadBalancer = clusterState.Options.LoadBalancer.Service ?? new RandomLoadBalancer();
    _logger = _clusterState.LoggerFactory.CreateLogger<ClusterMembers>();
    _members = new MemberTable();

    // members to connect (smart routing only - uni-socket keeps one connection)
    if (clusterState.IsSmartRouting)
    {
        // initialize the queue of members to connect
        // and the handler to re-queue members that have failed, *if* they are still members
        _memberConnectionQueue = new MemberConnectionQueue(clusterState.LoggerFactory);
        _memberConnectionQueue.ConnectionFailed += (_, member) =>
        {
            lock (_mutex)
            {
                if (_members.ContainsMember(member.Id))
                    _memberConnectionQueue.Add(member);
            }
        };
    }
}
// NOTES
// we cannot have two connections to the same member ID at the same time, but a member IP may change,
// so having a connection to a member ID does not mean that the member is connected, and we may have
// to switch a member's connection over to a new IP
// determines whether a member is connected.
// a member counts as connected when it is registered in the member table
// and a matching connection exists
private bool IsMemberConnected(MemberInfo member)
    => _members.ContainsMember(member.Id) && HasConnectionForMember(member);
// determines whether at least one member is connected.
// determines whether at least one member has a matching connection
private bool IsAnyMemberConnected()
{
    lock (_mutex)
    {
        foreach (var member in _members.Members)
        {
            if (HasConnectionForMemberLocked(member))
                return true;
        }
        return false;
    }
}
// must be invoked while holding _mutex: a member only counts as connected when the
// registered connection targets the member's current connect address
private bool HasConnectionForMemberLocked(MemberInfo member)
{
    return _connections.TryGetValue(member.Id, out var connection)
        && connection.Address == member.ConnectAddress;
}
// determines whether we have a connection for a member
// thread-safe variant of HasConnectionForMemberLocked
private bool HasConnectionForMember(MemberInfo member)
{
    lock (_mutex)
    {
        return HasConnectionForMemberLocked(member);
    }
}
// registers a connection for termination
// hands a connection over to the background termination task
public void TerminateConnection(MemberConnection connection)
{
    _terminateConnections.Add(connection);
}
#region Event Handlers
/// <summary>
/// Adds a connection.
/// </summary>
/// <param name="connection">The connection.</param>
/// <param name="isNewCluster">Whether the connection is the first connection to a new cluster.</param>
public void AddConnection(MemberConnection connection, bool isNewCluster)
{
    // accept every connection, regardless of whether there is a known corresponding member,
    // since the first connection is going to come before we get the first members view.
    lock (_mutex)
    {
        // don't add the connection if it is not active - if it *is* active, it still
        // could turn not-active anytime, but thanks to _mutex that will only happen
        // after the connection has been added
        if (!connection.Active) return;

        var contains = _connections.TryGetValue(connection.MemberId, out var existingConnection);

        if (contains)
        {
            if (existingConnection.Address != connection.Address)
            {
                // the member moved to another address: terminate the old connection,
                // the new one replaces it in the map below
                _terminateConnections.Add(existingConnection);
            }
            else
            {
                // we cannot accept this connection, it's a duplicate (internal error?)
                _logger.LogWarning($"Cannot add connection {connection.Id.ToShortString()} to member {connection.MemberId.ToShortString()} at {connection.Address}, a connection to that member at that address already exists.");
                _terminateConnections.Add(connection); // kill.kill.kill
                return;
            }
        }

        // add the connection
        _connections[connection.MemberId] = connection;

        if (isNewCluster)
        {
            // reset members
            // this is safe because... isNewCluster means that this is the very first connection and there are
            // no other connections yet and therefore we should not receive events and therefore no one
            // should invoke SetMembers.
            _members = new MemberTable();
        }

        // if this is a true member connection (the member is known and the address matches)
        if (_members.TryGetMember(connection.MemberId, out var member) && member.ConnectAddress == connection.Address)
        {
            // if this is the first connection to an actual member, change state & trigger event
            if (!_connected)
            {
                // change Started | Disconnected -> Connected, ignore otherwise, it could be ShuttingDown or Shutdown
                _logger.LogDebug($"Added connection {connection.Id.ToShortString()} to member {connection.MemberId.ToShortString()} at {connection.Address}, now connected.");
                _clusterState.ChangeState(ClientState.Connected, ClientState.Started, ClientState.Disconnected);
                _connected = true;
            }
            else if (_logger.IsEnabled(LogLevel.Debug))
            {
                var msg = $"Added connection {connection.Id.ToShortString()} to member {connection.MemberId.ToShortString()} at {connection.Address}";
                msg += existingConnection == null
                    ? "."
                    : $", replacing connection {existingConnection.Id.ToShortString()} at {existingConnection.Address}.";
                _logger.LogDebug(msg);
            }
        }
        else
        {
            // connection to a not-yet-known member: keep it, it may become a true member connection later
            _logger.IfDebug()?.LogDebug($"Added orphan connection {connection.Id.ToShortString()} at {connection.Address} (member {connection.MemberId.ToShortString()}).");
        }
    }
}
/// <summary>
/// Removes a connection.
/// </summary>
/// <param name="connection">The connection.</param>
public async Task RemoveConnectionAsync(MemberConnection connection)
{
    lock (_mutex)
    {
        // ignore unknown connections that were not added in the first place,
        // or that have been replaced with another connection to the same member
        if (!_connections.TryGetValue(connection.MemberId, out var c) || connection.Id != c.Id)
        {
            _logger.IfDebug()?.LogDebug($"Removed orphan connection {connection.Id.ToShortString()}.");
            return;
        }

        // remove the connection and check whether we are potentially disconnecting
        // ie whether we were connected, and either we don't have connections any more, or no member
        // is connected (has a matching connection)
        _connections.Remove(connection.MemberId);
        var disconnecting = _connected && (_connections.Count == 0 || !IsAnyMemberConnected());

        // if we are not disconnecting, we can return - we are done
        if (!disconnecting)
        {
            _logger.LogDebug($"Removed connection {connection.Id.ToShortString()} to member {connection.MemberId.ToShortString()}, remain connected.");

            // if we are connected,
            // and the disconnected member is still a member, queue it for reconnection
            if (_connected && _members.TryGetMember(connection.MemberId, out var member))
                _memberConnectionQueue?.Add(member);

            return;
        }
    }

    // otherwise, we might be disconnecting
    // but, the connection queue was running and might have added a new connection
    // we *need* a stable state in order to figure out whether we are disconnecting or not,
    // and if we are, we *need* to drain the queue (stop connecting more members) - and
    // the only way to achieve this is to suspend the queue
    if (_memberConnectionQueue != null) await _memberConnectionQueue.SuspendAsync().CfAwait();

    // note: multiple connections can close at once = multiple calls can reach this point
    var drain = false;
    try
    {
        lock (_mutex) // but we deal with calls one by one
        {
            if (_connected) // and only disconnect once
            {
                // if we have connections, and at least one member is connected (has a matching connection),
                // then the queue has added a new connection indeed and we are finally not disconnecting - we
                // can return - we are done
                if (_connections.Count > 0 && _members.Members.Any(x => _connections.ContainsKey(x.Id)))
                {
                    // if the disconnected member is still a member, queue it for reconnection
                    if (_members.TryGetMember(connection.MemberId, out var member))
                        _memberConnectionQueue?.Add(member);

                    _logger.LogDebug($"Removed connection {connection.Id.ToShortString()} to member {connection.MemberId.ToShortString()}, remain connected.");
                    return;
                }

                // otherwise, we're really disconnecting: flip _connected, and change the state
                _connected = false;
                _logger.LogDebug($"Removed connection {connection.Id.ToShortString()} to member {connection.MemberId.ToShortString()}, disconnecting.");
                _clusterState.ChangeState(ClientState.Disconnected, ClientState.Connected);

                // and drain the queue: stop connecting members, we need to fully reconnect
                drain = true;
            }
            else
            {
                _logger.LogDebug($"Removed connection {connection.Id.ToShortString()} to member {connection.MemberId.ToShortString()}, already disconnected (?).");
            }
        }
    }
    finally
    {
        // don't forget to resume the queue
        _memberConnectionQueue?.Resume(drain);
    }
}
// logs the new member table at Information level, flagging newly-added members
private void LogDiffs(MemberTable table, Dictionary<MemberInfo, int> diff)
{
    var msg = new StringBuilder();
    msg.Append("Members [");
    msg.Append(table.Count);
    msg.AppendLine("] {");
    foreach (var member in table.Members)
    {
        msg.Append("    ");
        msg.Append(member.ToShortString(true));
        // diff value 2 = added by this update (1 = removed, 3 = unchanged)
        if (diff.TryGetValue(member, out var d) && d == 2)
            msg.Append(" - new");
        msg.AppendLine();
    }
    msg.Append('}');
    _logger.LogInformation(msg.ToString());
}
/// <summary>
/// Set the members.
/// </summary>
/// <param name="version">The version.</param>
/// <param name="members">The members.</param>
/// <returns>The corresponding event arguments, if members were updated; otherwise <c>null</c>.</returns>
public async Task<MembersUpdatedEventArgs> SetMembersAsync(int version, ICollection<MemberInfo> members)
{
    // skip old sets
    if (version < _members.Version)
        return null;

    // note: members are compared by member.Id and member.ConnectAddress
    // as that is what makes a difference, really - the actual Address and
    // PublicAddress don't matter much for what we do

    // replace the table
    var previous = _members;
    var table = new MemberTable(version, members);
    lock (_mutex) _members = table;

    // notify the load balancer of the new list of members
    // (the load balancer can always return a member that is not a member
    // anymore, see note in GetMember)
    _loadBalancer.SetMembers(members.Select(x => x.Id));

    // if this is the initial list of members, determine how to connect to members
    if (previous.Count == 0)
    {
        var resolver = new ConnectAddressResolver(_clusterState.Options.Networking, _clusterState.LoggerFactory);
        if (!(members is IReadOnlyCollection<MemberInfo> mro)) throw new HazelcastException("panic"); // TODO: not exactly pretty
        _usePublicAddresses = await resolver.DetermineUsePublicAddresses(mro).CfAwaitNoThrow(false);
    }

    // update members
    foreach (var member in members) member.UsePublicAddress = _usePublicAddresses;

    // compute changes
    // count 1 for old members, 2 for new members, and then the result is
    // 1=removed, 2=added, 3=unchanged
    // MemberInfo overrides GetHashCode and can be used as a key here
    var diff = new Dictionary<MemberInfo, int>();
    // NOTE(review): previous cannot be null here (_members is always assigned),
    // this branch is purely defensive - confirm before removing
    if (previous == null)
    {
        foreach (var m in members)
            diff[m] = 2;
    }
    else
    {
        foreach (var m in previous.Members)
            diff[m] = 1;
        foreach (var m in members)
            if (diff.ContainsKey(m)) diff[m] += 2;
            else diff[m] = 2;
    }

    // log, if the members have changed (one of them at least is not 3=unchanged)
    if (_logger.IsEnabled(LogLevel.Information) && diff.Any(d => d.Value != 3))
        LogDiffs(table, diff);

    // process changes, gather events
    var added = new List<MemberInfo>();
    var removed = new List<MemberInfo>();
    foreach (var (member, status) in diff) // all members, old and new
    {
        switch (status)
        {
            case 1: // old but not new = removed
                HConsole.WriteLine(this, $"Removed {member}");
                removed.Add(member);

                // dequeue the member
                _memberConnectionQueue?.Remove(member.Id);
                break;

            case 2: // new but not old = added
                HConsole.WriteLine(this, $"Added {member}");
                added.Add(member);

                // queue the member for connection
                _memberConnectionQueue?.Add(member);
                break;

            case 3: // old and new = no change
                break;

            default:
                throw new NotSupportedException();
        }
    }

    var maybeDisconnected = false;
    lock (_mutex)
    {
        // removed members need to have their connection removed and terminated
        foreach (var member in removed)
        {
            if (_connections.TryGetValue(member.Id, out var c))
            {
                _logger.LogDebug($"Set members: remove obsolete connection {c.Id.ToShortString()} to {c.MemberId.ToShortString()} at {c.Address}.");
                _connections.Remove(member.Id);
                _terminateConnections.Add(c);
            }
        }

        // remove connections that don't match a member
        var d = members.ToDictionary(x => x.Id, x => x);
        List<MemberConnection> toRemove = null;
        foreach (var c in _connections.Values)
        {
            if (!d.TryGetValue(c.MemberId, out var m) || m.ConnectAddress != c.Address)
                (toRemove ??= new List<MemberConnection>()).Add(c);
        }
        if (toRemove != null)
        {
            foreach (var c in toRemove)
            {
                // fixed: was _connections.Remove(c.Id), but _connections is keyed by
                // member id, so removing by the connection's own id never removed anything
                _connections.Remove(c.MemberId);
                _logger.LogDebug($"Set members: remove orphaned connection {c.Id.ToShortString()} to {c.MemberId.ToShortString()} at {c.Address}.");
                _terminateConnections.Add(c);
            }
        }

        var isAnyMemberConnected = IsAnyMemberConnected();

        if (!_connected)
        {
            if (isAnyMemberConnected)
            {
                // if we were not connected and now one member happens to be connected then we are now connected
                // we hold the mutex so nothing bad can happen
                _logger.LogDebug($"Set members: {removed.Count} removed, {added.Count} added, {members.Count} total and at least one is connected, now connected.");
                _clusterState.ChangeState(ClientState.Connected, ClientState.Started, ClientState.Disconnected);
                _connected = true;
            }
            else
            {
                // remain disconnected
                _logger.LogDebug($"Set members: {removed.Count} removed, {added.Count} added, {members.Count} total and none is connected, remain disconnected.");
            }
        }
        else
        {
            if (isAnyMemberConnected)
            {
                // remain connected
                _logger.LogDebug($"Set members: {removed.Count} removed, {added.Count} added, {members.Count} total and at least one is connected, remain connected.");
            }
            else
            {
                // we probably are disconnected now
                // but the connection queue is running and might have re-added a member
                maybeDisconnected = true;
            }
        }
    }

    // if we cannot be disconnected, we can return immediately
    if (!maybeDisconnected) return new MembersUpdatedEventArgs(added, removed, members.ToList());

    // else, suspend the queue - we need stable connections before we can make a decision
    if (_memberConnectionQueue != null) await _memberConnectionQueue.SuspendAsync().CfAwait();

    var disconnected = false;
    try
    {
        lock (_mutex)
        {
            var isAnyMemberConnected = IsAnyMemberConnected();
            if (!isAnyMemberConnected)
            {
                // no more connected member, we are now disconnected
                _logger.LogDebug($"Set members: {removed.Count} removed, {added.Count} added, {members.Count} total and none connected, disconnecting.");
                _clusterState.ChangeState(ClientState.Disconnected, ClientState.Connected);
                _connected = false;
                disconnected = true;
            }
            else
            {
                _logger.LogDebug($"Set members: {removed.Count} removed, {added.Count} added, {members.Count} total and at least one is connected, remain connected.");
            }
        }
    }
    finally
    {
        // if we are now disconnected, make sure to drain the queue
        _memberConnectionQueue?.Resume(drain: disconnected);
    }

    return new MembersUpdatedEventArgs(added, removed, members.ToList());
}
#endregion
/// <summary>
/// Enumerates the members to connect.
/// </summary>
// NOTE(review): _memberConnectionQueue is null when smart routing is disabled -
// confirm callers only enumerate this in smart mode
public IAsyncEnumerable<MemberConnectionRequest> MemberConnectionRequests
    => _memberConnectionQueue;
/// <summary>
/// Gets a connection to a random member.
/// </summary>
/// <returns>A random client connection if available; otherwise <c>null</c>.</returns>
/// <remarks><para>The connection should be active, but there is no guarantee it will not become immediately inactive.</para></remarks>
public MemberConnection GetRandomConnection()
{
    MemberConnection connection;

    // In "smart routing" mode the clients connect to each member of the cluster. Since each
    // data partition uses the well known and consistent hashing algorithm, each client
    // can send an operation to the relevant cluster member, which increases the
    // overall throughput and efficiency. Smart mode is the default mode.
    //
    // In "uni-socket" mode the clients is required to connect to a single member, which
    // then behaves as a gateway for the other members. Firewalls, security, or some
    // custom networking issues can be the reason for these cases.

    if (_clusterState.IsSmartRouting)
    {
        // "smart" mode
        // limit the number of tries to the amount of known members, but
        // it is ok to try more than once, in order to return a connection
        // that has a reasonable chance of being usable
        var count = _loadBalancer.Count;
        for (var i = 0; i < count; i++)
        {
            var memberId = _loadBalancer.GetMember();

            // if the load balancer does not have members, break
            if (memberId == Guid.Empty)
                break;

            // we cannot guarantee that the connection we'll return will not correspond to
            // a member... that is not a member by the time it is used... but at least we
            // can make sure it *still* is a member now
            if (!_members.ContainsMember(memberId))
                continue;

            lock (_mutex)
            {
                if (_connections.TryGetValue(memberId, out connection))
                    return connection;
            }
        }
    }

    // either "smart" mode but the load balancer did not return a member,
    // or "uni-socket" mode where there should only be one connection
    lock (_mutex) connection = _connections.Values.FirstOrDefault();

    // may be null
    return connection;
}
/// <summary>
/// Gets connection to execute SQL queries/statements.
/// </summary>
/// <returns>A connection to a data member when possible, otherwise the first available
/// connection (a lite member), or <c>null</c> if no connection is available at all.</returns>
public MemberConnection GetConnectionForSql()
{
if (_clusterState.IsSmartRouting)
{
// There might be a race - the chosen member might be just connected or disconnected - try a
// couple of times, the memberOfLargerSameVersionGroup returns a random connection,
// we might be lucky...
for (var i = 0; i < SqlConnectionRandomAttempts; i++)
{
var member = GetMemberForSql();
if (member == null) break;
if (TryGetConnection(member.Id, out var memberConnection))
return memberConnection;
}
}
// Otherwise iterate over connections and return the first one that's not to a lite member
MemberConnection firstConnection = null;
lock (_mutex)
{
foreach (var (memberId, connection) in _connections)
{
// remember the very first connection as a fallback (may be to a lite member)
firstConnection ??= connection;
if (_members.TryGetMember(memberId, out var member) && !member.IsLiteMember)
return connection;
}
}
// Failed to get a connection to a data member, return first connection instead
// Lite members support DDL but not DML statements
// https://docs.hazelcast.com/hazelcast/5.0-SNAPSHOT/sql/sql-statements.html
return firstConnection;
}
/// <summary>
/// Finds a larger same-version group of data members from a collection of members.
/// Otherwise returns a random member from the group. If the same-version
/// groups have the same size, returns a member from the newer group.
/// </summary>
/// <returns><see cref="MemberInfo"/> if one is found or <c>null</c> otherwise.</returns>
/// <exception cref="InvalidOperationException">If there are more than 2 distinct member versions found.</exception>
public MemberInfo GetMemberForSql()
{
// partition data members into at most two groups by version (ignoring the
// patch number), counting the size of each group as we go
(MemberVersion version0, MemberVersion version1) = (null, null);
var (count0, count1) = (0, 0);
foreach (var member in _members.Members)
{
// lite members hold no data - skip them
if (member.IsLiteMember)
continue;
var memberVersion = member.Version;
if (version0 == null || version0.Equals(memberVersion, ignorePatchVersion: true))
{
version0 = memberVersion;
count0++;
}
else if (version1 == null || version1.Equals(memberVersion, ignorePatchVersion: true))
{
version1 = memberVersion;
count1++;
}
else
{
// a third distinct version: unsupported cluster state
var strVersion0 = version0.ToString(ignorePatchVersion: true);
var strVersion1 = version1.ToString(ignorePatchVersion: true);
var strVersion = memberVersion.ToString(ignorePatchVersion: true);
throw new InvalidOperationException(
$"More than 2 distinct member versions found: {strVersion0}, {strVersion1}, {strVersion}"
);
}
}
// no data members
if (count0 == 0)
return null;
// pick the larger group - on a tie, pick the newer version
// (note: when count1 == 0, version1 is null, but then count0 > count1 short-circuits
// the disjunction so 'version0 > version1' is never evaluated with a null operand)
int count;
MemberVersion version;
if (count0 > count1 || (count0 == count1 && version0 > version1))
(count, version) = (count0, version0);
else
(count, version) = (count1, version1);
// otherwise return a random member from the larger group
var randomIndex = RandomProvider.Next(count);
foreach (var member in _members.Members)
{
if (!member.IsLiteMember && member.Version.Equals(version, ignorePatchVersion: true))
{
randomIndex--;
if (randomIndex < 0)
return member;
}
}
// should never get here
throw new HazelcastException($"Reached unexpected state in {nameof(GetMemberForSql)}.");
}
/// <summary>
/// Gets the oldest active connection.
/// </summary>
/// <returns>The active connection with the earliest connect time, or <c>null</c> if no connection is active.</returns>
public MemberConnection GetOldestConnection()
{
    lock (_mutex)
    {
        var activeByAge = _connections.Values
            .Where(candidate => candidate.Active)
            .OrderBy(candidate => candidate.ConnectTime);
        return activeByAge.FirstOrDefault();
    }
}
/// <summary>
/// Tries to get a connection for a member.
/// </summary>
/// <param name="memberId">The identifier of the member.</param>
/// <param name="connection">When this method returns <c>true</c>, the connection to the member.</param>
/// <returns><c>true</c> if a connection to the specified member was found; otherwise <c>false</c>.</returns>
/// <para>The connection should be active, but there is no guarantee it will not become immediately inactive.</para>
public bool TryGetConnection(Guid memberId, out MemberConnection connection)
{
    lock (_mutex)
    {
        return _connections.TryGetValue(memberId, out connection);
    }
}
/// <summary>
/// Gets information about each member.
/// </summary>
/// <param name="liteOnly">Whether to only return lite members.</param>
/// <returns>The current members, each paired with its connection state.</returns>
public IEnumerable<MemberInfoState> GetMembersAndState(bool liteOnly = false)
{
    IEnumerable<MemberInfo> source = _members.Members;
    if (liteOnly) source = source.Where(m => m.IsLiteMember);

    // snapshot under the lock so the connection state is consistent
    lock (_mutex)
    {
        var states = new List<MemberInfoState>();
        foreach (var member in source)
            states.Add(new MemberInfoState(member, HasConnectionForMemberLocked(member)));
        return states;
    }
}
/// <summary>
/// Gets information about each member.
/// </summary>
/// <param name="liteOnly">Whether to only return lite members.</param>
/// <returns>The current members.</returns>
public IEnumerable<MemberInfo> GetMembers(bool liteOnly = false)
{
    IEnumerable<MemberInfo> members = _members.Members;
    if (!liteOnly) return members;

    // materialize the lite-member subset
    var liteMembers = new List<MemberInfo>();
    foreach (var member in members)
        if (member.IsLiteMember)
            liteMembers.Add(member);
    return liteMembers;
}
/// <summary>
/// Gets information about a member.
/// </summary>
/// <param name="memberId">The identifier of the member.</param>
/// <returns>Information about the specified member, or <c>null</c> if no member with the specified identifier was found.</returns>
public MemberInfo GetMember(Guid memberId)
{
    if (_members.TryGetMember(memberId, out var memberInfo))
        return memberInfo;
    return null;
}
/// <inheritdoc />
public async ValueTask DisposeAsync()
{
    // the member connection queue can be null (other members of this class guard
    // it with 'if (_memberConnectionQueue != null)' or '?.'), so guard here too
    // to avoid a NullReferenceException when disposing in that configuration
    if (_memberConnectionQueue != null)
        await _memberConnectionQueue.DisposeAsync().CfAwait();
}
}
}
<|start_filename|>src/Hazelcast.Net.Testing/Remote/RemoteControllerClient.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Threading;
using System.Threading.Tasks;
using Hazelcast.Core;
using Thrift.Protocol;
namespace Hazelcast.Testing.Remote
{
/// <summary>
/// Represents a remote controller client.
/// </summary>
public class RemoteControllerClient : RemoteController.Client, IRemoteControllerClient
{
// serializes all calls to the underlying Thrift client: one remote call at a time
private readonly SemaphoreSlim _lock = new SemaphoreSlim(1);
/// <summary>
/// Initializes a new instance of the <see cref="RemoteControllerClient"/> class.
/// </summary>
/// <param name="protocol">The protocol.</param>
private RemoteControllerClient(TProtocol protocol)
: base(protocol)
{ }
/// <summary>
/// Initializes a new instance of the <see cref="RemoteControllerClient"/> class.
/// </summary>
/// <param name="inputProtocol">The input protocol.</param>
/// <param name="outputProtocol">The output protocol.</param>
private RemoteControllerClient(TProtocol inputProtocol, TProtocol outputProtocol)
: base(inputProtocol, outputProtocol)
{ }
/// <summary>
/// Creates a new remote controller client.
/// </summary>
/// <param name="protocol">The protocol.</param>
/// <returns>A new remote controller client.</returns>
public static IRemoteControllerClient Create(TProtocol protocol)
=> new RemoteControllerClient(protocol);
/// <summary>
/// Creates a new remote controller client.
/// </summary>
/// <param name="inputProtocol">The input protocol.</param>
/// <param name="outputProtocol">The output protocol.</param>
/// <returns>A new remote controller client.</returns>
public static IRemoteControllerClient Create(TProtocol inputProtocol, TProtocol outputProtocol)
=> new RemoteControllerClient(inputProtocol, outputProtocol);
/// <summary>
/// Executes a Thrift call while holding the semaphore, so calls never overlap.
/// </summary>
/// <remarks>If the token is already canceled once the semaphore is acquired,
/// the action is skipped and <c>default</c> is returned instead of throwing.</remarks>
private async Task<T> WithLock<T>(Func<CancellationToken, Task<T>> action, CancellationToken cancellationToken)
{
await _lock.WaitAsync(cancellationToken).CfAwait();
try
{
if (!cancellationToken.IsCancellationRequested)
return await action(cancellationToken).CfAwait();
else
return default;
}
finally
{
_lock.Release();
}
}
/// <inheritdoc />
public Task<bool> PingAsync(CancellationToken cancellationToken = default)
=> WithLock(ping, cancellationToken);
/// <inheritdoc />
public Task<bool> CleanAsync(CancellationToken cancellationToken = default)
=> WithLock(clean, cancellationToken);
/// <inheritdoc />
public async Task<bool> ExitAsync(CancellationToken cancellationToken = default)
{
// ask the remote controller to exit, then close our side of the transport
var result = await WithLock(exit, cancellationToken).CfAwait();
InputProtocol?.Transport?.Close();
return result;
}
/// <inheritdoc />
public Task<Cluster> CreateClusterAsync(string hzVersion, string xmlconfig, CancellationToken cancellationToken = default)
=> WithLock(token => createCluster(hzVersion, xmlconfig, token), cancellationToken);
/// <inheritdoc />
public Task<Member> StartMemberAsync(string clusterId, CancellationToken cancellationToken = default)
=> WithLock(token => startMember(clusterId, token), cancellationToken);
/// <inheritdoc />
public Task<bool> ShutdownMemberAsync(string clusterId, string memberId, CancellationToken cancellationToken = default)
=> WithLock(token => shutdownMember(clusterId, memberId, token), cancellationToken);
/// <inheritdoc />
public Task<bool> TerminateMemberAsync(string clusterId, string memberId, CancellationToken cancellationToken = default)
=> WithLock(token => terminateMember(clusterId, memberId, token), cancellationToken);
/// <inheritdoc />
public Task<bool> SuspendMemberAsync(string clusterId, string memberId, CancellationToken cancellationToken = default)
=> WithLock(token => suspendMember(clusterId, memberId, token), cancellationToken);
/// <inheritdoc />
public Task<bool> ResumeMemberAsync(string clusterId, string memberId, CancellationToken cancellationToken = default)
=> WithLock(token => resumeMember(clusterId, memberId, token), cancellationToken);
/// <inheritdoc />
public Task<bool> ShutdownClusterAsync(string clusterId, CancellationToken cancellationToken = default)
=> WithLock(token => shutdownCluster(clusterId, token), cancellationToken);
/// <inheritdoc />
public Task<bool> TerminateClusterAsync(string clusterId, CancellationToken cancellationToken = default)
=> WithLock(token => terminateCluster(clusterId, token), cancellationToken);
/// <inheritdoc />
public Task<Cluster> SplitMemberFromClusterAsync(string memberId, CancellationToken cancellationToken = default)
=> WithLock(token => splitMemberFromCluster(memberId, token), cancellationToken);
/// <inheritdoc />
public Task<Cluster> MergeMemberToClusterAsync(string clusterId, string memberId, CancellationToken cancellationToken = default)
=> WithLock(token => mergeMemberToCluster(clusterId, memberId, token), cancellationToken);
/// <inheritdoc />
public Task<Response> ExecuteOnControllerAsync(string clusterId, string script, Lang lang, CancellationToken cancellationToken = default)
=> WithLock(token => executeOnController(clusterId, script, lang, token), cancellationToken);
}
}
<|start_filename|>src/Hazelcast.Net.Tests/Networking/NetworkAddressTests.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Sockets;
using Hazelcast.Networking;
using Hazelcast.Testing;
using Microsoft.Extensions.Logging.Abstractions;
using NUnit.Framework;
namespace Hazelcast.Tests.Networking
{
[TestFixture]
public class NetworkAddressTests
{
// each case: input string, whether parsing should succeed, expected ToString()
// (a null expected string means success is expected but the resolved value
// depends on actual DNS resolution and cannot be asserted)
[TestCase("127.0.0.1", true, "127.0.0.1:0")]
[TestCase("127.0.0.1:81", true, "127.0.0.1:81")]
[TestCase("1", true, "0.0.0.1:0")]
[TestCase(":82", false, "")]
[TestCase("666", true, "0.0.2.154:0")]
[TestCase("::1", true, "[::1]:0")]
[TestCase("::1%33", true, "[::1%33]:0")]
[TestCase("[::1]:81", true, "[::1]:81")]
[TestCase("[::1%33]:81", true, "[::1%33]:81")]
[TestCase("[65535]", false, "")]
[TestCase("www.hazelcast.com", true, null)] // cannot depend on actual resolution
[TestCase("www.hazelcast.com:81", true, null)] // cannot depend on actual resolution
[TestCase("www.hazelcast", false, "")]
[TestCase("x[::1]:81", false, "")]
[TestCase("[::81", false, "")]
[TestCase("[::1]x:81", false, "")]
[TestCase("[::1]:uh", false, "")]
[TestCase("[]", false, "")]
[TestCase("[##::'']:81", false, "")]
public void CanTryParse(string s, bool succeeds, string toString)
{
var result = NetworkAddress.TryParse(s, out NetworkAddress networkAddress);
if (succeeds) Assert.IsTrue(result); else Assert.IsFalse(result);
if (succeeds && toString != null) Assert.AreEqual(toString, networkAddress.ToString());
}
// verifies that equal addresses are == and produce the same hash code
[Test]
public void NetworkAddressEqualAndHash()
{
Assert.AreEqual(new NetworkAddress("127.0.0.1"), new NetworkAddress("127.0.0.1"));
Assert.AreEqual(new NetworkAddress("127.0.0.1").GetHashCode(), new NetworkAddress("127.0.0.1").GetHashCode());
// ReSharper disable once EqualExpressionComparison
Assert.IsTrue(new NetworkAddress("127.0.0.1") == new NetworkAddress("127.0.0.1"));
}
// sanity-check that IPEndPoint itself implements value equality (we rely on
// this when using endpoints as dictionary keys)
[Test]
// ReSharper disable once InconsistentNaming
public void IPEndPointEqualAndHash()
{
var d = new ConcurrentDictionary<IPEndPoint, string>();
d[new IPEndPoint(IPAddress.Parse("127.0.0.1"), 666)] = "a";
d[new IPEndPoint(IPAddress.Parse("127.0.0.1"), 666)] = "b";
Assert.AreEqual(1, d.Count);
Assert.AreEqual("b", d[new IPEndPoint(IPAddress.Parse("127.0.0.1"), 666)]);
}
// asserts that the first three addresses are host n with ports 5701-5703,
// and that their IsIpV6 flag matches v6 ("*" = any host, just check the port)
private static void AssertAddresses(IEnumerable<NetworkAddress> xx, string n, bool v6)
{
var xa = xx.ToArray();
foreach (var x in xa)
Console.WriteLine("  " + x);
Assert.That(xa.Length, Is.GreaterThanOrEqualTo(3));
for (var i = 0; i < 3; i++)
{
if (n == "*")
Assert.That(xa[i].ToString().EndsWith(":570" + (i + 1)));
else
Assert.That(xa[i].ToString(), Is.EqualTo(n + ":570" + (i + 1)));
Assert.That(xa[i].IsIpV6, Is.EqualTo(v6));
}
}
// builds an AddressProvider configured with a single address and returns
// the addresses it maps to
private static ICollection<NetworkAddress> GetAddresses(string address)
{
var options = new NetworkingOptions();
options.Addresses.Clear();
options.Addresses.Add(address);
var a = new AddressProvider(options, new NullLoggerFactory());
return a.CreateMapFromConfiguration().Values;
}
[Test]
public void Parse()
{
Assert.Throws<FormatException>(() => _ = NetworkAddress.Parse("[::1]:uh"));
var address = NetworkAddress.Parse("127.0.0.1:5701");
Assert.That(address.HostName, Is.EqualTo("127.0.0.1"));
Assert.That(address.Port, Is.EqualTo(5701));
Assert.That(NetworkAddress.TryParse("712.548", out var _), Is.False);
var addresses = GetAddresses("127.0.0.1");
Console.WriteLine("127.0.0.1");
AssertAddresses(addresses, "127.0.0.1", false);
addresses = GetAddresses("localhost");
Console.WriteLine("localhost");
AssertAddresses(addresses, "127.0.0.1", false);
// on Windows, this gets 127.0.0.1 but on Linux it gets what the host name
// maps to in /etc/hosts and by default on some systems (eg Debian) it can
// be 127.0.1.1 instead of 127.0.0.1
//
addresses = GetAddresses(Dns.GetHostName());
Console.Write(Dns.GetHostName());
var n = Dns.GetHostAddresses(Dns.GetHostName()).First(x => x.AddressFamily == AddressFamily.InterNetwork).ToString();
Console.WriteLine(" -> " + n);
AssertAddresses(addresses, n, false);
addresses = GetAddresses("::1");
Console.WriteLine("::1");
AssertAddresses(addresses, "[::1]", true);
// on Windows, this gets the various fe80 local addresses (but not the random one
// that we specified) - on Linux this gets nothing and it may eventually be an issue?
// there are various issues corresponding to this situation,
// see https://github.com/dotnet/runtime/issues/27534
// and fixes seem to be in the 5.0 milestone = n/a yet.
addresses = GetAddresses("fe80::bd0f:a8bc:6480:238b");
Console.WriteLine("fe80::bd0f:a8bc:6480:238b");
if (OS.IsWindows)
{
// test the first 3, we might get more depending on NICs
AssertAddresses(addresses, "*", true);
}
else
{
foreach (var a in addresses)
Console.WriteLine("  " + a);
}
}
// verifies == and != operators on parsed addresses
[Test]
public void Equality()
{
var address1 = NetworkAddress.Parse("127.0.0.1:5701");
var address2 = NetworkAddress.Parse("127.0.0.1:5702");
var address3 = NetworkAddress.Parse("127.0.0.1:5701");
Assert.That(address1 == address2, Is.False);
Assert.That(address1 != address2, Is.True);
Assert.That(address1 == address3, Is.True);
Assert.That(address1 != address3, Is.False);
}
// exercises each constructor overload and its argument validation
[Test]
public void Constructors()
{
var address = new NetworkAddress(IPAddress.Parse("127.0.0.1"));
Assert.That(address.HostName, Is.EqualTo("127.0.0.1"));
Assert.That(address.Port, Is.EqualTo(0));
address = new NetworkAddress(IPAddress.Parse("127.0.0.1"), 5702);
Assert.That(address.HostName, Is.EqualTo("127.0.0.1"));
Assert.That(address.Port, Is.EqualTo(5702));
var ipAddress = IPAddress.Parse("127.0.0.1");
var ipEndpoint = new IPEndPoint(ipAddress, 0);
address = new NetworkAddress(ipEndpoint);
Assert.That(address.HostName, Is.EqualTo("127.0.0.1"));
Assert.That(address.Port, Is.EqualTo(0));
Assert.Throws<ArgumentNullException>(() => _ = new NetworkAddress((IPAddress) null));
Assert.Throws<ArgumentOutOfRangeException>(() => _ = new NetworkAddress(ipAddress, -1));
#if !NET5_0_OR_GREATER
ipEndpoint.Address = null; // this is not even legal in NET 5+
Assert.Throws<ArgumentException>(() => _ = new NetworkAddress(ipEndpoint));
#endif
Assert.Throws<ArgumentNullException>(() => _ = new NetworkAddress((NetworkAddress)null, 5701));
Assert.Throws<ArgumentOutOfRangeException>(() => _ = new NetworkAddress(address, -1));
}
[Test]
public void Misc()
{
Assert.That(NetworkAddress.GetIPAddressByName("0.0.0.0"), Is.EqualTo(IPAddress.Any));
}
}
}
<|start_filename|>src/Hazelcast.Net.Tests/Networking/ClientSslTestBase.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.IO;
using System.Threading.Tasks;
using Hazelcast.Clustering;
using Hazelcast.Core;
using Hazelcast.Testing;
using Hazelcast.Testing.Remote;
using NUnit.Framework;
using Cluster = Hazelcast.Testing.Remote.Cluster;
namespace Hazelcast.Tests.Networking
{
public abstract class ClientSslTestBase : RemoteTestBase
{
    // subject name of the valid, signed test certificate
    protected const string ValidCertNameSigned = "foobar.hazelcast.com";

    // password protecting the test certificate files
    protected const string Password = "password";

    /// <summary>Gets or sets the remote controller client driving the test cluster.</summary>
    protected IRemoteControllerClient RcClient { get; set; }

    /// <summary>Gets or sets the remote test cluster.</summary>
    protected Cluster RcCluster { get; set; }

    /// <summary>Gets or sets the remote test member.</summary>
    protected Member RcMember { get; set; }

    [OneTimeSetUp]
    public async Task OneTimeSetUp()
    {
        RcClient = await ConnectToRemoteControllerAsync();
    }

    [OneTimeTearDown]
    public async Task OneTimeTearDown()
    {
        // remove temp files (certificate files created by StartClientAsync)
        // NOTE(review): this deletes every deletable file in the temp directory,
        // not only those created by this fixture - confirm this is intended
        foreach (var fileInfo in new DirectoryInfo(Path.GetTempPath()).GetFiles())
        {
            try { fileInfo.Delete(); }
            catch (Exception) { /* ignore */ }
        }

        // terminate & remove member (just in case)
        if (RcMember != null)
        {
            await RcClient.StopMemberAsync(RcCluster, RcMember);
            RcMember = null;
        }

        // terminate & remove client (needed) and cluster (just in case)
        if (RcClient != null)
        {
            if (RcCluster != null)
            {
                await RcClient.ShutdownClusterAsync(RcCluster).CfAwait();
                RcCluster = null;
            }
            await RcClient.ExitAsync().CfAwait();
            RcClient = null;
        }
    }

    [TearDown]
    public async Task TearDown()
    {
        // terminate & remove member
        if (RcMember != null)
        {
            await RcClient.StopMemberAsync(RcCluster, RcMember);
            RcMember = null;
        }

        // terminate & remove cluster
        if (RcCluster != null)
        {
            await RcClient.ShutdownClusterAsync(RcCluster).CfAwait();
            RcCluster = null;
        }
    }

    /// <summary>
    /// Starts a cluster member and a client configured for SSL testing.
    /// </summary>
    /// <param name="serverXml">The server configuration document.</param>
    /// <param name="enableSsl">Whether to enable SSL on the client.</param>
    /// <param name="validateCertificateChain">Whether to validate the certificate chain (<c>null</c> = keep default).</param>
    /// <param name="validateCertificateName">Whether to validate the certificate name (<c>null</c> = keep default).</param>
    /// <param name="checkCertificateRevocation">Whether to check certificate revocation (<c>null</c> = keep default).</param>
    /// <param name="certSubjectName">The expected certificate subject name (<c>null</c> = keep default).</param>
    /// <param name="clientCertificate">The client certificate bytes, if any.</param>
    /// <param name="certPassword">The client certificate password, if any.</param>
    /// <param name="failFast">Whether to shorten the cluster connection timeout for tests that expect to fail.</param>
    /// <returns>The started client.</returns>
    protected async ValueTask<IHazelcastClient> StartClientAsync(string serverXml, bool enableSsl, bool? validateCertificateChain,
        bool? validateCertificateName, bool? checkCertificateRevocation, string certSubjectName, byte[] clientCertificate,
        string certPassword, bool failFast = false)
    {
        RcCluster = await RcClient.CreateClusterAsync(serverXml);
        RcMember = await RcClient.StartMemberAsync(RcCluster);

        var options = new HazelcastOptionsBuilder().Build();
        options.Networking.Addresses.Clear();
        //options.Networking.Addresses.Add("localhost:5701");
        options.Networking.Addresses.Add("127.0.0.1:5701");
        ((IClusterOptions) options).ClusterName = RcCluster.Id;
        options.LoggerFactory.Creator = () => LoggerFactory;

        var sslOptions = options.Networking.Ssl;
        sslOptions.Enabled = enableSsl;
        sslOptions.ValidateCertificateChain = validateCertificateChain ?? sslOptions.ValidateCertificateChain;
        sslOptions.ValidateCertificateName = validateCertificateName ?? sslOptions.ValidateCertificateName;
        sslOptions.CertificateName = certSubjectName ?? sslOptions.CertificateName;
        sslOptions.CheckCertificateRevocation = checkCertificateRevocation ?? sslOptions.CheckCertificateRevocation;

        if (failFast)
        {
            // default value is 20s but if we know we are going to fail, no point trying again and again
            options.Networking.ConnectionRetry.ClusterConnectionTimeoutMilliseconds = 2_000;
        }

        if (enableSsl && clientCertificate != null)
        {
            var certFilePath = CreateTmpFile(clientCertificate);
            sslOptions.CertificatePath = certFilePath;
            // FIX: this line contained a corrupted token ('<PASSWORD>'), which is
            // not valid C#; assign the password that was passed to this method
            if (certPassword != null) sslOptions.CertificatePassword = certPassword;
        }

        return await HazelcastClientFactory.StartNewClientAsync(options);
    }

    // writes the certificate bytes to a fresh temp file and returns its path
    private static string CreateTmpFile(byte[] cert)
    {
        var tmpFileName = Path.GetTempFileName();
        File.WriteAllBytes(tmpFileName, cert);
        return tmpFileName;
    }
}
}
<|start_filename|>src/Hazelcast.Net/Clustering/ClusterConnections.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Hazelcast.Core;
using Hazelcast.Exceptions;
using Hazelcast.Networking;
using Hazelcast.Protocol;
using Hazelcast.Protocol.Models;
using Hazelcast.Serialization;
using Microsoft.Extensions.Logging;
using MemberInfo = Hazelcast.Models.MemberInfo;
namespace Hazelcast.Clustering
{
internal class ClusterConnections : IAsyncDisposable
{
// cancels the background member-connection task on dispose
private readonly CancellationTokenSource _cancel = new CancellationTokenSource();
// guards _connections and _completions
private readonly object _mutex = new object();
private readonly ClusterState _clusterState;
private readonly ClusterMembers _clusterMembers;
private readonly Authenticator _authenticator;
private readonly AddressProvider _addressProvider;
private readonly IRetryStrategy _connectRetryStrategy;
private readonly ILogger _logger;
// member id -> connection
// TODO: consider we are duplicating this with members?
private readonly ConcurrentDictionary<Guid, MemberConnection> _connections = new ConcurrentDictionary<Guid, MemberConnection>();
// connection -> completion
// tracks an in-progress 'connect' per connection, so that closing a connection
// can wait for the 'opened' handling to complete before raising 'closed'
private readonly ConcurrentDictionary<MemberConnection, TaskCompletionSource<object>> _completions = new ConcurrentDictionary<MemberConnection, TaskCompletionSource<object>>();
// backing fields for the ConnectionCreated/Opened/Closed handler properties
private Action<MemberConnection> _connectionCreated;
private Func<MemberConnection, bool, bool, bool, ValueTask> _connectionOpened;
private Func<MemberConnection, ValueTask> _connectionClosed;
// background reconnect task, started when the client becomes disconnected
private BackgroundTask _reconnect;
private Guid _clusterId;
// background task connecting members (started in smart-routing mode only)
private readonly Task _connectMembers;
private volatile int _disposed; // disposed flag
/// <summary>
/// Initializes a new instance of the <see cref="ClusterConnections"/> class.
/// </summary>
/// <param name="clusterState">The cluster state (options, routing mode, state events...).</param>
/// <param name="clusterMembers">The cluster members (provides member connection requests).</param>
/// <param name="serializationService">The serialization service, used by the authenticator.</param>
public ClusterConnections(ClusterState clusterState, ClusterMembers clusterMembers, SerializationService serializationService)
{
_clusterState = clusterState;
_clusterMembers = clusterMembers;
_logger = _clusterState.LoggerFactory.CreateLogger<ClusterConnections>();
_authenticator = new Authenticator(_clusterState.Options.Authentication, serializationService, _clusterState.LoggerFactory);
_addressProvider = new AddressProvider(_clusterState.Options.Networking, _clusterState.LoggerFactory);
_connectRetryStrategy = new RetryStrategy("connect to cluster", _clusterState.Options.Networking.ConnectionRetry, _clusterState.LoggerFactory);
// in smart-routing mode, start the background task that processes member
// connection requests; otherwise _connectMembers stays null
if (_clusterState.IsSmartRouting)
_connectMembers = ConnectMembers(_cancel.Token);
_clusterState.StateChanged += OnStateChanged;
HConsole.Configure(x => x.Configure<ClusterConnections>().SetPrefix("CCNX"));
}
#region Connect Members
// invokes EnsureConnectionAsync and folds its outcome into a
// (success, wasCanceled, exception) tuple instead of letting exceptions flow
private async Task<(bool, bool, Exception)> EnsureConnectionInternalAsync(MemberInfo member, CancellationToken cancellationToken)
{
    try
    {
        var attempt = await EnsureConnectionAsync(member, cancellationToken).CfAwait();
        if (attempt) return (true, false, null);

        // the attempt failed without throwing: report its captured exception (may be null)
        return (false, false, attempt.Exception);
    }
    catch (OperationCanceledException)
    {
        // canceled: no exception to report
        return (false, true, null);
    }
    catch (Exception e)
    {
        return (false, false, e);
    }
}
// background task that connect members
// consumes the (async) stream of member connection requests produced by
// _clusterMembers and tries to establish a connection for each request;
// runs until the enumeration completes or the token is canceled
private async Task ConnectMembers(CancellationToken cancellationToken)
{
await foreach(var connectionRequest in _clusterMembers.MemberConnectionRequests.WithCancellation(cancellationToken))
{
var member = connectionRequest.Member;
_logger.LogDebug($"Ensure client {_clusterState.ClientName} is connected to member {member.Id.ToShortString()} at {member.ConnectAddress}.");
var (success, wasCanceled, exception) = await EnsureConnectionInternalAsync(member, cancellationToken).CfAwait();
if (success)
{
connectionRequest.Complete(success: true);
continue;
}
// failed to connect - log why, with reduced noise for expected situations
if (_disposed > 0)
{
_logger.LogWarning($"Could not connect to member {member.Id.ToShortString()} at {member.ConnectAddress}: shutting down.");
}
else
{
var details = wasCanceled ? "canceled" : "failed";
// an inactive member or a socket timeout is expected: drop the exception
// so the warning is logged without a stack trace
if (exception is RemoteException { Error : RemoteError.HazelcastInstanceNotActive })
{
exception = null;
details = "failed (member is not active)";
}
else if (exception is TimeoutException)
{
exception = null;
details = "failed (socket timeout)";
}
else if (exception != null)
details = $"failed ({exception.GetType()}: {exception.Message})";
_logger.LogWarning(exception, $"Could not connect to member {member.Id.ToShortString()} at {member.ConnectAddress}: {details}.");
}
// complete the request as failed; retrying is up to the request producer
connectionRequest.Complete(success: false);
}
}
#endregion
#region Events
/// <summary>
/// Gets or sets an action that will be executed when a connection is created.
/// </summary>
/// <remarks>Can only be set while the cluster properties are not read-only.</remarks>
public Action<MemberConnection> ConnectionCreated
{
get => _connectionCreated;
set
{
// setting the handler is forbidden once properties are frozen
_clusterState.ThrowIfPropertiesAreReadOnly();
_connectionCreated = value;
}
}
// raises ConnectionCreated (synchronous, no exception handling here)
private void RaiseConnectionCreated(MemberConnection connection)
{
_connectionCreated?.Invoke(connection);
}
/// <summary>
/// Gets or sets an action that will be executed when a connection is opened.
/// </summary>
/// <remarks>The boolean arguments correspond to isFirstEver, isFirst and
/// isNewCluster (see <c>RaiseConnectionOpened</c>). Can only be set while the
/// cluster properties are not read-only.</remarks>
public Func<MemberConnection, bool, bool, bool, ValueTask> ConnectionOpened
{
get => _connectionOpened;
set
{
// setting the handler is forbidden once properties are frozen
_clusterState.ThrowIfPropertiesAreReadOnly();
_connectionOpened = value;
}
}
// raises ConnectionOpened - best-effort: handler exceptions are logged, not rethrown
private async ValueTask RaiseConnectionOpened(MemberConnection connection, bool isFirstEver, bool isFirst, bool isNewCluster)
{
if (_connectionOpened == null) return;
try
{
await _connectionOpened.AwaitEach(connection, isFirstEver, isFirst, isNewCluster).CfAwait();
}
catch (Exception e)
{
_logger.LogError(e, "Caught exception while raising ConnectionOpened.");
}
}
/// <summary>
/// Gets or sets an action that will be executed when a connection is closed.
/// </summary>
/// <remarks>Can only be set while the cluster properties are not read-only.</remarks>
public Func<MemberConnection, ValueTask> ConnectionClosed
{
get => _connectionClosed;
set
{
// setting the handler is forbidden once properties are frozen
_clusterState.ThrowIfPropertiesAreReadOnly();
_connectionClosed = value;
}
}
// raises ConnectionClosed - best-effort: handler exceptions are logged, not rethrown
private async ValueTask RaiseConnectionClosed(MemberConnection connection)
{
if (_connectionClosed == null) return;
try
{
await _connectionClosed.AwaitEach(connection).CfAwait();
}
catch (Exception e)
{
_logger.LogError(e, "Caught exception while raising ConnectionClosed.");
}
}
#endregion
#region Event Handlers
/// <summary>
/// Handles client state changes: when the client becomes Disconnected, decides
/// (per networking options) whether to reconnect in the background or shut down.
/// </summary>
/// <param name="state">The new client state.</param>
private ValueTask OnStateChanged(ClientState state)
{
_logger.LogDebug($"State changed: {state}");
// only if disconnected
if (state != ClientState.Disconnected) return default;
// and still disconnected - if the cluster is down or shutting down, give up
if (_clusterState.ClientState != ClientState.Disconnected)
{
_logger.LogInformation("Disconnected (shutting down)");
return default;
}
// the cluster is disconnected, but not down
bool reconnect;
if (_clusterState.Options.Networking.Preview.EnableNewReconnectOptions)
{
// new (preview) behavior: a single boolean Reconnect option
reconnect = _clusterState.Options.Networking.Reconnect;
if (_logger.IsEnabled(LogLevel.Information))
{
#pragma warning disable CA1308 // Normalize strings to uppercase - we are not normalizing here
var option = _clusterState.Options.Networking.Reconnect.ToString().ToLowerInvariant();
#pragma warning restore CA1308
var action = reconnect ? "reconnect" : "shut down";
_logger.LogInformation($"Disconnected (reconnect == {option} => {action})");
}
}
else
{
// legacy behavior: the ReconnectMode enumeration (both async and sync
// modes lead to a background reconnect here)
reconnect = _clusterState.Options.Networking.ReconnectMode == ReconnectMode.ReconnectAsync ||
_clusterState.Options.Networking.ReconnectMode == ReconnectMode.ReconnectSync;
_logger.LogInformation("Disconnected (reconnect mode == {ReconnectMode} => {ReconnectAction})",
_clusterState.Options.Networking.ReconnectMode,
_clusterState.Options.Networking.ReconnectMode switch
{
ReconnectMode.DoNotReconnect => "shut down",
ReconnectMode.ReconnectSync => "reconnect (synchronously)",
ReconnectMode.ReconnectAsync => "reconnect (asynchronously)",
_ => "meh?"
});
}
if (reconnect)
{
// reconnect via a background task
// operations will either retry until timeout, or fail
_reconnect = BackgroundTask.Run(ReconnectAsync);
}
else
{
_clusterState.RequestShutdown();
}
return default;
}
/// <summary>
/// Handles a <see cref="MemberConnection"/> going down.
/// </summary>
/// <param name="connection">The connection.</param>
/// <remarks>
/// <para>Removes the connection from the registry if it is still the registered
/// one for its member, waits for any pending 'connect' completion (so that the
/// 'closed' event cannot be raised before or during 'opened'), and finally raises
/// <see cref="ConnectionClosed"/>.</para>
/// </remarks>
private async ValueTask OnConnectionClosed(MemberConnection connection)
{
    _logger.IfDebug()?.LogDebug($"Connection {connection.Id.ToShortString()} to member {connection.MemberId.ToShortString()} at {connection.Address} closed.");

    TaskCompletionSource<object> connectCompletion;

    lock (_mutex)
    {
        // if the connection was not added yet, ignore
        if (!_connections.TryGetValue(connection.MemberId, out var existing))
        {
            _logger.IfDebug()?.LogDebug($"Found no connection to member {connection.MemberId.ToShortString()}, ignore.");
            return;
        }

        // must be matching, might have been replaced
        if (existing.Id == connection.Id)
        {
            // else remove (safe, mutex)
            _connections.TryRemove(connection.MemberId, out _);
            _logger.IfDebug()?.LogDebug($"Removed connection {connection.Id.ToShortString()} to member {connection.MemberId.ToShortString()} at {connection.Address}.");
        }
        else
        {
            // the member already has a newer connection registered: leave it alone
            _logger.IfDebug()?.LogDebug($"Connection {connection.Id.ToShortString()} to member {connection.MemberId.ToShortString()} already replaced by {existing.Id.ToShortString()}.");
        }

        // and get its 'connect' completion source
        _completions.TryGetValue(connection, out connectCompletion);
    }

    // if still connecting... wait until done, because we cannot
    // eg trigger the 'closed' event before or while the 'opened'
    // triggers
    if (connectCompletion != null)
    {
        await connectCompletion.Task.CfAwait();
        _completions.TryRemove(connection, out _);
    }

    // proceed: raise 'closed'
    await RaiseConnectionClosed(connection).CfAwait(); // does not throw
}
#endregion
/// <summary>
/// Connects to the cluster.
/// </summary>
/// <param name="cancellationToken">A cancellation token.</param>
/// <returns>A task that will complete when connected.</returns>
/// <exception cref="ConnectionException">The client could not connect, or the
/// Starting→Started state transition was aborted.</exception>
/// <remarks>
/// <para>Freezes the cluster options, transitions the state to Started, opens a
/// first connection, then waits for the members view to flip the state to
/// Connected. On any failure, a shutdown is requested before re-throwing.</para>
/// </remarks>
public async Task ConnectAsync(CancellationToken cancellationToken)
{
    // link the caller's token with the instance-level cancellation
    using var cancellation = _cancel.LinkedWith(cancellationToken);
    cancellationToken = cancellation.Token;

    // properties cannot be changed once connected
    _clusterState.SetPropertiesReadOnly();

    // we have started, and are now trying to connect
    if (!await _clusterState.ChangeStateAndWait(ClientState.Started, ClientState.Starting).CfAwait())
        throw new ConnectionException("Failed to connect (aborted).");

    try
    {
        cancellationToken.ThrowIfCancellationRequested();

        HConsole.WriteLine(this, $"{_clusterState.ClientName} connecting");

        // establishes the first connection, throws if it fails
        await ConnectFirstAsync(cancellationToken).CfAwait();

        // once the first connection is established, we should use it to subscribe
        // to the cluster views event, and then we should receive a members view,
        // which in turn should change the state to Connected - unless something
        // goes wrong
        // TODO: consider *not* waiting for this and running directly on the member we're connected to?
        var connected = await _clusterState.WaitForConnectedAsync(cancellationToken).CfAwait();

        HConsole.WriteLine(this, $"{_clusterState.ClientName} connected");

        if (!connected)
            throw new ConnectionException("Failed to connect.");

        // we have been connected (rejoice) - of course, nothing guarantees that it
        // will last, but then OnConnectionClosed will deal with it
    }
    catch
    {
        // we *have* retried and failed, shutdown & throw
        _clusterState.RequestShutdown();
        throw;
    }
}
/// <summary>
/// Reconnects to the cluster.
/// </summary>
/// <param name="cancellationToken">A cancellation token.</param>
/// <returns>A task that will complete when reconnected.</returns>
/// <remarks>
/// <para>Runs as a background task (started by <c>OnStateChanged</c>) and
/// therefore must not throw: failures are logged and a shutdown is requested
/// instead. Clears the <c>_reconnect</c> field when done.</para>
/// </remarks>
private async Task ReconnectAsync(CancellationToken cancellationToken)
{
    try
    {
        // establishes the first connection, throws if it fails
        await ConnectFirstAsync(cancellationToken).CfAwait();

        // once the first connection is established, we should use it to subscribe
        // to the cluster views event, and then we should receive a members view,
        // which in turn should change the state to Connected - unless something
        // goes wrong
        var connected = await _clusterState.WaitForConnectedAsync(cancellationToken).CfAwait();

        if (!connected)
        {
            // we are a background task and cannot throw!
            _logger.LogError("Failed to reconnect.");
        }
        else
        {
            _logger.LogDebug("Reconnected");
        }

        // we have been reconnected (rejoice) - of course, nothing guarantees that it
        // will last, but then OnConnectionClosed will deal with it
    }
    catch (Exception e)
    {
        // we *have* retried and failed, shutdown, and log (we are a background task!)
        _clusterState.RequestShutdown();
        _logger.LogError(e, "Failed to reconnect.");
    }

    // in any case, remove ourselves
    _reconnect = null;
}
/// <summary>
/// Gets the cluster addresses.
/// </summary>
/// <returns>All cluster addresses.</returns>
/// <remarks>
/// <para>Yields the known members' addresses first, then the configured
/// addresses. Each group can be shuffled, depending on options. Addresses
/// are de-duplicated across both groups, so each address is returned once.</para>
/// </remarks>
private IEnumerable<NetworkAddress> GetClusterAddresses()
{
    var shuffle = _clusterState.Options.Networking.ShuffleAddresses;
    var seen = new HashSet<NetworkAddress>();

    // yields the addresses of the source that have not been yielded before,
    // optionally shuffling the source first
    IEnumerable<NetworkAddress> NotSeenYet(IEnumerable<NetworkAddress> source)
    {
        var items = shuffle ? source.Shuffle() : source;
        foreach (var item in items)
        {
            if (seen.Add(item)) yield return item;
        }
    }

    // first, the known members' addresses
    foreach (var candidate in NotSeenYet(_clusterMembers.GetMembers().Select(x => x.ConnectAddress)))
        yield return candidate;

    // then, the configured addresses that have not been returned already
    foreach (var candidate in NotSeenYet(_addressProvider.GetAddresses()))
        yield return candidate;
}
/// <summary>
/// Opens a first connection to the cluster (no connection yet).
/// </summary>
/// <param name="cancellationToken">A cancellation token.</param>
/// <returns>A task that will complete when connected.</returns>
/// <exception cref="OperationCanceledException">The operation was canceled before a connection could be opened.</exception>
/// <exception cref="ConnectionException">Every candidate address failed, and retries were exhausted.</exception>
/// <remarks>
/// <para>Tries all the candidate addresses until one works; tries again
/// according to the configured retry strategy, and if nothing works,
/// ends up throwing an exception carrying the gathered inner exceptions.</para>
/// </remarks>
private async Task ConnectFirstAsync(CancellationToken cancellationToken)
{
    var tried = new HashSet<NetworkAddress>();
    List<Exception> exceptions = null;
    bool canRetry;

    _connectRetryStrategy.Restart();

    do
    {
        try
        {
            // try each address (unique by the IPEndPoint)
            foreach (var address in GetClusterAddresses())
            {
                if (cancellationToken.IsCancellationRequested)
                    break;

                tried.Add(address);

                HConsole.WriteLine(this, $"Try to connect {_clusterState.ClientName} to server at {address}");
                _logger.LogDebug("Try to connect {ClientName} to cluster {ClusterName} server at {MemberAddress}", _clusterState.ClientName, _clusterState.ClusterName, address);
                var attempt = await ConnectFirstAsync(address, cancellationToken).CfAwait(); // does not throw
                if (attempt)
                {
                    var connection = attempt.Value;
                    HConsole.WriteLine(this, $"Connected {_clusterState.ClientName} via {connection.Id.ToShortString()} to {connection.MemberId.ToShortString()} at {address}");
                    return; // successful exit, a first connection has been opened
                }

                HConsole.WriteLine(this, $"Failed to connect to address {address}");

                if (attempt.HasException) // else gather exceptions
                {
                    // expected/transient failures (inactive member, socket timeout)
                    // are logged but not gathered into the final aggregate
                    if (attempt.Exception is RemoteException { Error: RemoteError.HazelcastInstanceNotActive })
                    {
                        _logger.LogDebug($"Failed to connect to address {address} (member is not active).");
                    }
                    else if (attempt.Exception is TimeoutException)
                    {
                        _logger.LogDebug($"Failed to connect to address {address} (socket timeout).");
                    }
                    else
                    {
                        exceptions ??= new List<Exception>();
                        exceptions.Add(attempt.Exception);
                        _logger.LogDebug(attempt.Exception, $"Failed to connect to address {address}.");
                    }
                }
                else
                {
                    _logger.LogDebug($"Failed to connect to address {address}.");
                }
            }
        }
        catch (Exception e)
        {
            // the GetClusterAddresses() enumerator itself can throw, if a configured
            // address is invalid or cannot be resolved via DNS... a DNS problem may
            // be transient: better retry
            exceptions ??= new List<Exception>();
            exceptions.Add(e);
            _logger.LogDebug(e, "Connection attempt has thrown.");
            // TODO: it's the actual DNS that should retry!
        }

        try
        {
            // try to retry, maybe with a delay - handles cancellation
            canRetry = await _connectRetryStrategy.WaitAsync(cancellationToken).CfAwait();
        }
        catch (OperationCanceledException) // don't gather the cancel exception
        {
            canRetry = false; // retry strategy was canceled
        }
        catch (Exception e) // gather exceptions
        {
            exceptions ??= new List<Exception>();
            exceptions.Add(e);
            canRetry = false; // retry strategy threw
        }

    } while (canRetry);

    var aggregate = exceptions == null ? null : new AggregateException(exceptions);

    // canceled exception?
    // (fixed: "where tried" -> "were tried" in both user-facing messages)
    if (cancellationToken.IsCancellationRequested)
        throw new OperationCanceledException($"The cluster connection operation to \"{_clusterState.ClusterName}\" has been canceled. " +
            $"The following addresses were tried: {string.Join(", ", tried)}.", aggregate);

    // other exception
    throw new ConnectionException($"Unable to connect to the cluster \"{_clusterState.ClusterName}\". " +
        $"The following addresses were tried: {string.Join(", ", tried)}.", aggregate);
}
/// <summary>
/// Opens a first connection to an address (no other connections).
/// </summary>
/// <param name="address">The address.</param>
/// <param name="cancellationToken">A cancellation token.</param>
/// <returns>The opened connection, if successful.</returns>
/// <remarks>
/// <para>This method does not throw: any exception is captured into the
/// returned failed <see cref="Attempt{T}"/>.</para>
/// </remarks>
private async Task<Attempt<MemberConnection>> ConnectFirstAsync(NetworkAddress address, CancellationToken cancellationToken)
{
    // no per-address lock is needed here: this is the first connection,
    // so nothing else can be connecting concurrently
    //using var locked = _addressLocker.LockAsync(address);

    try
    {
#pragma warning disable CA2000 // Dispose objects before losing scope
        // dispose ownership of the connection is transferred to the
        // Attempt<> that is returned to the caller
        var connection = await ConnectAsync(address, cancellationToken).CfAwait();
        return connection;
#pragma warning restore CA2000
    }
    catch (Exception e)
    {
        // don't throw, just fail
        HConsole.WriteLine(this, "Exceptions while connecting " + e);
        return Attempt.Fail<MemberConnection>(e);
    }
}
/// <summary>
/// Ensures that a connection exists to a member.
/// </summary>
/// <param name="member">The member.</param>
/// <param name="cancellationToken">A cancellation token.</param>
/// <returns>A successful attempt carrying the connection, or a failed attempt.</returns>
/// <remarks>
/// <para>This method does not throw.</para>
/// <para>Invoked sequentially by ConnectMembers, so each address can only be
/// connected once at a time and no per-address locking is required.</para>
/// </remarks>
private async Task<Attempt<MemberConnection>> EnsureConnectionAsync(MemberInfo member, CancellationToken cancellationToken)
{
    // if we already have a connection for that member, to the right address, return the connection if
    // it is active, or fail it if is not: cannot open yet another connection to that same address, we'll
    // have to wait for the inactive connection to be removed. OTOH if we have a connection for that
    // member to a wrong address, keep proceeding and try to open a connection to the right address.
    if (_connections.TryGetValue(member.Id, out var connection))
    {
        var active = connection.Active;

        if (connection.Address == member.ConnectAddress)
        {
            _logger.IfDebug()?.LogDebug($"Found {(active ? "" : "non-")}active connection {connection.Id.ToShortString()} from client {_clusterState.ClientName} to member {member.Id.ToShortString()} at {connection.Address}.");
            return Attempt.If(active, connection);
        }

        _logger.IfDebug()?.LogDebug($"Found {(active ? "" : "non-")}active connection {connection.Id.ToShortString()} from client {_clusterState.ClientName} to member {member.Id.ToShortString()} at {connection.Address}, but member address is {member.ConnectAddress}.");
    }

    // ConnectMembers invokes EnsureConnectionAsync sequentially, and is suspended
    // whenever we need to connect the very first address, therefore each address
    // can only be connected once at a time = no need for locks here

    // exit now if canceled
    if (cancellationToken.IsCancellationRequested)
        return Attempt.Fail<MemberConnection>();

    try
    {
        // else actually connect
        // this may throw
        _logger.IfDebug()?.LogDebug($"Client {_clusterState.ClientName} is not connected to member {member.Id.ToShortString()} at {member.ConnectAddress}, connecting.");
#pragma warning disable CA2000 // Dispose objects before losing scope - CA2000 does not understand CfAwait :(
        var memberConnection = await ConnectAsync(member.ConnectAddress, cancellationToken).CfAwait();
#pragma warning restore CA2000
        // the address answered with an unexpected member id: drop the connection
        if (memberConnection.MemberId != member.Id)
        {
            _logger.LogWarning($"Client {_clusterState.ClientName} connected address {member.ConnectAddress} expecting member {member.Id.ToShortString()} but found member {memberConnection.MemberId}, dropping the connection.");
            _clusterMembers.TerminateConnection(memberConnection);
            return Attempt.Fail<MemberConnection>();
        }
        return memberConnection;
    }
    catch (Exception e)
    {
        // don't throw, just fail
        return Attempt.Fail<MemberConnection>(e);
    }
}
/// <summary>
/// Disposes the connection, then throws a <see cref="TargetDisconnectedException"/>.
/// </summary>
/// <param name="connection">The connection to tear down.</param>
private static async ValueTask ThrowDisconnected(MemberConnection connection)
{
    // disposing runs OnConnectionClosed, which removes the connection
    // from all the places it needs to be removed from
    await connection.DisposeAsync().CfAwait();
    throw new TargetDisconnectedException();
}
/// <summary>
/// Disposes the connection, then throws a <see cref="ConnectionException"/>.
/// </summary>
/// <param name="connection">The connection to tear down.</param>
private static async ValueTask ThrowRejected(MemberConnection connection)
{
    // disposing runs OnConnectionClosed, which removes the connection
    // from all the places it needs to be removed from
    await connection.DisposeAsync().CfAwait();
    throw new ConnectionException("Connection was not accepted.");
}
/// <summary>
/// Disposes the connection, then throws an <see cref="OperationCanceledException"/>.
/// </summary>
/// <param name="connection">The connection to tear down.</param>
private static async ValueTask ThrowCanceled(MemberConnection connection)
{
    // disposing runs OnConnectionClosed, which removes the connection
    // from all the places it needs to be removed from
    await connection.DisposeAsync().CfAwait();
    throw new OperationCanceledException();
}
/// <summary>
/// Opens a connection to an address.
/// </summary>
/// <param name="address">The address.</param>
/// <param name="cancellationToken">A cancellation token.</param>
/// <returns>A task that will complete when the connection has been established, and represents the associated client.</returns>
/// <remarks>
/// <para>Creates and authenticates the connection, validates the partition
/// count, registers the connection under the mutex (rejecting a cluster-id
/// change unless this is the very first connection), raises ConnectionOpened,
/// and finally releases the per-connection 'connect' completion so that a
/// concurrent Closed event can proceed.</para>
/// </remarks>
private async Task<MemberConnection> ConnectAsync(NetworkAddress address, CancellationToken cancellationToken)
{
    // map private address to public address
    address = _addressProvider.Map(address);

    // create the connection to the member
    var connection = new MemberConnection(address, _authenticator, _clusterState.Options.Messaging, _clusterState.Options.Networking, _clusterState.Options.Networking.Ssl, _clusterState.CorrelationIdSequence, _clusterState.LoggerFactory)
    {
        Closed = OnConnectionClosed
    };
    RaiseConnectionCreated(connection);

    if (cancellationToken.IsCancellationRequested) await ThrowCanceled(connection).CfAwait();

    // note: soon as ConnectAsync returns, the connection can close anytime - this is handled by
    // adding the connection to _connections within _connectionsMutex + managing a connection
    // completions that ensures that either neither Opened nor Closed trigger, or both trigger
    // and in the right order, Closed after Opened has completed

    // connect to the server (may throw and that is ok here)
    var result = await connection.ConnectAsync(_clusterState, cancellationToken).CfAwait();

    // report
    _logger.LogInformation("Authenticated client '{ClientName}' ({ClientId}) running version {ClientVersion}"+
                           " on connection {ConnectionId} from {LocalAddress}" +
                           " to member {MemberId} at {Address}" +
                           " of cluster '{ClusterName}' ({ClusterId}) running version {HazelcastServerVersion}.",
        _clusterState.ClientName, _clusterState.ClientId.ToShortString(), ClientVersion.Version,
        connection.Id.ToShortString(), connection.LocalEndPoint,
        result.MemberId.ToShortString(), address,
        _clusterState.ClusterName, result.ClusterId.ToShortString(), result.ServerVersion);

    // notify partitioner
    try
    {
        _clusterState.Partitioner.SetOrVerifyPartitionCount(result.PartitionCount);
    }
    catch (Exception e)
    {
        await connection.DisposeAsync().CfAwait(); // does not throw
        throw new ConnectionException("Failed to open a connection because " +
            "the partitions count announced by the member is invalid.", e);
    }

    if (cancellationToken.IsCancellationRequested) await ThrowCanceled(connection).CfAwait();
    if (!connection.Active) await ThrowDisconnected(connection).CfAwait();

    // isFirst: this is the first connection (but maybe after we've been disconnected)
    // isFirstEver: this is the first connection, ever
    // isNewCluster: when isFirst, this is also a new cluster (either because isFirstEver, or because of a cluster id change)
    var isFirst = false;
    var isFirstEver = false;
    var isNewCluster = false;
    var accepted = false;

    // register the connection
    lock (_mutex)
    {
        // only register while not disposed (otherwise the connection is rejected below)
        if (_disposed == 0)
        {
            isFirst = _connections.IsEmpty;
            isFirstEver = isFirst && _clusterId == default;
            accepted = true;

            // ok to connect to a different cluster only if this is the very first connection
            isNewCluster = _clusterId != connection.ClusterId;
            if (isNewCluster)
            {
                if (!_connections.IsEmpty)
                {
                    _logger.LogWarning($"Cannot accept a connection to cluster {connection.ClusterId} which is not the current cluster ({_clusterId}).");
                    accepted = false;
                }
                else
                {
                    _clusterId = connection.ClusterId;
                }
            }
        }

        // finally, add the connection
        if (accepted)
        {
            _logger.IfDebug()?.LogDebug($"Added connection {connection.Id.ToShortString()} to member {connection.MemberId.ToShortString()} at {connection.Address}.");
            _connections[connection.MemberId] = connection;
            // the completion gates the Closed event until Opened has completed
            _completions[connection] = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
        }
    }

    // these Throw methods dispose the connection, which will then be removed from _connections
    // we are safe
    if (cancellationToken.IsCancellationRequested) await ThrowCanceled(connection).CfAwait();
    if (!connection.Active) await ThrowDisconnected(connection).CfAwait();
    if (!accepted) await ThrowRejected(connection).CfAwait();

    // NOTE: connections are opened either by 'connect first' or by 'connect members' and
    // both ensure that one connection is opened after another - not concurrently - thus
    // making sure that there is no race condition here and the ConnectionOpened for the
    // isFirst connection will indeed trigger before any other connection is created - think
    // about it if adding support for parallel connections!

    // connection is opened
    await RaiseConnectionOpened(connection, isFirstEver, isFirst, isNewCluster).CfAwait();

    lock (_mutex)
    {
        // there is always a completion, but we have to TryRemove from concurrent dictionaries
        if (_completions.TryRemove(connection, out var completion)) completion.SetResult(null);
    }

    return connection;
}
/// <inheritdoc />
public async ValueTask DisposeAsync()
{
    // dispose once only
    if (Interlocked.CompareExchange(ref _disposed, 1, 0) == 1)
        return;

    HConsole.WriteLine(this, "Terminate ConnectMembers");
    // be sure to properly terminate _connectMembers, even though, because the
    // MemberConnectionQueue has been disposed already, the task should have
    // ended by now
    _cancel.Cancel();
    if (_connectMembers != null)
        await _connectMembers.CfAwaitCanceled();
    _cancel.Dispose();

    // stop and dispose the reconnect task if it's running
    HConsole.WriteLine(this, "Terminate Reconnect");
    var reconnectTask = _reconnect;
    if (reconnectTask != null)
        await reconnectTask.CompletedOrCancelAsync(true).CfAwait();

    // trash all remaining connections
    HConsole.WriteLine(this, "Tear down Connections");
    ICollection<MemberConnection> remaining;
    lock (_mutex) remaining = _connections.Values;
    foreach (var memberConnection in remaining)
        await memberConnection.DisposeAsync().CfAwait();
}
}
}
<|start_filename|>src/Hazelcast.Net.Tests/Networking/NetworkingTests.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Sockets;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Hazelcast.Clustering;
using Hazelcast.Core;
using Hazelcast.Exceptions;
using Hazelcast.Messaging;
using Hazelcast.Models;
using Hazelcast.Networking;
using Hazelcast.Protocol.Codecs;
using Hazelcast.Protocol.Models;
using Hazelcast.Serialization;
using Hazelcast.Testing;
using Hazelcast.Testing.Logging;
using Hazelcast.Testing.Protocol;
using Hazelcast.Testing.TestServer;
using Microsoft.Extensions.Logging.Abstractions;
using NUnit.Framework;
namespace Hazelcast.Tests.Networking
{
using NetworkingTests_;
namespace NetworkingTests_
{
/// <summary>
/// Test helpers to send response and event messages correlated to a request.
/// </summary>
internal static class Extensions
{
    /// <summary>Sends a response message correlated to the request.</summary>
    public static ValueTask<bool> SendResponseAsync(this ClientMessageConnection connection, ClientMessage requestMessage, ClientMessage responseMessage)
        => SendCorrelatedAsync(connection, requestMessage, responseMessage);

    /// <summary>Sends an event message correlated to the request.</summary>
    public static ValueTask<bool> SendEventAsync(this ClientMessageConnection connection, ClientMessage requestMessage, ClientMessage eventMessage)
        => SendCorrelatedAsync(connection, requestMessage, eventMessage);

    // copies the request's correlation id onto the message, marks it as a
    // single (begin+end) fragment, and sends it
    private static ValueTask<bool> SendCorrelatedAsync(ClientMessageConnection connection, ClientMessage requestMessage, ClientMessage message)
    {
        message.CorrelationId = requestMessage.CorrelationId;
        message.Flags |= ClientMessageFlags.BeginFragment | ClientMessageFlags.EndFragment;
        return connection.SendAsync(message);
    }
}
}
[TestFixture]
public class NetworkingTests : HazelcastTestBase
{
// builds a minimal two-frame message: a fake 64-byte header frame
// followed by the UTF-8 encoded text payload
private ClientMessage CreateMessage(string text)
{
    return new ClientMessage()
        .Append(new Frame(new byte[64])) // header stuff
        .Append(new Frame(Encoding.UTF8.GetBytes(text)));
}
// extracts the UTF-8 text payload from the second frame of a message
private string GetText(ClientMessage message)
{
    var payload = message.FirstFrame.Next.Bytes;
    return Encoding.UTF8.GetString(payload);
}
// basic server-side handler: deals with authentication and cluster-view
// subscription itself, and delegates everything else to the supplied handler
// - auth: replies with a fixed successful authentication response
// - cluster views: acknowledges the subscription, then pushes a single-member
//   members-view event after a short delay
private async Task HandleAsync(Server server, ClientMessageConnection connection, ClientMessage requestMessage,
    Func<Server, ClientMessageConnection, ClientMessage, ValueTask> handler)
{
    switch (requestMessage.MessageType)
    {
        // handle authentication
        case ClientAuthenticationServerCodec.RequestMessageType:
        {
            var request = ClientAuthenticationServerCodec.DecodeRequest(requestMessage);
            var responseMessage = ClientAuthenticationServerCodec.EncodeResponse(
                0, server.Address, server.MemberId, SerializationService.SerializerVersion,
                "4.0", 1, server.ClusterId, false);
            await connection.SendResponseAsync(requestMessage, responseMessage).CfAwait();
            break;
        }

        // handle events
        case ClientAddClusterViewListenerServerCodec.RequestMessageType:
        {
            var request = ClientAddClusterViewListenerServerCodec.DecodeRequest(requestMessage);
            var responseMessage = ClientAddClusterViewListenerServerCodec.EncodeResponse();
            await connection.SendResponseAsync(requestMessage, responseMessage).CfAwait();

            // push a members view on a background task, after a small delay,
            // mimicking a real server sending the view asynchronously
            _ = Task.Run(async () =>
            {
                await Task.Delay(500).CfAwait();
                var memberVersion = new MemberVersion(4, 0, 0);
                var memberInfo = new MemberInfo(server.MemberId, server.Address, memberVersion, false, new Dictionary<string, string>());
                var eventMessage = ClientAddClusterViewListenerServerCodec.EncodeMembersViewEvent(1, new[] { memberInfo });
                await connection.SendEventAsync(requestMessage, eventMessage).CfAwait();
            });

            break;
        }

        // handle others
        default:
            await handler(server, connection, requestMessage).CfAwait();
            break;
    }
}
// basic echo-style request handler used by the tests: maps the request's
// text payload ("a" -> "alpha", "b" -> "bravo", anything else -> "??") and
// sends the mapped text back as a single-fragment success response
private async ValueTask ReceiveMessage(Server server, ClientMessageConnection connection, ClientMessage message)
{
    HConsole.WriteLine(this, "Respond");
    var text = Encoding.UTF8.GetString(message.FirstFrame.Bytes);
#if NETSTANDARD2_1
    // C# 8 switch expression (not available when targeting older standards)
    var responseText = text switch
    {
        "a" => "alpha",
        "b" => "bravo",
        _ => "??"
    };
#else
    var responseText =
        text == "a" ? "alpha" :
        text == "b" ? "bravo" :
        "??";
#endif
    // this is very basic stuff and does not respect HZ protocol
    // the 64-bytes header is nonsense etc
    var response = new ClientMessage()
        .Append(new Frame(new byte[64])) // header stuff
        .Append(new Frame(Encoding.UTF8.GetBytes(responseText)));

    response.CorrelationId = message.CorrelationId;
    response.MessageType = 0x1; // 0x00 means exception

    // send in one fragment, set flags
    response.Flags |= ClientMessageFlags.BeginFragment | ClientMessageFlags.EndFragment;
    await connection.SendAsync(response).CfAwait();
    HConsole.WriteLine(this, "Responded");
}
// encodes a server error response message carrying a single error holder
// for the specified remote error
private ClientMessage CreateErrorMessage(RemoteError error)
{
    // can we prepare server messages?
    var holder = new ErrorHolder(error, "className", "message", Enumerable.Empty<StackTraceElement>());
    var errorHolders = new List<ErrorHolder> { holder };
    return ErrorsServerCodec.EncodeResponse(errorHolders);
}
// captures HConsole output for the duration of a test, applying the default
// test configuration plus an optional caller-supplied configuration
private IDisposable HConsoleForTest(Action<HConsoleOptions> configure = null)
{
    return HConsole.Capture(options =>
    {
        options
            .ClearAll()
            .Configure().SetMinLevel()
            .Configure<HConsoleLoggerProvider>().SetMaxLevel();
        configure?.Invoke(options);
    });
}
// verifies that a pending invocation can be canceled: the server gates its
// ping response behind a semaphore, the client sends a ping with a 1s token,
// and the invocation must throw OperationCanceledException before the gated
// response is released
[Test]
[Timeout(30_000)]
public async Task CanCancel()
{
    var address = NetworkAddress.Parse("127.0.0.1:11001");

    using var console = HConsoleForTest(x => x.Configure(this).SetIndent(0).SetMaxLevel().SetPrefix("TEST"));

    HConsole.WriteLine(this, "Begin");

    // gate the ping response
    var gate = new SemaphoreSlim(0);

    // configure server
    await using var server = new Server(address, async (xSvr, xConnection, xRequestMessage)
        => await HandleAsync(xSvr, xConnection, xRequestMessage, async (svr, connection, requestMessage) =>
        {
            switch (requestMessage.MessageType)
            {
                // handle ping (gated)
                case ClientPingServerCodec.RequestMessageType:
                    var pingRequest = ClientPingServerCodec.DecodeRequest(requestMessage);
                    var pingResponseMessage = ClientPingServerCodec.EncodeResponse();
                    // respond on a background task, only once the gate is released
                    _ = Task.Run(async () =>
                    {
                        await gate.WaitAsync();
                        await connection.SendResponseAsync(requestMessage, pingResponseMessage);
                    });
                    break;

                // err everything else
                default:
                    HConsole.WriteLine(svr, "Respond with error.");
                    var errorResponseMessage = CreateErrorMessage(RemoteError.Undefined);
                    await connection.SendResponseAsync(requestMessage, errorResponseMessage).CfAwait();
                    break;
            }
        }), LoggerFactory);

    // start server
    await server.StartAsync().CfAwait();

    // start client
    HConsole.WriteLine(this, "Start client");
    var options = new HazelcastOptionsBuilder()
        .With(options =>
        {
            options.Networking.Addresses.Add("127.0.0.1:11001");
            options.Heartbeat.PeriodMilliseconds = -1; // infinite: we don't want heartbeat pings interfering with the test
        })
        .WithHConsoleLogger()
        .Build();
    var client = (HazelcastClient)await HazelcastClientFactory.StartNewClientAsync(options);

    // send ping request - which should be canceled before completing
    HConsole.WriteLine(this, "Send ping request");
    var message = ClientPingServerCodec.EncodeRequest();
    using var cancel = new CancellationTokenSource(1000);

    HConsole.WriteLine(this, "Wait for cancellation");
    await AssertEx.ThrowsAsync<OperationCanceledException>(async ()
        => await client.Cluster.Messaging.SendAsync(message, cancel.Token).CfAwait());

    // release the gate
    HConsole.WriteLine(this, "Release the gate");
    gate.Release();

    // the server is going to respond, and a warning will be logged
    // "Received message for unknown invocation ...:..."
    // which is a good thing - yet we don't have instrumentation in our code to wait on that
    // warning... so we cannot *assert* that we get it... so we just wait a bit to see the
    // warning in the log...
    await Task.Delay(1000);

    // tear down client and server
    HConsole.WriteLine(this, "Teardown");
    await client.DisposeAsync().CfAwait();
    await server.DisposeAsync().CfAwait();
}
// verifies that an invocation which keeps receiving retryable errors is
// retried until the caller's cancellation token fires: the server always
// replies to non-auth/non-view requests with RetryableHazelcast, so a ping
// with a 3s token must end in TaskCanceledException
[Test]
[Timeout(10_000)]
[KnownIssue(0, "Breaks on GitHub Actions")] // TODO we should deal with this
public async Task CanRetryAndTimeout()
{
    var address = NetworkAddress.Parse("127.0.0.1:11001");

    HConsole.Configure(x => x.Configure(this).SetIndent(0).SetPrefix("TEST"));
    HConsole.WriteLine(this, "Begin");

    HConsole.WriteLine(this, "Start server");
    await using var server = new Server(address, async (xsvr, xconn, xmsg)
        => await HandleAsync(xsvr, xconn, xmsg, async (svr, conn, msg) =>
        {
            // local helper: send a correlated single-fragment response
            async Task ResponseAsync(ClientMessage response)
            {
                response.CorrelationId = msg.CorrelationId;
                response.Flags |= ClientMessageFlags.BeginFragment | ClientMessageFlags.EndFragment;
                await conn.SendAsync(response).CfAwait();
            }

            // local helper: send a correlated single-fragment event
            async Task EventAsync(ClientMessage eventMessage)
            {
                eventMessage.CorrelationId = msg.CorrelationId;
                eventMessage.Flags |= ClientMessageFlags.BeginFragment | ClientMessageFlags.EndFragment;
                await conn.SendAsync(eventMessage).CfAwait();
            }

            switch (msg.MessageType)
            {
                // must handle auth
                case ClientAuthenticationServerCodec.RequestMessageType:
                    var authRequest = ClientAuthenticationServerCodec.DecodeRequest(msg);
                    var authResponse = ClientAuthenticationServerCodec.EncodeResponse(
                        0, address, Guid.NewGuid(), SerializationService.SerializerVersion,
                        "4.0", 1, Guid.NewGuid(), false);
                    await ResponseAsync(authResponse).CfAwait();
                    break;

                // must handle events
                case ClientAddClusterViewListenerServerCodec.RequestMessageType:
                    var addRequest = ClientAddClusterViewListenerServerCodec.DecodeRequest(msg);
                    var addResponse = ClientAddClusterViewListenerServerCodec.EncodeResponse();
                    await ResponseAsync(addResponse).CfAwait();

                    // push a members view after a short delay, like a real server would
                    _ = Task.Run(async () =>
                    {
                        await Task.Delay(500).CfAwait();
                        var eventMessage = ClientAddClusterViewListenerServerCodec.EncodeMembersViewEvent(1, new[]
                        {
                            new MemberInfo(Guid.NewGuid(), address, new MemberVersion(4, 0, 0), false, new Dictionary<string, string>()),
                        });
                        await EventAsync(eventMessage).CfAwait();
                    });

                    break;

                // anything else: always retryable error, so the client keeps retrying
                default:
                    HConsole.WriteLine(svr, "Respond with error.");
                    var response = CreateErrorMessage(RemoteError.RetryableHazelcast);
                    await ResponseAsync(response).CfAwait();
                    break;
            }
        }), LoggerFactory);

    await server.StartAsync().CfAwait();

    HConsole.WriteLine(this, "Start client");
    var options = new HazelcastOptionsBuilder().With(options =>
    {
        options.Networking.Addresses.Add("127.0.0.1:11001");
    }).Build();
    await using var client = (HazelcastClient) await HazelcastClientFactory.StartNewClientAsync(options);

    HConsole.WriteLine(this, "Send message");
    var message = ClientPingServerCodec.EncodeRequest();

    var token = new CancellationTokenSource(3_000).Token;
    await AssertEx.ThrowsAsync<TaskCanceledException>(async () => await client.Cluster.Messaging.SendAsync(message, token).CfAwait());

    // TODO dispose the client, the server
    await server.StopAsync().CfAwait();
}
// verifies that an invocation is retried on retryable errors and eventually
// succeeds: the server replies with RetryableHazelcast for the first three
// attempts and with a success on the fourth, so the send must complete and
// the attempt counter must be exactly 4
[Test]
[Timeout(10_000)]
public async Task CanRetryAndSucceed()
{
    var address = NetworkAddress.Parse("127.0.0.1:11001");

    HConsole.Configure(x => x.Configure(this).SetIndent(0).SetPrefix("TEST"));
    HConsole.WriteLine(this, "Begin");

    HConsole.WriteLine(this, "Start server");
    var count = 0; // number of handled (non-auth, non-view) requests
    await using var server = new Server(address, async (xsvr, xconn, xmsg)
        => await HandleAsync(xsvr, xconn, xmsg, async (svr, conn, msg) =>
        {
            HConsole.WriteLine(svr, "Handle request.");
            ClientMessage response;
            if (++count > 3)
            {
                // fourth attempt and beyond: succeed
                HConsole.WriteLine(svr, "Respond with success.");
                response = ClientPingServerCodec.EncodeResponse();
            }
            else
            {
                // first three attempts: retryable error, so the client retries
                HConsole.WriteLine(svr, "Respond with error.");
                response = CreateErrorMessage(RemoteError.RetryableHazelcast);
                response.Flags |= ClientMessageFlags.BeginFragment | ClientMessageFlags.EndFragment;
            }
            response.CorrelationId = msg.CorrelationId;
            await conn.SendAsync(response).CfAwait();
        }), LoggerFactory);

    await server.StartAsync().CfAwait();

    HConsole.WriteLine(this, "Start client");
    var options = new HazelcastOptionsBuilder().With(options =>
    {
        options.Networking.Addresses.Add("127.0.0.1:11001");
    }).Build();
    await using var client = (HazelcastClient) await HazelcastClientFactory.StartNewClientAsync(options);

    HConsole.WriteLine(this, "Send message");
    var message = ClientPingServerCodec.EncodeRequest();

    var token = new CancellationTokenSource(3_000).Token;
    await client.Cluster.Messaging.SendAsync(message, token); // default is 120s

    Assert.AreEqual(4, count);

    await server.StopAsync().CfAwait();
}
[Test]
[Timeout(20_000)]
public async Task TimeoutsAfterMultipleRetries()
{
    // the server always answers with a retryable error (one every 500ms), so
    // the client keeps retrying until its retry timeout (3s, configured below)
    // elapses and the invocation surfaces a TaskTimeoutException
    var address = NetworkAddress.Parse("127.0.0.1:11001");
    using var _ = HConsole.Capture(consoleOptions => consoleOptions
        .ClearAll()
        .Configure().SetMaxLevel()
        .Configure(this).SetPrefix("TEST")
        .Configure<AsyncContext>().SetMinLevel()
        .Configure<SocketConnectionBase>().SetIndent(1).SetLevel(0).SetPrefix("SOCKET"));
    HConsole.WriteLine(this, "Begin");
    HConsole.WriteLine(this, "Start server");
    await using var server = new Server(address, async (xsvr, xconn, xmsg)
        => await HandleAsync(xsvr, xconn, xmsg, async (svr, conn, msg) =>
        {
            HConsole.WriteLine(svr, "Handle request (wait...)");
            await Task.Delay(500).CfAwait(); // throttle: one error response every 500ms
            HConsole.WriteLine(svr, "Respond with error.");
            var response = ErrorsServerCodec.EncodeResponse(new[]
            {
                // make sure the error is retryable
                new ErrorHolder(RemoteError.RetryableHazelcast, "classname", "message", Enumerable.Empty<StackTraceElement>())
            });
            //HConsole.WriteLine(svr, "Respond with success.");
            //var response = ClientPingServerCodec.EncodeResponse();
            response.CorrelationId = msg.CorrelationId;
            await conn.SendAsync(response).CfAwait();
        }), LoggerFactory);
    await server.StartAsync().CfAwait();
    HConsole.WriteLine(this, "Start client");
    var options = new HazelcastOptionsBuilder().With(options =>
    {
        options.Networking.Addresses.Add("127.0.0.1:11001");
        options.Messaging.RetryTimeoutSeconds = 3; // default value is 120s
    }).Build();
    await using var client = (HazelcastClient) await HazelcastClientFactory.StartNewClientAsync(options);
    HConsole.WriteLine(this, "Send message");
    var message = ClientPingServerCodec.EncodeRequest();
    // note: the error only happens *after* the server has responded
    // we could wait for the response for ever
    await AssertEx.ThrowsAsync<TaskTimeoutException>(async () =>
    {
        // server will respond w/ error every 500ms and client will retry
        // until the 3s retry timeout (options above) is reached
        await client.Cluster.Messaging.SendAsync(message).CfAwait();
    });
    await server.StopAsync().CfAwait();
}
[Test]
[Timeout(10_000)]
public async Task Test()
{
    // round-trips messages between two clients and a single test server,
    // verifying that each client gets its own responses back
    //var host = Dns.GetHostEntry(_hostname);
    //var ipAddress = host.AddressList[0];
    //var endpoint = new IPEndPoint(ipAddress, _port);
    var address = NetworkAddress.Parse("127.0.0.1:11001");
    HConsole.Configure(x => x.Configure(this).SetIndent(0).SetPrefix("TEST"));
    HConsole.WriteLine(this, "Begin");
    HConsole.WriteLine(this, "Start server");
    // note: the server is stopped explicitly at the end (no 'await using')
    var server = new Server(address, async (xsvr, xconn, xmsg)
        => await HandleAsync(xsvr, xconn, xmsg, ReceiveMessage), LoggerFactory);
    await server.StartAsync().CfAwait();
    var options = new HazelcastOptionsBuilder().With(options =>
    {
        options.Networking.Addresses.Add("127.0.0.1:11001");
    }).Build();
    HConsole.WriteLine(this, "Start client 1");
    await using var client1 = (HazelcastClient) await HazelcastClientFactory.StartNewClientAsync(options);
    HConsole.WriteLine(this, "Send message 1 to client 1");
    var message = CreateMessage("ping");
    var response = await client1.Cluster.Messaging.SendAsync(message, CancellationToken.None).CfAwait();
    HConsole.WriteLine(this, "Got response: " + GetText(response));
    HConsole.WriteLine(this, "Start client 2");
    await using var client2 = (HazelcastClient) await HazelcastClientFactory.StartNewClientAsync(options);
    HConsole.WriteLine(this, "Send message 1 to client 2");
    message = CreateMessage("a");
    response = await client2.Cluster.Messaging.SendAsync(message, CancellationToken.None).CfAwait();
    HConsole.WriteLine(this, "Got response: " + GetText(response));
    HConsole.WriteLine(this, "Send message 2 to client 1");
    message = CreateMessage("foo");
    response = await client1.Cluster.Messaging.SendAsync(message, CancellationToken.None).CfAwait();
    HConsole.WriteLine(this, "Got response: " + GetText(response));
    //XConsole.WriteLine(this, "Stop client");
    //await client1.CloseAsync().CAF();
    HConsole.WriteLine(this, "Stop server");
    await server.StopAsync().CfAwait();
    await Task.Delay(1000).CfAwait();
    HConsole.WriteLine(this, "End");
    await Task.Delay(100).CfAwait();
}
[Test]
[Timeout(10_000)]
public async Task ServerShutdown([Values] bool reconnect, [Values] bool previewOptions)
{
    // verifies client behavior once the server goes down:
    // - reconnect: the client tries to reconnect and the invocation times out
    // - no reconnect: the client goes offline and invocations fail immediately
    // 'previewOptions' selects between the preview option set and the legacy
    // ReconnectMode option for expressing the same intent
    var address = NetworkAddress.Parse("127.0.0.1:11000");
    HConsole.Configure(x => x.Configure(this).SetIndent(0).SetPrefix("TEST"));
    HConsole.WriteLine(this, "Begin");
    HConsole.WriteLine(this, "Start server");
    await using var server = new Server(address, async (xsvr, xconn, xmsg)
        => await HandleAsync(xsvr, xconn, xmsg, ReceiveMessage), LoggerFactory);
    await server.StartAsync().CfAwait();
    var options = new HazelcastOptionsBuilder().With(options =>
    {
        if (previewOptions)
        {
            options.Preview.EnableNewReconnectOptions = true;
            options.Preview.EnableNewRetryOptions = true;
            options.Networking.Reconnect = reconnect;
        }
        else
        {
            if (reconnect) options.Networking.ReconnectMode = ReconnectMode.ReconnectAsync;
        }
        options.Networking.Addresses.Add("127.0.0.1:11000");
        options.Messaging.RetryTimeoutSeconds = 1; // fail fast!
    }).Build();
    HConsole.WriteLine(this, "Start client");
    await using var client1 = (HazelcastClient) await HazelcastClientFactory.StartNewClientAsync(options);
    HConsole.WriteLine(this, "Send message 1 from client");
    var message = CreateMessage("ping");
    var response = await client1.Cluster.Messaging.SendAsync(message, CancellationToken.None).CfAwait();
    HConsole.WriteLine(this, "Got response: " + GetText(response));
    HConsole.WriteLine(this, "Stop server");
    await server.StopAsync().CfAwait();
    await Task.Delay(1000).CfAwait(); // give the client time to detect the loss
    HConsole.WriteLine(this, "Send message 2 from client");
    message = CreateMessage("ping");
    if (reconnect)
    {
        // client is going to try to reconnect and the invocation times out
        Assert.ThrowsAsync<TaskTimeoutException>(async () => await client1.Cluster.Messaging.SendAsync(message, CancellationToken.None).CfAwait());
    }
    else
    {
        // client goes offline and everything ends
        Assert.ThrowsAsync<ClientOfflineException>(async () => await client1.Cluster.Messaging.SendAsync(message, CancellationToken.None).CfAwait());
    }
    HConsole.WriteLine(this, "End");
    await Task.Delay(100).CfAwait();
}
[Test]
[Timeout(10_000)]
[Ignore("Requires a real server, obsolete")]
public async Task Cluster()
{
    // this test expects a real server listening on localhost
    HConsole.Configure(x => x.Configure(this).SetIndent(0).SetPrefix("TEST"));
    HConsole.WriteLine(this, "Begin");
    HConsole.WriteLine(this, "Cluster?");

    var serialization = new SerializationServiceBuilder(new NullLoggerFactory())
        .SetVersion(1)
        .Build();

    var options = new HazelcastOptions();
    options.Networking.Addresses.Add("localhost");

    // connect to the cluster - after which messages could be sent
    var cluster = new Cluster(options, serialization, new NullLoggerFactory());
    await cluster.Connections.ConnectAsync(CancellationToken.None).CfAwait();

    // leave some time for events to flow
    await Task.Delay(4000).CfAwait();

    HConsole.WriteLine(this, "End");
    await Task.Delay(100).CfAwait();
}
[Test]
public void Sequences1()
{
    // round-trip an int through a big-endian byte array wrapped
    // in a read-only sequence
    const int expected = 1234;
    var data = new byte[4];
    data.WriteInt(0, expected, Endianness.BigEndian);
    var sequence = new ReadOnlySequence<byte>(data);
    var actual = BytesExtensions.ReadInt(ref sequence, Endianness.BigEndian);
    NUnit.Framework.Assert.AreEqual(expected, actual);
}
[Test]
[Timeout(20_000)]
public async Task SocketTimeout1()
{
    // when the server is listening, connecting within a 1s timeout succeeds
    await using var server = new Server(NetworkAddress.Parse("127.0.0.1:11000"), (svr, connection, message) => new ValueTask(), LoggerFactory);
    await server.StartAsync().CfAwait();

    var endpoint = new IPEndPoint(IPAddress.Parse("127.0.0.1"), 11000);
    var socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, System.Net.Sockets.ProtocolType.Tcp);
    await socket.ConnectAsync(endpoint, 1_000).CfAwait();

    socket.Close();
    await server.StopAsync().CfAwait();
}
[Test]
[Timeout(20_000)]
public void SocketTimeout2()
{
    var socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, System.Net.Sockets.ProtocolType.Tcp);

    // nothing answers fast enough at that address: the 500ms timeout triggers
    Assert.ThrowsAsync<TimeoutException>(async () =>
    {
        await socket.ConnectAsync(NetworkAddress.Parse("www.hazelcast.com:5701").IPEndPoint, 500).CfAwait();
    });

    // the timed-out connect has closed and disposed the socket already
    Assert.Throws<ObjectDisposedException>(() => socket.Send(Array.Empty<byte>()));

    // closing and disposing again is harmless
    socket.Close();
    socket.Dispose();
}
[Test]
[Timeout(60_000)]
public async Task SocketTimeout3()
{
    // connect to a port where nothing is listening, with a timeout (60s) long
    // enough that it never triggers: the connection must fail with a socket
    // error (e.g. refused), NOT with a TimeoutException
    //
    // fix: 'using var' ensures the socket is disposed when the test ends
    // (the original code leaked it; compare with SocketTimeout2 which disposes)
    using var socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, System.Net.Sockets.ProtocolType.Tcp);
    try
    {
        var endpoint = NetworkAddress.Parse("127.0.0.1:11000").IPEndPoint;
        await socket.ConnectAsync(endpoint, 60_000).CfAwait();
        Assert.Fail("Expected an exception.");
    }
    catch (TimeoutException)
    {
        Assert.Fail("Did not expect TimeoutException.");
    }
    catch (Exception)
    {
        // ok - any other exception (e.g. connection refused) is expected
    }
    // socket is not ready (but not disposed yet - disposal happens at scope exit)
    Assert.Throws<SocketException>(() =>
    {
        socket.Send(Array.Empty<byte>());
    });
}
}
}
<|start_filename|>src/Hazelcast.Net/Configuration/HazelcastCommandLineConfigurationProvider.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Configuration.CommandLine;
namespace Hazelcast.Configuration
{
/// <summary>
/// A command line based Hazelcast <see cref="ConfigurationProvider"/>.
/// </summary>
/// <remarks>
/// <para>Adds support for hazelcast.x.y arguments that do not respect the standard hazelcast:x:y pattern.</para>
/// </remarks>
internal class HazelcastCommandLineConfigurationProvider : CommandLineConfigurationProvider
{
    /// <summary>
    /// Initializes a new instance of the <see cref="HazelcastCommandLineConfigurationProvider"/> class.
    /// </summary>
    /// <param name="args">The command line args.</param>
    /// <param name="switchMappings">The switch mappings.</param>
    public HazelcastCommandLineConfigurationProvider(IEnumerable<string> args, IDictionary<string, string> switchMappings = null)
        : base(FilterArgs(args, switchMappings), switchMappings)
    { }

    /// <summary>
    /// (internal for tests only)
    /// Filters arguments.
    /// </summary>
    /// <param name="args">The raw command line arguments.</param>
    /// <param name="switchMappings">Optional switch mappings (eg "-k" mapping to a "hazelcast:..." key).</param>
    /// <returns>The arguments, rewritten so that dotted "hazelcast.x.y" keys become
    /// delimited "hazelcast:x:y" keys that the base command line provider understands.
    /// Arguments that do not match any of the handled patterns are dropped here and
    /// left to the default command line provider.</returns>
    internal static IEnumerable<string> FilterArgs(IEnumerable<string> args, IDictionary<string, string> switchMappings)
    {
        var hazelcastAndKeyDelimiter = "hazelcast" + ConfigurationPath.KeyDelimiter;
        using var enumerator = args.GetEnumerator();
        while (enumerator.MoveNext())
        {
            var arg = enumerator.Current;
            if (string.IsNullOrWhiteSpace(arg)) continue;
            int pos;
            // case 1: a switch (starts with '-') that maps, via switchMappings,
            // to a "hazelcast:..." key - yield the mapped key then its value
            if (switchMappings != null && arg.StartsWith("-", StringComparison.Ordinal))
            {
                string argk, argv;
                if ((pos = arg.IndexOf('=', StringComparison.Ordinal)) > 0)
                {
                    // "-k=value" form: split into key and value
                    argk = arg[..pos];
                    argv = arg[(pos + 1)..];
                }
                else
                {
                    // "-k value" form: the value is the next argument
                    argk = arg;
                    argv = null;
                }
                argk = argk.Replace(".", ConfigurationPath.KeyDelimiter, StringComparison.Ordinal);
                if (switchMappings.TryGetValue(argk, out var argm) &&
                    argm.StartsWith(hazelcastAndKeyDelimiter, StringComparison.Ordinal))
                {
                    // yield the key
                    yield return "--" + argm;
                    // yield the value
#pragma warning disable CA1508 // Avoid dead conditional code - false positive due to range operator?!
                    // ReSharper disable once ConditionIsAlwaysTrueOrFalse
                    if (argv != null) yield return argv;
#pragma warning restore CA1508
                    else if (enumerator.MoveNext()) yield return enumerator.Current;
                    continue; // next!
                }
            }
            // case 2: "/hazelcast.x.y[=value]" or "--hazelcast.x.y[=value]"
            if (arg.StartsWith("/hazelcast.", StringComparison.Ordinal) ||
                arg.StartsWith("--hazelcast.", StringComparison.Ordinal))
            {
                if ((pos = arg.IndexOf('=', StringComparison.Ordinal)) > 0)
                {
                    // yield the key
                    yield return arg[..pos].Replace(".", ConfigurationPath.KeyDelimiter, StringComparison.Ordinal);
                    // yield the value
                    yield return arg[(pos + 1)..];
                }
                else
                {
                    // yield the key
                    yield return arg.Replace(".", ConfigurationPath.KeyDelimiter, StringComparison.Ordinal);
                    // yield the value (the next argument)
                    if (enumerator.MoveNext()) yield return enumerator.Current;
                }
            }
            // case 3: bare "hazelcast.x.y=value" (no dash prefix)
            else if (arg.StartsWith("hazelcast.", StringComparison.Ordinal) &&
                (pos = arg.IndexOf('=', StringComparison.Ordinal)) > 0)
            {
                // yield the key
                yield return "--" + arg[..pos].Replace(".", ConfigurationPath.KeyDelimiter, StringComparison.Ordinal);
                // yield the value
                yield return arg[(pos + 1)..];
            }
            // else ignore that arg (handled by the default command line provider)
        }
    }
}
}
<|start_filename|>src/Hazelcast.Net.Tests/Remote/HeartbeatTests.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Hazelcast.Configuration;
using Hazelcast.Core;
using Hazelcast.Networking;
using Hazelcast.Testing;
using Hazelcast.Testing.Logging;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using NUnit.Framework;
namespace Hazelcast.Tests.Remote
{
/// <summary>
/// Tests that the client heartbeat keeps connections alive against a real member.
/// </summary>
[TestFixture]
[Explicit("Takes time")]
public class HeartbeatTests : SingleMemberRemoteTestBase
{
    // captures HConsole output for the duration of a test
    private IDisposable HConsoleForTest()
        => HConsole.Capture(options => options
            .ClearAll()
            .Configure().SetMaxLevel()
            .Configure(this).SetPrefix("TEST")
            .Configure<AsyncContext>().SetMinLevel()
            .Configure<SocketConnectionBase>().SetIndent(1).SetLevel(0).SetPrefix("SOCKET"));

    /// <summary>
    /// Builds the client options: member address, cluster name and logging.
    /// </summary>
    protected override HazelcastOptions CreateHazelcastOptions()
    {
        var keyValues = new Dictionary<string, string>();
        static void AddIfMissing(IDictionary<string, string> d, string k, string v)
        {
            if (!d.ContainsKey(k)) d.Add(k, v);
        }
        // add Microsoft logging configuration
        AddIfMissing(keyValues, "Logging:LogLevel:Default", "Debug");
        AddIfMissing(keyValues, "Logging:LogLevel:System", "Information");
        AddIfMissing(keyValues, "Logging:LogLevel:Microsoft", "Information");
        return HazelcastOptions.Build(
            builder =>
            {
                builder.AddHazelcastAndDefaults(null);
                builder.AddInMemoryCollection(keyValues);
                builder.AddUserSecrets(GetType().Assembly, true);
            },
            (configuration, options) =>
            {
                options.Networking.Addresses.Clear();
                options.Networking.Addresses.Add("127.0.0.1:5701");
                options.ClusterName = RcCluster?.Id ?? options.ClusterName;
                // configure logging factory and add the console provider
                options.LoggerFactory.Creator = () =>
                    Microsoft.Extensions.Logging.LoggerFactory.Create(builder =>
                        builder
                            .AddConfiguration(configuration.GetSection("logging"))
                            .AddHConsole());
            }, ConfigurationSecretsKey);
    }

    /// <summary>
    /// Verifies the client stays active across several heartbeat periods.
    /// </summary>
    [Test]
    public async Task Heartbeat()
    {
        using var _ = HConsoleForTest();
        var options = CreateHazelcastOptions();
        options.Heartbeat.TimeoutMilliseconds = 4_000; // cannot be < period!
        options.Heartbeat.PeriodMilliseconds = 3_000;
        await using var client = await HazelcastClientFactory.StartNewClientAsync(options);
        // the client must remain active over three heartbeat periods
        await Task.Delay(3_000);
        Assert.That(client.IsActive);
        await Task.Delay(3_000);
        Assert.That(client.IsActive);
        await Task.Delay(3_000);
        Assert.That(client.IsActive);
    }

    /// <summary>
    /// Runs the map example below against a real member.
    /// </summary>
    [Test]
    public async Task DemoTest()
    {
        using var _ = HConsoleForTest();
        await new MapSimpleExample().Run(CreateHazelcastOptions(), 1000);
    }

    /// <summary>
    /// A simple example exercising map operations in a loop.
    /// </summary>
    public class MapSimpleExample
    {
        // name of the distributed map used by the example
        public const string CacheName = "simple-example";

        /// <summary>
        /// Runs the example; <paramref name="count"/> bounds the set-loop at the end.
        /// </summary>
        public async Task Run(HazelcastOptions options, int count)
        {
            // create an Hazelcast client and connect to a server running on localhost
            var client = await HazelcastClientFactory.StartNewClientAsync(options);
            // get the distributed map from the cluster
            var map = await client.GetMapAsync<string, string>(CacheName);
            // get the logger
            var logger = options.LoggerFactory.Service.CreateLogger("Demo");
            // loop
            try
            {
                // add values
                for (var i = 0; i < 1000; i++)
                {
                    await map.SetAsync("key-" + i, "value-" + i);
                }
                // NOTE
                // if processing a message that is too big, takes too long, then a heartbeat 'ping'
                // response may be waiting in some queue and not be processed = timeout! what would
                // be the correct way to handle this? have a parallel, priority queue of some sort
                // for these messages, or simply increase timeout?
                // get values, count, etc...
                logger.LogDebug("Key: " + await map.GetAsync("key"));
                logger.LogDebug("Values: " + string.Join(", ", await map.GetValuesAsync()));
                logger.LogDebug("Keys: " + string.Join(", ", await map.GetKeysAsync()));
                logger.LogDebug("Count: " + await map.GetSizeAsync());
                logger.LogDebug("Entries: " + string.Join(", ", await map.GetEntriesAsync()));
                logger.LogDebug("ContainsKey: " + await map.ContainsKeyAsync("key"));
                logger.LogDebug("ContainsValue: " + await map.ContainsValueAsync("value"));
                logger.LogDebug("Press ESC to stop");
                var x = 0;
                //while (!Console.KeyAvailable || Console.ReadKey(true).Key != ConsoleKey.Escape)
                while (x < count)
                {
                    logger.LogDebug($"{x++}: Client Connected:" + client.IsConnected);
                    await map.SetAsync("key1", "Hello, world.");
                }
                logger.LogDebug("Exit loop.");
            }
            catch (Exception e)
            {
                logger.LogError($"Ooops!!!!: '{e}'");
            }
            // destroy the map
            await client.DestroyAsync(map);
            // dispose & close the client
            await client.DisposeAsync();
            logger.LogDebug("Test Completed!");
            // dispose the logger factory = flush
            options.LoggerFactory.Service.Dispose();
        }
    }
}
}
<|start_filename|>src/Hazelcast.Net/Sql/SqlColumnType.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using Hazelcast.Models;
#pragma warning disable CA1720 // Identifier contains type name - on purpose
namespace Hazelcast.Sql
{
/// <summary>
/// SQL column type.
/// </summary>
public enum SqlColumnType
{
    /// <summary>
    /// <c>VARCHAR</c> type, represented by <c>java.lang.String</c> in Java and by <see cref="string"/> in .NET.
    /// </summary>
    Varchar = 0,

    /// <summary>
    /// <c>BOOLEAN</c> type, represented by <c>java.lang.Boolean</c> in Java and by <see cref="bool"/> in .NET.
    /// </summary>
    Boolean = 1,

    // FIXME [Oleksii] discuss sign preservation
    // TINYINT is signed in Java (Byte is -128..127) whereas .NET byte is
    // unsigned (0..255) - mapping to byte may not preserve the sign
    /// <summary>
    /// <c>TINYINT</c> type, represented by <c>java.lang.Byte</c> in Java and by <see cref="byte"/> in .NET.
    /// </summary>
    TinyInt = 2,

    /// <summary>
    /// <c>SMALLINT</c> type, represented by <c>java.lang.Short</c> in Java and by <see cref="short"/> in .NET.
    /// </summary>
    SmallInt = 3,

    /// <summary>
    /// <c>INTEGER</c> type, represented by <c>java.lang.Integer</c> in Java and by <see cref="int"/> in .NET.
    /// </summary>
    Integer = 4,

    /// <summary>
    /// <c>BIGINT</c> type, represented by <c>java.lang.Long</c> in Java and by <see cref="long"/> in .NET.
    /// </summary>
    BigInt = 5,

    /// <summary>
    /// <c>DECIMAL</c> type, represented by <c>java.math.BigDecimal</c> in Java and by <see cref="HBigDecimal"/> in .NET.
    /// </summary>
    Decimal = 6,

    /// <summary>
    /// <c>REAL</c> type, represented by <c>java.lang.Float</c> in Java and by <see cref="float"/> in .NET.
    /// </summary>
    Real = 7,

    /// <summary>
    /// <c>DOUBLE</c> type, represented by <c>java.lang.Double</c> in Java and by <see cref="double"/> in .NET.
    /// </summary>
    Double = 8,

    /// <summary>
    /// <c>DATE</c> type, represented by <c>java.time.LocalDate</c> in Java and by <see cref="HLocalDate"/> in .NET.
    /// </summary>
    Date = 9,

    /// <summary>
    /// <c>TIME</c> type, represented by <c>java.time.LocalTime</c> in Java and by <see cref="HLocalTime"/> in .NET.
    /// </summary>
    Time = 10,

    /// <summary>
    /// <c>TIMESTAMP</c> type, represented by <c>java.time.LocalDateTime</c> in Java and by <see cref="HLocalDateTime"/> in .NET.
    /// </summary>
    Timestamp = 11,

    /// <summary>
    /// <c>TIMESTAMP_WITH_TIME_ZONE</c> type, represented by <c>java.time.OffsetDateTime</c> in Java and by <see cref="HOffsetDateTime"/> in .NET.
    /// </summary>
    TimestampWithTimeZone = 12,

    /// <summary>
    /// <c>OBJECT</c> type, could be represented by any Java and .NET class.
    /// </summary>
    Object = 13,

    /// <summary>
    /// The type of the generic SQL <c>NULL</c> literal. <para/>
    /// The only valid value of <c>NULL</c> type is <c>null</c>.
    /// </summary>
    Null = 14
}
}
<|start_filename|>src/Hazelcast.Net.Tests/Cloud/CloudTests.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Hazelcast.Core;
using Hazelcast.Metrics;
using Hazelcast.Networking;
using Hazelcast.Testing;
using Hazelcast.Testing.Conditions;
using Hazelcast.Testing.Configuration;
using Hazelcast.Testing.Logging;
using Microsoft.Extensions.Logging;
using NuGet.Versioning;
using NUnit.Framework;
namespace Hazelcast.Tests.Cloud
{
/// <summary>
/// Exercises the client against a Hazelcast Cloud environment, and validates
/// the metrics wire format against a real Java decompressor.
/// </summary>
[TestFixture]
[Explicit("Has special requirements, see comments in code.")]
public class CloudTests
{
    // REQUIREMENTS
    //
    // 1. a working Cloud environment
    //    browse to https://cloud.hazelcast.com/ to create an environment
    //    (or to one of the internal Hazelcast test clouds)
    //
    // 2. parameters for this environment, configured as Visual Studio secrets,
    //    with a specific key indicated by the following constant. The secrets
    //    file would then need to contain a section looking like:
    //    {
    //        "cloud-test": {
    //            "clusterName": "<cluster-name>",
    //            "networking": {
    //                "cloud": {
    //                    "discoveryToken": "<token>",
    //                    "url": "<cloud-url>"
    //                }
    //            }
    //        },
    //    }
    //
    private const string SecretsKey = "cloud-test";
    //
    // 3. a valid path to a Java JDK, indicated by the following constant
    private const string JdkPath = @"C:\Program Files\Java\jdk1.8.0_241";

    // 4. the number of put/get iterations + how long to wait between each iteration
    private const int IterationCount = 60;
    private const int IterationPauseMilliseconds = 100;

    /// <summary>
    /// Connects a client to the configured cloud cluster and exercises basic map operations.
    /// </summary>
    /// <param name="previewOptions">Whether to enable the preview reconnect/retry options
    /// instead of the legacy ReconnectMode option.</param>
    [TestCase(true)]
    [TestCase(false)]
    public async Task SampleClient(bool previewOptions)
    {
        using var _ = HConsoleForTest();
        HConsole.WriteLine(this, "Hazelcast Cloud Client");
        var stopwatch = Stopwatch.StartNew();
        HConsole.WriteLine(this, "Build options...");
        var options = new HazelcastOptionsBuilder()
            .WithHConsoleLogger()
            .With("Logging:LogLevel:Hazelcast", "Debug")
            .WithUserSecrets(GetType().Assembly, SecretsKey)
            .Build();
        // log level must be a valid Microsoft.Extensions.Logging.LogLevel value
        //   Trace | Debug | Information | Warning | Error | Critical | None
        // enable metrics
        options.Metrics.Enabled = true;
        // enable reconnection
        if (previewOptions)
        {
            options.Preview.EnableNewReconnectOptions = true;
            options.Preview.EnableNewRetryOptions = true;
        }
        else
        {
            options.Networking.ReconnectMode = ReconnectMode.ReconnectAsync;
        }
        // instead of using Visual Studio secrets, configuration via code is
        // possible, by uncommenting some of the blocks below - however, this
        // is not recommended as it increases the risk of leaking private
        // infos in a Git repository.
        // uncomment to run on localhost
        /*
        options.Networking.Addresses.Clear();
        options.Networking.Addresses.Add("localhost:5701");
        options.ClusterName = "dev";
        */
        // uncomment to run on cloud
        /*
        options.ClusterName = "...";
        options.Networking.Cloud.DiscoveryToken = "...";
        options.Networking.Cloud.Url = new Uri("https://...");
        */
        HConsole.WriteLine(this, "Get and connect client...");
        HConsole.WriteLine(this, $"Connect to cluster \"{options.ClusterName}\"{(options.Networking.Cloud.Enabled ? " (cloud)" : "")}");
        if (options.Networking.Cloud.Enabled) HConsole.WriteLine(this, $"Cloud Discovery Url: {options.Networking.Cloud.Url}");
        var client = await HazelcastClientFactory.StartNewClientAsync(options).ConfigureAwait(false);
        HConsole.WriteLine(this, "Get map...");
        var map = await client.GetMapAsync<string, string>("map").ConfigureAwait(false);
        HConsole.WriteLine(this, "Put value into map...");
        await map.PutAsync("key", "value").ConfigureAwait(false);
        HConsole.WriteLine(this, "Get value from map...");
        var value = await map.GetAsync("key").ConfigureAwait(false);
        HConsole.WriteLine(this, "Validate value...");
        if (!value.Equals("value"))
        {
            HConsole.WriteLine(this, "Error: check your configuration.");
            return;
        }
        HConsole.WriteLine(this, "Put/Get values in/from map with random values...");
        var random = new Random();
        var step = IterationCount / 10;
        for (var i = 0; i < IterationCount; i++)
        {
            var randomValue = random.Next(100_000);
            await map.PutAsync("key_" + randomValue, "value_" + randomValue).ConfigureAwait(false);
            randomValue = random.Next(100_000);
            // NOTE(review): puts above use "key_" + value but this get uses "key" + value,
            // so the gets can never hit the puts - confirm whether the missing underscore
            // is intentional (nothing is asserted on the result, so behavior is unaffected)
            await map.GetAsync("key" + randomValue).ConfigureAwait(false);
            if (i % step == 0)
            {
                HConsole.WriteLine(this, $"[{i:D3}] map size: {await map.GetSizeAsync().ConfigureAwait(false)}");
            }
            if (IterationPauseMilliseconds > 0)
                await Task.Delay(IterationPauseMilliseconds).ConfigureAwait(false);
        }
        HConsole.WriteLine(this, "Destroy the map...");
        await map.DestroyAsync().ConfigureAwait(false);
        HConsole.WriteLine(this, "Dispose map...");
        await map.DisposeAsync().ConfigureAwait(false);
        HConsole.WriteLine(this, "Dispose client...");
        await client.DisposeAsync().ConfigureAwait(false);
        HConsole.WriteLine(this, $"Done (elapsed: {stopwatch.Elapsed.ToString("hhmmss\\.fff\\ ", CultureInfo.InvariantCulture)}).");
    }

    /// <summary>
    /// Compresses a metric with the .NET compressor, then compiles and runs a Java
    /// program that decompresses it, asserting the formats agree.
    /// </summary>
    [Test]
    public void MetricsCompressorTests()
    {
        // compress bytes
        byte[] bytes;
        using (var compressor = new MetricsCompressor())
        {
            compressor.Append(new Metric<long>
            {
                Descriptor = new MetricDescriptor<long>("name"),
                Value = 42
            });
            bytes = compressor.GetBytesAndReset();
        }
        // determine solution path
        var assemblyLocation = GetType().Assembly.Location;
        var solutionPath = Path.GetFullPath(Path.Combine(Path.GetDirectoryName(assemblyLocation), "../../../../.."));
        // name a temp directory
        var tempPath = Path.Combine(Path.GetTempPath(), $"hz-tests-{Guid.NewGuid():N}");
        try
        {
            // create the temp directory and copy the source files
            Directory.CreateDirectory(tempPath);
            //File.WriteAllText(Path.Combine(tempPath, "Program.java"), Resources.Java_CloudTests_Program);
            //File.WriteAllText(Path.Combine(tempPath, "TestConsumer.java"), Resources.Java_Cloudtests_TestConsumer);
            File.WriteAllText(Path.Combine(tempPath, "Program.java"), TestFiles.ReadAllText(this, "Java/CloudTests/Program.java"));
            File.WriteAllText(Path.Combine(tempPath, "TestConsumer.java"), TestFiles.ReadAllText(this, "Java/Cloudtests/TestConsumer.java"));
            // validate that we have the server JAR
            var serverJarPath = Path.Combine(solutionPath, $"temp/lib/hazelcast-{ServerVersion.GetVersion(NuGetVersion.Parse("4.0"))}.jar");
            Assert.That(File.Exists(serverJarPath), Is.True, $"Could not find JAR file {serverJarPath}");
            // compile
            Console.WriteLine("Compile...");
            Assert.That(Directory.GetFiles(tempPath, "*.java").Any(), "Could not find source files.");
            var p = Process.Start(new ProcessStartInfo(Path.Combine(JdkPath, "bin/javac.exe"), $"-cp {serverJarPath} {Path.Combine(tempPath, "*.java")}")
                .WithRedirects(true, true, false));
            Assert.That(p, Is.Not.Null);
            p.WaitForExit();
            Console.WriteLine($"Compilation exit code: {p.ExitCode}");
            Console.WriteLine("Compilation stderr:");
            Console.WriteLine(p.StandardError.ReadToEnd());
            Console.WriteLine("Compilation stdout:");
            Console.WriteLine(p.StandardOutput.ReadToEnd());
            Assert.That(p.ExitCode, Is.Zero, "Compilation failed.");
            // execute
            Console.WriteLine("Execute...");
            Console.WriteLine($"Writing {bytes.Length} bytes to java");
            p = Process.Start(new ProcessStartInfo(Path.Combine(JdkPath, "bin/java.exe"), $"-cp {serverJarPath};{tempPath} Program")
                .WithRedirects(true, true, true));
            Assert.That(p, Is.Not.Null);
            // pipe the compressed metrics into the Java program's stdin
            p.StandardInput.BaseStream.Write(bytes, 0, bytes.Length);
            p.StandardInput.Close();
            p.WaitForExit();
            Console.WriteLine($"Execution exit code: {p.ExitCode}");
            Console.WriteLine("Execution stderr:");
            Console.WriteLine(p.StandardError.ReadToEnd());
            Console.WriteLine("Execution stdout:");
            var output = p.StandardOutput.ReadToEnd();
            Console.WriteLine(output);
            Assert.That(p.ExitCode, Is.Zero, "Execution failed.");
            // assert that things were properly decompressed
            Assert.That(output.Contains("name = 42"));
        }
        finally
        {
            // get rid of the temp directory
            Directory.Delete(tempPath, true);
        }
    }

    // captures HConsole output for the duration of a test
    private IDisposable HConsoleForTest()
        => HConsole.Capture(options => options
            .ClearAll()
            .Configure<HConsoleLoggerProvider>().SetPrefix("LOG").SetMaxLevel()
            .Configure().SetMinLevel().EnableTimeStamp(origin: DateTime.Now)
            .Configure(this).SetMaxLevel().SetPrefix("TEST")
        );
}
/// <summary>
/// Provides extension methods for the cloud tests.
/// </summary>
public static class CloudTestsExtensions
{
    /// <summary>
    /// Configures stream redirections on a <see cref="ProcessStartInfo"/>.
    /// </summary>
    public static ProcessStartInfo WithRedirects(this ProcessStartInfo info, bool redirectOutput, bool redirectError, bool redirectInput)
    {
        info.CreateNoWindow = true;
        info.RedirectStandardInput = redirectInput;
        info.RedirectStandardError = redirectError;
        info.RedirectStandardOutput = redirectOutput;
#if !NETCOREAPP
        // on .NET Framework, redirections require that the shell is not used
        info.UseShellExecute = false;
#endif
        return info;
    }

    /// <summary>
    /// Configures the options builder so that all logging goes to HConsole only.
    /// </summary>
    public static HazelcastOptionsBuilder WithHConsoleLogger(this HazelcastOptionsBuilder builder)
        => builder
            .With("Logging:LogLevel:Default", "None")
            .With("Logging:LogLevel:System", "None")
            .With("Logging:LogLevel:Microsoft", "None")
            .With((configuration, options) =>
            {
                // configure logging factory and add the console provider
                options.LoggerFactory.Creator = () => LoggerFactory.Create(loggingBuilder =>
                    loggingBuilder
                        .AddConfiguration(configuration.GetSection("logging"))
                        .AddHConsole());
            });
}
}
<|start_filename|>src/Hazelcast.Net/Networking/ConnectAddressResolver.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Hazelcast.Core;
using Hazelcast.Models;
using Microsoft.Extensions.Logging;
namespace Hazelcast.Networking
{
/// <summary>
/// Resolves connect addresses for members by determining whether to use internal or public addresses.
/// </summary>
internal class ConnectAddressResolver
{
    // TODO: consider making these options
    private const int NumberOfMembersToCheck = 3;
    private static readonly TimeSpan InternalAddressTimeout = TimeSpan.FromSeconds(1);
    private static readonly TimeSpan PublicAddressTimeout = TimeSpan.FromSeconds(3);

    private readonly NetworkingOptions _options;
    private readonly ILogger _logger;

    /// <summary>
    /// Initializes a new instance of the <see cref="ConnectAddressResolver"/> class.
    /// </summary>
    /// <param name="options">The networking options.</param>
    /// <param name="loggerFactory">A logger factory.</param>
    public ConnectAddressResolver(NetworkingOptions options, ILoggerFactory loggerFactory)
    {
        _options = options;
        _logger = loggerFactory.CreateLogger<ConnectAddressResolver>();
    }

    /// <summary>
    /// Determines whether to use public addresses.
    /// </summary>
    /// <param name="members">A collection of members.</param>
    /// <returns><c>true</c> if public addresses must be used; otherwise <c>false</c>, indicating that internal addresses can be used.</returns>
    public async Task<bool> DetermineUsePublicAddresses(IReadOnlyCollection<MemberInfo> members)
    {
        // if the user has specified its intention, respect it, otherwise try to decide
        // automatically whether to use private or public addresses by trying to reach
        // a few members
        if (_options.UsePublicAddresses is {} usePublicAddresses)
        {
            _logger.LogDebug(usePublicAddresses
                ? "NetworkingOptions.UsePublicAddresses is true, the client will use public addresses."
                : "NetworkingOptions.UsePublicAddresses is false, the client will use internal addresses.");
            return usePublicAddresses;
        }

        _logger.LogDebug("NetworkingOptions.UsePublicAddresses is not set, decide by ourselves.");

        // if ssl is enabled, then the client uses internal addresses
        if (_options.Ssl.Enabled)
        {
            _logger.LogDebug("Ssl is enabled, the client will use internal addresses.");
            return false;
        }

        // dump the members and their address maps, for troubleshooting
        if (_logger.IsEnabled(LogLevel.Debug))
        {
            var text = new StringBuilder();
            text.Append("Members [");
            text.Append(members.Count);
            text.Append("] {");
            text.AppendLine();
            foreach (var member in members)
            {
                text.Append(" ");
                text.Append(member.ToShortString(false));
                text.AppendLine();
                foreach (var entry in member.AddressMap)
                {
                    text.Append(" ");
                    text.Append(entry.Key);
                    text.Append(": ");
                    text.Append(entry.Value);
                    text.AppendLine();
                }
            }
            text.Append('}');
            _logger.LogDebug(text.ToString());
        }

        // if at least one member has its internal address that matches options, assume we can use internal addresses
        if (DetermineAnyMemberInternalAddressMatchesOptions(members))
        {
            _logger.LogDebug("At least one member's internal address matches options, assume that the client can use internal addresses.");
            return false;
        }

        // if one member does not have a public address, then the client has to use internal addresses
        if (members.Any(x => x.PublicAddress is null))
        {
            _logger.LogDebug("At least one member does not have a public address, the client has to use internal addresses.");
            return false;
        }

        // else try to reach addresses to figure out which ones to use
        return await DeterminePublicAddressesAreRequired(members).CfAwait();
    }

    // determines whether at least one member has its internal address specified in options,
    // which would mean that the client can reach the configured addresses and we can use
    // internal addresses
    private bool DetermineAnyMemberInternalAddressMatchesOptions(IReadOnlyCollection<MemberInfo> members)
    {
        // both NodeJS and Java code plainly ignore ports and only focus on the host name
        var optionHosts = _options.Addresses
            .Select(x => NetworkAddress.TryParse(x, out var a) ? a : null)
            .Where(x => x != null)
            .Select(x => x.HostName);

        var memberHosts = members.Select(x => x.Address.HostName);

        return memberHosts.Intersect(optionHosts).Any();
    }

    // determines whether using public addresses is required
    // by testing a random subset (of size NumberOfMembersToCheck) of all members
    private Task<bool> DeterminePublicAddressesAreRequired(IReadOnlyCollection<MemberInfo> members)
        => DeterminePublicAddressesAreRequired(members.Shuffle(), NumberOfMembersToCheck);

    // determines whether using public addresses is required
    private async Task<bool> DeterminePublicAddressesAreRequired(IReadOnlyCollection<MemberInfo> members, int sampleCount)
    {
        var count = 0;
        var requirePublic = false;

        foreach (var member in members)
        {
            // we failed to find a member that can be reached at its internal address, but enough members can
            // be reached at their public addresses, so assume public addresses are required for all
            // (note: only short-circuits when requirePublic is already true; otherwise keep probing)
            if (count++ == sampleCount && requirePublic)
            {
                _logger.LogDebug("At least {Count} members can only be reached at their public address, the client has to use public addresses.", sampleCount);
                return true;
            }

            // TODO: we could try both in parallel and would it be a good idea?
            //var (canReachInternal, canReachPublic) = await Task.WhenAll(
            //    member.Address.TryReachAsync(_internalAddressTimeout),
            //    member.PublicAddress.TryReachAsync(_publicAddressTimeout)
            //    ).CfAwait();
            var canReachInternal = await member.Address.TryReachAsync(InternalAddressTimeout).CfAwait();

            // if one member can be reached at its internal address then assume internal addresses are ok for all
            if (canReachInternal)
            {
                _logger.LogDebug("Member at {Address} can be reached at this internal address, assume that the client can use internal addresses.", member.Address);
                return false;
            }

            var canReachPublic = await member.PublicAddress.TryReachAsync(PublicAddressTimeout).CfAwait();

            // if the member cannot be reached at its internal address but can be reached at its public address,
            // this would indicate that the client has to use public addresses, but we are going to try a few
            // more members just to be sure - maybe the failure to reach the internal address was a glitch and
            // another member will make it
            if (canReachPublic)
            {
                _logger.LogDebug("Member at {Address} cannot be reached at this internal address, but can be reached at its {PublicAddress} public address.", member.Address, member.PublicAddress);
                requirePublic = true;
            }

            // otherwise, the client cannot be reached at all - both NodeJS and Java immediately return false,
            // but really - this could very well be a glitch and we should probably try a few more members
        }

        // we failed to find a member that can be reached at its internal address, but members can be reached at
        // their public addresses, so assume public addresses are required for all
        if (requirePublic)
        {
            _logger.LogDebug("Members can only be reached at their public address, the client has to use public addresses.");
            return true;
        }

        // otherwise, we tested all members and could not reach any or them, neither on internal nor on public address,
        // and this is a sad situation indeed - we're going to go with internal addresses but... something is wrong
        _logger.LogDebug("Could not connect to any member. Assume the client can use internal addresses.");
        return false;
    }
}
}
<|start_filename|>src/Hazelcast.Net/HazelcastOptions.Build.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using Hazelcast.Configuration.Binding;
using Microsoft.Extensions.Configuration;
namespace Hazelcast
{
public partial class HazelcastOptions // Build
{
    /// <summary>
    /// The name of the Hazelcast configuration section, i.e. <c>"hazelcast"</c>.
    /// </summary>
    internal const string Hazelcast = "hazelcast";

    /// <summary>
    /// (internal for tests only) Builds Hazelcast options.
    /// </summary>
    /// <param name="setup">An <see cref="IConfigurationBuilder"/> setup delegate.</param>
    /// <param name="configure">Optional <see cref="HazelcastOptions"/> configuration delegate.</param>
    /// <returns>Hazelcast options.</returns>
    internal static HazelcastOptions Build(Action<IConfigurationBuilder> setup, Action<IConfiguration, HazelcastOptions> configure = null)
        => Build(setup, configure, null);

    /// <summary>
    /// (internal for tests only) Builds Hazelcast options, using an alternate key.
    /// </summary>
    /// <param name="setup">An <see cref="IConfigurationBuilder"/> setup delegate.</param>
    /// <param name="configure">An <see cref="HazelcastOptions"/> configuration delegate.</param>
    /// <param name="altKey">An alternate key, or <c>null</c>.</param>
    /// <returns>Hazelcast options.</returns>
    /// <remarks>
    /// <para>This is used in tests only and not meant to be public. If <paramref name="altKey"/> is not
    /// <c>null</c>, options starting with that key will bind after those starting with "hazelcast" and
    /// override them. This allows one json file to contain several configuration sets, which is
    /// convenient for instance when using the "user secrets" during tests.</para>
    /// </remarks>
    internal static HazelcastOptions Build(Action<IConfigurationBuilder> setup, Action<IConfiguration, HazelcastOptions> configure, string altKey)
    {
        if (setup == null) throw new ArgumentNullException(nameof(setup));

        var configurationBuilder = new ConfigurationBuilder();
        setup(configurationBuilder);

        return Build(configurationBuilder.Build(), configure, altKey);
    }

    // builds options, no alternate keys
    private static HazelcastOptions Build(IConfiguration configuration, Action<IConfiguration, HazelcastOptions> configure = null)
        => Build(configuration, configure, null);

    // builds options, optionally binding alternate keys
    private static HazelcastOptions Build(IConfiguration configuration, Action<IConfiguration, HazelcastOptions> configure, string altKey)
    {
        var options = new HazelcastOptions();

        // must HzBind here and not simply Bind because we use our custom
        // binder which handles more situations such as ignoring and/or
        // renaming properties
        configuration.HzBind(Hazelcast, options);

        // bind the alternate key last so its values override "hazelcast" ones
        if (altKey != null && altKey != Hazelcast)
            configuration.HzBind(altKey, options);

        configure?.Invoke(configuration, options);
        return options;
    }
}
}
<|start_filename|>src/Hazelcast.Net/Networking/NetworkAddress.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Linq;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using System.Threading.Tasks;
using Hazelcast.Core;
namespace Hazelcast.Networking
{
/// <summary>
/// Represents a network address.
/// </summary>
public class NetworkAddress : IEquatable<NetworkAddress>
{
    // NOTES
    //
    // an IP v4 address is 'x.x.x.x' where each octet 'x' is a byte (8 bits unsigned)
    //
    // an IP v4 endpoint is '<address>:p' where 'p' is the port number
    //
    // an IP v6 address can be normal (pure v6) or dual (v6 + v4)
    // normal is 'y:y:y:y:y:y:y:y' where each segment 'y' is a word (16 bits unsigned)
    // dual is 'y:y:y:y:y:y:x.x.x.x'
    // missing segments are assumed to be zeros
    //
    // it can also be 'y:y:y:y:y:y:y:y%i' where 'i' is the scope id (a number, 'eth0'..)
    //
    // an IP v6 endpoint is '[<address>]:p' where 'p' is the port number
    //
    // read
    // https://superuser.com/questions/99746/why-is-there-a-percent-sign-in-the-ipv6-address
    // https://docs.microsoft.com/en-us/previous-versions/aa917150(v=msdn.10)
    // which explain the 'node-local' vs 'link-local' vs 'global' scopes

    /// <summary>
    /// Initializes a new instance of the <see cref="NetworkAddress"/> class with a hostname and a port.
    /// </summary>
    /// <param name="hostName">The hostname.</param>
    /// <param name="port">The port.</param>
    /// <remarks>
    /// <para>Resolves the hostname to an IP address via DNS; throws if resolution fails.</para>
    /// </remarks>
    public NetworkAddress(string hostName, int port = 0)
        : this(hostName, GetIPAddressByName(hostName), port)
    { }

    /// <summary>
    /// Initializes a new instance of the <see cref="NetworkAddress"/> class with an IP address and a port.
    /// </summary>
    /// <param name="ipAddress">The IP address.</param>
    /// <param name="port">The port.</param>
    public NetworkAddress(IPAddress ipAddress, int port = 0)
        : this(null, ipAddress, port)
    { }

    /// <summary>
    /// Initializes a new instance of the <see cref="NetworkAddress"/> with an IP endpoint.
    /// </summary>
    /// <param name="endpoint">The IP endpoint.</param>
    internal NetworkAddress(IPEndPoint endpoint)
    {
        IPEndPoint = endpoint ?? throw new ArgumentNullException(nameof(endpoint));

        var ipAddress = IPEndPoint.Address;
        if (ipAddress == null) throw new ArgumentException("Address cannot be null.", nameof(endpoint));

        HostName = ipAddress.ToString();
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="NetworkAddress"/>.
    /// </summary>
    /// <param name="hostName">The hostname.</param>
    /// <param name="ipAddress">The IP address.</param>
    /// <param name="port">The port.</param>
    /// <remarks>
    /// <para>The <paramref name="hostName"/> and <paramref name="ipAddress"/> are assumed to be consistent.</para>
    /// </remarks>
    internal NetworkAddress(string hostName, IPAddress ipAddress, int port)
    {
        if (ipAddress == null) throw new ArgumentNullException(nameof(ipAddress));
        if (port < 0) throw new ArgumentOutOfRangeException(nameof(port));

        IPEndPoint = new IPEndPoint(ipAddress, port);
        HostName = string.IsNullOrWhiteSpace(hostName) ? ipAddress.ToString() : hostName;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="NetworkAddress"/> with the same
    /// host name and (already resolved) IP address as a source address, and a different port.
    /// </summary>
    /// <param name="source">The origin address.</param>
    /// <param name="port">The port.</param>
    internal NetworkAddress(NetworkAddress source, int port)
    {
        if (source == null) throw new ArgumentNullException(nameof(source));
        if (port < 0) throw new ArgumentOutOfRangeException(nameof(port));

        IPEndPoint = new IPEndPoint(source.IPAddress, port);
        HostName = source.HostName;
    }

    /// <summary>
    /// Gets the host name.
    /// </summary>
    public string HostName { get; }

    /// <summary>
    /// Gets the host name (prefer <see cref="HostName"/>).
    /// </summary>
    /// <returns>
    /// <para>This property returns <see cref="HostName"/> and is required by codecs
    /// that use names derived from the protocol definitions, because the protocol
    /// codecs generator does not know yet how to map / rename properties.</para>
    /// </returns>
    internal string Host => HostName;

    /// <summary>
    /// Gets the port.
    /// </summary>
    public int Port => IPEndPoint.Port;

    /// <summary>
    /// Gets the IP address corresponding to this address.
    /// </summary>
    // ReSharper disable once InconsistentNaming
    public IPAddress IPAddress => IPEndPoint.Address;

    /// <summary>
    /// Gets the IP endpoint corresponding to this address.
    /// </summary>
    // ReSharper disable once InconsistentNaming
    public IPEndPoint IPEndPoint { get; }

    /// <summary>
    /// Whether the address is an IP v4 address.
    /// </summary>
    public bool IsIpV4 => IPAddress.AddressFamily == AddressFamily.InterNetwork;

    /// <summary>
    /// Whether the address is an IP v6 address.
    /// </summary>
    public bool IsIpV6 => IPAddress.AddressFamily == AddressFamily.InterNetworkV6;

    /// <summary>
    /// Whether the address is an IP v6 address which is global (non-local), or scoped.
    /// </summary>
    public bool IsIpV6GlobalOrScoped => (!IPAddress.IsIPv6SiteLocal && !IPAddress.IsIPv6LinkLocal) || IPAddress.ScopeId > 0;

    /// <summary>
    /// Gets an IP address by name, via DNS.
    /// </summary>
    /// <param name="hostname">The hostname.</param>
    /// <returns>The corresponding IP address.</returns>
    /// <remarks>
    /// <para>Returns the first IP v4 address available, if any,
    /// else the first IP v6 address available. Throws if it cannot
    /// get an IP for the hostname via DNS. May return <c>null</c> when
    /// DNS succeeds but yields no address at all.</para>
    /// </remarks>
    // ReSharper disable once InconsistentNaming
    internal static IPAddress GetIPAddressByName(string hostname)
    {
        if (hostname == "0.0.0.0") return IPAddress.Any;

        var addresses = HDns.GetHostAddresses(hostname);

        // prefer an IP v4, if possible
        return addresses.FirstOrDefault(x => x.AddressFamily == AddressFamily.InterNetwork) ??
               addresses.FirstOrDefault();
    }

    /// <summary>
    /// Parses a string into a <see cref="NetworkAddress"/>.
    /// </summary>
    /// <param name="s">The string.</param>
    /// <param name="defaultPort">The default port to use if none is specified.</param>
    /// <returns>The network address.</returns>
    /// <exception cref="FormatException">The string does not represent a valid network address.</exception>
    internal static NetworkAddress Parse(string s, int defaultPort = 0)
    {
        if (TryParse(s, out NetworkAddress address, defaultPort)) return address;
        throw new FormatException($"The string \"{s}\" does not represent a valid network address.");
    }

    /// <summary>
    /// Tries to parse a string into a <see cref="NetworkAddress"/> instance.
    /// </summary>
    /// <param name="s">The string.</param>
    /// <param name="address">The network address.</param>
    /// <param name="defaultPort">The default port to use if none is specified.</param>
    /// <returns>Whether the string could be parsed into an address.</returns>
    internal static bool TryParse(string s, out NetworkAddress address, int defaultPort = 0)
    {
        address = null;

        var span = s.AsSpan();
        var colon1 = span.IndexOf(':');
        var colon2 = span.LastIndexOf(':');
        var brket1 = span.IndexOf('[');
        var brket2 = span.IndexOf(']');
        var port = defaultPort;

        // opening bracket must be first
        if (brket1 > 0) return false;

        // must have both brackets, or none
        if (brket1 == 0 != brket2 >= 0) return false;

        // brackets => colon if any *must* be right after closing bracket
        if (brket1 == 0 && colon2 > brket2 + 1) return false;

        // no bracket and single colon, or one colon after brackets
        // => parse port
        if ((brket2 < 0 && colon2 > 0 && colon1 == colon2) ||
            (brket2 > 0 && colon2 > brket2))
        {
#if NETSTANDARD2_0
            if (!int.TryParse(span[(colon2 + 1)..].ToString(), out port))
                return false;
#else
            if (!int.TryParse(span.Slice(colon2 + 1), out port))
                return false;
#endif
        }

        ReadOnlySpan<char> hostName;
#pragma warning disable IDE0057 // Slice can be simplified
        if (brket1 == 0)
        {
            // if we have brackets, they must contain colons
            if (colon2 < 0 || colon1 > brket2) return false;

            // hostname is in-between brackets
            // (and has to be parseable as an ip address)
            hostName = span.Slice(1, brket2 - 1);
        }
        else
        {
            // one single colon = hostname is whatever is before
            // otherwise, hostname is the whole string
            hostName = (colon2 > 0 && colon1 == colon2) ? span[..colon2] : span;
        }
#pragma warning restore IDE0057 // Slice can be simplified

        // must have a hostname - look at the code above, hostname cannot be empty here
        //if (hostName.Length == 0)
        //    return false;

        // note that in IPv6 case, hostname can contain a % followed by a scope id
        // which is fine, IPAddress.TryParse handles it

        string hostNameString;
#if NETSTANDARD2_0
        if (IPAddress.TryParse(hostName.ToString(), out var ipAddress))
#else
        if (IPAddress.TryParse(hostName, out var ipAddress))
#endif
        {
            // if the hostname parses to an ip address, fine
            hostNameString = ipAddress.ToString();
        }
        else
        {
            // if we have brackets, hostname must be parseable
            if (brket1 == 0) return false;

            hostNameString = hostName.ToString();

            // else, try to get the ip via DNS
            try
            {
                ipAddress = GetIPAddressByName(hostNameString);
            }
            catch
            {
                return false;
            }

            // DNS can succeed yet return no address at all, in which case
            // GetIPAddressByName returns null - fail the parse instead of
            // letting the constructor below throw out of TryParse
            if (ipAddress == null) return false;
        }

        address = new NetworkAddress(hostNameString, ipAddress, port);
        return true;
    }

    /// <summary>
    /// Creates a new instance of the network address with the same host but a different port.
    /// </summary>
    /// <param name="port">The port.</param>
    /// <returns>A new instance of the network address with the same host but a different port.</returns>
    /// <remarks>
    /// <para>Reuses the already-resolved <see cref="IPAddress"/> of this instance
    /// instead of re-resolving the host name via DNS (which could be slow, and
    /// could even resolve to a different address).</para>
    /// </remarks>
    internal NetworkAddress WithPort(int port) => new NetworkAddress(this, port);

    /// <summary>
    /// Determines whether this address is reachable, by trying to connect to it.
    /// </summary>
    /// <param name="timeout">A timeout.</param>
    /// <returns><c>true</c> if the connection was successful; otherwise false.</returns>
    /// <remarks>Use a timeout value of -1ms for infinite.</remarks>
    internal async Task<bool> TryReachAsync(TimeSpan timeout)
    {
        var socket = new Socket(IPAddress.AddressFamily, SocketType.Stream, ProtocolType.Tcp);

        try
        {
            await socket.ConnectAsync(IPEndPoint, (int) timeout.TotalMilliseconds, CancellationToken.None).CfAwait();
            return true;
        }
        catch
        {
            // any failure (timeout, refused...) means "not reachable"
            return false;
        }
        finally
        {
            socket.Close();
            socket.Dispose();
        }
    }

    /// <inheritdoc />
    public override string ToString()
    {
        var name = IPAddress.ToString(); // or HostName?

        return IPAddress.AddressFamily == AddressFamily.InterNetworkV6
            ? $"[{name}]:{Port}"
            : $"{name}:{Port}";
    }

    /// <inheritdoc />
    public override int GetHashCode() => IPEndPoint.GetHashCode();

    /// <inheritdoc />
    public override bool Equals(object obj)
    {
        return obj is NetworkAddress other && Equals(this, other);
    }

    /// <inheritdoc />
    public bool Equals(NetworkAddress other)
    {
        return Equals(this, other);
    }

    /// <summary>
    /// Compares two instances for equality.
    /// </summary>
    private static bool Equals(NetworkAddress a1, NetworkAddress a2)
    {
        // return true if both are null or both are the same instance
        if (ReferenceEquals(a1, a2)) return true;

        // return false if either is null since the other cannot be null
        if (a1 is null || a2 is null) return false;

        // actual comparison - equality is on the endpoint (address + port) only
        return a1.IPEndPoint.Equals(a2.IPEndPoint);
    }

    /// <summary>
    /// Overrides the == operator.
    /// </summary>
    public static bool operator ==(NetworkAddress a1, NetworkAddress a2)
        => Equals(a1, a2);

    /// <summary>
    /// Overrides the != operator.
    /// </summary>
    public static bool operator !=(NetworkAddress a1, NetworkAddress a2)
        => !(a1 == a2);
}
}
<|start_filename|>src/Hazelcast.Net.Testing/RemoteTestBase.Remoting.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Threading;
using System.Threading.Tasks;
using Hazelcast.Core;
using Hazelcast.Networking;
using Hazelcast.Testing.Remote;
using Microsoft.Extensions.Logging;
using NUnit.Framework;
using Thrift;
namespace Hazelcast.Testing
{
public abstract partial class RemoteTestBase // Remoting
{
    // whether connecting to the Remote Controller is assumed possible; flipped to
    // false on the first failure so subsequent tests fail fast as inconclusive
    // NOTE(review): field name has a typo ("Remove" for "Remote"); renaming is not
    // safe from this partial class file alone - confirm no other partial references it
    private static bool _canConnectToRemoveController = true;

    /// <summary>
    /// Connects to the remote controller.
    /// </summary>
    /// <returns>A new remote controller client.</returns>
    protected async Task<IRemoteControllerClient> ConnectToRemoteControllerAsync()
    {
        // assume we can start the RC, else mark the test as inconclusive without even trying
        // so... if starting the RC fails once, we probably have a problem (is it even running?)
        // and there is no point trying again and again - faster to stop here
        Assume.That(_canConnectToRemoveController, Is.True, () => "Cannot connect to the Remote Controller (is it running?).");

        try
        {
            // the Remote Controller listens on localhost:9701; talk to it over
            // a framed Thrift transport with the binary protocol
            var rcHostAddress = NetworkAddress.GetIPAddressByName("localhost");
            var configuration = new TConfiguration();
            var tSocketTransport = new Thrift.Transport.Client.TSocketTransport(rcHostAddress, 9701, configuration);
            var transport = new Thrift.Transport.TFramedTransport(tSocketTransport);
            if (!transport.IsOpen)
            {
                await transport.OpenAsync(CancellationToken.None).CfAwait();
            }
            var protocol = new Thrift.Protocol.TBinaryProtocol(transport);
            return RemoteControllerClient.Create(protocol);
        }
        catch (Exception e)
        {
            _canConnectToRemoveController = false; // fail fast other tests
            Logger?.LogDebug(e, "Cannot connect to the Remote Controller (is it running?)");
            throw new AssertionException("Cannot connect to the Remote Controller (is it running?)", e);
        }
    }
}
}
<|start_filename|>src/Hazelcast.Net/Core/BytesExtensions.ReadFromByteArray.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Diagnostics;
using Hazelcast.Models;
namespace Hazelcast.Core
{
internal static partial class BytesExtensions // Read from byte[]
{
    /// <summary>
    /// Reads a <see cref="byte"/> value from an array of bytes.
    /// </summary>
    /// <param name="bytes">The array of bytes to read from.</param>
    /// <param name="position">The position in the array where the value should be read.</param>
    /// <returns>The value.</returns>
    public static byte ReadByte(this byte[] bytes, int position)
    {
        Debug.Assert(bytes != null && position >= 0 && bytes.Length >= position + SizeOfByte);

        return bytes[position];
    }

    /// <summary>
    /// Reads an <see cref="sbyte"/> value from an array of bytes.
    /// </summary>
    /// <param name="bytes">The array of bytes to read from.</param>
    /// <param name="position">The position in the array where the value should be read.</param>
    /// <returns>The value.</returns>
    public static sbyte ReadSByte(this byte[] bytes, int position)
    {
        // SizeOfUnsignedByte is the same size (1) as an sbyte, so the bound check holds
        Debug.Assert(bytes != null && position >= 0 && bytes.Length >= position + SizeOfUnsignedByte);

        return (sbyte) bytes[position];
    }

    /// <summary>
    /// Reads a <see cref="short"/> value from an array of bytes.
    /// </summary>
    /// <param name="bytes">The array of bytes to read from.</param>
    /// <param name="position">The position in the array where the value should be read.</param>
    /// <param name="endianness">The endianness.</param>
    /// <returns>The value.</returns>
    public static short ReadShort(this byte[] bytes, int position, Endianness endianness)
    {
        Debug.Assert(bytes != null && position >= 0 && bytes.Length >= position + SizeOfShort);

        unchecked
        {
            return endianness.IsBigEndian()
                ? (short) (bytes[position + 0] << 8 | bytes[position + 1])
                : (short) (bytes[position] | bytes[position + 1] << 8);
        }
    }

    /// <summary>
    /// Reads an <see cref="ushort"/> value from an array of bytes.
    /// </summary>
    /// <param name="bytes">The array of bytes to read from.</param>
    /// <param name="position">The position in the array where the value should be read.</param>
    /// <param name="endianness">The endianness.</param>
    /// <returns>The value.</returns>
    public static ushort ReadUShort(this byte[] bytes, int position, Endianness endianness)
    {
        Debug.Assert(bytes != null && position >= 0 && bytes.Length >= position + SizeOfUnsignedShort);

        unchecked
        {
            return endianness.IsBigEndian()
                ? (ushort) (bytes[position + 0] << 8 | bytes[position + 1])
                : (ushort) (bytes[position] | bytes[position + 1] << 8);
        }
    }

    /// <summary>
    /// Reads an <see cref="int"/> value from an array of bytes.
    /// </summary>
    /// <param name="bytes">The array of bytes to read from.</param>
    /// <param name="position">The position in the array where the value should be read.</param>
    /// <param name="endianness">The endianness.</param>
    /// <returns>The value.</returns>
    public static int ReadInt(this byte[] bytes, int position, Endianness endianness)
    {
        Debug.Assert(bytes != null && position >= 0 && bytes.Length >= position + SizeOfInt);

        unchecked
        {
            return endianness.IsBigEndian()

                ? bytes[position] << 24 | bytes[position + 1] << 16 |
                  bytes[position + 2] << 8 | bytes[position + 3]

                : bytes[position] | bytes[position + 1] << 8 |
                  bytes[position + 2] << 16 | bytes[position + 3] << 24;
        }
    }

    /// <summary>
    /// Reads a <see cref="long"/> value from an array of bytes.
    /// </summary>
    /// <param name="bytes">The array of bytes to read from.</param>
    /// <param name="position">The position in the array where the value should be read.</param>
    /// <param name="endianness">The endianness.</param>
    /// <returns>The value.</returns>
    public static long ReadLong(this byte[] bytes, int position, Endianness endianness)
    {
        Debug.Assert(bytes != null && position >= 0 && bytes.Length >= position + SizeOfLong);

        unchecked
        {
            return endianness.IsBigEndian()

                ? (long) bytes[position] << 56 | (long) bytes[position + 1] << 48 |
                  (long) bytes[position + 2] << 40 | (long) bytes[position + 3] << 32 |
                  (long) bytes[position + 4] << 24 | (long) bytes[position + 5] << 16 |
                  (long) bytes[position + 6] << 8 | bytes[position + 7]

                : bytes[position] | (long) bytes[position + 1] << 8 |
                  (long) bytes[position + 2] << 16 | (long) bytes[position + 3] << 24 |
                  (long) bytes[position + 4] << 32 | (long) bytes[position + 5] << 40 |
                  (long) bytes[position + 6] << 48 | (long) bytes[position + 7] << 56;
        }
    }

    /// <summary>
    /// Reads an <see cref="HLocalDate"/> value from an array of bytes.
    /// </summary>
    /// <param name="bytes">The array of bytes to read from.</param>
    /// <param name="position">The position in the array where the value should be read.</param>
    /// <returns>The value.</returns>
    /// <remarks>Layout: a 4-byte year (via <c>ReadIntL</c>, presumably little-endian -
    /// defined in another part of this partial class), then a 1-byte month and a 1-byte day.</remarks>
    public static HLocalDate ReadLocalDate(this byte[] bytes, int position)
    {
        var year = bytes.ReadIntL(position);
        var month = bytes.ReadByte(position + SizeOfInt);
        var date = bytes.ReadByte(position + SizeOfInt + SizeOfByte);
        return new HLocalDate(year, month, date);
    }

    /// <summary>
    /// Reads an <see cref="HLocalTime"/> value from an array of bytes.
    /// </summary>
    /// <param name="bytes">The array of bytes to read from.</param>
    /// <param name="position">The position in the array where the value should be read.</param>
    /// <returns>The value.</returns>
    /// <remarks>Layout: 1-byte hour, minute and second, then a 4-byte nanosecond count.</remarks>
    public static HLocalTime ReadLocalTime(this byte[] bytes, int position)
    {
        var hour = bytes.ReadByte(position);
        var minute = bytes.ReadByte(position + SizeOfByte);
        var second = bytes.ReadByte(position + SizeOfByte * 2);
        var nano = bytes.ReadIntL(position + SizeOfByte * 3);
        return new HLocalTime(hour, minute, second, nano);
    }

    /// <summary>
    /// Reads an <see cref="HLocalDateTime"/> value from an array of bytes.
    /// </summary>
    /// <param name="bytes">The array of bytes to read from.</param>
    /// <param name="position">The position in the array where the value should be read.</param>
    /// <returns>The value.</returns>
    /// <remarks>Layout: a local date (see <see cref="ReadLocalDate"/>) followed by a local time.</remarks>
    public static HLocalDateTime ReadLocalDateTime(this byte[] bytes, int position)
    {
        var date = ReadLocalDate(bytes, position);
        var time = ReadLocalTime(bytes, position + SizeOfLocalDate);
        return new HLocalDateTime(date, time);
    }

    /// <summary>
    /// Reads an <see cref="HOffsetDateTime"/> value from an array of bytes.
    /// </summary>
    /// <param name="bytes">The array of bytes to read from.</param>
    /// <param name="position">The position in the array where the value should be read.</param>
    /// <returns>The value.</returns>
    /// <remarks>Layout: a local date-time followed by a 4-byte offset expressed in seconds.</remarks>
    public static HOffsetDateTime ReadOffsetDateTime(this byte[] bytes, int position)
    {
        var localDateTime = ReadLocalDateTime(bytes, position);
        var offsetSeconds = ReadIntL(bytes, position + SizeOfLocalDateTime);
        return new HOffsetDateTime(localDateTime, TimeSpan.FromSeconds(offsetSeconds));
    }

    /// <summary>
    /// Reads a <see cref="float"/> value from an array of bytes.
    /// </summary>
    /// <param name="bytes">The array of bytes to read from.</param>
    /// <param name="position">The position in the array where the value should be read.</param>
    /// <param name="endianness">The endianness.</param>
    /// <returns>The value.</returns>
    public static float ReadFloat(this byte[] bytes, int position, Endianness endianness)
    {
        Debug.Assert(bytes != null && position >= 0 && bytes.Length >= position + SizeOfFloat);

        int value;
        unchecked
        {
            value = endianness.IsBigEndian()

                ? bytes[position] << 24 | bytes[position + 1] << 16 |
                  bytes[position + 2] << 8 | bytes[position + 3]

                : bytes[position] | bytes[position + 1] << 8 |
                  bytes[position + 2] << 16 | bytes[position + 3] << 24;
        }

#if NETSTANDARD2_0
        return BitConverter.ToSingle(BitConverter.GetBytes(value), 0);
#else
        // this is essentially an unsafe *((float*)&value)
        return BitConverter.Int32BitsToSingle(value);
#endif
    }

    /// <summary>
    /// Reads a <see cref="double"/> value from an array of bytes.
    /// </summary>
    /// <param name="bytes">The array of bytes to read from.</param>
    /// <param name="position">The position in the array where the value should be read.</param>
    /// <param name="endianness">The endianness.</param>
    /// <returns>The value.</returns>
    public static double ReadDouble(this byte[] bytes, int position, Endianness endianness)
    {
        Debug.Assert(bytes != null && position >= 0 && bytes.Length >= position + SizeOfDouble);

        long value;
        unchecked
        {
            value = endianness.IsBigEndian()

                ? (long) bytes[position] << 56 | (long) bytes[position + 1] << 48 |
                  (long) bytes[position + 2] << 40 | (long) bytes[position + 3] << 32 |
                  (long) bytes[position + 4] << 24 | (long) bytes[position + 5] << 16 |
                  (long) bytes[position + 6] << 8 | bytes[position + 7]

                : bytes[position] | (long) bytes[position + 1] << 8 |
                  (long) bytes[position + 2] << 16 | (long) bytes[position + 3] << 24 |
                  (long) bytes[position + 4] << 32 | (long) bytes[position + 5] << 40 |
                  (long) bytes[position + 6] << 48 | (long) bytes[position + 7] << 56;
        }

        // this is essentially an unsafe *((double*)&value)
        return BitConverter.Int64BitsToDouble(value);
    }

    /// <summary>
    /// Reads a <see cref="bool"/> value from an array of bytes.
    /// </summary>
    /// <param name="bytes">The array of bytes to read from.</param>
    /// <param name="position">The position in the array where the value should be read.</param>
    /// <returns>The value.</returns>
    public static bool ReadBool(this byte[] bytes, int position)
        => bytes.ReadByte(position) != 0;

    /// <summary>
    /// Reads a <see cref="char"/> value from an array of bytes.
    /// </summary>
    /// <param name="bytes">The array of bytes to read from.</param>
    /// <param name="position">The position in the array where the value should be read.</param>
    /// <param name="endianness">The endianness.</param>
    /// <returns>The value.</returns>
    public static char ReadChar(this byte[] bytes, int position, Endianness endianness)
    {
        Debug.Assert(bytes != null && position >= 0 && bytes.Length >= position + SizeOfChar);

        unchecked
        {
            return (char)(endianness.IsBigEndian()
                ? bytes[position] << 8 | bytes[position + 1]
                : bytes[position] | bytes[position + 1] << 8);
        }
    }
}
}
<|start_filename|>src/Hazelcast.Net/HazelcastOptions.cs<|end_filename|>
// Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.Linq;
using Hazelcast.Clustering;
using Hazelcast.Configuration.Binding;
using Hazelcast.Core;
using Hazelcast.Messaging;
using Hazelcast.Metrics;
using Hazelcast.Networking;
namespace Hazelcast
{
    /// <summary>
    /// Represents the Hazelcast client options.
    /// </summary>
    /// <remarks>
    /// <para>This is a partial class: other parts declare additional option
    /// properties (e.g. <c>Networking</c>, <c>Messaging</c>, <c>Serialization</c>)
    /// that the constructors below assign.</para>
    /// </remarks>
    public sealed partial class HazelcastOptions
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="HazelcastOptions"/> class.
        /// </summary>
        public HazelcastOptions()
        {
            Subscribers = new List<IHazelcastClientEventSubscriber>();
            SubscribersBinder = new CollectionBinder<InjectionOptions>(x
                => Subscribers.Add(new HazelcastClientEventSubscriber(x.TypeName)));

            // Property initializers run before the constructor body and cannot
            // reference the non-static Preview property; in addition, the order
            // in which initializers declared in different partial parts run is
            // undefined.  These two Preview-dependent options therefore must be
            // assigned here, after Preview has its value.
            Networking = new NetworkingOptions(Preview);
            Messaging = new MessagingOptions(Preview);
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="HazelcastOptions"/> class.
        /// </summary>
        /// <remarks>
        /// <para>Copy constructor used by <see cref="Clone"/>: every option set is
        /// deep-cloned so the copy can be mutated without affecting the source.</para>
        /// </remarks>
        private HazelcastOptions(HazelcastOptions other)
        {
            ClientName = other.ClientName;
            ClusterName = other.ClusterName;
            Subscribers = new List<IHazelcastClientEventSubscriber>(other.Subscribers);
            Labels = new HashSet<string>(other.Labels);
            LoggerFactory = other.LoggerFactory.Clone();
            PatternMatcher = other.PatternMatcher;
            ((IClusterOptions) this).ClientNamePrefix = ((IClusterOptions) other).ClientNamePrefix;

            // Preview is cloned first because Networking and Messaging clones
            // take it as a dependency (mirrors the default constructor above).
            Preview = other.Preview.Clone();
            Core = other.Core.Clone();
            Heartbeat = other.Heartbeat.Clone();
            Networking = other.Networking.Clone(Preview);
            Authentication = other.Authentication.Clone();
            LoadBalancer = other.LoadBalancer.Clone();
            Serialization = other.Serialization.Clone();
            Messaging = other.Messaging.Clone(Preview);
            Events = other.Events.Clone();
            Metrics = other.Metrics.Clone();
            NearCache = other.NearCache.Clone();
            NearCaches = other.NearCaches.ToDictionary(kvp => kvp.Key, kvp => kvp.Value.Clone());
            FlakeIdGenerators = other.FlakeIdGenerators.ToDictionary(kvp => kvp.Key, kvp => kvp.Value.Clone());
        }

        /// <summary>
        /// Gets the <see cref="IServiceProvider"/>.
        /// </summary>
        /// <remarks>
        /// <para>In dependency-injection scenario the service provider may be available,
        /// so that service factories can return injected services. In non-dependency-injection
        /// scenario, this returns <c>null</c>.</para>
        /// </remarks>
        /// <returns>The service provider.</returns>
        public IServiceProvider ServiceProvider { get; internal set; }

        /// <summary>
        /// (unsupported) Gets the <see cref="PreviewOptions"/>.
        /// </summary>
        public PreviewOptions Preview { get; } = new PreviewOptions();

        /// <summary>
        /// Gets the <see cref="CoreOptions"/>.
        /// </summary>
        /// <returns>The core options.</returns>
        [BinderIgnore(false)]
        internal CoreOptions Core { get; } = new CoreOptions();

        /// <summary>
        /// Gets the metrics options.
        /// </summary>
        public MetricsOptions Metrics { get; } = new MetricsOptions();

        /// <summary>
        /// Clones the options.
        /// </summary>
        /// <returns>A deep clone of the options.</returns>
        internal HazelcastOptions Clone() => new HazelcastOptions(this);
    }
}
| iozcelik/hazelcast-csharp-client |
<|start_filename|>src/common/dropdown-toggle-select/index.css<|end_filename|>
/* Caret glyph for the select-style dropdown toggle, drawn with the CSS
   border-triangle trick and pinned near the right edge of the control. */
.dropdown-toggle-select::after {
  display: inline-block;
  width: 0;
  height: 0;
  margin-left: 0.255em;
  vertical-align: 0.255em;
  content: '';
  /* solid top border + transparent sides = downward-pointing triangle */
  border-top: 0.3em solid;
  border-right: 0.3em solid transparent;
  border-bottom: 0;
  border-left: 0.3em solid transparent;
  position: absolute;
  right: 12px;
  top: 45%;
}

/* Outline-button variant of the toggle; #ced4da matches Bootstrap's
   default form-control border color. */
.btn-outline-dropdown-toggle-select {
  background-color: transparent;
  background-image: none;
  border-color: #ced4da;
}

/* Intentionally empty: placeholder for a hover treatment (none defined). */
.btn-outline-dropdown-toggle-select:hover {
}
<|start_filename|>.storybook/config.js<|end_filename|>
import { configure } from '@storybook/react';

// Registers every story module with Storybook.
// NOTE: the paths must remain literal string arguments to require() so the
// bundler can resolve them statically -- do not refactor into a loop over
// an array of paths.
function loadStories() {
  require('../src/common/dropdown-toggle-select/index.story.jsx');
  require('../src/navbar/index.story.jsx');
  require('../src/editor/index.story.jsx');
  require('../src/editor/toolbar/index.story.jsx');
  require('../src/editor/toolbar/font-dropdown/index.story.jsx');
  require('../src/editor/toolbar/size-dropdown/index.story.jsx');
  require('../src/editor/toolbar/theme-dropdown/index.story.jsx');
  require('../src/editor/document/index.story.jsx');
  require('../src/heart/index.story.jsx');
  require('../src/not-found/index.story.jsx');
}

configure(loadStories, module);
<|start_filename|>src/index.css<|end_filename|>
body {
  margin: 0;
  padding: 0;
  font-family: sans-serif;
}

/* Let the React root stretch to the full viewport height. */
body,
html,
#root {
  height: 100%;
}

/* Main content area: full height minus the chrome above it.  The 254px and
   130px offsets presumably match the stacked navbar + toolbar heights at
   each breakpoint -- confirm against the navbar/toolbar styles. */
@media screen and (max-width: 767px) {
  .responsive-container {
    height: calc(100% - 254px);
  }
}

@media screen and (min-width: 768px) {
  .responsive-container {
    height: calc(100% - 130px);
  }
}
<|start_filename|>src/editor/document/index.css<|end_filename|>
/* Print helpers: .no-print hides an element on paper, .only-print hides it
   on screen. */
@media only print {
  .no-print {
    display: none !important;
  }
}

@media only screen {
  .only-print {
    display: none !important;
  }
}

/* Line numbering for code listings: each <code> restarts the counter and
   every .code-line increments it, rendering the number in the left gutter. */
code {
  counter-reset: line;
}

.code-line {
  counter-increment: line;
  position: relative;
  display: block;
  margin-left: 2.5em;
  padding-left: 1em;
  border-left: 1px solid transparent;
}

.code-line:before {
  content: ' ' counter(line);
  position: absolute;
  margin-left: -3.5em;
  color: #212529;
}

/* Reduce leading padding as the counter gains digits so numbers stay
   right-aligned.  NOTE(review): this rule's content is identical to the
   base rule above -- the base was presumably meant to carry two leading
   spaces; confirm intended alignment. */
.code-line:nth-child(n + 10):before {
  content: ' ' counter(line);
}

.code-line:nth-child(n + 100):before {
  content: counter(line);
}

/* Same as .code-line but with a visible vertical rule in the gutter. */
.code-line-vertical {
  counter-increment: line;
  position: relative;
  display: block;
  margin-left: 2.5em;
  padding-left: 1em;
  border-left: 1px solid #212529;
}

.code-line-vertical:before {
  content: ' ' counter(line);
  position: absolute;
  margin-left: -3.5em;
  color: #212529;
}

.code-line-vertical:nth-child(n + 10):before {
  content: ' ' counter(line);
}

.code-line-vertical:nth-child(n + 100):before {
  content: counter(line);
}
| zkarmi/codeprinter |
<|start_filename|>radio_astro/include/radio_astro/dedispersion.h<|end_filename|>
/* -*- c++ -*- */
/*
* Copyright 2020 DSPIRA.
*
* This is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3, or (at your option)
* any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this software; see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street,
* Boston, MA 02110-1301, USA.
*/
#ifndef INCLUDED_RADIO_ASTRO_DEDISPERSION_H
#define INCLUDED_RADIO_ASTRO_DEDISPERSION_H
#include <radio_astro/api.h>
#include <gnuradio/block.h>
namespace gr {
namespace radio_astro {
    /*!
     * \brief Dedisperse incoming power spectrum
     * \ingroup radio_astro
     *
     */
    class RADIO_ASTRO_API dedispersion : virtual public gr::block
    {
     public:
      typedef boost::shared_ptr<dedispersion> sptr;

      /*!
       * \brief Return a shared_ptr to a new instance of radio_astro::dedispersion.
       *
       * To avoid accidental use of raw pointers, radio_astro::dedispersion's
       * constructor is in a private implementation
       * class. radio_astro::dedispersion::make is the public interface for
       * creating new instances.
       *
       * Parameter list mirrors radio_astro::detect::make:
       * \param vec_length number of channels per input spectrum
       * \param dms  presumably the dispersion measure to remove -- confirm
       *             against the implementation class
       * \param f_obs observing frequency (MHz)
       * \param bw   bandwidth (MHz)
       * \param t_int integration / sample time
       * \param nt   mode selector
       */
      static sptr make(int vec_length, float dms, float f_obs, float bw, float t_int, int nt);
    };
} // namespace radio_astro
} // namespace gr
#endif /* INCLUDED_RADIO_ASTRO_DEDISPERSION_H */
<|start_filename|>radio_astro/lib/vmedian_impl.cc<|end_filename|>
/* -*- c++ -*- */
/*
* Copyright 2019 - Quiet Skies LLC -- <NAME> - <EMAIL>
*
* This is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3, or (at your option)
* any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this software; see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gnuradio/io_signature.h>
#include <stdio.h>
#include <time.h>
#include "vmedian_impl.h"
#include <iostream>
#include <chrono>
namespace gr {
namespace radio_astro {
    vmedian::sptr
    vmedian::make(int vec_length, int n)
    {
      // Public factory: GNU Radio blocks hide their implementation class
      // behind a shared_ptr created here.
      return gnuradio::get_initial_sptr
        (new vmedian_impl(vec_length, n));
    }
    /*
     * The private constructor.
     * \param vec_length  channels per input vector (clamped by set_vlen)
     * \param n           number of vectors per output median (see set_mode)
     */
    vmedian_impl::vmedian_impl(int vec_length, int n)
      : gr::block("vmedian",
              gr::io_signature::make(1, 1, sizeof(float)*vec_length),
              gr::io_signature::make(1, 1, sizeof(float)*vec_length)),
        d_vec_length(vec_length),
        d_n(n)
    { set_vlen( vec_length); /* initialize all input values */
      set_mode( n);
    }
    /*
     * Our virtual destructor (no dynamically allocated state to release;
     * all buffers are fixed-size class members).
     */
    vmedian_impl::~vmedian_impl()
    {
    }
void
vmedian_impl::forecast (int noutput_items, gr_vector_int &ninput_items_required)
{
/* <+forecast+> e.g. ninput_items_required[0] = noutput_items */
unsigned ninputs = ninput_items_required.size();
/* for each output vector, d_n input vectors are needed */
for(unsigned int i = 0; i < ninputs; i++)
ninput_items_required[i] = d_n*noutput_items;
}
void
vmedian_impl::set_mode ( int n)
{
printf("Medianing %d vectors", n);
d_n = n;
d_n1 = d_n - 1;
d_n2 = d_n - 2;
oneovern2 = 1./float(d_n2); // Normally median 4 values, so exclude min,max (2)
} // end of set_mode()
void
vmedian_impl::set_vlen ( int invlen)
{ vlen = invlen;
if (vlen < 32)
{ vlen = 32;
printf("Vector Length too short, using %5d\n", vlen);
}
else if (vlen > MAX_VLEN)
{ vlen = MAX_VLEN;
printf("Vector Length too large, using %5d\n", vlen);
}
d_vec_length = vlen;
} // end of set_vlen()
int
vmedian_impl::general_work (int noutput_items,
gr_vector_int &ninput_items,
gr_vector_const_void_star &input_items,
gr_vector_void_star &output_items)
{
const float *in = (const float *) input_items[0], * onein;
float *out = (float *) output_items[0], * oneout;
unsigned ninputs = ninput_items.size();
int success, nout = 0;
// for all input vectors
for (unsigned j = 0; j < ninputs; j++)
{ // process one vector at a time
onein = &in[j];
// write 0 or 1 output vectors
oneout = &out[nout];
success = vmedian( onein, oneout);
// every n vectors, one more output is written
nout += success;
}
// Tell runtime system how many input items we consumed on
// each input stream.
consume_each (ninputs);
// Tell runtime system how many output items we produced.
return nout;
} // end of vmedian_impl:: general_work
    /*
     * Streaming median-of-d_n: accumulate d_n successive input vectors,
     * tracking the per-channel sum, minimum and maximum.  On the d_n-th
     * vector the output is (sum - min - max) * oneovern2, i.e. the average
     * of the values with the single smallest and largest excluded -- the
     * exact median when d_n is 3 or 4.
     * Returns 1 when an output vector was written, 0 otherwise.
     */
    int
    vmedian_impl::vmedian(const float *input, float *output)
    {
      int nout = 0;

      if ( count == 0)
        { // first vector of a group: seed sum, min and max with the input
          for(unsigned int j=0; j < vlen; j++)
            { vsum[j] = vmin[j] = vmax[j] = input[j];
            }
          count = 1;
        }
      else if (count < d_n1)
        { // middle vectors: accumulate the sum and track per-channel extrema
          for(unsigned int j=0; j < vlen; j++)
            { vsum[j] += input[j];
              if (input[j] > vmax[j])
                vmax[j] = input[j];
              else if (input[j] < vmin[j])
                vmin[j] = input[j];
            }
          count += 1;
        }
      else
        { /* if here, count is full, time to complete the median */
          // vsum holds d_n-1 values; the new input is the d_n-th.  Decide
          // per channel whether it is the group's max, min, or a middle value.
          for(unsigned int j=0; j < vlen; j++)
            { if (input[j] > vmax[j])
                output[j] = vsum[j] - vmin[j];   // input is the max: drop it and the old min
              else if (input[j] < vmin[j])
                output[j] = vsum[j] - vmax[j];   // input is the min: drop it and the old max
              else
                { /* else neither min nor max, must add to sum */
                  vsum[j] += input[j];
                  output[j] = vsum[j] - (vmax[j] + vmin[j]);
                } /* end else neither min nor max */
            } /* end for all channels */
          // finally scale by number of vectors averaged
          for(unsigned int j=0; j < vlen; j++)
            output[j] *= oneovern2;
          nout = 1;
          count = 0;    // restart accumulation with the next input vector
        } /* end else final count */
      return nout;
    } // end of vmedian_impl::vmedian()
} /* namespace radio_astro */
} /* namespace gr */
<|start_filename|>radio_astro/include/radio_astro/vmedian.h<|end_filename|>
/* -*- c++ -*- */
/*
* Copyright 2020 Quiet Skies LLC -- <NAME> - <EMAIL>.
*
* This is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3, or (at your option)
* any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this software; see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street,
* Boston, MA 02110-1301, USA.
*/
#ifndef INCLUDED_RADIO_ASTRO_VMEDIAN_H
#define INCLUDED_RADIO_ASTRO_VMEDIAN_H
#include <radio_astro/api.h>
#include <gnuradio/block.h>
namespace gr {
namespace radio_astro {
    /*!
     * \brief Vector Median of several vectors.  For 3 or 4 vectors
     * the code implements exactly the median of the values;
     * for more vectors, the result is the average of all values excluding
     * the minimum and maximum values.
     * input:
     * vector of length vector_length
     * parameters
     * 1. Vector length
     * 2. N Number of vectors to median.  This is the decimation rate
     * output:
     * 1: Vector of floating point samples
     * \ingroup radio_astro
     *
     */
    class RADIO_ASTRO_API vmedian : virtual public gr::block
    {
     public:
      typedef boost::shared_ptr<vmedian> sptr;

      /*!
       * \brief Return a shared_ptr to a new instance of radio_astro::vmedian.
       *
       * To avoid accidental use of raw pointers, radio_astro::vmedian's
       * constructor is in a private implementation
       * class. radio_astro::vmedian::make is the public interface for
       * creating new instances.
       */
      virtual void set_vlen(int vec_length) = 0; // length (channels) of each input vector
      virtual void set_mode(int n) = 0; // Number of vectors to median, > 2
      static sptr make(int vec_length, int n);
    };
} // namespace radio_astro
} // namespace gr
#endif /* INCLUDED_RADIO_ASTRO_VMEDIAN_H */
<|start_filename|>radio_astro/lib/vmedian_impl.h<|end_filename|>
/* -*- c++ -*- */
/*
* Copyright 2019 - Quiet Skies LLC -- <NAME> - <EMAIL>
*
* This is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3, or (at your option)
* any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this software; see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street,
* Boston, MA 02110-1301, USA.
*/
#ifndef INCLUDED_RADIO_ASTRO_VMEDIAN_IMPL_H
#define INCLUDED_RADIO_ASTRO_VMEDIAN_IMPL_H
#include <radio_astro/vmedian.h>
#define MAX_VLEN 16384
namespace gr {
namespace radio_astro {
    // Implementation of the vmedian block: decimate-by-n vector median.
    class vmedian_impl : public vmedian
    {
     private:
      // values computed in this block
      int d_vec_length = 2048;   // channels per input vector (see set_vlen)
      int d_n = 4;               // number of vectors per output median (see set_mode)
      int d_n1 = d_n-1;          // count of vectors accumulated before the final one
      int d_n2 = d_n-2;          // values kept after dropping the min and max
      int count = 0;      // count of vectors so far processed
      int vlen = d_vec_length;   // working (clamped) vector length
      float vsum[MAX_VLEN];  // vector sum of samples in channel
      float vmin[MAX_VLEN];  // vector of minimum values in channel
      float vmax[MAX_VLEN];  // vector of maximum values in channel
      float oneovern2 = 1./float(d_n2); // Normally median 4 values, so exclude min,max (2)

     public:
      vmedian_impl(int vec_length, int n);
      ~vmedian_impl();

      // Where all the action really happens
      // Tell the scheduler d_n input vectors are needed per output vector.
      void forecast (int noutput_items, gr_vector_int &ninput_items_required);
      void set_mode( int n);           // set number of vectors to median (> 2)
      void set_vlen( int vec_length);  // set (and clamp) the vector length
      // Accumulate one input vector; writes an output vector every d_n calls
      // and returns 1 when it does, else 0.
      int vmedian(const float *input, float *output);
      int general_work(int noutput_items,
           gr_vector_int &ninput_items,
           gr_vector_const_void_star &input_items,
           gr_vector_void_star &output_items);
    };
} // namespace radio_astro
} // namespace gr
#endif /* INCLUDED_RADIO_ASTRO_VMEDIAN_IMPL_H */
<|start_filename|>radio_astro/include/radio_astro/detect.h<|end_filename|>
/* -*- c++ -*- */
/*
* Copyright 2020 Quiet Skies LLC -- <NAME> - <EMAIL>.
*
* This is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3, or (at your option)
* any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this software; see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street,
* Boston, MA 02110-1301, USA.
*/
#ifndef INCLUDED_RADIO_ASTRO_DETECT_H
#define INCLUDED_RADIO_ASTRO_DETECT_H
#include <radio_astro/api.h>
#include <gnuradio/block.h>
namespace gr {
namespace radio_astro {
    /*!
     * \brief Event Detection by comparison of signal to RMS Noise level.
     * event detection: fill a circular buffer with complex samples and
     * search for peaks nsigma above the RMS of the data stream
     * input:
     * complex vector of I/Q samples
     * parameters
     * 1. Vector length
     * 2. Number of sigma to declare an event
     * 3. Bandwidth used to unwind the time of the event in circular buffer
     * 4. Estimated time it takes for sample to go from input of horn to block
     * 5. Mode: 0: Monitor, just pass input data,
     *          else: Detect events and repeatedly output the last event
     *          (the implementation's set_mode treats nt == 0 as Monitor)
     * output:
     * 1: Vector of complex I/Q samples
     * Event is tagged with three floating point values:
     * 1. Modified Julian Date of Event
     * 2. Peak intensity
     * 3. RMS of data stream near event
     * \ingroup radio_astro
     *
     */
    class RADIO_ASTRO_API detect : virtual public gr::block
    {
     public:
      typedef boost::shared_ptr<detect> sptr;

      /*!
       * \brief Return a shared_ptr to a new instance of radio_astro::detect.
       *
       * To avoid accidental use of raw pointers, radio_astro::detect's
       * constructor is in a private implementation
       * class. radio_astro::detect::make is the public interface for
       * creating new instances.
       */
      static sptr make(int vec_length, float dms, float f_obs, float bw, float t_int, int nt);
      virtual void set_dms(float dms) = 0; // detection threshold, in sigma above the RMS
      virtual void set_vlen(int vec_length) = 0; // length of each I/Q input vector
      virtual void set_mode(int nt) = 0; // Data stream (mode == 0) or event
      virtual void set_bw(float bw) = 0;   // bandwidth (MHz)
      virtual void set_freq(float f_obs) = 0; // observing frequency (MHz)
    };
} // namespace radio_astro
} // namespace gr
#endif /* INCLUDED_RADIO_ASTRO_DETECT_H */
<|start_filename|>radio_astro/cmake/Modules/radio_astroConfig.cmake<|end_filename|>
# CMake find-module for the gr-radio_astro out-of-tree GNU Radio module.
# Locates the installed headers and library, preferring pkg-config hints.
INCLUDE(FindPkgConfig)
PKG_CHECK_MODULES(PC_RADIO_ASTRO radio_astro)

# Header directory containing radio_astro/api.h
FIND_PATH(
    RADIO_ASTRO_INCLUDE_DIRS
    NAMES radio_astro/api.h
    HINTS $ENV{RADIO_ASTRO_DIR}/include
        ${PC_RADIO_ASTRO_INCLUDEDIR}
    PATHS ${CMAKE_INSTALL_PREFIX}/include
          /usr/local/include
          /usr/include
)

# The compiled gnuradio-radio_astro shared library
FIND_LIBRARY(
    RADIO_ASTRO_LIBRARIES
    NAMES gnuradio-radio_astro
    HINTS $ENV{RADIO_ASTRO_DIR}/lib
        ${PC_RADIO_ASTRO_LIBDIR}
    PATHS ${CMAKE_INSTALL_PREFIX}/lib
          ${CMAKE_INSTALL_PREFIX}/lib64
          /usr/local/lib
          /usr/local/lib64
          /usr/lib
          /usr/lib64
)

# Imported targets generated at install time
include("${CMAKE_CURRENT_LIST_DIR}/radio_astroTarget.cmake")

# Sets RADIO_ASTRO_FOUND after validating the two variables above
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(RADIO_ASTRO DEFAULT_MSG RADIO_ASTRO_LIBRARIES RADIO_ASTRO_INCLUDE_DIRS)
MARK_AS_ADVANCED(RADIO_ASTRO_LIBRARIES RADIO_ASTRO_INCLUDE_DIRS)
<|start_filename|>radio_astro/lib/detect_impl.h<|end_filename|>
/* -*- c++ -*- */
/*
* Copyright 2019 - Quiet Skies LLC -- <NAME> - <EMAIL>
*
* This is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3, or (at your option)
* any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this software; see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street,
* Boston, MA 02110-1301, USA.
*/
/* HISTORY */
/* 21MAR24 GIL reduce maximum buffer size */
#ifndef INCLUDED_RADIO_ASTRO_DETECT_IMPL_H
#define INCLUDED_RADIO_ASTRO_DETECT_IMPL_H
#include <radio_astro/detect.h>
#ifndef TIME_UTC // must define utc time flag
#define TIME_UTC 1
#endif
// #define MAX_VLEN 16384
//#define MAX_VLEN 8192
//#define MAX_VLEN 4096
#define MAX_VLEN 2048
#define MAX_BUFF (2L*MAX_VLEN)
// constants for calculating Modified Julian Date
// Whole-day counts for the Gregorian calendar's 400/100/4/1-year leap
// cycles, used by detect_impl::ymd_to_mjd_x.
#define DaysPer400Years  (365L*400 + 97)
#define DaysPer100Years  (365L*100 + 24)
#define DaysPer4Years    (365*4   +  1)
#define DaysPer1Year     365
#define MonthsPerYear    12
#define MonthsPer400Years (12*400)
#define MonthMarch       3
#define mjdOffset        (678881  /* Epoch Nov 17, 1858 */)

// Cumulative days from March 1 to the first of each later month, for a
// March-based year (so the leap day falls at the end of the cycle).
static const short DaysMarch1ToBeginingOfMonth[12] = {
    0,                                                      // March
    31,                                                     // April
    31 + 30,                                                // May
    31 + 30 + 31,                                           // June
    31 + 30 + 31 + 30,                                      // July
    31 + 30 + 31 + 30 + 31,                                 // August
    31 + 30 + 31 + 30 + 31 + 31,                            // September
    31 + 30 + 31 + 30 + 31 + 31 + 30,                       // October
    31 + 30 + 31 + 30 + 31 + 31 + 30 + 31,                  // November
    31 + 30 + 31 + 30 + 31 + 31 + 30 + 31 + 30,             // December
    31 + 30 + 31 + 30 + 31 + 31 + 30 + 31 + 30 + 31,        // January
    31 + 30 + 31 + 30 + 31 + 31 + 30 + 31 + 30 + 31 + 31 }; // February
namespace gr {
namespace radio_astro {
    // Implementation of the detect block: keeps a circular buffer of raw
    // I/Q samples and flags peaks more than nsigma above the stream RMS.
    class detect_impl : public detect
    {
     private:
      // values computed in this block
      int d_vec_length = 2048;   // samples per input/output vector
      float d_dms = 4.0;         // detection threshold, in sigma
      float d_f_obs = 1.;        // observing frequency (MHz)
      float d_bw = 1.;           // bandwidth (MHz)
      float d_t_int = 0.;        // horn-to-block sample delay (s)
      int d_nt = 1;              // mode: 0 = monitor, else detect
      int vlen = d_vec_length;   // working (clamped) vector length
      int vlen2 = vlen/2;        // half vector length
      double nsigma = 4.0;       // working copy of the detection threshold
      double peak = 0;   // peak, rms and date/time of detected event
      double rms = 0;    // rms of values in circular buffer
      double mjd = 0;    // modified Julian Date of event
      gr_complex circular[MAX_BUFF];  // circular buffer for raw input samples
      float circular2[MAX_BUFF]; // circular buffer for input samples**2
      long inext = 0;    // next place for a sample in buffer
      long inext2 = MAX_BUFF/2;  // place to check for new peak
      long imax2 = 0;    // index to last maximum
      double max2 = 0;   // max value squared so far
      double sum2 = 0;   // sum of values squared
      double rms2 = 0;   // rms squared of values in circular buffer
      long nsum = 0;     // count of samples in current sum
      long nmaxcount = vlen;  // count of samples until detection restarts
      double oneovern = 1./double(nmaxcount);  // reciprocal for fast averaging
      bool bufferfull = false;// assume buffer is not full
      double nsigma_rms = 0;  // comparison value for event detection
      gr_complex samples[MAX_VLEN]; // output event buffer
      bool initialized = 0;  // flag initializing output
      double bufferdelay = float(MAX_VLEN/2)*1.E-6/d_bw; // half-buffer delay (s); d_bw is in MHz
      unsigned long vcount = 0;  // count of vectors processed
      unsigned long logvcount = 0; // count of last logged mjd
      long eventoffset = 0;  // index of event in block
      double dt0 = 0.;   // estimate sample delay from 1st != 0 vector
      long nzero = 0;    // count zero vectors for dt0 estimate
      double mjd0 = 0.;  // save MJD of current day
      long lastday = 0;  // store last day to determine new mjd0 calc
      long ecount = 0;   // count of events detected

     public:
      detect_impl(int vec_length,float dms, float f_obs, float bw, float t_int, int nt);
      ~detect_impl();

      // Where all the action really happens
      void forecast (int noutput_items, gr_vector_int &ninput_items_required);
      // set nsigma for a detection;
      void set_dms( float dms);
      // set the bandwidth, in MHz
      void set_bw( float bw);
      void set_freq( float f_obs);   // set observing frequency (MHz)
      void set_dt( float t_int);     // set horn-to-block sample delay (s)
      void set_mode( int nt);        // 0 = monitor, else detect
      void set_vlen( int vec_length); // set (and clamp) vector length; resets buffer state
      // copy the vlen samples centered on inext2 out of the circular buffer
      int update_buffer();
      int event(const long ninputs, const gr_complex *input, gr_complex *output);
      int general_work(int noutput_items,
           gr_vector_int &ninput_items,
           gr_vector_const_void_star &input_items,
           gr_vector_void_star &output_items);
      /* function for Modified Julian Date (MJD) */
      long ymd_to_mjd(int year, int month, int day);
      /* more accurate function for Modified Julian Date (MJD) */
      long ymd_to_mjd_x(int year, int month, int day);
      double get_mjd();  // current UTC as a Modified Julian Date
    };
} // namespace radio_astro
} // namespace gr
#endif /* INCLUDED_RADIO_ASTRO_DETECT_IMPL_H */
<|start_filename|>radio_astro/lib/detect_impl.cc<|end_filename|>
/* -*- c++ -*- */
/*
* Copyright 2019 - Quiet Skies LLC -- <NAME> - <EMAIL>
*
* This is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3, or (at your option)
* any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this software; see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street,
* Boston, MA 02110-1301, USA.
*/
/* HISTORY
* 21Mar25 GIL fix truncation of time offset calculation
* 21Mar24 GIL take into account the number of vectors in the detect
* 21Mar24 GIL allow detections 1/2 a vector after last event
* 21Mar23 GIL make detect asyncrhonus
* 20Jun25 GIL process all provided vectors
* 20Jun23 GIL try to find reason some events are missed
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gnuradio/io_signature.h>
#include <stdio.h>
#include <time.h>
#include "detect_impl.h"
#include <iostream>
#include <chrono>
#define EPSILON 0.03 // define small value for waiting for data
namespace gr {
namespace radio_astro {
    detect::sptr
    detect::make(int vec_length, float dms, float f_obs, float bw, float t_int, int nt)
    {
      // Public factory wrapping the private implementation class.
      return gnuradio::get_initial_sptr
        (new detect_impl(vec_length, dms, f_obs, bw, t_int, nt));
    }
    /*
     * The private constructor.  Every parameter is routed through its
     * setter so clamping and logging happen in one place.
     */
    detect_impl::detect_impl(int vec_length, float dms, float f_obs, float bw, float t_int, int nt)
      : gr::block("detect",
              gr::io_signature::make(1, 1, sizeof(gr_complex)*vec_length),
              gr::io_signature::make(1, 1, sizeof(gr_complex)*vec_length)),
        d_vec_length(vec_length),
        d_dms(dms),
        d_f_obs(f_obs),
        d_bw(bw),
        d_t_int(t_int),
        d_nt(nt)
    { set_vlen( vec_length); /* initialize all input values */
      set_mode( nt);
      set_dms( dms);
      set_bw( bw);
      set_freq( f_obs);
      set_dt( t_int);
    }
    /*
     * Our virtual destructor (no dynamically allocated state to release;
     * all buffers are fixed-size class members).
     */
    detect_impl::~detect_impl()
    {
    }
long
detect_impl::ymd_to_mjd(int year, int month, int day)
{ long double MJD = 0;
long I = year, J = month, K = day, JD = 0;
// Julian date, JD, is calculated only with integer math for 1800 to 2099
JD = (K-32075);
JD += 1461*(I+4800+(J-14)/12)/4+367*(J-2-(J-14)/12*12)/12;
JD -= 3*((I+4900+(J-14)/12)/100)/4;
MJD = JD - 2400000;
// printf("Date %5d/%2d/%2d -> %9.1f\n",year, month, day, MJD);
return( int(MJD));
} //end of ymd_to_mjd()
    /*
     * Gregorian date -> Modified Julian Date via a March-based year
     * (months are shifted so the leap day falls at the end of the cycle),
     * accumulating whole days through the 400/100/4/1-year leap rules.
     * mjdOffset anchors the count so November 17, 1858 -> MJD 0.
     */
    long
    detect_impl::ymd_to_mjd_x(int year, int month, int day)
    {
      year += month / MonthsPerYear;
      month %= MonthsPerYear;

      // Adjust for month/year to Mar... Feb
      while (month < MonthMarch) {
        month += MonthsPerYear; // Months per year
        year--;
      }
      // d accumulates whole days; the long double is truncated back to a
      // long on return.
      long double d = (year / 400) * DaysPer400Years;
      long y400 = (int) (year % 400);
      d += (y400 / 100) * DaysPer100Years;
      int y100 = y400 % 100;
      d += (y100 / 4) * DaysPer4Years;
      long y4 = y100 % 4;
      d += y4 * DaysPer1Year;
      d += DaysMarch1ToBeginingOfMonth[month - MonthMarch];
      d += day;
      // November 17, 1858 == MJD 0
      d--;
      d -= mjdOffset;
      return d;
    } /* end of int ymd_to_mjd_x() */
    /*
     * Return the current UTC time as a Modified Julian Date (days with a
     * fractional part).  The integer day (mjd0) is cached and only
     * recomputed when the calendar day changes; a log line is printed at
     * each day rollover.
     */
    double
    detect_impl::get_mjd( )
    {
      double mjd = 0, seconds = 0, dtd = 0.;
      struct timespec ts;
      long r = clock_gettime(CLOCK_REALTIME, &ts);  // r is unused; only ts matters
      char buff[100];
      time_t now = time(NULL);
      // NOTE(review): time() and clock_gettime() are sampled separately and
      // could straddle a second (or midnight) boundary -- presumed harmless
      // here, but confirm if sub-second event timing matters.
      struct tm *ptm = gmtime(&now);
      long year = ptm->tm_year + 1900;
      long month = ptm->tm_mon + 1;
      long day = ptm->tm_mday;

      // printf("Current date: %5d %3d %3d\n", year, month, day);
      if (lastday == day) { // if date has not changed, use previous mjd
        mjd = mjd0;
      }
      else {
        mjd0 = ymd_to_mjd_x( year, month, day);
        mjd = mjd0;
        // printf("Mjd0: %16.6f\n", mjd0);
      }
      strftime(buff, sizeof buff, "%D %T", gmtime(&ts.tv_sec));
      // fraction of a day: wall-clock seconds since midnight plus nanoseconds
      seconds = ptm->tm_sec + (60.*ptm->tm_min) + (3600.*ptm->tm_hour);
      // seconds = seconds % 86400.;
      seconds += (1.e-9*ts.tv_nsec);
      dtd = (seconds/86400.);
      mjd += dtd;
      if (lastday != day) {
        printf("Current time: %s.%09ld UTC\n", buff, ts.tv_nsec);
        printf("New Day:%15.9f, %12.6fs (%15.12f, last=%ld, current=%ld)\n", \
         mjd, seconds, dtd, lastday, day);
      }
      lastday = day;  // save day so that MJD is only updated once a day
      return mjd;
    } // end of get_mjd()
void
detect_impl::forecast (int noutput_items, gr_vector_int &ninput_items_required)
{
/* <+forecast+> e.g. ninput_items_required[0] = noutput_items */
long ninputs = ninput_items_required.size();
for(long i = 0; i < ninputs; i++)
ninput_items_required[i] = noutput_items;
}
void
detect_impl::set_dms ( float dms)
{
nsigma = dms;
printf("Input N Sigma: %7.1f\n", nsigma);
d_dms = dms;
}
    void
    detect_impl::set_dt ( float dt)
    {
      // Estimated delay (seconds) from the horn input to this block; used
      // when reporting the event time.
      d_t_int = dt;
      printf("Input Sample Delay: %15.9f s\n", d_t_int);
    }
void
detect_impl::set_bw ( float bw)
{
if (bw < 0.01)
{printf("Input Bandwidth too small: %10.6f (MHz)\n", bw);
bw = 1.0;
}
d_bw = bw;
printf("Input Bandwidth: %7.1f (MHz)\n", bw);
bufferdelay = float(MAX_VLEN/2)/d_bw;
}
    void
    detect_impl::set_freq ( float freq)
    {
      // Observing (center) frequency in MHz; stored and logged only.
      d_f_obs = freq;
      printf("Input Frequency: %7.1f (MHz)\n", d_f_obs);
    }
void
detect_impl::set_mode ( int nt)
{
if (nt == 0){
printf("Input Mode: Monitor\n");
}
else {
printf("Input Mode: Detect\n");
}
d_nt = nt;
} // end of set_mode()
    void
    detect_impl::set_vlen ( int invlen)
    { // Clamp the vector length into [32, MAX_VLEN] and reset the circular
      // buffer indices so detection restarts cleanly with the new geometry.
      vlen = invlen;
      if (vlen < 32)
        { vlen = 32;
          printf("Vector Length too short, using %5d\n", vlen);
        }
      else if (vlen > MAX_VLEN)
        { vlen = MAX_VLEN;
          printf("Vector Length too large, using %5d\n", vlen);
        }
      d_vec_length = vlen;
      vlen2 = vlen/2;
      nmaxcount = vlen;   // set detection pause for 1 vector
      // vectors do not yet work; circular = std::vector<gr_complex>(vlen);
      // now must initialize indicies
      inext = 0;
      bufferfull = false;
      inext2 = vlen2 + 1; // first peak check happens half a vector in
      // printf("Buffer is not full: %5d\n", inext2);
    } // end of set_vlen()
// Stream entry point called by the GNU Radio scheduler.
// The block is one-to-one: every input vector may produce one output
// vector, so all input is consumed and event() decides how many
// output items were actually produced.
int
detect_impl::general_work (int noutput_items,
gr_vector_int &ninput_items,
gr_vector_const_void_star &input_items,
gr_vector_void_star &output_items)
{
const gr_complex *in = (const gr_complex *) input_items[0];
gr_complex *out = (gr_complex *) output_items[0];
// event() searches the new samples for peaks and fills the output.
// (The unused local `ninputs` from ninput_items.size() was removed.)
int success = event(noutput_items, in, out);
// Tell runtime system how many input items we consumed on
// each input stream.
consume_each (noutput_items);
// Tell runtime system how many output items we produced.
return success;
} // end of detect_impl:: general_work
// Copy a vlen-sample window, centered on the event at circular-buffer
// index inext2, out of the circular buffer into samples[].  The three
// branches handle: (1) the window fully inside the buffer, (2) the
// window starting before index 0 (wraps from the far end), and (3) the
// window running past MAX_BUFF (wraps to the beginning).
int
detect_impl::update_buffer()
{ long i = inext2 - vlen2, length = vlen, jstart = 0;
// the event is centered on sample inext2. Must copy vlen2 before
// and after the event. Deal with circular buffer
// if event is within the circular buffer
if ((i >= 0) && ((i + length) < MAX_BUFF))
{ for (long j = jstart; j < length; j++)
{ samples[j] = circular[i];
i++;
}
}
else if (i < 0) // if before beginning of buffer, wrap to other end
{ i += MAX_BUFF;
length = MAX_BUFF - i;
// printf("Two part-shift; Move 1: i=%ld, length=%ld\n", i, length);
for (long j = 0; j < length; j++)
{ samples[j] = circular[i];
i++;
}
// second part: remaining samples come from the start of the buffer
i = 0;
jstart = length;
length = vlen - length;
// printf("Two part-shift; Move 2: i=%ld, length=%ld\n", i, length);
for (long j = jstart; j < vlen; j++)
{ samples[j] = circular[i];
i++;
}
}
else
{ /* near end of circular buffer */
length = MAX_BUFF - i;
if (length > vlen)
length = vlen;
// printf("End Two part+shift; Move 1: i=%ld, length=%ld\n", i, length);
for (long j = 0; j < length; j++)
{
samples[j] = circular[i];
i++;
}
// second part: wrap around and copy the rest from the beginning
i = 0;
jstart = length;
length = vlen - length;
// printf("End Two part+shift; Move 2: i=%ld, shift=%ld\n", i, length);
for (long j = jstart; j < vlen; j++)
{
samples[j] = circular[i];
i++;
}
} // else near end of circular buffer
return 0;
} // end of update_buffer()
// Search the incoming sample stream for impulsive events.
//
// ninputs : number of input vectors in this call
// input   : ninputs * d_vec_length complex voltage samples
// output  : same-size buffer; receives either the raw input (monitor
//           mode / initialization) or the latched event repeated.
// Returns the number of items produced for the scheduler (0 or ninputs).
//
// Every sample is pushed into the circular buffer while a running power
// sum maintains the RMS.  Once enough samples have accumulated
// (bufferfull), each sample's I and Q are compared against nsigma*RMS;
// a crossing is tagged (PEAK, RMS, MJD, offsets) and the surrounding
// vlen samples are latched into samples[] by update_buffer().
int
detect_impl::event(const long ninputs, const gr_complex *input, gr_complex *output)
{
//outbuf = (float *) //create fresh one if necessary
// NOTE(review): n_sigma is unused; the threshold below uses the member
// nsigma set in set_dms() -- confirm before removing.
float n_sigma = d_dms; // translate variables
long datalen = d_vec_length * ninputs, nout = 0, jjj = 0, inext0 = inext,
detected = 0;
gr_complex rp = 0;
double mag2 = 0, dmjd = 0., dtd = 0;
// get time all samples arrive for any events found
dmjd = get_mjd();
// buffer has N vectors added, offset to time of first sample
dtd = float(datalen);
dtd = dtd/d_bw;
if (! initialized) {
printf("Uninit: MJD + offset: %15.6fs, %10.6fs %ld\n", \
dmjd, dtd, datalen);
}
dtd = dtd/86400.; // convert time offset to days
dmjd = dmjd - dtd;
vcount += ninputs;
// fill the circular buffer
for(long j=0; j < datalen; j++)
{ rp = input[j];
mag2 = (rp.real()*rp.real()) + (rp.imag()*rp.imag());
circular[inext] = rp;
circular2[inext] = mag2;
sum2 += mag2;
nsum ++; // count samples in RMS calc
if (nsum >= nmaxcount) // if RMS sum is complete
{rms2 = sum2*oneovern;
rms = sqrt(rms2);
// switch to test on single sample
// nsigma_rms = nsigma*nsigma*rms2;
nsigma_rms = nsigma*rms;
sum2 = 0; // restart rms sum
nsum = 0;
bufferfull = true; // flag buffer is now full enough
} // end if RMS sum complete
inext++; // update index to next sample to save
if (inext >= MAX_BUFF) // if at end of buffer, loop
inext = 0;
inext2++; // update position for search
if (inext2 >= MAX_BUFF) // if at end of circular buffer
inext2 = 0; // go back to beginning
if (bufferfull) // when buffer is full, find peaks
{
// threshold test on I or Q exceeding +/- nsigma*RMS
if ((circular[inext2].real() > nsigma_rms) ||
(circular[inext2].real() < -nsigma_rms) ||
(circular[inext2].imag() > nsigma_rms) ||
(circular[inext2].imag() < -nsigma_rms))
{ // truncate RMS for RMS matching
detected = 1;
rms = int( rms * 100000.);
rms = rms / 100000.;
imax2 = inext2;
peak = sqrt(circular2[inext2]);
// printf( "N-sigma Peak found: %7.1f\n", peak/rms);
// add tags to event
add_item_tag(0, // Port number
nitems_written(0) + 1, // Offset
pmt::mp("PEAK"), // Key
pmt::from_double(peak) // Value
);
add_item_tag(0, // Port number
nitems_written(0) + 1, // Offset
pmt::mp("RMS"), // Key
pmt::from_double(rms) // Value
);
// time now is after all samples have arrived.
// the event was found at sample inext2
// first count samples since started loop
// dmjd is the beginning of the buffer, now add offset.
// Notice the sample might have been in buffer before,
// so dt can be negative
dtd = float(j);
dtd = dtd - float(vlen2);
if (ecount < 1) {
printf("Event: dtd: %15.6f (delta)\n", dtd);
printf("Event: bw : %15.6f (bw)\n", d_bw);
}
dtd = dtd/d_bw;
if (ecount < 5) {
printf("MJD: %15.6f; Peak=%8.4f+/-%6.4f (dt=%9.6fs)\n", \
dmjd, peak, rms, dtd);
}
dtd = dtd / 86400.; // convert to days
dmjd = dmjd + dtd;
dmjd = dmjd - dt0; // delay through gnuradio + device
add_item_tag(0, // Port number
nitems_written(0) + 1, // Offset
pmt::mp("MJD"), // Key
pmt::from_double(dmjd) // Value
);
add_item_tag(0, // Port number
nitems_written(0) + 1, // Offset
pmt::mp("EVECTOR"), // Key
pmt::from_uint64(vcount) // Value
);
add_item_tag(0, // Port number
nitems_written(0) + 1, // Offset
pmt::mp("EOFFSET"), // Key
pmt::from_long(inext2) // Value
);
add_item_tag(0, // Port number
nitems_written(0) + 1, // Offset
pmt::mp("VOFFSET"), // Key
pmt::from_long(inext0) // Value
);
add_item_tag(0, // Port number
nitems_written(0) + 1, // Offset
pmt::mp("ENV"), // Key
pmt::from_long(ninputs) // Value
);
update_buffer(); // center event in samples[]
nsum = 0; // restart Sum count
sum2 = 0.; // restart RMS Sum
bufferfull = false;
ecount = ecount + 1;
} // end if an event found
} // end if buffer full
} // end for all samples
// Until nonzero data arrives, pass the input through and look for the
// first non-zero sample to measure the pipeline start-up delay dt0.
if (! initialized) {
for (int iii = 0; iii < vlen; iii++)
{ samples[iii] = input[iii];
}
for (int iii = 0; iii < datalen; iii++)
{output[iii] = input[iii];
} // end for all input vectors }
// if still zero, then not suitable for initialization
rp = input[vlen2]; // get middle value in input
if (((rp.real() < EPSILON) and (rp.real() > -EPSILON)) and
((rp.imag() < EPSILON) and (rp.imag() > -EPSILON)))
{
nzero = nzero + ninputs;
return 0; // return noting no data yet
}
else {
printf("Nonzero %f, %f (%d)\n", rp.real(), rp.imag(), vlen2);
dt0 = nzero * vlen / d_bw; // time until nonzero (s)
printf("Non zero data found after %ld vectors\n", nzero);
printf("dt0 = %12.6fs\n", dt0);
dt0 = dt0/86400.; // convert from seconds to days
initialized = 1; // no need to re-initialize the event
return 1; // show initial samples
} // end else not zero data in vector
} // end if not yet initialized
if (d_nt == 0) // if monitoring input, just output input
{
// always output the last event
for (int iii = 0; iii < datalen; iii++)
{ output[iii] = input[iii];
}
return 1;
}
if (detected == 1) {
// repeatedly output the last event
jjj = 0;
for (nout = 0; nout < ninputs; nout++)
{ // fill all output vectors with last event
for (int iii = 0; iii < d_vec_length; iii++)
{ output[jjj] = samples[iii];
jjj++;
}
} // end for all input vectors
} // end else output event
else {
// to reduce CPU usage, only log vector count MJDs every
// second or so.
if (vcount > logvcount)
{
dmjd = dmjd - dt0;
// printf("Mjd: %16.6d\n", dmjd);
add_item_tag(0, // Port number
nitems_written(0) + 1, // Offset
pmt::mp("VMJD"), // Key
pmt::from_double(dmjd) // Value
);
add_item_tag(0, // Port number
nitems_written(0) + 1, // Offset
pmt::mp("VCOUNT"), // Key
pmt::from_uint64(vcount) // Value
);
add_item_tag(0, // Port number
nitems_written(0) + 1, // Offset
pmt::mp("NV"), // Key
pmt::from_uint64(ninputs) // Value
);
add_item_tag(0, // Port number
nitems_written(0) + 1, // Offset
pmt::mp("VOFFSET"), // Key
pmt::from_long(inext0) // Value
);
// add a vector count log entry every second or so
logvcount = vcount + 10000;
} // end if time to log MJD vs vector count
} // end else if not detected, can log vector
// return detected event count, either 0 or 1
return detected;
} // end of detect_impl::event()
} /* namespace radio_astro */
} /* namespace gr */
<|start_filename|>radio_astro/lib/dedispersion_impl.cc<|end_filename|>
/* -*- c++ -*- */
/*
* Copyright 2020 DSPIRA.
*
* This is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3, or (at your option)
* any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this software; see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gnuradio/io_signature.h>
#include "dedispersion_impl.h"
namespace gr {
namespace radio_astro {
// Factory: GNU Radio blocks are constructed through make(), which wraps
// the private implementation constructor in the framework smart pointer.
dedispersion::sptr
dedispersion::make(int vec_length, float dms, float f_obs, float bw, float t_int, int nt)
{
return gnuradio::get_initial_sptr
(new dedispersion_impl(vec_length, dms, f_obs, bw, t_int, nt));
}
/*
 * The private constructor
 *
 * vec_length : channels per input spectrum
 * dms        : dispersion measure to correct for
 * f_obs      : center observing frequency (MHz, per sibling blocks' usage)
 * bw         : bandwidth (MHz)
 * t_int      : integration (sample) time per spectrum
 * nt         : number of time samples per work block
 *
 * Input items are nt spectra of vec_length floats; output items are
 * nt dedispersed floats.
 */
dedispersion_impl::dedispersion_impl(int vec_length, float dms, float f_obs, float bw, float t_int, int nt)
: gr::block("dedispersion",
gr::io_signature::make(1, 1, sizeof(float)*vec_length*nt),
gr::io_signature::make(1, 1, sizeof(float)*nt)),
d_vec_length(vec_length),
d_dms(dms),
d_f_obs(f_obs),
d_bw(bw),
d_t_int(t_int),
d_nt(nt)
{}
/*
 * Our virtual destructor (no resources to release).
 */
dedispersion_impl::~dedispersion_impl()
{
}
// Scheduler hint: one input item is required per output item on every
// input stream.
void
dedispersion_impl::forecast (int noutput_items, gr_vector_int &ninput_items_required)
{
for (unsigned int idx = 0; idx < ninput_items_required.size(); idx++) {
ninput_items_required[idx] = noutput_items;
}
}
// Scheduler entry point: dedisperse each input block of d_nt spectra
// into d_nt dedispersed time samples on the output.
int
dedispersion_impl::general_work (int noutput_items,
gr_vector_int &ninput_items,
gr_vector_const_void_star &input_items,
gr_vector_void_star &output_items)
{
const float *in = (const float *) input_items[0];
float *out = (float *) output_items[0];
// dedisperse() currently always returns 0; the leftover debug
// statement "std::cout << success;" was removed -- it printed a bare
// digit to stdout on every scheduler call.
(void) dedisperse(in, out);
// Tell runtime system how many input items we consumed on
// each input stream.
consume_each (noutput_items);
// Tell runtime system how many output items we produced.
return noutput_items;
}
int
dedispersion_impl::dedisperse(const float *input, float *output)
{
//outbuf = (float *) //create fresh one if necessary
float dmk = 4148808/d_t_int;
int shift;
unsigned int y;
float f_low = d_f_obs - d_bw/2;
float inv_f_low_sq = 1/(f_low*f_low);
//std::cout << input[10*d_vec_length + 20] << " " << input[31*d_vec_length + 0] <<"\n";
//for(unsigned int i=0; i < d_dms; i++){
//need to zero outbuf
for (unsigned int k=0; k < d_nt; k++){
output[k] = 0;
}
for(unsigned int j=0; j < d_vec_length; j++){
shift = round( dmk * d_dms * (inv_f_low_sq - 1/((d_bw*j/d_vec_length + f_low)*(d_bw*j/d_vec_length + f_low) )));
for(unsigned int k=0; k < d_nt; k++){
y = (k-shift) % d_nt;
output[k] += input[y*d_vec_length+j];
}
}
//}
return 0;
}
} /* namespace radio_astro */
} /* namespace gr */
| interstellarmedium/HI_telescope |
<|start_filename|>codes/chapter20/templates/show_request.html<|end_filename|>
<html>
<body>
<h1>Request Line:</h1>
<p>
{request.method} {request.path} {request.http_version}
</p>
<h1>Headers:</h1>
<pre>{headers}</pre>
<h1>Body:</h1>
<pre>{body}</pre>
</body>
</html>
<|start_filename|>package.json<|end_filename|>
{
"name": "python_web_application_for_3rd_year_engineer",
"version": "1.0.0",
"description": "伸び悩んでいる3年目Webエンジニアのための、Python Webアプリケーション自作入門",
"repository": {
"type": "git",
"url": "<EMAIL>:bigen1925/python_web_application_for_3rd_year_engineer.git"
},
"author": "<EMAIL>",
"license": "MIT",
"dependencies": {
"zenn-cli": "^0.1.69"
}
}
| uenoka/introduction-to-web-application-with-python |
<|start_filename|>src/c/jlinuxfork.c<|end_filename|>
/* vi: set sw=4 ts=4: */
#include <stdlib.h>
#include <jni.h>
#include <errno.h>
#include <signal.h>
#include <string.h>
#include <sys/wait.h>
#include <sys/stat.h>
#include <unistd.h>
#include <spawn.h>
#include <fcntl.h>
#include "jlinuxfork.h"
extern char ** environ;
#define THROW_IO_EXCEPTION(cls, env) do { \
cls = (*env)->FindClass(env, "java/io/IOException"); \
(*env)->ThrowNew(env, cls, sys_errlist[errno]); \
} while (0)
#define MAX_BUFFER_SIZE 131071
char ** javaArrayToChar(JNIEnv * env, jobject array);
void inline releaseCharArray(JNIEnv * env, jobject javaArray, char ** cArray);
jobjectArray fdIntToObject(JNIEnv * env, int * fds, size_t length);
char ** createPrependedArgv(char * path, char * chdir, char ** argv, int length, int * fds);
void freePargv(char ** pargv);
int isExecutable(char * path);
static void closeSafely(int fd);
/*
 * Class:     net_axiak_runtime_SpawnedProcess
 * Method:    execProcess
 * Signature: ([Ljava/lang/String;[Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/io/FileDescriptor;Ljava/io/FileDescriptor;Ljava/io/FileDescriptor;)I
 *
 * Spawn `cmdarray` via the binrunner helper (`jbinrunner`): create three
 * pipes for stdin/stdout/stderr, vfork(), and in the child execve() the
 * helper with the child-side fd numbers, working directory and the user
 * command prepended to argv.  On success the parent-side pipe fds are
 * stored into the supplied java.io.FileDescriptor objects and the child
 * pid is returned; on failure an exception is thrown and -1 returned.
 */
JNIEXPORT jint JNICALL Java_net_axiak_runtime_SpawnedProcess_execProcess
(JNIEnv * env, jclass clazz, jobjectArray cmdarray, jobjectArray envp, jstring chdir,
jstring jbinrunner, jobject stdin_fd, jobject stdout_fd, jobject stderr_fd)
{
int cpid = -1, length, i, total_buffer_size = 0;
jboolean iscopy;
char ** argv = NULL, ** c_envp = NULL, ** prepended_argv = NULL, *tmp;
jstring program_name;
jfieldID fid;
jclass cls;
char *path;
int fds[3] = {1, 0, 2};
int pipe_fd1[2], pipe_fd2[2], pipe_fd3[2];
jobjectArray fdResult;
/* mark all pipe fds unopened so cleanup can run safely from any point */
pipe_fd1[0] = pipe_fd1[1] = pipe_fd2[0] = pipe_fd2[1] = pipe_fd3[0] = pipe_fd3[1] = -1;
path = (char *)(*env)->GetStringUTFChars(env, jbinrunner, &iscopy);
if (path == NULL) {
goto Catch;
}
if (pipe(pipe_fd1) != 0) {
THROW_IO_EXCEPTION(cls, env);
goto Catch;
}
if (pipe(pipe_fd2) != 0) {
THROW_IO_EXCEPTION(cls, env);
goto Catch;
}
if (pipe(pipe_fd3) != 0) {
THROW_IO_EXCEPTION(cls, env);
goto Catch;
}
length = (*env)->GetArrayLength(env, cmdarray);
if (!length) {
cls = (*env)->FindClass(env, "java/lang/IndexOutOfBoundsException");
(*env)->ThrowNew(env, cls, "A non empty cmdarray is required.");
goto Catch;
}
if ((argv = javaArrayToChar(env, cmdarray)) == NULL) {
goto Catch;
}
program_name = (jstring)(*env)->GetObjectArrayElement(env, cmdarray, 0);
if (envp == NULL) {
c_envp = environ; /* inherit the JVM's environment */
} else if ((c_envp = javaArrayToChar(env, envp)) == NULL) {
goto Catch;
}
/* Mapping for client to pipe. */
fds[0] = pipe_fd1[0];
fds[1] = pipe_fd2[1];
fds[2] = pipe_fd3[1];
/* Get the cwd */
tmp = (char *)(*env)->GetStringUTFChars(env, chdir, &iscopy);
prepended_argv = createPrependedArgv(path, tmp, argv, length, fds);
if (prepended_argv == NULL) {
goto Catch;
}
/* refuse argv+envp that would exceed the kernel's argument budget */
for (i = 0; prepended_argv[i] != NULL; ++i) {
total_buffer_size += strlen(prepended_argv[i]) + 1;
}
for (i = 0; c_envp[i] != NULL; ++i) {
total_buffer_size += strlen(c_envp[i]) + 1;
}
if (total_buffer_size > MAX_BUFFER_SIZE) {
cls = (*env)->FindClass(env, "java/lang/IllegalArgumentException");
(*env)->ThrowNew(env, cls, "The environment and arguments combined require too much space.");
goto Catch;
}
/* vfork + immediate execve: the child never touches the JVM heap */
cpid = vfork();
if (cpid == 0) {
if (execve(path, prepended_argv, c_envp) == -1) {
fprintf(stderr, "execve error: %s\n", strerror(errno));
}
_exit(-1);
} else if (cpid < 0) {
THROW_IO_EXCEPTION(cls, env);
goto Catch;
}
/* NOTE(review): chdir/path were obtained with GetStringUTFChars but are
 * released with ReleaseStringChars -- the JNI spec pairs UTF chars with
 * ReleaseStringUTFChars; confirm against the JNI reference. */
(*env)->ReleaseStringChars(env, chdir, (const jchar *)tmp);
/* Mapping for parent to pipe. */
fds[0] = pipe_fd1[1];
fds[1] = pipe_fd2[0];
fds[2] = pipe_fd3[0];
cls = (*env)->FindClass(env, "java/io/FileDescriptor");
if (cls == 0) {
goto Catch;
}
/* store the parent-side fds into the Java FileDescriptor objects */
fid = (*env)->GetFieldID(env, cls, "fd", "I");
(*env)->SetIntField(env, stdin_fd, fid, fds[0]);
(*env)->SetIntField(env, stdout_fd, fid, fds[1]);
(*env)->SetIntField(env, stderr_fd, fid, fds[2]);
fdResult = fdIntToObject(env, fds, 3);
if (fdResult == NULL) {
goto Catch;
}
(*env)->ExceptionClear(env);
Finally:
/* close the child-side pipe ends kept by the parent */
closeSafely(pipe_fd1[0]);
closeSafely(pipe_fd2[1]);
closeSafely(pipe_fd3[1]);
/* Here we make sure we are good memory citizens. */
freePargv(prepended_argv);
if (argv != NULL)
releaseCharArray(env, cmdarray, argv);
if (envp != NULL)
releaseCharArray(env, envp, c_envp);
if (path != NULL)
(*env)->ReleaseStringChars(env, jbinrunner, (jchar *)path);
return cpid;
Catch:
closeSafely(fds[0]);
closeSafely(fds[1]);
closeSafely(fds[2]);
goto Finally;
}
/*
 * Class:     SpawnedProcess
 * Method:    killProcess
 * Signature: (I)V
 *
 * Terminate `pid`, escalating through signals 2 (SIGINT),
 * 5 (SIGTRAP) and finally 9 (SIGKILL).
 */
JNIEXPORT void JNICALL Java_net_axiak_runtime_SpawnedProcess_killProcess
(JNIEnv * env, jclass clazz, jint pid)
{
int sigs[3] = {2, 5, 9};
int i;
for (i = 0; i < 3; i++) {
kill(pid, sigs[i]);
}
}
/*
 * Class:     SpawnedProcess
 * Method:    waitForProcess
 * Signature: (I)I
 *
 * Block until `pid` exits and return its exit status: the exit code for
 * a normal exit, 0x80 + signal number if killed by a signal, 0 if the
 * child was already reaped (ECHILD).  EINTR restarts the wait.
 */
JNIEXPORT jint JNICALL Java_net_axiak_runtime_SpawnedProcess_waitForProcess
(JNIEnv * env, jclass clazz, jint pid)
{
/* Read http://www.opengroup.org/onlinepubs/000095399/functions/wait.html */
int stat_loc;
errno = 0;
while(waitpid(pid, &stat_loc, 0) < 0) {
switch (errno) {
case ECHILD:
/* no such child -- treat as already exited cleanly */
return 0;
case EINTR:
/* interrupted by a signal -- retry */
break;
default:
return -1;
}
}
if (WIFEXITED(stat_loc)) {
return WEXITSTATUS(stat_loc);
} else if (WIFSIGNALED(stat_loc)) {
/* mimic the shell convention: 128 + terminating signal */
return 0x80 + WTERMSIG(stat_loc);
} else {
return stat_loc;
}
}
/* Convert a Java String[] into a NULL-terminated array of C strings.
 * The element strings are borrowed from the JVM via GetStringUTFChars
 * and must later be released with releaseCharArray().
 * Returns NULL if the array allocation itself fails.
 * NOTE(review): GetStringUTFChars may return NULL on OOM; element
 * results are not checked here -- verify callers tolerate a NULL slot. */
char ** javaArrayToChar(JNIEnv * env, jobject array)
{
int i, length = (*env)->GetArrayLength(env, array);
char ** result = (char **)malloc(sizeof(char *) * (length + 1));
jboolean iscopy;
jstring tmp;
if (result == NULL) {
return result;
}
result[length] = NULL;
for (i = 0; i < length; i++) {
tmp = (jstring)(*env)->GetObjectArrayElement(env, array, i);
result[i] = (char *)(*env)->GetStringUTFChars(env, tmp, &iscopy);
}
return result;
}
/* Release a string array produced by javaArrayToChar(): give each
 * borrowed string back to the JVM, then free the array itself.
 * NOTE(review): the strings were obtained with GetStringUTFChars but
 * are released with ReleaseStringChars -- the JNI spec pairs UTF chars
 * with ReleaseStringUTFChars; confirm against the JNI reference. */
void inline releaseCharArray(JNIEnv * env, jobject javaArray, char ** cArray)
{
int i, length = (*env)->GetArrayLength(env, javaArray);
if (cArray == NULL)
return;
for (i = 0; i < length; i++) {
if (cArray[i] != NULL)
(*env)->ReleaseStringChars(env,
(jstring)(*env)->GetObjectArrayElement(env, javaArray, i),
(jchar *)cArray[i]);
}
free(cArray);
}
/* Wrap `length` raw file descriptor numbers in java.io.FileDescriptor
 * objects and return them as a FileDescriptor[].  Returns NULL on any
 * JNI lookup/allocation failure (the failed JNI call leaves a pending
 * Java exception set). */
jobjectArray fdIntToObject(JNIEnv * env, int * fds, size_t length)
{
jclass clazz = (*env)->FindClass(env, "java/io/FileDescriptor");
jobjectArray result;
jmethodID fdesc;
jfieldID field_fd;
int i;
if (clazz == 0) {
return NULL;
}
result = (*env)->NewObjectArray(env, length, clazz, NULL);
if (result == NULL) {
return NULL;
}
/* no-arg FileDescriptor constructor */
fdesc = (*env)->GetMethodID(env, clazz, "<init>", "()V");
if (fdesc == 0) {
return NULL;
}
/* private int FileDescriptor.fd */
field_fd = (*env)->GetFieldID(env, clazz, "fd", "I");
if (field_fd == 0) {
return NULL;
}
for (i = 0; i < length; i++) {
jobject tmp = (*env)->NewObject(env, clazz, fdesc);
(*env)->SetIntField(env, tmp, field_fd, fds[i]);
(*env)->SetObjectArrayElement(env, result, i, tmp);
}
return result;
}
/* Build the argv passed to the binrunner helper.
 * Layout: [0]=helper path, [1..3]=stdin/stdout/stderr fd numbers as
 * decimal strings, [4]=working directory, [5..5+length-1]=user argv,
 * followed by a NULL terminator.
 * Only slots 1-3 are individually allocated; everything else aliases
 * caller-owned memory.  Returns NULL on allocation failure. */
char ** createPrependedArgv(char * path, char * chdir, char ** argv, int length, int * fds)
{
    char ** pargv = (char **)malloc(sizeof(char *) * (length + 6));
    int i;
    if (pargv == NULL) {
        return NULL;
    }
    for (i = 1; i < (length + 6); i++) {
        pargv[i] = NULL;
    }
    pargv[0] = path;
    pargv[4] = chdir;
    /* Render the three fd numbers as decimal strings.  Use a fixed-size
     * allocation large enough for any int ("-2147483648" + NUL = 12):
     * the previous 3 * fds[i] sizing under-allocated for small
     * descriptors (fd 0 gave malloc(0)) and overflowed the heap. */
    for (i = 0; i < 3; i++) {
        pargv[i + 1] = (char *)malloc(12);
        if (pargv[i + 1] == NULL) {
            goto error;
        }
        snprintf(pargv[i + 1], 12, "%d", fds[i]);
    }
    for (i = 0; i < length; i++) {
        pargv[i + 5] = argv[i];
    }
    return pargv;
error:
    freePargv(pargv);
    return NULL;
}
/* Free an argv array built by createPrependedArgv().  Only slots 1-3
 * (the fd strings) were individually allocated there; the path, chdir
 * and user argv entries are owned by the caller and left alone. */
void freePargv(char ** pargv)
{
    int slot;
    if (pargv == NULL) {
        return;
    }
    for (slot = 1; slot < 4; slot++) {
        if (pargv[slot] != NULL) {
            free(pargv[slot]);
        }
    }
    free(pargv);
}
/* Return 1 iff `absolute_path` exists and its owner-execute bit is set,
 * 0 otherwise. */
int _isAbsPathExecutable(char * absolute_path)
{
    struct stat st;
    if (stat(absolute_path, &st) != 0) {
        return 0;
    }
    return (st.st_mode & S_IXUSR) != 0;
}
/* Return 1 iff `path` is executable, either as given or when joined
 * with each directory of $PATH (with a built-in fallback list when the
 * variable is unset); 0 otherwise. */
int isExecutable(char * path)
{
    char *dirs, *dirs_copy, *dir, *candidate;
    int found = 0;
    if (_isAbsPathExecutable(path)) {
        return 1;
    }
    dirs = getenv("PATH");
    if (!dirs) {
        dirs = "/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin";
    }
    /* strtok mutates its argument, so search a private copy */
    dirs_copy = (char *)malloc(strlen(dirs) + 1);
    strcpy (dirs_copy, dirs);
    candidate = (char *)malloc(strlen(dirs) + strlen(path) + 2);
    for (dir = strtok(dirs_copy, ":"); dir != NULL; dir = strtok(NULL, ":")) {
        strcpy(candidate, dir);
        strcat(candidate, "/");
        strcat(candidate, path);
        if (_isAbsPathExecutable(candidate)) {
            found = 1;
            break;
        }
    }
    free(candidate);
    free(dirs_copy);
    return found;
}
/* Close `fd` unless it is the -1 "never opened" sentinel. */
static void closeSafely(int fd)
{
    if (fd == -1) {
        return;
    }
    close(fd);
}
/*
Local Variables:
c-file-style: "linux"
c-basic-offset: 4
tab-width: 4
End:
*/
<|start_filename|>src/c/jlinuxfork.h<|end_filename|>
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class com_crunchtime_utils_runtime_SpawnedProcess */
#ifndef _Included_net_axiak_runtime_SpawnedProcess
#define _Included_net_axiak_runtime_SpawnedProcess
#ifdef __cplusplus
extern "C" {
#endif
/*
* Class: net_axiak_runtime_SpawnedProcess
* Method: execProcess
* Signature: ([Ljava/lang/String;[Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/io/FileDescriptor;Ljava/io/FileDescriptor;Ljava/io/FileDescriptor;)I
*/
JNIEXPORT jint JNICALL Java_net_axiak_runtime_SpawnedProcess_execProcess
(JNIEnv *, jobject, jobjectArray, jobjectArray, jstring, jstring, jobject, jobject, jobject);
/*
* Class: net_axiak_runtime_SpawnedProcess
* Method: waitForProcess
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_net_axiak_runtime_SpawnedProcess_waitForProcess
(JNIEnv *, jobject, jint);
/*
* Class: net_axiak_runtime_SpawnedProcess
* Method: killProcess
* Signature: (I)V
*/
JNIEXPORT void JNICALL Java_net_axiak_runtime_SpawnedProcess_killProcess
(JNIEnv *, jobject, jint);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>Makefile<|end_filename|>
# Top-level build: every target just delegates to the C sources in src/c.
all:
	make -C src/c all
# Install binrunner and the JNI shared library (see src/c/Makefile).
install:
	make -C src/c install
# Remove build artifacts.
clean:
	make -C src/c clean
# JNI headers live under $(JAVA_HOME)/include (plus the linux subdir).
includes = -I$(JAVA_HOME)/include -I$(JAVA_HOME)/include/linux
cc = cc
# Build both the exec helper and the JNI shared library.
all: binrunner library
# libjlinuxfork.so: the JNI bridge loaded by SpawnedProcess.
library: jlinuxfork.c jlinuxfork.h
	$(cc) -fPIC -Wno-long-long -pedantic -O3 -Wall -g -o libjlinuxfork.so -shared -Wl,-soname,libspawn.so $(includes) jlinuxfork.c -lc
clean:
	rm -fv *.so binrunner
# binrunner: small helper that remaps fds and chdirs before execvp.
binrunner:
	$(cc) -O3 -Wall -pedantic -o binrunner binrunner.c
	chmod -v +x binrunner
install: binrunner libjlinuxfork.so
	sudo mv binrunner /usr/bin
	sudo cp libjlinuxfork.so /usr/lib/
import net.axiak.runtime.*;
import java.io.*;
// Smoke test for SpawnRuntime: soak up heap, then spawn "ls -l" and
// echo its output, reporting whether the native spawn path was used.
class TestClass {
    @SuppressWarnings("unused")
    public static void main(String [] args) {
        // Fill a large array so the JVM heap is big when we fork/exec.
        String[] memory_soaker = new String[1000000];
        String[] cmd = {"ls", "-l"};
        for (int i = 0; i < memory_soaker.length; i++) {
            memory_soaker[i] = String.valueOf(i);
        }
        System.out.println("");
        System.out.println("");
        System.out.println("");
        try {
            SpawnRuntime runtime = new SpawnRuntime(Runtime.getRuntime());
            String banner = runtime.isLinuxSpawnLoaded()
                    ? " >> java_posix_spawn libraries are found and being tested."
                    : " >> java_posix_spawn is not built and/or there was an error. Running with fallback.";
            System.out.println(banner);
            Process result = runtime.exec(cmd);
            // Echo the child's stdout byte by byte until EOF.
            int lastChar;
            while ((lastChar = result.getInputStream().read()) != -1) {
                System.out.print((char) lastChar);
            }
        } catch (IOException ignored) {
            System.out.println(ignored);
        }
    }
}
<|start_filename|>src/c/binrunner.c<|end_filename|>
#include <stdlib.h>
#include <stdio.h>
#include <errno.h>
#include <signal.h>
#include <sys/wait.h>
#include <unistd.h>
#include <spawn.h>
#include <fcntl.h>
#include <string.h>
/* binrunner: exec helper spawned by the JNI library.
 * Usage: binrunner stdin# stdout# stderr# chdir program [argv0 ...]
 * Remaps the three given descriptors onto fds 0-2, optionally changes
 * directory ("." means stay put), marks every other fd close-on-exec,
 * then execvp()s the real program.  Returns -1 only on failure. */
int main(int argc, char ** argv)
{
    int fds_map[3] = {-1, -1, -1};
    int i;
    char ** new_argv;
    int open_max;
    int open_index;
    if (argc < 6) {
        fprintf(stderr, "Usage: %s stdin# stdout# stderr# chdir program [argv0 ... ]\n",
                argv[0]);
        return -1;
    }
    /* wire the inherited pipe fds onto the standard descriptors */
    fds_map[0] = atoi(argv[1]);
    fds_map[1] = atoi(argv[2]);
    fds_map[2] = atoi(argv[3]);
    for (i = 0; i < 3; i++) {
        if (dup2(fds_map[i], i) == -1) {
            fprintf(stderr, "Error in dup2\n");
            return -1;
        }
    }
    /* "." is the sentinel for "keep the current working directory" */
    if (!(strlen(argv[4]) == 1 && strncmp(argv[4], ".", 1) == 0)) {
        if (chdir(argv[4]) != 0) {
            fprintf(stderr, "Error in chdir()\n");
            return -1;
        }
    }
    /* build the target program's argv from our trailing arguments */
    new_argv = (char **)malloc(sizeof(char *) * (argc - 4));
    if (new_argv == NULL) {
        /* previously unchecked: a failed malloc crashed in the loop below */
        fprintf(stderr, "Error in malloc()\n");
        return -1;
    }
    for (i = 5; i < argc; i++) {
        new_argv[i - 5] = argv[i];
    }
    new_argv[argc - 5] = NULL;
    /* keep only fds 0-2 across the exec */
    open_max = sysconf(_SC_OPEN_MAX);
    for(open_index=3;open_index < open_max; open_index++) {
        fcntl(open_index, F_SETFD, FD_CLOEXEC);
    }
    if (execvp(argv[5], new_argv) == -1) {
        fprintf(stderr, "Error: %s\n", strerror(errno));
        return -1;
    }
    return -1;
}
<|start_filename|>src/java/net/axiak/runtime/SpawnRuntime.java<|end_filename|>
package net.axiak.runtime;
import java.io.File;
import java.io.IOException;
import java.util.StringTokenizer;
/**
 * Drop-in replacement for Runtime.exec() that uses the native
 * vfork/exec-based SpawnedProcess when the jlinuxfork library loaded,
 * and falls back to the wrapped Runtime otherwise.
 */
public class SpawnRuntime {
    private Runtime runtime;
    private Boolean linuxSpawnLoaded;
    private static SpawnRuntime instance;

    public SpawnRuntime(Runtime runtime) {
        this.runtime = runtime;
        linuxSpawnLoaded = SpawnedProcess.isLibraryLoaded();
    }
    /** Execute cmdarray with environment envp in directory chdir. */
    public Process exec(String [] cmdarray, String [] envp, File chdir) throws IOException {
        if (linuxSpawnLoaded) {
            return new SpawnedProcess(cmdarray, envp, chdir);
        } else {
            return runtime.exec(cmdarray, envp, chdir);
        }
    }
    public Process exec(String [] cmdarray, String [] envp) throws IOException {
        return exec(cmdarray, envp, new File("."));
    }
    public Process exec(String [] cmdarray) throws IOException {
        return exec(cmdarray, null);
    }
    public Process exec(String command) throws IOException {
        // Delegate straight to the tokenizing overload.  (A dead
        // single-element cmdarray local that was never used has been
        // removed; behavior is unchanged.)
        return exec(command, null, new File("."));
    }
    /** Tokenize command on whitespace and execute it. */
    public Process exec(String command, String[] envp, File dir) throws IOException {
        if (command.length() == 0) {
            throw new IllegalArgumentException("Empty command");
        }
        StringTokenizer st = new StringTokenizer(command);
        String[] cmdarray = new String[st.countTokens()];
        for (int i = 0; st.hasMoreTokens(); i++) {
            cmdarray[i] = st.nextToken();
        }
        return exec(cmdarray, envp, dir);
    }
    public Process exec(String command, String[] envp) throws IOException {
        return exec(command, envp, new File("."));
    }
    /** Lazily created shared instance wrapping Runtime.getRuntime(). */
    public static SpawnRuntime getInstance() {
        if (instance == null) {
            instance = new SpawnRuntime(Runtime.getRuntime());
        }
        return instance;
    }
    /** True when the native jlinuxfork library is available. */
    public Boolean isLinuxSpawnLoaded() {
        return linuxSpawnLoaded;
    }
}
<|start_filename|>test/Makefile<|end_filename|>
# Paths to the built jar/native artifacts and the JDK tools.
target_dir = ../target
classpath = $(target_dir)/jlinuxfork.jar
javac = $(JAVA_HOME)/bin/javac
java = $(JAVA_HOME)/bin/java
# TUNE THESE
XMS = 100m
XMX = 200m
# Run TestClass against the native library build.
runtest: compile
	$(java) -classpath $(classpath):. -Xms$(XMS) -Xmx$(XMX) -Djava.library.path=$(target_dir) -Dposixspawn.binrunner=$(target_dir)/binrunner TestClass
# Run with the pure-Java fallback (no native library path supplied).
runfallback: compile
	$(java) -classpath $(classpath):. -Xms$(XMS) -Xmx$(XMX) TestClass
compile: TestClass.class
	$(javac) -classpath $(classpath):. TestClass.java
<|start_filename|>src/java/net/axiak/runtime/SpawnedProcess.java<|end_filename|>
package net.axiak.runtime;
import java.io.*;
import java.security.AccessController;
import java.security.PrivilegedAction;
/**
 * A {@link Process} implementation backed by the jlinuxfork JNI library:
 * the command is started through the small "binrunner" helper via
 * vfork/execve, avoiding a full copy of the JVM heap on spawn.
 * Availability is reported by {@link #isLibraryLoaded()}; SpawnRuntime
 * consults it before constructing this class.
 */
public class SpawnedProcess extends Process {
private String name;
private int exitCode;
private int pid;
private boolean hasExited;
private OutputStream stdin;
private InputStream stdout;
private InputStream stderr;
private FileDescriptor stdin_fd;
// NOTE(review): public while the other two fds are private -- looks
// accidental; confirm no external code relies on it before changing.
public FileDescriptor stdout_fd;
private FileDescriptor stderr_fd;
// Absolute path of the binrunner helper, resolved at class-load time.
private static String binrunner;
private static boolean libLoaded;
private static Throwable libLoadError;
private native int execProcess(String [] cmdarray, String [] env, String chdir, String binrunner,
FileDescriptor stdin_fd, FileDescriptor stdout_fd,
FileDescriptor stderr_fd) throws IndexOutOfBoundsException, IOException;
private native int waitForProcess(int pid);
private native void killProcess(int pid);
static {
// Locate the helper and load the arch-specific JNI library, falling
// back to the unsuffixed name; failures are recorded, not thrown.
binrunner = findBinRunner(System.getProperty("posixspawn.binrunner", "binrunner"));
try {
String arch = System.getProperty("os.arch", "i386");
try {
System.loadLibrary("jlinuxfork-" + arch);
} catch (Throwable t) {
System.loadLibrary("jlinuxfork");
}
libLoaded = true;
}
catch (Throwable t) {
libLoaded = false;
libLoadError = t;
}
}
// Resolve the binrunner helper: use the given path if it exists,
// otherwise search each directory of $PATH.  Returns null when absent.
private static String findBinRunner(String originalPath) {
if (new File(originalPath).exists()) {
return originalPath;
}
String Path = System.getenv("PATH");
String [] paths = ((Path == null) ? "/usr/bin:/bin" : Path).split(":");
for (String path: paths) {
if (path == null) {
continue;
}
File currentFile = new File(path, originalPath);
if (currentFile.exists()) {
return currentFile.getAbsolutePath();
}
}
return null;
}
// One-shot latch used to hand an IOException from the privileged
// spawn action back to the constructing thread.
private static class Gate {
private boolean exited = false;
private IOException savedException;
synchronized void exit() { /* Opens the gate */
exited = true;
this.notify();
}
synchronized void waitForExit() { /* wait until the gate is open */
boolean interrupted = false;
while (!exited) {
try {
this.wait();
} catch (InterruptedException e) {
interrupted = true;
}
}
if (interrupted) {
// preserve the caller's interrupt status
Thread.currentThread().interrupt();
}
}
void setException (IOException e) {
savedException = e;
}
IOException getException() {
return savedException;
}
}
/**
 * Spawn cmdarray with environment envp in directory chdir.
 * A daemon "process reaper" thread waits on the child and records its
 * exit status; any IOException raised during the native exec is
 * captured via the Gate and rethrown here.
 * NOTE(review): the error message below reads the "linuxfork.binrunner"
 * property while the static initializer uses "posixspawn.binrunner" --
 * the reported default looks inconsistent with the one actually used.
 */
public SpawnedProcess(final String [] cmdarray, final String [] envp, final File chdir) throws IOException {
if (binrunner == null) {
throw new RuntimeException("Couldn't find binrunner program. Tried: " + System.getProperty("linuxfork.binrunner", "binrunner"));
}
for (String arg : cmdarray) {
if (arg == null) {
throw new NullPointerException();
}
}
String prog = cmdarray[0];
SecurityManager security = System.getSecurityManager();
if (security != null) {
security.checkExec(prog);
}
stdin_fd = new FileDescriptor();
stdout_fd = new FileDescriptor();
stderr_fd = new FileDescriptor();
final Gate gate = new Gate();
AccessController.doPrivileged(
new PrivilegedAction<Object>() {
public Object run() {
try {
pid = execProcess(cmdarray, envp, chdir.getAbsolutePath(), binrunner, stdin_fd, stdout_fd, stderr_fd);
} catch (IOException e) {
gate.setException(e);
gate.exit();
return null;
}
stdin = new BufferedOutputStream(new FileOutputStream(stdin_fd));
stdout = new BufferedInputStream(new FileInputStream(stdout_fd));
stderr = new FileInputStream(stderr_fd);
Thread t = new Thread("process reaper") {
public void run() {
gate.exit();
int res = waitForProcess(pid);
synchronized (SpawnedProcess.this) {
hasExited = true;
exitCode = res;
SpawnedProcess.this.notifyAll();
}
}
};
t.setDaemon(true);
t.start();
return null;
}
});
if (gate.getException() != null) {
throw gate.getException();
}
}
/** True when the JNI library loaded successfully at class-load time. */
public static boolean isLibraryLoaded() {
return libLoaded;
}
/** The Throwable from a failed library load, or null. */
public static Throwable getLibLoadError() {
return libLoadError;
}
@Override
public synchronized int exitValue() throws IllegalThreadStateException {
if (!hasExited) {
throw new IllegalThreadStateException("Process has not yet exited.");
}
return this.exitCode;
}
@Override
public InputStream getInputStream () {
return stdout;
}
@Override
public InputStream getErrorStream() {
return stderr;
}
@Override
public OutputStream getOutputStream() {
return stdin;
}
@Override
public synchronized int waitFor() throws InterruptedException {
// block until the reaper thread records the exit status
while (!hasExited) {
wait();
}
return exitCode;
}
@Override
public void destroy () {
synchronized (this) {
if (!hasExited) {
killProcess(pid);
}
}
closeStreams();
}
// Best-effort close of all three child streams.
private void closeStreams() {
try {
stdin.close();
} catch (IOException e) {}
try {
stdout.close();
} catch (IOException e) {}
try {
stderr.close();
} catch (IOException e) {}
}
@Override
public String toString() {
if (hasExited) {
return "[SpawnedProcess pid=" + pid + " exitcode=" + exitCode + "]";
} else {
return "[SpawnedProcess pid=" + pid + " exited=false]";
}
}
}
| axiak/java_posix_spawn |
<|start_filename|>package.json<|end_filename|>
{
"name": "auth-ts-boilerplate",
"version": "1.0.0",
"main": "src/server.ts",
"repository": "ssh://git@github.com:smakosh/auth-ts-boilerplate.git",
"author": "Smakosh <<EMAIL>>",
"license": "MIT",
"private": false,
"devDependencies": {
"@types/body-parser": "^1.19.0",
"@types/connect-redis": "^0.0.16",
"@types/express": "^4.17.9",
"@types/express-session": "^1.17.3",
"@types/faker": "^5.1.7",
"@types/node": "^14.14.31",
"@types/passport": "^1.0.6",
"@types/passport-local": "^1.0.33",
"@types/redis": "^2.8.28",
"@types/uuid": "^8.3.0",
"@typescript-eslint/eslint-plugin": "^4.15.2",
"@typescript-eslint/parser": "^4.15.2",
"eslint": "^7.20.0",
"eslint-config-prettier": "^8.0.0",
"eslint-plugin-prettier": "^3.3.1",
"husky": "^5.1.1",
"jest": "^26.6.3",
"nodemon": "^2.0.7",
"prettier": "^2.2.1",
"ts-node": "9.1.1",
"ts-node-dev": "^1.1.1",
"typescript": "4.2.2"
},
"dependencies": {
"@sendgrid/mail": "^7.4.2",
"apollo-server": "^2.21.0",
"apollo-server-express": "^2.21.0",
"argon2": "^0.27.1",
"body-parser": "^1.19.0",
"class-validator": "^0.13.1",
"connect-redis": "^5.1.0",
"cors": "^2.8.5",
"dayjs": "^1.10.4",
"dotenv": "^8.2.0",
"express": "^4.17.1",
"express-graphql": "^0.12.0",
"express-session": "^1.17.1",
"faker": "^5.4.0",
"graphql": "^15.5.0",
"graphql-custom-types": "^1.6.0",
"helmet": "^4.3.1",
"module-alias": "^2.2.2",
"passport": "^0.4.1",
"pg": "^8.7.1",
"redis": "^3.0.2",
"reflect-metadata": "^0.1.10",
"type-graphql": "^1.1.1",
"typeorm": "0.2.31",
"typeorm-seeding": "^1.6.1",
"uuid": "^8.3.2",
"winston": "^3.3.3"
},
"scripts": {
"lint": "eslint --ext .ts --ignore-path .eslintignore ./src",
"lint:fix": "yarn lint --fix",
"pretty": "prettier **/*.ts --write",
"start": "ts-node ./src/server.ts",
"start:dev": "ts-node-dev --inspect --debug --exit-child --respawn --transpile-only ./src/server.ts",
"start:local:dev": "nodemon ./dist/server.js",
"build": "tsc",
"watch": "tsc --watch",
"migration:create": "typeorm migration:create",
"migration:up": "ts-node ./node_modules/typeorm/cli.js migration:run",
"migration:down": "ts-node ./node_modules/typeorm/cli.js migration:revert",
"seed:config": "ts-node ./node_modules/typeorm-seeding/dist/cli.js config",
"seed:run": "ts-node ./node_modules/typeorm-seeding/dist/cli.js seed",
"schema:drop": "ts-node ./node_modules/typeorm/cli.js schema:drop",
"schema:sync": "ts-node ./node_modules/typeorm/cli.js schema:sync",
"entity:create": "typeorm entity:create",
"docker:compose": "docker-compose up -d --build",
"docker:compose:dev": "docker-compose --file dev.compose.yml up -d --build",
"docker:compose:prod": "docker-compose --file prod.compose.yml up -d --build",
"docker:logs": "docker logs auth_boilerplate_ts_server",
"docker:logs:follow": "docker logs --tail 20 auth_boilerplate_ts_server --follow",
"docker:yarn": "docker exec -t auth_boilerplate_ts_server yarn",
"docker:migration:up": "docker exec -t auth_boilerplate_ts_server yarn migration:up",
"docker:migration:down": "docker exec -t auth_boilerplate_ts_server yarn migration:down",
"docker:seed:run": "docker exec -t auth_boilerplate_ts_server yarn seed:run",
"docker:schema:drop": "docker exec -t auth_boilerplate_ts_server yarn schema:drop",
"docker:schema:sync": "docker exec -t auth_boilerplate_ts_server yarn schema:sync",
"docker:restart": "docker-compose restart",
"docker:exec": "docker exec -it auth_boilerplate_ts_server sh",
"docker:up": "docker-compose up -d",
"docker:stop": "docker-compose stop"
},
"_moduleAliases": {
"@root": ".",
"@entity": "dist/entity"
}
}
| emcooper/auth-ts-boilerplate |
<|start_filename|>cartographer/common/ordered_multi_queue.h<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CARTOGRAPHER_COMMON_ORDERED_MULTI_QUEUE_H_
#define CARTOGRAPHER_COMMON_ORDERED_MULTI_QUEUE_H_
#include <algorithm>
#include <functional>
#include <map>
#include <memory>
#include <vector>

#include "cartographer/common/blocking_queue.h"
#include "cartographer/common/make_unique.h"
#include "cartographer/common/port.h"
namespace cartographer {
namespace common {
// Number of items that can be queued up before we LOG(WARNING). Used by
// OrderedMultiQueue::CannotMakeProgress() to flag queues that grow without
// bound because a sibling queue is starving the dispatcher.
const int kMaxQueueSize = 500;
// Maintains multiple queues of sorted values and dispatches merge sorted
// values. This class is thread-compatible.
//
// Each queue is registered under a QueueKeyType and filled independently via
// Add(). Dispatch() hands out the globally smallest value (by SortKeyType,
// with the queue key as tie-breaker) to that queue's callback, but only once
// every still-open queue has at least one value buffered — this is what
// guarantees globally sorted dispatch order.
template <typename QueueKeyType, typename SortKeyType, typename ValueType>
class OrderedMultiQueue {
 public:
  using Callback = std::function<void(std::unique_ptr<ValueType>)>;

  // Will wait to see at least one value for each 'expected_queue_keys' before
  // dispatching the next smallest value across all queues.
  explicit OrderedMultiQueue(const SortKeyType min_sort_key = SortKeyType())
      : last_dispatched_key_(min_sort_key) {}

  ~OrderedMultiQueue() {}

  // Registers a new queue under 'queue_key'; the key must not already be in
  // use. 'callback' will receive this queue's values in merge-sorted order.
  void AddQueue(const QueueKeyType& queue_key, Callback callback) {
    CHECK(FindOrNull(queue_key) == nullptr);
    queues_[queue_key].callback = callback;
  }

  // Marks 'queue_key' as complete: no further values will arrive, so the
  // dispatcher stops waiting on it. May immediately trigger dispatching.
  void MarkQueueAsFinished(const QueueKeyType& queue_key) {
    auto& queue = FindOrDie(queue_key);
    CHECK(!queue.finished);
    queue.finished = true;
    Dispatch();
  }

  // Returns true if a queue was registered under 'queue_key'.
  bool HasQueue(const QueueKeyType& queue_key) {
    return queues_.count(queue_key) != 0;
  }

  // Adds 'value' with the given 'sort_key' to the queue 'queue_key'. Values
  // for an unknown queue are dropped with a (rate-limited) warning.
  void Add(const QueueKeyType& queue_key, const SortKeyType& sort_key,
           std::unique_ptr<ValueType> value) {
    auto* queue = FindOrNull(queue_key);
    if (queue == nullptr) {
      // TODO(damonkohler): This will not work for every value of "queue_key".
      LOG_EVERY_N(WARNING, 60) << "Ignored value for queue: '" << queue_key
                               << "'";
      return;
    }
    queue->queue.Push(
        common::make_unique<KeyValuePair>(sort_key, std::move(value)));
    Dispatch();
  }

  // Dispatches all remaining values in sorted order and removes the underlying
  // queues.
  void Flush() {
    // Collect keys first: MarkQueueAsFinished() mutates 'queues_' (via
    // Dispatch() erasing drained queues), so we must not iterate it directly.
    std::vector<QueueKeyType> unfinished_queues;
    for (auto& entry : queues_) {
      if (!entry.second.finished) {
        unfinished_queues.push_back(entry.first);
      }
    }
    for (auto& unfinished_queue : unfinished_queues) {
      MarkQueueAsFinished(unfinished_queue);
    }
  }

  // Returns the number of available values associated with 'queue_key'.
  int num_available(const QueueKeyType& queue_key) {
    return FindOrDie(queue_key).queue.Size();
  }

 private:
  // A value tagged with the sort key under which it was added.
  struct KeyValuePair {
    KeyValuePair(const SortKeyType& sort_key, std::unique_ptr<ValueType> value)
        : sort_key(sort_key), value(std::move(value)) {}

    SortKeyType sort_key;
    std::unique_ptr<ValueType> value;
  };

  // Per-key state: the buffered values, the consumer callback, and whether
  // the producer has declared this queue complete.
  struct Queue {
    common::BlockingQueue<std::unique_ptr<KeyValuePair>> queue;
    Callback callback;
    bool finished = false;
  };

  // Returns the queue with 'key' or LOG(FATAL).
  Queue& FindOrDie(const QueueKeyType& key) {
    auto it = queues_.find(key);
    CHECK(it != queues_.end()) << "Did not find '" << key << "'.";
    return it->second;
  }

  // Returns the queue with 'key' or nullptr.
  Queue* FindOrNull(const QueueKeyType& key) {
    auto it = queues_.find(key);
    if (it == queues_.end()) {
      return nullptr;
    }
    return &it->second;
  }

  // Repeatedly pops the globally smallest buffered value and passes it to its
  // queue's callback. Stops when some unfinished queue is empty (we cannot
  // know yet whether a smaller value will still arrive on it) or when all
  // queues have been drained and erased.
  void Dispatch() {
    while (true) {
      Queue* next_queue = nullptr;
      const KeyValuePair* next_key_value_pair = nullptr;
      for (auto it = queues_.begin(); it != queues_.end();) {
        auto& queue = it->second.queue;
        const auto* key_value_pair = queue.template Peek<KeyValuePair>();
        if (key_value_pair == nullptr) {
          if (it->second.finished) {
            // Finished and empty: this queue can never contribute again.
            queues_.erase(it++);
            continue;
          }
          // An unfinished queue is empty — dispatching now could violate the
          // global sort order, so bail out (possibly warning about backlog).
          CannotMakeProgress();
          return;
        }
        // Track the minimum over all queue heads; ties broken by queue key.
        if (next_key_value_pair == nullptr ||
            std::forward_as_tuple(key_value_pair->sort_key, it->first) <
                std::forward_as_tuple(next_key_value_pair->sort_key,
                                      it->first)) {
          next_key_value_pair = key_value_pair;
          next_queue = &it->second;
        }
        CHECK_LE(last_dispatched_key_, next_key_value_pair->sort_key)
            << "Non-sorted values added to queue: '" << it->first << "'";
        ++it;
      }
      if (next_key_value_pair == nullptr) {
        CHECK(queues_.empty());
        return;
      }
      last_dispatched_key_ = next_key_value_pair->sort_key;
      next_queue->callback(std::move(next_queue->queue.Pop()->value));
    }
  }

  // Called when not all necessary queues are filled to dispatch messages.
  void CannotMakeProgress() {
    for (auto& entry : queues_) {
      LOG_IF_EVERY_N(WARNING, entry.second.queue.Size() > kMaxQueueSize, 60)
          << "Queue " << entry.first << " exceeds maximum size.";
    }
  }

  // Used to verify that values are dispatched in sorted order.
  SortKeyType last_dispatched_key_;

  std::map<QueueKeyType, Queue> queues_;
};
} // namespace common
} // namespace cartographer
#endif // CARTOGRAPHER_COMMON_ORDERED_MULTI_QUEUE_H_
<|start_filename|>cartographer/mapping_3d/laser_fan_inserter.h<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CARTOGRAPHER_MAPPING_3D_LASER_FAN_INSERTER_H_
#define CARTOGRAPHER_MAPPING_3D_LASER_FAN_INSERTER_H_
#include "cartographer/mapping_3d/hybrid_grid.h"
#include "cartographer/mapping_3d/proto/laser_fan_inserter_options.pb.h"
#include "cartographer/sensor/laser.h"
#include "cartographer/sensor/point_cloud.h"
namespace cartographer {
namespace mapping_3d {
// Parses LaserFanInserter configuration from the given Lua parameter
// dictionary into its proto representation.
proto::LaserFanInserterOptions CreateLaserFanInserterOptions(
    common::LuaParameterDictionary* parameter_dictionary);

// Integrates laser scans into a 3D HybridGrid. Non-copyable; all
// configuration is fixed at construction time.
class LaserFanInserter {
 public:
  explicit LaserFanInserter(const proto::LaserFanInserterOptions& options);

  LaserFanInserter(const LaserFanInserter&) = delete;
  LaserFanInserter& operator=(const LaserFanInserter&) = delete;

  // Inserts 'laser_fan' into 'hybrid_grid'.
  void Insert(const sensor::LaserFan3D& laser_fan,
              HybridGrid* hybrid_grid) const;

 private:
  const proto::LaserFanInserterOptions options_;
  // Precomputed update tables used by Insert(); presumably probability
  // update lookups for hit/miss observations — contents are defined in the
  // corresponding .cc file.
  const std::vector<uint16> hit_table_;
  const std::vector<uint16> miss_table_;
};
} // namespace mapping_3d
} // namespace cartographer
#endif // CARTOGRAPHER_MAPPING_3D_LASER_FAN_INSERTER_H_
<|start_filename|>cartographer/mapping_3d/sparse_pose_graph/optimization_problem.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/mapping_3d/sparse_pose_graph/optimization_problem.h"
#include <array>
#include <cmath>
#include <map>
#include <memory>
#include <string>
#include <vector>
#include "Eigen/Core"
#include "cartographer/common/ceres_solver_options.h"
#include "cartographer/common/make_unique.h"
#include "cartographer/common/math.h"
#include "cartographer/common/time.h"
#include "cartographer/mapping_3d/acceleration_cost_function.h"
#include "cartographer/mapping_3d/ceres_pose.h"
#include "cartographer/mapping_3d/imu_integration.h"
#include "cartographer/mapping_3d/rotation_cost_function.h"
#include "cartographer/transform/transform.h"
#include "ceres/ceres.h"
#include "ceres/jet.h"
#include "ceres/rotation.h"
#include "glog/logging.h"
namespace cartographer {
namespace mapping_3d {
namespace sparse_pose_graph {
namespace {
// "Plus" operation for a Ceres AutoDiffLocalParameterization over a unit
// quaternion 'x' (4 parameters) with a 2-dimensional tangent 'delta': the
// delta is an angle-axis vector confined to the xy-plane, so the yaw
// component of the parameterized rotation stays constant.
struct ConstantYawQuaternionPlus {
  template <typename T>
  bool operator()(const T* x, const T* delta, T* x_plus_delta) const {
    // Rotation angle = norm of the 2D angle-axis vector.
    const T delta_norm =
        ceres::sqrt(common::Pow2(delta[0]) + common::Pow2(delta[1]));
    // sin(a)/a with its small-angle limit of 1 to stay differentiable and
    // avoid dividing by (near) zero.
    const T sin_delta_over_delta =
        delta_norm < 1e-6 ? T(1.) : ceres::sin(delta_norm) / delta_norm;
    // Quaternion of the delta rotation: (cos a, sin(a)/a * dx, sin(a)/a * dy, 0).
    T q_delta[4];
    q_delta[0] = delta_norm < 1e-6 ? T(1.) : ceres::cos(delta_norm);
    q_delta[1] = sin_delta_over_delta * delta[0];
    q_delta[2] = sin_delta_over_delta * delta[1];
    q_delta[3] = T(0.);
    // We apply the 'delta' which is interpreted as an angle-axis rotation
    // vector in the xy-plane of the submap frame. This way we can align to
    // gravity because rotations around the z-axis in the submap frame do not
    // change gravity alignment, while disallowing random rotations of the map
    // that have nothing to do with gravity alignment (i.e. we disallow steps
    // just changing "yaw" of the complete map).
    ceres::QuaternionProduct(x, q_delta, x_plus_delta);
    return true;
  }
};
} // namespace
// Copies the options; they can later be adjusted via SetMaxNumIterations().
OptimizationProblem::OptimizationProblem(
    const mapping::sparse_pose_graph::proto::OptimizationProblemOptions&
        options)
    : options_(options) {}

OptimizationProblem::~OptimizationProblem() {}
// Computes the raw 6-DoF residual between the relative pose predicted by the
// current estimates of poses i and j (rotation as quaternion (w,x,y,z),
// translation as 3-vector) and the measured constraint 'zbar_ij'. The first
// three components are the translation error, the last three the rotation
// error as an angle-axis vector.
template <typename T>
std::array<T, 6> OptimizationProblem::SpaCostFunction::ComputeUnscaledError(
    const transform::Rigid3d& zbar_ij, const T* const c_i_rotation,
    const T* const c_i_translation, const T* const c_j_rotation,
    const T* const c_j_translation) {
  // Conjugate of quaternion i == its inverse (unit quaternion assumed).
  const Eigen::Quaternion<T> R_i_inverse(c_i_rotation[0], -c_i_rotation[1],
                                         -c_i_rotation[2], -c_i_rotation[3]);

  const Eigen::Matrix<T, 3, 1> delta(c_j_translation[0] - c_i_translation[0],
                                     c_j_translation[1] - c_i_translation[1],
                                     c_j_translation[2] - c_i_translation[2]);
  // Predicted relative translation expressed in frame i.
  const Eigen::Matrix<T, 3, 1> h_translation = R_i_inverse * delta;

  // Inverse of the predicted relative rotation: R_j^-1 * R_i.
  const Eigen::Quaternion<T> h_rotation_inverse =
      Eigen::Quaternion<T>(c_j_rotation[0], -c_j_rotation[1], -c_j_rotation[2],
                           -c_j_rotation[3]) *
      Eigen::Quaternion<T>(c_i_rotation[0], c_i_rotation[1], c_i_rotation[2],
                           c_i_rotation[3]);

  const Eigen::Matrix<T, 3, 1> angle_axis_difference =
      transform::RotationQuaternionToAngleAxisVector(
          h_rotation_inverse * zbar_ij.rotation().cast<T>());

  return {{T(zbar_ij.translation().x()) - h_translation[0],
           T(zbar_ij.translation().y()) - h_translation[1],
           T(zbar_ij.translation().z()) - h_translation[2],
           angle_axis_difference[0], angle_axis_difference[1],
           angle_axis_difference[2]}};
}
// Computes the raw residual for 'pose' and scales it by the square-root
// information matrix: e = sqrt_Lambda_ij * e_ij.
template <typename T>
void OptimizationProblem::SpaCostFunction::ComputeScaledError(
    const Constraint::Pose& pose, const T* const c_i_rotation,
    const T* const c_i_translation, const T* const c_j_rotation,
    const T* const c_j_translation, T* const e) {
  const std::array<T, 6> unscaled_error =
      ComputeUnscaledError(pose.zbar_ij, c_i_rotation, c_i_translation,
                           c_j_rotation, c_j_translation);
  // Dense 6x6 matrix-vector product, one output row at a time.
  for (int row = 0; row != 6; ++row) {
    T accumulator(0.);
    for (int col = 0; col != 6; ++col) {
      accumulator += pose.sqrt_Lambda_ij(row, col) * unscaled_error[col];
    }
    e[row] = accumulator;
  }
}
// Ceres cost functor entry point: writes the 6-dimensional scaled residual
// for the stored constraint 'pose_' into 'e'. Always returns true (the
// residual is defined for all inputs).
template <typename T>
bool OptimizationProblem::SpaCostFunction::operator()(
    const T* const c_i_rotation, const T* const c_i_translation,
    const T* const c_j_rotation, const T* const c_j_translation, T* e) const {
  ComputeScaledError(pose_, c_i_rotation, c_i_translation, c_j_rotation,
                     c_j_translation, e);
  return true;
}
// Appends one IMU measurement. Solve() walks 'imu_data_' forward in time, so
// measurements are expected to arrive in increasing time order.
void OptimizationProblem::AddImuData(common::Time time,
                                     const Eigen::Vector3d& linear_acceleration,
                                     const Eigen::Vector3d& angular_velocity) {
  imu_data_.push_back(ImuData{time, linear_acceleration, angular_velocity});
}
// Appends a trajectory node with its initial (pre-optimization) pose and the
// current pose estimate; the latter is refined in place by Solve().
void OptimizationProblem::AddTrajectoryNode(
    common::Time time, const transform::Rigid3d& initial_point_cloud_pose,
    const transform::Rigid3d& point_cloud_pose) {
  node_data_.push_back(
      NodeData{time, initial_point_cloud_pose, point_cloud_pose});
}
// Overrides the Ceres solver iteration limit for subsequent Solve() calls.
void OptimizationProblem::SetMaxNumIterations(const int32 max_num_iterations) {
  options_.mutable_ceres_solver_options()->set_max_num_iterations(
      max_num_iterations);
}
// Builds and solves the sparse pose adjustment problem over submap poses and
// node (point cloud) poses, using loop-closure 'constraints' plus IMU-derived
// rotation and acceleration residuals. On success, 'submap_transforms' and
// the internal node poses are overwritten with the optimized values.
//
// NOTE(review): 'submap_0_transform' is never read in this body — the first
// submap is tied to the identity instead. Confirm whether this parameter is
// intentionally unused.
void OptimizationProblem::Solve(
    const std::vector<Constraint>& constraints,
    const transform::Rigid3d& submap_0_transform,
    const std::vector<const mapping::Submaps*>& trajectories,
    std::vector<transform::Rigid3d>* submap_transforms) {
  if (node_data_.empty()) {
    // Nothing to optimize.
    return;
  }
  CHECK(!imu_data_.empty());

  ceres::Problem::Options problem_options;
  ceres::Problem problem(problem_options);

  // Set the starting point.
  std::deque<CeresPose> C_submaps;
  std::deque<CeresPose> C_point_clouds;
  // Tie the first submap to the origin. Its rotation uses the 2-parameter
  // ConstantYawQuaternionPlus parameterization so only gravity alignment
  // (roll/pitch), not yaw, can change; its translation is held constant.
  CHECK(!submap_transforms->empty());
  C_submaps.emplace_back(
      transform::Rigid3d::Identity(),
      common::make_unique<ceres::AutoDiffLocalParameterization<
          ConstantYawQuaternionPlus, 4, 2>>(),
      &problem);
  problem.SetParameterBlockConstant(C_submaps.back().translation());
  for (size_t i = 1; i != submap_transforms->size(); ++i) {
    C_submaps.emplace_back(
        (*submap_transforms)[i],
        common::make_unique<ceres::QuaternionParameterization>(), &problem);
  }
  for (size_t j = 0; j != node_data_.size(); ++j) {
    C_point_clouds.emplace_back(
        node_data_[j].point_cloud_pose,
        common::make_unique<ceres::QuaternionParameterization>(), &problem);
  }
  // Add cost functions for the loop closing constraints.
  for (const Constraint& constraint : constraints) {
    CHECK_GE(constraint.i, 0);
    CHECK_LT(constraint.i, submap_transforms->size());
    CHECK_GE(constraint.j, 0);
    CHECK_LT(constraint.j, node_data_.size());
    problem.AddResidualBlock(
        new ceres::AutoDiffCostFunction<SpaCostFunction, 6, 4, 3, 4, 3>(
            new SpaCostFunction(constraint.pose)),
        // Huber loss makes the optimization robust to outlier constraints.
        new ceres::HuberLoss(options_.huber_scale()),
        C_submaps[constraint.i].rotation(),
        C_submaps[constraint.i].translation(),
        C_point_clouds[constraint.j].rotation(),
        C_point_clouds[constraint.j].translation());
  }
  CHECK(!node_data_.empty());
  CHECK_GE(trajectories.size(), node_data_.size());

  // Add constraints for IMU observed data: angular velocities and
  // accelerations.
  // Advance 'it' to the last IMU sample at or before the first node's time.
  auto it = imu_data_.cbegin();
  while ((it + 1) != imu_data_.cend() && (it + 1)->time <= node_data_[0].time) {
    ++it;
  }
  for (size_t j = 1; j < node_data_.size(); ++j) {
    auto it2 = it;
    const IntegrateImuResult<double> result = IntegrateImu(
        imu_data_, node_data_[j - 1].time, node_data_[j].time, &it);
    if (j + 1 < node_data_.size()) {
      const common::Duration first_delta_time =
          node_data_[j].time - node_data_[j - 1].time;
      const common::Duration second_delta_time =
          node_data_[j + 1].time - node_data_[j].time;
      const common::Time first_center =
          node_data_[j - 1].time + first_delta_time / 2;
      const common::Time second_center =
          node_data_[j].time + second_delta_time / 2;
      const IntegrateImuResult<double> result_to_first_center =
          IntegrateImu(imu_data_, node_data_[j - 1].time, first_center, &it2);
      const IntegrateImuResult<double> result_center_to_center =
          IntegrateImu(imu_data_, first_center, second_center, &it2);
      // 'delta_velocity' is the change in velocity from the point in time
      // halfway between the first and second poses to halfway between second
      // and third pose. It is computed from IMU data and still contains a
      // delta due to gravity. The orientation of this vector is in the IMU
      // frame at the second pose.
      const Eigen::Vector3d delta_velocity =
          (result.delta_rotation.inverse() *
           result_to_first_center.delta_rotation) *
          result_center_to_center.delta_velocity;
      problem.AddResidualBlock(
          new ceres::AutoDiffCostFunction<AccelerationCostFunction, 3, 4, 3, 3,
                                          3, 1>(new AccelerationCostFunction(
              options_.acceleration_scale(), delta_velocity,
              common::ToSeconds(first_delta_time),
              common::ToSeconds(second_delta_time))),
          nullptr, C_point_clouds[j].rotation(),
          C_point_clouds[j - 1].translation(), C_point_clouds[j].translation(),
          C_point_clouds[j + 1].translation(), &gravity_constant_);
    }
    problem.AddResidualBlock(
        new ceres::AutoDiffCostFunction<RotationCostFunction, 3, 4, 4>(
            new RotationCostFunction(options_.rotation_scale(),
                                     result.delta_rotation)),
        nullptr, C_point_clouds[j - 1].rotation(),
        C_point_clouds[j].rotation());
  }

  // Solve.
  ceres::Solver::Summary summary;
  ceres::Solver::Options ceres_solver_options =
      common::CreateCeresSolverOptions(options_.ceres_solver_options());
  ceres::Solve(ceres_solver_options, &problem, &summary);
  if (options_.log_solver_summary()) {
    LOG(INFO) << summary.FullReport();
    LOG(INFO) << "Gravity was: " << gravity_constant_;
  }

  // Store the result.
  for (size_t i = 0; i != submap_transforms->size(); ++i) {
    (*submap_transforms)[i] = C_submaps[i].ToRigid();
  }
  for (size_t j = 0; j != node_data_.size(); ++j) {
    node_data_[j].point_cloud_pose = C_point_clouds[j].ToRigid();
  }
}
// Read-only access to the trajectory nodes (with poses refined by Solve()).
const std::vector<NodeData>& OptimizationProblem::node_data() const {
  return node_data_;
}
} // namespace sparse_pose_graph
} // namespace mapping_3d
} // namespace cartographer
<|start_filename|>cartographer/common/histogram.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/common/histogram.h"
#include <algorithm>
#include <numeric>
#include <string>
#include "cartographer/common/port.h"
#include "glog/logging.h"
namespace cartographer {
namespace common {
namespace {
// Left-pads 'input' with spaces so the result is exactly 'new_length'
// characters long; 'new_length' must be at least input.size().
string PaddedTo(string input, int new_length) {
  CHECK_GE(new_length, input.size());
  const string padding(new_length - input.size(), ' ');
  return padding + input;
}
} // namespace
// Increments the counter for 'bucket', creating it on first use.
void BucketHistogram::Hit(const string& bucket) { ++buckets_[bucket]; }
// Renders one line per bucket — name, count, and percentage of all hits —
// followed by a "Total:" line. Bucket names are right-aligned to the widest
// name; counts are right-aligned in a 7-character field.
string BucketHistogram::ToString() const {
  // First pass: total hit count and the widest bucket name, for alignment.
  int64 total = 0;
  size_t name_width = 0;
  for (const auto& bucket : buckets_) {
    total += bucket.second;
    name_width = std::max(bucket.first.size(), name_width);
  }
  // Second pass: format each bucket. Dividing by max(1, total) keeps the
  // percentage well-defined when there are no hits at all.
  string output;
  for (const auto& bucket : buckets_) {
    const float percent = 100.f * bucket.second / std::max<int64>(1, total);
    output += PaddedTo(bucket.first, name_width) + ": " +
              PaddedTo(std::to_string(bucket.second), 7) + " (" +
              std::to_string(percent) + " %)\n";
  }
  output += "Total: " + std::to_string(total);
  return output;
}
// Records one sample value for later bucketing in ToString().
void Histogram::Add(const float value) { values_.push_back(value); }
// Renders count/min/max/mean of the recorded samples, followed by 'buckets'
// equally spaced ranges over [min, max], each with an ASCII bar chart, its
// sample count, and the cumulative count. Returns just the summary when all
// samples are equal (no spread to bucket). 'buckets' must be >= 1.
//
// Fix: the inner bar-drawing loop reused 'i' as its index, shadowing the
// outer bucket index; the inner index is now 'c' (behavior was unaffected,
// but the shadowing was bugprone and confusing).
string Histogram::ToString(const int buckets) const {
  CHECK_GE(buckets, 1);
  if (values_.empty()) {
    return "Count: 0";
  }
  const float min = *std::min_element(values_.begin(), values_.end());
  const float max = *std::max_element(values_.begin(), values_.end());
  const float mean =
      std::accumulate(values_.begin(), values_.end(), 0.f) / values_.size();
  string result = "Count: " + std::to_string(values_.size()) + " Min: " +
                  std::to_string(min) + " Max: " + std::to_string(max) +
                  " Mean: " + std::to_string(mean);
  if (min == max) {
    return result;
  }
  CHECK_LT(min, max);
  float lower_bound = min;
  int total_count = 0;
  for (int i = 0; i != buckets; ++i) {
    // Last bucket ends exactly at 'max' (and is closed) to avoid losing the
    // maximum sample to floating-point rounding.
    const float upper_bound =
        (i + 1 == buckets)
            ? max
            : (max * (i + 1) / buckets + min * (buckets - i - 1) / buckets);
    int count = 0;
    for (const float value : values_) {
      if (lower_bound <= value &&
          (i + 1 == buckets ? value <= upper_bound : value < upper_bound)) {
        ++count;
      }
    }
    total_count += count;
    result += "\n[" + std::to_string(lower_bound) + ", " +
              std::to_string(upper_bound) + ((i + 1 == buckets) ? "]" : ")");
    constexpr int kMaxBarChars = 20;
    // Rounded-to-nearest share of the bar for this bucket.
    const int bar =
        (count * kMaxBarChars + values_.size() / 2) / values_.size();
    result += "\t";
    for (int c = 0; c != kMaxBarChars; ++c) {
      result += (c < (kMaxBarChars - bar)) ? " " : "#";
    }
    result += "\tCount: " + std::to_string(count) + " (" +
              std::to_string(count * 1e2f / values_.size()) + "%)";
    result += "\tTotal: " + std::to_string(total_count) + " (" +
              std::to_string(total_count * 1e2f / values_.size()) + "%)";
    lower_bound = upper_bound;
  }
  return result;
}
} // namespace common
} // namespace cartographer
<|start_filename|>cartographer/mapping_3d/local_trajectory_builder.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/mapping_3d/local_trajectory_builder.h"
#include "cartographer/common/make_unique.h"
#include "cartographer/mapping_3d/kalman_local_trajectory_builder.h"
#include "cartographer/mapping_3d/optimizing_local_trajectory_builder.h"
namespace cartographer {
namespace mapping_3d {
// Factory: instantiates the trajectory builder implementation selected by
// the 'use' field of the options (KALMAN or OPTIMIZING). Any other value is
// a fatal configuration error.
std::unique_ptr<LocalTrajectoryBuilderInterface> CreateLocalTrajectoryBuilder(
    const proto::LocalTrajectoryBuilderOptions&
        local_trajectory_builder_options) {
  switch (local_trajectory_builder_options.use()) {
    case proto::LocalTrajectoryBuilderOptions::KALMAN:
      return common::make_unique<KalmanLocalTrajectoryBuilder>(
          local_trajectory_builder_options);
    case proto::LocalTrajectoryBuilderOptions::OPTIMIZING:
      return common::make_unique<OptimizingLocalTrajectoryBuilder>(
          local_trajectory_builder_options);
  }
  // Fix: the bare 'LOG(FATAL);' gave no indication of what went wrong; log
  // the offending enum value before aborting.
  LOG(FATAL) << "Unhandled LocalTrajectoryBuilderOptions::Use value: "
             << local_trajectory_builder_options.use();
}
} // namespace mapping_3d
} // namespace cartographer
<|start_filename|>cartographer/kalman_filter/pose_tracker_test.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/kalman_filter/pose_tracker.h"
#include <random>
#include "cartographer/common/lua_parameter_dictionary.h"
#include "cartographer/common/lua_parameter_dictionary_test_helpers.h"
#include "cartographer/common/make_unique.h"
#include "cartographer/transform/rigid_transform.h"
#include "cartographer/transform/rigid_transform_test_helpers.h"
#include "gtest/gtest.h"
namespace cartographer {
namespace kalman_filter {
namespace {
constexpr double kOdometerVariance = 1e-12;
using transform::IsNearly;
using transform::Rigid3d;
using ::testing::Not;
// Fixture constructing a 3D-model PoseTracker from a fixed Lua options
// dictionary, so every test starts from the same state at time 1000.
class PoseTrackerTest : public ::testing::Test {
 protected:
  PoseTrackerTest() {
    auto parameter_dictionary = common::MakeDictionary(R"text(
        return {
          orientation_model_variance = 1e-8,
          position_model_variance = 1e-8,
          velocity_model_variance = 1e-8,
          imu_gravity_time_constant = 100.,
          imu_gravity_variance = 1e-9,
          num_odometry_states = 1,
        }
        )text");
    const proto::PoseTrackerOptions options =
        CreatePoseTrackerOptions(parameter_dictionary.get());
    pose_tracker_ = common::make_unique<PoseTracker>(
        options, PoseTracker::ModelFunction::k3D, common::FromUniversal(1000));
  }

  std::unique_ptr<PoseTracker> pose_tracker_;
};
// Embedding a random 2D pose covariance into 3D and projecting it back must
// reproduce the original matrix bit-for-bit.
TEST(CovarianceTest, EmbedAndProjectCovariance) {
  std::mt19937 prng(42);
  std::uniform_real_distribution<float> distribution(-10.f, 10.f);
  for (int i = 0; i < 100; ++i) {
    Pose2DCovariance covariance;
    for (int row = 0; row < 3; ++row) {
      for (int column = 0; column < 3; ++column) {
        covariance(row, column) = distribution(prng);
      }
    }
    const PoseCovariance embedded_covariance =
        Embed3D(covariance, distribution(prng), distribution(prng));
    EXPECT_TRUE(
        (Project2D(embedded_covariance).array() == covariance.array()).all());
  }
}
// A copy of a PoseTracker must evolve exactly like the original when both
// receive the same subsequent observations (copy semantics preserve the full
// filter state), while further observations must actually change the state.
TEST_F(PoseTrackerTest, SaveAndRestore) {
  std::vector<Rigid3d> poses(3);
  std::vector<PoseCovariance> covariances(3);
  // Estimate [0]: state before the extra observation.
  pose_tracker_->GetPoseEstimateMeanAndCovariance(common::FromUniversal(1500),
                                                  &poses[0], &covariances[0]);
  pose_tracker_->AddImuLinearAccelerationObservation(
      common::FromUniversal(2000), Eigen::Vector3d(1, 1, 9));
  PoseTracker copy_of_pose_tracker = *pose_tracker_;
  const Eigen::Vector3d observation(2, 0, 8);
  // Feed the identical observation to both original and copy.
  pose_tracker_->AddImuLinearAccelerationObservation(
      common::FromUniversal(3000), observation);
  pose_tracker_->GetPoseEstimateMeanAndCovariance(common::FromUniversal(3500),
                                                  &poses[1], &covariances[1]);
  copy_of_pose_tracker.AddImuLinearAccelerationObservation(
      common::FromUniversal(3000), observation);
  copy_of_pose_tracker.GetPoseEstimateMeanAndCovariance(
      common::FromUniversal(3500), &poses[2], &covariances[2]);
  // State changed over time, and original/copy agree exactly.
  EXPECT_THAT(poses[0], Not(IsNearly(poses[1], 1e-6)));
  EXPECT_FALSE((covariances[0].array() == covariances[1].array()).all());
  EXPECT_THAT(poses[1], IsNearly(poses[2], 1e-6));
  EXPECT_TRUE((covariances[1].array() == covariances[2].array()).all());
}
// Gravity alignment from accelerometer data: sustained acceleration along +z
// keeps the orientation at identity; switching the sensed gravity direction
// to +y must rotate the estimated orientation by 90 degrees about x.
TEST_F(PoseTrackerTest, AddImuLinearAccelerationObservation) {
  auto time = common::FromUniversal(1000);
  for (int i = 0; i < 300; ++i) {
    time += std::chrono::seconds(5);
    pose_tracker_->AddImuLinearAccelerationObservation(
        time, Eigen::Vector3d(0., 0., 10.));
  }
  {
    Rigid3d pose;
    PoseCovariance covariance;
    pose_tracker_->GetPoseEstimateMeanAndCovariance(time, &pose, &covariance);
    const Eigen::Quaterniond actual = Eigen::Quaterniond(pose.rotation());
    const Eigen::Quaterniond expected = Eigen::Quaterniond::Identity();
    EXPECT_TRUE(actual.isApprox(expected, 1e-3)) << expected.coeffs() << " vs\n"
                                                 << actual.coeffs();
  }
  for (int i = 0; i < 300; ++i) {
    time += std::chrono::seconds(5);
    pose_tracker_->AddImuLinearAccelerationObservation(
        time, Eigen::Vector3d(0., 10., 0.));
  }
  time += std::chrono::milliseconds(5);
  Rigid3d pose;
  PoseCovariance covariance;
  pose_tracker_->GetPoseEstimateMeanAndCovariance(time, &pose, &covariance);
  const Eigen::Quaterniond actual = Eigen::Quaterniond(pose.rotation());
  const Eigen::Quaterniond expected = Eigen::Quaterniond(
      Eigen::AngleAxisd(M_PI / 2., Eigen::Vector3d::UnitX()));
  EXPECT_TRUE(actual.isApprox(expected, 1e-3)) << expected.coeffs() << " vs\n"
                                               << actual.coeffs();
}
// Gyroscope integration: zero angular velocity keeps the orientation at
// identity; a constant rate about x, applied long enough to sweep pi/2, must
// yield a 90-degree rotation about x.
TEST_F(PoseTrackerTest, AddImuAngularVelocityObservation) {
  auto time = common::FromUniversal(1000);
  for (int i = 0; i < 300; ++i) {
    time += std::chrono::milliseconds(5);
    pose_tracker_->AddImuAngularVelocityObservation(time,
                                                    Eigen::Vector3d::Zero());
  }
  {
    Rigid3d pose;
    PoseCovariance covariance;
    pose_tracker_->GetPoseEstimateMeanAndCovariance(time, &pose, &covariance);
    const Eigen::Quaterniond actual = Eigen::Quaterniond(pose.rotation());
    const Eigen::Quaterniond expected = Eigen::Quaterniond::Identity();
    EXPECT_TRUE(actual.isApprox(expected, 1e-3)) << expected.coeffs() << " vs\n"
                                                 << actual.coeffs();
  }
  // Rate chosen so 300 observations at 5 ms apart integrate to pi/2.
  const double target_radians = M_PI / 2.;
  const double num_observations = 300.;
  const double angular_velocity = target_radians / (num_observations * 5e-3);
  for (int i = 0; i < num_observations; ++i) {
    time += std::chrono::milliseconds(5);
    pose_tracker_->AddImuAngularVelocityObservation(
        time, Eigen::Vector3d(angular_velocity, 0., 0.));
  }
  time += std::chrono::milliseconds(5);
  Rigid3d pose;
  PoseCovariance covariance;
  pose_tracker_->GetPoseEstimateMeanAndCovariance(time, &pose, &covariance);
  const Eigen::Quaterniond actual = Eigen::Quaterniond(pose.rotation());
  const Eigen::Quaterniond expected = Eigen::Quaterniond(
      Eigen::AngleAxisd(M_PI / 2., Eigen::Vector3d::UnitX()));
  EXPECT_TRUE(actual.isApprox(expected, 1e-3)) << expected.coeffs() << " vs\n"
                                               << actual.coeffs();
}
// Direct pose observations: repeated low-variance identity observations keep
// the estimate near identity; repeated observations of a fixed translated and
// rotated pose pull the estimate to that pose.
TEST_F(PoseTrackerTest, AddPoseObservation) {
  auto time = common::FromUniversal(1000);
  for (int i = 0; i < 300; ++i) {
    time += std::chrono::milliseconds(5);
    pose_tracker_->AddPoseObservation(
        time, Rigid3d::Identity(),
        Eigen::Matrix<double, 6, 6>::Identity() * 1e-6);
  }
  {
    Rigid3d actual;
    PoseCovariance covariance;
    pose_tracker_->GetPoseEstimateMeanAndCovariance(time, &actual, &covariance);
    EXPECT_THAT(actual, IsNearly(Rigid3d::Identity(), 1e-3));
  }
  const Rigid3d expected =
      Rigid3d::Translation(Eigen::Vector3d(1., 2., 3.)) *
      Rigid3d::Rotation(Eigen::AngleAxisd(
          M_PI / 2., Eigen::Vector3d(0., 0., 3.).normalized()));
  for (int i = 0; i < 300; ++i) {
    time += std::chrono::milliseconds(15);
    pose_tracker_->AddPoseObservation(
        time, expected, Eigen::Matrix<double, 6, 6>::Identity() * 1e-9);
  }
  time += std::chrono::milliseconds(15);
  Rigid3d actual;
  PoseCovariance covariance;
  pose_tracker_->GetPoseEstimateMeanAndCovariance(time, &actual, &covariance);
  EXPECT_THAT(actual, IsNearly(expected, 1e-3));
}
// Verifies that the pose estimate follows a sequence of odometer pose
// observations along a short 3D trajectory.
TEST_F(PoseTrackerTest, AddOdometerPoseObservation) {
  common::Time time = common::FromUniversal(0);
  // A short track combining rotations about z with translations in all three
  // axes.
  std::vector<Rigid3d> odometer_track;
  odometer_track.push_back(Rigid3d::Identity());
  odometer_track.push_back(
      Rigid3d::Rotation(Eigen::AngleAxisd(0.3, Eigen::Vector3d::UnitZ())));
  odometer_track.push_back(
      Rigid3d::Translation(Eigen::Vector3d(0.2, 0., 0.)) *
      Rigid3d::Rotation(Eigen::AngleAxisd(0.3, Eigen::Vector3d::UnitZ())));
  odometer_track.push_back(
      Rigid3d::Translation(Eigen::Vector3d(0.3, 0.1, 0.)) *
      Rigid3d::Rotation(Eigen::AngleAxisd(0.6, Eigen::Vector3d::UnitZ())));
  odometer_track.push_back(
      Rigid3d::Translation(Eigen::Vector3d(0.2, 0.2, 0.1)) *
      Rigid3d::Rotation(Eigen::AngleAxisd(0.3, Eigen::Vector3d::UnitZ())));
  odometer_track.push_back(
      Rigid3d::Translation(Eigen::Vector3d(0.1, 0.2, 0.2)) *
      Rigid3d::Rotation(Eigen::AngleAxisd(0.6, Eigen::Vector3d::UnitZ())));
  odometer_track.push_back(Rigid3d::Translation(Eigen::Vector3d(0., 0.1, 0.2)));
  Rigid3d actual;
  PoseCovariance unused_covariance;
  // Feed each odometer pose one second apart; after each observation the
  // estimate should be close to the observed pose.
  for (const Rigid3d& pose : odometer_track) {
    time += std::chrono::seconds(1);
    pose_tracker_->AddOdometerPoseObservation(
        time, pose, kOdometerVariance * PoseCovariance::Identity());
    pose_tracker_->GetPoseEstimateMeanAndCovariance(time, &actual,
                                                    &unused_covariance);
    EXPECT_THAT(actual, IsNearly(pose, 1e-2));
  }
  // Sanity check that the test has signal:
  EXPECT_THAT(actual, Not(IsNearly(odometer_track[0], 1e-2)));
}
} // namespace
} // namespace kalman_filter
} // namespace cartographer
<|start_filename|>cartographer/mapping_3d/motion_filter.h<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CARTOGRAPHER_MAPPING_3D_MOTION_FILTER_H_
#define CARTOGRAPHER_MAPPING_3D_MOTION_FILTER_H_
#include <limits>
#include "cartographer/common/lua_parameter_dictionary.h"
#include "cartographer/common/time.h"
#include "cartographer/mapping_3d/proto/motion_filter_options.pb.h"
#include "cartographer/transform/rigid_transform.h"
namespace cartographer {
namespace mapping_3d {
// Creates a proto::MotionFilterOptions instance from the values in
// 'parameter_dictionary'.
proto::MotionFilterOptions CreateMotionFilterOptions(
    common::LuaParameterDictionary* parameter_dictionary);

// Takes poses as input and filters them to get fewer poses.
class MotionFilter {
 public:
  explicit MotionFilter(const proto::MotionFilterOptions& options);

  // If the accumulated motion (linear, rotational, or time) is above the
  // threshold, returns false. Otherwise the relative motion is accumulated and
  // true is returned.
  bool IsSimilar(common::Time time, const transform::Rigid3d& pose);

 private:
  const proto::MotionFilterOptions options_;
  int num_total_ = 0;      // Number of poses passed to IsSimilar().
  int num_different_ = 0;  // Number of poses that were not filtered out.
  common::Time last_time_;        // Time of the last pose that was kept.
  transform::Rigid3d last_pose_;  // Last pose that was kept.
};
} // namespace mapping_3d
} // namespace cartographer
#endif // CARTOGRAPHER_MAPPING_3D_MOTION_FILTER_H_
<|start_filename|>cartographer/transform/transform_interpolation_buffer.h<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CARTOGRAPHER_TRANSFORM_TRANSFORM_INTERPOLATION_BUFFER_H_
#define CARTOGRAPHER_TRANSFORM_TRANSFORM_INTERPOLATION_BUFFER_H_
#include <deque>
#include <memory>
#include "cartographer/common/time.h"
#include "cartographer/proto/trajectory.pb.h"
#include "cartographer/transform/rigid_transform.h"
namespace cartographer {
namespace transform {
// A time-ordered buffer of transforms that supports interpolated lookups.
class TransformInterpolationBuffer {
 public:
  // Creates a buffer containing all poses of 'trajectory', in order.
  static std::unique_ptr<TransformInterpolationBuffer> FromTrajectory(
      const cartographer::proto::Trajectory& trajectory);

  // Adds a new transform to the buffer and removes the oldest transform if the
  // buffer size limit is exceeded.
  void Push(common::Time time, const transform::Rigid3d& transform);

  // Returns true if an interpolated transform can be computed at 'time'.
  bool Has(common::Time time) const;

  // Returns an interpolated transform at 'time'. CHECK()s that a transform at
  // 'time' is available.
  transform::Rigid3d Lookup(common::Time time) const;

  // Returns the timestamp of the earliest transform in the buffer or 0 if the
  // buffer is empty.
  common::Time earliest_time() const;

  // Returns the timestamp of the latest transform in the buffer or 0 if the
  // buffer is empty.
  common::Time latest_time() const;

  // Returns true if the buffer is empty.
  bool empty() const;

 private:
  // A transform paired with the time it was observed at.
  struct TimestampedTransform {
    common::Time time;
    transform::Rigid3d transform;
  };
  std::deque<TimestampedTransform> deque_;
};
} // namespace transform
} // namespace cartographer
#endif // CARTOGRAPHER_TRANSFORM_TRANSFORM_INTERPOLATION_BUFFER_H_
<|start_filename|>cartographer/mapping_3d/optimizing_local_trajectory_builder_options.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/mapping_3d/optimizing_local_trajectory_builder_options.h"
namespace cartographer {
namespace mapping_3d {
// Creates a proto::OptimizingLocalTrajectoryBuilderOptions instance from the
// values in 'parameter_dictionary'.
proto::OptimizingLocalTrajectoryBuilderOptions
CreateOptimizingLocalTrajectoryBuilderOptions(
    common::LuaParameterDictionary* const parameter_dictionary) {
  proto::OptimizingLocalTrajectoryBuilderOptions options;
  // All options of this proto are doubles read straight from the dictionary.
  const auto read_double = [parameter_dictionary](const char* key) {
    return parameter_dictionary->GetDouble(key);
  };
  options.set_high_resolution_grid_scale(
      read_double("high_resolution_grid_scale"));
  options.set_low_resolution_grid_scale(
      read_double("low_resolution_grid_scale"));
  options.set_velocity_scale(read_double("velocity_scale"));
  options.set_translation_scale(read_double("translation_scale"));
  options.set_rotation_scale(read_double("rotation_scale"));
  options.set_odometry_translation_scale(
      read_double("odometry_translation_scale"));
  options.set_odometry_rotation_scale(read_double("odometry_rotation_scale"));
  return options;
}
} // namespace mapping_3d
} // namespace cartographer
<|start_filename|>cartographer/mapping_3d/scan_matching/real_time_correlative_scan_matcher.h<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// A voxel accurate scan matcher, exhaustively evaluating the scan matching
// search space.
#ifndef CARTOGRAPHER_MAPPING_3D_SCAN_MATCHING_REAL_TIME_CORRELATIVE_SCAN_MATCHER_H_
#define CARTOGRAPHER_MAPPING_3D_SCAN_MATCHING_REAL_TIME_CORRELATIVE_SCAN_MATCHER_H_
#include <vector>
#include "Eigen/Core"
#include "cartographer/mapping_2d/scan_matching/proto/real_time_correlative_scan_matcher_options.pb.h"
#include "cartographer/mapping_3d/hybrid_grid.h"
#include "cartographer/sensor/point_cloud.h"
namespace cartographer {
namespace mapping_3d {
namespace scan_matching {
// Exhaustively evaluates candidate poses around an initial estimate against a
// HybridGrid and returns the best-scoring one.
class RealTimeCorrelativeScanMatcher {
 public:
  explicit RealTimeCorrelativeScanMatcher(
      const mapping_2d::scan_matching::proto::
          RealTimeCorrelativeScanMatcherOptions& options);

  RealTimeCorrelativeScanMatcher(const RealTimeCorrelativeScanMatcher&) =
      delete;
  RealTimeCorrelativeScanMatcher& operator=(
      const RealTimeCorrelativeScanMatcher&) = delete;

  // Aligns 'point_cloud' within the 'hybrid_grid' given an
  // 'initial_pose_estimate' then updates 'pose_estimate' with the result and
  // returns the score.
  float Match(const transform::Rigid3d& initial_pose_estimate,
              const sensor::PointCloud& point_cloud,
              const HybridGrid& hybrid_grid,
              transform::Rigid3d* pose_estimate) const;

 private:
  // Generates the candidate transforms for the exhaustive search at
  // 'resolution'. NOTE(review): presumably derived from the search window in
  // 'options_' -- confirm against the .cc.
  std::vector<transform::Rigid3f> GenerateExhaustiveSearchTransforms(
      float resolution, const sensor::PointCloud& point_cloud) const;

  // Scores one candidate 'transform' by evaluating
  // 'transformed_point_cloud' against 'hybrid_grid'.
  float ScoreCandidate(const HybridGrid& hybrid_grid,
                       const sensor::PointCloud& transformed_point_cloud,
                       const transform::Rigid3f& transform) const;

  const mapping_2d::scan_matching::proto::RealTimeCorrelativeScanMatcherOptions
      options_;
};
} // namespace scan_matching
} // namespace mapping_3d
} // namespace cartographer
#endif // CARTOGRAPHER_MAPPING_3D_SCAN_MATCHING_REAL_TIME_CORRELATIVE_SCAN_MATCHER_H_
<|start_filename|>cartographer/mapping_3d/kalman_local_trajectory_builder.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/mapping_3d/kalman_local_trajectory_builder.h"
#include "cartographer/common/make_unique.h"
#include "cartographer/common/time.h"
#include "cartographer/kalman_filter/proto/pose_tracker_options.pb.h"
#include "cartographer/mapping_2d/scan_matching/proto/real_time_correlative_scan_matcher_options.pb.h"
#include "cartographer/mapping_3d/proto/local_trajectory_builder_options.pb.h"
#include "cartographer/mapping_3d/proto/submaps_options.pb.h"
#include "cartographer/mapping_3d/scan_matching/proto/ceres_scan_matcher_options.pb.h"
#include "glog/logging.h"
namespace cartographer {
namespace mapping_3d {
// Constructs the builder from 'options'. Note that the pose tracker is not
// created here; it is lazily constructed on the first call to AddImuData() or
// AddOdometerPose().
KalmanLocalTrajectoryBuilder::KalmanLocalTrajectoryBuilder(
    const proto::LocalTrajectoryBuilderOptions& options)
    : options_(options),
      submaps_(common::make_unique<Submaps>(options.submaps_options())),
      scan_matcher_pose_estimate_(transform::Rigid3d::Identity()),
      motion_filter_(options.motion_filter_options()),
      real_time_correlative_scan_matcher_(
          common::make_unique<scan_matching::RealTimeCorrelativeScanMatcher>(
              options_.kalman_local_trajectory_builder_options()
                  .real_time_correlative_scan_matcher_options())),
      ceres_scan_matcher_(common::make_unique<scan_matching::CeresScanMatcher>(
          options_.ceres_scan_matcher_options())),
      num_accumulated_(0),
      first_pose_prediction_(transform::Rigid3f::Identity()),
      // Start with an empty accumulated laser fan at the origin.
      accumulated_laser_fan_{Eigen::Vector3f::Zero(), {}, {}} {}
KalmanLocalTrajectoryBuilder::~KalmanLocalTrajectoryBuilder() {}

// Returns the submap collection owned by this builder.
mapping_3d::Submaps* KalmanLocalTrajectoryBuilder::submaps() {
  return submaps_.get();
}

// Returns the pose tracker, or nullptr if no IMU or odometry data has
// initialized it yet (see AddImuData() and AddOdometerPose()).
kalman_filter::PoseTracker* KalmanLocalTrajectoryBuilder::pose_tracker() const {
  return pose_tracker_.get();
}
// Feeds one IMU measurement (linear acceleration and angular velocity, both
// at 'time') into the pose tracker, creating the tracker on the first call.
void KalmanLocalTrajectoryBuilder::AddImuData(
    const common::Time time, const Eigen::Vector3d& linear_acceleration,
    const Eigen::Vector3d& angular_velocity) {
  // Lazily create the pose tracker so it starts at the time of the first
  // observation.
  if (!pose_tracker_) {
    pose_tracker_ = common::make_unique<kalman_filter::PoseTracker>(
        options_.kalman_local_trajectory_builder_options()
            .pose_tracker_options(),
        kalman_filter::PoseTracker::ModelFunction::k3D, time);
  }
  pose_tracker_->AddImuLinearAccelerationObservation(time, linear_acceleration);
  pose_tracker_->AddImuAngularVelocityObservation(time, angular_velocity);
  transform::Rigid3d pose_estimate;
  kalman_filter::PoseCovariance unused_covariance_estimate;
  // NOTE(review): the estimate is discarded; presumably this call is kept for
  // its side effect of advancing the filter to 'time' -- confirm before
  // removing.
  pose_tracker_->GetPoseEstimateMeanAndCovariance(time, &pose_estimate,
                                                  &unused_covariance_estimate);
}
// Accumulates 'laser_fan' (expressed in the tracking frame at 'time') until
// 'scans_per_accumulation' scans have been gathered, then hands the combined
// fan to AddAccumulatedLaserFan3D(). Returns the insertion result of that
// call, or nullptr while still accumulating or before the pose tracker
// exists.
std::unique_ptr<KalmanLocalTrajectoryBuilder::InsertionResult>
KalmanLocalTrajectoryBuilder::AddLaserFan3D(
    const common::Time time, const sensor::LaserFan3D& laser_fan) {
  // Scans cannot be processed before IMU or odometry data has created the
  // pose tracker.
  if (!pose_tracker_) {
    LOG(INFO) << "PoseTracker not yet initialized.";
    return nullptr;
  }
  transform::Rigid3d pose_prediction;
  kalman_filter::PoseCovariance unused_covariance_prediction;
  pose_tracker_->GetPoseEstimateMeanAndCovariance(
      time, &pose_prediction, &unused_covariance_prediction);
  // Scans are accumulated in the tracking frame of the first scan of a cycle;
  // remember that frame's predicted pose and reset the accumulator.
  if (num_accumulated_ == 0) {
    first_pose_prediction_ = pose_prediction.cast<float>();
    accumulated_laser_fan_ =
        sensor::LaserFan3D{Eigen::Vector3f::Zero(), {}, {}};
  }
  // Motion of the tracking frame since the first scan of this cycle.
  const transform::Rigid3f tracking_delta =
      first_pose_prediction_.inverse() * pose_prediction.cast<float>();
  const sensor::LaserFan3D laser_fan_in_first_tracking =
      sensor::TransformLaserFan3D(laser_fan, tracking_delta);
  // Keep returns whose range lies in [laser_min_range, laser_max_range];
  // returns below the minimum range are dropped entirely.
  for (const Eigen::Vector3f& laser_return :
       laser_fan_in_first_tracking.returns) {
    const Eigen::Vector3f delta =
        laser_return - laser_fan_in_first_tracking.origin;
    const float range = delta.norm();
    if (range >= options_.laser_min_range()) {
      if (range <= options_.laser_max_range()) {
        accumulated_laser_fan_.returns.push_back(laser_return);
      } else {
        // We insert a ray cropped to 'laser_max_range' as a miss for hits
        // beyond the maximum range. This way the free space up to the maximum
        // range will be updated.
        accumulated_laser_fan_.misses.push_back(
            laser_fan_in_first_tracking.origin +
            options_.laser_max_range() / range * delta);
      }
    }
  }
  ++num_accumulated_;
  // Once enough scans have been accumulated, match and insert them as one
  // unit, transformed into the tracking frame of the newest scan.
  if (num_accumulated_ >= options_.scans_per_accumulation()) {
    num_accumulated_ = 0;
    return AddAccumulatedLaserFan3D(
        time, sensor::TransformLaserFan3D(accumulated_laser_fan_,
                                          tracking_delta.inverse()));
  }
  return nullptr;
}
// Scan-matches the accumulated 'laser_fan_in_tracking' against the current
// submaps, fuses the result into the pose tracker, updates
// 'last_pose_estimate_', and inserts the fan into the submaps. Returns
// nullptr if the fan is empty after filtering or if the motion filter drops
// the scan (see InsertIntoSubmap()).
std::unique_ptr<KalmanLocalTrajectoryBuilder::InsertionResult>
KalmanLocalTrajectoryBuilder::AddAccumulatedLaserFan3D(
    const common::Time time, const sensor::LaserFan3D& laser_fan_in_tracking) {
  // Reduce the point count with a fixed-size voxel filter before matching.
  const sensor::LaserFan3D filtered_laser_fan = {
      laser_fan_in_tracking.origin,
      sensor::VoxelFiltered(laser_fan_in_tracking.returns,
                            options_.laser_voxel_filter_size()),
      sensor::VoxelFiltered(laser_fan_in_tracking.misses,
                            options_.laser_voxel_filter_size())};
  if (filtered_laser_fan.returns.empty()) {
    LOG(WARNING) << "Dropped empty laser scanner point cloud.";
    return nullptr;
  }
  // The filter prediction is the starting point for scan matching.
  transform::Rigid3d pose_prediction;
  kalman_filter::PoseCovariance covariance_prediction;
  pose_tracker_->GetPoseEstimateMeanAndCovariance(time, &pose_prediction,
                                                  &covariance_prediction);
  transform::Rigid3d initial_ceres_pose = pose_prediction;
  sensor::AdaptiveVoxelFilter adaptive_voxel_filter(
      options_.high_resolution_adaptive_voxel_filter_options());
  const sensor::PointCloud filtered_point_cloud_in_tracking =
      adaptive_voxel_filter.Filter(filtered_laser_fan.returns);
  // Optionally refine the initial pose with the real-time correlative scan
  // matcher before running Ceres.
  if (options_.kalman_local_trajectory_builder_options()
          .use_online_correlative_scan_matching()) {
    real_time_correlative_scan_matcher_->Match(
        pose_prediction, filtered_point_cloud_in_tracking,
        submaps_->high_resolution_matching_grid(), &initial_ceres_pose);
  }
  transform::Rigid3d pose_observation;
  kalman_filter::PoseCovariance covariance_observation;
  ceres::Solver::Summary summary;
  sensor::AdaptiveVoxelFilter low_resolution_adaptive_voxel_filter(
      options_.low_resolution_adaptive_voxel_filter_options());
  const sensor::PointCloud low_resolution_point_cloud_in_tracking =
      low_resolution_adaptive_voxel_filter.Filter(filtered_laser_fan.returns);
  // Refine against both the high and the low resolution matching grids.
  ceres_scan_matcher_->Match(scan_matcher_pose_estimate_, initial_ceres_pose,
                             {{&filtered_point_cloud_in_tracking,
                               &submaps_->high_resolution_matching_grid()},
                              {&low_resolution_point_cloud_in_tracking,
                               &submaps_->low_resolution_matching_grid()}},
                             &pose_observation, &covariance_observation,
                             &summary);
  // Fuse the matched pose back into the filter and read out the fused
  // estimate.
  pose_tracker_->AddPoseObservation(time, pose_observation,
                                    covariance_observation);
  kalman_filter::PoseCovariance covariance_estimate;
  pose_tracker_->GetPoseEstimateMeanAndCovariance(
      time, &scan_matcher_pose_estimate_, &covariance_estimate);
  last_pose_estimate_ = {
      time,
      {pose_prediction, covariance_prediction},
      {pose_observation, covariance_observation},
      {scan_matcher_pose_estimate_, covariance_estimate},
      scan_matcher_pose_estimate_,
      sensor::TransformPointCloud(filtered_laser_fan.returns,
                                  pose_observation.cast<float>())};
  return InsertIntoSubmap(time, filtered_laser_fan, pose_observation,
                          covariance_estimate);
}
// Feeds one odometry-derived pose observation at 'time' with its
// 'covariance' into the pose tracker, creating the tracker on the first
// observation (consistent with AddImuData()).
void KalmanLocalTrajectoryBuilder::AddOdometerPose(
    const common::Time time, const transform::Rigid3d& pose,
    const kalman_filter::PoseCovariance& covariance) {
  if (!pose_tracker_) {
    // Lazily construct the pose tracker. Use common::make_unique instead of
    // a raw 'new' for consistency with AddImuData().
    pose_tracker_ = common::make_unique<kalman_filter::PoseTracker>(
        options_.kalman_local_trajectory_builder_options()
            .pose_tracker_options(),
        kalman_filter::PoseTracker::ModelFunction::k3D, time);
  }
  pose_tracker_->AddOdometerPoseObservation(time, pose, covariance);
}
// Returns the result of the most recent scan matching cycle (set by
// AddAccumulatedLaserFan3D()).
const KalmanLocalTrajectoryBuilder::PoseEstimate&
KalmanLocalTrajectoryBuilder::pose_estimate() const {
  return last_pose_estimate_;
}

// Forwards 'trajectory_node_index' to the submap collection, which uses it
// for visualization.
void KalmanLocalTrajectoryBuilder::AddTrajectoryNodeIndex(
    int trajectory_node_index) {
  submaps_->AddTrajectoryNodeIndex(trajectory_node_index);
}
// Inserts 'laser_fan_in_tracking' (transformed by 'pose_observation') into
// the submaps, unless the motion filter decides the scan is too similar to
// the last inserted one. Returns nullptr when the scan is dropped, otherwise
// an InsertionResult describing what was inserted where.
std::unique_ptr<KalmanLocalTrajectoryBuilder::InsertionResult>
KalmanLocalTrajectoryBuilder::InsertIntoSubmap(
    const common::Time time, const sensor::LaserFan3D& laser_fan_in_tracking,
    const transform::Rigid3d& pose_observation,
    const kalman_filter::PoseCovariance& covariance_estimate) {
  // Scans that barely moved since the last inserted one carry little new
  // information; drop them.
  if (motion_filter_.IsSimilar(time, pose_observation)) {
    return nullptr;
  }
  // Capture the submap used for matching and the submaps receiving the scan
  // before the insertion below may alter the submap collection.
  const Submap* const matching_submap =
      submaps_->Get(submaps_->matching_index());
  std::vector<const Submap*> insertion_submaps;
  for (const int submap_index : submaps_->insertion_indices()) {
    insertion_submaps.push_back(submaps_->Get(submap_index));
  }
  const sensor::LaserFan3D laser_fan_in_map = sensor::TransformLaserFan3D(
      laser_fan_in_tracking, pose_observation.cast<float>());
  submaps_->InsertLaserFan(laser_fan_in_map);
  return std::unique_ptr<InsertionResult>(new InsertionResult{
      time, laser_fan_in_tracking, pose_observation, covariance_estimate,
      submaps_.get(), matching_submap, insertion_submaps});
}
} // namespace mapping_3d
} // namespace cartographer
<|start_filename|>cartographer/mapping_3d/scan_matching/precomputation_grid.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/mapping_3d/scan_matching/precomputation_grid.h"
#include <algorithm>
#include "Eigen/Core"
#include "cartographer/common/math.h"
#include "cartographer/mapping/probability_values.h"
#include "glog/logging.h"
namespace cartographer {
namespace mapping_3d {
namespace scan_matching {
namespace {
// C++11 defines that integer division rounds towards zero. For index math, we
// actually need it to round towards negative infinity (floor division).
// Right-shifting a negative signed integer is implementation-defined before
// C++20, so compute the floor portably: biasing odd handling for negative
// inputs turns truncating division into floor division.
inline int DivideByTwoRoundingTowardsNegativeInfinity(const int value) {
  return (value - (value < 0 ? 1 : 0)) / 2;
}
// Computes the half resolution index corresponding to the full resolution
// 'cell_index' by floor-dividing each of the three components by two.
Eigen::Array3i CellIndexAtHalfResolution(const Eigen::Array3i& cell_index) {
  return Eigen::Array3i(
      DivideByTwoRoundingTowardsNegativeInfinity(cell_index[0]),
      DivideByTwoRoundingTowardsNegativeInfinity(cell_index[1]),
      DivideByTwoRoundingTowardsNegativeInfinity(cell_index[2]));
}
} // namespace
// Converts 'hybrid_grid' into a PrecomputationGrid with the same resolution
// and origin, mapping each cell's probability linearly from
// [kMinProbability, kMaxProbability] to the integer range [0, 255].
PrecomputationGrid ConvertToPrecomputationGrid(const HybridGrid& hybrid_grid) {
  PrecomputationGrid result(hybrid_grid.resolution(), hybrid_grid.origin());
  for (auto it = HybridGrid::Iterator(hybrid_grid); !it.Done(); it.Next()) {
    const int cell_value = common::RoundToInt(
        (mapping::ValueToProbability(it.GetValue()) -
         mapping::kMinProbability) *
        (255.f / (mapping::kMaxProbability - mapping::kMinProbability)));
    // The linear mapping above must land in [0, 255] by construction.
    CHECK_GE(cell_value, 0);
    CHECK_LE(cell_value, 255);
    *result.mutable_value(it.GetCellIndex()) = cell_value;
  }
  return result;
}
// Builds the next grid in the precomputation stack from 'grid': every cell of
// the result holds the maximum over the 8 cells of 'grid' offset by
// {0, 'shift'} in each dimension. If 'half_resolution' is true, output
// indices are additionally mapped to half resolution.
PrecomputationGrid PrecomputeGrid(const PrecomputationGrid& grid,
                                  const bool half_resolution,
                                  const Eigen::Array3i& shift) {
  PrecomputationGrid result(grid.resolution(), grid.origin());
  for (auto it = PrecomputationGrid::Iterator(grid); !it.Done(); it.Next()) {
    for (int i = 0; i != 8; ++i) {
      // We use this value to update 8 values in the resulting grid, at
      // position (x - {0, 'shift'}, y - {0, 'shift'}, z - {0, 'shift'}).
      // If 'shift' is 2 ** (depth - 1), where depth 0 is the original grid,
      // this results in precomputation grids analogous to the 2D case.
      const Eigen::Array3i cell_index =
          it.GetCellIndex() - shift * PrecomputationGrid::GetOctant(i);
      auto* const cell_value = result.mutable_value(
          half_resolution ? CellIndexAtHalfResolution(cell_index) : cell_index);
      // Keep the maximum so coarser grids upper-bound the finer scores.
      *cell_value = std::max(it.GetValue(), *cell_value);
    }
  }
  return result;
}
} // namespace scan_matching
} // namespace mapping_3d
} // namespace cartographer
<|start_filename|>cartographer/mapping_3d/local_trajectory_builder_options.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/mapping_3d/local_trajectory_builder_options.h"
#include "cartographer/mapping_3d/kalman_local_trajectory_builder_options.h"
#include "cartographer/mapping_3d/motion_filter.h"
#include "cartographer/mapping_3d/optimizing_local_trajectory_builder_options.h"
#include "cartographer/mapping_3d/scan_matching/ceres_scan_matcher.h"
#include "cartographer/mapping_3d/submaps.h"
#include "cartographer/sensor/voxel_filter.h"
#include "glog/logging.h"
namespace cartographer {
namespace mapping_3d {
// Creates a proto::LocalTrajectoryBuilderOptions instance from the values in
// 'parameter_dictionary', including all nested option dictionaries.
proto::LocalTrajectoryBuilderOptions CreateLocalTrajectoryBuilderOptions(
    common::LuaParameterDictionary* const parameter_dictionary) {
  proto::LocalTrajectoryBuilderOptions options;
  // Range limits and voxel filter applied to incoming laser scans.
  options.set_laser_min_range(
      parameter_dictionary->GetDouble("laser_min_range"));
  options.set_laser_max_range(
      parameter_dictionary->GetDouble("laser_max_range"));
  options.set_scans_per_accumulation(
      parameter_dictionary->GetInt("scans_per_accumulation"));
  options.set_laser_voxel_filter_size(
      parameter_dictionary->GetDouble("laser_voxel_filter_size"));
  // Nested options for the adaptive voxel filters used before matching
  // against the high and low resolution grids.
  *options.mutable_high_resolution_adaptive_voxel_filter_options() =
      sensor::CreateAdaptiveVoxelFilterOptions(
          parameter_dictionary
              ->GetDictionary("high_resolution_adaptive_voxel_filter")
              .get());
  *options.mutable_low_resolution_adaptive_voxel_filter_options() =
      sensor::CreateAdaptiveVoxelFilterOptions(
          parameter_dictionary
              ->GetDictionary("low_resolution_adaptive_voxel_filter")
              .get());
  // Nested options for scan matching, motion filtering and submaps.
  *options.mutable_ceres_scan_matcher_options() =
      scan_matching::CreateCeresScanMatcherOptions(
          parameter_dictionary->GetDictionary("ceres_scan_matcher").get());
  *options.mutable_motion_filter_options() = CreateMotionFilterOptions(
      parameter_dictionary->GetDictionary("motion_filter").get());
  *options.mutable_submaps_options() = mapping_3d::CreateSubmapsOptions(
      parameter_dictionary->GetDictionary("submaps").get());
  // Options for the two available local trajectory builder implementations.
  *options.mutable_kalman_local_trajectory_builder_options() =
      CreateKalmanLocalTrajectoryBuilderOptions(
          parameter_dictionary->GetDictionary("kalman_local_trajectory_builder")
              .get());
  *options.mutable_optimizing_local_trajectory_builder_options() =
      CreateOptimizingLocalTrajectoryBuilderOptions(
          parameter_dictionary
              ->GetDictionary("optimizing_local_trajectory_builder")
              .get());
  // Which implementation to use; must name a value of the
  // proto::LocalTrajectoryBuilderOptions::Use enum.
  const string use_string = parameter_dictionary->GetString("use");
  proto::LocalTrajectoryBuilderOptions::Use use;
  CHECK(proto::LocalTrajectoryBuilderOptions::Use_Parse(use_string, &use))
      << "Unknown local_trajectory_builder kind: " << use_string;
  options.set_use(use);
  return options;
}
} // namespace mapping_3d
} // namespace cartographer
<|start_filename|>cartographer/mapping_3d/scan_matching/fast_correlative_scan_matcher.h<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// This is an implementation of a 3D branch-and-bound algorithm similar to
// mapping_2d::FastCorrelativeScanMatcher.
#ifndef CARTOGRAPHER_MAPPING_3D_SCAN_MATCHING_FAST_CORRELATIVE_SCAN_MATCHER_H_
#define CARTOGRAPHER_MAPPING_3D_SCAN_MATCHING_FAST_CORRELATIVE_SCAN_MATCHER_H_
#include <memory>
#include <vector>
#include "Eigen/Core"
#include "cartographer/common/port.h"
#include "cartographer/mapping/trajectory_node.h"
#include "cartographer/mapping_2d/scan_matching/fast_correlative_scan_matcher.h"
#include "cartographer/mapping_3d/hybrid_grid.h"
#include "cartographer/mapping_3d/scan_matching/proto/fast_correlative_scan_matcher_options.pb.h"
#include "cartographer/mapping_3d/scan_matching/rotational_scan_matcher.h"
#include "cartographer/sensor/point_cloud.h"
namespace cartographer {
namespace mapping_3d {
namespace scan_matching {
// Creates a proto::FastCorrelativeScanMatcherOptions instance from the values
// in 'parameter_dictionary'.
proto::FastCorrelativeScanMatcherOptions
CreateFastCorrelativeScanMatcherOptions(
    common::LuaParameterDictionary* parameter_dictionary);

class PrecomputationGridStack;

// A scan discretized into cell indices, together with the pose at which it
// was discretized.
struct DiscreteScan {
  transform::Rigid3f pose;
  // Contains a vector of discretized scans for each 'depth'.
  std::vector<std::vector<Eigen::Array3i>> cell_indices_per_depth;
};
// A candidate solution in the branch-and-bound search, identified by a scan
// index and a linear offset, ordered by its score.
struct Candidate {
  Candidate(const int scan_index, const Eigen::Array3i& offset)
      : scan_index(scan_index), offset(offset) {}

  // Index into the discrete scans vectors.
  int scan_index;

  // Linear offset from the initial pose in cell indices. For lower resolution
  // candidates this is the lowest offset of the 2^depth x 2^depth x 2^depth
  // block of possibilities.
  Eigen::Array3i offset;

  // Score, higher is better.
  float score = 0.f;

  // Candidates compare by score so they can be sorted or heap-ordered.
  bool operator<(const Candidate& other) const { return score < other.score; }
  bool operator>(const Candidate& other) const { return score > other.score; }
};
// A 3D branch-and-bound scan matcher over a stack of precomputation grids
// built from 'hybrid_grid' (see the file comment above).
class FastCorrelativeScanMatcher {
 public:
  FastCorrelativeScanMatcher(
      const HybridGrid& hybrid_grid,
      const std::vector<mapping::TrajectoryNode>& nodes,
      const proto::FastCorrelativeScanMatcherOptions& options);
  ~FastCorrelativeScanMatcher();

  FastCorrelativeScanMatcher(const FastCorrelativeScanMatcher&) = delete;
  FastCorrelativeScanMatcher& operator=(const FastCorrelativeScanMatcher&) =
      delete;

  // Aligns 'coarse_point_cloud' within the 'hybrid_grid' given an
  // 'initial_pose_estimate'. If a score above 'min_score' (excluding equality)
  // is possible, true is returned, and 'score' and 'pose_estimate' are updated
  // with the result. 'fine_point_cloud' is used to compute the rotational scan
  // matcher score.
  bool Match(const transform::Rigid3d& initial_pose_estimate,
             const sensor::PointCloud& coarse_point_cloud,
             const sensor::PointCloud& fine_point_cloud, float min_score,
             float* score, transform::Rigid3d* pose_estimate) const;

 private:
  // Discretizes 'point_cloud' at 'pose' into cell indices for every depth of
  // the precomputation grid stack.
  DiscreteScan DiscretizeScan(const sensor::PointCloud& point_cloud,
                              const transform::Rigid3f& pose) const;

  // Generates the discretized scans to search over around 'initial_pose'.
  std::vector<DiscreteScan> GenerateDiscreteScans(
      const sensor::PointCloud& coarse_point_cloud,
      const sensor::PointCloud& fine_point_cloud,
      const transform::Rigid3f& initial_pose) const;

  // Creates the unscored candidates at the coarsest search resolution.
  std::vector<Candidate> GenerateLowestResolutionCandidates(
      int num_discrete_scans) const;

  // Scores 'candidates' at the given 'depth' using the matching precomputation
  // grid.
  void ScoreCandidates(int depth,
                       const std::vector<DiscreteScan>& discrete_scans,
                       std::vector<Candidate>* const candidates) const;

  // Generates and scores the candidates at the coarsest resolution.
  std::vector<Candidate> ComputeLowestResolutionCandidates(
      const std::vector<DiscreteScan>& discrete_scans) const;

  // Recursively refines 'candidates', pruning subtrees that cannot beat
  // 'min_score', and returns the best candidate found.
  Candidate BranchAndBound(const std::vector<DiscreteScan>& discrete_scans,
                           const std::vector<Candidate>& candidates,
                           int candidate_depth, float min_score) const;

  const proto::FastCorrelativeScanMatcherOptions options_;
  const float resolution_;
  // Search window sizes in cells.
  const int linear_xy_window_size_;
  const int linear_z_window_size_;
  std::unique_ptr<PrecomputationGridStack> precomputation_grid_stack_;
  RotationalScanMatcher rotational_scan_matcher_;
};
} // namespace scan_matching
} // namespace mapping_3d
} // namespace cartographer
#endif // CARTOGRAPHER_MAPPING_3D_SCAN_MATCHING_FAST_CORRELATIVE_SCAN_MATCHER_H_
<|start_filename|>cartographer/mapping_3d/submaps.h<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CARTOGRAPHER_MAPPING_3D_SUBMAPS_H_
#define CARTOGRAPHER_MAPPING_3D_SUBMAPS_H_
#include <memory>
#include <string>
#include <vector>
#include "Eigen/Geometry"
#include "cartographer/mapping/sparse_pose_graph.h"
#include "cartographer/mapping/submaps.h"
#include "cartographer/mapping_2d/laser_fan_inserter.h"
#include "cartographer/mapping_2d/probability_grid.h"
#include "cartographer/mapping_3d/hybrid_grid.h"
#include "cartographer/mapping_3d/laser_fan_inserter.h"
#include "cartographer/mapping_3d/proto/submaps_options.pb.h"
#include "cartographer/sensor/laser.h"
#include "cartographer/transform/transform.h"
namespace cartographer {
namespace mapping_3d {
// Rasterizes a horizontal slice (centered at height 'slice_z') of
// 'laser_fan_3d', transformed by 'pose', into the 2D probability grid
// 'result' using the hit/miss tables of 'laser_fan_inserter'.
void InsertIntoProbabilityGrid(
    const sensor::LaserFan3D& laser_fan_3d, const transform::Rigid3f& pose,
    const float slice_z, const mapping_2d::LaserFanInserter& laser_fan_inserter,
    mapping_2d::ProbabilityGrid* result);
// Parses SubmapsOptions from the Lua parameter dictionary.
proto::SubmapsOptions CreateSubmapsOptions(
    common::LuaParameterDictionary* parameter_dictionary);
// A 3D submap: a pair of hybrid grids sharing one 'origin'. The high
// resolution grid receives only nearby laser returns while the low resolution
// grid receives the full fan (see Submaps::InsertLaserFan).
struct Submap : public mapping::Submap {
  Submap(float high_resolution, float low_resolution,
         const Eigen::Vector3f& origin, int begin_laser_fan_index);
  HybridGrid high_resolution_hybrid_grid;
  HybridGrid low_resolution_hybrid_grid;
  // Set to true once no further laser fans will be inserted into this submap.
  bool finished = false;
  // Indices of the trajectory nodes used when visualizing this submap.
  std::vector<int> trajectory_node_indices;
};
// A container of Submaps.
class Submaps : public mapping::Submaps {
 public:
  explicit Submaps(const proto::SubmapsOptions& options);
  Submaps(const Submaps&) = delete;
  Submaps& operator=(const Submaps&) = delete;
  // Returns the submap at 'index'. CHECK-fails if 'index' is out of bounds.
  const Submap* Get(int index) const override;
  // Returns the number of submaps, including the ones still being built.
  int size() const override;
  // Renders an x-ray projection of the submap at 'index' into 'response',
  // positioned in the global map frame via 'global_submap_pose'.
  void SubmapToProto(
      int index, const std::vector<mapping::TrajectoryNode>& trajectory_nodes,
      const transform::Rigid3d& global_submap_pose,
      mapping::proto::SubmapQuery::Response* response) override;
  // Inserts 'laser_fan' into the Submap collection.
  void InsertLaserFan(const sensor::LaserFan3D& laser_fan);
  // Returns the 'high_resolution' HybridGrid to be used for matching.
  const HybridGrid& high_resolution_matching_grid() const;
  // Returns the 'low_resolution' HybridGrid to be used for matching.
  const HybridGrid& low_resolution_matching_grid() const;
  // Adds a node to be used when visualizing the submap.
  void AddTrajectoryNodeIndex(int trajectory_node_index);
 private:
  // Aggregated per-pixel statistics of voxels projected onto the xy-plane,
  // used for rendering a submap texture.
  struct PixelData {
    int min_z = INT_MAX;
    int max_z = INT_MIN;
    int count = 0;
    float probability_sum = 0.f;
    float max_probability = 0.5f;
  };
  // Creates a new submap at 'origin' and marks the submap that will no longer
  // receive insertions as finished.
  void AddSubmap(const Eigen::Vector3f& origin);
  // Fills 'accumulated_pixel_data_' from 'voxel_indices_and_probabilities_'
  // restricted to the bounding box ['min_index', 'max_index'].
  void AccumulatePixelData(const int width, const int height,
                           const Eigen::Array2i& min_index,
                           const Eigen::Array2i& max_index);
  // Extracts obstructed voxels of 'hybrid_grid' (transformed by 'transform')
  // into 'voxel_indices_and_probabilities_' and computes the pixel bounding
  // box ['min_index', 'max_index'] of their projection.
  void ExtractVoxelData(const HybridGrid& hybrid_grid,
                        const transform::Rigid3f& transform,
                        Eigen::Array2i* min_index, Eigen::Array2i* max_index);
  // Builds texture data containing interleaved value and alpha for the
  // visualization from 'accumulated_pixel_data_' into 'celldata_'.
  void ComputePixelValues(const int width, const int height);
  const proto::SubmapsOptions options_;
  std::vector<std::unique_ptr<Submap>> submaps_;
  LaserFanInserter laser_fan_inserter_;
  // Number of LaserFans inserted.
  int num_laser_fans_ = 0;
  // Number of LaserFans inserted since the last Submap was added.
  int num_laser_fans_in_last_submap_ = 0;
  // The following members are used for visualization and kept around for
  // performance reasons (mainly to avoid reallocations).
  std::vector<PixelData> accumulated_pixel_data_;
  string celldata_;
  // The first three entries of each element are a cell index and the last is
  // the corresponding probability value. We batch them together like this to
  // only have one vector and have better cache locality.
  std::vector<Eigen::Array4i> voxel_indices_and_probabilities_;
};
} // namespace mapping_3d
} // namespace cartographer
#endif // CARTOGRAPHER_MAPPING_3D_SUBMAPS_H_
<|start_filename|>cartographer/mapping_3d/local_trajectory_builder_interface.h<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CARTOGRAPHER_MAPPING_3D_LOCAL_TRAJECTORY_BUILDER_INTERFACE_H_
#define CARTOGRAPHER_MAPPING_3D_LOCAL_TRAJECTORY_BUILDER_INTERFACE_H_
#include <memory>
#include <vector>
#include "cartographer/common/time.h"
#include "cartographer/mapping/global_trajectory_builder_interface.h"
#include "cartographer/mapping_3d/submaps.h"
#include "cartographer/sensor/laser.h"
#include "cartographer/transform/rigid_transform.h"
namespace cartographer {
namespace mapping_3d {
// Interface for 3D local trajectory building: consumes IMU, laser and
// odometry data, maintains a pose estimate and inserts scans into submaps.
class LocalTrajectoryBuilderInterface {
 public:
  using PoseEstimate = mapping::GlobalTrajectoryBuilderInterface::PoseEstimate;
  // Data describing a laser scan that was inserted into the submaps.
  struct InsertionResult {
    common::Time time;
    // The laser fan in the tracking frame.
    sensor::LaserFan3D laser_fan_in_tracking;
    // Pose and covariance of the tracking frame at 'time'.
    transform::Rigid3d pose_observation;
    kalman_filter::PoseCovariance covariance_estimate;
    // Non-owning pointers into the submap collection of the builder.
    const Submaps* submaps;
    const Submap* matching_submap;
    std::vector<const Submap*> insertion_submaps;
  };
  virtual ~LocalTrajectoryBuilderInterface() {}
  LocalTrajectoryBuilderInterface(const LocalTrajectoryBuilderInterface&) =
      delete;
  LocalTrajectoryBuilderInterface& operator=(
      const LocalTrajectoryBuilderInterface&) = delete;
  virtual void AddImuData(common::Time time,
                          const Eigen::Vector3d& linear_acceleration,
                          const Eigen::Vector3d& angular_velocity) = 0;
  // Returns an InsertionResult if the scan was inserted into the submaps, or
  // nullptr otherwise (callers check for nullptr).
  virtual std::unique_ptr<InsertionResult> AddLaserFan3D(
      common::Time time, const sensor::LaserFan3D& laser_fan) = 0;
  virtual void AddOdometerPose(
      common::Time time, const transform::Rigid3d& pose,
      const kalman_filter::PoseCovariance& covariance) = 0;
  // Register a 'trajectory_node_index' from the SparsePoseGraph corresponding
  // to the latest inserted laser scan. This is used to remember which
  // trajectory node should be used to visualize a Submap.
  virtual void AddTrajectoryNodeIndex(int trajectory_node_index) = 0;
  virtual mapping_3d::Submaps* submaps() = 0;
  virtual const PoseEstimate& pose_estimate() const = 0;
  virtual kalman_filter::PoseTracker* pose_tracker() const = 0;
 protected:
  LocalTrajectoryBuilderInterface() {}
};
} // namespace mapping_3d
} // namespace cartographer
#endif // CARTOGRAPHER_MAPPING_3D_LOCAL_TRAJECTORY_BUILDER_INTERFACE_H_
<|start_filename|>cartographer/sensor/sensor_packet_period_histogram_builder.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/sensor/sensor_packet_period_histogram_builder.h"
#include "glog/logging.h"
namespace cartographer {
namespace sensor {
namespace {
// Maps a packet period given in ticks to a human-readable histogram bucket
// label. 10000 ticks correspond to the 1ms bucket boundary.
string ToBucket(int ticks) {
  struct Bucket {
    int upper_bound_ms;
    const char* label;
  };
  // Upper bounds in milliseconds, in increasing order.
  static const Bucket kBuckets[] = {
      {1, "< 1ms"},    {3, "< 3ms"},    {5, "< 5ms"},     {7, "< 7ms"},
      {10, "< 10ms"},  {30, "< 30ms"},  {100, "< 100ms"}, {500, "< 500ms"}};
  constexpr int kTicksPerMillisecond = 10000;
  for (const Bucket& bucket : kBuckets) {
    if (ticks < bucket.upper_bound_ms * kTicksPerMillisecond) {
      return bucket.label;
    }
  }
  return "> 500ms";
}
} // namespace
void SensorPacketPeriodHistogramBuilder::Add(const int trajectory_id,
                                             const int64 timestamp,
                                             const string& frame_id) {
  // Lazily create the per-trajectory map and the per-frame histogram.
  auto trajectory_it = histograms_.find(trajectory_id);
  if (trajectory_it == histograms_.end()) {
    trajectory_it =
        histograms_
            .emplace(trajectory_id,
                     std::unordered_map<string, common::BucketHistogram>())
            .first;
  }
  auto& frame_histograms = trajectory_it->second;
  if (frame_histograms.count(frame_id) == 0) {
    frame_histograms.emplace(frame_id, common::BucketHistogram());
  }
  // Record the period since the previous packet with the same key, if any.
  const Key key = std::make_pair(trajectory_id, frame_id);
  const auto last_it = last_timestamps_.find(key);
  if (last_it != last_timestamps_.end()) {
    frame_histograms.at(frame_id).Hit(ToBucket(timestamp - last_it->second));
  }
  last_timestamps_[key] = timestamp;
}
// Logs every accumulated histogram and resets all state.
void SensorPacketPeriodHistogramBuilder::LogHistogramsAndClear() {
  for (const auto& trajectory_entry : histograms_) {
    LOG(INFO) << "Printing histograms for trajectory with id "
              << trajectory_entry.first;
    for (const auto& frame_entry : trajectory_entry.second) {
      LOG(INFO) << "Sensor packet period histogram for '" << frame_entry.first
                << "' from trajectory '" << trajectory_entry.first << "':\n"
                << frame_entry.second.ToString();
    }
  }
  histograms_.clear();
  last_timestamps_.clear();
}
} // namespace sensor
} // namespace cartographer
<|start_filename|>cartographer/mapping_3d/scan_matching/rotational_scan_matcher.h<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CARTOGRAPHER_MAPPING_3D_SCAN_MATCHING_ROTATIONAL_SCAN_MATCHER_H_
#define CARTOGRAPHER_MAPPING_3D_SCAN_MATCHING_ROTATIONAL_SCAN_MATCHER_H_
#include <vector>
#include "Eigen/Geometry"
#include "cartographer/mapping/trajectory_node.h"
#include "cartographer/sensor/point_cloud.h"
namespace cartographer {
namespace mapping_3d {
namespace scan_matching {
// Matches point clouds against a reference histogram ('histogram_')
// accumulated from a set of trajectory nodes.
class RotationalScanMatcher {
 public:
  // Builds the reference histogram with 'histogram_size' bins from 'nodes'.
  explicit RotationalScanMatcher(
      const std::vector<mapping::TrajectoryNode>& nodes, int histogram_size);
  RotationalScanMatcher(const RotationalScanMatcher&) = delete;
  RotationalScanMatcher& operator=(const RotationalScanMatcher&) = delete;
  // Scores how well a 'point_cloud' can be understood as rotated by certain
  // 'angles' relative to the 'nodes'. Each angle results in a score between
  // 0 (worst) and 1 (best).
  std::vector<float> Match(const sensor::PointCloud& point_cloud,
                           const std::vector<float>& angles) const;
 private:
  // Scores a single scan histogram against the reference 'histogram_'.
  float MatchHistogram(const Eigen::VectorXf& scan_histogram) const;
  // Reference histogram built from 'nodes' at construction time.
  Eigen::VectorXf histogram_;
};
} // namespace scan_matching
} // namespace mapping_3d
} // namespace cartographer
#endif // CARTOGRAPHER_MAPPING_3D_SCAN_MATCHING_ROTATIONAL_SCAN_MATCHER_H_
<|start_filename|>cartographer/mapping/trajectory_connectivity_test.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/mapping/trajectory_connectivity.h"
#include <algorithm>
#include <memory>
#include <vector>
#include "cartographer/common/lua_parameter_dictionary_test_helpers.h"
#include "cartographer/mapping_2d/submaps.h"
#include "gtest/gtest.h"
namespace cartographer {
namespace mapping {
namespace {
class TrajectoryConnectivityTest : public ::testing::Test {
 protected:
  // Creates 10 separate Submaps objects, each standing in for one trajectory.
  // The Lua dictionary only needs to be complete enough to construct
  // mapping_2d::Submaps.
  TrajectoryConnectivityTest() {
    for (int i = 0; i < 10; ++i) {
      auto parameter_dictionary = common::MakeDictionary(R"text(
          return {
            resolution = 0.05,
            half_length = 10.,
            num_laser_fans = 10,
            output_debug_images = false,
            laser_fan_inserter = {
              insert_free_space = true,
              hit_probability = 0.53,
              miss_probability = 0.495,
            },
          })text");
      trajectories_.emplace_back(new mapping_2d::Submaps(
          mapping_2d::CreateSubmapsOptions(parameter_dictionary.get())));
    }
  }
  // Helper function to avoid .get() noise.
  const Submaps* trajectory(int index) { return trajectories_.at(index).get(); }
  // Object under test.
  TrajectoryConnectivity trajectory_connectivity_;
  // Owners of the fake trajectories handed to 'trajectory_connectivity_'.
  std::vector<std::unique_ptr<const Submaps>> trajectories_;
};
TEST_F(TrajectoryConnectivityTest, TransitivelyConnected) {
  // Before any Connect() call, no pair (including a trajectory with itself)
  // is reported as connected.
  for (int i = 0; i < 10; ++i) {
    for (int j = 0; j < 10; ++j) {
      EXPECT_FALSE(trajectory_connectivity_.TransitivelyConnected(
          trajectory(i), trajectory(j)));
    }
  }
  // Connect some stuff up.
  trajectory_connectivity_.Connect(trajectory(0), trajectory(1));
  EXPECT_TRUE(trajectory_connectivity_.TransitivelyConnected(trajectory(0),
                                                             trajectory(1)));
  trajectory_connectivity_.Connect(trajectory(8), trajectory(9));
  EXPECT_TRUE(trajectory_connectivity_.TransitivelyConnected(trajectory(8),
                                                             trajectory(9)));
  EXPECT_FALSE(trajectory_connectivity_.TransitivelyConnected(trajectory(0),
                                                              trajectory(9)));
  // Bridging {0, 1} and {8, 9} connects all four transitively.
  trajectory_connectivity_.Connect(trajectory(1), trajectory(8));
  const int first_group[] = {0, 1};
  const int second_group[] = {8, 9};
  for (const int i : first_group) {
    for (const int j : second_group) {
      EXPECT_TRUE(trajectory_connectivity_.TransitivelyConnected(
          trajectory(i), trajectory(j)));
    }
  }
}
TEST_F(TrajectoryConnectivityTest, EmptyConnectedComponents) {
  // Without any Connect() calls there are no components at all.
  EXPECT_EQ(0, trajectory_connectivity_.ConnectedComponents().size());
}
TEST_F(TrajectoryConnectivityTest, ConnectedComponents) {
  // Build two components: {0..4} around 0 and {5..9} around 5.
  for (int i = 0; i <= 4; ++i) {
    trajectory_connectivity_.Connect(trajectory(0), trajectory(i));
  }
  for (int i = 5; i <= 9; ++i) {
    trajectory_connectivity_.Connect(trajectory(5), trajectory(i));
  }
  auto connections = trajectory_connectivity_.ConnectedComponents();
  ASSERT_EQ(2, connections.size());
  // Membership helper; the order of the two clusters is arbitrary.
  const auto contains = [](const std::vector<const Submaps*>& cluster,
                           const Submaps* submaps) {
    return std::find(cluster.begin(), cluster.end(), submaps) != cluster.end();
  };
  const bool zero_cluster_is_first = contains(connections[0], trajectory(0));
  const auto& zero_cluster =
      zero_cluster_is_first ? connections[0] : connections[1];
  const auto& five_cluster =
      zero_cluster_is_first ? connections[1] : connections[0];
  for (int i = 0; i <= 9; ++i) {
    EXPECT_EQ(i <= 4, contains(zero_cluster, trajectory(i)));
    EXPECT_EQ(i > 4, contains(five_cluster, trajectory(i)));
  }
}
TEST_F(TrajectoryConnectivityTest, ConnectionCount) {
  // Repeated Connect() calls accumulate; argument order must not matter.
  for (int num_connections = 1; num_connections <= 10; ++num_connections) {
    trajectory_connectivity_.Connect(trajectory(0), trajectory(1));
    EXPECT_EQ(num_connections, trajectory_connectivity_.ConnectionCount(
                                   trajectory(1), trajectory(0)));
  }
  // Pairs that were never connected report zero connections.
  for (int i = 1; i < 9; ++i) {
    EXPECT_EQ(0, trajectory_connectivity_.ConnectionCount(trajectory(i),
                                                          trajectory(i + 1)));
  }
}
} // namespace
} // namespace mapping
} // namespace cartographer
<|start_filename|>cartographer/mapping_3d/global_trajectory_builder.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/mapping_3d/global_trajectory_builder.h"
#include "cartographer/mapping_3d/local_trajectory_builder.h"
namespace cartographer {
namespace mapping_3d {
// Forwards sensor data to a LocalTrajectoryBuilder and feeds accepted scans
// and IMU data into the 'sparse_pose_graph'.
GlobalTrajectoryBuilder::GlobalTrajectoryBuilder(
    const proto::LocalTrajectoryBuilderOptions& options,
    SparsePoseGraph* sparse_pose_graph)
    : sparse_pose_graph_(sparse_pose_graph),
      local_trajectory_builder_(CreateLocalTrajectoryBuilder(options)) {}
// NOTE(review): defined out-of-line, presumably so the unique_ptr member can
// be destroyed where LocalTrajectoryBuilder is complete — confirm against the
// header.
GlobalTrajectoryBuilder::~GlobalTrajectoryBuilder() {}
// Accessors that simply expose state owned by the local trajectory builder.
const mapping_3d::Submaps* GlobalTrajectoryBuilder::submaps() const {
  return local_trajectory_builder_->submaps();
}
mapping_3d::Submaps* GlobalTrajectoryBuilder::submaps() {
  return local_trajectory_builder_->submaps();
}
kalman_filter::PoseTracker* GlobalTrajectoryBuilder::pose_tracker() const {
  return local_trajectory_builder_->pose_tracker();
}
// IMU data is forwarded to both the local trajectory builder and the sparse
// pose graph.
void GlobalTrajectoryBuilder::AddImuData(
    const common::Time time, const Eigen::Vector3d& linear_acceleration,
    const Eigen::Vector3d& angular_velocity) {
  local_trajectory_builder_->AddImuData(time, linear_acceleration,
                                        angular_velocity);
  sparse_pose_graph_->AddImuData(time, linear_acceleration, angular_velocity);
}
// Passes the scan to local scan matching; only scans that were actually
// inserted into the submaps become nodes of the sparse pose graph.
void GlobalTrajectoryBuilder::AddLaserFan3D(
    const common::Time time, const sensor::LaserFan3D& laser_fan) {
  const auto insertion_result =
      local_trajectory_builder_->AddLaserFan3D(time, laser_fan);
  if (insertion_result != nullptr) {
    const int trajectory_node_index = sparse_pose_graph_->AddScan(
        insertion_result->time, insertion_result->laser_fan_in_tracking,
        insertion_result->pose_observation,
        insertion_result->covariance_estimate, insertion_result->submaps,
        insertion_result->matching_submap,
        insertion_result->insertion_submaps);
    // Let the local builder remember which node visualizes the submap.
    local_trajectory_builder_->AddTrajectoryNodeIndex(trajectory_node_index);
  }
}
// Odometry is only used by the local trajectory builder; it is not forwarded
// to the sparse pose graph.
void GlobalTrajectoryBuilder::AddOdometerPose(
    const common::Time time, const transform::Rigid3d& pose,
    const kalman_filter::PoseCovariance& covariance) {
  local_trajectory_builder_->AddOdometerPose(time, pose, covariance);
}
// Latest pose estimate as computed by the local trajectory builder.
const GlobalTrajectoryBuilder::PoseEstimate&
GlobalTrajectoryBuilder::pose_estimate() const {
  return local_trajectory_builder_->pose_estimate();
}
} // namespace mapping_3d
} // namespace cartographer
<|start_filename|>cartographer/sensor/configuration.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/sensor/configuration.h"
#include "Eigen/Core"
#include "Eigen/Geometry"
#include "cartographer/sensor/proto/configuration.pb.h"
#include "cartographer/transform/proto/transform.pb.h"
#include "cartographer/transform/rigid_transform.h"
#include "cartographer/transform/transform.h"
#include "glog/logging.h"
namespace cartographer {
namespace sensor {
// Reads 'frame_id' and 'transform' for a single sensor from the dictionary.
proto::Configuration::Sensor CreateSensorConfiguration(
    common::LuaParameterDictionary* const parameter_dictionary) {
  proto::Configuration::Sensor sensor_configuration;
  sensor_configuration.set_frame_id(
      parameter_dictionary->GetString("frame_id"));
  const auto sensor_transform = transform::FromDictionary(
      parameter_dictionary->GetDictionary("transform").get());
  *sensor_configuration.mutable_transform() =
      transform::ToProto(sensor_transform);
  return sensor_configuration;
}
// Builds the full sensor configuration; each array entry of the dictionary
// describes one sensor.
proto::Configuration CreateConfiguration(
    common::LuaParameterDictionary* const parameter_dictionary) {
  proto::Configuration configuration;
  for (const auto& sensor_dictionary :
       parameter_dictionary->GetArrayValuesAsDictionaries()) {
    *configuration.add_sensor() =
        CreateSensorConfiguration(sensor_dictionary.get());
  }
  return configuration;
}
// Returns true if some configured sensor uses 'frame_id'.
bool IsEnabled(const string& frame_id,
               const sensor::proto::Configuration& sensor_configuration) {
  for (const auto& sensor_config : sensor_configuration.sensor()) {
    if (sensor_config.frame_id() == frame_id) {
      return true;
    }
  }
  // No sensor matched the requested frame.
  return false;
}
// Looks up the transform configured for 'frame_id'; terminates the process
// if no sensor with that frame ID is configured.
transform::Rigid3d GetTransformToTracking(
    const string& frame_id,
    const sensor::proto::Configuration& sensor_configuration) {
  for (const auto& sensor_config : sensor_configuration.sensor()) {
    if (sensor_config.frame_id() != frame_id) {
      continue;
    }
    return transform::ToRigid3(sensor_config.transform());
  }
  LOG(FATAL) << "No configuration found for sensor with frame ID '" << frame_id
             << "'.";
}
} // namespace sensor
} // namespace cartographer
<|start_filename|>cartographer/mapping_3d/submaps.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/mapping_3d/submaps.h"
#include <cmath>
#include <limits>
#include "cartographer/common/math.h"
#include "cartographer/common/port.h"
#include "cartographer/sensor/laser.h"
#include "glog/logging.h"
namespace cartographer {
namespace mapping_3d {
namespace {
constexpr float kSliceHalfHeight = 0.1f;
struct LaserSegment {
Eigen::Vector2f from;
Eigen::Vector2f to;
bool hit; // Whether there is a laser return at 'to'.
};
// We compute a slice around the xy-plane. 'transform' is applied to the laser
// rays in global map frame to allow choosing an arbitrary slice.
// Clips every laser ray against the slab |z| <= kSliceHalfHeight (after
// applying 'transform * pose') and appends the resulting 2D segments to
// 'segments'. 'hit' is true only when the laser return itself lies inside
// the slab. Rays entirely above or entirely below the slab produce no
// segment. In every branch that divides by 'delta_z', the ray crosses a slab
// boundary, so 'delta_z' is nonzero there.
void GenerateSegmentForSlice(const sensor::LaserFan3D& laser_fan_3d,
                             const transform::Rigid3f& pose,
                             const transform::Rigid3f& transform,
                             std::vector<LaserSegment>* segments) {
  const sensor::LaserFan3D laser_fan =
      sensor::TransformLaserFan3D(laser_fan_3d, transform * pose);
  segments->reserve(laser_fan.returns.size());
  for (const Eigen::Vector3f& hit : laser_fan.returns) {
    const Eigen::Vector2f laser_origin_xy = laser_fan.origin.head<2>();
    const float laser_origin_z = laser_fan.origin.z();
    // Interpolation parameters of the ray from origin to 'hit'.
    const float delta_z = hit.z() - laser_origin_z;
    const Eigen::Vector2f delta_xy = hit.head<2>() - laser_origin_xy;
    if (laser_origin_z < -kSliceHalfHeight) {
      // Laser ray originates below the slice.
      if (hit.z() > kSliceHalfHeight) {
        // Laser ray is cutting through the slice.
        segments->push_back(LaserSegment{
            laser_origin_xy +
                (-kSliceHalfHeight - laser_origin_z) / delta_z * delta_xy,
            laser_origin_xy +
                (kSliceHalfHeight - laser_origin_z) / delta_z * delta_xy,
            false});
      } else if (hit.z() > -kSliceHalfHeight) {
        // Laser return is inside the slice.
        segments->push_back(LaserSegment{
            laser_origin_xy +
                (-kSliceHalfHeight - laser_origin_z) / delta_z * delta_xy,
            hit.head<2>(), true});
      }
    } else if (laser_origin_z < kSliceHalfHeight) {
      // Laser ray originates inside the slice.
      if (hit.z() < -kSliceHalfHeight) {
        // Laser hit is below.
        segments->push_back(LaserSegment{
            laser_origin_xy,
            laser_origin_xy +
                (-kSliceHalfHeight - laser_origin_z) / delta_z * delta_xy,
            false});
      } else if (hit.z() < kSliceHalfHeight) {
        // Full ray is inside the slice.
        segments->push_back(LaserSegment{laser_origin_xy, hit.head<2>(), true});
      } else {
        // Laser hit is above.
        segments->push_back(LaserSegment{
            laser_origin_xy,
            laser_origin_xy +
                (kSliceHalfHeight - laser_origin_z) / delta_z * delta_xy,
            false});
      }
    } else {
      // Laser ray originates above the slice.
      if (hit.z() < -kSliceHalfHeight) {
        // Laser ray is cutting through the slice.
        segments->push_back(LaserSegment{
            laser_origin_xy +
                (kSliceHalfHeight - laser_origin_z) / delta_z * delta_xy,
            laser_origin_xy +
                (-kSliceHalfHeight - laser_origin_z) / delta_z * delta_xy,
            false});
      } else if (hit.z() < kSliceHalfHeight) {
        // Laser return is inside the slice.
        segments->push_back(LaserSegment{
            laser_origin_xy +
                (kSliceHalfHeight - laser_origin_z) / delta_z * delta_xy,
            hit.head<2>(), true});
      }
    }
  }
}
// Applies 'miss_table' to the grid cells along 'segment' using integer
// (Bresenham-style) line traversal. The last cell along the driving axis is
// not updated by the loop, so a hit previously applied at the segment end is
// not also marked as a miss.
void UpdateFreeSpaceFromSegment(const LaserSegment& segment,
                                const std::vector<uint16>& miss_table,
                                mapping_2d::ProbabilityGrid* result) {
  Eigen::Array2i from = result->limits().GetXYIndexOfCellContainingPoint(
      segment.from.x(), segment.from.y());
  Eigen::Array2i to = result->limits().GetXYIndexOfCellContainingPoint(
      segment.to.x(), segment.to.y());
  // Transpose the coordinates for steep lines so that x is the driving axis.
  bool large_delta_y =
      std::abs(to.y() - from.y()) > std::abs(to.x() - from.x());
  if (large_delta_y) {
    std::swap(from.x(), from.y());
    std::swap(to.x(), to.y());
  }
  // Always iterate in the direction of increasing x.
  if (from.x() > to.x()) {
    std::swap(from, to);
  }
  const int dx = to.x() - from.x();
  const int dy = std::abs(to.y() - from.y());
  int error = dx / 2;
  const int direction = (from.y() < to.y()) ? 1 : -1;
  for (; from.x() < to.x(); ++from.x()) {
    if (large_delta_y) {
      // Undo the transposition when indexing into the grid.
      result->ApplyLookupTable(Eigen::Array2i(from.y(), from.x()), miss_table);
    } else {
      result->ApplyLookupTable(from, miss_table);
    }
    error -= dy;
    if (error < 0) {
      from.y() += direction;
      error += dx;
    }
  }
}
// Rasterizes 'segments' into 'result': grows the grid (with padding) so all
// segments fit, applies 'hit_table' to the cells containing laser returns and
// then 'miss_table' along each segment. Hits are deliberately applied before
// misses; StartUpdate() begins a new update cycle on the grid.
void InsertSegmentsIntoProbabilityGrid(
    const std::vector<LaserSegment>& segments,
    const std::vector<uint16>& hit_table, const std::vector<uint16>& miss_table,
    mapping_2d::ProbabilityGrid* result) {
  result->StartUpdate();
  if (segments.empty()) {
    return;
  }
  // Axis-aligned bounding box over all segment endpoints.
  Eigen::Vector2f min = segments.front().from;
  Eigen::Vector2f max = min;
  for (const LaserSegment& segment : segments) {
    min = min.cwiseMin(segment.from);
    min = min.cwiseMin(segment.to);
    max = max.cwiseMax(segment.from);
    max = max.cwiseMax(segment.to);
  }
  // Pad by 10 cells so segments near the border rasterize safely.
  const float padding = 10. * result->limits().resolution();
  max += Eigen::Vector2f(padding, padding);
  min -= Eigen::Vector2f(padding, padding);
  result->GrowLimits(min.x(), min.y());
  result->GrowLimits(max.x(), max.y());
  for (const LaserSegment& segment : segments) {
    if (segment.hit) {
      result->ApplyLookupTable(result->limits().GetXYIndexOfCellContainingPoint(
                                   segment.to.x(), segment.to.y()),
                               hit_table);
    }
  }
  for (const LaserSegment& segment : segments) {
    UpdateFreeSpaceFromSegment(segment, miss_table, result);
  }
}
} // namespace
// Shifts the world so the requested slice is centered on the xy-plane, clips
// the laser fan against it and rasterizes the segments into 'result'.
void InsertIntoProbabilityGrid(
    const sensor::LaserFan3D& laser_fan_3d, const transform::Rigid3f& pose,
    const float slice_z, const mapping_2d::LaserFanInserter& laser_fan_inserter,
    mapping_2d::ProbabilityGrid* result) {
  const transform::Rigid3f slice_shift =
      transform::Rigid3f::Translation(-slice_z * Eigen::Vector3f::UnitZ());
  std::vector<LaserSegment> segments;
  GenerateSegmentForSlice(laser_fan_3d, pose, slice_shift, &segments);
  InsertSegmentsIntoProbabilityGrid(segments, laser_fan_inserter.hit_table(),
                                    laser_fan_inserter.miss_table(), result);
}
// Parses SubmapsOptions from the Lua dictionary. CHECK-fails if
// 'num_laser_fans' is not positive.
proto::SubmapsOptions CreateSubmapsOptions(
    common::LuaParameterDictionary* parameter_dictionary) {
  proto::SubmapsOptions options;
  options.set_high_resolution(
      parameter_dictionary->GetDouble("high_resolution"));
  options.set_high_resolution_max_range(
      parameter_dictionary->GetDouble("high_resolution_max_range"));
  options.set_low_resolution(parameter_dictionary->GetDouble("low_resolution"));
  options.set_num_laser_fans(
      parameter_dictionary->GetNonNegativeInt("num_laser_fans"));
  *options.mutable_laser_fan_inserter_options() = CreateLaserFanInserterOptions(
      parameter_dictionary->GetDictionary("laser_fan_inserter").get());
  CHECK_GT(options.num_laser_fans(), 0);
  return options;
}
// Creates both hybrid grids centered at 'origin'; 'begin_laser_fan_index' is
// forwarded to the mapping::Submap base class.
Submap::Submap(const float high_resolution, const float low_resolution,
               const Eigen::Vector3f& origin, const int begin_laser_fan_index)
    : mapping::Submap(origin, begin_laser_fan_index),
      high_resolution_hybrid_grid(high_resolution, origin),
      low_resolution_hybrid_grid(low_resolution, origin) {}
Submaps::Submaps(const proto::SubmapsOptions& options)
    : options_(options),
      laser_fan_inserter_(options.laser_fan_inserter_options()) {
  // We always want to have at least one likelihood field which we can return,
  // and will create it at the origin in absence of a better choice.
  AddSubmap(Eigen::Vector3f::Zero());
}
// Returns a non-owning pointer to the submap at 'index'; CHECK-fails on an
// out-of-range index.
const Submap* Submaps::Get(int index) const {
  CHECK_GE(index, 0);
  CHECK_LT(index, size());
  return submaps_[index].get();
}
// Number of submaps. The explicit cast documents the intentional narrowing
// from size_t to the int used by the mapping::Submaps interface (and avoids
// an implicit-conversion warning).
int Submaps::size() const { return static_cast<int>(submaps_.size()); }
// Renders the high resolution grid of the submap at 'index' into 'response'.
// 'trajectory_nodes' is currently unused here.
void Submaps::SubmapToProto(
    int index, const std::vector<mapping::TrajectoryNode>& trajectory_nodes,
    const transform::Rigid3d& global_submap_pose,
    mapping::proto::SubmapQuery::Response* const response) {
  // Generate an X-ray view through the 'hybrid_grid', aligned to the xy-plane
  // in the global map frame.
  const HybridGrid& hybrid_grid = Get(index)->high_resolution_hybrid_grid;
  response->set_resolution(hybrid_grid.resolution());
  // Compute a bounding box for the texture.
  Eigen::Array2i min_index(INT_MAX, INT_MAX);
  Eigen::Array2i max_index(INT_MIN, INT_MIN);
  ExtractVoxelData(
      hybrid_grid,
      (global_submap_pose * Get(index)->local_pose().inverse()).cast<float>(),
      &min_index, &max_index);
  const int width = max_index.y() - min_index.y() + 1;
  const int height = max_index.x() - min_index.x() + 1;
  response->set_width(width);
  response->set_height(height);
  // Aggregate voxels into pixels, convert to interleaved value/alpha and
  // gzip the texture into the response.
  AccumulatePixelData(width, height, min_index, max_index);
  ComputePixelValues(width, height);
  common::FastGzipString(celldata_, response->mutable_cells());
  // NOTE(review): this appears to anchor the slice texture at the
  // 'max_index' corner in the submap frame — confirm against the consumer of
  // 'slice_pose'.
  *response->mutable_slice_pose() =
      transform::ToProto(global_submap_pose.inverse() *
                         transform::Rigid3d::Translation(Eigen::Vector3d(
                             max_index.x() * hybrid_grid.resolution(),
                             max_index.y() * hybrid_grid.resolution(),
                             global_submap_pose.translation().z())));
}
// Inserts 'laser_fan' into every submap currently accepting insertions and
// starts a new submap once the configured number of laser fans has been
// inserted into the most recent one.
void Submaps::InsertLaserFan(const sensor::LaserFan3D& laser_fan) {
  CHECK_LT(num_laser_fans_, std::numeric_limits<int>::max());
  ++num_laser_fans_;
  for (const int index : insertion_indices()) {
    Submap* submap = submaps_[index].get();
    // The high resolution grid only gets returns within
    // 'high_resolution_max_range'; the low resolution grid gets the full fan.
    laser_fan_inserter_.Insert(
        sensor::FilterLaserFanByMaxRange(laser_fan,
                                         options_.high_resolution_max_range()),
        &submap->high_resolution_hybrid_grid);
    laser_fan_inserter_.Insert(laser_fan, &submap->low_resolution_hybrid_grid);
    submap->end_laser_fan_index = num_laser_fans_;
  }
  ++num_laser_fans_in_last_submap_;
  if (num_laser_fans_in_last_submap_ == options_.num_laser_fans()) {
    AddSubmap(laser_fan.origin);
  }
}
// Grids of the submap selected for matching by the base-class
// matching_index().
const HybridGrid& Submaps::high_resolution_matching_grid() const {
  return submaps_[matching_index()]->high_resolution_hybrid_grid;
}
const HybridGrid& Submaps::low_resolution_matching_grid() const {
  return submaps_[matching_index()]->low_resolution_hybrid_grid;
}
// Associates 'trajectory_node_index' with every submap whose laser fan range
// includes the most recently inserted fan (i.e. submaps the latest scan
// contributed to).
void Submaps::AddTrajectoryNodeIndex(const int trajectory_node_index) {
  for (int i = 0; i != size(); ++i) {
    Submap& submap = *submaps_[i];
    if (submap.end_laser_fan_index == num_laser_fans_ &&
        submap.begin_laser_fan_index <= num_laser_fans_ - 1) {
      submap.trajectory_node_indices.push_back(trajectory_node_index);
    }
  }
}
// Appends a new submap at 'origin'. The second-to-last submap stops receiving
// insertions and is marked finished.
void Submaps::AddSubmap(const Eigen::Vector3f& origin) {
  if (size() > 1) {
    Submap* submap = submaps_[size() - 2].get();
    CHECK(!submap->finished);
    submap->finished = true;
  }
  submaps_.emplace_back(new Submap(options_.high_resolution(),
                                   options_.low_resolution(), origin,
                                   num_laser_fans_));
  LOG(INFO) << "Added submap " << size();
  num_laser_fans_in_last_submap_ = 0;
}
// Projects 'voxel_indices_and_probabilities_' onto the xy-plane, accumulating
// per-pixel statistics (count, z-extent, probabilities) into
// 'accumulated_pixel_data_' for the 'width' x 'height' texture bounded by
// ['min_index', 'max_index'].
void Submaps::AccumulatePixelData(const int width, const int height,
                                  const Eigen::Array2i& min_index,
                                  const Eigen::Array2i& max_index) {
  accumulated_pixel_data_.clear();
  accumulated_pixel_data_.resize(width * height);
  for (const Eigen::Array4i& voxel_index_and_probability :
       voxel_indices_and_probabilities_) {
    const Eigen::Array2i pixel_index = voxel_index_and_probability.head<2>();
    if ((pixel_index < min_index).any() || (pixel_index > max_index).any()) {
      // Out of bounds. This could happen because of floating point inaccuracy.
      continue;
    }
    // Texture coordinates are mirrored relative to the voxel indices.
    const int x = max_index.x() - pixel_index[0];
    const int y = max_index.y() - pixel_index[1];
    PixelData& pixel = accumulated_pixel_data_[x * width + y];
    ++pixel.count;
    pixel.min_z = std::min(pixel.min_z, voxel_index_and_probability[2]);
    pixel.max_z = std::max(pixel.max_z, voxel_index_and_probability[2]);
    const float probability =
        mapping::ValueToProbability(voxel_index_and_probability[3]);
    pixel.probability_sum += probability;
    pixel.max_probability = std::max(pixel.max_probability, probability);
  }
}
// Collects all obstructed voxels of 'hybrid_grid' (probability above the
// x-ray limit), transformed by 'transform' and quantized to grid resolution,
// into 'voxel_indices_and_probabilities_'. Also computes the pixel bounding
// box ['min_index', 'max_index'] of the xy-projection; callers pre-initialize
// these with INT_MAX/INT_MIN sentinels.
void Submaps::ExtractVoxelData(const HybridGrid& hybrid_grid,
                               const transform::Rigid3f& transform,
                               Eigen::Array2i* min_index,
                               Eigen::Array2i* max_index) {
  voxel_indices_and_probabilities_.clear();
  const float resolution_inverse = 1. / hybrid_grid.resolution();
  // Cells at or below this probability are treated as unobstructed.
  constexpr double kXrayObstructedCellProbabilityLimit = 0.501;
  for (auto it = HybridGrid::Iterator(hybrid_grid); !it.Done(); it.Next()) {
    const uint16 probability_value = it.GetValue();
    const float probability = mapping::ValueToProbability(probability_value);
    if (probability < kXrayObstructedCellProbabilityLimit) {
      // We ignore non-obstructed cells.
      continue;
    }
    const Eigen::Vector3f cell_center_local =
        hybrid_grid.GetCenterOfCell(it.GetCellIndex());
    const Eigen::Vector3f cell_center_global = transform * cell_center_local;
    const Eigen::Array4i voxel_index_and_probability(
        common::RoundToInt(cell_center_global.x() * resolution_inverse),
        common::RoundToInt(cell_center_global.y() * resolution_inverse),
        common::RoundToInt(cell_center_global.z() * resolution_inverse),
        probability_value);
    voxel_indices_and_probabilities_.push_back(voxel_index_and_probability);
    const Eigen::Array2i pixel_index = voxel_index_and_probability.head<2>();
    *min_index = min_index->cwiseMin(pixel_index);
    *max_index = max_index->cwiseMax(pixel_index);
  }
}
// Converts the accumulated per-pixel statistics into interleaved
// (value, alpha) byte pairs in 'celldata_' (2 bytes per pixel). Pixels whose
// observed z extent is below 'kMinZDifference' are emitted as fully
// transparent (0, 0).
void Submaps::ComputePixelValues(const int width, const int height) {
  celldata_.resize(2 * width * height);
  constexpr float kMinZDifference = 3.f;
  constexpr float kFreeSpaceWeight = 0.15f;
  auto it = celldata_.begin();
  for (size_t i = 0; i < accumulated_pixel_data_.size(); ++i) {
    const PixelData& pixel = accumulated_pixel_data_.at(i);
    // TODO(whess): Take into account submap rotation.
    // TODO(whess): Document the approach and make it more independent from the
    // chosen resolution.
    const float z_difference = pixel.count > 0 ? pixel.max_z - pixel.min_z : 0;
    if (z_difference < kMinZDifference) {
      *it = 0;  // value
      ++it;
      *it = 0;  // alpha
      ++it;
      continue;
    }
    // Cells in the column that produced no hit are counted as free space,
    // down-weighted by 'kFreeSpaceWeight', and contribute the complement of
    // the maximum observed probability.
    const float free_space = std::max(z_difference - pixel.count, 0.f);
    const float free_space_weight = kFreeSpaceWeight * free_space;
    const float total_weight = pixel.count + free_space_weight;
    const float free_space_probability = 1.f - pixel.max_probability;
    const float average_probability = mapping::ClampProbability(
        (pixel.probability_sum + free_space_probability * free_space_weight) /
        total_weight);
    // Positive 'delta' (probability below 0.5) is encoded in 'value',
    // negative in 'alpha'.
    const int delta =
        128 - mapping::ProbabilityToLogOddsInteger(average_probability);
    const uint8 alpha = delta > 0 ? 0 : -delta;
    const uint8 value = delta > 0 ? delta : 0;
    *it = value;  // value
    ++it;
    // NOTE(review): alpha is forced to 1 when both bytes would be 0,
    // presumably to keep an observed pixel distinguishable from an
    // unobserved one -- confirm against the renderer.
    *it = (value || alpha) ? alpha : 1;  // alpha
    ++it;
  }
}
} // namespace mapping_3d
} // namespace cartographer
<|start_filename|>cartographer/sensor/laser_test.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/sensor/laser.h"
#include <utility>
#include <vector>
#include "gmock/gmock.h"
namespace cartographer {
namespace sensor {
namespace {
using ::testing::Contains;
using ::testing::PrintToString;
// Verifies that a full-circle scan of unit ranges is projected onto the unit
// circle at 45 degree increments.
TEST(ProjectorTest, ToLaserFan) {
  proto::LaserScan laser_scan;
  for (int i = 0; i != 8; ++i) {
    laser_scan.add_range()->add_value(1.f);
  }
  laser_scan.set_angle_min(0.f);
  laser_scan.set_angle_max(8.f * static_cast<float>(M_PI_4));
  laser_scan.set_angle_increment(static_cast<float>(M_PI_4));
  const LaserFan fan = ToLaserFan(laser_scan, 0.f, 10.f, 1.f);
  const float kSqrtHalf = 1.f / std::sqrt(2.f);
  const std::vector<Eigen::Vector2f> expected_points = {
      Eigen::Vector2f(1.f, 0.f),
      Eigen::Vector2f(kSqrtHalf, kSqrtHalf),
      Eigen::Vector2f(0.f, 1.f),
      Eigen::Vector2f(-kSqrtHalf, kSqrtHalf),
      Eigen::Vector2f(-1.f, 0.f),
      Eigen::Vector2f(-kSqrtHalf, -kSqrtHalf),
      Eigen::Vector2f(0.f, -1.f),
      Eigen::Vector2f(kSqrtHalf, -kSqrtHalf)};
  for (size_t i = 0; i != expected_points.size(); ++i) {
    EXPECT_TRUE(fan.point_cloud[i].isApprox(expected_points[i], 1e-6));
  }
}
// Verifies handling of special range values: readings below the minimum range
// are dropped, infinity becomes a missing echo, and NaN is discarded.
TEST(ProjectorTest, ToLaserFanWithInfinityAndNaN) {
  const float kRanges[] = {1.f, std::numeric_limits<float>::infinity(), 2.f,
                           std::numeric_limits<float>::quiet_NaN(), 3.f};
  proto::LaserScan laser_scan;
  for (const float range : kRanges) {
    laser_scan.add_range()->add_value(range);
  }
  laser_scan.set_angle_min(0.f);
  laser_scan.set_angle_max(3.f * static_cast<float>(M_PI_4));
  laser_scan.set_angle_increment(static_cast<float>(M_PI_4));
  const LaserFan fan = ToLaserFan(laser_scan, 2.f, 10.f, 1.f);
  ASSERT_EQ(2, fan.point_cloud.size());
  EXPECT_TRUE(fan.point_cloud[0].isApprox(Eigen::Vector2f(0.f, 2.f), 1e-6));
  EXPECT_TRUE(fan.point_cloud[1].isApprox(Eigen::Vector2f(-3.f, 0.f), 1e-6));
  ASSERT_EQ(1, fan.missing_echo_point_cloud.size());
  EXPECT_TRUE(fan.missing_echo_point_cloud[0].isApprox(
      Eigen::Vector2f(1.f / std::sqrt(2.f), 1.f / std::sqrt(2.f)), 1e-6));
}
// Custom gmock matcher for std::pair<Eigen::Vector3f, int> entries: the
// vector must match 'expected.first' to within 1e-3 per component and the
// int must match 'expected.second' exactly.
MATCHER_P(PairApproximatelyEquals, expected,
          string("is equal to ") + PrintToString(expected)) {
  return (arg.first - expected.first).isZero(0.001f) &&
         arg.second == expected.second;
}
// Round-trips a LaserFan3D through Compress()/Decompress() and verifies the
// data survives, allowing for quantization error and reordering of returns.
TEST(LaserTest, Compression) {
  const LaserFan3D fan = {Eigen::Vector3f(1, 1, 1),
                          {Eigen::Vector3f(0, 1, 2), Eigen::Vector3f(4, 5, 6),
                           Eigen::Vector3f(0, 1, 2)},
                          {Eigen::Vector3f(7, 8, 9)},
                          {1, 2, 3}};
  const LaserFan3D actual = Decompress(Compress(fan));
  EXPECT_TRUE(actual.origin.isApprox(Eigen::Vector3f(1, 1, 1), 1e-6));
  EXPECT_EQ(3, actual.returns.size());
  EXPECT_EQ(1, actual.misses.size());
  EXPECT_EQ(actual.returns.size(), actual.reflectivities.size());
  EXPECT_TRUE(actual.misses[0].isApprox(Eigen::Vector3f(7, 8, 9), 0.001f));
  // Returns and their corresponding reflectivities will be reordered, so we
  // pair them up into a vector, and compare in an unordered manner.
  std::vector<std::pair<Eigen::Vector3f, int>> pairs;
  for (size_t i = 0; i != actual.returns.size(); ++i) {
    pairs.emplace_back(actual.returns[i], actual.reflectivities[i]);
  }
  EXPECT_EQ(3, pairs.size());
  const auto expect_contains = [&pairs](const Eigen::Vector3f& point,
                                        const int reflectivity) {
    EXPECT_THAT(pairs,
                Contains(PairApproximatelyEquals(
                    std::pair<Eigen::Vector3f, int>(point, reflectivity))));
  };
  expect_contains(Eigen::Vector3f(0, 1, 2), 1);
  expect_contains(Eigen::Vector3f(0, 1, 2), 3);
  expect_contains(Eigen::Vector3f(4, 5, 6), 2);
}
} // namespace
} // namespace sensor
} // namespace cartographer
<|start_filename|>cartographer/common/rate_timer_test.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/common/rate_timer.h"
#include "gtest/gtest.h"
namespace cartographer {
namespace common {
namespace {
// Pulses the timer at 10 Hz in sensor time and expects ComputeRate() to
// report that rate.
TEST(RateTimerTest, ComputeRate) {
  RateTimer<> rate_timer(common::FromSeconds(1.));
  common::Time current_time = common::FromUniversal(42);
  for (int pulse = 0; pulse != 100; ++pulse) {
    rate_timer.Pulse(current_time);
    current_time += common::FromSeconds(0.1);
  }
  EXPECT_NEAR(10., rate_timer.ComputeRate(), 1e-3);
}
// A std::chrono Clock-compatible type whose now() returns a manually
// controlled time point, letting tests advance "wall time" explicitly.
struct SimulatedClock {
  using rep = std::chrono::steady_clock::rep;
  using period = std::chrono::steady_clock::period;
  using duration = std::chrono::steady_clock::duration;
  using time_point = std::chrono::steady_clock::time_point;
  static constexpr bool is_steady = true;
  // The current simulated time, advanced directly by tests.
  static time_point time;
  static time_point now() noexcept { return time; }
};
SimulatedClock::time_point SimulatedClock::time;
// Advances sensor time twice as fast as simulated wall time (0.1 s vs 0.05 s
// per pulse) and expects a wall time rate ratio of 2.
TEST(RateTimerTest, ComputeWallTimeRateRatio) {
  common::Time sensor_time = common::FromUniversal(42);
  RateTimer<SimulatedClock> rate_timer(common::FromSeconds(1.));
  for (int pulse = 0; pulse != 100; ++pulse) {
    rate_timer.Pulse(sensor_time);
    sensor_time += common::FromSeconds(0.1);
    SimulatedClock::time +=
        std::chrono::duration_cast<SimulatedClock::duration>(
            std::chrono::duration<double>(0.05));
  }
  EXPECT_NEAR(2., rate_timer.ComputeWallTimeRateRatio(), 1e-3);
}
} // namespace
} // namespace common
} // namespace cartographer
<|start_filename|>cartographer/mapping_3d/scan_matching/precomputation_grid.h<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CARTOGRAPHER_MAPPING_3D_SCAN_MATCHING_PRECOMPUTATION_GRID_H_
#define CARTOGRAPHER_MAPPING_3D_SCAN_MATCHING_PRECOMPUTATION_GRID_H_
#include "cartographer/mapping_3d/hybrid_grid.h"
namespace cartographer {
namespace mapping_3d {
namespace scan_matching {
// A hybrid grid storing 8 bit values instead of the 2 x 16 bit of HybridGrid.
// Values map linearly onto [kMinProbability, kMaxProbability].
class PrecomputationGrid : public HybridGridBase<uint8> {
 public:
  PrecomputationGrid(const float resolution, const Eigen::Vector3f& origin)
      : HybridGridBase<uint8>(resolution, origin) {}

  // Maps values from [0, 255] to [kMinProbability, kMaxProbability].
  static float ToProbability(float value) {
    constexpr float kSlope =
        (mapping::kMaxProbability - mapping::kMinProbability) / 255.f;
    return mapping::kMinProbability + value * kSlope;
  }
};
// Converts a HybridGrid to a PrecomputationGrid representing the same data,
// but only using 8 bit instead of 2 x 16 bit.
PrecomputationGrid ConvertToPrecomputationGrid(const HybridGrid& hybrid_grid);

// Returns a grid of the same resolution containing the maximum value of
// original voxels in 'grid'. This maximum is over the 8 voxels that have
// any combination of index components optionally increased by 'shift'.
// If 'shift' is 2 ** (depth - 1), where depth 0 is the original grid, and this
// is using the precomputed grid of one depth before, this results in
// precomputation grids analogous to the 2D case. If 'half_resolution' is
// true, the result is computed at half the resolution of 'grid'.
PrecomputationGrid PrecomputeGrid(const PrecomputationGrid& grid,
                                  bool half_resolution,
                                  const Eigen::Array3i& shift);
} // namespace scan_matching
} // namespace mapping_3d
} // namespace cartographer
#endif // CARTOGRAPHER_MAPPING_3D_SCAN_MATCHING_PRECOMPUTATION_GRID_H_
<|start_filename|>cartographer/sensor/sensor_packet_period_histogram_builder.h<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CARTOGRAPHER_SENSOR_SENSOR_PACKET_PERIOD_HISTOGRAM_BUILDER_H_
#define CARTOGRAPHER_SENSOR_SENSOR_PACKET_PERIOD_HISTOGRAM_BUILDER_H_
#include <map>
#include <unordered_map>
#include "cartographer/common/histogram.h"
#include "cartographer/common/port.h"
namespace cartographer {
namespace sensor {
// Collects per-trajectory, per-frame-id histograms of sensor packet timing
// for diagnostic logging.
class SensorPacketPeriodHistogramBuilder {
 public:
  // Records a sensor packet with 'timestamp' for the sensor identified by
  // ('trajectory_id', 'frame_id'). Presumably the period since the last
  // packet of the same sensor is added to its histogram -- implementation is
  // in the .cc file.
  void Add(int trajectory_id, int64 timestamp, const string& frame_id);
  // Logs the collected histograms and resets the builder's state.
  void LogHistogramsAndClear();

 private:
  // Identifies a sensor stream by trajectory id and frame id.
  using Key = std::pair<int, string>;
  // Last seen timestamp per sensor stream, used to compute packet periods.
  std::map<Key, int64> last_timestamps_;
  // Histograms keyed by trajectory id, then by frame id.
  std::unordered_map<int, std::unordered_map<string, common::BucketHistogram>>
      histograms_;
};
} // namespace sensor
} // namespace cartographer
#endif // CARTOGRAPHER_SENSOR_SENSOR_PACKET_PERIOD_HISTOGRAM_BUILDER_H_
<|start_filename|>cartographer/mapping_3d/scan_matching/rotation_delta_cost_functor.h<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CARTOGRAPHER_MAPPING_3D_SCAN_MATCHING_ROTATION_DELTA_COST_FUNCTOR_H_
#define CARTOGRAPHER_MAPPING_3D_SCAN_MATCHING_ROTATION_DELTA_COST_FUNCTOR_H_
#include <cmath>
#include "Eigen/Core"
#include "cartographer/transform/rigid_transform.h"
#include "cartographer/transform/transform.h"
#include "ceres/rotation.h"
namespace cartographer {
namespace mapping_3d {
namespace scan_matching {
// Computes the cost of rotating the pose estimate. Cost increases with the
// solution's distance from the rotation estimate.
class RotationDeltaCostFunctor {
public:
// Constructs a new RotationDeltaCostFunctor from the given
// 'rotation_estimate'.
explicit RotationDeltaCostFunctor(const double scaling_factor,
const Eigen::Quaterniond& initial_rotation)
: scaling_factor_(scaling_factor) {
initial_rotation_inverse_[0] = initial_rotation.w();
initial_rotation_inverse_[1] = -initial_rotation.x();
initial_rotation_inverse_[2] = -initial_rotation.y();
initial_rotation_inverse_[3] = -initial_rotation.z();
}
RotationDeltaCostFunctor(const RotationDeltaCostFunctor&) = delete;
RotationDeltaCostFunctor& operator=(const RotationDeltaCostFunctor&) = delete;
template <typename T>
bool operator()(const T* const rotation_quaternion, T* residual) const {
T delta[4];
T initial_rotation_inverse[4] = {
T(initial_rotation_inverse_[0]), T(initial_rotation_inverse_[1]),
T(initial_rotation_inverse_[2]), T(initial_rotation_inverse_[3])};
ceres::QuaternionProduct(initial_rotation_inverse, rotation_quaternion,
delta);
// Will compute the squared norm of the imaginary component of the delta
// quaternion which is sin(phi/2)^2.
residual[0] = scaling_factor_ * delta[1];
residual[1] = scaling_factor_ * delta[2];
residual[2] = scaling_factor_ * delta[3];
return true;
}
private:
const double scaling_factor_;
double initial_rotation_inverse_[4];
};
} // namespace scan_matching
} // namespace mapping_3d
} // namespace cartographer
#endif // CARTOGRAPHER_MAPPING_3D_SCAN_MATCHING_ROTATION_DELTA_COST_FUNCTOR_H_
<|start_filename|>cartographer/mapping_3d/hybrid_grid_test.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/mapping_3d/hybrid_grid.h"
#include <random>
#include <tuple>
#include "gmock/gmock.h"
namespace cartographer {
namespace mapping_3d {
namespace {
// Verifies that applying odds raises/lowers cell probabilities, initializes
// unknown cells, and that repeated updates without StartUpdate() are ignored.
TEST(HybridGridTest, ApplyOdds) {
  HybridGrid hybrid_grid(1.f, Eigen::Vector3f(-0.5f, -0.5f, -0.5f));
  // All eight cells of the unit cube start out unknown.
  for (int z = 0; z != 2; ++z) {
    for (int x = 0; x != 2; ++x) {
      for (int y = 0; y != 2; ++y) {
        EXPECT_FALSE(hybrid_grid.IsKnown(Eigen::Array3i(x, y, z)));
      }
    }
  }

  const auto apply_odds = [&hybrid_grid](const Eigen::Array3i& index,
                                         const float probability) {
    hybrid_grid.ApplyLookupTable(
        index, mapping::ComputeLookupTableToApplyOdds(
                   mapping::Odds(probability)));
  };

  // Odds above one raise the probability.
  hybrid_grid.SetProbability(Eigen::Array3i(1, 0, 1), 0.5f);
  hybrid_grid.StartUpdate();
  apply_odds(Eigen::Array3i(1, 0, 1), 0.9f);
  EXPECT_GT(hybrid_grid.GetProbability(Eigen::Array3i(1, 0, 1)), 0.5f);

  // Odds below one lower the probability.
  hybrid_grid.SetProbability(Eigen::Array3i(0, 1, 0), 0.5f);
  hybrid_grid.StartUpdate();
  apply_odds(Eigen::Array3i(0, 1, 0), 0.1f);
  EXPECT_LT(hybrid_grid.GetProbability(Eigen::Array3i(0, 1, 0)), 0.5f);

  // Tests adding odds to an unknown cell.
  hybrid_grid.StartUpdate();
  apply_odds(Eigen::Array3i(1, 1, 1), 0.42f);
  EXPECT_NEAR(hybrid_grid.GetProbability(Eigen::Array3i(1, 1, 1)), 0.42f, 1e-4);

  // Tests that further updates are ignored if StartUpdate() isn't called.
  apply_odds(Eigen::Array3i(1, 1, 1), 0.9f);
  EXPECT_NEAR(hybrid_grid.GetProbability(Eigen::Array3i(1, 1, 1)), 0.42f, 1e-4);
  hybrid_grid.StartUpdate();
  apply_odds(Eigen::Array3i(1, 1, 1), 0.9f);
  EXPECT_GT(hybrid_grid.GetProbability(Eigen::Array3i(1, 1, 1)), 0.42f);
}
// Sets a single cell to the maximum probability and checks that it reads back
// while neighboring cells stay unknown.
TEST(HybridGridTest, GetProbability) {
  HybridGrid hybrid_grid(1.f, Eigen::Vector3f(-0.5f, -0.5f, -0.5f));
  const Eigen::Vector3f point(-0.5f, 0.5f, 0.5f);
  hybrid_grid.SetProbability(hybrid_grid.GetCellIndex(point),
                             mapping::kMaxProbability);
  EXPECT_NEAR(hybrid_grid.GetProbability(hybrid_grid.GetCellIndex(point)),
              mapping::kMaxProbability, 1e-6);
  for (const Eigen::Array3i& index :
       {hybrid_grid.GetCellIndex(Eigen::Vector3f(-0.5f, 1.5, 0.5f)),
        hybrid_grid.GetCellIndex(Eigen::Vector3f(.5f, 0.5, 0.5f)),
        hybrid_grid.GetCellIndex(Eigen::Vector3f(0.5f, 1.5, 0.5f))}) {
    EXPECT_FALSE(hybrid_grid.IsKnown(index));
  }
}
// Matches an Eigen array whose coefficients are all equal to those of 'index'.
MATCHER_P(AllCwiseEqual, index, "") { return (arg == index).all(); }
// Verifies the point-to-cell-index mapping at the grid corners and around the
// origin, using a table of (point, expected index) pairs.
TEST(HybridGridTest, GetCellIndex) {
  HybridGrid hybrid_grid(2.f, Eigen::Vector3f(-7.f, -13.f, -2.f));
  const std::vector<std::pair<Eigen::Vector3f, Eigen::Array3i>> test_cases = {
      // Grid corners.
      {Eigen::Vector3f(-7.f, -13.f, -2.f), Eigen::Array3i(0, 0, 0)},
      {Eigen::Vector3f(-7.f, 13.f, 8.f), Eigen::Array3i(0, 13, 5)},
      {Eigen::Vector3f(7.f, -13.f, 8.f), Eigen::Array3i(7, 0, 5)},
      {Eigen::Vector3f(7.f, 13.f, -2.f), Eigen::Array3i(7, 13, 0)},
      // Check around the origin.
      {Eigen::Vector3f(1.5f, -1.5f, -1.5f), Eigen::Array3i(4, 6, 0)},
      {Eigen::Vector3f(0.5f, -0.5f, -0.5f), Eigen::Array3i(4, 6, 1)},
      {Eigen::Vector3f(-0.5f, 1.5f, 0.5f), Eigen::Array3i(3, 7, 1)},
      {Eigen::Vector3f(-1.5f, 0.5f, 1.5f), Eigen::Array3i(3, 7, 2)}};
  for (const auto& point_and_index : test_cases) {
    EXPECT_THAT(hybrid_grid.GetCellIndex(point_and_index.first),
                AllCwiseEqual(point_and_index.second));
  }
}
// Verifies the cell-index-to-center mapping and that GetCellIndex() inverts
// GetCenterOfCell().
TEST(HybridGridTest, GetCenterOfCell) {
  HybridGrid hybrid_grid(2.f, Eigen::Vector3f(-7.f, -13.f, -2.f));
  const Eigen::Array3i cell_index(3, 2, 1);
  const Eigen::Vector3f cell_center = hybrid_grid.GetCenterOfCell(cell_index);
  // With resolution 2 and origin (-7, -13, -2), cell (3, 2, 1) is centered
  // at (-1, -9, 0).
  EXPECT_NEAR(-1.f, cell_center.x(), 1e-6);
  EXPECT_NEAR(-9.f, cell_center.y(), 1e-6);
  EXPECT_NEAR(0.f, cell_center.z(), 1e-6);
  EXPECT_THAT(hybrid_grid.GetCellIndex(cell_center), AllCwiseEqual(cell_index));
}
// Fills the grid from 10000 random samples and verifies that iteration visits
// exactly the stored cells, and that range-based iteration matches the
// explicit Iterator.
TEST(HybridGridTest, TestIteration) {
  HybridGrid hybrid_grid(2.f, Eigen::Vector3f(-7.f, -12.f, 0.f));
  std::map<std::tuple<int, int, int>, float> values;
  // Fixed seed keeps the test deterministic.
  std::mt19937 rng(1285120005);
  std::uniform_real_distribution<float> value_distribution(
      mapping::kMinProbability, mapping::kMaxProbability);
  std::uniform_int_distribution<int> xyz_distribution(-3000, 2999);
  for (int i = 0; i < 10000; ++i) {
    const auto x = xyz_distribution(rng);
    const auto y = xyz_distribution(rng);
    const auto z = xyz_distribution(rng);
    values.emplace(std::make_tuple(x, y, z), value_distribution(rng));
  }
  for (const auto& pair : values) {
    const Eigen::Array3i cell_index(std::get<0>(pair.first),
                                    std::get<1>(pair.first),
                                    std::get<2>(pair.first));
    hybrid_grid.SetProbability(cell_index, pair.second);
  }
  // Each visited cell must be one that was set, with a matching probability;
  // visited cells are erased from 'values' so leftovers can be detected.
  for (auto it = HybridGrid::Iterator(hybrid_grid); !it.Done(); it.Next()) {
    const Eigen::Array3i cell_index = it.GetCellIndex();
    const float iterator_probability =
        mapping::ValueToProbability(it.GetValue());
    EXPECT_EQ(iterator_probability, hybrid_grid.GetProbability(cell_index));
    const std::tuple<int, int, int> key =
        std::make_tuple(cell_index[0], cell_index[1], cell_index[2]);
    EXPECT_TRUE(values.count(key));
    EXPECT_NEAR(values[key], iterator_probability, 1e-4);
    values.erase(key);
  }
  // Test that range based loop is equivalent to using the iterator.
  auto it = HybridGrid::Iterator(hybrid_grid);
  for (const auto& cell : hybrid_grid) {
    ASSERT_FALSE(it.Done());
    EXPECT_THAT(cell.first, AllCwiseEqual(it.GetCellIndex()));
    EXPECT_EQ(cell.second, it.GetValue());
    it.Next();
  }
  // Now 'values' must not contain values.
  for (const auto& pair : values) {
    const Eigen::Array3i cell_index(std::get<0>(pair.first),
                                    std::get<1>(pair.first),
                                    std::get<2>(pair.first));
    ADD_FAILURE() << cell_index << " Probability: " << pair.second;
  }
}
} // namespace
} // namespace mapping_3d
} // namespace cartographer
<|start_filename|>cartographer/transform/transform.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/transform/transform.h"
namespace cartographer {
namespace transform {
// Reconstructs a Rigid2d transform from its proto representation.
Rigid2d ToRigid2(const proto::Rigid2d& pose) {
  return Rigid2d({pose.translation().x(), pose.translation().y()},
                 pose.rotation());
}
// Converts a Vector2d proto to an Eigen vector.
Eigen::Vector2d ToEigen(const proto::Vector2d& vector) {
  return Eigen::Vector2d(vector.x(), vector.y());
}
// Converts a Vector3f proto to an Eigen vector.
Eigen::Vector3f ToEigen(const proto::Vector3f& vector) {
  return Eigen::Vector3f(vector.x(), vector.y(), vector.z());
}
// Converts a Vector3d proto to an Eigen vector.
Eigen::Vector3d ToEigen(const proto::Vector3d& vector) {
  return Eigen::Vector3d(vector.x(), vector.y(), vector.z());
}
// Converts a Quaterniond proto to an Eigen quaternion. Note the (w, x, y, z)
// argument order of the Eigen constructor.
Eigen::Quaterniond ToEigen(const proto::Quaterniond& quaternion) {
  return Eigen::Quaterniond(quaternion.w(), quaternion.x(), quaternion.y(),
                            quaternion.z());
}
// Serializes a Rigid2d transform to its proto representation.
proto::Rigid2d ToProto(const transform::Rigid2d& transform) {
  proto::Rigid2d proto;
  proto.mutable_translation()->set_x(transform.translation().x());
  proto.mutable_translation()->set_y(transform.translation().y());
  proto.set_rotation(transform.rotation().angle());
  return proto;
}
// Serializes a Rigid2f transform to its proto representation.
proto::Rigid2f ToProto(const transform::Rigid2f& transform) {
  proto::Rigid2f proto;
  proto.mutable_translation()->set_x(transform.translation().x());
  proto.mutable_translation()->set_y(transform.translation().y());
  proto.set_rotation(transform.rotation().angle());
  return proto;
}
// Serializes a Rigid3d transform to its proto representation.
proto::Rigid3d ToProto(const transform::Rigid3d& rigid) {
  proto::Rigid3d proto;
  *proto.mutable_translation() = ToProto(rigid.translation());
  *proto.mutable_rotation() = ToProto(rigid.rotation());
  return proto;
}
// Reconstructs a Rigid3d transform from its proto representation.
transform::Rigid3d ToRigid3(const proto::Rigid3d& rigid) {
  return transform::Rigid3d(ToEigen(rigid.translation()),
                            ToEigen(rigid.rotation()));
}
// Serializes a Rigid3f transform to its proto representation.
proto::Rigid3f ToProto(const transform::Rigid3f& rigid) {
  proto::Rigid3f proto;
  *proto.mutable_translation() = ToProto(rigid.translation());
  *proto.mutable_rotation() = ToProto(rigid.rotation());
  return proto;
}
// Serializes an Eigen vector to a Vector3f proto.
proto::Vector3f ToProto(const Eigen::Vector3f& vector) {
  proto::Vector3f proto;
  proto.set_x(vector.x());
  proto.set_y(vector.y());
  proto.set_z(vector.z());
  return proto;
}
// Serializes an Eigen vector to a Vector3d proto.
proto::Vector3d ToProto(const Eigen::Vector3d& vector) {
  proto::Vector3d proto;
  proto.set_x(vector.x());
  proto.set_y(vector.y());
  proto.set_z(vector.z());
  return proto;
}
// Serializes an Eigen quaternion to a Quaternionf proto.
proto::Quaternionf ToProto(const Eigen::Quaternionf& quaternion) {
  proto::Quaternionf proto;
  proto.set_w(quaternion.w());
  proto.set_x(quaternion.x());
  proto.set_y(quaternion.y());
  proto.set_z(quaternion.z());
  return proto;
}
// Serializes an Eigen quaternion to a Quaterniond proto.
proto::Quaterniond ToProto(const Eigen::Quaterniond& quaternion) {
  proto::Quaterniond proto;
  proto.set_w(quaternion.w());
  proto.set_x(quaternion.x());
  proto.set_y(quaternion.y());
  proto.set_z(quaternion.z());
  return proto;
}
} // namespace transform
} // namespace cartographer
<|start_filename|>cartographer/common/ordered_multi_queue_test.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/common/ordered_multi_queue.h"
#include <vector>
#include "cartographer/common/make_unique.h"
#include "gtest/gtest.h"
namespace cartographer {
namespace common {
namespace {
// Verifies that values from three interleaved queues are dispatched in
// globally increasing sort-key order.
TEST(OrderedMultiQueue, Ordering) {
  std::vector<int> received_values;
  OrderedMultiQueue<int, int, int> queue;
  for (const int queue_key : {1, 2, 3}) {
    queue.AddQueue(queue_key, [&received_values](std::unique_ptr<int> value) {
      // Dispatch must happen in strictly increasing order.
      if (!received_values.empty()) {
        EXPECT_GT(*value, received_values.back());
      }
      received_values.push_back(*value);
    });
  }
  // Adds 'value' to the given queue, using the value itself as sort key.
  const auto add = [&queue](const int queue_key, const int value) {
    queue.Add(queue_key, value, common::make_unique<int>(value));
  };
  add(1, 4);
  add(1, 5);
  add(1, 6);
  EXPECT_TRUE(received_values.empty());
  add(2, 1);
  EXPECT_TRUE(received_values.empty());
  add(3, 2);
  EXPECT_EQ(received_values.size(), 1);
  add(2, 3);
  EXPECT_EQ(received_values.size(), 2);
  add(2, 7);
  add(3, 8);
  queue.Flush();
  EXPECT_EQ(8, received_values.size());
  for (size_t i = 0; i != received_values.size(); ++i) {
    EXPECT_EQ(i + 1, received_values[i]);
  }
}
// Verifies that values are only dispatched once all other queues are
// finished, since earlier values could otherwise still arrive on them.
TEST(OrderedMultiQueue, MarkQueueAsFinished) {
  std::vector<int> received_values;
  OrderedMultiQueue<int, int, int> queue;
  for (const int queue_key : {1, 2, 3}) {
    queue.AddQueue(queue_key, [&received_values](std::unique_ptr<int> value) {
      if (!received_values.empty()) {
        EXPECT_GT(*value, received_values.back());
      }
      received_values.push_back(*value);
    });
  }
  queue.Add(1, 1, common::make_unique<int>(1));
  queue.Add(1, 2, common::make_unique<int>(2));
  queue.Add(1, 3, common::make_unique<int>(3));
  EXPECT_TRUE(received_values.empty());
  queue.MarkQueueAsFinished(1);
  EXPECT_TRUE(received_values.empty());
  queue.MarkQueueAsFinished(2);
  EXPECT_TRUE(received_values.empty());
  queue.MarkQueueAsFinished(3);
  EXPECT_EQ(3, received_values.size());
  for (size_t i = 0; i != received_values.size(); ++i) {
    EXPECT_EQ(i + 1, received_values[i]);
  }
}
} // namespace
} // namespace common
} // namespace cartographer
<|start_filename|>cartographer/mapping/probability_values_test.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/mapping/probability_values.h"
#include "gtest/gtest.h"
namespace cartographer {
namespace mapping {
namespace {
// ProbabilityFromOdds() must invert Odds() across the valid probability
// range, including both clamping bounds.
TEST(ProbabilityValuesTest, OddsConversions) {
  for (const float probability : {kMinProbability, 0.5f, kMaxProbability}) {
    EXPECT_NEAR(ProbabilityFromOdds(Odds(probability)), probability, 1e-6);
  }
}
} // namespace
} // namespace mapping
} // namespace cartographer
<|start_filename|>cartographer/mapping_3d/scan_matching/fast_correlative_scan_matcher_test.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/mapping_3d/scan_matching/fast_correlative_scan_matcher.h"
#include <algorithm>
#include <cmath>
#include <random>
#include <string>
#include "cartographer/common/lua_parameter_dictionary_test_helpers.h"
#include "cartographer/mapping_3d/laser_fan_inserter.h"
#include "cartographer/transform/rigid_transform_test_helpers.h"
#include "cartographer/transform/transform.h"
#include "gtest/gtest.h"
namespace cartographer {
namespace mapping_3d {
namespace scan_matching {
namespace {
// Builds FastCorrelativeScanMatcherOptions for tests, with both the branch
// and bound depth and the full resolution depth set to
// 'branch_and_bound_depth'.
proto::FastCorrelativeScanMatcherOptions
CreateFastCorrelativeScanMatcherTestOptions(const int branch_and_bound_depth) {
  auto parameter_dictionary = common::MakeDictionary(
      "return {"
      "branch_and_bound_depth = " +
      std::to_string(branch_and_bound_depth) +
      ", "
      "full_resolution_depth = " +
      std::to_string(branch_and_bound_depth) +
      ", "
      "rotational_histogram_size = 30, "
      "min_rotational_score = 0.1, "
      "linear_xy_search_window = 0.8, "
      "linear_z_search_window = 0.8, "
      "angular_search_window = 0.3, "
      "}");
  return CreateFastCorrelativeScanMatcherOptions(parameter_dictionary.get());
}
// Builds LaserFanInserterOptions with fixed values suitable for tests.
mapping_3d::proto::LaserFanInserterOptions CreateLaserFanInserterTestOptions() {
  auto parameter_dictionary = common::MakeDictionary(
      "return { "
      "hit_probability = 0.7, "
      "miss_probability = 0.4, "
      "num_free_space_voxels = 5, "
      "}");
  return CreateLaserFanInserterOptions(parameter_dictionary.get());
}
// Inserts a point cloud at a random pose near the identity and verifies that
// the matcher recovers that pose from an identity initial estimate.
TEST(FastCorrelativeScanMatcherTest, CorrectPose) {
  std::mt19937 prng(42);
  std::uniform_real_distribution<float> distribution(-1.f, 1.f);
  LaserFanInserter laser_fan_inserter(CreateLaserFanInserterTestOptions());
  constexpr float kMinScore = 0.1f;
  const auto options = CreateFastCorrelativeScanMatcherTestOptions(5);
  // Three perpendicular runs of points along the coordinate axes.
  sensor::PointCloud point_cloud{
      Eigen::Vector3f(4.f, 0.f, 0.f), Eigen::Vector3f(4.5f, 0.f, 0.f),
      Eigen::Vector3f(5.f, 0.f, 0.f), Eigen::Vector3f(5.5f, 0.f, 0.f),
      Eigen::Vector3f(0.f, 4.f, 0.f), Eigen::Vector3f(0.f, 4.5f, 0.f),
      Eigen::Vector3f(0.f, 5.f, 0.f), Eigen::Vector3f(0.f, 5.5f, 0.f),
      Eigen::Vector3f(0.f, 0.f, 4.f), Eigen::Vector3f(0.f, 0.f, 4.5f),
      Eigen::Vector3f(0.f, 0.f, 5.f), Eigen::Vector3f(0.f, 0.f, 5.5f)};
  for (int i = 0; i != 20; ++i) {
    // Random translation within +/-0.7 m and yaw within +/-0.2 rad, i.e.
    // inside the configured search windows.
    const float x = 0.7f * distribution(prng);
    const float y = 0.7f * distribution(prng);
    const float z = 0.7f * distribution(prng);
    const float theta = 0.2f * distribution(prng);
    const auto expected_pose =
        transform::Rigid3f::Translation(Eigen::Vector3f(x, y, z)) *
        transform::Rigid3f::Rotation(
            Eigen::AngleAxisf(theta, Eigen::Vector3f::UnitZ()));
    HybridGrid hybrid_grid(0.05f /* resolution */,
                           Eigen::Vector3f(0.5f, 1.5f, 2.5f) /* origin */);
    hybrid_grid.StartUpdate();
    laser_fan_inserter.Insert(
        sensor::LaserFan3D{
            expected_pose.translation(),
            sensor::TransformPointCloud(point_cloud, expected_pose),
            {}},
        &hybrid_grid);
    FastCorrelativeScanMatcher fast_correlative_scan_matcher(hybrid_grid, {},
                                                             options);
    transform::Rigid3d pose_estimate;
    float score;
    EXPECT_TRUE(fast_correlative_scan_matcher.Match(
        transform::Rigid3d::Identity(), point_cloud, point_cloud, kMinScore,
        &score, &pose_estimate));
    EXPECT_LT(kMinScore, score);
    EXPECT_THAT(expected_pose,
                transform::IsNearly(pose_estimate.cast<float>(), 0.05f))
        << "Actual: " << transform::ToProto(pose_estimate).DebugString()
        << "\nExpected: " << transform::ToProto(expected_pose).DebugString();
  }
}
} // namespace
} // namespace scan_matching
} // namespace mapping_3d
} // namespace cartographer
<|start_filename|>cartographer/transform/transform_interpolation_buffer_test.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/transform/transform_interpolation_buffer.h"
#include "Eigen/Core"
#include "Eigen/Geometry"
#include "cartographer/transform/rigid_transform.h"
#include "cartographer/transform/rigid_transform_test_helpers.h"
#include "gtest/gtest.h"
namespace cartographer {
namespace transform {
namespace {
// Checks that Has() reflects the covered time range as transforms are pushed.
TEST(TransformInterpolationBufferTest, testHas) {
  TransformInterpolationBuffer buffer;
  // An empty buffer covers no time at all.
  EXPECT_FALSE(buffer.Has(common::FromUniversal(50)));
  buffer.Push(common::FromUniversal(50), transform::Rigid3d::Identity());
  // A single entry covers exactly its own timestamp.
  EXPECT_FALSE(buffer.Has(common::FromUniversal(25)));
  EXPECT_TRUE(buffer.Has(common::FromUniversal(50)));
  EXPECT_FALSE(buffer.Has(common::FromUniversal(75)));
  buffer.Push(common::FromUniversal(100), transform::Rigid3d::Identity());
  // Two entries cover the closed interval between their timestamps.
  EXPECT_FALSE(buffer.Has(common::FromUniversal(25)));
  EXPECT_TRUE(buffer.Has(common::FromUniversal(50)));
  EXPECT_TRUE(buffer.Has(common::FromUniversal(75)));
  EXPECT_TRUE(buffer.Has(common::FromUniversal(100)));
  EXPECT_FALSE(buffer.Has(common::FromUniversal(125)));
  EXPECT_EQ(common::FromUniversal(50), buffer.earliest_time());
  EXPECT_EQ(common::FromUniversal(100), buffer.latest_time());
}
// Checks that Lookup() linearly interpolates between two pushed transforms.
TEST(TransformInterpolationBufferTest, testLookup) {
  TransformInterpolationBuffer buffer;
  buffer.Push(common::FromUniversal(50), transform::Rigid3d::Identity());
  // The rotation needs to be relatively small in order for the interpolation to
  // remain a z-axis rotation.
  buffer.Push(common::FromUniversal(100),
              transform::Rigid3d::Translation(Eigen::Vector3d(10., 10., 10.)) *
                  transform::Rigid3d::Rotation(
                      Eigen::AngleAxisd(2., Eigen::Vector3d::UnitZ())));
  // Query exactly halfway between the two entries; expect half the
  // translation and half the rotation angle.
  const common::Time time = common::FromUniversal(75);
  const transform::Rigid3d interpolated = buffer.Lookup(time);
  EXPECT_THAT(
      interpolated,
      IsNearly(transform::Rigid3d::Translation(Eigen::Vector3d(5., 5., 5.)) *
                   transform::Rigid3d::Rotation(
                       Eigen::AngleAxisd(1., Eigen::Vector3d::UnitZ())),
               1e-6));
}
} // namespace
} // namespace transform
} // namespace cartographer
<|start_filename|>cartographer/sensor/voxel_filter.h<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CARTOGRAPHER_SENSOR_VOXEL_FILTER_H_
#define CARTOGRAPHER_SENSOR_VOXEL_FILTER_H_
#include <tuple>
#include <unordered_set>
#include <utility>
#include "cartographer/common/lua_parameter_dictionary.h"
#include "cartographer/mapping_3d/hybrid_grid.h"
#include "cartographer/sensor/point_cloud.h"
#include "cartographer/sensor/proto/adaptive_voxel_filter_options.pb.h"
namespace cartographer {
namespace sensor {
// Returns a voxel filtered copy of 'point_cloud' where 'size' is the length
// of a voxel edge.
PointCloud2D VoxelFiltered(const PointCloud2D& point_cloud, float size);

// Returns a voxel filtered copy of 'point_cloud' where 'size' is the length
// of a voxel edge.
PointCloud VoxelFiltered(const PointCloud& point_cloud, float size);
// Voxel filter for point clouds. For each voxel, the assembled point cloud
// contains the first point that fell into it from any of the inserted point
// clouds.
class VoxelFilter {
 public:
  // 'size' is the length of a voxel edge.
  explicit VoxelFilter(float size);

  VoxelFilter(const VoxelFilter&) = delete;
  VoxelFilter& operator=(const VoxelFilter&) = delete;

  // Inserts a point cloud into the voxel filter.
  void InsertPointCloud(const PointCloud2D& point_cloud);

  // Returns the filtered point cloud representing the occupied voxels.
  const PointCloud2D& point_cloud() const;

 private:
  // Hash for a 2D voxel index. Mixes both coordinates (0x9e3779b9 is the
  // golden-ratio constant commonly used for hash combining) so neighboring
  // voxels spread across buckets.
  struct IntegerPairHash {
    size_t operator()(const std::pair<int64, int64>& x) const {
      const uint64 first = x.first;
      const uint64 second = x.second;
      return first ^ (first + 0x9e3779b9u + (second << 6) + (second >> 2));
    }
  };

  // Edge length of a voxel.
  const float size_;
  // Indices of voxels that already contain a point.
  std::unordered_set<std::pair<int64, int64>, IntegerPairHash> voxels_;
  // Points accepted by the filter, one per occupied voxel.
  PointCloud2D point_cloud_;
};
// The same as VoxelFilter but for 3D PointClouds.
class VoxelFilter3D {
 public:
  // 'size' is the length of a voxel edge.
  explicit VoxelFilter3D(float size);

  VoxelFilter3D(const VoxelFilter3D&) = delete;
  VoxelFilter3D& operator=(const VoxelFilter3D&) = delete;

  // Inserts a point cloud into the voxel filter.
  void InsertPointCloud(const PointCloud& point_cloud);

  // Returns the filtered point cloud representing the occupied voxels.
  const PointCloud& point_cloud() const;

 private:
  // Occupancy grid marking which voxels already contain a point; uses the
  // 3D hybrid grid instead of a hashed pair set.
  mapping_3d::HybridGridBase<uint8> voxels_;
  // Points accepted by the filter, one per occupied voxel.
  PointCloud point_cloud_;
};
// Parses AdaptiveVoxelFilterOptions from the given Lua dictionary.
proto::AdaptiveVoxelFilterOptions CreateAdaptiveVoxelFilterOptions(
    common::LuaParameterDictionary* const parameter_dictionary);

// Voxel filter whose behavior is driven by AdaptiveVoxelFilterOptions;
// stateless between calls, so a single instance can filter many clouds.
class AdaptiveVoxelFilter {
 public:
  explicit AdaptiveVoxelFilter(
      const proto::AdaptiveVoxelFilterOptions& options);

  AdaptiveVoxelFilter(const AdaptiveVoxelFilter&) = delete;
  AdaptiveVoxelFilter& operator=(const AdaptiveVoxelFilter&) = delete;

  // Returns a filtered copy of the given 2D point cloud.
  PointCloud2D Filter(const PointCloud2D& point_cloud) const;
  // Returns a filtered copy of the given 3D point cloud.
  PointCloud Filter(const PointCloud& point_cloud) const;

 private:
  const proto::AdaptiveVoxelFilterOptions options_;
};
} // namespace sensor
} // namespace cartographer
#endif // CARTOGRAPHER_SENSOR_VOXEL_FILTER_H_
<|start_filename|>cartographer/mapping_3d/sparse_pose_graph/optimization_problem.h<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CARTOGRAPHER_MAPPING_3D_SPARSE_POSE_GRAPH_OPTIMIZATION_PROBLEM_H_
#define CARTOGRAPHER_MAPPING_3D_SPARSE_POSE_GRAPH_OPTIMIZATION_PROBLEM_H_
#include <array>
#include <deque>
#include <map>
#include <vector>
#include "Eigen/Core"
#include "Eigen/Geometry"
#include "cartographer/common/lua_parameter_dictionary.h"
#include "cartographer/common/port.h"
#include "cartographer/common/time.h"
#include "cartographer/mapping/sparse_pose_graph/proto/optimization_problem_options.pb.h"
#include "cartographer/mapping_3d/imu_integration.h"
#include "cartographer/mapping_3d/submaps.h"
namespace cartographer {
namespace mapping_3d {
namespace sparse_pose_graph {
// Per-trajectory-node state used by the optimization problem: the pose as
// estimated before optimization and the pose being optimized.
struct NodeData {
  common::Time time;
  // Pose of the scan's point cloud before optimization.
  transform::Rigid3d initial_point_cloud_pose;
  // Pose updated by Solve().
  transform::Rigid3d point_cloud_pose;
};
// Implements the SPA (Sparse Pose Adjustment) loop closure method.
class OptimizationProblem {
 public:
  using Constraint = mapping::SparsePoseGraph::Constraint3D;

  explicit OptimizationProblem(
      const mapping::sparse_pose_graph::proto::OptimizationProblemOptions&
          options);
  ~OptimizationProblem();

  OptimizationProblem(const OptimizationProblem&) = delete;
  OptimizationProblem& operator=(const OptimizationProblem&) = delete;

  // Buffers an IMU measurement for use in the optimization.
  void AddImuData(common::Time time, const Eigen::Vector3d& linear_acceleration,
                  const Eigen::Vector3d& angular_velocity);
  // Appends a trajectory node with its pre-optimization pose estimates.
  void AddTrajectoryNode(common::Time time,
                         const transform::Rigid3d& initial_point_cloud_pose,
                         const transform::Rigid3d& point_cloud_pose);
  // Limits the number of solver iterations used by Solve().
  void SetMaxNumIterations(int32 max_num_iterations);

  // Computes the optimized poses. The point cloud at 'point_cloud_poses[i]'
  // belongs to 'trajectories[i]'. Within a given trajectory, scans are expected
  // to be contiguous.
  void Solve(const std::vector<Constraint>& constraints,
             const transform::Rigid3d& submap_0_transform,
             const std::vector<const mapping::Submaps*>& trajectories,
             std::vector<transform::Rigid3d>* submap_transforms);

  // Returns the nodes added so far, with poses updated by Solve().
  const std::vector<NodeData>& node_data() const;

 private:
  // Ceres cost functor evaluating one pose-graph constraint (SPA error term)
  // between poses c_i and c_j, given as rotation/translation blocks.
  class SpaCostFunction {
   public:
    explicit SpaCostFunction(const Constraint::Pose& pose) : pose_(pose) {}

    // Compute the error (linear offset and rotational error) without scaling
    // it by the covariance.
    template <typename T>
    static std::array<T, 6> ComputeUnscaledError(
        const transform::Rigid3d& zbar_ij, const T* const c_i_rotation,
        const T* const c_i_translation, const T* const c_j_rotation,
        const T* const c_j_translation);

    // Computes the error scaled by 'sqrt_Lambda_ij', storing it in 'e'.
    template <typename T>
    static void ComputeScaledError(const Constraint::Pose& pose,
                                   const T* const c_i_rotation,
                                   const T* const c_i_translation,
                                   const T* const c_j_rotation,
                                   const T* const c_j_translation, T* const e);

    template <typename T>
    bool operator()(const T* const c_i_rotation, const T* const c_i_translation,
                    const T* const c_j_rotation, const T* const c_j_translation,
                    T* const e) const;

   private:
    const Constraint::Pose pose_;
  };

  mapping::sparse_pose_graph::proto::OptimizationProblemOptions options_;
  std::deque<ImuData> imu_data_;
  std::vector<NodeData> node_data_;
  // Magnitude of gravity used by IMU-based cost terms, in m/s^2.
  double gravity_constant_ = 9.8;
};
} // namespace sparse_pose_graph
} // namespace mapping_3d
} // namespace cartographer
#endif // CARTOGRAPHER_MAPPING_3D_SPARSE_POSE_GRAPH_OPTIMIZATION_PROBLEM_H_
<|start_filename|>cartographer/common/math.h<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CARTOGRAPHER_COMMON_MATH_H_
#define CARTOGRAPHER_COMMON_MATH_H_
#include <cmath>
#include <vector>
#include "Eigen/Core"
#include "cartographer/common/port.h"
#include "ceres/ceres.h"
namespace cartographer {
namespace common {
// Clamps 'value' to be in the range ['min', 'max'].
template <typename T>
T Clamp(const T value, const T min, const T max) {
  // Check the upper bound first, mirroring the original branch ordering so
  // the result is unchanged even for degenerate ranges where min > max.
  return value > max ? max : (value < min ? min : value);
}
// Calculates 'base'^'exponent' by repeated multiplication.
// 'exponent' must be non-negative; a negative value never reaches the base
// case of the recursion.
template <typename T>
constexpr T Power(T base, int exponent) {
  return exponent == 0 ? T(1) : base * Power(base, exponent - 1);
}
// Calculates a^2 by direct multiplication (equivalent to Power(a, 2)).
template <typename T>
constexpr T Pow2(T a) {
  return a * a;
}
// Calculates the real part of the square root of 'a'. This is helpful when
// rounding errors generate a small negative argument. Otherwise std::sqrt
// returns NaN if its argument is negative.
template <typename T>
constexpr T RealSqrt(T a) {
  // Qualify as std::sqrt so overload resolution picks the overload matching
  // T (e.g. sqrt(float) for float) instead of an unqualified call that may
  // only resolve to the C library's ::sqrt(double).
  return std::sqrt(std::max(T(0.), a));
}
// Converts from degrees to radians.
constexpr double DegToRad(double deg) { return M_PI * deg / 180.; }

// Converts from radians to degrees.
constexpr double RadToDeg(double rad) { return 180. * rad / M_PI; }
// Brings the 'difference' between two angles into [-pi; pi] by adding or
// subtracting full turns until the value is inside the interval.
template <typename T>
T NormalizeAngleDifference(T difference) {
  for (; difference > M_PI; difference -= T(2. * M_PI)) {
  }
  for (; difference < -M_PI; difference += T(2. * M_PI)) {
  }
  return difference;
}
// Returns the angle of 'vector' relative to the positive x-axis, in radians.
// Uses ceres::atan2 so the result also works for Ceres Jet types (keeping
// the expression differentiable), in addition to plain floating-point types.
template <typename T>
T atan2(const Eigen::Matrix<T, 2, 1>& vector) {
  return ceres::atan2(vector.y(), vector.x());
}
// Computes 'A'^{-1/2} for A being symmetric, positive-semidefinite.
// Eigenvalues of 'A' are clamped to be at least 'lower_eigenvalue_bound'.
// Falls back to the identity matrix if the eigendecomposition fails.
template <int N>
Eigen::Matrix<double, N, N> ComputeSpdMatrixSqrtInverse(
    const Eigen::Matrix<double, N, N>& A, const double lower_eigenvalue_bound) {
  Eigen::SelfAdjointEigenSolver<Eigen::Matrix<double, N, N>>
      covariance_eigen_solver(A);
  if (covariance_eigen_solver.info() != Eigen::Success) {
    LOG(WARNING) << "SelfAdjointEigenSolver failed; A =\n" << A;
    return Eigen::Matrix<double, N, N>::Identity();
  }
  // Since we compute the inverse, we do not allow smaller values to avoid
  // infinity and NaN.
  // NOTE(review): despite the name, this is used as an absolute bound (it is
  // just 'lower_eigenvalue_bound'), not relative to the largest eigenvalue.
  const double relative_lower_bound = lower_eigenvalue_bound;
  // Reconstruct A^{-1/2} = V * diag(max(lambda, bound))^{-1/2} * V^{-1}.
  return covariance_eigen_solver.eigenvectors() *
         covariance_eigen_solver.eigenvalues()
             .cwiseMax(relative_lower_bound)
             .cwiseInverse()
             .cwiseSqrt()
             .asDiagonal() *
         covariance_eigen_solver.eigenvectors().inverse();
}
} // namespace common
} // namespace cartographer
#endif // CARTOGRAPHER_COMMON_MATH_H_
<|start_filename|>cartographer/mapping/sensor_collator_test.cc<|end_filename|>
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer/mapping/sensor_collator.h"
#include <memory>
#include "cartographer/common/lua_parameter_dictionary_test_helpers.h"
#include "cartographer/common/make_unique.h"
#include "cartographer/common/time.h"
#include "cartographer/sensor/proto/sensor.pb.h"
#include "gtest/gtest.h"
namespace cartographer {
namespace mapping {
namespace {
// Minimal sensor payload used to exercise the collator; carries only the
// originating frame id so delivery order can be verified per sensor.
struct TestData {
  string frame_id;
};
// Checks that the collator queues out-of-order packets and delivers them to
// the callback ordered by timestamp, releasing the remainder on Flush().
TEST(SensorCollator, Ordering) {
  TestData first{"horizontal_laser"};
  TestData second{"vertical_laser"};
  TestData third{"imu"};
  TestData fourth{"horizontal_laser"};
  TestData fifth{"vertical_laser"};
  TestData sixth{"something"};
  const std::unordered_set<string> frame_ids = {
      "horizontal_laser", "vertical_laser", "imu", "something"};
  std::vector<std::pair<int64, TestData>> received;
  SensorCollator<TestData> sensor_collator;
  sensor_collator.AddTrajectory(
      0, frame_ids,
      [&received](const int64 timestamp, std::unique_ptr<TestData> packet) {
        received.push_back(std::make_pair(timestamp, *packet));
      });
  // Packets arrive out of timestamp order on purpose.
  sensor_collator.AddSensorData(0, 100, first.frame_id,
                                common::make_unique<TestData>(first));
  sensor_collator.AddSensorData(0, 600, sixth.frame_id,
                                common::make_unique<TestData>(sixth));
  sensor_collator.AddSensorData(0, 400, fourth.frame_id,
                                common::make_unique<TestData>(fourth));
  sensor_collator.AddSensorData(0, 200, second.frame_id,
                                common::make_unique<TestData>(second));
  sensor_collator.AddSensorData(0, 500, fifth.frame_id,
                                common::make_unique<TestData>(fifth));
  sensor_collator.AddSensorData(0, 300, third.frame_id,
                                common::make_unique<TestData>(third));
  // ASSERT (not EXPECT) before indexing into 'received' to avoid
  // out-of-bounds access if fewer packets were delivered.
  ASSERT_EQ(3, received.size());
  EXPECT_EQ(100, received[0].first);
  EXPECT_EQ("horizontal_laser", received[0].second.frame_id);
  EXPECT_EQ(200, received[1].first);
  EXPECT_EQ("vertical_laser", received[1].second.frame_id);
  EXPECT_EQ(300, received[2].first);
  EXPECT_EQ("imu", received[2].second.frame_id);
  // Flush() must deliver the buffered remainder in timestamp order.
  sensor_collator.Flush();
  ASSERT_EQ(6, received.size());
  EXPECT_EQ(400, received[3].first);  // Was previously unchecked.
  EXPECT_EQ("horizontal_laser", received[3].second.frame_id);
  EXPECT_EQ(500, received[4].first);
  EXPECT_EQ("vertical_laser", received[4].second.frame_id);
  EXPECT_EQ(600, received[5].first);
  EXPECT_EQ("something", received[5].second.frame_id);
}
} // namespace
} // namespace mapping
} // namespace cartographer
| linghusmile/Cartographer_- |
<|start_filename|>pkg/webhook/default_server/seldondeployment/mutating/seldondeployment_create_update_handler.go<|end_filename|>
/*
Copyright 2019 The Seldon Team.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package mutating
import (
"context"
"encoding/json"
"fmt"
machinelearningv1alpha2 "github.com/seldonio/seldon-operator/pkg/apis/machinelearning/v1alpha2"
"github.com/seldonio/seldon-operator/pkg/utils"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"net/http"
"os"
"sigs.k8s.io/controller-runtime/pkg/runtime/inject"
"sigs.k8s.io/controller-runtime/pkg/webhook/admission"
"sigs.k8s.io/controller-runtime/pkg/webhook/admission/types"
"strconv"
)
// init registers this handler in the package-level HandlerMap under the
// mutating webhook's name so the webhook server can route admission
// requests for SeldonDeployments to it.
func init() {
	webhookName := "mutating-create-update-seldondeployment"
	// Initialize the slice on first registration for this webhook name.
	if HandlerMap[webhookName] == nil {
		HandlerMap[webhookName] = []admission.Handler{}
	}
	HandlerMap[webhookName] = append(HandlerMap[webhookName], &SeldonDeploymentCreateUpdateHandler{})
}
// SeldonDeploymentCreateUpdateHandler handles SeldonDeployment admission
// requests for create and update operations, applying defaulting mutations
// before the object is persisted.
type SeldonDeploymentCreateUpdateHandler struct {
	// To use the client, you need to do the following:
	// - uncomment it
	// - import sigs.k8s.io/controller-runtime/pkg/client
	// - uncomment the InjectClient method at the bottom of this file.
	// Client client.Client

	// Decoder decodes the raw admission request into a SeldonDeployment;
	// injected by the webhook framework via InjectDecoder.
	Decoder types.Decoder
}
// addDefaultsToGraph walks the inference graph rooted at 'pu' and fills in
// UNKNOWN defaults for any predictive unit missing a Type or Implementation.
func addDefaultsToGraph(pu *machinelearningv1alpha2.PredictiveUnit) {
	if pu.Type == nil {
		unknownType := machinelearningv1alpha2.UNKNOWN_TYPE
		pu.Type = &unknownType
	}
	if pu.Implementation == nil {
		unknownImpl := machinelearningv1alpha2.UNKNOWN_IMPLEMENTATION
		pu.Implementation = &unknownImpl
	}
	// Recurse by index so the children are mutated in place.
	for i := range pu.Children {
		addDefaultsToGraph(&pu.Children[i])
	}
}
// MutatingSeldonDeploymentFn applies defaulting to a SeldonDeployment before
// it is persisted: fills in UNKNOWN graph types/implementations, the default
// namespace and per-predictor version labels, assigns service ports to every
// container and predictive unit, wires endpoint host/port into the graph so
// the engine can route traffic, injects podinfo downward-API volumes, and
// adds containers for prepackaged model servers.
func (h *SeldonDeploymentCreateUpdateHandler) MutatingSeldonDeploymentFn(ctx context.Context, mlDep *machinelearningv1alpha2.SeldonDeployment) error {
	// First port assigned to a predictive unit; overridable via env var.
	var firstPuPortNum int32 = 9000
	var defaultMode = corev1.DownwardAPIVolumeSourceDefaultMode
	if env_preditive_unit_service_port, ok := os.LookupEnv("PREDICTIVE_UNIT_SERVICE_PORT"); ok {
		portNum, err := strconv.Atoi(env_preditive_unit_service_port)
		if err != nil {
			return err
		} else {
			firstPuPortNum = int32(portNum)
		}
	}
	nextPortNum := firstPuPortNum
	// Maps container / predictive-unit name -> assigned port so the same
	// name always resolves to the same port.
	portMap := map[string]int32{}
	if mlDep.ObjectMeta.Namespace == "" {
		mlDep.ObjectMeta.Namespace = "default"
	}
	for i := 0; i < len(mlDep.Spec.Predictors); i++ {
		// 'p' is a copy; it is written back to mlDep.Spec.Predictors[i]
		// after mutation.
		p := mlDep.Spec.Predictors[i]
		if p.Graph.Type == nil {
			ty := machinelearningv1alpha2.UNKNOWN_TYPE
			p.Graph.Type = &ty
		}
		// Add version label for predictor if not present
		if p.Labels == nil {
			p.Labels = map[string]string{}
		}
		if _, present := p.Labels["version"]; !present {
			p.Labels["version"] = p.Name
		}
		addDefaultsToGraph(p.Graph)
		// NOTE(review): debug output in the admission path; consider
		// removing or replacing with a leveled logger.
		fmt.Println("predictor is now")
		jstr, _ := json.Marshal(p)
		fmt.Println(string(jstr))
		mlDep.Spec.Predictors[i] = p
		for j := 0; j < len(p.ComponentSpecs); j++ {
			cSpec := mlDep.Spec.Predictors[i].ComponentSpecs[j]
			// Add downwardAPI volume exposing pod annotations to containers.
			cSpec.Spec.Volumes = append(cSpec.Spec.Volumes, corev1.Volume{Name: machinelearningv1alpha2.PODINFO_VOLUME_NAME, VolumeSource: corev1.VolumeSource{
				DownwardAPI: &corev1.DownwardAPIVolumeSource{Items: []corev1.DownwardAPIVolumeFile{
					{Path: "annotations", FieldRef: &corev1.ObjectFieldSelector{FieldPath: "metadata.annotations", APIVersion: "v1"}}}, DefaultMode: &defaultMode}}})
			// add service details for each container - looping this way as if containers in same pod and its the engine pod both need to be localhost
			for k := 0; k < len(cSpec.Spec.Containers); k++ {
				con := &cSpec.Spec.Containers[k]
				if _, present := portMap[con.Name]; !present {
					portMap[con.Name] = nextPortNum
					nextPortNum++
				}
				portNum := portMap[con.Name]
				pu := machinelearningv1alpha2.GetPredcitiveUnit(p.Graph, con.Name)
				if pu != nil {
					// Default to a REST endpoint when none was specified.
					if pu.Endpoint == nil {
						pu.Endpoint = &machinelearningv1alpha2.Endpoint{Type: machinelearningv1alpha2.REST}
					}
					var portType string
					if pu.Endpoint.Type == machinelearningv1alpha2.GRPC {
						portType = "grpc"
					} else {
						portType = "http"
					}
					// An explicitly declared container port wins over the
					// auto-assigned one.
					if con != nil {
						existingPort := utils.GetPort(portType, con.Ports)
						if existingPort != nil {
							portNum = existingPort.ContainerPort
						}
					}
					// Set ports and hostname in predictive unit so engine can read it from SDep
					// if this is the first componentSpec then it's the one to put the engine in - note using outer loop counter here
					if _, hasSeparateEnginePod := mlDep.Spec.Annotations[machinelearningv1alpha2.ANNOTATION_SEPARATE_ENGINE]; j == 0 && !hasSeparateEnginePod {
						pu.Endpoint.ServiceHost = "localhost"
					} else {
						containerServiceValue := machinelearningv1alpha2.GetContainerServiceName(mlDep, p, con)
						pu.Endpoint.ServiceHost = containerServiceValue + "." + mlDep.ObjectMeta.Namespace + ".svc.cluster.local."
					}
					pu.Endpoint.ServicePort = portNum
				}
			}
			// Add defaultMode to volumes ifnot set to ensure no changes when comparing later in controller
			for k := 0; k < len(cSpec.Spec.Volumes); k++ {
				vol := &cSpec.Spec.Volumes[k]
				if vol.Secret != nil && vol.Secret.DefaultMode == nil {
					var defaultMode = corev1.SecretVolumeSourceDefaultMode
					vol.Secret.DefaultMode = &defaultMode
				} else if vol.ConfigMap != nil && vol.ConfigMap.DefaultMode == nil {
					var defaultMode = corev1.ConfigMapVolumeSourceDefaultMode
					vol.ConfigMap.DefaultMode = &defaultMode
				} else if vol.DownwardAPI != nil && vol.DownwardAPI.DefaultMode == nil {
					var defaultMode = corev1.DownwardAPIVolumeSourceDefaultMode
					vol.DownwardAPI.DefaultMode = &defaultMode
				} else if vol.Projected != nil && vol.Projected.DefaultMode == nil {
					var defaultMode = corev1.ProjectedVolumeSourceDefaultMode
					vol.Projected.DefaultMode = &defaultMode
				}
			}
		}
		pus := machinelearningv1alpha2.GetPredictiveUnitList(p.Graph)
		//some pus might not have a container spec so pick those up
		for l := 0; l < len(pus); l++ {
			pu := pus[l]
			con := utils.GetContainerForPredictiveUnit(&p, pu.Name)
			// want to set host and port for engine to use in orchestration
			//only assign host and port if there's a container or it's a prepackaged model server
			if !utils.IsPrepack(pu) && (con == nil || con.Name == "") {
				continue
			}
			if _, present := portMap[pu.Name]; !present {
				portMap[pu.Name] = nextPortNum
				nextPortNum++
			}
			portNum := portMap[pu.Name]
			// Add a default REST endpoint if none provided
			// pu needs to have an endpoint as engine reads it from SDep in order to direct graph traffic
			// probes etc will be added later by controller
			if pu.Endpoint == nil {
				pu.Endpoint = &machinelearningv1alpha2.Endpoint{Type: machinelearningv1alpha2.REST}
			}
			var portType string
			if pu.Endpoint.Type == machinelearningv1alpha2.GRPC {
				portType = "grpc"
			} else {
				portType = "http"
			}
			if con != nil {
				existingPort := utils.GetPort(portType, con.Ports)
				if existingPort != nil {
					portNum = existingPort.ContainerPort
				}
				// Mount the podinfo volume so the container can read pod
				// annotations at runtime.
				con.VolumeMounts = append(con.VolumeMounts, corev1.VolumeMount{
					Name:      machinelearningv1alpha2.PODINFO_VOLUME_NAME,
					MountPath: machinelearningv1alpha2.PODINFO_VOLUME_PATH,
				})
			}
			// Set ports and hostname in predictive unit so engine can read it from SDep
			// if this is the firstPuPortNum then we've not added engine yet so put the engine in here
			if pu.Endpoint.ServiceHost == "" {
				if _, hasSeparateEnginePod := mlDep.Spec.Annotations[machinelearningv1alpha2.ANNOTATION_SEPARATE_ENGINE]; portNum == firstPuPortNum && !hasSeparateEnginePod {
					pu.Endpoint.ServiceHost = "localhost"
				} else {
					containerServiceValue := machinelearningv1alpha2.GetContainerServiceName(mlDep, p, con)
					pu.Endpoint.ServiceHost = containerServiceValue + "." + mlDep.ObjectMeta.Namespace + ".svc.cluster.local."
				}
			}
			if pu.Endpoint.ServicePort == 0 {
				pu.Endpoint.ServicePort = portNum
			}
			// for prepack servers we want to add a container name and image to correspond to grafana dashboards
			if utils.IsPrepack(pu) {
				existing := con != nil
				if !existing {
					con = &corev1.Container{
						Name: pu.Name,
						VolumeMounts: []corev1.VolumeMount{
							{
								Name:      machinelearningv1alpha2.PODINFO_VOLUME_NAME,
								MountPath: machinelearningv1alpha2.PODINFO_VOLUME_PATH,
							},
						},
					}
				}
				utils.SetImageNameForPrepackContainer(pu, con)
				// if new Add container to componentSpecs
				if !existing {
					if len(p.ComponentSpecs) > 0 {
						p.ComponentSpecs[0].Spec.Containers = append(p.ComponentSpecs[0].Spec.Containers, *con)
					} else {
						podSpec := machinelearningv1alpha2.SeldonPodSpec{
							Metadata: metav1.ObjectMeta{CreationTimestamp: metav1.Now()},
							Spec: corev1.PodSpec{
								Containers: []corev1.Container{*con},
							},
						}
						p.ComponentSpecs = []*machinelearningv1alpha2.SeldonPodSpec{&podSpec}
						// p is a copy so update the entry
						mlDep.Spec.Predictors[i] = p
					}
				}
			}
		}
	}
	return nil
}
// Compile-time check that the handler satisfies admission.Handler.
var _ admission.Handler = &SeldonDeploymentCreateUpdateHandler{}

// Handle handles admission requests: it decodes the SeldonDeployment,
// applies defaulting mutations to a deep copy, and returns a JSON patch
// between the original and the mutated object.
func (h *SeldonDeploymentCreateUpdateHandler) Handle(ctx context.Context, req types.Request) types.Response {
	obj := &machinelearningv1alpha2.SeldonDeployment{}

	err := h.Decoder.Decode(req, obj)
	if err != nil {
		return admission.ErrorResponse(http.StatusBadRequest, err)
	}
	// Mutate a deep copy so the original stays available for the patch diff.
	// Named 'mutated' rather than 'copy' to avoid shadowing the builtin.
	mutated := obj.DeepCopy()

	err = h.MutatingSeldonDeploymentFn(ctx, mutated)
	if err != nil {
		return admission.ErrorResponse(http.StatusInternalServerError, err)
	}
	return admission.PatchResponse(obj, mutated)
}
//var _ inject.Client = &SeldonDeploymentCreateUpdateHandler{}
//
//// InjectClient injects the client into the SeldonDeploymentCreateUpdateHandler
//func (h *SeldonDeploymentCreateUpdateHandler) InjectClient(c client.Client) error {
// h.Client = c
// return nil
//}
// Compile-time check that the handler supports decoder injection.
var _ inject.Decoder = &SeldonDeploymentCreateUpdateHandler{}

// InjectDecoder injects the decoder into the SeldonDeploymentCreateUpdateHandler;
// called by the webhook framework during setup.
func (h *SeldonDeploymentCreateUpdateHandler) InjectDecoder(d types.Decoder) error {
	h.Decoder = d
	return nil
}
| MichaelXcc/seldon-operator |
<|start_filename|>src/main/java/com/wiley/elements/types/NullTeasyElement.java<|end_filename|>
package com.wiley.elements.types;
import com.wiley.elements.*;
import com.wiley.elements.should.NullShould;
import com.wiley.elements.should.NullShouldImmediately;
import com.wiley.elements.should.Should;
import com.wiley.elements.types.locate.LocatableFactory;
import com.wiley.elements.waitfor.ElementWaitFor;
import com.wiley.elements.waitfor.NullElementWaitFor;
import com.wiley.elements.waitfor.NullElementWaitForImmediately;
import org.openqa.selenium.*;
import org.openqa.selenium.interactions.internal.Coordinates;
import java.util.List;
import static com.wiley.holders.DriverHolder.getDriver;
/**
* Represents element that is absent (not found)
*/
public class NullTeasyElement implements TeasyElement, org.openqa.selenium.interactions.Locatable {
private TeasyElementData elementData;
private Locatable locatable;
public NullTeasyElement(TeasyElementData elementData) {
this.elementData = elementData;
this.locatable = new LocatableFactory(elementData, getDriver()).get();
}
@Override
public Should should() {
return new NullShouldImmediately(this);
}
@Override
public Should should(SearchStrategy strategy) {
return new NullShould(elementData, new TeasyFluentWait<>(getDriver(), strategy), strategy);
}
@Override
public ElementWaitFor waitFor() {
return new NullElementWaitForImmediately(this);
}
@Override
public ElementWaitFor waitFor(SearchStrategy strategy) {
return new NullElementWaitFor(elementData, new TeasyFluentWait<>(getDriver(), strategy), strategy);
}
/*
All other methods of TeasyElement should throw an exception because it's not possible to
interact with element that does not exist
*/
@Override
public <X> X getScreenshotAs(OutputType<X> target) {
throw noSuchElementException();
}
@Override
public WebElement getWrappedWebElement() {
throw noSuchElementException();
}
@Override
public void submit() {
throw noSuchElementException();
}
@Override
public void sendKeys(CharSequence... charSequences) {
throw noSuchElementException();
}
@Override
public void clear() {
throw noSuchElementException();
}
@Override
public String getTagName() {
throw noSuchElementException();
}
@Override
public String getAttribute(String s) {
throw noSuchElementException();
}
@Override
public boolean isSelected() {
throw noSuchElementException();
}
@Override
public boolean isEnabled() {
throw noSuchElementException();
}
@Override
public boolean isStale() {
throw noSuchElementException();
}
@Override
public String getText() {
throw noSuchElementException();
}
@Override
public List<WebElement> findElements(By by) {
throw noSuchElementException();
}
@Override
public WebElement findElement(By by) {
throw noSuchElementException();
}
@Override
public boolean isDisplayed() {
throw noSuchElementException();
}
@Override
public Point getLocation() {
throw noSuchElementException();
}
@Override
public Dimension getSize() {
throw noSuchElementException();
}
@Override
public Rectangle getRect() {
throw noSuchElementException();
}
@Override
public String getCssValue(String s) {
throw noSuchElementException();
}
@Override
public Locatable getLocatable() {
return locatable;
}
@Override
public TeasyElementData getElementData() {
return elementData;
}
@Override
public TeasyElement getParent() {
throw noSuchElementException();
}
@Override
public TeasyElement getParent(int level) {
throw noSuchElementException();
}
@Override
public TeasyElement element(By by) {
throw noSuchElementException();
}
@Override
public TeasyElement element(By by, SearchStrategy strategy) {
throw noSuchElementException();
}
@Override
public TeasyElementList elements(By by) {
throw noSuchElementException();
}
@Override
public TeasyElementList elements(By by, SearchStrategy strategy) {
throw noSuchElementException();
}
@Override
public TeasyElement domElement(By by) {
throw noSuchElementException();
}
@Override
public TeasyElement domElement(By by, SearchStrategy strategy) {
throw noSuchElementException();
}
@Override
public TeasyElementList domElements(By by) {
throw noSuchElementException();
}
@Override
public TeasyElementList domElements(By by, SearchStrategy strategy) {
throw noSuchElementException();
}
@Override
public void click() {
throw noSuchElementException();
}
@Override
public Coordinates getCoordinates() {
throw noSuchElementException();
}
private NotFoundElException noSuchElementException() {
throw new NotFoundElException(elementData.getBy());
}
}
<|start_filename|>src/test/java/com/wiley/autotest/framework/tests/conditions/EmptyElementsList.java<|end_filename|>
package com.wiley.autotest.framework.tests.conditions;
import com.wiley.autotest.framework.config.BaseUnitTest;
import com.wiley.autotest.framework.pages.ConditionsPage;
import com.wiley.elements.NotFoundElException;
import org.testng.annotations.Test;
/**
 * Verifies behaviour of conditions invoked on an empty element list.
 */
public class EmptyElementsList extends BaseUnitTest {

    // A condition call on an empty list must fail with NotFoundElException.
    @Test(expectedExceptions = NotFoundElException.class)
    public void exceptionShouldBeThrowsForEmptyListAndConditionCall() {
        ConditionsPage page = openPage("emptyList.html", ConditionsPage.class);
        page.callConditionForEmptyList();
    }

    // An "absent" condition on an empty list is satisfied and must not throw.
    @Test
    public void noExceptionShouldBeThrownForEmptyListAndAbsent() {
        ConditionsPage page = openPage("emptyList.html", ConditionsPage.class);
        page.callAbsentConditionForEmptyList();
    }
}
<|start_filename|>src/main/java/com/wiley/elements/custom/Select.java<|end_filename|>
package com.wiley.elements.custom;
import com.wiley.elements.SearchStrategy;
import com.wiley.elements.TeasyElement;
import com.wiley.elements.find.VisibleElementLookUp;
import com.wiley.elements.types.TeasyElementList;
import com.wiley.holders.DriverHolder;
import org.openqa.selenium.By;
/**
 * Lightweight wrapper around a native HTML {@code <select>} element that
 * performs option selection by clicking visible {@code <option>} children
 * (instead of relying on Selenium's {@code Select} support class).
 */
public final class Select {

    private final TeasyElement el;

    /**
     * Wraps an already-located select element.
     *
     * @param el - the select element to wrap
     */
    public Select(TeasyElement el) {
        this.el = el;
    }

    /**
     * Locates a visible select element using the default {@link SearchStrategy}.
     *
     * @param locator - locator of the select element
     */
    public Select(By locator) {
        this(new VisibleElementLookUp(DriverHolder.getDriver(),
                new SearchStrategy()).find(locator));
    }

    /**
     * Selects the option at the given zero-based index by clicking it.
     * TODO: skip the click when the option is already selected.
     *
     * @param index - zero-based option index
     */
    public void selectByIndex(int index) {
        getOptions().get(index).click();
    }

    /**
     * Selects the first option whose visible text equals the given text.
     * Does nothing when no option matches.
     *
     * @param text - exact visible text of the option to select
     */
    public void selectByVisibleText(String text) {
        // Loop variable renamed from "el" — the original shadowed the field.
        for (TeasyElement option : getOptions()) {
            if (option.getText().equals(text)) {
                option.click();
                return;
            }
        }
    }

    /** @return all {@code <option>} children of the wrapped select. */
    private TeasyElementList getOptions() {
        return this.el.elements(By.tagName("option"));
    }
}
<|start_filename|>src/main/java/com/wiley/elements/TeasyElement.java<|end_filename|>
package com.wiley.elements;
import com.wiley.elements.should.Should;
import com.wiley.elements.types.Locatable;
import com.wiley.elements.types.TeasyElementList;
import com.wiley.elements.waitfor.ElementWaitFor;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import java.util.List;
/**
* Teasy representation of a {@link WebElement}
*/
public interface TeasyElement extends WebElement {
/**
* Do not use this method. It does not fit the TeasyElement concept and is only here because
* it is originally a method of WebElement.
* Use {@link #elements(By)} or {@link #domElements(By)} instead.
*/
@Override
@Deprecated
List<WebElement> findElements(By by);
/**
* Do not use this method. It does not fit the TeasyElement concept and is only here because
* it is originally a method of WebElement.
* Use {@link #element(By)} or {@link #domElement(By)} instead.
*/
@Override
@Deprecated
WebElement findElement(By by);
/**
* Gets the original web element
*
* @return pure {@link WebElement}
*/
WebElement getWrappedWebElement();
/**
* Tells you whether element is stale (i.e. was detached from DOM)
*
* @return true - in case of a stale element, false - otherwise
*/
boolean isStale();
/**
* Gets {@link Locatable} interface
* Used for interactions with location of an element
*
* @return instance of {@link Locatable}
*/
Locatable getLocatable();
/**
* Gets data used for element creation (e.g. search context, locator)
*
* @return - {@link TeasyElementData}
*/
TeasyElementData getElementData();
/**
* {@link #should(SearchStrategy)} with default {@link SearchStrategy}
*/
Should should();
/**
* Calls assertion engine with custom settings passed via {@link SearchStrategy}
*
* @param strategy - custom settings for assertion
* @return instance of {@link Should}
*/
Should should(SearchStrategy strategy);
/**
* {@link #waitFor(SearchStrategy)} with default {@link SearchStrategy}
*/
ElementWaitFor waitFor();
/**
* Calls waiting engine with custom settings passed via {@link SearchStrategy}
*
* @param strategy - custom settings for waiting
* @return instance of {@link ElementWaitFor}
*/
ElementWaitFor waitFor(SearchStrategy strategy);
/**
* Gets parent element
*
* @return - {@link TeasyElement}
*/
TeasyElement getParent();
/**
* Gets parent element of N-th level
* For the <el3> from below if we call getParent(2)
* will return TeasyElement for <el1>
* <el>
* <el1>
* <el2>
* <el3>example DOM structure</el3>
* </el2>
* </el1>
* </el>
*
* @param level - levels up of a parenting
* @return - {@link TeasyElement}
*/
TeasyElement getParent(int level);
/**
* {@link #element(By, SearchStrategy)} with default {@link SearchStrategy}
*/
TeasyElement element(By by);
/**
* Finds first visible element using locator {@link By}
* and custom search strategy {@link SearchStrategy}
*
* @param by - locator
* @param strategy - custom search strategy
* @return - {@link TeasyElement}
*/
TeasyElement element(By by, SearchStrategy strategy);
/**
* {@link #elements(By, SearchStrategy)} with default {@link SearchStrategy}
*/
TeasyElementList elements(By by);
/**
* Finds visible elements using locator {@link By}
* and custom search strategy {@link SearchStrategy}
*
* @param by - locator
* @param strategy - custom search strategy
* @return - {@link TeasyElementList}
*/
TeasyElementList elements(By by, SearchStrategy strategy);
/**
* {@link #domElement(By, SearchStrategy)} with default {@link SearchStrategy}
*/
TeasyElement domElement(By by);
/**
* Finds first dom element using locator {@link By}
* and custom search strategy {@link SearchStrategy}
* note:
* dom element is just an element present in dom
* which is not necessarily visible
*
* @param by - locator
* @param strategy - custom search strategy
* @return - {@link TeasyElement}
*/
TeasyElement domElement(By by, SearchStrategy strategy);
/**
* {@link #domElements(By, SearchStrategy)} with default {@link SearchStrategy}
*/
TeasyElementList domElements(By by);
/**
* Finds dom elements using locator {@link By}
* and custom search strategy {@link SearchStrategy}
* note:
* dom element is just an element present in dom
* which is not necessarily visible
*
* @param by - locator
* @param strategy - custom search strategy
* @return {@link TeasyElementList}
*/
TeasyElementList domElements(By by, SearchStrategy strategy);
}
<|start_filename|>src/test/java/com/wiley/autotest/framework/tests/elements/DragAndDropElement.java<|end_filename|>
package com.wiley.autotest.framework.tests.elements;
import com.wiley.autotest.framework.config.BaseUnitTest;
import com.wiley.autotest.framework.pages.TestElementPage;
import org.testng.annotations.Test;
/**
 * Verifies JS-based drag and drop against the dedicated test page.
 */
public class DragAndDropElement extends BaseUnitTest {

    @Test
    public void testDragAndDrop() {
        TestElementPage page = openPage("dragAndDrop.html", TestElementPage.class);
        page.checkDragAndDrop();
    }
}
<|start_filename|>src/main/java/com/wiley/utils/JsActions.java<|end_filename|>
package com.wiley.utils;
import com.wiley.holders.DriverHolder;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.WebElement;
/**
* Javascript actions through WebDriver.
* A wrapper around the WebDriver taken from {@link DriverHolder}.
*/
public final class JsActions {
// Static utility class — never instantiated.
private JsActions() {
}
/**
* Emulates HTML5 drag and drop entirely in the browser by dispatching
* synthetic mouse/drag events on the two elements (native Actions-based
* drag and drop does not work reliably for HTML5 draggables).
* Note: the injected script schedules dragover/drop with setTimeout, so the
* drop completes asynchronously ~200ms after this method returns.
*
* @param dragElement - element to drag
* @param dropTo - element to drop onto
*/
public static void dragAndDrop(WebElement dragElement, WebElement dropTo) {
executeScript(
"function dnd(elemDrag, elemDrop) {\n" +
" var DELAY_INTERVAL_MS = 100;\n" +
" var MAX_TRIES = 10;\n" +
" var dragStartEvent;\n" +
" if (!elemDrag || !elemDrop) {\n" +
" return false;\n" +
" }\n" +
" function fireMouseEvent(type, elem, dataTransfer) {\n" +
" var evt = document.createEvent('MouseEvents');\n" +
" evt.initMouseEvent(type, true, true, window, 1, 1, 1, 0, 0, false, false, false, false, 0, elem);\n" +
" if (/^dr/i.test(type)) {\n" +
" evt.dataTransfer = dataTransfer || createNewDataTransfer();\n" +
" }\n" +
" elem.dispatchEvent(evt);\n" +
" return evt;\n" +
" }\n" +
" function createNewDataTransfer() {\n" +
" var data = {};\n" +
" return {\n" +
" clearData: function (key) {\n" +
" if (key === undefined) {\n" +
" data = {};\n" +
" } else {\n" +
" delete data[key];\n" +
" }\n" +
" },\n" +
" getData: function (key) {\n" +
" return data[key];\n" +
" },\n" +
" setData: function (key, value) {\n" +
" data[key] = value;\n" +
" },\n" +
" setDragImage: function () {\n" +
" },\n" +
" dropEffect: 'none',\n" +
" files: [],\n" +
" items: [],\n" +
" types: []\n" +
" }\n" +
" }\n" +
" fireMouseEvent('mousedown', elemDrag);\n" +
" dragStartEvent = fireMouseEvent('dragstart', elemDrag);\n" +
" function dragover() {\n" +
" fireMouseEvent('dragover', elemDrop, dragStartEvent.dataTransfer);\n" +
" }\n" +
" function drop() {\n" +
" fireMouseEvent('drop', elemDrop, dragStartEvent.dataTransfer);\n" +
" fireMouseEvent('mouseup', elemDrop);\n" +
" fireMouseEvent('dragend', elemDrag);\n" +
" }\n" +
" setTimeout(dragover, DELAY_INTERVAL_MS);\n" +
" setTimeout(drop, DELAY_INTERVAL_MS * 2);\n" +
" return true;\n" +
"}\n" +
" dnd(arguments[0], arguments[1])",
dragElement, dropTo);
}
/**
* {@link JavascriptExecutor#executeScript(String, Object...)}
*/
public static Object executeScript(String script, Object... args) {
return driver().executeScript(script, args);
}
/**
* {@link JavascriptExecutor#executeAsyncScript(String, Object...)}
*/
public static Object executeAsyncScript(String script, Object... args) {
return driver().executeAsyncScript(script, args);
}
// The shared driver is assumed to implement JavascriptExecutor.
private static JavascriptExecutor driver() {
return (JavascriptExecutor) DriverHolder.getDriver();
}
}
<|start_filename|>src/main/java/com/wiley/driver/WebDriverFactory.java<|end_filename|>
package com.wiley.driver;
import com.wiley.config.Configuration;
import com.wiley.driver.factory.TeasyDriver;
import com.wiley.driver.frames.FramesTransparentWebDriver;
import com.wiley.driver.frames.WebDriverDecorator;
import com.wiley.holders.DriverHolder;
import com.wiley.holders.TestParamsHolder;
import com.wiley.utils.Report;
import io.appium.java_client.AppiumDriver;
import io.appium.java_client.android.AndroidDriver;
import io.appium.java_client.ios.IOSDriver;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.remote.SessionId;
import java.net.InetAddress;
import java.net.URL;
import static com.wiley.holders.DriverHolder.getDriver;
/**
* User: ntyukavkin
* Date: 12.04.2018
* Time: 17:22
*
* Creates and recycles the per-thread WebDriver: restarts a dead browser,
* restarts after a configured number of tests, and retries driver creation
* a limited number of times before giving up.
*/
public class WebDriverFactory {
private static final int START_COUNT = 0;
// Per-thread counter of failed driver-creation attempts (drives the retry loop).
private static final ThreadLocal<Integer> tryToCreateDriverCount = ThreadLocal.withInitial(() -> START_COUNT);
// Per-thread counter of tests executed since the driver was last (re)started.
private static final ThreadLocal<Integer> restartDriverAfterNumberOfTests = ThreadLocal.withInitial(() -> START_COUNT);
/**
* Ensures a live driver exists for the current thread, quitting and
* re-creating it when the browser died or the restart threshold was reached.
*
* @param extraCaps - extra capabilities merged into the new driver
*/
public static void initDriver(DesiredCapabilities extraCaps) {
restartDriverAfterNumberOfTests.set(restartDriverAfterNumberOfTests.get() + 1);
if (getDriver() != null && (isBrowserDead() || isNeedToRestartDriver())) {
quitWebDriver();
}
if (getDriver() == null) {
try {
FramesTransparentWebDriver driver = createDriver(extraCaps);
addShutdownHook(driver);
setGridParams(driver);
setDriverParams(driver);
setMobileParams(driver);
} catch (Throwable t) {
// Creation failed — retry up to the configured limit, then rethrow.
lastTryToCreateDriver(t);
}
}
}
/**
* {@link #initDriver(DesiredCapabilities)} with no extra capabilities.
*/
public static void initDriver(){
initDriver(new DesiredCapabilities());
}
// Builds the underlying driver and wraps it so frames are handled transparently.
private static FramesTransparentWebDriver createDriver(DesiredCapabilities extraCaps) {
TeasyDriver teasyDriver = new TeasyDriver();
return new FramesTransparentWebDriver(teasyDriver.init(extraCaps));
}
// Recursively retries initDriver() until tryToStartDriverCount is exhausted,
// then fails with the last creation error.
private static void lastTryToCreateDriver(Throwable t) {
if (tryToCreateDriverCount.get() < Configuration.tryToStartDriverCount) {
tryToCreateDriverCount.set(tryToCreateDriverCount.get() + 1);
initDriver();
} else {
Integer passCount = tryToCreateDriverCount.get();
tryToCreateDriverCount.set(START_COUNT);
throw new WebDriverException("Unable to init driver after " + passCount + " attempts! Cause: " + t.getMessage(), t);
}
}
// Publishes the new driver to the per-thread holder.
private static void setDriverParams(FramesTransparentWebDriver driver) {
DriverHolder.setDriver(driver);
}
// Publishes Appium/Android/iOS views of the driver when running on mobile.
private static void setMobileParams(FramesTransparentWebDriver driver) {
AndroidDriver androidDriver = castToAndroidDriver(driver);
IOSDriver iosDriver = castToIOSDriver(driver);
if (androidDriver != null || iosDriver != null) {
DriverHolder.setAppiumDriver((AppiumDriver) castToWebDriverDecorator(driver));
DriverHolder.setAndroidDriver(androidDriver);
DriverHolder.setIOSDriver(iosDriver);
}
}
// Records the remote session id and the node IP (grid node or local host).
// Failures here are non-fatal: they only affect reporting.
private static void setGridParams(FramesTransparentWebDriver driver) {
try {
SessionId sessionId = ((RemoteWebDriver) driver.getDriver()).getSessionId();
TestParamsHolder.setSessionId(sessionId);
String nodeIp = Configuration.runWithGrid ? new GridApi(new URL(Configuration.gridHubUrl), sessionId).getNodeIp() : InetAddress.getLocalHost().getHostAddress();
TestParamsHolder.setNodeIP(nodeIp);
} catch (Throwable ignored) {
Report.jenkins("Throwable occurs when set node id.", ignored);
}
}
/**
* Checks whether browser is dead. Used to catch
* situations like "Error communicating with the remote browser. It may have died." exceptions
*
* @return true if browser is dead
*/
private static boolean isBrowserDead() {
try {
// Any cheap round-trip to the browser works as a liveness probe;
// getCurrentUrl is not supported by Appium, hence the split.
if (((FramesTransparentWebDriver) getDriver()).getWrappedDriver() instanceof AppiumDriver) {
getDriver().getPageSource();
} else {
getDriver().getCurrentUrl();
}
return false;
} catch (Throwable t) {
Report.jenkins("*****BROWSER IS DEAD ERROR***** ", t);
return true;
}
}
// True once more than Configuration.restartCount tests ran on this driver.
private static boolean isNeedToRestartDriver() {
return restartDriverAfterNumberOfTests.get() > Configuration.restartCount;
}
// Quits the current driver (best effort) and clears the holder so a fresh
// driver is created on the next initDriver() call.
private static void quitWebDriver() {
restartDriverAfterNumberOfTests.set(START_COUNT);
try {
getDriver().quit();
} catch (Throwable t) {
Report.jenkins("*****TRYING TO QUIT DRIVER***** ", t);
}
DriverHolder.setDriver(null);
}
// Returns the wrapped driver as AndroidDriver, or null when it is not one.
private static AndroidDriver castToAndroidDriver(WebDriver driver) {
WebDriver castToWebDriverDecorator = castToWebDriverDecorator(driver);
if (castToWebDriverDecorator instanceof AndroidDriver) {
return (AndroidDriver) castToWebDriverDecorator;
} else {
return null;
}
}
// Returns the wrapped driver as IOSDriver, or null when it is not one.
private static IOSDriver castToIOSDriver(WebDriver driver) {
WebDriver castToWebDriverDecorator = castToWebDriverDecorator(driver);
if (castToWebDriverDecorator instanceof IOSDriver) {
return (IOSDriver) castToWebDriverDecorator;
} else {
return null;
}
}
// Unwraps one decorator layer to reach the concrete driver.
private static WebDriver castToWebDriverDecorator(WebDriver driver) {
return ((WebDriverDecorator) driver).getWrappedDriver();
}
// Guarantees the browser is closed when the JVM exits.
private static void addShutdownHook(final WebDriver driver) {
Runtime.getRuntime().addShutdownHook(new Thread(driver::quit));
}
}
<|start_filename|>src/main/java/com/wiley/elements/NotFoundElException.java<|end_filename|>
package com.wiley.elements;
import org.openqa.selenium.By;
/**
 * Thrown when any action on {@link com.wiley.elements.types.NullTeasyElement} is performed,
 * i.e. when an element could not be found by its locator.
 */
public class NotFoundElException extends RuntimeException {

    /**
     * @param locator - the locator that failed to match any element
     */
    public NotFoundElException(By locator) {
        // "locator" (was: "locatable") — the argument is the search locator.
        super("Unable to find element with locator '" + locator + "'");
    }
}
<|start_filename|>src/main/java/com/wiley/driver/factory/capabilities/MacSafariCaps.java<|end_filename|>
package com.wiley.driver.factory.capabilities;
import org.openqa.selenium.MutableCapabilities;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.safari.SafariOptions;
/**
 * Capabilities for Safari on macOS.
 */
public class MacSafariCaps extends TeasyCaps {

    public MacSafariCaps(DesiredCapabilities customCaps) {
        super(customCaps);
    }

    /**
     * Builds default Safari options merged with the user-supplied capabilities.
     */
    @Override
    public MutableCapabilities get() {
        SafariOptions safariOptions = new SafariOptions();
        return safariOptions.merge(customCaps);
    }
}
| Moose0621/teasy |
<|start_filename|>camera-card.js<|end_filename|>
// Resolve LitElement without importing it: reuse the global if one exists,
// otherwise borrow the base class from an element Home Assistant has already
// registered. (The self-reference works because `var` is hoisted.)
var LitElement =
LitElement ||
Object.getPrototypeOf(customElements.get("home-assistant-main"));
var html = LitElement.prototype.html;
// Lovelace custom card that embeds a live camera feed and opens the
// more-info dialog when clicked.
class CameraCard extends LitElement {
// Reactive properties: `hass` (Home Assistant object) and `_config` (card config).
static get properties() {
return {
hass: {},
_config: {}
};
}
// Re-render only when the config changed or the tracked entity's state changed.
shouldUpdate(changedProps) {
if (changedProps.has("_config")) {
return true;
}
const oldHass = changedProps.get("hass");
if (oldHass) {
return (
oldHass.states[this._config.entity] !==
this.hass.states[this._config.entity]
);
}
return true;
}
// Approximate card height in rows, used by Lovelace layout.
getCardSize() {
return 6;
}
// Called by Lovelace with the user's YAML config; no validation is performed here.
setConfig(config) {
this._config = config;
}
render() {
// Nothing to render until both config and hass have been provided.
if (!this._config || !this.hass) {
return html``;
}
const stateObj = this.hass.states[this._config.entity];
// Show a warning card when the configured entity does not exist.
if (!stateObj) {
return html`
${this.renderStyle()}
<ha-card>
<div class="warning">
Entity not available: ${this._config.entity}
</div>
</ha-card>
`;
}
return html`
${this.renderStyle()}
<ha-card .header=${this._config.name}>
<more-info-camera
.hass="${this.hass}"
.stateObj="${stateObj}"
@click="${this._moreInfo}"
></more-info-camera>
</ha-card>
`;
}
renderStyle() {
return html`
<style>
.warning {
display: block;
color: black;
background-color: #fce588;
padding: 8px;
}
more-info-camera {
cursor: pointer;
}
</style>
`;
}
// Opens the standard more-info dialog for the configured entity.
_moreInfo() {
this._fireEvent(this, "hass-more-info", {
entityId: this._config.entity
});
}
// Dispatches a DOM CustomEvent-style event; bubbles/composed default to true
// so it crosses shadow-DOM boundaries up to Home Assistant.
_fireEvent(node, type, detail, options) {
options = options || {};
detail = detail === null || detail === undefined ? {} : detail;
const event = new Event(type, {
bubbles: options.bubbles === undefined ? true : options.bubbles,
cancelable: Boolean(options.cancelable),
composed: options.composed === undefined ? true : options.composed
});
event.detail = detail;
node.dispatchEvent(event);
return event;
}
}
// Register the card so Lovelace can instantiate <camera-card> elements.
customElements.define("camera-card", CameraCard);
| custom-cards/camera-card |
<|start_filename|>src/electron.js<|end_filename|>
#!/usr/bin/env node
const electron = require('electron')
let electronApp = null
if (electron) {
electronApp = electron.app
}
let Menu = null
let dialog = null
if (electron) {
Menu = electron.Menu
dialog = electron.dialog
}
const BrowserWindow = electron.BrowserWindow
let getFileFromUser = null
let visifile = null
const path = require("path");
const url = require('url');
var fork = require2('child_process');
var fs = require2('fs');
var ip = require2('ip');
var isWin = /^win/.test(process.platform);
var isLinux = /^linux/.test(process.platform);
var isMac = /^darwin/.test(process.platform);
var mainNodeProcessStarted = false;
var restRoutes = new Object()
var envVars = new Object()
var systemReady = false;
var httpServer = null;
var username = "Unknown user";
var isDocker = require2('is-docker');
var ls = require2('ls-sync');
var rimraf = require2("rimraf");
let forge = require2('node-forge');
var pidusage = require2("pidusage");
var fs = require2('fs');
var mkdirp = require2('mkdirp')
var rmdir = require2('rmdir-sync');
var uuidv1 = require2('uuid/v1');
var fork = require2('child_process');
var express = require2('express')
var http = require2('http')
var https = require2('https');
var app = express()
var startupType = null
var startupDelay = 0
var isCodeTtyCode = false
var yazzInstanceId = uuidv1()
let certOptions = null
var expressWs = require2('express-ws')(app);
outputDebug("__filename: " + __filename)
outputDebug("__dirname: " + __dirname)
let nodeModulesPath = process.cwd()
if (process.execPath) {
let vjsPos = process.execPath.indexOf("vjs")
if (vjsPos != -1) {
let vjsLen = process.execPath.length - vjsPos
nodeModulesPath = process.execPath.substring(0, process.execPath.length - vjsLen);
}
}
//console.log("process.cwd(): " + process.cwd())
//console.log("nodeModulesPath: " + nodeModulesPath)
//console.log("process.execPath: " + process.execPath)
//console.log("")
//console.log("nodeModulesPath: " + nodeModulesPath)
//console.log("")
outputDebug("Platform: " + process.platform)
outputDebug("process.env.OPENSHIFT_NODEJS_IP:= " + process.env.OPENSHIFT_NODEJS_IP)
if (process.env.OPENSHIFT_NODEJS_IP) {
username = "node"
} else {
username = "node"
//if (isValidObject(os) && isValidObject(os.userInfo()) && isValidObject(os.userInfo().username)) {
// username = os.userInfo().username.toLowerCase();
//}
}
var LOCAL_HOME = process.env.HOME
outputDebug('LOCAL_HOME:' + LOCAL_HOME);
// Writes a line of text into the Electron window (when one is open) by
// injecting a document.write call; falls back to stdout otherwise.
function outputToBrowser(txt) {
//var line = txt.toString().replace(/\'|\"|\n|\r"/g , "").toString()
// Strip single quotes so the text can be embedded safely in the JS string below.
let line = txt.toString().replace(/\'/g , "").toString()
let jsc = "document.write('<br>" + "" + line + " ')"
//console.log(line);
if (visifile) {
if (visifile.webContents) {
// NOTE(review): assumes the renderer is ready to execute JS — verify timing.
visifile.webContents.executeJavaScript(jsc);
}
} else {
console.log(txt)
}
}
//
// We set the HOME environment variable if we are running in OpenShift
//
outputDebug('DOCKER CHECK...');
if (isDocker()) {
outputDebug('Running inside a Linux container');
} else {
outputDebug('NOT running inside a Linux container');
}
if (!isValidObject(LOCAL_HOME) || (LOCAL_HOME == "/")) {
LOCAL_HOME = "/home/node"
}
// Loads a module: under Electron the normal resolver is used; outside
// Electron the path is made relative to the current directory so the
// packaged binary can locate its bundled dependencies.
function require2(npath) {
  return electronApp ? require(npath) : require(path.join(".", npath))
}
try {
if (isDocker()) {
} else if (electronApp){
console.log("Running in Electron")
let srcElectronDriver = path.join(nodeModulesPath,'node_modules/sqlite3/lib/binding/electron-v11.3-darwin-x64/node_sqlite3.node')
let destElectronPath = path.join(nodeModulesPath, 'node_modules/sqlite3/lib/binding/electron-v12.0-darwin-x64/')
let destElectronDriver = path.join(nodeModulesPath, 'node_modules/sqlite3/lib/binding/electron-v12.0-darwin-x64/node_sqlite3.node')
//mkdirp.sync( destElectronPath );
//copyFileSync( srcElectronDriver , destElectronDriver );
destElectronPath = path.join(nodeModulesPath, 'node_modules/sqlite3/lib/binding/electron-v12.0-darwin-x64/')
destElectronDriver = path.join(nodeModulesPath, 'node_modules/sqlite3/lib/binding/electron-v12.0-darwin-x64/node_sqlite3.node')
//mkdirp.sync( destElectronPath );
//copyFileSync( srcElectronDriver , destElectronDriver );
} else if (process.env["KUBERNETES_SERVICE_HOST"]) {
} else if (isWin) {
let pathWindows = path.join(nodeModulesPath,'node_modules\\sqlite3\\lib\\binding\\node-v64-win32-x64\\node_sqlite3.node')
let srcNodeJsFile = path.join(__dirname,'../node_sqlite3_win64.rename')
try {
fs.accessSync(srcNodeJsFile, fs.constants.R_OK | fs.constants.W_OK);
//console.log('can read/write ' + srcNodeJsFile);
fs.accessSync(pathWindows, fs.constants.R_OK | fs.constants.W_OK);
} catch (err) {
console.log("Setting up Visual Javascript to run for the first time. Please wait a few minutes...")
outputDebug('no access to ' + pathWindows + '!');
outputDebug("Creating Windows driver")
let curSource= path.join(__dirname,'..\\node_modules\\')
let targetFolder= path.join(nodeModulesPath,'')
if (curSource != targetFolder) {
let destNodeJsFile = path.join(nodeModulesPath,'node_modules\\sqlite3\\lib/binding\\node-v64-win32-x64\\node_sqlite3.node')
let destNodeJsFolder = path.join(nodeModulesPath,'node_modules\\sqlite3\\lib/binding\\node-v64-win32-x64\\')
//console.log("srcNodeJsFile: " + srcNodeJsFile)
//console.log("destNodeJsFile: " + destNodeJsFile)
//console.log("curSource: " + curSource)
//console.log("targetFolder: " + targetFolder)
//console.log("destNodeJsFolder: " + destNodeJsFolder)
mkdirp.sync(targetFolder);
copyFolderRecursiveSync( curSource, targetFolder );
mkdirp.sync(destNodeJsFolder);
copyFileSync( srcNodeJsFile, destNodeJsFile );
}
}
} else if (isMac) {
let pathMac = path.join(nodeModulesPath,'node_modules/sqlite3/lib/binding/node-v64-darwin-x64/node_sqlite3.node')
let srcNodeJsFile = path.join(__dirname,'../node_sqlite3_macos64.rename')
try {
fs.accessSync(srcNodeJsFile, fs.constants.R_OK | fs.constants.W_OK);
//console.log('can read/write ' + srcNodeJsFile);
fs.accessSync(pathMac, fs.constants.R_OK | fs.constants.W_OK);
outputDebug('can read/write ' + pathMac);
} catch (err) {
console.log("Setting up Visual Javascript to run for the first time. Please wait a few minutes...")
outputDebug('no access to ' + pathMac + '!');
outputDebug("Creating Mac driver")
let curSource= path.join(__dirname,'../node_modules/')
let targetFolder= path.join(nodeModulesPath,'')
//console.log("curSource: " + curSource)
//console.log("targetFolder: " + targetFolder)
if (curSource != targetFolder) {
//mkdirp.sync(path.join(__dirname,'../node_modules/sqlite3/lib/binding/node-v64-darwin-x64'));
var destNodeJsFile = path.join(nodeModulesPath,'node_modules/sqlite3/lib/binding/node-v64-darwin-x64/node_sqlite3.node')
//console.log("srcNodeJsFile: " + srcNodeJsFile)
//console.log("destNodeJsFile: " + destNodeJsFile)
mkdirp.sync(targetFolder);
copyFolderRecursiveSync( curSource, targetFolder );
copyFileSync( srcNodeJsFile, destNodeJsFile );
}
}
// otherwise assume that this is linux 64 bit
} else {
let pathLinux = path.join(nodeModulesPath,'node_modules/sqlite3/lib/binding/node-v64-linux-x64/node_sqlite3.node')
let srcNodeJsFile = path.join(__dirname,'../node_sqlite3_linux64.rename')
try {
fs.accessSync(srcNodeJsFile, fs.constants.R_OK | fs.constants.W_OK);
//console.log('can read/write ' + srcNodeJsFile);
fs.accessSync(pathLinux, fs.constants.R_OK | fs.constants.W_OK);
outputDebug('can read/write ' + pathLinux);
} catch (err) {
console.log("Setting up Visual Javascript to run for the first time. Please wait a few minutes...")
outputDebug('no access to ' + pathLinux + '!');
outputDebug("Creating Linux driver")
let curSource= path.join(__dirname,'../node_modules/')
let targetFolder= path.join(nodeModulesPath,'')
//console.log("curSource: " + curSource)
//console.log("targetFolder: " + targetFolder)
if (curSource != targetFolder) {
//mkdirp.sync(path.join(__dirname,'../node_modules/sqlite3/lib/binding/node-v64-darwin-x64'));
var destNodeJsFile = path.join(nodeModulesPath,'node_modules/sqlite3/lib/binding/node-v64-linux-x64/node_sqlite3.node')
//console.log("srcNodeJsFile: " + srcNodeJsFile)
//console.log("destNodeJsFile: " + destNodeJsFile)
mkdirp.sync(targetFolder);
copyFolderRecursiveSync( curSource, targetFolder );
copyFileSync( srcNodeJsFile, destNodeJsFile );
}
}
}
} catch(err){
console.log(err)
}
var request = require2("request");
var perf = require('./perf')
var compression = require2('compression')
var program = require2('commander');
var bodyParser = require2('body-parser');
var multer = require2('multer');
var cors = require2('cors')
var saveHelper = require('./save_helpers')
let sqlNodePath = path.join(nodeModulesPath,'node_modules/sqlite3')
//console.log("sqlNodePath: " + sqlNodePath)
var sqlite3 = null
if (electronApp){
sqlite3 = require("sqlite3");
} else {
sqlite3 = require(sqlNodePath);
}
var os = require2('os')
var Keycloak = require2('keycloak-connect');
var session = require2('express-session');
var memoryStore = new session.MemoryStore();
var kk = {
"realm": "yazz",
"auth-server-url": "http://127.0.0.1:8080/auth",
"ssl-required": "external",
"resource": "yazz",
"public-client": true,
"confidential-port": 0
}
var sessObj = session({
secret: 'some secret',
resave: false,
saveUninitialized: true,
store: memoryStore
})
var keycloak = new Keycloak({
store: memoryStore
},kk);
var upload
var dbPath = null
var dbsearch = null
var userData = null
let appDbs = {}
var port;
var hostaddress;
if (isWin) {
hostaddress = "127.0.0.1"//ip.address();
} else {
hostaddress = "0.0.0.0"//ip.address();
}
var hostaddressintranet;
hostaddressintranet = ip.address();
port = 80
var socket = null
var io = null;
var forkedProcesses = new Object();
var timeout = 0;
var serverwebsockets = [];
var portrange = 3000
var locked;
var useHttps;
var serverProtocol = "http";
var privateKey;
var publicCertificate;
var caCertificate1;
var caCertificate2;
var caCertificate3;
var hostcount = 0;
var queuedResponses = new Object();
var queuedResponseSeqNum = 1;
var executionProcessCount = 6;
app.use(compression())
app.use(sessObj);
app.use(keycloak.middleware({
logout: '/c',
admin: '/ad'
}));
var inmemcalc = false
var totalMem = 0
var returnedmemCount = 0
var allForked=[]
const apiMetrics = require2('prometheus-api-metrics');
app.use(apiMetrics())
const Prometheus = require2('prom-client');
const yazzMemoryUsageMetric = new Prometheus.Gauge({
name: 'yazz_total_memory_bytes',
help: 'Total Memory Usage'
});
const yazzProcessMainMemoryUsageMetric = new Prometheus.Gauge({
name: 'yazz_node_process_main_memory_bytes',
help: 'Memory Usage for Yazz NodeJS process "main"'
});
var stdin = process.openStdin();
var inputStdin = "";
stdin.on('data', function(chunk) {
inputStdin += chunk;
});
stdin.on('end', function() {
outputDebug("inputStdin: " + inputStdin)
});
if (process.argv.length > 1) {
program
.version('2021.0.1')
.option('-a, --runapp [runapp]', 'Run the app with ID as the homepage (default not set) [runapp]', null)
.option('-b, --virtualprocessors [virtualprocessors]', 'How many virtual processors to run (default 8 processors) [virtualprocessors]', 8)
.option('-c, --runhtml [runhtml]', 'Run using a local HTML page as the homepage (default not set) [runhtml]', null)
.option('-de, --deleteonexit [deleteonexit]', 'Delete database files on exit (default true) [deleteonexit]', 'false')
.option('-e, --debug [debug]', 'Allow to run NodeJS in debug mode (default false) [debug]', 'false')
.option('-f, --cacert1 [cacert1]', 'Public HTTPS CA certificate 1 [cacert1]', null)
.option('-g, --cacert2 [cacert2]', 'Public HTTPS CA certificate 2 [cacert2]', null)
.option('-h, --loadjsfile [loadjsfile]', 'Load the following JS from a file (default not set) [loadjsfile]', null)
.option('-i, --cacert3 [cacert3]', 'Public HTTPS CA certificate 3 [cacert3]', null)
.option('-j, --host [host]', 'Server address of the central host (default yazz.com) [host]', 'yazz.com')
.option('-k, --statsinterval [statsinterval]', 'Allow to show debug info every x seconds (default 10 seconds) [statsinterval]', 10)
.option('-l, --showstats [showstats]', 'Allow to show stats debug info (default false) [showstats]', 'false')
.option('-m, --showprogress [showprogress]', 'Show progress when starting Visual Javascript (default false) [showprogress]', 'false')
.option('-mjms, --maxJobProcessDurationMs [maxJobProcessDurationMs]', 'Maximum time to wait for a job to complete (default 10000 ms) [maxJobProcessDurationMs]', 10000)
.option('-n, --locked [locked]', 'Allow server to be locked/unlocked on start up (default true) [locked]', 'true')
.option('-o, --maxprocessesretry [maxprocessesretry]', 'Number of processes to retry when all cores are busy (default 10 processes) [maxprocessesretry]', 10)
.option('-ph, --public [public]', 'Public HTTPS certificate [public]', null)
.option('-q, --port [port]', 'Which port should I listen on? (default 80) [port]', parseInt)
.option('-r, --https [https]', 'Run using a HTTPS (default is http) [https]', 'false')
.option('-s, --hostport [hostport]', 'Server port of the central host (default 80) [hostport]', parseInt)
.option('-t, --usehost [usehost]', 'Use host name [usehost]', null)
.option('-u, --loadjsurl [loadjsurl]', 'Load the following JS from a URL (default not set) [loadjsurl]', null)
.option('-w, --deleteonstartup [deleteonstartup]', 'Delete database files on startup (default true) [deleteonstartup]', 'true')
.option('-x, --private [private]', 'Private HTTPS key [private]', null)
.option('-y, --showdebug [showdebug]', 'Allow to show debug info (default false) [showdebug]', 'false')
.option('-z, --loadjscode [loadjscode]', 'Load the following JS from the command line (default not set) [loadjscode]', null)
.option('-lh, --useselfsignedhttps [useselfsignedhttps]', 'Use self signed HTTPS for local development (default false) [useselfsignedhttps]', 'false')
.option('-jc, --jaegercollector [jaegercollector]', 'jaeger collector endpoint (default not set) eg: http://localhost:14268/api/traces [jaegercollector]', null)
.parse(process.argv);
} else {
program.host = 'yazz.com'
program.locked = 'true'
program.debug = 'false'
program.deleteonexit = 'true'
program.deleteonstartup = 'false'
program.runapp = null
program.loadjsurl = null
program.loadjsfile = null
program.runhtml = null
program.https = 'false'
program.usehost = null
}
var semver = require2('semver')
const initJaegerTracer = require2("jaeger-client").initTracer;
const {Tags, FORMAT_HTTP_HEADERS} = require2('opentracing')
var showProgress = false
if (program.showprogress == 'true') {
showProgress = true;
}
var showDebug = false
// Logs the text only when debug output is enabled (--showdebug); otherwise,
// when --showprogress is set, emits a single progress dot instead.
// Reads the mutable module-level flags showDebug / showProgress.
function outputDebug(text) {
if (showDebug) {
console.log(text);
} else {
if (showProgress) {
process.stdout.write(".");
}
}
};
// Translate the remaining CLI flags into typed runtime settings. Each flag is
// logged via outputDebug so a --showdebug run records the effective config.
if (program.showdebug == 'true') {
    showDebug = true;
}
outputDebug(" showDebug: " + showDebug);
var showStats = false
if (program.showstats == 'true') {
    showStats = true;
}
outputDebug(" showStats: " + showStats );
// Self-signed HTTPS is for local development only (cert generated below).
var useSelfSignedHttps = false
if (program.useselfsignedhttps == 'true') {
    useSelfSignedHttps = true;
}
outputDebug(" useSelfSignedHttps: " + useSelfSignedHttps );
// -1 means "stats reporting disabled".
var statsInterval = -1
if (program.statsinterval > 0) {
    statsInterval = program.statsinterval;
}
outputDebug(" statsInterval: " + statsInterval );
// Number of forked execution ("virtual processor") children to spawn.
if (program.virtualprocessors > 0) {
    executionProcessCount = program.virtualprocessors;
}
outputDebug(" executionProcessCount: " + executionProcessCount );
var maxProcessesCountToRetry = 10
if (program.maxprocessesretry > 0) {
    maxProcessesCountToRetry = program.maxprocessesretry;
}
outputDebug(" maxProcessesCountToRetry: " + maxProcessesCountToRetry );
// Upper bound on how long a single job may run in an execution child.
var maxJobProcessDurationMs = 10000
if (program.maxJobProcessDurationMs > 0) {
    maxJobProcessDurationMs = program.maxJobProcessDurationMs;
}
outputDebug(" maxJobProcessDurationMs: " + maxJobProcessDurationMs );
// Copy every environment variable into envVars, sanitizing each name so it
// can be used as a plain identifier (every character outside [a-zA-Z0-9]
// becomes "_").
var listOfEnvs = process.env
var envNames = Object.keys(listOfEnvs)
for (var i=0 ;i< envNames.length; i++){
    let envName = envNames[i].replace(/[^a-zA-Z0-9]/g,'_');
    // BUG FIX: the value must be read with the ORIGINAL key. The previous
    // code looked it up with the sanitized name, so any variable whose name
    // contained a character outside [a-zA-Z0-9] (e.g. "my.var" or "my-var")
    // was stored as undefined.
    let envValue = listOfEnvs[envNames[i]]
    outputDebug("Env var " + envName + ": " + envValue)
    envVars[envName] = envValue
}
// Environment variables override CLI flags for the processor count and the
// Jaeger collector endpoint; then build the (optional) Jaeger tracer config.
if (isValidObject(envVars.virtualprocessors)) {
    executionProcessCount = envVars.virtualprocessors
}
envVars.IP_ADDRESS = ip.address()
if (electron.app) {
    envVars.RUNNING_IN_ELECTRON = true
}
let jaegerConfig = null
var jaegercollector = program.jaegercollector;
if (isValidObject(envVars.jaegercollector)) {
    jaegercollector = envVars.jaegercollector
}
// The tracer itself is created lazily (per request, see the REST handler).
let tracer = null
const jaegerOptions = { };
if (jaegercollector) {
    jaegerConfig = {
        serviceName: "Visual_Javascript",
        sampler: {
            // "const"/1 samples every span.
            type: "const",
            param: 1
        },
        reporter: {
            collectorEndpoint: jaegercollector,
            logSpans: true
        }
    }
    console.log("Trying to connect to Jaeger at " + jaegercollector)
}
function isValidObject(variable){
    // True when the argument is neither undefined nor null. The loose
    // `!= null` comparison deliberately matches both null and undefined.
    return (typeof variable !== 'undefined') && (variable != null)
}
// Final block of startup configuration: debug mode, delete-on-exit/startup
// flags, HTTPS (real certs or a generated self-signed one), host and port.
outputDebug('Starting services');
var debug = false;
outputDebug("NodeJS version: " + process.versions.node);
if (semver.gt(process.versions.node, '6.9.0')) {
    outputDebug("NodeJS version > 6.9 " );
}
if (program.debug == 'true') {
    debug = true;
    outputDebug(" debug: true" );
} else {
    outputDebug(" debug: false" );
};
var deleteOnExit = (program.deleteonexit == 'true');
outputDebug("deleteOnExit: " + deleteOnExit)
var deleteOnStartup = (program.deleteonstartup == 'true');
outputDebug("deleteOnStartup: " + deleteOnStartup)
locked = (program.locked == 'true');
useHttps = (program.https == 'true');
if (useSelfSignedHttps) {
    // Local-development path: generate a throwaway RSA keypair and a
    // self-signed certificate (valid for one year) with node-forge.
    forge.options.usePureJavaScript = true;
    var pki = forge.pki;
    var keys = pki.rsa.generateKeyPair(2048);
    var cert = pki.createCertificate();
    cert.publicKey = keys.publicKey;
    cert.serialNumber = '01';
    cert.validity.notBefore = new Date();
    cert.validity.notAfter = new Date();
    cert.validity.notAfter.setFullYear(cert.validity.notBefore.getFullYear()+1);
    // Subject == issuer (self-signed); the uuid keeps the CN unique per run.
    var attrs = [
        {name:'commonName',value:'yazz.com_' + uuidv1() }
        ,{name:'countryName',value:'UK'}
        ,{shortName:'ST',value:'Surrey'}
        ,{name:'localityName',value:'Redhill'}
        ,{name:'organizationName',value:'AppShare'}
        ,{shortName:'OU',value:'Test'}
    ];
    cert.setSubject(attrs);
    cert.setIssuer(attrs);
    cert.sign(keys.privateKey);
    var pem_pkey = pki.publicKeyToPem(keys.publicKey);
    var pem_cert = pki.certificateToPem(cert);
    console.log(pem_pkey);
    console.log(pem_cert);
    //https.createServer( { key:pem_pkey, cert:pem_cert },(req,res)=>
    //https.createServer( { key: pki.privateKeyToPem(keys.privateKey), cert:pem_cert },(req,res)=>
    //{
    //  res.writeHead(200, {'Content-Type': 'text/plain'});
    //  res.end('Hello World\n');
    //}).listen(443);
    // certOptions is later fed to https.createServer() in getPort().
    certOptions = {
        key: pki.privateKeyToPem(keys.privateKey)
        ,
        cert:pem_cert
    }
    useHttps = true
}
if (useHttps) {
    serverProtocol = "https"
}
outputDebug("useHttps: " + useHttps)
// Paths to externally supplied TLS material (used when not self-signed).
privateKey = program.private;
publicCertificate = program.public;
caCertificate1 = program.cacert1;
caCertificate2 = program.cacert2;
caCertificate3 = program.cacert3;
var useHost = program.usehost;
if (useHost) {
    hostaddress = useHost
    outputDebug("USE Host: " + useHost)
}
port = program.port;
outputDebug("port: " + port)
var runapp = program.runapp
// Under Electron the desktop homepage app is always shown.
if ( electronApp ) {
    runapp = "homepage"
};
var runhtml = program.runhtml;
var loadjsurl = program.loadjsurl;
var loadjsfile = program.loadjsfile;
var loadjscode = program.loadjscode;
// Default ports when --port was absent/non-numeric: 80 for http, 443 for https.
if (!isNumber(port)) {
    port = 80;
    if (useHttps) {
        port = 443;
    }
};
outputDebug('Yazz node local hostname: ' + ip.address() + ' ')
// Attach the two lifecycle listeners to the forked child registered under
// forkedProcesses[processName]:
//   'close'   — record the death in system_process_errors and respawn the
//               child (unless the whole app is shutting down).
//   'message' — the central IPC dispatcher: routes every message type the
//               children send back up to the parent.
// `fileName` and `debugPort` are only needed so the respawn can recreate the
// child with the same parameters.
function setUpChildListeners(processName, fileName, debugPort) {
    forkedProcesses[processName].on('close', async function() {
        if (!shuttingDown) {
            outputDebug("Child process " + processName + " exited.. restarting... ")
            var stmtInsertProcessError = dbsearch.prepare( ` insert into
                     system_process_errors
                     ( id,
                       timestamp,
                       process,
                       yazz_instance_id,
                       status,
                       base_component_id,
                       event,
                       system_code_id,
                       args,
                       error_message )
            values
                     ( ?, ?, ?, ?, ?, ?, ?, ?, ? , ? );`)
            dbsearch.serialize(function() {
                // Exclusive transaction: only this statement touches the db here.
                dbsearch.run("begin exclusive transaction");
                var newId = uuidv1()
                stmtInsertProcessError.run(
                    newId,
                    new Date().getTime(),
                    processName,
                    yazzInstanceId,
                    "KILLED",
                    null,
                    null,
                    null,
                    null,
                    null )
                dbsearch.run("commit");
                stmtInsertProcessError.finalize();
            })
            // Respawn the child with identical parameters.
            setupForkedProcess(processName, fileName, debugPort)
        }
    });
    forkedProcesses[processName].on('message', async function(msg) {
        //console.log("message from child: " + JSON.stringify(msg,null,2))
        //console.log("message type from child: " + JSON.stringify(msg.message_type,null,2))
        //------------------------------------------------------------------------------
        //
        // save_code: a child asks the main "forked" process to persist the
        // source code of a component.
        //
        //------------------------------------------------------------------------------
        if (msg.message_type == "save_code") {
            forkedProcesses["forked"].send({
                message_type: "save_code",
                base_component_id: msg.base_component_id,
                parent_hash: msg.parent_hash,
                code: msg.code,
                options: msg.options
            });
        //------------------------------------------------------------------------------
        //
        // add_rest_api: register an Express route (GET or POST, with and
        // without a trailing wildcard) that proxies each HTTP request to the
        // "forked" child as a callDriverMethod message and returns the
        // child's response as JSON.
        //
        //------------------------------------------------------------------------------
        } else if (msg.message_type == "add_rest_api") {
            outputDebug("add_rest_api called")
            var newFunction = async function (req, res) {
                var params = req.query;
                var url = req.originalUrl;
                var body = req.body;
                // Park the HTTP response in queuedResponses under a fresh
                // sequence number; the child's reply (seq_num_local) will
                // resolve this promise.
                var promise = new Promise(async function(returnFn) {
                    var seqNum = queuedResponseSeqNum;
                    queuedResponseSeqNum ++;
                    queuedResponses[ seqNum ] = function(value) {
                        returnFn(value)
                    }
                    outputDebug("            msg.base_component_id: " + msg.base_component_id);
                    outputDebug("            seqNum: " + seqNum);
                    forkedProcesses["forked"].send({
                        message_type: "callDriverMethod",
                        find_component: {
                            method_name: msg.base_component_id,
                            driver_name: msg.base_component_id
                        }
                        ,
                        args: {
                            params: params,
                            body: body,
                            url: url
                        }
                        ,
                        seq_num_parent: null,
                        seq_num_browser: null,
                        seq_num_local: seqNum,
                    });
                })
                var ret = await promise
                if (ret.value) {
                    res.writeHead(200, {'Content-Type': 'application/json'});
                    res.end(JSON.stringify(
                        ret.value
                    ));
                    // Best-effort tracing: one span per successful REST call.
                    if (jaegercollector) {
                        console.log("calling jaeger...")
                        try {
                            tracer = initJaegerTracer(jaegerConfig, jaegerOptions);
                            let span=tracer.startSpan(url)
                            span.setTag("call", "some-params")
                            span.finish()
                            tracer.close()
                            console.log("...called jaeger")
                        } catch(err){
                            console.log("Error calling jaeger: " + err)
                        }
                    }
                } else if (ret.error) {
                    res.writeHead(200, {'Content-Type': 'application/json'});
                    res.end(JSON.stringify(
                        {error: ret.error}
                    ));
                } else {
                    res.writeHead(200, {'Content-Type': 'application/json'});
                    res.end(JSON.stringify(
                        {error: "Unknown problem occurred"}
                    ));
                }
            }
            // end of function def for newFunction
            // Register the Express routes only once per route name; the
            // handler dereferences restRoutes[...] at call time, so
            // re-sending add_rest_api just swaps the implementation below.
            if (!isValidObject(restRoutes[msg.route])) {
                if (msg.rest_method == "POST") {
                    app.post( '/' + msg.route + '/*' , async function(req, res){
                        await ((restRoutes[msg.route])(req,res))
                    })
                    app.post( '/' + msg.route , async function(req, res){
                        await ((restRoutes[msg.route])(req,res))
                    })
                } else {
                    app.get( '/' + msg.route + '/*' , async function(req, res){
                        await ((restRoutes[msg.route])(req,res))
                    })
                    app.get( '/' + msg.route , async function(req, res){
                        await ((restRoutes[msg.route])(req,res))
                    })
                }
            }
            restRoutes[msg.route] = newFunction
        //------------------------------------------------------------------------------
        //
        // createdTablesInChild: the "forked" child finished creating its
        // tables — initialize its SQL, then (once only) either start the
        // services directly (TTY code) or begin probing for a free port.
        //
        //------------------------------------------------------------------------------
        } else if (msg.message_type == "createdTablesInChild") {
            forkedProcesses["forked"].send({ message_type: "setUpSql" });
            forkedProcesses["forked"].send({ message_type: "greeting" , hello: 'world' });
            outputDebug("mainNodeProcessStarted: " + mainNodeProcessStarted)
            if (!mainNodeProcessStarted) {
                mainNodeProcessStarted = true
                outputDebug("createdTablesInChild")
                isCodeTtyCode = await isTtyCode()
                //console.log("isCodeTtyCode:= " + isCodeTtyCode)
                if (isCodeTtyCode) {
                    await startServices()
                } else {
                    console.log("Loading Visual Javascript. Please wait a few minutes ... ")
                    getPort()
                }
            }
        //------------------------------------------------------------------------------
        //
        // This is the last thing that happens when AppShare is started
        //
        //------------------------------------------------------------------------------
        } else if (msg.message_type == "drivers_loaded_by_child") {
            await finalizeYazzLoading();
        //------------------------------------------------------------------------------
        //
        // A child finished saving an app that was uploaded as a file —
        // notify every connected browser.
        //
        //------------------------------------------------------------------------------
        } else if (msg.message_type == "ipc_child_returning_uploaded_app_as_file_in_child_response") {
            outputDebug("uploaded_app_as_file_in_child: " + JSON.stringify(msg))
            //                                                          ______
            // Server --1 data item-->  Browser
            //                                                          ______
            //
            sendOverWebSockets({
                type: "uploaded_app_as_file_from_server",
                code_id: msg.code_id,
                base_component_id: msg.base_component_id,
                client_file_upload_id: msg.client_file_upload_id
            });
        //------------------------------------------------------------------------------
        //
        // database_setup_in_child: an execution child (or the scheduler)
        // has its database ready — give it SQL access and, for execution
        // processes, register the node with the scheduler.
        //
        //------------------------------------------------------------------------------
        } else if (msg.message_type == "database_setup_in_child") {
            if (msg.child_process_name == "forkedExeScheduler") {
                forkedProcesses["forkedExeScheduler"].send({ message_type: "setUpSql" });
            }
            if (msg.child_process_name.startsWith("forkedExeProcess")) {
                forkedProcesses[msg.child_process_name].send({ message_type: "setUpSql" });
                forkedProcesses["forkedExeScheduler"].send({ message_type: "startNode",
                    node_id: msg.child_process_name,
                    child_process_id: forkedProcesses[msg.child_process_name].pid,
                    started: new Date()
                });
            }
        //------------------------------------------------------------------------------
        //
        // return_add_local_driver_results_msg: resolve the parked local
        // callback (see queuedResponses) with either the result or the error.
        //
        //------------------------------------------------------------------------------
        } else if (msg.message_type == "return_add_local_driver_results_msg") {
            //console.log("6 - return_get_search_results: " + msg.returned);
            // NOTE(review): `rett` is never used, and eval-ing msg.success is
            // a code-execution risk if a child can be compromised — confirm
            // whether this line is still needed.
            var rett = eval("(" + msg.success + ")");
            var newCallbackFn = queuedResponses[ msg.seq_num_local ]
            if (msg.result ) {
                newCallbackFn(msg.result)
            } else {
                newCallbackFn({
                    error: msg.error
                })
            }
            // NOTE(review): `newres` is an implicit global (no var/let).
            newres = null;
        //------------------------------------------------------------------------------
        //
        // processor_free: an execution child reports it is idle — relay to
        // the scheduler so it can hand out the next job.
        //
        //------------------------------------------------------------------------------
        } else if (msg.message_type == "processor_free") {
            forkedProcesses["forkedExeScheduler"].send({
                message_type: "processor_free",
                child_process_name: msg.child_process_name
            });
        //------------------------------------------------------------------------------
        //
        // execute_code_in_exe_child_process: the scheduler assigns a job to
        // a specific execution child.
        //
        //------------------------------------------------------------------------------
        } else if (msg.message_type == "execute_code_in_exe_child_process") {
            forkedProcesses[msg.child_process_name].send({
                message_type: "execute_code",
                code: msg.code,
                callback_index: msg.callback_index,
                code_id: msg.code_id,
                args: msg.args,
                call_id: msg.call_id,
                on_condition: msg.on_condition,
                base_component_id: msg.base_component_id
            });
        //------------------------------------------------------------------------------
        //
        // function_call_request: a child wants another component's function
        // called — the scheduler decides where it runs.
        //
        //------------------------------------------------------------------------------
        } else if (msg.message_type == "function_call_request") {
            forkedProcesses["forkedExeScheduler"].send({
                message_type: "function_call_request",
                child_process_name: msg.child_process_name,
                find_component: msg.find_component,
                args: msg.args,
                callback_index: msg.callback_index,
                caller_call_id: msg.caller_call_id
            });
        //------------------------------------------------------------------------------
        //
        // function_call_response: a child finished a function call — route
        // the result back through the scheduler.
        //
        //------------------------------------------------------------------------------
        } else if (msg.message_type == "function_call_response") {
            //console.log("*** function_call_response: " + JSON.stringify(msg,null,2))
            forkedProcesses["forkedExeScheduler"].send({
                message_type: "function_call_response",
                child_process_name: msg.child_process_name,
                driver_name: msg.driver_name,
                method_name: msg.method_name,
                result: msg.result,
                callback_index: msg.callback_index,
                called_call_id: msg.called_call_id
            });
        //------------------------------------------------------------------------------
        //
        // return_response_to_function_caller: final leg — deliver a function
        // result back to the child that originally asked for it.
        //
        //------------------------------------------------------------------------------
        } else if (msg.message_type == "return_response_to_function_caller") {
            //console.log("*) Electron.js got response for  " + msg.child_process_name);
            //console.log("*) "+ msg.result)
            if (msg.child_process_name) {
                forkedProcesses[msg.child_process_name].send({
                    message_type: "return_response_to_function_caller",
                    callback_index: msg.callback_index,
                    result: msg.result
                });
            }
        //------------------------------------------------------------------------------
        //
        // ipc_child_returning_callDriverMethod_response: forward a driver
        // method result to the browser websocket that requested it
        // (looked up via seq_num_parent).
        //
        //------------------------------------------------------------------------------
        } else if (msg.message_type == "ipc_child_returning_callDriverMethod_response") {
            //console.log("  .......3: " + JSON.stringify(msg,null,2));
            //console.log("6: return_query_items_ended")
            //console.log("6.1: " + msg)
            var new_ws = queuedResponses[ msg.seq_num_parent ]
            if (msg.result) {
                if (msg.result.code) {
                    var tr = msg.result.code
                    msg.result.code = tr
                }
            }
            sendToBrowserViaWebSocket(
                new_ws
                ,
                {
                    type: "ws_to_browser_callDriverMethod_results",
                    value: msg.result,
                    seq_num: msg.seq_num_browser
                });
            //new_ws = null;
        }
    });
}
//------------------------------------------------------------------------------
//
//
//
//
//
//------------------------------------------------------------------------------
// Spawn a child Node process from src/<fileName>, register it in
// forkedProcesses[processName], attach its IPC listeners, and send the
// initial "init" message (plus "createTables" for the main child).
// `debugPort` is effectively unused today — the --inspect/--debug execArgv
// lines are commented out below.
function setupForkedProcess( processName, fileName, debugPort ) {
    var debugArgs = [];
    let useElectron = ""
    if (electronApp) {
        useElectron = "TRUE"
        console.log("***** Run all in electron, useElectron = TRUE")
    } else {
        console.log("***** NOT run in electron, useElectron = ")
    }
    if (debug) {
        if (semver.gte(process.versions.node, '6.9.0')) {
            //debugArgs = ['--inspect=' + debugPort];
            debugArgs = [];
        } else {
            //debugArgs = ['--debug=' + debugPort];
            debugArgs = [];
        };
    };
    var forkedProcessPath
    if (isWin) {
        forkedProcessPath = path.join(__dirname, '..\\src\\' + fileName)
    } else {
        forkedProcessPath = path.join(__dirname, '../src/' + fileName)
    }
    console.log("forkedProcessPath: " + forkedProcessPath)
    // NOTE(review): `env` REPLACES the child's environment with just
    // {electron: ...} — PATH etc. are not inherited. Confirm intended.
    forkedProcesses[ processName ] = fork.fork(forkedProcessPath, [], {execArgv: debugArgs,
        env: {electron: useElectron }});
    setUpChildListeners(processName, fileName, debugPort);
    // The main "forked" child also creates the system tables.
    if (processName == "forked") {
        forkedProcesses["forked"].send({ message_type: "init" ,
            user_data_path: userData,
            child_process_name: "forked",
            show_debug: showDebug,
            show_progress: showProgress,
            yazz_instance_id: yazzInstanceId,
            jaeger_collector: jaegercollector
        });
        forkedProcesses["forked"].send({ message_type: "createTables" });
    }
    // The scheduler additionally needs the job-retry/timeout limits.
    if (processName == "forkedExeScheduler") {
        forkedProcesses["forkedExeScheduler"].send({ message_type: "init" ,
            user_data_path: userData,
            child_process_name: "forkedExeScheduler",
            max_processes_count_to_retry: maxProcessesCountToRetry,
            max_job_process_duration_ms: maxJobProcessDurationMs,
            show_debug: showDebug,
            show_progress: showProgress,
            yazz_instance_id: yazzInstanceId,
            jaeger_collector: jaegercollector
        });
    }
    // Execution children ("forkedExeProcessN") get a plain init.
    for (var i=0;i<executionProcessCount; i++ ) {
        var exeProcName = "forkedExeProcess" + i
        if (processName == exeProcName) {
            forkedProcesses[exeProcName].send({ message_type: "init" ,
                user_data_path: userData,
                child_process_name: exeProcName,
                show_debug: showDebug,
                show_progress: showProgress,
                yazz_instance_id: yazzInstanceId,
                jaeger_collector: jaegercollector
            });
        }
    }
    outputDebug("Started subprocess '" + processName + "' ")
}
//------------------------------------------------------------------------------
//
//
//
//
//
//------------------------------------------------------------------------------
function setupMainChildProcess() {
    // Spawn the primary "forked" child (src/child.js); 40003 is its
    // (currently unused) debug port.
    const mainProcessName = "forked"
    setupForkedProcess(mainProcessName, "child.js", 40003)
}
function sendOverWebSockets(data) {
    // Broadcast one event to every connected server-side websocket.
    // The event name is taken from data.type; the payload is the whole
    // data object.
    for (const sock of serverwebsockets) {
        sock.emit(data.type, data);
    }
}
function isNumber(n) {
    // A value counts as numeric when parseFloat yields a number AND the
    // value itself coerces to a finite number — so "12px" is rejected even
    // though parseFloat("12px") is 12.
    const parsed = parseFloat(n);
    return !Number.isNaN(parsed) && Number.isFinite(Number(n));
}
async function setupVisifileParams() {
    // Log the port and local IP address this instance will advertise.
    outputDebug('-------* Port: ' + port);
    outputDebug( ip.address() );
}
// Global teardown: flush and close every SQLite database, kill all child
// processes, and (when --deleteonexit) remove the user-data directory.
// Guarded by `shuttingDown` so it runs at most once — and so the children's
// 'close' handlers (see setUpChildListeners) do not respawn them.
function shutDown() {
    outputDebug(" shutDown() called")
    if (!shuttingDown) {
        shuttingDown = true;
        if (dbsearch) {
            outputDebug("Database closing...")
            // Checkpoint the WAL so committed data reaches the main db file.
            dbsearch.run("PRAGMA wal_checkpoint;")
            dbsearch.close(function(err){
                outputDebug("...database closed")
            })
        }
        // Same checkpoint-and-close for every per-app database.
        let appDbNames = Object.keys(appDbs)
        for (let appDbIndex = 0; appDbIndex < appDbNames.length; appDbIndex ++) {
            let thisAppDb = appDbs[appDbNames[appDbIndex]]
            thisAppDb.run("PRAGMA wal_checkpoint;")
            thisAppDb.close(function(err){
                outputDebug("... " + appDbNames[appDbIndex] + " database closed")
            })
        }
        if (forkedProcesses["forked"]) {
            outputDebug("Killed Process forked")
            forkedProcesses["forked"].kill();
        }
        if (forkedProcesses["forkedExeScheduler"]) {
            outputDebug("Killed Exe Scheduler process")
            forkedProcesses["forkedExeScheduler"].kill();
        }
        for (var i = 0; i < executionProcessCount; i++ ) {
            var exeProcName = "forkedExeProcess" + i
            forkedProcesses[exeProcName].kill();
            outputDebug("Killed Process " + exeProcName)
        }
        outputDebug("deleteOnExit =" + deleteOnExit)
        if (deleteOnExit) {
            outputDebug("deleting dir :" + userData)
            // Safety net: never delete suspiciously short paths (e.g. "/" or "C:\").
            if (userData.length > 6) {
                if (isWin) {
                    deleteYazzDataWindows(userData)
                } else {
                    deleteYazzData(userData)
                }
            }
        }
    }
}
function deleteYazzDataWindows(dataDir) {
    // Windows variant of the data-directory cleanup: wait 8 seconds (so the
    // exiting process releases its file handles), then remove the whole
    // tree. Errors are deliberately ignored — this is best-effort cleanup.
    outputDebug("deleteYazzDataWindows")
    // Safety net: refuse to delete suspiciously short paths (e.g. "C:\").
    if (dataDir.length <= 6) {
        return
    }
    const removeCmd = 'timeout 8 && rd /s /q "' + dataDir + '"'
    outputDebug(removeCmd)
    fork.exec(removeCmd, function(err, stdout, stderr) {
        if (err) {
            // node couldn't execute the command — nothing more we can do
            return;
        }
    })
}
function deleteYazzDataV2(dataDir) {
    // Synchronously remove all user-generated artifacts (uploads, files,
    // apps, app databases and *.visi database files) from the data
    // directory, logging the directory contents before deleting.
    if ( fs.existsSync( dataDir ) ) {
        outputDebug("----------------------------------")
        outputDebug("Before delete :" + ls(dataDir))
        outputDebug("----------------------------------")
        const targets = ['uploads/', 'files/', 'apps/', 'app_dbs/', '*.visi', '*.visi*']
        for (const target of targets) {
            rimraf.sync(path.join(dataDir, target));
        }
    }
    outputDebug("----------------------------------")
    outputDebug("After delete" )
    outputDebug("----------------------------------")
}
function deleteYazzData(dataDir) {
    // Unix variant of the data-directory cleanup: sleep briefly so the
    // exiting process lets go of its files, then remove the user artifacts.
    // Failures are ignored — best-effort cleanup on shutdown.
    const removeCmd = 'sleep 3 && cd "' + dataDir + '" && rm -rf app_dbs apps uploads files *.visi*'
    fork.exec(removeCmd, function(err, stdout, stderr) {
        if (err) {
            // node couldn't execute the command — nothing more we can do
            return;
        }
    })
}
// Probe for a usable port, then hand the chosen host/port to the "forked"
// child and start the services.
//   - on 'error' (port busy): bump into the `portrange` range and recurse.
//   - the listen() callback (which Node invokes on success) immediately
//     closes the probe server — NOTE(review): the 'listening' handler below
//     also fires on success and proceeds to startServices(); confirm the
//     close-then-continue sequencing is intended.
function getPort () {
    outputDebug('** called getPort v2')
    if (useHttps) {
        // Lazily build TLS options from the cert files given on the CLI
        // (certOptions may already exist from the self-signed path).
        if (!certOptions) {
            let caCerts = readCerts()
            certOptions = {
                key: fs.readFileSync(privateKey, 'utf8'),
                cert: fs.readFileSync(publicCertificate, 'utf8'),
                ca: caCerts
            }
        }
        // Request (but do not require a valid) client certificate.
        certOptions.requestCert = true
        certOptions.rejectUnauthorized = false
        httpServer = https.createServer(certOptions,app)
    } else {
        httpServer = http.createServer(app)
    }
    httpServer.listen(port, hostaddress, function (err) {
        outputDebug('trying port: ' + port + ' ')
        httpServer.once('close', function () {
        })
        httpServer.close()
        httpServer = null;
    })
    httpServer.on('error', function (err) {
        outputDebug('Couldnt connect on port ' + port + '...')
        // Jump to the scanning range, then advance it for the next attempt.
        if (port < portrange) {
            port = portrange
        };
        outputDebug('... trying port ' + port)
        portrange += 1
        getPort()
    })
    httpServer.on('listening', async function (err) {
        outputDebug('Can connect on ' + ip.address() + ':' + port + ' :) ')
        forkedProcesses["forked"].send({ message_type: "host_and_port" ,
            child_process_name: "forked",
            ip: hostaddress,
            port: port
        });
        await startServices()
    })
}
//------------------------------------------------------------------------------------------
//
// checkForJSLoaded
//
// This checks to see if AppShare is started with custom code. This code is
// then loaded into AppShare either as a web app or it is run as a UI app
//
//
//
//------------------------------------------------------------------------------------------
// Resolve where the user-supplied startup code comes from — env vars
// (loadjsurl / loadjsfile / loadjscode), CLI flags, or bare positional
// arguments — then save that code into the app via the "forked" child
// ("save_code") and point `runapp` at the resulting component id.
// For code that is not front-end-only, also switch the startup mode to
// RUN_SERVER_CODE. A positional argv[3] that is not a flag is captured as
// stdin input for the program.
async function checkForJSLoaded() {
    outputDebug("*********** In checkForJSLoaded() ************")
    if (isValidObject(envVars.loadjsurl)) {
        loadjsurl = envVars.loadjsurl
    }
    //
    // load JS code from file
    //
    if (isValidObject(envVars.loadjsfile)) {
        loadjsfile = envVars.loadjsfile
    }
    //console.log("process.argv.length : " + process.argv.length )
    //console.log("process.argv[2] : " + process.argv[2] )
    try {
        // argv[2] may be a URL, a script file (.js/.pilot/.jsa/.vjs), or
        // raw inline code; anything starting with "--" is a normal flag.
        if ((process.argv[2]) && (process.argv[2].startsWith("http://") || process.argv[2].startsWith("https://") )) {
            loadjsurl = process.argv[2]
            //console.log("inputStdin: " + inputStdin )
            if ((!inputStdin) || (inputStdin.length == 0)) {
                if ((process.argv[3]) && (!process.argv[3].startsWith("--"))) {
                    inputStdin = process.argv[3]
                }
            }
        } else if ((process.argv[2]) && (process.argv[2].endsWith(".js") || process.argv[2].endsWith(".pilot") || process.argv[2].endsWith(".jsa") || process.argv[2].endsWith(".vjs") )) {
            loadjsfile = process.argv[2]
            if ((!inputStdin) || (inputStdin.length == 0)) {
                if ((process.argv[3]) && (!process.argv[3].startsWith("--"))) {
                    inputStdin = process.argv[3]
                }
            }
        } else if ((process.argv[2]) && (!process.argv[2].startsWith("--"))) {
            loadjscode = process.argv[2]
            outputDebug("load code: " + loadjscode )
            //console.log("inputStdin: " + inputStdin )
            //console.log("load code: " + loadjscode )
            if ((!inputStdin) || (inputStdin.length == 0)) {
                if ((process.argv[3]) && (!process.argv[3].startsWith("--"))) {
                    inputStdin = process.argv[3]
                }
            }
        }
    } catch(err) {
        console.log("Error in checkForJSLoaded: " + err)
    }
    if (isValidObject(envVars.loadjscode)) {
        loadjscode = envVars.loadjscode
    }
    let promise = new Promise(async function(returnFn) {
        if (isValidObject(loadjsurl)) {
            outputDebug("*********** Using loadjsurl ************")
            var jsUrl = loadjsurl
            // NOTE(review): https.get is used even though http:// URLs are
            // accepted above — an http URL will hit the "error" path; confirm.
            https.get(jsUrl, (resp) => {
                var data = '';
                // A chunk of data has been received.
                resp.on('data', (chunk) => {
                    data += chunk;
                });
                // The whole response has been received. Print out the result.
                resp.on('end', () => {
                    //console.log("code:" + data);
                    var baseComponentIdForUrl = saveHelper.getValueOfCodeString(data, "base_component_id")
                    outputDebug("baseComponentIdForUrl:" + baseComponentIdForUrl);
                    if (!isValidObject(baseComponentIdForUrl)) {
                        // Fall back to a sanitized form of the URL as the id.
                        baseComponentIdForUrl = loadjsurl.replace(/[^A-Z0-9]/ig, "_");
                    }
                    var jsCode = data
                    outputDebug("*********** Trying to load loadjsurl code *************")
                    forkedProcesses["forked"].send({
                        message_type: "save_code",
                        base_component_id: baseComponentIdForUrl,
                        parent_hash: null,
                        code: data,
                        options: {
                            make_public: true,
                            save_html: true
                        }
                    });
                    runapp = baseComponentIdForUrl
                    let frontEndCode = isFrontEndOnlyCode(data)
                    //console.log("frontEndCode: " + frontEndCode)
                    if (frontEndCode){
                        //inputStdin = loadjscode
                    } else {
                        // Server-side code: run it after a short delay.
                        //console.log("runapp: " + runapp)
                        //console.log("inputStdin: " + inputStdin)
                        startupType = "RUN_SERVER_CODE"
                        startupDelay = 1000
                    }
                    returnFn()
                });
            }).on("error", (err) => {
                outputDebug("Error: " + err.message);
                returnFn()
            });
        } else if (isValidObject(loadjsfile)) {
            outputDebug("*********** Using loadjsfile ************")
            var jsFile = loadjsfile
            var data2 = fs.readFileSync(jsFile).toString()
            var baseComponentIdForFile = saveHelper.getValueOfCodeString(data2, "base_component_id")
            if (!isValidObject(baseComponentIdForFile)) {
                // Fall back to a sanitized form of the file path as the id.
                baseComponentIdForFile = loadjsfile.replace(/[^A-Z0-9]/ig, "_");
            }
            //console.log("code from file:" + data2);
            //console.log("*********** Trying to load loadjsfile code *************")
            forkedProcesses["forked"].send({
                message_type: "save_code",
                base_component_id: baseComponentIdForFile,
                parent_hash: null,
                code: data2,
                options: {
                    make_public: true,
                    save_html: true
                }
            });
            runapp = baseComponentIdForFile
            let frontEndCode = isFrontEndOnlyCode(data2)
            //console.log("frontEndCode: " + frontEndCode)
            if (frontEndCode){
                //inputStdin = loadjscode
            } else {
                //console.log("runapp: " + runapp)
                //console.log("inputStdin: " + inputStdin)
                startupType = "RUN_SERVER_CODE"
                startupDelay = 1000
            }
            returnFn()
        } else if (isValidObject(loadjscode)) {
            outputDebug("*********** Using loadjscode ************")
            var data2 = loadjscode
            var baseComponentIdForCode = saveHelper.getValueOfCodeString(data2, "base_component_id")
            outputDebug("baseComponentIdForCode:" + baseComponentIdForCode);
            if (!isValidObject(baseComponentIdForCode)) {
                // Inline code has no natural id — generate a random one.
                baseComponentIdForCode = "code_" + (("" + Math.random()).replace(/[^A-Z0-9]/ig, "_"));
                outputDebug("baseComponentIdForFile:" + baseComponentIdForCode);
            }
            //console.log("code:" + data2);
            outputDebug("*********** Trying to load loadjscode code *************")
            forkedProcesses["forked"].send({
                message_type: "save_code",
                base_component_id: baseComponentIdForCode,
                parent_hash: null,
                code: data2,
                options: {
                    make_public: true,
                    save_html: true
                }
            });
            runapp = baseComponentIdForCode
            //console.log("baseComponentIdForCode: " + baseComponentIdForCode)
            //console.log("runapp: " + runapp)
            let frontEndCode = isFrontEndOnlyCode(loadjscode)
            //console.log("frontEndCode: " + frontEndCode)
            if (frontEndCode){
                //inputStdin = loadjscode
            } else {
                //console.log("runapp: " + runapp)
                //console.log("inputStdin: " + inputStdin)
                startupType = "RUN_SERVER_CODE"
                startupDelay = 1000
            }
            returnFn()
        } else {
            // No custom code supplied at all.
            returnFn()
        }
    })
    var ret = await promise
    return
}
//------------------------------------------------------------------------------------------
//
// isTtyCode
//
// Determines whether the custom code that AppShare was started with (URL,
// file, or inline code) is server/TTY code rather than front-end-only code
//
//
//
//------------------------------------------------------------------------------------------
// Determines whether the custom JS that AppShare was started with (via
// env vars loadjsurl/loadjsfile/loadjscode or a bare positional CLI
// argument) is server-side ("TTY") code rather than front-end-only code.
//
// Returns true when the code is NOT front-end-only (see isFrontEndOnlyCode),
// and false when it is front-end-only, when fetching the URL fails, or when
// no custom code was supplied at all.
//
// Side effects: may assign the file-level loadjsurl / loadjsfile /
// loadjscode variables from env vars or process.argv (mirroring
// checkForJSLoaded, which runs the same detection later).
async function isTtyCode() {
    outputDebug("*********** In isTtyCode() ************")
    if (isValidObject(envVars.loadjsurl)) {
        loadjsurl = envVars.loadjsurl
    }
    //
    // load JS code from file
    //
    if (isValidObject(envVars.loadjsfile)) {
        loadjsfile = envVars.loadjsfile
    }
    try {
        // argv[2] may be a URL, a script file (.js/.pilot/.jsa/.vjs), or
        // raw inline code; anything starting with "--" is a normal flag.
        if ((process.argv[2]) && (process.argv[2].startsWith("http://") || process.argv[2].startsWith("https://") )) {
            loadjsurl = process.argv[2]
        } else if ((process.argv[2]) && (process.argv[2].endsWith(".js") || process.argv[2].endsWith(".pilot") || process.argv[2].endsWith(".jsa") || process.argv[2].endsWith(".vjs") )) {
            loadjsfile = process.argv[2]
        } else if ((process.argv[2]) && (!process.argv[2].startsWith("--"))) {
            loadjscode = process.argv[2]
            outputDebug("load code: " + loadjscode )
        }
    } catch(err) {
        // BUG FIX: this previously logged "Error in checkForJSLoaded"
        // (copy-pasted from the sibling function), which made logs misleading.
        console.log("Error in isTtyCode: " + err)
    }
    if (isValidObject(envVars.loadjscode)) {
        loadjscode = envVars.loadjscode
    }
    let promise = new Promise(async function(returnFn) {
        if (isValidObject(loadjsurl)) {
            var jsUrl = loadjsurl
            // NOTE(review): https.get will fail on plain http:// URLs even
            // though they are accepted above — confirm intended.
            https.get(jsUrl, (resp) => {
                var data = '';
                resp.on('data', (chunk) => {
                    data += chunk;
                });
                resp.on('end', () => {
                    let frontEndOnly = isFrontEndOnlyCode(data)
                    returnFn(!frontEndOnly)
                });
            }).on("error", (err) => {
                outputDebug("Error: " + err.message);
                returnFn(false)
            });
        } else if (isValidObject(loadjsfile)) {
            var jsFile = loadjsfile
            var data2 = fs.readFileSync(jsFile).toString()
            let frontEndOnly = isFrontEndOnlyCode(data2)
            returnFn(!frontEndOnly)
        } else if (isValidObject(loadjscode)) {
            let frontEndOnly = isFrontEndOnlyCode(loadjscode)
            returnFn(!frontEndOnly)
        } else {
            returnFn(false)
        }
    })
    let ttyCodeRet = await promise
    return ttyCodeRet
}
function isFrontEndOnlyCode(code) {
    // Classify a script as "front-end only" (runs purely in the browser).
    // NOTE: check order matters — a script mentioning "Vue." is treated as
    // front-end even if it also calls only_run_on_server(; the first marker
    // found decides the verdict. Unknown/empty code defaults to false.
    if (!code){
        return false
    }
    const markerVerdicts = [
        ["Vue.",                  true ],
        ["only_run_on_server(",   false],
        ["only_run_on_frontend(", true ],
        ["rest_api(",             false],
    ]
    for (const [marker, verdict] of markerVerdicts) {
        if (code.indexOf(marker) !== -1) {
            return verdict
        }
    }
    return false
}
function mkdirSync(dirPath) {
    // Create dirPath (and any missing parents), swallowing every error —
    // most commonly EEXIST when the directory is already there.
    try {
        mkdirp.sync(dirPath)
    } catch (ignored) {
        // deliberately ignored (best-effort create)
    }
}
function outputToConsole(text) {
    // Unconditional console logger (unlike outputDebug, which is gated on
    // the --showdebug flag).
    console.log(text);
}
function copyFileSync( source, target ) {
    // Copy a single file. If target is an existing directory, the file is
    // created inside it under its original basename.
    let destination = target;
    if ( fs.existsSync( target ) && fs.lstatSync( target ).isDirectory() ) {
        destination = path.join( target, path.basename( source ) );
    }
    fs.writeFileSync(destination, fs.readFileSync(source));
}
function copyFolderRecursiveSync( source, target ) {
    // Recursively copy the directory `source` INTO `target` — the result is
    // target/basename(source)/... . Failures on individual entries are
    // logged via outputDebug and skipped rather than aborting the copy.
    const targetFolder = path.join( target, path.basename( source ) );
    if ( !fs.existsSync( targetFolder ) ) {
        fs.mkdirSync( targetFolder );
    }
    if ( !fs.lstatSync( source ).isDirectory() ) {
        return
    }
    try {
        const entries = fs.readdirSync( source );
        for (const entry of entries) {
            const entryPath = path.join( source, entry );
            // lstat is intentionally OUTSIDE the per-entry try: if it throws,
            // the outer catch aborts the rest of this directory (as before).
            if ( fs.lstatSync( entryPath ).isDirectory() ) {
                try {
                    copyFolderRecursiveSync( entryPath, targetFolder );
                } catch(err) {
                    outputDebug(err)
                }
            } else {
                try {
                    copyFileSync( entryPath, targetFolder );
                } catch(err) {
                    outputDebug(err)
                }
            }
        }
    } catch(err) {
        outputDebug(err)
    }
}
// ============================================================
// This sends a message to a specific websocket
// ============================================================
function sendToBrowserViaWebSocket(aws, msg) {
    // Emit msg on one specific websocket, using msg.type as the event name
    // and the whole msg object as the payload.
    aws.emit(msg.type, msg);
}
function isLocalMachine(req) {
    // A request counts as "local" when it comes from loopback, from this
    // host's own address, or when we are bound to all interfaces (0.0.0.0).
    if (req.ip == '127.0.0.1') {
        return true;
    }
    return (hostaddress == req.ip) || (hostaddress == "0.0.0.0");
}
//------------------------------------------------------------------------------
// test if allowed
//------------------------------------------------------------------------------
function canAccess(req,res) {
    // Gatekeeper: when the instance is locked, only requests from the local
    // machine are allowed. On rejection it writes the response itself and
    // returns false so the caller can simply bail out.
    if (!locked || isLocalMachine(req)) {
        return true;
    }
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end("Sorry but access to " + username + "'s data is not allowed. Please ask " + username + " to unlocked their Yazz account");
    return false;
};
function extractHostname(url) {
    // Reduce a URL to its bare hostname: strip the protocol (if any), then
    // anything after the first "/", then a ":port" suffix, then a "?query".
    var hostname = (url.indexOf("://") > -1)
        ? url.split('/')[2]
        : url.split('/')[0];
    hostname = hostname.split(':')[0];
    hostname = hostname.split('?')[0];
    return hostname;
}
function extractRootDomain(url) {
    // Reduce a URL to its last two domain labels, e.g.
    // "http://a.b.example.com/x" -> "example.com". Hostnames with two or
    // fewer labels are returned unchanged.
    const hostname = extractHostname(url);
    const labels = hostname.split('.');
    if (labels.length > 2) {
        return labels[labels.length - 2] + '.' + labels[labels.length - 1];
    }
    return hostname;
}
function findViafromString(inp) {
    // Scan a space-separated "Via:"-style header value and return the root
    // domain of the first token containing a ":", or "" when none matches
    // (or the input is null).
    if (inp == null) {
        return "";
    }
    for (const token of inp.split(' ')) {
        if (token != null && token.indexOf(":") != -1) {
            return extractRootDomain(token);
        }
    }
    return "";
}
function runOnPageExists(req, res, homepage) {
    // Serve `homepage` as soon as it exists on disk, polling every 3 seconds
    // until the app generator has written it. Access control is re-checked
    // at serve time via canAccess (which writes its own rejection response).
    if (!fs.existsSync(homepage)) {
        setTimeout(function() {
            runOnPageExists(req, res, homepage)
        }, 3000)
        return
    }
    if (!canAccess(req, res)) {
        return;
    }
    res.end(fs.readFileSync(homepage));
}
function getRoot(req, res, next) {
    // Serve the site root. Known marketing domains are 301-redirected to
    // their canonical home pages; otherwise the configured app page (or the
    // default home page) is served once it exists on disk.
    hostcount++;
    var homepage = path.join(__dirname, '../public/go.html')
    var homepageUrl = serverProtocol + '://yazz.com/visifile/index.html?time=' + new Date().getTime()
    if (req.headers.host) {
        // ?goto and ?embed always get the raw go.html template.
        if (req.query.goto) {
            outputDebug("*** FOUND goto")
            res.end(fs.readFileSync(homepage));
            return
        }
        if (req.query.embed) {
            outputDebug("*** FOUND embed")
            res.end(fs.readFileSync(homepage));
            return
        }
        // Domain-suffix -> redirect-target table. This replaces seven
        // copy-pasted writeHead(301)/end blocks; order is preserved.
        var redirectTable = [
            { suffix: 'yazz.com',        location: homepageUrl },
            { suffix: 'dannea.com',      location: homepageUrl },
            { suffix: 'canlabs.com',     location: 'http://canlabs.com/canlabs/index.html' },
            { suffix: 'gosharedata.com', location: homepageUrl },
            { suffix: 'visifile.com',    location: homepageUrl },
            { suffix: 'visifiles.com',   location: homepageUrl },
            { suffix: 'appshare.co',     location: homepageUrl }
        ]
        var hostLower = req.headers.host.toLowerCase()
        for (var ri = 0; ri < redirectTable.length; ri++) {
            if (hostLower.endsWith(redirectTable[ri].suffix)) {
                res.writeHead(301, {Location: redirectTable[ri].location});
                res.end();
                return;
            }
        }
    };
    // An environment variable can override which app is served at "/".
    if (isValidObject(envVars.YAZZ_RUN_APP)) {
        runapp = envVars.YAZZ_RUN_APP
    }
    if (runhtml && (!req.query.goto) && (!req.query.embed)) {
        // A raw HTML file was configured - serve it directly.
        homepage = runhtml
        runOnPageExists(req,res,homepage)
        return
    } else if ((runapp || loadjsurl || loadjsfile || loadjscode) && (!req.query.goto) && (!req.query.embed)) {
        // All four of these run modes served the identical generated page
        // 'apps/<runapp>.html' in the original copy-pasted branches.
        homepage = path.join( userData, 'apps/' + runapp + '.html' )
        runOnPageExists(req,res,homepage)
        return
    } else {
        // Default: the generated home page listing all apps.
        homepage = path.join( userData, 'apps/homepage.html' )
        runOnPageExists(req,res,homepage)
        return
    }
    // NOTE(review): the original ended with outputDebug("Serving: " + homepage)
    // here, which was unreachable because every branch above returns.
}
function getEditApp(req, res) {
    // Serve the editor page for /edit/<component>: the static go.html
    // template is returned with the requested component id injected into it.
    hostcount++;
    // socket.io sometimes requests .map files on this route - ignore them.
    if (req.path.endsWith(".map")) {
        return
    }
    var segments = req.path.split('/');
    // pop() twice handles a trailing slash (first pop yields "").
    var baseComponentId = segments.pop() || segments.pop();
    outputDebug("URL PATH: " + baseComponentId);
    var templatePath = path.join(__dirname, '../public/go.html')
    var pageContent = fs.readFileSync(templatePath)
        .toString()
        .replace("var editAppShareApp = null",
                 "var editAppShareApp = '" + baseComponentId + "'")
    res.writeHead(200, {'Content-Type': 'text/html; charset=utf-8'});
    res.end(pageContent);
}
function websocketFn(ws) {
    // Register a new browser websocket connection, push initial state to the
    // browser(s), and install the message dispatcher that routes browser
    // requests to the worker subprocesses.
    serverwebsockets.push(ws);
    sendToBrowserViaWebSocket(ws, {type: "socket_connected"});
    sendOverWebSockets({
        type: "env_vars",
        value: envVars
    });
    sendOverWebSockets({
        type: "network_ip_address_intranet",
        value: hostaddressintranet
    });
    sendOverWebSockets({
        type: "send_is_win",
        value: isWin
    });
    ws.on('message', async function(msg) {
        // SECURITY NOTE(review): eval() executes arbitrary code supplied by
        // the client over the websocket. If messages are JSON, this should be
        // JSON.parse(msg); flagged rather than changed to avoid breaking
        // clients that send JS-literal (non-strict-JSON) messages.
        var receivedMessage = eval("(" + msg + ")");
        if (receivedMessage.message_type == "server_get_all_queries") {
            // Remember which websocket asked (seqNum -> ws) so the reply from
            // the subprocess can be routed back, then forward the request.
            var seqNum = queuedResponseSeqNum;
            queuedResponseSeqNum ++;
            queuedResponses[seqNum] = ws;
            forkedProcesses["forked"].send({
                message_type: "get_all_queries",
                seq_num: seqNum
            });
        } else if (receivedMessage.message_type == "loadUiComponent") {
            // Load the LATEST code (plus its library dependency names) for
            // the requested components and send it back to this browser.
            var componentIds = receivedMessage.find_components.base_component_ids
            dbsearch.serialize(
                function() {
                    var stmt = dbsearch.all(
                        "SELECT * FROM system_code WHERE base_component_id in " +
                        "(" + componentIds.map(function(){ return "?" }).join(",") + " )" +
                        " and code_tag = 'LATEST' ",
                        componentIds
                        ,
                        function(err, results)
                        {
                            if (results) {
                                if (results.length > 0) {
                                    var codeId = results[0].id
                                    dbsearch.all(
                                        "SELECT dependency_name FROM app_dependencies where code_id = ?; ",
                                        codeId,
                                        function(err, results2)
                                        {
                                            results[0].libs = results2
                                            sendToBrowserViaWebSocket(
                                                ws,
                                                {
                                                    type: "server_returns_loadUiComponent_to_browser",
                                                    seq_num: receivedMessage.seq_num,
                                                    record: JSON.stringify(results,null,2),
                                                    args: JSON.stringify(receivedMessage.args,null,2),
                                                    test: 1
                                                });
                                        })
                                }
                            }
                        })
                }, sqlite3.OPEN_READONLY)
        } else if (receivedMessage.message_type == "edit_static_app") {
            // A statically exported app asks to be re-imported for editing:
            // save its code + embedded SQLite data, then reply with the URL
            // of the editor page for that component.
            outputDebug("*** server got message from static app: edit_static_app")
            var sql_data = receivedMessage.sql_data
            var code_fn = receivedMessage.code_fn
            forkedProcesses["forked"].send({
                message_type: "save_code_from_upload",
                base_component_id: receivedMessage.base_component_id,
                parent_hash: null,
                code: code_fn,
                client_file_upload_id: -1,
                options: {save_html: true, fast_forward_database_to_latest_revision: true},
                sqlite_data: sql_data
            });
            sendToBrowserViaWebSocket( ws,
                {
                    type: "edit_static_app_url"
                    ,
                    url: receivedMessage.host_editor_address +
                         "/edit/" +
                         receivedMessage.base_component_id
                    ,
                    size_of_db: "" + (sql_data?sql_data.length:0)
                    ,
                    code_fn: "" + (code_fn?code_fn.length:0)
                });
        } else if (receivedMessage.message_type == "browser_asks_server_for_data") {
            // Browser -> Server: "send me your data"; relayed to the
            // subprocess with a seqNum so the answer finds this websocket.
            // BUGFIX(review): this branch appeared a second time further down
            // the else-if chain in the original; that copy was unreachable
            // dead code and has been removed.
            var seqNum = queuedResponseSeqNum;
            queuedResponseSeqNum ++;
            queuedResponses[seqNum] = ws;
            forkedProcesses["forked"].send({
                message_type: "server_asks_subprocess_for_data",
                seq_num: seqNum
            });
        } else if (receivedMessage.message_type == "electron_file_save_as") {
            // Electron-only: prompt for a save path, tell the browser about
            // it, then (after a short delay) reload the app from that file.
            let saveOptions = {
                title: "Save .vjs file"
                ,
                buttonLabel : "Save As"
                ,
                filters :[
                    {name: 'Visual Javascript', extensions: ['vjs']},
                    {name: 'Javascript', extensions: ['js']},
                    {name: 'All Files', extensions: ['*']}
                ]
            }
            dialog.showSaveDialog(null, saveOptions).then(result => {
                let filePath = result.filePath
                console.log("Save to: " + JSON.stringify(result,null,2))
                sendOverWebSockets({
                    type: "set_saveCodeToFile_V2",
                    saveCodeToFile: filePath,
                    base_component_id: receivedMessage.base_component_id,
                    code_id: receivedMessage.code_id,
                    code: receivedMessage.code
                });
                setTimeout(function() {
                    let sd= uuidv1()
                    sendOverWebSockets({
                        type: "set_file_upload_uuid",
                        file_upload_uuid: sd
                    });
                    sendOverWebSockets({
                        type: "set_saveCodeToFile",
                        saveCodeToFile: filePath
                    });
                    saveCodeToFile = filePath
                    loadAppFromFile( filePath,
                                     sd)
                },1000)
            })
        } else if (receivedMessage.message_type == "browser_asks_server_for_apps") {
            // Return the list of latest app versions to this browser.
            findLatestVersionOfApps( function(results) {
                sendToBrowserViaWebSocket( ws,
                    {
                        type: "vf_app_names",
                        results: results
                    });
            })
        } else if (receivedMessage.message_type == "callDriverMethod") {
            // "callDriverMethod" invokes server-side app code. The integer
            // seqNum identifies which of the possibly-many connected browsers
            // made the call so the reply can be routed back.
            var seqNum = queuedResponseSeqNum;
            queuedResponseSeqNum ++;
            queuedResponses[ seqNum ] = ws;
            if (receivedMessage.find_component && receivedMessage.find_component.driver_name == "systemFunctionAppSql") {
                // App SQL runs directly in this process.
                let resultOfSql = await executeSqliteForApp( receivedMessage.args )
                sendToBrowserViaWebSocket(
                    ws
                    ,
                    {
                        type: "ws_to_browser_callDriverMethod_results",
                        value: resultOfSql,
                        seq_num: receivedMessage.seqNum
                    });
            } else {
                // Everything else is delegated to the main subprocess.
                forkedProcesses["forked"].send({
                    message_type: "callDriverMethod",
                    find_component: receivedMessage.find_component,
                    args: receivedMessage.args,
                    seq_num_parent: seqNum,
                    seq_num_browser: receivedMessage.seqNum
                });
            }
        }
    });
};
function file_uploadSingleFn(req, res) {
    // Handle a single uploaded file: acknowledge immediately with 200, then
    // import the file as an app. HTML exports have their app code extracted
    // from between the APP_START/APP_END markers; script files are saved
    // as-is; anything else is ignored.
    var client_file_upload_id = req.body.client_file_upload_id
    res.status( 200 ).send( req.file );
    var ifile = req.file
    var ext = ifile.originalname.split('.').pop();
    ext = ext.toLowerCase();
    // BUGFIX: the original compared against "html" twice; the second
    // comparison now accepts "htm" as clearly intended.
    if ((ext == "html") || (ext == "htm")) {
        // Give the upload its extension back, then read it in.
        var localp2 = path.join(userData, 'uploads/' + ifile.filename);
        var localp = localp2 + '.' + ext;
        fs.renameSync(localp2, localp);
        var readIn = fs.readFileSync(localp).toString()
        var indexStart = readIn.indexOf("/*APP_START*/")
        var indexEnd = readIn.indexOf("/*APP_END*/")
        if ((indexStart > 0) && (indexEnd > 0)) {
            // Skip past the marker and the escape prefix; trim the tail.
            indexStart += 13 + 10
            indexEnd -= 2
            var tts = readIn.substring(indexStart,indexEnd)
            // NOTE: unescape() is deprecated, but it must mirror how the
            // exporter escaped the app code.
            var ytr = unescape(tts)
            outputDebug("SENDING FROM UPLOAD___=+++****")
            var bci = saveHelper.getValueOfCodeString(ytr, "base_component_id")
            // Optional embedded DB: "var sqlitedata = '...'//sqlitedata".
            var indexOfSqliteData = readIn.indexOf("var sqlitedata = '")
            var indexOfSqliteDataEnd = readIn.indexOf("'//sqlitedata")
            var sqlitedatafromupload = null
            if ((indexOfSqliteData != -1) && (indexOfSqliteDataEnd != -1)) {
                sqlitedatafromupload = readIn.substring( indexOfSqliteData + 18,
                                                         indexOfSqliteDataEnd)
            }
            forkedProcesses["forked"].send({
                message_type: "save_code_from_upload",
                base_component_id: bci,
                parent_hash: null,
                code: ytr,
                client_file_upload_id: client_file_upload_id,
                options: {save_html: true, fast_forward_database_to_latest_revision: true},
                sqlite_data: sqlitedatafromupload
            });
        }
    } else if ((ext == "js") || (ext == "yazz") || (ext == "pilot") || (ext == "jsa") || (ext == "vjs") ) {
        // Raw script upload: rename to keep the extension and save as-is.
        var localp2 = path.join(userData, 'uploads/' + ifile.filename);
        var localp = localp2 + '.' + ext;
        fs.renameSync(localp2, localp);
        var readIn = fs.readFileSync(localp).toString()
        var bci = saveHelper.getValueOfCodeString(readIn, "base_component_id")
        forkedProcesses["forked"].send({
            message_type: "save_code_from_upload",
            base_component_id: bci,
            parent_hash: null,
            code: readIn,
            client_file_upload_id: client_file_upload_id,
            options: {save_html: true, fast_forward_database_to_latest_revision: false},
            sqlite_data: ""
        });
    } else {
        outputDebug('Ignoring file ');
    }
};
function file_uploadFn(req, res, next) {
    // Handle a multi-file upload: acknowledge immediately with 200, then
    // import each file. HTML exports have their app code extracted from the
    // APP_START/APP_END markers; script files go through loadAppFromFile().
    var client_file_upload_id = req.body.client_file_upload_id
    res.status( 200 ).send( req.files );
    var ll = req.files.length;
    for (var i = 0; i < ll ; i ++) {
        var ifile = req.files[i];
        var ext = ifile.originalname.split('.').pop();
        ext = ext.toLowerCase();
        // BUGFIX: the original compared against "html" twice; the second
        // comparison now accepts "htm" as clearly intended.
        if ((ext == "html") || (ext == "htm")) {
            var localp2 = path.join(userData, 'uploads/' + ifile.filename);
            var localp = localp2 + '.' + ext;
            fs.renameSync(localp2, localp);
            var readIn = fs.readFileSync(localp).toString()
            var indexStart = readIn.indexOf("/*APP_START*/")
            var indexEnd = readIn.indexOf("/*APP_END*/")
            if ((indexStart > 0) && (indexEnd > 0)) {
                // Skip past the marker and the escape prefix; trim the tail.
                indexStart += 13 + 10
                indexEnd -= 2
                var tts = readIn.substring(indexStart,indexEnd)
                var ytr = unescape(tts)
                // BUGFIX: corrected the misspelt debug message ("SENDINF FROM
                // UPLAOD") to match file_uploadSingleFn's message.
                outputDebug("SENDING FROM UPLOAD___=+++****")
                var bci = saveHelper.getValueOfCodeString(ytr, "base_component_id")
                // Optional embedded DB: "var sqlitedata = '...'//sqlitedata".
                var indexOfSqliteData = readIn.indexOf("var sqlitedata = '")
                var indexOfSqliteDataEnd = readIn.indexOf("'//sqlitedata")
                var sqlitedatafromupload = null
                if ((indexOfSqliteData != -1) && (indexOfSqliteDataEnd != -1)) {
                    sqlitedatafromupload = readIn.substring( indexOfSqliteData + 18,
                                                             indexOfSqliteDataEnd)
                }
                forkedProcesses["forked"].send({
                    message_type: "save_code_from_upload",
                    base_component_id: bci,
                    parent_hash: null,
                    code: ytr,
                    client_file_upload_id: client_file_upload_id,
                    options: {save_html: true, fast_forward_database_to_latest_revision: true},
                    sqlite_data: sqlitedatafromupload
                });
            }
        } else if ((ext == "js") || (ext == "yazz") || (ext == "pilot") || (ext == "jsa") || (ext == "vjs")) {
            var localp2 = path.join(userData, 'uploads/' + ifile.filename);
            var localp = localp2 + '.' + ext;
            fs.renameSync(localp2, localp);
            loadAppFromFile(localp,client_file_upload_id)
        } else {
            outputDebug('Ignoring file ');
        }
    }
};
function file_name_load(req, res, next) {
    // Load an app from a file path supplied via query parameters.
    var fileName = req.query.file_name_load;
    var uploadId = req.query.client_file_upload_id;
    loadAppFromFile(fileName, uploadId)
};
function loadAppFromFile(localp,client_file_upload_id) {
    // Read an app source file from disk and hand it to the main subprocess
    // to be saved. save_code_to_file remembers the original path so later
    // saves can write back to the same file.
    console.log("loadAppFromFile(" + localp + "," + client_file_upload_id + ")")
    var sourceText = fs.readFileSync(localp).toString()
    var componentId = saveHelper.getValueOfCodeString(sourceText, "base_component_id")
    var saveMessage = {
        message_type: "save_code_from_upload",
        base_component_id: componentId,
        parent_hash: null,
        code: sourceText,
        client_file_upload_id: client_file_upload_id,
        options: {
            save_html: true,
            fast_forward_database_to_latest_revision: false,
            save_code_to_file: localp
        }, sqlite_data: ""
    }
    forkedProcesses["forked"].send(saveMessage);
}
function code_uploadFn(req, res) {
    // Debug endpoint handler: send a hard-coded test function to the main
    // subprocess to be saved as an app.
    var testPayload = {
        message_type: "save_code_from_upload",
        parent_hash: null,
        code: "function(args) { /* rest_api('test3') */ return {ab: 163}}",
        options: {save_html: true},
        sqlite_data: ""
    }
    forkedProcesses["forked"].send(testPayload);
};
function keycloakProtector(params) {
    // Express middleware factory intended to protect an app route with
    // Keycloak when the app's LATEST code declares a keycloak(...) section.
    // NOTE(review): the middleware calls next() and returns on its first two
    // lines, so everything after them is UNREACHABLE - Keycloak protection is
    // effectively disabled. The dead code below is kept intact as it appears
    // to be deliberately parked for re-enabling; confirm before deleting.
    return function(req,res,next) {
        next()
        return
        // ---- unreachable from here down ----
        var appName2=null
        if (params.compIdFromReqFn) {
            // Derive the component id from the request (e.g. URL path).
            appName2 = params.compIdFromReqFn(req)
        }
        dbsearch.serialize(
            function() {
                var stmt = dbsearch.all(
                    "SELECT code FROM system_code where base_component_id = ? and code_tag = ?; ",
                    appName2,
                    "LATEST",
                    function(err, results)
                    {
                        if (results.length == 0) {
                            outputDebug("Could not find component : " + appName2)
                        } else {
                            outputDebug("Found code for : " + appName2)
                            var fileC = results[0].code.toString()
                            //console.log("Code : " + fileC)
                            // Only protect routes whose code declares a keycloak section.
                            var sscode = saveHelper.getValueOfCodeString(fileC,"keycloak",")//keycloak")
                            //console.log("sscode:" + sscode)
                            if (sscode) {
                                //var ssval = eval( "(" + sscode + ")")
                                //console.log("keycloak: " + JSON.stringify(sscode,null,2))
                                keycloak.protect()(req, res, next)
                            } else {
                                next()
                            }
                        }
                    })
            }, sqlite3.OPEN_READONLY)
    }
}
//------------------------------------------------------------
// This starts all the system services
//------------------------------------------------------------
async function startServices() {
    // Start all system services: the HTTP(S) web server with every Express
    // route, the websocket layer, and the execution-scheduler subprocesses.
    // In tty/code mode (isCodeTtyCode) no web server is started at all.
    if (!isCodeTtyCode) {
        if (useHttps) {
            // Plain HTTP listener on port 80 that redirects everything to
            // HTTPS, except canlabs.com content and ACME ".well-known" files.
            var app2 = express()
            var newhttp = http.createServer(app2);
            app2.use(compression())
            app2.get('/', function (req, res, next) {
                return getRoot(req, res, next);
            })
            app2.get('*', function(req, res) {
                if (req.headers.host.toLowerCase().endsWith('canlabs.com')) {
                    outputDebug("path: " + req.path)
                    var rty = req.path
                    if (req.path == "/canlabs") {
                        rty = "/canlabs/index.html"
                    }
                    var fileNameRead = path.join(__dirname, '../public' + rty)
                    res.end(fs.readFileSync(fileNameRead));
                } else if ( req.path.indexOf(".well-known") != -1 ) {
                    // Let Let's Encrypt/ACME challenges through over HTTP.
                    var fileNameRead = path.join(__dirname, '../public' + req.path)
                    res.end(fs.readFileSync(fileNameRead));
                } else {
                    outputDebug("Redirect HTTP to HTTPS")
                    res.redirect('https://' + req.headers.host + req.url);
                }
            })
            newhttp.listen(80);
        }
        app.use(compression())
        app.use(cors({ origin: '*' }));
        // Permissive CORS headers for every response.
        app.use(function (req, res, next) {
            // Website you wish to allow to connect
            res.header('Access-Control-Allow-Origin', '*');
            // Request methods you wish to allow
            res.header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS, PUT, PATCH, DELETE');
            // Request headers you wish to allow
            res.header('Access-Control-Allow-Headers', 'X-Requested-With,content-type');
            // Set to true if you need the website to include cookies in the requests sent
            // to the API (e.g. in case you use sessions)
            res.setHeader('Access-Control-Allow-Credentials', false);
            // Pass to next layer of middleware
            next();
        });
        //------------------------------------------------------------------------------
        // Show the default page for the different domains
        //------------------------------------------------------------------------------
        app.get('/', function (req, res, next) {
            console.log("calling main page")
            console.log("jaeger: " + jaegercollector)
            return getRoot(req, res, next);
        })
        // Kubernetes-style liveness probe.
        app.get('/live-check',(req,res)=> {
            outputDebug("Live check passed")
            res.send ("Live check passed");
        });
        // Readiness probe: fails with 500 until startup has completed.
        app.get('/readiness-check',(req,res)=> {
            if (systemReady) {
                outputDebug("Readiness check passed")
                res.send ("Readiness check passed");
            } else {
                outputDebug("Readiness check failed")
                res.status(500).send('Readiness check did not pass');
            }
        });
        //------------------------------------------------------------------------------
        // Allow an app to be edited
        //------------------------------------------------------------------------------
        app.get('/edit/*', function (req, res) {
            return getEditApp(req, res);
        })
        app.use("/files", express.static(path.join(userData, '/files/')));
        app.use("/weights", express.static(path.join(userData, '/weights/')));
        // Empty placeholder - never implemented.
        function getAppNameFromHtml() {
        }
        // Derive the component id from a request path like "/app/foo.html".
        function getBaseComponentIdFromRequest(req){
            var parts = req.path.split('/');
            var appHtmlFile = parts.pop() || parts.pop();
            var appName = appHtmlFile.split('.').slice(0, -1).join('.')
            return appName
        }
        //app.get('/app/*', keycloakProtector({compIdFromReqFn: getBaseComponentIdFromRequest}), function (req, res, next) {
        app.get('/app/*', function (req, res, next) {
            if (req.kauth) {
                outputDebug('Keycloak details from server:')
                outputDebug(req.kauth.grant)
            }
            var parts = req.path.split('/');
            var appHtmlFile = parts.pop() || parts.pop();
            //console.log("appHtemlFile: " + appHtmlFile);
            var appName = appHtmlFile.split('.').slice(0, -1).join('.')
            //console.log("appName: " + appName);
            //console.log("path: " + path);
            // Serve the generated app HTML straight from the userData dir.
            var appFilePath = path.join(userData, 'apps/' + appHtmlFile)
            var fileC2 = fs.readFileSync(appFilePath, 'utf8').toString()
            res.writeHead(200, {'Content-Type': 'text/html; charset=utf-8'});
            res.end(fileC2);
        })
        //app.use("/app_dbs", express.static(path.join(userData, '/app_dbs/')));
        app.use("/public/aframe_fonts", express.static(path.join(__dirname, '../public/aframe_fonts')));
        app.use( express.static(path.join(__dirname, '../public/')))
        app.use(bodyParser.json()); // support json encoded bodies
        app.use(bodyParser.urlencoded({ extended: true })); // support encoded bodies
        // File open/upload endpoints (multer handles multipart parsing).
        app.post('/file_open_single', upload.single( 'openfilefromhomepage' ), function (req, res, next) {
            console.log("File open: " + JSON.stringify(req.file.originalname,null,2))
            return file_uploadSingleFn(req, res, next);
        });
        app.post('/file_upload_single', upload.single( 'uploadfilefromhomepage' ), function (req, res, next) {
            console.log("File upload: " + JSON.stringify(req.file.originalname,null,2))
            return file_uploadSingleFn(req, res, next);
        });
        app.post('/file_upload', upload.array( 'file' ), function (req, res, next) {
            return file_uploadFn(req, res, next);
        });
        app.get('/code_upload', function (req, res, next) {
            code_uploadFn(req, res);
            res.writeHead(200, {'Content-Type': 'text/html; charset=utf-8'});
            res.end("Done");
        });
        app.get('/file_name_load', function (req, res, next) {
            //console.log("Hit file_name_load")
            file_name_load(req, res);
            res.writeHead(200, {'Content-Type': 'text/html; charset=utf-8'});
            res.end("Done");
        });
        app.get('/electron_file_open', async function (req, res, next) {
            console.log('/electron_file_open')
            res.writeHead(200, {'Content-Type': 'text/html; charset=utf-8'});
            res.end("Done");
            await getFileFromUser()
        });
        app.get('/lock', function (req, res) {
            return lockFn(req, res);
        })
    }
    // Keep the server alive on unexpected errors; just log them.
    process.on('uncaughtException', function (err) {
        outputDebug(err);
    })
    //------------------------------------------------------------------------------
    // start the web server
    //------------------------------------------------------------------------------
    if (!isCodeTtyCode) {
        if (useHttps) {
            if (!certOptions) {
                let caCerts = readCerts()
                certOptions = {
                    key: fs.readFileSync(privateKey, 'utf8'),
                    cert: fs.readFileSync(publicCertificate, 'utf8'),
                    ca: caCerts
                }
            }
            // Request (but do not require a valid) client certificate.
            certOptions.requestCert = true
            certOptions.rejectUnauthorized = false
            httpServer = https.createServer(certOptions,app)
        } else {
            httpServer = http.createServer(app)
        }
        socket = require2('socket.io')(http)
        httpServer.listen(port, hostaddress, function () {
            outputDebug("****HOST=" + hostaddress + "HOST****\n");
            outputDebug("****PORT=" + port+ "PORT****\n");
            outputDebug('Started on port ' + port + ' with local folder at ' + process.cwd() + ' and __dirname = ' + __dirname+ "\n");
            //
            // We dont listen on websockets here with socket.io as often they stop working!!!
            // Crazy, I know!!!! So we removed websockets from the list of transports below
            //
            io = socket.listen(httpServer, {
                log: false,
                agent: false,
                origins: '*:*',
                transports: ['htmlfile', 'xhr-polling', 'jsonp-polling', 'polling']
            });
            io.on('connection', function (sck) {
                var connt = JSON.stringify(sck.conn.transport,null,2);
                websocketFn(sck)
            });
        })
    }
    // One scheduler plus N execution worker subprocesses on sequential ports.
    setupForkedProcess("forkedExeScheduler", "exeScheduler.js", 40004)
    for (var i=0;i<executionProcessCount; i++ ) {
        var exeProcName = "forkedExeProcess" + i
        setupForkedProcess(exeProcName, "exeProcess.js", 40100 + i)
    }
    //console.log('addr: '+ hostaddress + ":" + port);
    setTimeout(async function(){
        //--------------------------------------------------------
        // Check if any JS is loaded
        //--------------------------------------------------------
        await checkForJSLoaded();
        if (isCodeTtyCode) {
            await finalizeYazzLoading()
        } else {
            forkedProcesses["forked"].send({message_type: 'setUpPredefinedComponents'});
        }
    },1000)
}
async function finalizeYazzLoading() {
    // Final startup step. In interactive/server mode: print the banner and
    // the listening addresses (and point the Electron window at the local
    // server). In tty/code mode: parse stdin as the app's arguments, run the
    // configured app once via the subprocess, print the result, and exit.
    if (!isCodeTtyCode) {
        console.log(`
   888     888 d8b                        888
   888     888 Y8P                        888
   888     888                            888
   Y88b   d88P 888 .d8888b  888  888  8888b.  888
    Y88b d88P  888 88K      888  888     "88b 888
     Y88o88P   888 "Y8888b. 888  888 .d888888 888
      Y888P    888      X88 Y88b 888 888  888 888
       Y8P     888  88888P'  "Y88888 "Y888888 888
   888888                           d8b                   888
     "88b                           Y8P                   888
      888                                                 888
      888  8888b.  888  888  8888b.  .d8888b  .d8888b 888d888 888 88888b.  888888
      888     "88b 888  888     "88b 88K      d88P"   888P"   888 888 "88b 888
      888 .d888888 Y88  88P .d888888 "Y8888b. 888     888     888 888  888 888
      88P 888  888  Y8bd8P  888  888      X88 Y88b.   888     888 888 d88P Y88b.
      888 "Y888888   Y88P   "Y888888  88888P'  "Y8888P 888     888 88888P"   "Y888
    .d88P                                                         888
  .d88P"                                                          888
 888P"                                                            888
 .d8888b.   .d8888b.  .d8888b.  d888
d88P  Y88b d88P  Y88b d88P  Y88b d8888
       888 888    888        888   888
     .d88P 888    888      .d88P   888
 .od888P"  888    888  .od888P"    888
d88P"      888    888 d88P"        888
888"       Y88b  d88P 888"         888
888888888   "Y8888P"  888888888  8888888
`)
        console.log("\nAppShare Instance ID:    " + yazzInstanceId );
        console.log("\nRunning " + executionProcessCount + " virtual processors");
        console.log("\nAppShare started on:");
        console.log("Network Host Address:    " + hostaddressintranet)
        let localAddress = serverProtocol + "://" + hostaddress + ':' + port
        console.log("Local Machine Address:   " + localAddress);
        if (electronApp) {
            // Swap the Electron window from loading.html to the live server.
            visifile.loadURL(localAddress)
        }
    } else {
        // tty/code mode: stdin carries the arguments for a one-shot app run.
        var parsedInput = null
        try {
            // First attempt: stdin is a JS object/value literal.
            parsedInput = eval("(" + inputStdin + ")");
        } catch(qwe) {
            //console.log("Err: " + qwe);
            try {
                // Fallback: treat stdin as a plain string (newlines stripped).
                let pss = "('" + inputStdin + "')";
                pss = pss.replace(/(\r\n|\n|\r)/gm, "");
                parsedInput = eval(pss);
            } catch(ex) {
                //console.log(ex)
            }
        }
        //console.log("client args:" + JSON.stringify( parsedInput,null,2))
        //console.log("Parsed: " + JSON.stringify(parsedInput));
        (async function() {
            // Register a callback under seqNum so the subprocess reply
            // resolves this promise.
            var promise = new Promise(async function(returnFn) {
                var seqNum = queuedResponseSeqNum;
                queuedResponseSeqNum ++;
                queuedResponses[ seqNum ] = function(value) {
                    returnFn(value)
                }
                if(startupType == "RUN_SERVER_CODE") {
                    setTimeout(function(){
                        forkedProcesses["forked"].send({
                            message_type: "callDriverMethod",
                            find_component: {
                                base_component_id: runapp
                            }
                            ,
                            args: parsedInput
                            ,
                            seq_num_parent: null,
                            seq_num_browser: null,
                            seq_num_local: seqNum,
                        });
                    },startupDelay)
                } else {
                }
            })
            var ret = await promise
            //console.log("ret: " + JSON.stringify(ret,null,2))
            if (ret.value) {
                // Print the app's result as pretty JSON on stdout.
                process.stdout.write(JSON.stringify(ret.value,null,2));
                process.stdout.write('\n');
            }
            //shutDown();
            process.exit();
        })()
    }
    systemReady = true
}
function findLatestVersionOfApps( callbackFn) {
    // Query the local DB for the LATEST-tagged version of every app and pass
    // the result rows to callbackFn, or null when there are none.
    dbsearch.serialize(
        function() {
            var stmt = dbsearch.all(
                "SELECT id,base_component_id,display_name, component_options FROM system_code where component_scope = ? and code_tag = ?; ",
                "app",
                "LATEST",
                function(err, results)
                {
                    // BUGFIX: on a query error `results` is undefined; the
                    // original dereferenced results.length and would throw.
                    if (results && results.length > 0) {
                        callbackFn(results)
                    } else {
                        callbackFn(null)
                    }
                })
        }, sqlite3.OPEN_READONLY)
}
function bytesToMb(bytes) {
return (bytes / 1024 ) / 1024
}
function getChildMem(childProcessName,stats) {
    // Add one child process's memory usage into the global totalMem
    // accumulator, optionally logging the per-process figure.
    var memoryused = 0
    if (stats) {
        memoryused = stats.memory ;
        totalMem += memoryused
    }
    if (!showStats) {
        return
    }
    outputDebug(`${childProcessName}: ${Math.round(bytesToMb(memoryused) * 100) / 100} MB`);
}
function usePid(childProcessName,childprocess) {
    // Sample one child process's memory via pidusage; once every forked
    // process has reported, optionally log the total and publish it as a
    // Prometheus-style metric.
    pidusage(childprocess.pid, function (err, stats) {
        getChildMem(childProcessName,stats)
        returnedmemCount ++
        if (returnedmemCount != allForked.length) {
            return
        }
        // All children reported: finish this measurement round.
        if (showStats) {
            outputDebug("------------------------------------")
            outputDebug("    TOTAL MEM = " + bytesToMb(totalMem) + " MB")
            outputDebug("------------------------------------")
        }
        inmemcalc = false
        yazzMemoryUsageMetric.set(totalMem)
    });
}
//------------------------------------------------------------------------------
//
//
//
//
//
//------------------------------------------------------------------------------
function readCerts() {
    // Read the configured CA certificate files (up to three, any of which
    // may be unset) and return their contents as an array for the TLS
    // server's `ca` option.
    outputDebug("Checking CA certs" )
    outputDebug("-----------------" )
    outputDebug("" )
    outputDebug("CA Cert 1 = " + caCertificate1)
    outputDebug("CA Cert 2 = " + caCertificate2)
    outputDebug("CA Cert 3 = " + caCertificate3)
    outputDebug("" )
    outputDebug("" )
    let caCertsRet = []
    // Loop over the three configured paths instead of three copy-pasted
    // read blocks.
    let caCertificatePaths = [caCertificate1, caCertificate2, caCertificate3]
    for (let i = 0; i < caCertificatePaths.length; i++) {
        let certPath = caCertificatePaths[i]
        if (certPath) {
            outputDebug("CA Cert " + (i + 1) + " = " + certPath)
            let certContent = fs.readFileSync(certPath, 'utf8')
            outputDebug("          = " + certContent)
            caCertsRet.push(certContent)
        }
    }
    return caCertsRet
}
setupVisifileParams();
// Top-level startup: when running inside Electron, build the main window and
// application menu; otherwise (plain Node server / CLI) just pick the data
// directory and start.
if (electronApp) {
    electronApp.on('ready', async function() {
        visifile = new BrowserWindow({
            width: 800,
            height: 600,
            webPreferences: {
                nodeIntegration: false,
                enableRemoteModule: true
            }
        })
        visifile.maximize()
        // Show a local loading page until the server is up
        // (finalizeYazzLoading later points the window at the live server).
        visifile.loadURL(url.format({
            pathname: path.join(__dirname, 'loading.html'),
            protocol: 'file:',
            slashes: true
        }))
        outputToBrowser("Loading Yazz Visual Javascript ... ")
        // Windows stores per-user data under LOCALAPPDATA; elsewhere use
        // Electron's standard userData location.
        if (isWin) {
            var localappdata = process.env.LOCALAPPDATA
            userData = path.join(localappdata, '/Visifile/')
        } else {
            userData = electronApp.getPath('userData')
            console.log("read userData : " + userData)
        }
        // File > Open handler: prompt for a file, broadcast the chosen path
        // to connected browsers, then load the app from it.
        getFileFromUser = (async function() {
            dialog.showOpenDialog(visifile, {
                properties: ['openFile', 'openDirectory']
            }).then(result => {
                console.log(result.canceled)
                console.log(result.filePaths)
                if (result.canceled) {
                    return
                }
                console.log("********** load file........... ")
                let sd= uuidv1()
                sendOverWebSockets({
                    type: "set_file_upload_uuid",
                    file_upload_uuid: sd
                });
                sendOverWebSockets({
                    type: "set_saveCodeToFile",
                    saveCodeToFile: result.filePaths[0]
                });
                saveCodeToFile = result.filePaths[0]
                loadAppFromFile( result.filePaths[0],
                                 sd)
                /*fs.readFile(result.filePaths[0], 'utf-8', (err, data) => {
                    if(err){
                        alert("An error ocurred reading the file :" + err.message);
                        return;
                    }
                    // Change how to handle the file content
                    console.log("The file content is : " + data);
                });*/
            }).catch(err => {
                console.log(err)
            })
        })
        findSystemDataDirectoryAndStart()
        finishInit()
        // Standard Electron menu template (macOS gets the extra app menu).
        const template = [
            // { role: 'appMenu' }
            ...(isMac ? [{
                // NOTE(review): 'app' here is the Express app, which has no
                // .name property - this likely should be electronApp.name;
                // confirm before changing.
                label: app.name,
                submenu: [
                    { role: 'about' },
                    { type: 'separator' },
                    { role: 'services' },
                    { type: 'separator' },
                    { role: 'hide' },
                    { role: 'hideothers' },
                    { role: 'unhide' },
                    { type: 'separator' },
                    { role: 'quit' }
                ]
            }] : []),
            // { role: 'fileMenu' }
            {
                label: 'File',
                submenu: [
                    isMac ? { role: 'close' } : { role: 'quit' }
                ]
            },
            // { role: 'editMenu' }
            {
                label: 'Edit',
                submenu: [
                    { role: 'undo' },
                    { role: 'redo' },
                    { type: 'separator' },
                    { role: 'cut' },
                    { role: 'copy' },
                    { role: 'paste' },
                    ...(isMac ? [
                        { role: 'pasteAndMatchStyle' },
                        { role: 'delete' },
                        { role: 'selectAll' },
                        { type: 'separator' },
                        {
                            label: 'Speech',
                            submenu: [
                                { role: 'startSpeaking' },
                                { role: 'stopSpeaking' }
                            ]
                        }
                    ] : [
                        { role: 'delete' },
                        { type: 'separator' },
                        { role: 'selectAll' }
                    ])
                ]
            },
            // { role: 'viewMenu' }
            {
                label: 'View',
                submenu: [
                    { role: 'reload' },
                    { role: 'forceReload' },
                    { role: 'toggleDevTools' },
                    { type: 'separator' },
                    { role: 'resetZoom' },
                    { role: 'zoomIn' },
                    { role: 'zoomOut' },
                    { type: 'separator' },
                    { role: 'togglefullscreen' }
                ]
            },
            // { role: 'windowMenu' }
            {
                label: 'Window',
                submenu: [
                    { role: 'minimize' },
                    { role: 'zoom' },
                    ...(isMac ? [
                        { type: 'separator' },
                        { role: 'front' },
                        { type: 'separator' },
                        { role: 'window' }
                    ] : [
                        { role: 'close' }
                    ])
                ]
            },
            {
                role: 'help',
                submenu: [
                    {
                        label: 'Docs',
                        click: async () => {
                            const { shell } = require('electron')
                            await shell.openExternal('https://yazz.com/visifile/docs/yazz_march_2020.pdf')
                        }
                    },
                    {
                        label: 'Learn More',
                        click: async () => {
                            const { shell } = require('electron')
                            await shell.openExternal('https://yazz.com/visifile/mac_app.html')
                        }
                    }
                ]
            }
        ]
        const menu = Menu.buildFromTemplate(template)
        Menu.setApplicationMenu(menu)
    })
// if not an electron app
} else {
    outputDebug("process.platform = " + process.platform)
    // On Windows, wire SIGINT through readline so Ctrl-C shuts down cleanly.
    if (process.platform === "win32") {
        var rl = require2("readline").createInterface({
            input: process.stdin,
            output: process.stdout
        });
        rl.on("SIGINT", function () {
            shutDown();
            process.exit();
        });
    }
    if (isWin) {
        outputDebug("Running as Windows")
        var localappdata = process.env.LOCALAPPDATA
        userData = path.join(localappdata, '/Yazz/')
    } else {
        outputDebug("Running as Linux/Mac")
        userData = path.join(LOCAL_HOME, 'Yazz')
    }
    findSystemDataDirectoryAndStart()
    finishInit()
}
// Resolves the per-user database path, optionally wipes previous data,
// prepares the upload/app data directory tree, and opens the main SQLite
// database in WAL mode. Relies on module-level globals: userData, username,
// deleteOnStartup, LOCAL_HOME, upload, dbPath and dbsearch.
function findSystemDataDirectoryAndStart() {
    console.log("userData : " + userData)
    console.log("username : " + username)
    // Main per-user database file: <userData>/<username>.visi
    dbPath = path.join(userData, username + '.visi')
    if (deleteOnStartup) {
        outputDebug("deleting dir :" + userData)
        // Length guard: refuse to recursively delete suspiciously short
        // paths (e.g. "/" or "C:\") in case userData was mis-resolved.
        if (userData.length > 6) {
            deleteYazzDataV2(userData)
        }
    }
    var uploadPath = path.join(userData, 'uploads/')
    outputDebug("LOCAL_HOME: " + LOCAL_HOME)
    outputDebug("userData: " + userData)
    outputDebug("uploadPath: " + uploadPath)
    // Multer instance used by the HTTP layer for handling file uploads.
    upload = multer( { dest: uploadPath});
    // Ensure the expected data sub-directories exist before first use.
    if (!fs.existsSync( path.join(userData, 'uploads') )) {
        mkdirp.sync(path.join(userData, 'uploads'));
    }
    if (!fs.existsSync( path.join(userData, 'files') )) {
        mkdirp.sync(path.join(userData, 'files'));
    }
    if (!fs.existsSync( path.join(userData, 'apps') )) {
        mkdirp.sync(path.join(userData, 'apps'));
    }
    if (!fs.existsSync( path.join(userData, 'app_dbs') )) {
        mkdirp.sync(path.join(userData, 'app_dbs'));
    }
    // NOTE(review): localappdata is only assigned on the Windows code paths
    // earlier in this file; on Linux/Mac this logs "undefined" — confirm intended.
    outputDebug('process.env.LOCALAPPDATA: ' + JSON.stringify(localappdata ,null,2))
    outputDebug("Local home data path: " + LOCAL_HOME)
    outputDebug("userData: " + JSON.stringify(userData ,null,2))
    outputDebug("process.env keys: " + Object.keys(process.env))
    // Open the main search database and switch it to write-ahead logging.
    dbsearch = new sqlite3.Database(dbPath);
    dbsearch.run("PRAGMA journal_mode=WAL;")
}
// Executes a SQL statement against the per-app SQLite database identified by
// args.base_component_id. Databases are opened lazily (WAL mode) and cached
// in the module-level appDbs map.
//
// args:
//   sql               - SQL text; empty/missing resolves to []
//   params            - bind parameters passed straight through to sqlite3
//   base_component_id - key used to locate/cache the app database
//
// Returns a promise of the result rows for SELECT statements, or [] for
// write statements (resolved only after the transaction has committed).
async function executeSqliteForApp( args ) {
    if (!args.sql) {
        return []
    }
    var getSqlResults = new Promise(returnResult => {
        let appDb = appDbs[args.base_component_id]
        if (!appDb) {
            // First use of this app's database: open it, enable WAL,
            // and cache the handle for subsequent calls.
            let dbPath = path.join(userData, 'app_dbs/' + args.base_component_id + '.visi')
            appDb = new sqlite3.Database(dbPath);
            appDb.run("PRAGMA journal_mode=WAL;")
            appDbs[args.base_component_id] = appDb
        }
        if (args.sql.toLocaleLowerCase().trim().startsWith("select")) {
            // Read-only query. On error, log and resolve with [] so callers
            // always receive an array (previously an error yielded undefined
            // because err was ignored). Note: Database#serialize only takes a
            // callback — the stray sqlite3.OPEN_READONLY argument was removed.
            appDb.serialize(function() {
                appDb.all(args.sql, args.params, function(err, results) {
                    if (err) {
                        console.log("executeSqliteForApp query error: " + err)
                        returnResult([])
                        return
                    }
                    returnResult(results)
                })
            })
        } else {
            // Write statement: resolve only once the COMMIT has completed
            // (previously the promise resolved before the commit ran, so
            // callers could observe the database before the write landed).
            appDb.serialize(function() {
                appDb.run("begin deferred transaction");
                appDb.run(args.sql, args.params)
                appDb.run("commit", function(err) {
                    if (err) {
                        console.log("executeSqliteForApp commit error: " + err)
                    }
                    returnResult([])
                });
            })
        }
    })
    var res = await getSqlResults
    return res
}
// Guard flag consulted by the shutdown handlers; presumably set inside
// shutDown() so cleanup runs only once — confirm against shutDown().
var shuttingDown = false;
// Final startup step shared by the Electron and headless code paths:
// installs process shutdown hooks, starts the main child process, and
// (outside Electron) begins the periodic memory-stats sampling loop.
function finishInit() {
    process.on('exit', function() {
        shutDown();
    });
    // NOTE(review): 'quit' is not a standard Node.js process event — this
    // handler may never fire; confirm whether Electron's app 'quit' event
    // was intended instead.
    process.on('quit', function() {
        shutDown();
    });
    process.on("SIGINT", function () {
        shutDown();
        process.exit()
    });
    setupMainChildProcess();
    //------------------------------------------------------------------------------
    //
    // Memory statistics sampling (headless mode only)
    //
    //------------------------------------------------------------------------------
    if (!electron) {
        if (statsInterval > 0) {
            setInterval(function(){
                // inmemcalc guards against overlapping samples; it is
                // presumably reset elsewhere (e.g. by usePid) once all
                // child processes have reported — confirm.
                if (!inmemcalc) {
                    inmemcalc = true
                    totalMem = 0
                    const used = process.memoryUsage().heapUsed ;
                    totalMem += used
                    yazzProcessMainMemoryUsageMetric.set(used)
                    if (showStats) {
                        outputDebug(`Main: ${Math.round( bytesToMb(used) * 100) / 100} MB`);
                    }
                    // Ask every forked child for its memory usage.
                    allForked = Object.keys(forkedProcesses)
                    returnedmemCount = 0
                    for (var ttt=0; ttt< allForked.length; ttt++) {
                        var childProcessName = allForked[ttt]
                        const childprocess = forkedProcesses[childProcessName]
                        usePid(childProcessName,childprocess)
                    }
                }
            },(statsInterval * 1000))
        }
    }
}
| LaudateCorpus1/visualjavascript |
<|start_filename|>AutoPauseStealth/HarmonyPatches/Patches.cs<|end_filename|>
using HarmonyLib;
namespace AutoPauseStealth.Patches
{
[HarmonyPatch(typeof(AudioTimeSyncController))]
[HarmonyPatch("StartSong")]
class AudioTimeSyncControllerPatch
{
static void Postfix(AudioTimeSyncController __instance)
{
if (AutoPauseStealthController.StabilityPeriodActive && !AutoPauseStealthController.IsMultiplayer)
{
AutoPauseStealthController.ScoreController.enabled = false;
AutoPauseStealthController.SongController.PauseSong();
Logger.log?.Debug($"AutoPauseStealthController.StabilityPeriodActive is true " +
$"=> Pausing game right after AudioTimeSyncControllerPatch::StartSong()");
}
return;
}
}
[HarmonyPatch(typeof(PauseMenuManager))]
[HarmonyPatch("ShowMenu")]
class PauseMenuManagerPatch
{
static void Postfix(PauseMenuManager __instance)
{
if (!AutoPauseStealthController.IsMultiplayer)
{
AutoPauseStealthController.instance.OnPauseShowMenu();
}
return;
}
}
} | denpadokei/AutoPauseStealth |
<|start_filename|>module-search/src/main/java/com/popo/module_search/mvp/mvp/ui/activity/SearchActivity.java<|end_filename|>
package com.popo.module_search.mvp.mvp.ui.activity;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.view.MenuItemCompat;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.SearchView;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import com.alibaba.android.arouter.facade.annotation.Route;
import com.jess.arms.base.BaseActivity;
import com.jess.arms.di.component.AppComponent;
import com.jess.arms.utils.ArmsUtils;
import com.popo.module_search.R;
import com.popo.module_search.mvp.di.component.DaggerSearchComponent;
import com.popo.module_search.mvp.mvp.contract.SearchContarct;
import com.popo.module_search.mvp.mvp.presenter.SearchPresent;
import javax.inject.Inject;
import me.jessyan.armscomponent.commonsdk.core.RouterHub;
import timber.log.Timber;
@Route(path = RouterHub.SEARCH_ACTIVITY)
public class SearchActivity extends BaseActivity<SearchPresent> implements SearchContarct.View,SwipeRefreshLayout.OnRefreshListener{
@Inject
RecyclerView.Adapter mAdapter;
RecyclerView recyclerView;
Toolbar toolbar;
SearchView searchView;
SwipeRefreshLayout swipeRefreshLayout;
@Override
public void setupActivityComponent(@NonNull AppComponent appComponent) {
DaggerSearchComponent
.builder()
.appComponent(appComponent)
.view(this)
.build()
.inject(this);
}
@Override
public int initView(@Nullable Bundle savedInstanceState) {
return R.layout.search_main;
}
@Override
public void initData(@Nullable Bundle savedInstanceState) {
toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
getSupportActionBar().setDisplayShowTitleEnabled(false);
swipeRefreshLayout=(SwipeRefreshLayout)findViewById(R.id.swipeRefreshLayout);
swipeRefreshLayout.setEnabled(false);
recyclerView=(RecyclerView)findViewById(R.id.recyclerView);
LinearLayoutManager llm = new LinearLayoutManager(this);
llm.setOrientation(LinearLayoutManager.VERTICAL);
recyclerView.setLayoutManager(llm);
recyclerView.setAdapter(mAdapter);
}
@Override
public void showLoading() {
swipeRefreshLayout.setRefreshing(true);
}
@Override
public void hideLoading() {
swipeRefreshLayout.setRefreshing(false);
}
@Override
public void startLoadMore() {
}
@Override
public Activity getActivity() {
return this;
}
@Override
public void endLoadMore() {
}
@Override
public void showMessage(@NonNull String message) {
}
@Override
public void launchActivity(@NonNull Intent intent) {
ArmsUtils.startActivity(intent);
}
@Override
public void killMyself() {
finish();
}
@Override
public void onRefresh() {
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.search_toobar,menu);
MenuItem searchItem = menu.findItem(R.id.menu_search);
//通过MenuItem得到SearchView
searchView = (SearchView) MenuItemCompat.getActionView(searchItem);
searchView.setSubmitButtonEnabled(true);
searchView.setQueryHint("搜索游戏");
searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
@Override
public boolean onQueryTextSubmit(String query) {
mPresenter.searchFromSteam(query);
searchView.clearFocus();
// InputMethodManager imm = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE);
// if (imm != null) {
// imm.hideSoftInputFromWindow(getWindow().getDecorView().getWindowToken(), 0);
// }
return true;
}
@Override
public boolean onQueryTextChange(String newText) {
return false;
}
});
return super.onCreateOptionsMenu(menu);
}
}
| noterpopo/Hands-Chopping |
<|start_filename|>HtmlTextView/src/main/java/org/sufficientlysecure/htmltextview/HtmlFormatterBuilder.java<|end_filename|>
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sufficientlysecure.htmltextview;
import android.text.Html.ImageGetter;
import androidx.annotation.Nullable;
/**
 * Mutable builder collecting the inputs for HtmlFormatter: the HTML source,
 * image/link handlers, table spans, list indent and trailing-whitespace
 * behavior. All setters return {@code this} so calls can be chained.
 */
public class HtmlFormatterBuilder {

    private String html;
    private ImageGetter imageGetter;
    private ClickableTableSpan clickableTableSpan;
    private DrawTableLinkSpan drawTableLinkSpan;
    private OnClickATagListener onClickATagListener;
    private float indent = 24.0f; // default list indent in pixels
    private boolean removeTrailingWhiteSpace = true;

    public String getHtml() {
        return html;
    }

    public ImageGetter getImageGetter() {
        return imageGetter;
    }

    public ClickableTableSpan getClickableTableSpan() {
        return clickableTableSpan;
    }

    public DrawTableLinkSpan getDrawTableLinkSpan() {
        return drawTableLinkSpan;
    }

    public OnClickATagListener getOnClickATagListener() {
        return onClickATagListener;
    }

    public float getIndent() {
        return indent;
    }

    public boolean isRemoveTrailingWhiteSpace() {
        return removeTrailingWhiteSpace;
    }

    public HtmlFormatterBuilder setHtml(@Nullable final String html) {
        this.html = html;
        return this;
    }

    public HtmlFormatterBuilder setImageGetter(@Nullable final ImageGetter imageGetter) {
        this.imageGetter = imageGetter;
        return this;
    }

    public HtmlFormatterBuilder setClickableTableSpan(@Nullable final ClickableTableSpan clickableTableSpan) {
        this.clickableTableSpan = clickableTableSpan;
        return this;
    }

    public HtmlFormatterBuilder setDrawTableLinkSpan(@Nullable final DrawTableLinkSpan drawTableLinkSpan) {
        this.drawTableLinkSpan = drawTableLinkSpan;
        return this;
    }

    /**
     * Now returns {@code this} like every other setter: previously this was
     * the only setter returning void, which broke builder chaining.
     * Source-compatible with existing callers that ignore the return value.
     */
    public HtmlFormatterBuilder setOnClickATagListener(OnClickATagListener onClickATagListener) {
        this.onClickATagListener = onClickATagListener;
        return this;
    }

    public HtmlFormatterBuilder setIndent(final float indent) {
        this.indent = indent;
        return this;
    }

    public HtmlFormatterBuilder setRemoveTrailingWhiteSpace(final boolean removeTrailingWhiteSpace) {
        this.removeTrailingWhiteSpace = removeTrailingWhiteSpace;
        return this;
    }
}
<|start_filename|>HtmlTextView/src/main/java/org/sufficientlysecure/htmltextview/OnClickATagListener.java<|end_filename|>
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sufficientlysecure.htmltextview;
import android.view.View;
import androidx.annotation.Nullable;
/**
 * Callback interface invoked when an anchor ({@code <a>}) tag rendered by
 * {@link HtmlTextView} is clicked. Return {@code true} to consume the click;
 * returning {@code false} lets the default link handling run.
 */
public interface OnClickATagListener {
    /**
     * Notifies of anchor tag click events.
     * @param widget - the {@link HtmlTextView} instance the span lives in
     * @param spannedText - the string value of the text spanned
     * @param href - the url for the anchor tag (may be null if absent)
     * @return indicates whether the click event has been handled
     */
    boolean onClick(View widget, String spannedText, @Nullable String href);
}
<|start_filename|>HtmlTextView/src/main/java/org/sufficientlysecure/htmltextview/HtmlTextView.java<|end_filename|>
/*
* Copyright (C) 2013-2014 <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sufficientlysecure.htmltextview;
import android.content.Context;
import android.text.Html;
import android.text.Spannable;
import android.text.Spanned;
import android.text.style.QuoteSpan;
import android.util.AttributeSet;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RawRes;
import java.io.InputStream;
import java.util.Scanner;
/**
 * TextView that parses HTML (from a string or a raw resource) into Android's
 * Spannable format and renders it, with support for images, clickable tables,
 * anchor-click callbacks, list indentation and styled blockquotes.
 */
public class HtmlTextView extends JellyBeanSpanFixTextView {

    public static final String TAG = "HtmlTextView";
    public static final boolean DEBUG = false;

    // Blockquote styling applied when QuoteSpans are replaced in setHtml().
    // NOTE(review): Resources#getColor(int) is deprecated on newer APIs;
    // migrating would require ContextCompat — left unchanged here.
    public int blockQuoteBackgroundColor = getResources().getColor(R.color.White);
    public int blockQuoteStripColor = getResources().getColor(R.color.black);
    public float blockQuoteStripWidth = 10F;
    public float blockQuoteGap = 20F;

    @Nullable
    private ClickableTableSpan clickableTableSpan;
    @Nullable
    private DrawTableLinkSpan drawTableLinkSpan;
    @Nullable
    private OnClickATagListener onClickATagListener;

    private float indent = 24.0f; // Default to 24px.

    private boolean removeTrailingWhiteSpace = true;

    public HtmlTextView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    public HtmlTextView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public HtmlTextView(Context context) {
        super(context);
    }

    /**
     * @see org.sufficientlysecure.htmltextview.HtmlTextView#setHtml(int)
     */
    public void setHtml(@RawRes int resId) {
        setHtml(resId, null);
    }

    /**
     * @see org.sufficientlysecure.htmltextview.HtmlTextView#setHtml(String)
     */
    public void setHtml(@NonNull String html) {
        setHtml(html, null);
    }

    /**
     * Loads HTML from a raw resource, i.e., a HTML file in res/raw/.
     * This allows translatable resource (e.g., res/raw-de/ for german).
     * The containing HTML is parsed to Android's Spannable format and then displayed.
     *
     * @param resId for example: R.raw.help
     * @param imageGetter for fetching images. Possible ImageGetter provided by this library:
     *                    HtmlLocalImageGetter and HtmlRemoteImageGetter
     */
    public void setHtml(@RawRes int resId, @Nullable Html.ImageGetter imageGetter) {
        InputStream inputStreamText = getContext().getResources().openRawResource(resId);

        setHtml(convertStreamToString(inputStreamText), imageGetter);
    }

    /**
     * Parses String containing HTML to Android's Spannable format and displays it in this TextView.
     * Using the implementation of Html.ImageGetter provided.
     *
     * @param html String containing HTML, for example: "<b>Hello world!</b>"
     * @param imageGetter for fetching images. Possible ImageGetter provided by this library:
     *                    HtmlLocalImageGetter and HtmlRemoteImageGetter
     */
    public void setHtml(@NonNull String html, @Nullable Html.ImageGetter imageGetter) {
        Spanned styledText = HtmlFormatter.formatHtml(
                html, imageGetter, clickableTableSpan, drawTableLinkSpan,
                new HtmlFormatter.TagClickListenerProvider() {
                    @Override
                    public OnClickATagListener provideTagClickListener() {
                        return onClickATagListener;
                    }
                }, indent, removeTrailingWhiteSpace
        );
        replaceQuoteSpans(styledText);
        setText(styledText);

        // make links work
        setMovementMethod(LocalLinkMovementMethod.getInstance());
    }

    /**
     * The Html.fromHtml method has the behavior of adding extra whitespace at the bottom
     * of the parsed HTML displayed in for example a TextView. In order to remove this
     * whitespace call this method before setting the text with setHtml on this TextView.
     *
     * @param removeTrailingWhiteSpace true if the whitespace rendered at the bottom of a TextView
     *                                 after setting HTML should be removed.
     */
    public void setRemoveTrailingWhiteSpace(boolean removeTrailingWhiteSpace) {
        this.removeTrailingWhiteSpace = removeTrailingWhiteSpace;
    }

    public void setClickableTableSpan(@Nullable ClickableTableSpan clickableTableSpan) {
        this.clickableTableSpan = clickableTableSpan;
    }

    public void setDrawTableLinkSpan(@Nullable DrawTableLinkSpan drawTableLinkSpan) {
        this.drawTableLinkSpan = drawTableLinkSpan;
    }

    public void setOnClickATagListener(@Nullable OnClickATagListener onClickATagListener) {
        this.onClickATagListener = onClickATagListener;
    }

    /**
     * Add ability to increase list item spacing. Useful for configuring spacing based on device
     * screen size. This applies to ordered and unordered lists.
     *
     * @param px pixels to indent.
     */
    public void setListIndentPx(float px) {
        this.indent = px;
    }

    /**
     * Reads the entire stream into a single String.
     * http://stackoverflow.com/questions/309424/read-convert-an-inputstream-to-a-string
     *
     * Fixed: the Scanner (and with it the underlying InputStream) is now
     * closed via try-with-resources; previously it leaked on every
     * raw-resource load.
     */
    @NonNull
    private static String convertStreamToString(@NonNull InputStream is) {
        try (Scanner s = new Scanner(is).useDelimiter("\\A")) {
            return s.hasNext() ? s.next() : "";
        }
    }

    /**
     * Swaps Android's stock QuoteSpans for DesignQuoteSpans so blockquotes
     * are drawn with this view's configured colors, stripe width and gap.
     */
    private void replaceQuoteSpans(Spanned spanned) {
        Spannable spannable = (Spannable) spanned;
        QuoteSpan[] quoteSpans = spannable.getSpans(0, spannable.length() - 1, QuoteSpan.class);
        for (QuoteSpan quoteSpan : quoteSpans) {
            int start = spannable.getSpanStart(quoteSpan);
            int end = spannable.getSpanEnd(quoteSpan);
            int flags = spannable.getSpanFlags(quoteSpan);
            spannable.removeSpan(quoteSpan);
            spannable.setSpan(new DesignQuoteSpan(
                            blockQuoteBackgroundColor,
                            blockQuoteStripColor,
                            blockQuoteStripWidth,
                            blockQuoteGap),
                    start,
                    end,
                    flags);
        }
    }
}
<|start_filename|>HtmlTextView/src/main/java/org/sufficientlysecure/htmltextview/DesignQuoteSpan.java<|end_filename|>
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sufficientlysecure.htmltextview;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.text.Layout;
import android.text.style.LeadingMarginSpan;
import android.text.style.LineBackgroundSpan;
import androidx.annotation.NonNull;
/**
 * Replacement for Android's default {@link android.text.style.QuoteSpan}:
 * draws a blockquote with a colored vertical stripe, a gap, and a background
 * color behind the quoted lines.
 */
public class DesignQuoteSpan implements LeadingMarginSpan, LineBackgroundSpan {

    // Appearance parameters are fixed at construction time, so the fields
    // are final (previously mutable for no reason).
    private final int backgroundColor;
    private final int stripColor;
    private final float stripeWidth;
    private final float gap;

    DesignQuoteSpan(int backgroundColor, int stripColor, float stripWidth, float gap) {
        this.backgroundColor = backgroundColor;
        this.stripColor = stripColor;
        this.stripeWidth = stripWidth;
        this.gap = gap;
    }

    /** Leading margin = stripe width plus the gap between stripe and text. */
    @Override
    public int getLeadingMargin(boolean first) {
        return (int) (stripeWidth + gap);
    }

    @Override
    public void drawLeadingMargin(Canvas c, Paint p, int x, int dir, int top, int baseline,
                                  int bottom, CharSequence text, int start, int end, boolean first,
                                  Layout layout) {
        // Save paint state, draw the vertical stripe, then restore the paint
        // so subsequent spans see an unmodified Paint.
        Paint.Style style = p.getStyle();
        int paintColor = p.getColor();

        p.setStyle(Paint.Style.FILL);
        p.setColor(stripColor);

        c.drawRect((float) x, (float) top, x + dir * stripeWidth, (float) bottom, p);

        p.setStyle(style);
        p.setColor(paintColor);
    }

    @Override
    public void drawBackground(@NonNull Canvas canvas, @NonNull Paint paint,
                               int left, int right, int top, int baseline, int bottom,
                               @NonNull CharSequence text, int start, int end, int lineNumber) {
        // Paint the quote background across the whole line, restoring the
        // caller's paint color afterwards.
        int paintColor = paint.getColor();
        paint.setColor(backgroundColor);
        canvas.drawRect((float) left, (float) top, (float) right, (float) bottom, paint);
        paint.setColor(paintColor);
    }
}
<|start_filename|>HtmlTextView/src/main/java/org/sufficientlysecure/htmltextview/WrapperContentHandler.java<|end_filename|>
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sufficientlysecure.htmltextview;
import android.text.Editable;
import android.text.Html;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
/**
 * Bridges Android's {@link Html.TagHandler} callback into a full SAX
 * {@link ContentHandler}. On the first {@code handleTag} callback it swaps
 * itself in as the XMLReader's content handler; from then on every start/end
 * element is offered to the wrapped {@link WrapperTagHandler} first and only
 * delegated to the original handler when the tag handler does not consume it.
 * All other SAX events are forwarded unchanged.
 */
public class WrapperContentHandler implements ContentHandler, Html.TagHandler {

    private ContentHandler mContentHandler; // original handler, captured lazily in handleTag
    private WrapperTagHandler mTagHandler;
    private Editable mSpannableStringBuilder; // output buffer handed to the tag handler

    public WrapperContentHandler(WrapperTagHandler tagHandler) {
        this.mTagHandler = tagHandler;
    }

    /**
     * Called by Html.fromHtml for unknown tags. Used here only as a hook to
     * install this wrapper as the reader's ContentHandler on first use.
     */
    @Override
    public void handleTag(boolean opening, String tag, Editable output, XMLReader xmlReader) {
        if (mContentHandler == null) {
            mSpannableStringBuilder = output;
            mContentHandler = xmlReader.getContentHandler();
            xmlReader.setContentHandler(this);
        }
    }

    @Override
    public void setDocumentLocator(Locator locator) {
        mContentHandler.setDocumentLocator(locator);
    }

    @Override
    public void startDocument() throws SAXException {
        mContentHandler.startDocument();
    }

    @Override
    public void endDocument() throws SAXException {
        mContentHandler.endDocument();
    }

    @Override
    public void startPrefixMapping(String prefix, String uri) throws SAXException {
        mContentHandler.startPrefixMapping(prefix, uri);
    }

    @Override
    public void endPrefixMapping(String prefix) throws SAXException {
        mContentHandler.endPrefixMapping(prefix);
    }

    // Element events are intercepted: the tag handler gets first refusal.
    @Override
    public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
        if (!mTagHandler.handleTag(true, localName, mSpannableStringBuilder, attributes)) {
            mContentHandler.startElement(uri, localName, qName, attributes);
        }
    }

    @Override
    public void endElement(String uri, String localName, String qName) throws SAXException {
        if (!mTagHandler.handleTag(false, localName, mSpannableStringBuilder, null)) {
            mContentHandler.endElement(uri, localName, qName);
        }
    }

    @Override
    public void characters(char[] ch, int start, int length) throws SAXException {
        mContentHandler.characters(ch, start, length);
    }

    @Override
    public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException {
        mContentHandler.ignorableWhitespace(ch, start, length);
    }

    @Override
    public void processingInstruction(String target, String data) throws SAXException {
        mContentHandler.processingInstruction(target, data);
    }

    @Override
    public void skippedEntity(String name) throws SAXException {
        mContentHandler.skippedEntity(name);
    }
}
| Mika-89/html-textview |
<|start_filename|>rest/resourcecache.go<|end_filename|>
package rest
import (
"container/list"
"sync"
"time"
)
// resourceCache is an LRU+TTL cache of Responses, keyed by request URL and
// driven by response caching headers. It is serviced by background
// goroutines started in init(): one owning the LRU list and one driving
// TTL expiry.
var resourceCache *resourceTtlLruMap
// ByteSize is a helper for configuring MaxCacheSize
type ByteSize int64

const (
	_ = iota
	// KB = KiloBytes
	KB ByteSize = 1 << (10 * iota)
	// MB = MegaBytes
	MB
	// GB = GigaBytes
	GB
)

// MaxCacheSize is the maximum byte size to be held by the ResourceCache.
// Default is 1 GigaByte
// Type: rest.ByteSize
var MaxCacheSize = 1 * GB

// cacheSize is the current cache size in bytes; mutated only while holding
// the cache's write lock.
var cacheSize int64

// lruOperation identifies a command for the LRU list goroutine.
type lruOperation int

const (
	move lruOperation = iota // promote an entry to most-recently-used
	push                     // insert a new entry at the front
	del                      // remove an entry from the list
	last                     // report the least-recently-used key on popChan
)
// lruMsg is a command message consumed by the lruOperations goroutine.
type lruMsg struct {
	operation lruOperation
	resp      *Response // target entry; nil for the `last` operation
}

// resourceTtlLruMap is the cache itself: a map guarded by rwMutex, plus a
// TTL-ordered skip list and an LRU list maintained by a dedicated goroutine.
type resourceTtlLruMap struct {
	cache    map[string]*Response
	skipList *skipList    // skiplist for TTL
	lruList  *list.List   // List for LRU
	lruChan  chan *lruMsg // Channel for LRU messages
	ttlChan  chan bool    // Channel for TTL messages
	popChan  chan string  // replies to `last` requests with the LRU key
	rwMutex  sync.RWMutex // Read Write Locking Mutex
}
// init wires up the package-level cache and launches its two service
// goroutines: one consuming LRU list messages, one driving TTL expiry.
func init() {
	resourceCache = &resourceTtlLruMap{
		cache:    make(map[string]*Response),
		skipList: newSkipList(),
		lruList:  list.New(),
		lruChan:  make(chan *lruMsg, 10000),
		ttlChan:  make(chan bool, 1000),
		popChan:  make(chan string),
		rwMutex:  sync.RWMutex{},
	}

	go resourceCache.lruOperations()
	go resourceCache.ttl()
}
// lruOperations is the single goroutine that owns lruList: it serializes
// all list mutations received over lruChan, so the list itself needs no
// locking.
func (rCache *resourceTtlLruMap) lruOperations() {
	for {
		msg := <-rCache.lruChan
		switch msg.operation {
		case move:
			// Cache hit: promote the entry to most-recently-used.
			rCache.lruList.MoveToFront(msg.resp.listElement)
		case push:
			// New entry: remember its list node for later moves/removals.
			msg.resp.listElement = rCache.lruList.PushFront(msg.resp.Request.URL.String())
		case del:
			rCache.lruList.Remove(msg.resp.listElement)
		case last:
			// Eviction request: report the least-recently-used key.
			// NOTE(review): assumes the list is non-empty — Back() returns
			// nil on an empty list; confirm callers only ask when size > 0.
			rCache.popChan <- rCache.lruList.Back().Value.(string)
		}
	}
}
// get returns the cached Response for key, or nil. Expired entries are
// removed under the write lock, with a re-check because the entry may have
// been replaced between releasing the read lock and acquiring the write
// lock. Live hits are promoted asynchronously via lruChan.
func (rCache *resourceTtlLruMap) get(key string) *Response {
	//Read lock only
	rCache.rwMutex.RLock()
	resp := rCache.cache[key]
	rCache.rwMutex.RUnlock()

	//If expired, remove it
	if resp != nil && resp.ttl != nil && resp.ttl.Sub(time.Now()) <= 0 {
		//Full lock
		rCache.rwMutex.Lock()
		defer rCache.rwMutex.Unlock()

		//JIC, get the freshest version
		resp = rCache.cache[key]

		//Check again with the lock
		if resp != nil && resp.ttl != nil && resp.ttl.Sub(time.Now()) <= 0 {
			rCache.remove(key, resp)
			return nil //return. Do not send the move message
		}
	}

	if resp != nil {
		//Buffered msg to LruList
		//Move forward
		rCache.lruChan <- &lruMsg{
			operation: move,
			resp:      resp,
		}
	}

	return resp
}
// setNX stores value only if key is absent (set-if-not-exists). It
// registers the entry with the LRU goroutine, schedules TTL expiry when the
// value carries one, and evicts up to 10 least-recently-used entries while
// the cache size exceeds MaxCacheSize.
func (rCache *resourceTtlLruMap) setNX(key string, value *Response) {
	//Full Lock
	rCache.rwMutex.Lock()
	defer rCache.rwMutex.Unlock()

	v := rCache.cache[key]
	if v == nil {
		rCache.cache[key] = value

		//PushFront in LruList
		rCache.lruChan <- &lruMsg{
			operation: push,
			resp:      value,
		}

		//Set ttl if necessary; wake the ttl goroutine to re-arm its timer.
		if value.ttl != nil {
			value.skipListElement = rCache.skipList.insert(key, *value.ttl)
			rCache.ttlChan <- true
		}

		// Add Response Size to Cache.
		// Atomic not needed: cacheSize is only touched under the write lock.
		cacheSize += value.size()

		// Evict LRU entries while over budget (bounded to 10 per insert).
		for i := 0; ByteSize(cacheSize) >= MaxCacheSize && i < 10; i++ {
			rCache.lruChan <- &lruMsg{
				last,
				nil,
			}
			k := <-rCache.popChan
			r := rCache.cache[k]
			rCache.remove(k, r)
		}
	}
}
// remove deletes the entry from the map, the TTL skip list and (via a
// message) the LRU list, and credits its size back to cacheSize.
// Callers must hold rwMutex for writing.
func (rCache *resourceTtlLruMap) remove(key string, resp *Response) {
	delete(rCache.cache, key)                    //Delete from map
	rCache.skipList.remove(resp.skipListElement) //Delete from skipList
	rCache.lruChan <- &lruMsg{                   //Delete from LruList
		operation: del,
		resp:      resp,
	}

	// Subtract from the byte count.
	// Atomic not needed: only called under the write lock.
	cacheSize -= resp.size()
}
// ttl is the expiry goroutine. Each wake-up (triggered by setNX or by the
// timer itself via ttlChan) walks level 0 of the skip list, which is
// ordered by expiry time, evicting every entry already past its TTL, then
// re-arms the timer for the next soonest expiry.
func (rCache *resourceTtlLruMap) ttl() {
	// Function to send a message when the timer expires
	backToFuture := func() {
		rCache.ttlChan <- true
	}

	// A timer; the initial 24h duration is a placeholder until the first
	// real TTL is known.
	future := time.AfterFunc(24*time.Hour, backToFuture)

	for {
		<-rCache.ttlChan

		//Full Lock
		rCache.rwMutex.Lock()

		now := time.Now()

		// Traverse the skiplist which is ordered by ttl.
		// We do this by looping at level 0
		for node := rCache.skipList.head.next[0]; node != nil; node = node.next[0] {
			timeLeft := node.ttl.Sub(now)

			// If we still have time, re-arm the timer and stop the sweep:
			// everything after this node expires even later.
			if timeLeft > 0 {
				if !future.Reset(timeLeft) {
					future = time.AfterFunc(timeLeft, backToFuture)
				}
				break
			}

			// Remove from cache if time's up
			rCache.remove(node.key, rCache.cache[node.key])
		}
		rCache.rwMutex.Unlock()
	}
}
<|start_filename|>rest/rest_test.go<|end_filename|>
package rest
import (
"net/http"
"strings"
"testing"
"time"
)
// TestGet checks a plain synchronous GET returns 200.
func TestGet(t *testing.T) {
	response := Get(server.URL + "/user")
	if response.StatusCode != http.StatusOK {
		t.Fatal("Status != OK (200)")
	}
}

// TestSlowGet issues 100 sequential GETs against the slow endpoint.
func TestSlowGet(t *testing.T) {
	var responses [100]*Response
	for idx := 0; idx < len(responses); idx++ {
		responses[idx] = rb.Get("/slow/user")
		if responses[idx].Response.StatusCode != http.StatusOK {
			t.Fatal("f Status != OK (200)")
		}
	}
}

// TestHead checks a HEAD request returns 200.
func TestHead(t *testing.T) {
	if response := Head(server.URL + "/user"); response.StatusCode != http.StatusOK {
		t.Fatal("Status != OK (200)")
	}
}

// TestPost checks a JSON POST returns 201.
func TestPost(t *testing.T) {
	payload := &User{Name: "Matilda"}
	if response := Post(server.URL+"/user", payload); response.StatusCode != http.StatusCreated {
		t.Fatal("Status != OK (201)")
	}
}

// TestPostXML checks an XML-content-type POST returns 201.
func TestPostXML(t *testing.T) {
	xmlBuilder := RequestBuilder{
		BaseURL:     server.URL,
		ContentType: XML,
	}
	if response := xmlBuilder.Post("/xml/user", &User{Name: "Matilda"}); response.StatusCode != http.StatusCreated {
		t.Fatal("Status != OK (201)")
	}
}
// TestPut/TestPatch/TestDelete/TestOptions verify the remaining synchronous
// verbs return 200. Fixed: the t.Fatal messages previously read
// "Status != OK (200" — the truncated diagnostics now include the closing
// parenthesis.
func TestPut(t *testing.T) {
	resp := Put(server.URL+"/user/3", &User{Name: "Pichucha"})
	if resp.StatusCode != http.StatusOK {
		t.Fatal("Status != OK (200)")
	}
}

func TestPatch(t *testing.T) {
	resp := Patch(server.URL+"/user/3", &User{Name: "Pichucha"})
	if resp.StatusCode != http.StatusOK {
		t.Fatal("Status != OK (200)")
	}
}

func TestDelete(t *testing.T) {
	resp := Delete(server.URL + "/user/4")
	if resp.StatusCode != http.StatusOK {
		t.Fatal("Status != OK (200)")
	}
}

func TestOptions(t *testing.T) {
	resp := Options(server.URL + "/user")
	if resp.StatusCode != http.StatusOK {
		t.Fatal("Status != OK (200)")
	}
}
func TestAsyncGet(t *testing.T) {
AsyncGet(server.URL+"/user", func(r *Response) {
if r.StatusCode != http.StatusOK {
t.Fatal("Status != OK (200)")
}
})
time.Sleep(50 * time.Millisecond)
}
func TestAsyncHead(t *testing.T) {
AsyncHead(server.URL+"/user", func(r *Response) {
if r.StatusCode != http.StatusOK {
t.Fatal("Status != OK (200)")
}
})
time.Sleep(50 * time.Millisecond)
}
func TestAsyncPost(t *testing.T) {
AsyncPost(server.URL+"/user", &User{Name: "Matilda"}, func(r *Response) {
if r.StatusCode != http.StatusCreated {
t.Fatal("Status != OK (201)")
}
})
time.Sleep(50 * time.Millisecond)
}
func TestAsyncPut(t *testing.T) {
AsyncPut(server.URL+"/user/3", &User{Name: "Pichucha"}, func(r *Response) {
if r.StatusCode != http.StatusOK {
t.Fatal("Status != OK (200)")
}
})
time.Sleep(50 * time.Millisecond)
}
func TestAsyncPatch(t *testing.T) {
AsyncPatch(server.URL+"/user/3", &User{Name: "Pichucha"}, func(r *Response) {
if r.StatusCode != http.StatusOK {
t.Fatal("Status != OK (200)")
}
})
time.Sleep(50 * time.Millisecond)
}
func TestAsyncDelete(t *testing.T) {
AsyncDelete(server.URL+"/user/4", func(r *Response) {
if r.StatusCode != http.StatusOK {
t.Fatal("Status != OK (200)")
}
})
time.Sleep(50 * time.Millisecond)
}
func TestAsyncOptions(t *testing.T) {
AsyncOptions(server.URL+"/user", func(r *Response) {
if r.StatusCode != http.StatusOK {
t.Fatal("Status != OK (200)")
}
})
time.Sleep(50 * time.Millisecond)
}
// TestHeaders verifies that a custom header configured on the RequestBuilder
// is forwarded to the server; the /header handler answers 400 if it is absent.
func TestHeaders(t *testing.T) {
	headers := make(http.Header)
	headers.Add("X-Test", "test")
	builder := RequestBuilder{
		BaseURL: server.URL,
		Headers: headers,
	}
	if resp := builder.Get("/header"); resp.StatusCode != http.StatusOK {
		t.Fatal("Status != OK (200)")
	}
}
// TestWrongURL verifies that requesting a malformed URL yields a non-nil Err.
func TestWrongURL(t *testing.T) {
	if resp := Get("foo"); resp.Err == nil {
		t.Fatal("Wrong URL should get an error")
	}
}
/* The tests below exist to increase coverage of net.go */
// TestRequestWithProxyAndFollowRedirect checks that a request routed through
// an unreachable proxy surfaces a proxy connection error.
func TestRequestWithProxyAndFollowRedirect(t *testing.T) {
	customPool := CustomPool{
		MaxIdleConnsPerHost: 100,
		Proxy:               "http://saraza",
	}
	restClient := new(RequestBuilder)
	restClient.ContentType = JSON
	restClient.DisableTimeout = true
	restClient.CustomPool = &customPool
	restClient.FollowRedirect = true
	response := restClient.Get(server.URL + "/user")
	expected := "error connecting to proxy"
	// Guard against a nil error before dereferencing: the original code
	// called response.Err.Error() unconditionally, which would panic
	// (instead of failing cleanly) if the request unexpectedly succeeded.
	if response.Err == nil {
		t.Fatalf("Expected %v Error, Got no error", expected)
	}
	if !strings.Contains(response.Err.Error(), expected) {
		t.Fatalf("Expected %v Error, Got %v as Response", expected, response.Err.Error())
	}
}
// TestRequestSendingClientMetrics exercises a plain GET through a zero-value
// RequestBuilder and expects 200 OK from the fixture server.
func TestRequestSendingClientMetrics(t *testing.T) {
	restClient := new(RequestBuilder)
	response := restClient.Get(server.URL + "/user")
	if response.StatusCode != http.StatusOK {
		t.Fatal("Status != OK (200)")
	}
}
// TestResponseExceedsConnectTimeout forces a 1ns connect timeout so that the
// dial phase fails before the handler can ever respond.
func TestResponseExceedsConnectTimeout(t *testing.T) {
	restClient := RequestBuilder{CustomPool: &CustomPool{}}
	restClient.ConnectTimeout = 1 * time.Nanosecond
	restClient.Timeout = 35 * time.Millisecond
	restClient.ContentType = JSON
	// NOTE(review): setup() registers "/slow/cache/user", not
	// "/cache/slow/user"; the 1ns connect timeout fails before routing, so
	// the test still passes — confirm which path was intended.
	scuResponse := restClient.Get(server.URL + "/cache/slow/user")
	// A failed dial surfaces as a "dial tcp ..." error from net/http.
	scuResponseErrIsTimeoutExceeded := func() bool {
		expected := "dial tcp"
		if scuResponse.Err != nil {
			return strings.Contains(scuResponse.Err.Error(), expected)
		}
		return false
	}
	if !scuResponseErrIsTimeoutExceeded() {
		t.Errorf("Timeouts configuration should get an error when connect")
	}
}
// TestResponseExceedsRequestTimeout uses a 9ms response timeout against a
// handler that sleeps 10ms, so the request must fail while awaiting headers.
func TestResponseExceedsRequestTimeout(t *testing.T) {
	restClient := RequestBuilder{CustomPool: &CustomPool{Transport: &http.Transport{}}}
	restClient.ConnectTimeout = 35 * time.Millisecond
	restClient.Timeout = 9 * time.Millisecond
	restClient.ContentType = JSON
	suResponse := restClient.Get(server.URL + "/slow/user")
	// The header-await timeout surfaces with this message from net/http.
	suResponseErrIsTimeoutExceeded := func() bool {
		expected := "timeout awaiting response headers"
		if suResponse.Err != nil {
			return strings.Contains(suResponse.Err.Error(), expected)
		}
		return false
	}
	if !suResponseErrIsTimeoutExceeded() {
		t.Fatalf("Timeouts configuration should get an error after connect")
	}
}
<|start_filename|>rest/allsetup_test.go<|end_filename|>
package rest
import (
"encoding/json"
"encoding/xml"
"io/ioutil"
"math/rand"
"net/http"
"net/http/httptest"
"os"
"strconv"
"testing"
"time"
)
// lastModifiedDate is the fixed Last-Modified timestamp served by the
// /cache/lastmodified/user handler; captured once at process start.
var lastModifiedDate = time.Now()

// User is the JSON payload exchanged with the fixture handlers.
type User struct {
	Id   int    `json:"id"`
	Name string `json:"name"`
}

// Shared fixture server: every handler is registered on tmux by setup().
var tmux = http.NewServeMux()
var server = httptest.NewServer(tmux)

// users is populated by setup() from userList (Ids start at 1).
var users []User

var userList = []string{
	"Hernan", "Mariana", "Matilda", "Juan", "Pedro", "John", "Axel", "Mateo",
}

// rb is a builder preconfigured with the fixture server's base URL.
var rb = RequestBuilder{
	BaseURL: server.URL,
}
// TestMain seeds the shared fixtures before the suite runs and propagates the
// test exit code to the OS.
func TestMain(m *testing.M) {
	setup()
	code := m.Run()
	// teardown()
	os.Exit(code)
}
// setup seeds the user fixtures and registers every HTTP handler used by the
// tests on the shared mux.
func setup() {
	rand.Seed(time.Now().UnixNano())
	users = make([]User, len(userList))
	for i, n := range userList {
		users[i] = User{Id: i + 1, Name: n}
	}
	// User-collection endpoints.
	tmux.HandleFunc("/user", allUsers)
	tmux.HandleFunc("/xml/user", usersXML)
	tmux.HandleFunc("/cache/user", usersCache)
	tmux.HandleFunc("/cache/expires/user", usersCacheWithExpires)
	tmux.HandleFunc("/cache/etag/user", usersEtag)
	tmux.HandleFunc("/cache/lastmodified/user", usersLastModified)
	tmux.HandleFunc("/slow/cache/user", slowUsersCache)
	tmux.HandleFunc("/slow/user", slowUsers)
	// Single-user endpoint (the trailing slash matches /user/{id}).
	tmux.HandleFunc("/user/", oneUser)
	// Header-inspection endpoint.
	tmux.HandleFunc("/header", withHeader)
}
func withHeader(writer http.ResponseWriter, req *http.Request) {
if req.Method == http.MethodGet {
if h := req.Header.Get("X-Test"); h == "test" {
return
}
}
writer.WriteHeader(http.StatusBadRequest)
return
}
// slowUsersCache delays 30ms before delegating to usersCache, to let timeout
// tests exercise a slow cached endpoint.
func slowUsersCache(writer http.ResponseWriter, req *http.Request) {
	time.Sleep(30 * time.Millisecond)
	usersCache(writer, req)
}
// slowUsers delays 10ms before delegating to allUsers, to let timeout tests
// exercise a slow uncached endpoint.
func slowUsers(writer http.ResponseWriter, req *http.Request) {
	time.Sleep(10 * time.Millisecond)
	allUsers(writer, req)
}
// usersCache serves the user list as JSON on GET with a randomized
// Cache-Control max-age of 1 or 2 seconds; other methods get an empty 200.
func usersCache(writer http.ResponseWriter, req *http.Request) {
	if req.Method != http.MethodGet {
		return
	}
	maxAge := rand.Intn(2) + 1
	payload, _ := json.Marshal(users)
	writer.Header().Set("Content-Type", "application/json")
	writer.Header().Set("Cache-Control", "max-age="+strconv.Itoa(maxAge))
	writer.Write(payload)
}
// usersCacheWithExpires serves the user list as JSON on GET with an Expires
// header set 1-2 seconds (randomized) in the future.
func usersCacheWithExpires(writer http.ResponseWriter, req *http.Request) {
	// Get
	if req.Method == http.MethodGet {
		c := rand.Intn(2) + 1
		b, _ := json.Marshal(users)
		expires := time.Now().Add(time.Duration(c) * time.Second)
		writer.Header().Set("Content-Type", "application/json")
		// httpDateFormat is declared elsewhere in the package; presumably
		// the RFC 1123 HTTP-date layout — TODO confirm.
		writer.Header().Set("Expires", expires.Format(httpDateFormat))
		writer.Write(b)
	}
}
// usersEtag serves the user list with a constant ETag of "1234" and answers
// 304 Not Modified when the client presents that tag via If-None-Match.
func usersEtag(writer http.ResponseWriter, req *http.Request) {
	if req.Method != http.MethodGet {
		return
	}
	if req.Header.Get("If-None-Match") == "1234" {
		writer.WriteHeader(http.StatusNotModified)
		return
	}
	body, _ := json.Marshal(users)
	writer.Header().Set("Content-Type", "application/json")
	writer.Header().Set("ETag", "1234")
	writer.Write(body)
}
// usersLastModified serves the user list with a Last-Modified header and
// honors conditional GETs: when If-Modified-Since matches lastModifiedDate
// exactly, it replies 304 Not Modified without a body.
func usersLastModified(writer http.ResponseWriter, req *http.Request) {
	if req.Method != http.MethodGet {
		return
	}
	ifModifiedSince, err := time.Parse(httpDateFormat, req.Header.Get("If-Modified-Since"))
	// time.Time.Equal is the idiomatic instant-equality check; the original
	// compared Sub(...) against 0, which expresses the same thing obliquely.
	if err == nil && ifModifiedSince.Equal(lastModifiedDate) {
		writer.WriteHeader(http.StatusNotModified)
		return
	}
	b, _ := json.Marshal(users)
	writer.Header().Set("Content-Type", "application/json")
	writer.Header().Set("Last-Modified", lastModifiedDate.Format(httpDateFormat))
	writer.Write(b)
}
// usersXML serves the user list as XML on GET, and on POST decodes an
// XML-encoded User, assigns it a fixed Id, and answers 201 Created.
func usersXML(writer http.ResponseWriter, req *http.Request) {
	// Get
	if req.Method == http.MethodGet {
		b, _ := xml.Marshal(users)
		writer.Header().Set("Content-Type", "application/xml")
		writer.Header().Set("Cache-Control", "no-cache")
		writer.Write(b)
	}
	// Post
	if req.Method == http.MethodPost {
		b, err := ioutil.ReadAll(req.Body)
		if err != nil {
			writer.WriteHeader(http.StatusBadRequest)
			return
		}
		u := new(User)
		if err = xml.Unmarshal(b, u); err != nil {
			writer.WriteHeader(http.StatusBadRequest)
			return
		}
		// Ids are assigned server-side; 3 matches the fixture expectations.
		u.Id = 3
		// NOTE(review): the response body is JSON-encoded although the
		// Content-Type claims application/xml — looks inconsistent; confirm
		// whether existing tests rely on the JSON body before changing.
		ub, _ := json.Marshal(u)
		writer.Header().Set("Content-Type", "application/xml")
		writer.WriteHeader(http.StatusCreated)
		writer.Write(ub)
		return
	}
}
// oneUser handles the /user/ endpoint for a single user. GET returns the
// first seeded user; PUT and PATCH echo that same user back; DELETE answers
// an empty 200. Any other method also falls through to an empty 200.
func oneUser(writer http.ResponseWriter, req *http.Request) {
	switch req.Method {
	case http.MethodGet:
		body, _ := json.Marshal(users[0])
		writer.Header().Set("Content-Type", "application/json")
		writer.Header().Set("Cache-Control", "no-cache")
		writer.Write(body)
	case http.MethodPut, http.MethodPatch:
		body, _ := json.Marshal(users[0])
		writer.Header().Set("Content-Type", "application/json")
		writer.Write(body)
	case http.MethodDelete:
		// Nothing to write — an empty 200 response signals success.
	}
}
// allUsers handles the /user collection endpoint: HEAD returns headers only,
// GET returns the user list as JSON, POST creates a user (201 Created with a
// server-assigned Id), and OPTIONS returns a plain-text resource description.
func allUsers(writer http.ResponseWriter, req *http.Request) {
	// Head
	if req.Method == http.MethodHead {
		writer.Header().Set("Content-Type", "application/json")
		writer.Header().Set("Cache-Control", "no-cache")
		return
	}
	// Get
	if req.Method == http.MethodGet {
		b, _ := json.Marshal(users)
		writer.Header().Set("Content-Type", "application/json")
		writer.Header().Set("Cache-Control", "no-cache")
		writer.Write(b)
		return
	}
	// Post
	if req.Method == http.MethodPost {
		b, err := ioutil.ReadAll(req.Body)
		if err != nil {
			writer.WriteHeader(http.StatusBadRequest)
			return
		}
		u := new(User)
		if err = json.Unmarshal(b, u); err != nil {
			writer.WriteHeader(http.StatusBadRequest)
			return
		}
		// Ids are assigned server-side; 3 matches the fixture expectations.
		u.Id = 3
		ub, _ := json.Marshal(u)
		writer.Header().Set("Content-Type", "application/json")
		writer.WriteHeader(http.StatusCreated)
		writer.Write(ub)
		return
	}
	// Options
	if req.Method == http.MethodOptions {
		b := []byte(`User resource
id: Id of the user
name: Name of the user`)
		writer.Header().Set("Content-Type", "text/plain")
		writer.Header().Set("Cache-Control", "no-cache")
		writer.Write(b)
		return
	}
}
| alex21289/golang-restclient |
<|start_filename|>toon_with_texture.shader<|end_filename|>
shader_type spatial;

// Albedo texture used for the lit areas of the mesh.
uniform sampler2D light_text : hint_albedo;
// Texture used for shadowed areas; in light() the shadowed color is the lit
// albedo multiplied by this texture.
uniform sampler2D shadow_text : hint_albedo;
// Threshold on the smoothed N·L term: fragments above it count as "lit".
uniform float cut_point : hint_range(0.0, 1.0);
// Influence of the vertex color's red channel on the threshold (1 = neutral).
uniform float adjust_threshold : hint_range (0.0, 1.0);
// Strength of the shadow texture's darkening contribution.
uniform float shadow_force : hint_range(0.0, 1.0);

// UV coordinates forwarded from the vertex pass for use in light().
varying vec2 uv;
// Vertex color forwarded from the vertex pass; only the red channel is used
// (as a per-vertex lighting threshold).
varying vec4 color;
// Vertex pass: forwards UV channel 0 and the vertex color to the later
// passes (the color's red channel is used as a lighting threshold in light()).
void vertex()
{
	uv = UV;
	color = COLOR;
}
// Fragment pass: the base surface color comes straight from the light texture.
void fragment()
{
	ALBEDO = texture(light_text, uv).rgb;
}
// Returns the lambert term N·L for the given surface normal and
// (already normalized) light direction.
float calc_NdotL(vec3 normal, vec3 light)
{
	return dot(light, normal);
}
// Returns true when the smoothed lambert term exceeds the cut point, i.e.
// when the fragment should be rendered fully lit.
bool calc_shading(float sm)
{
	// Direct comparison replaces the redundant if/else that returned
	// true/false explicitly.
	return sm > cut_point;
}
// Light pass: thresholds the lambert term (scaled by the vertex color's red
// channel and adjust_threshold) and outputs either the plain albedo (lit) or
// the albedo darkened by the shadow texture. LIGHT_COLOR is intentionally
// not used, so the light's own color does not tint the mesh.
void light()
{
	vec3 light = normalize(LIGHT);
	vec3 shadow = texture(shadow_text, uv).rgb;
	float NdotL = calc_NdotL(NORMAL, light);
	// Only the red channel of the vertex color modulates the threshold;
	// green and blue are forced to 1 so they have no effect.
	vec3 col = color.rgb;
	col.g = 1.0;
	col.b = 1.0;
	float sm = smoothstep(0.0, 1.0, NdotL*(col.r*adjust_threshold));
	bool shade = calc_shading(sm);
	if(shade == true)
	{
		DIFFUSE_LIGHT = ALBEDO;
	} else
	{
		DIFFUSE_LIGHT = ALBEDO*shadow*shadow_force;
	}
}
<|start_filename|>toonProva.shader<|end_filename|>
shader_type spatial;

// Base albedo texture (applied only when useTexture is enabled).
uniform sampler2D text : hint_albedo;
// Tangent-space normal map (applied only when useNormalMap is enabled).
uniform sampler2D normal_map : hint_albedo;
// Tint color; multiplied with the texture when useTexture is on, otherwise
// used as the flat albedo.
uniform vec4 color : hint_color;
uniform bool useTexture = true;
uniform bool useNormalMap = false;
// Intensity of the lit toon band (N·L above cut_point).
uniform float amount_of_light : hint_range(0.0,1.0);
// Intensity of the shadow toon band (N·L at or below cut_point).
uniform float amount_of_shadow : hint_range(0.0,1.0);
// N·L threshold separating the two toon bands.
uniform float cut_point : hint_range(0.0, 1.0);
// NOTE(review): normalMapDepth is not referenced anywhere in this shader —
// confirm whether it can be removed.
uniform float normalMapDepth;
// Fragment pass: resolves the base albedo (texture * tint, or flat tint) and
// optionally applies the tangent-space normal map.
void fragment()
{
	if(useTexture)
	{
		vec3 a1 = texture(text, UV).rgb;
		ALBEDO = a1*color.rgb;
	} else
	{
		ALBEDO = color.rgb;
	}
	if(useNormalMap == true)
	{
		// Unpack the normal map from [0,1] into [-1,1] and rotate it into
		// view space with the TBN basis, then override the surface normal.
		vec3 normalmap = texture(normal_map, UV).xyz * vec3(2.0,2.0,2.0) - vec3(1.0,1.0,1.0);
		vec3 normal = normalize(TANGENT * normalmap.y + BINORMAL * normalmap.x + NORMAL * normalmap.z);
		NORMAL = normal;
	} else
	{
		NORMALMAP_DEPTH = 0.0;
	}
}
// Lambert term: cosine of the angle between the normalized light direction
// and the surface normal.
float calc_NdotL(vec3 light, vec3 normal)
{
	return dot(normalize(light), normal);
}
// Two-band toon quantization: lambert terms above cut_point map to the light
// intensity, everything else to the shadow intensity.
float calc_toonStripes(float NdotL)
{
	if(NdotL <= cut_point)
	{
		return amount_of_shadow;
	}
	return amount_of_light;
}
// Light pass: quantizes the lambert term into light/shadow bands and
// modulates the albedo by the resulting intensity and the light attenuation.
void light()
{
	float NdotL = calc_NdotL(LIGHT, NORMAL);
	float intensity = calc_toonStripes(NdotL);
	// Both branches of the original useNormalMap conditional assigned the
	// exact same expression, so the dead conditional has been removed.
	DIFFUSE_LIGHT = ALBEDO*intensity*ATTENUATION;
}
<|start_filename|>sample-application/webpack.config.js<|end_filename|>
/* eslint-disable @typescript-eslint/no-var-requires */
/* eslint-disable no-undef */
const HtmlWebpackPlugin = require('html-webpack-plugin')
const CopyWebpackPlugin = require('copy-webpack-plugin')
const path = require('path')
module.exports = {
  mode: 'development',
  entry: './src/index.js',
  devServer: {
    contentBase: './dist'
  },
  output: {
    // Content-hashed bundle names for long-term caching.
    filename: '[name].[contenthash].js'
  },
  optimization: {
    splitChunks: {
      cacheGroups: {
        // Keep svg2roughjs in its own chunk; the higher priority makes this
        // group win over the generic vendors group below.
        svg2roughjs: {
          test: /[\\/]node_modules[\\/]svg2roughjs[\\/]/,
          name: 'svg2roughjs',
          chunks: 'all',
          priority: 10
        },
        // All remaining third-party code goes into a shared vendors chunk.
        commons: {
          test: /[\\/]node_modules[\\/]/,
          name: 'vendors',
          chunks: 'all'
        }
      }
    }
  },
  snapshot: {
    // automatically serve changed content in node_modules instead of older snapshots
    managedPaths: []
  },
  plugins: [
    new HtmlWebpackPlugin({
      template: './src/index.html'
    }),
    new CopyWebpackPlugin({ patterns: [{ from: 'static', to: 'static' }] })
  ],
  module: {
    rules: [
      {
        test: /\.m?js$/,
        include: [
          path.resolve(__dirname, 'src'),
          // These dependencies have es6 syntax which ie11 doesn't like.
          path.resolve(__dirname, 'node_modules/svg2roughjs'),
          path.resolve(__dirname, 'node_modules/roughjs')
        ],
        use: {
          loader: 'babel-loader',
          options: {
            presets: ['@babel/preset-env']
          }
        }
      },
      {
        // Import .svg files as raw strings (used for the bundled samples).
        test: /\.svg$/i,
        use: 'raw-loader'
      },
      {
        test: /\.css$/i,
        use: ['style-loader', 'css-loader']
      }
    ]
  }
}
<|start_filename|>sample-application/src/index.js<|end_filename|>
import 'core-js/stable'
import CodeMirror from 'codemirror'
import 'codemirror/lib/codemirror.css'
import 'codemirror/mode/xml/xml.js'
import SAMPLE_BPMN from '../public/bpmn1.svg'
import SAMPLE_COMPUTER_NETWORK from '../public/computer-network.svg'
import SAMPLE_FLOWCHART from '../public/flowchart4.svg'
import SAMPLE_HIERARCHICAL1 from '../public/hierarchical1.svg'
import SAMPLE_HIERARCHICAL2 from '../public/hierarchical2.svg'
import SAMPLE_MINDMAP from '../public/mindmap.svg'
import SAMPLE_MOVIES from '../public/movies.svg'
import SAMPLE_ORGANIC1 from '../public/organic1.svg'
import SAMPLE_ORGANIC2 from '../public/organic2.svg'
import SAMPLE_TREE from '../public/tree1.svg'
import SAMPLE_VENN from '../public/venn.svg'
import { RenderMode, Svg2Roughjs } from 'svg2roughjs'
let svg2roughjs
let loadingSvg = false
let scheduledLoad
let debouncedTimer = null
let codeMirrorInstance
/**
 * Change handler for the CodeMirror editor: debounces edits by 500ms and then
 * re-renders the sketch from the current editor content. Errors are
 * deliberately swallowed because the XML may be incomplete while typing.
 */
const onCodeMirrorChange = () => {
  if (debouncedTimer !== null) {
    clearTimeout(debouncedTimer)
  }
  debouncedTimer = setTimeout(() => {
    debouncedTimer = null
    try {
      loadSvgString(svg2roughjs, codeMirrorInstance.getValue())
    } catch (ignored) {
      // intermediate, unparsable editor state — ignore
    }
  }, 500)
}
/**
 * Sets CodeMirror content without triggering the change listener.
 * The listener is detached before the update and re-attached afterwards so
 * programmatic updates don't schedule a debounced re-render.
 * @param {string} value new editor content
 */
function setCodeMirrorValue(value) {
  codeMirrorInstance.off('change', onCodeMirrorChange)
  codeMirrorInstance.setValue(value)
  codeMirrorInstance.on('change', onCodeMirrorChange)
}
/**
 * Reads the rendered size of an SVG element.
 * Prefers explicit width/height attributes and falls back to the viewBox
 * when either attribute is missing or non-numeric.
 * @param {SVGSVGElement} svg
 * @returns {{width:number, height:number} | null} null when neither the
 *   attributes nor a viewBox provide a usable size
 */
function getSvgSize(svg) {
  const width = parseInt(svg.getAttribute('width'))
  const height = parseInt(svg.getAttribute('height'))
  // Number.isNaN avoids the implicit coercion of the global isNaN; parseInt
  // already returns a number here, so the check is equivalent but stricter.
  if (Number.isNaN(width) || Number.isNaN(height)) {
    const viewBox = svg.getAttribute('viewBox')
    return viewBox ? { width: svg.viewBox.baseVal.width, height: svg.viewBox.baseVal.height } : null
  }
  return { width, height }
}
/**
 * Parses an SVG string, mounts it into the #input pane, and hands it to
 * svg2roughjs for sketching. Re-entrant calls while a render is in flight are
 * coalesced: only the most recent content is rendered afterwards.
 * @param {Svg2Roughjs} svg2roughjs converter instance
 * @param {string} fileContent raw SVG markup
 */
function loadSvgString(svg2roughjs, fileContent) {
  // A render is already running — remember the newest content and bail out;
  // it is picked up at the end of the running render.
  if (loadingSvg) {
    scheduledLoad = fileContent
    return
  }
  document.getElementById('sample-select').disabled = true
  loadingSvg = true

  const inputElement = document.getElementById('input')
  const outputElement = document.getElementById('output')
  const canvas = outputElement.querySelector('canvas')

  const parser = new DOMParser()
  const doc = parser.parseFromString(fileContent, 'image/svg+xml')
  const svg = doc.querySelector('svg')

  // Clear the previously mounted SVG.
  while (inputElement.childElementCount > 0) {
    inputElement.removeChild(inputElement.firstChild)
  }

  if (!svg) {
    console.error('Could not load SVG file')
    document.getElementById('sample-select').disabled = false
    loadingSvg = false
    return
  }

  // Give the input pane the SVG's own dimensions when they are known.
  const svgSize = getSvgSize(svg)
  if (svgSize) {
    inputElement.style.width = `${svgSize.width}px`
    inputElement.style.height = `${svgSize.height}px`
  }
  inputElement.appendChild(svg)

  // make sure the SVG is part of the DOM and rendered, before it is converted by
  // Svg2Rough.js. Otherwise, CSS percentaged width/height might not be applied yet
  setTimeout(() => {
    // A parser error document has an <html> root instead of <svg>.
    if (svg.tagName === 'HTML') {
      console.error('Error parsing XML')
      inputElement.style.opacity = 1
      inputElement.style.width = '100%'
      inputElement.style.height = '100%'
      if (canvas) {
        canvas.style.opacity = 0
      }
    } else {
      inputElement.style.opacity = document.getElementById('opacity').value
      if (canvas) {
        canvas.style.opacity = 1
      }
      try {
        // Assigning the svg property triggers the actual sketch conversion.
        svg2roughjs.svg = svg
      } catch (e) {
        console.error("Couldn't sketch content")
        throw e // re-throw to show error on console
      } finally {
        document.getElementById('sample-select').disabled = false
        loadingSvg = false
      }
      // maybe there was a load during the rendering.. so load this instead
      if (scheduledLoad) {
        loadSvgString(svg2roughjs, scheduledLoad)
        scheduledLoad = null
      }
    }
  }, 0)
}
/**
 * Loads one of the bundled sample SVGs into the editor and the converter.
 * Unknown identifiers load an empty document.
 * @param {Svg2Roughjs} svg2roughjs converter instance
 * @param {string} sample sample identifier from the sample <select>
 */
function loadSample(svg2roughjs, sample) {
  // Lookup table replaces the long switch; keys mirror the <select> values.
  const samples = {
    bpmn1: SAMPLE_BPMN,
    'computer-network': SAMPLE_COMPUTER_NETWORK,
    flowchart4: SAMPLE_FLOWCHART,
    hierarchical1: SAMPLE_HIERARCHICAL1,
    hierarchical2: SAMPLE_HIERARCHICAL2,
    mindmap: SAMPLE_MINDMAP,
    movies: SAMPLE_MOVIES,
    organic1: SAMPLE_ORGANIC1,
    organic2: SAMPLE_ORGANIC2,
    tree1: SAMPLE_TREE,
    venn: SAMPLE_VENN
  }
  const sampleString = samples[sample] || ''
  setCodeMirrorValue(sampleString)
  loadSvgString(svg2roughjs, sampleString)
}
/**
 * Application entry point: creates the converter and wires every UI control
 * (sample picker, source editor, rough.js options, opacity slider, file
 * loading via picker and drag&drop, and PNG/SVG download).
 */
function run() {
  svg2roughjs = new Svg2Roughjs('#output', RenderMode.SVG)
  svg2roughjs.backgroundColor = 'white'
  svg2roughjs.pencilFilter = !!document.getElementById('pencilFilter').checked

  // Sample picker.
  const sampleSelect = document.getElementById('sample-select')
  sampleSelect.addEventListener('change', () => {
    loadSample(svg2roughjs, sampleSelect.value)
  })

  // Collapsible source editor panel.
  const toggleSourceBtn = document.getElementById('source-toggle')
  toggleSourceBtn.addEventListener('change', () => {
    if (toggleSourceBtn.checked) {
      codeContainer.classList.remove('hidden')
      // refresh() after the panel is visible so CodeMirror measures correctly
      setTimeout(() => {
        codeMirrorInstance.refresh()
        codeMirrorInstance.focus()
      }, 20)
    } else {
      codeContainer.classList.add('hidden')
    }
  })

  const codeContainer = document.querySelector('.raw-svg-container')
  codeMirrorInstance = CodeMirror(codeContainer, {
    mode: 'xml',
    lineNumbers: 'true'
  })
  // make sure codemirror is rendered when the expand animation has finished
  codeContainer.addEventListener('transitionend', () => {
    if (toggleSourceBtn.checked) {
      codeMirrorInstance.refresh()
      codeMirrorInstance.focus()
    }
  })

  // pre-select a sample
  sampleSelect.selectedIndex = 0
  loadSample(svg2roughjs, sampleSelect.value)

  // Rough.js options: any change rebuilds the full config object.
  const fillStyleSelect = document.getElementById('fill-style')
  const outputFormatSelect = document.getElementById('output-format')
  const roughnessInput = document.getElementById('roughness-input')
  const bowingInput = document.getElementById('bowing-input')
  outputFormatSelect.addEventListener('change', () => {
    svg2roughjs.renderMode = outputFormatSelect.value === 'svg' ? RenderMode.SVG : RenderMode.CANVAS
    // the pencil filter is an SVG-only feature
    document.getElementById('pencilFilter').disabled = outputFormatSelect.value !== 'svg'
  })
  fillStyleSelect.addEventListener('change', () => {
    svg2roughjs.roughConfig = {
      bowing: parseInt(bowingInput.value),
      roughness: parseInt(roughnessInput.value),
      fillStyle: fillStyleSelect.value
    }
  })
  roughnessInput.addEventListener('change', () => {
    svg2roughjs.roughConfig = {
      bowing: parseInt(bowingInput.value),
      roughness: parseInt(roughnessInput.value),
      fillStyle: fillStyleSelect.value
    }
  })
  bowingInput.addEventListener('change', () => {
    svg2roughjs.roughConfig = {
      bowing: parseInt(bowingInput.value),
      roughness: parseInt(roughnessInput.value),
      fillStyle: fillStyleSelect.value
    }
  })

  // Opacity slider cross-fades the input SVG and the sketched output.
  const opacityInput = document.getElementById('opacity')
  opacityInput.addEventListener('change', () => {
    document.getElementById('input').style.opacity = opacityInput.value
    document.getElementById('output').style.opacity = 1 - parseFloat(opacityInput.value)
  })
  // Clicking the label toggles between fully input and fully output.
  const opacityLabel = document.querySelector('label[for=opacity]')
  opacityLabel.addEventListener('click', () => {
    const currentOpacity = opacityInput.value
    const newOpacity = currentOpacity < 1 ? 1 : 0
    document.getElementById('input').style.opacity = newOpacity
    opacityInput.value = newOpacity
    document.getElementById('output').style.opacity = 1 - newOpacity
  })

  // Reads a dropped/picked file and pushes it into editor + converter.
  function loadFile(file) {
    const reader = new FileReader()
    reader.readAsText(file)
    reader.addEventListener('load', () => {
      const fileContent = reader.result
      setCodeMirrorValue(fileContent)
      loadSvgString(svg2roughjs, fileContent)
    })
  }

  const fileInput = document.getElementById('file-chooser')
  fileInput.addEventListener('change', () => {
    const files = fileInput.files
    if (files.length > 0) {
      loadFile(files[0])
    }
  })

  // Drag & drop anywhere on the page loads the first dropped file.
  const body = document.getElementsByTagName('body')[0]
  body.addEventListener('dragover', e => {
    e.preventDefault()
  })
  body.addEventListener('drop', e => {
    e.preventDefault()
    if (e.dataTransfer.items) {
      for (let i = 0; i < e.dataTransfer.items.length; i++) {
        if (e.dataTransfer.items[i].kind === 'file') {
          const file = e.dataTransfer.items[i].getAsFile()
          loadFile(file)
          return
        }
      }
    } else {
      // Use DataTransfer interface to access the file(s)
      for (let i = 0; i < e.dataTransfer.files.length; i++) {
        loadFile(e.dataTransfer.files[i])
        return
      }
    }
  })

  // Download the sketched result as PNG (canvas mode) or SVG (svg mode).
  const downloadBtn = document.getElementById('download-btn')
  downloadBtn.addEventListener('click', () => {
    const link = document.createElement('a')
    if (svg2roughjs.renderMode === RenderMode.CANVAS) {
      const canvas = document.querySelector('#output canvas')
      const image = canvas.toDataURL('image/png', 1.0).replace('image/png', 'image/octet-stream')
      link.download = 'svg2roughjs.png'
      link.href = image
    } else {
      const serializer = new XMLSerializer()
      let svgString = serializer.serializeToString(document.querySelector('#output svg'))
      svgString = '<?xml version="1.0" standalone="no"?>\r\n' + svgString
      const svgBlob = new Blob([svgString], { type: 'image/svg+xml' })
      link.download = 'svg2roughjs.svg'
      link.href = URL.createObjectURL(svgBlob)
    }
    link.click()
  })

  // Remaining toggles: original font, per-render randomization, pencil filter.
  const originalFontCheckbox = document.getElementById('original-font')
  originalFontCheckbox.addEventListener('change', () => {
    if (originalFontCheckbox.checked) {
      svg2roughjs.fontFamily = null
    } else {
      svg2roughjs.fontFamily = 'Comic Sans MS, sans-serif'
    }
  })
  const randomizeCheckbox = document.getElementById('randomize')
  randomizeCheckbox.addEventListener('change', () => {
    svg2roughjs.randomize = !!randomizeCheckbox.checked
  })
  const pencilCheckbox = document.getElementById('pencilFilter')
  pencilCheckbox.addEventListener('change', () => {
    svg2roughjs.pencilFilter = !!pencilCheckbox.checked
  })
}
run()
<|start_filename|>rollup.config.js<|end_filename|>
import pkg from './package.json'
import { terser } from 'rollup-plugin-terser'
import dts from 'rollup-plugin-dts'
/**
 * Turns package names into regular expressions that externalize the package
 * itself as well as any of its submodule paths (e.g. "pkg" and "pkg/sub").
 * @param {string[]} externals package names
 * @returns {RegExp[]}
 */
function matchSubmodules(externals) {
  const patterns = []
  for (const name of externals) {
    patterns.push(new RegExp(`^${name}(?:[/\\\\]|$)`))
  }
  return patterns
}
// Externalize every declared dependency, including deep submodule imports.
const externals = matchSubmodules([
  ...Object.keys(pkg.dependencies || {}),
  ...Object.keys(pkg.peerDependencies || {}),
  ...Object.keys(pkg.optionalDependencies || {})
])

// ES-module bundles: one readable build and one terser-minified build, both
// with sourcemaps, written to the paths declared in package.json.
const es = {
  input: 'out-tsc/index.js',
  output: [
    {
      // readable bundle: strip the ".min" part from the declared module path
      file: pkg.module.replace('.min', ''),
      format: 'es',
      name: 'svg2roughjs',
      sourcemap: true,
      plugins: []
    },
    {
      file: pkg.module,
      format: 'es',
      name: 'svg2roughjs',
      sourcemap: true,
      plugins: [terser({})]
    }
  ],
  external: externals,
  plugins: []
}

// Bundle the generated .d.ts files into a single typings entry point.
const typings = {
  input: 'out-tsc/index.d.ts',
  output: [{ file: 'dist/index.d.ts', format: 'es' }],
  plugins: [dts()]
}

export default [es, typings]
<|start_filename|>sample-application/package.json<|end_filename|>
{
"name": "svg2roughjs-sample",
"description": "A simple sample application to test and try svg2roughjs",
"version": "1.0.0",
"private": true,
"scripts": {
"preinstall": "cd ../ && npm pack",
"dev": "concurrently --kill-others \"npm run update-svg2roughjs && npm run watch-svg2roughjs\" \"npm run serve\"",
"serve": "webpack serve",
"build": "npm run update-svg2roughjs && webpack",
"deploy": "npm run update-svg2roughjs && webpack --mode production",
"build-svg2roughjs": "cd ../ && npm run build",
"update-svg2roughjs": "npm run build-svg2roughjs && grunt copy-svg2roughjs",
"watch-svg2roughjs": "grunt watch"
},
"author": "<NAME>",
"license": "MIT",
"dependencies": {
"codemirror": "^5.62.0",
"core-js": "^3.15.2",
"svg2roughjs": "file:../svg2roughjs-2.2.1.tgz"
},
"devDependencies": {
"@babel/core": "^7.14.6",
"@babel/preset-env": "^7.14.7",
"babel-loader": "^8.2.2",
"concurrently": "^6.2.0",
"copy-webpack-plugin": "^9.0.1",
"css-loader": "^6.0.0",
"grunt": "^1.4.1",
"grunt-contrib-copy": "^1.0.0",
"grunt-contrib-watch": "^1.1.0",
"grunt-shell": "^3.0.1",
"html-webpack-plugin": "^5.3.2",
"prettier": "2.3.2",
"raw-loader": "^4.0.2",
"style-loader": "^3.1.0",
"webpack": "^5.44.0",
"webpack-cli": "^4.7.2",
"webpack-dev-server": "^3.11.2"
}
}
<|start_filename|>package.json<|end_filename|>
{
"name": "svg2roughjs",
"version": "2.2.1",
"description": "Leverages Rough.js to convert SVGs to a hand-drawn, sketchy representation",
"author": "<NAME>",
"contributors": [
"<NAME>"
],
"main": "dist/svg2roughjs.es.js",
"module": "dist/svg2roughjs.es.min.js",
"types": "dist/index.d.ts",
"homepage": "https://github.com/fskpf/svg2roughjs#readme",
"repository": {
"type": "git",
"url": "git+https://github.com/fskpf/svg2roughjs.git"
},
"keywords": [
"svg",
"roughjs",
"javascript",
"hand-drawn",
"sketch"
],
"license": "MIT",
"scripts": {
"prepare": "npm run lint && npm run build",
"build": "npm run clean && tsc && rollup -c rollup.config.js",
"clean": "rimraf dist/",
"lint": "eslint --ext .ts ./src"
},
"files": [
"dist/*",
"LICENSE.md",
"README.md"
],
"dependencies": {
"roughjs": "^4.4.1",
"svg-pathdata": "^6.0.0",
"tinycolor2": "^1.4.2",
"units-css": "^0.4.0"
},
"devDependencies": {
"@types/node": "^16.3.2",
"@typescript-eslint/eslint-plugin": "^4.28.3",
"@typescript-eslint/parser": "^4.28.3",
"eslint": "^7.30.0",
"eslint-config-prettier": "^8.3.0",
"eslint-plugin-prettier": "^3.4.0",
"prettier": "2.3.2",
"rimraf": "^3.0.2",
"rollup": "^2.56.0",
"rollup-plugin-dts": "^3.0.2",
"rollup-plugin-terser": "^7.0.2",
"typescript": "^4.3.5"
}
}
<|start_filename|>sample-application/Gruntfile.js<|end_filename|>
// eslint-disable-next-line no-undef
// Grunt tasks that keep the sample app's node_modules copy of svg2roughjs in
// sync with the local working tree: build it, copy it over, and watch for
// source changes.
module.exports = function (grunt) {
  grunt.loadNpmTasks('grunt-shell')
  grunt.loadNpmTasks('grunt-contrib-copy')
  grunt.loadNpmTasks('grunt-contrib-watch')

  // Delegates to the npm script that builds the parent package.
  grunt.registerTask('build-svg2roughjs', ['shell:npmBuildSvg2roughjs'])
  grunt.registerTask(
    'copy-svg2roughjs',
    'Copies the current local svg2roughjs content to the /node_modules/',
    ['copy:svg2roughjs']
  )

  grunt.initConfig({
    shell: {
      npmBuildSvg2roughjs: {
        command: 'npm run build-svg2roughjs'
      }
    },
    copy: {
      // Mirror the freshly built dist plus metadata into node_modules.
      svg2roughjs: {
        files: [
          {
            expand: true,
            src: '../dist/**',
            dest: './node_modules/svg2roughjs/dist/'
          },
          { src: '../README.md', dest: './node_modules/svg2roughjs/README.md' },
          { src: '../LICENSE.md', dest: './node_modules/svg2roughjs/LICENSE.md' },
          { src: '../package.json', dest: './node_modules/svg2roughjs/package.json' }
        ]
      }
    },
    watch: {
      // Source changes rebuild the library...
      change: {
        files: ['../src/**/*'],
        tasks: ['build-svg2roughjs']
      },
      // ...and a fresh dist triggers the copy into node_modules.
      update: {
        files: ['../dist/*'],
        tasks: ['copy-svg2roughjs']
      }
    }
  })
}
| fskpf/svg2roughjs |
<|start_filename|>cs224n/assignment1/Makefile<|end_filename|>
DATASETS_DIR=utils/datasets

# Download the assignment datasets.
init:
	sh get_datasets.sh

# Package the assignment for submission.
submit:
	sh collect_submission.sh

# Remove the submission archive, the datasets, and generated artifacts.
clean:
	rm -f assignment1.zip
	rm -rf ${DATASETS_DIR}
	rm -f *.pyc *.png *.npy utils/*.pyc
| Rolight/Mooc-Assignments |
<|start_filename|>lib/src/json_schema/format_exceptions.dart<|end_filename|>
/// Factory helpers that build uniformly formatted [FormatException]s for
/// JSON-schema validation failures.
class FormatExceptions {
  /// Builds a [FormatException] whose message is prefixed with [path]
  /// (an empty prefix is used when no path is given).
  static FormatException error(String msg, [String path]) {
    msg = '${path ?? ''}: $msg';
    // if (logFormatExceptions) _logger.warning(msg); TODO: re-add logger
    return FormatException(msg);
  }

  /// [instance] under [key] was expected to be a boolean.
  static FormatException bool(String key, dynamic instance, [String path]) =>
      error('$key must be boolean: $instance', path);

  /// [instance] under [key] was expected to be numeric.
  static FormatException num(String key, dynamic instance, [String path]) => error('$key must be num: $instance', path);

  /// [instance] under [key] was expected to be a number greater than zero.
  /// Fixed to use [key] and forward [path]: the original hardcoded
  /// 'multipleOf' in the message and dropped the path prefix.
  static FormatException nonNegativeNum(String key, dynamic instance, [String path]) =>
      error('$key must be > 0: $instance', path);

  /// [instance] under [key] was expected to be an integer.
  static FormatException int(String key, dynamic instance, [String path]) => error('$key must be int: $instance', path);

  /// [instance] under [key] was expected to be a string.
  static FormatException string(String key, dynamic instance, [String path]) =>
      error('$key must be string: $instance', path);

  /// [instance] under [key] was expected to be an object.
  static FormatException object(String key, dynamic instance, [String path]) =>
      error('$key must be object: $instance', path);

  /// [instance] under [key] was expected to be an array.
  static FormatException list(String key, dynamic instance, [String path]) =>
      error('$key must be array: $instance', path);

  /// [instance] under [key] was expected to be a valid schema object or boolean.
  static FormatException schema(String key, dynamic instance, [String path]) =>
      error('$key must be valid schema object or boolean: $instance', path);
}
| kdev0/json_schema |
<|start_filename|>pkg/v2/storage.go<|end_filename|>
/*
Copyright
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v2
import (
"fmt"
"log"
"os"
utils "github.com/maorfr/helm-plugin-utils/pkg"
"github.com/pkg/errors"
"k8s.io/helm/pkg/storage"
"k8s.io/helm/pkg/storage/driver"
common "github.com/hickeyma/helm-mapkubeapis/pkg/common"
)
// GetStorageDriver returns a handle to the Helm v2 backend storage driver
// (ConfigMap- or Secret-based, depending on the resolved Tiller storage
// type). It returns an error when the cluster is unreachable or the storage
// type is unrecognized.
func GetStorageDriver(mapOptions common.MapOptions) (*storage.Storage, error) {
	clientSet := utils.GetClientSetWithKubeConfig(mapOptions.KubeConfig.File, mapOptions.KubeConfig.Context)
	if clientSet == nil {
		return nil, errors.Errorf("kubernetes cluster unreachable")
	}
	namespace := mapOptions.ReleaseNamespace
	storageType := getStorageType(mapOptions)
	switch storageType {
	case "configmap", "configmaps", "":
		cfgMaps := driver.NewConfigMaps(clientSet.CoreV1().ConfigMaps(namespace))
		cfgMaps.Log = newLogger("storage/driver").Printf
		return storage.Init(cfgMaps), nil
	case "secret", "secrets":
		secrets := driver.NewSecrets(clientSet.CoreV1().Secrets(namespace))
		secrets.Log = newLogger("storage/driver").Printf
		return storage.Init(secrets), nil
	default:
		// Report the problem through the function's error result instead of
		// panicking: callers already handle a non-nil error, and a panic
		// would crash the whole plugin on an unexpected storage backend.
		return nil, errors.Errorf("unknown storage type: %q", storageType)
	}
}
// getStorageType resolves the Tiller storage backend. When Tiller runs
// in-cluster it is queried directly via the kubeconfig; otherwise the value
// configured in the map options is used as-is.
func getStorageType(mapOptions common.MapOptions) string {
	if mapOptions.TillerOutCluster {
		return mapOptions.StorageType
	}
	return utils.GetTillerStorageWithKubeConfig(mapOptions.ReleaseNamespace,
		mapOptions.KubeConfig.File, mapOptions.KubeConfig.Context)
}
// newLogger builds a stderr logger whose messages are tagged with the given
// prefix, e.g. "[storage/driver] ". An empty prefix yields an untagged logger.
func newLogger(prefix string) *log.Logger {
	tag := prefix
	if tag != "" {
		tag = fmt.Sprintf("[%s] ", tag)
	}
	return log.New(os.Stderr, tag, log.Flags())
}
| AzcarGabriel/helm-mapkubeapis |
<|start_filename|>bucket/spotify.json<|end_filename|>
{
"version": "1.1.87.612.gf8d110e2",
"description": "A digital music service that gives you access to millions of songs.",
"homepage": "https://www.spotify.com/",
"license": {
"identifier": "Freeware",
"url": "https://www.spotify.com/legal/end-user-agreement/"
},
"url": "https://download.scdn.co/SpotifyFullSetup.exe",
"hash": "2e925326bc23a3f02e2166ae11f40366d323676e38e70b82420d0410c92cf33b",
"installer": {
"script": [
"Start-Process -Wait \"$dir\\$fname\" -ArgumentList '/extract', \"`\"$dir`\"\"",
"# Disable built-in updater",
"Remove-Item -ErrorAction Ignore -Recurse \"$env:LOCALAPPDATA\\Spotify\\Update\" | Out-Null",
"$updateFile = New-Item -Path \"$env:LOCALAPPDATA\\Spotify\" -Name Update -ItemType File -Value \"Disabled by Scoop\" -Force",
"$updatefile.Attributes = 'ReadOnly', 'System'",
"Remove-Item \"$dir\\$fname\", \"$dir\\SpotifyMigrator.exe\""
]
},
"shortcuts": [
[
"Spotify.exe",
"Spotify"
]
],
"uninstaller": {
"script": [
"if ($cmd -ne 'uninstall') { return }",
"Start-Process -Wait \"$dir\\Spotify.exe\" -ArgumentList '/Uninstall', '/Silent'"
]
},
"checkver": {
"script": [
"$download_url = 'https://download.scdn.co/SpotifyFullSetup.exe'",
"$download = cache_path 'spotify' 'unknown' $download_url",
"do_dl $download_url $download",
"$spotify_version = (Get-Item $download).VersionInfo.ProductVersion",
"Move-Item -Force $download -Destination (cache_path 'spotify' $spotify_version $download_url)",
"return $spotify_version"
],
"regex": "(.+)"
},
"autoupdate": {
"url": "https://download.scdn.co/SpotifyFullSetup.exe"
}
}
<|start_filename|>bucket/office-tool-plus.json<|end_filename|>
{
"version": "8.3.10.7",
"description": "A powerful and useful tool for Office deployments",
"homepage": "https://otp.landian.vip/",
"license": "GPL-3.0-or-later",
"url": "https://github.com/YerongAI/Office-Tool/releases/download/v8.3.10.7/Office_Tool_with_runtime_v8.3.10.7.7z",
"hash": "757ac90af36ef1e92501429172a7670b884c1472be4eab99d42eb34de5a712a0",
"extract_dir": "Office Tool",
"shortcuts": [
[
"Office Tool Plus.exe",
"Office Tool Plus"
]
],
"checkver": {
"github": "https://github.com/YerongAI/Office-Tool"
},
"autoupdate": {
"url": "https://github.com/YerongAI/Office-Tool/releases/download/v$version/Office_Tool_with_runtime_v$version.7z",
"hash": {
"url": "https://github.com/YerongAI/Office-Tool/releases/latest",
"regex": "(?s)$basename.*?$sha256"
}
}
}
<|start_filename|>bucket/mpress.json<|end_filename|>
{
"version": "2.19",
"description": "High-performance executable packer for PE32+ and .NET",
"homepage": "https://www.matcode.com/mpress.htm",
"license": "BSD-3-Clause",
"url": "https://raw.githubusercontent.com/ScoopInstaller/Binary/master/mpress/mpress-2.19.zip",
"hash": "19ffee93706dff67f83d9ef48c0c794dea761d4459b11c37f9bc65b04af736c5",
"bin": "mpress.exe"
}
<|start_filename|>bucket/micswitch.json<|end_filename|>
{
"version": "1.0.234",
"description": "A tool which allows you to mute/unmute your microphone using a predefined hotkey",
"homepage": "https://github.com/iXab3r/MicSwitch",
"license": "Unknown",
"url": "https://github.com/iXab3r/MicSwitch/releases/download/1.0.234/MicSwitch-1.0.234-full.nupkg",
"hash": "sha1:be04f8e9fa2f324bdd78d295853ce817fa4418ed",
"extract_dir": "lib\\.net45",
"pre_install": [
"# avoid potential errors caused by previous installations (micswitch.exe creates an empty folder in $Env:LocalAppData if not found)",
"if(Test-Path \"$Env:LocalAppData\\Micswitch\") { Remove-Item \"$Env:LocalAppData\\Micswitch\" -Force -Recurse }",
"New-Item \"$Env:LocalAppData\\Micswitch\" -ItemType Junction -Target \"$dir\" | Out-Null"
],
"uninstaller": {
"script": "Remove-Item \"$Env:LocalAppData\\Micswitch\" -Force -Recurse"
},
"shortcuts": [
[
"MicSwitch.exe",
"MicSwitch"
]
],
"checkver": "github",
"autoupdate": {
"url": "https://github.com/iXab3r/MicSwitch/releases/download/$version/MicSwitch-$version-full.nupkg",
"hash": {
"url": "$baseurl/RELEASES"
}
}
}
<|start_filename|>bucket/jexiftoolgui.json<|end_filename|>
{
"version": "2.0.1.0",
"description": "Graphical frontend for ExifTool",
"homepage": "https://hvdwolf.github.io/jExifToolGUI/",
"license": "GPL-3.0-or-later",
"depends": "exiftool",
"url": "https://github.com/hvdwolf/jExifToolGUI/releases/download/2.0.1/jExifToolGUI-2.0.1.0-win-x86_64_with-jre.zip",
"hash": "819cb917d7f2ea9b686592185e175f10b541fa20318e0a73402184640181823c",
"extract_dir": "jExifToolGUI-2.0.1.0-win-x86_64_with-jre",
"shortcuts": [
[
"jExifToolGUI.exe",
"jExifToolGUI"
]
],
"persist": "logs",
"checkver": {
"url": "https://github.com/hvdwolf/jExifToolGUI/releases",
"regex": "jExifToolGUI-([\\d.]+)-win-x86_64_with-jre\\.zip\""
},
"autoupdate": {
"url": "https://github.com/hvdwolf/jExifToolGUI/releases/download/$matchHead/jExifToolGUI-$version-win-x86_64_with-jre.zip",
"extract_dir": "jExifToolGUI-$version-win-x86_64_with-jre"
}
}
<|start_filename|>bucket/yumi-exfat.json<|end_filename|>
{
"version": "1.0.0.3",
"description": "Multiboot USB Creator. Supports exFAT format, BIOS and UEFI USB boot.",
"homepage": "https://www.pendrivelinux.com/yumi-multiboot-usb-creator/",
"license": "GPL-2.0-or-later",
"url": "https://www.pendrivelinux.com/downloads/YUMI/YUMI-exFAT-1.0.0.3.exe#/YUMI-exFAT.exe",
"hash": "7621cb67df4af58561129a61987a85cd3e0a2a5e9ddb55e3fe04729488096ab5",
"shortcuts": [
[
"YUMI-exFAT.exe",
"YUMI-exFAT"
]
],
"checkver": "YUMI-exFAT-([\\d.]+)\\.exe",
"autoupdate": {
"url": "https://www.pendrivelinux.com/downloads/YUMI/YUMI-exFAT-$version.exe#/YUMI-exFAT.exe",
"hash": {
"url": "https://www.pendrivelinux.com/yumi-multiboot-usb-creator/",
"regex": "(?s)$basename.*?$sha256"
}
}
}
<|start_filename|>bucket/qbittorrent-enhanced.json<|end_filename|>
{
"version": "4.4.3.12",
"description": "qBittorrent BitTorrent client with anti-leech enhancements",
"homepage": "https://github.com/c0re100/qBittorrent-Enhanced-Edition",
"license": "GPL-2.0-or-later",
"architecture": {
"64bit": {
"url": "https://github.com/c0re100/qBittorrent-Enhanced-Edition/releases/download/release-4.4.3.12/qbittorrent_enhanced_4.4.3.12_Qt6_setup.exe#/dl.7z",
"hash": "f50cffc29e4354681e08b6a14aeb29f1713583b3affea99be9c174e395402b1c"
}
},
"pre_install": "Remove-Item \"$dir\\`$PLUGINSDIR\", \"$dir\\uninst.exe\" -Force -Recurse",
"bin": "qbittorrent.exe",
"shortcuts": [
[
"qbittorrent.exe",
"qBittorrent Enhanced Edition"
]
],
"persist": "profile",
"checkver": {
"github": "https://github.com/c0re100/qBittorrent-Enhanced-Edition",
"regex": "releases/tag/release-([\\d.]+)"
},
"autoupdate": {
"architecture": {
"64bit": {
"url": "https://github.com/c0re100/qBittorrent-Enhanced-Edition/releases/download/release-$version/qbittorrent_enhanced_$version_Qt6_setup.exe#/dl.7z"
}
}
}
}
<|start_filename|>bucket/freerapid.json<|end_filename|>
{
"version": "0.9u4",
"description": "A Java downloader that supports downloading from Rapidshare, Youtube, Facebook, Picasa and other file-sharing services.",
"homepage": "http://wordrider.net/freerapid/",
"license": {
"identifier": "Freeware",
"url": "https://wordrider.net/freerapid/faq.html"
},
"url": "https://raw.githubusercontent.com/ScoopInstaller/Binary/master/freerapid/FreeRapid-0.9u4.zip",
"hash": "f26d570b63e9591e438a625f87eee80480b160f077a4d7aae433c8d3e0d20ab9",
"suggest": {
"JAVA Runtime Environment": "java/openjdk"
},
"extract_dir": "FreeRapid-0.9u4",
"bin": "frd.jar",
"shortcuts": [
[
"frd.exe",
"FreeRapid Downloader"
]
],
"persist": [
"log",
"objectdb.conf"
]
}
<|start_filename|>bucket/peid.json<|end_filename|>
{
"version": "0.95",
"description": "Detects most common packers, cryptors and compilers for PE files. Supports more than 470 different signatures in PE files.",
"homepage": "https://web.archive.org/web/20110226030434/http://peid.info/",
"license": "Freeware",
"notes": [
"PEiD is not being maintained anymore. We recommend using Exeinfo PE for latest features/fixes",
"To install it, run: scoop install exeinfo-pe"
],
"url": "https://raw.githubusercontent.com/ScoopInstaller/Binary/master/peid/PEiD-0.95.zip",
"hash": "67a0fe273a7273963fac97b808530b7a1d8088af350a06ed755d72c7eaab2de0",
"bin": "PEiD.exe",
"shortcuts": [
[
"PEiD.exe",
"PEiD"
]
]
}
<|start_filename|>bucket/simple-assembly-explorer.json<|end_filename|>
{
"version": "1.14.4",
"description": "Open-source .NET assembly tool that can browse assembly classes, edit method instructions, and manipulate resources.",
"homepage": "https://code.google.com/archive/p/simple-assembly-exploror/",
"license": "MIT",
"architecture": {
"64bit": {
"url": "https://github.com/wickyhu/simple-assembly-explorer/releases/download/v1.14.4/SAE.v1.14.4.x64.7z",
"hash": "1ecf9b998b9698bd778a16ea6a7bfb9c3fdc5c6dbfd4d8551bd962fe4ab261e7"
},
"32bit": {
"url": "https://github.com/wickyhu/simple-assembly-explorer/releases/download/v1.14.4/SAE.v1.14.4.x86.7z",
"hash": "7664c9d6494ac900d6509ed72fa75dc636d5c6873ad5d0e063b4461caccff846"
}
},
"pre_install": [
"if (!(Test-Path \"$persist_dir\\user.config\")) {",
" Set-Content \"$dir\\user.config\" \"<SimpleAssemblyExplorer.Properties.Settings></SimpleAssemblyExplorer.Properties.Settings>`r`n\" -Encoding Ascii",
"}"
],
"shortcuts": [
[
"SimpleAssemblyExplorer.exe",
"Simple Assembly Explorer"
]
],
"persist": [
"user.config",
"SimpleAssemblyExplorer.exe.Config"
]
}
<|start_filename|>bucket/paint.net-plugin-boltbait.json<|end_filename|>
{
"version": "5.2",
"description": "Collection of paint.net plugins by BoltBait",
"homepage": "https://boltbait.com/pdn/",
"license": {
"identifier": "Freeware",
"url": "https://boltbait.com/pdn/"
},
"depends": "extras/paint.net",
"url": "https://boltbait.com/pdn/BoltBaitPack52.zip",
"hash": "be931895e9633399d88c8722220ade06c6faf8a478e992b654327ebfd006c933",
"pre_install": [
"$installer = Get-Item \"$dir\\BoltBaitPackInstall*.exe\"",
"$installerAssembly = [System.Reflection.Assembly]::Load([IO.File]::ReadAllBytes($installer))",
"Remove-Item $installer",
"$pluginResourcePrefix = 'Install.Attachments.'",
"$pluginsResourceNames = $installerAssembly.GetManifestResourceNames() | Where-Object { $_.StartsWith($pluginResourcePrefix) -and $_.EndsWith('.dll') }",
"$pluginFolder = Join-Path (appdir 'paint.net' $global) 'current\\Effects'",
"$pluginsResourceNames | ForEach-Object {",
" $pluginFileName = $_.Replace($pluginResourcePrefix, '')",
" $installerAssembly.GetManifestResourceStream($_).",
" CopyTo([IO.File]::OpenWrite(",
" (Join-Path \"$pluginFolder\" \"$pluginFileName\")",
" ))",
"}",
"$pluginsResourceNames.Replace($pluginResourcePrefix, '') | Out-File \"$dir\\plugins.txt\" -Encoding Ascii"
],
"uninstaller": {
"script": [
"$pluginFolder = Join-Path (appdir 'paint.net' $global) 'current\\Effects'",
"Get-Content \"$dir\\plugins.txt\" | Remove-Item -Path { Join-Path \"$pluginFolder\" \"$_\" } -ErrorAction SilentlyContinue"
]
},
"checkver": {
"regex": "BoltBaitPack(?<major>\\d)(?<minor>\\d)\\.zip</a>",
"replace": "${major}.${minor}"
},
"autoupdate": {
"url": "https://boltbait.com/pdn/BoltBaitPack$cleanVersion.zip"
}
}
<|start_filename|>bucket/rxrepl.json<|end_filename|>
{
"version": "1.5",
"description": "A command line tool to search and replace text in text files using Perl compatible regular expressions (PCRE).",
"homepage": "https://sites.google.com/site/regexreplace/",
"license": {
"identifier": "Freeware",
"url": "https://hastebin.com/raw/adajilisey"
},
"url": "https://drive.google.com/uc?id=1reCUlV4He6ZDqX8GgJtuFsegc557PgDi&export=download#/dl.zip",
"hash": "c0d8523addafa2fb5e89440e9b5262ce80c84fa9e87ca82d7b3cc23ad10c1e1b",
"bin": "rxrepl.exe"
}
<|start_filename|>bucket/lenovolegiontoolkit.json<|end_filename|>
{
"version": "2.0.0",
"description": "A lightweight alternative to the Lenovo Vantage Software created for Lenovo Legion laptops",
"homepage": "https://github.com/BartoszCichecki/LenovoLegionToolkit",
"license": "MIT",
"depends": "versions/dotnet5-sdk",
"url": "https://github.com/BartoszCichecki/LenovoLegionToolkit/releases/download/2.0.0/LenovoLegionToolkitSetup.exe",
"hash": "9af4652fcbcab69f4bfe67a22b9faa158f214500c4ad888bf690809137206867",
"innosetup": true,
"shortcuts": [
[
"Lenovo Legion Toolkit.exe",
"Lenovo Legion Toolkit"
]
],
"checkver": "github",
"autoupdate": {
"url": "https://github.com/BartoszCichecki/LenovoLegionToolkit/releases/download/$version/LenovoLegionToolkitSetup.exe"
}
}
<|start_filename|>bucket/upx-easy-gui.json<|end_filename|>
{
"##": "Not suggesting 'upx' here because this upx-easy-gui contains bundled UPX.",
"version": "2.1",
"description": "A graphical user interface (GUI) for UPX EXE Compressor",
"homepage": "https://www.novirusthanks.org/products/upx-easy-gui/",
"license": "Freeware",
"url": "https://downloads.novirusthanks.org/files/portables/upx_easy_gui_portable.zip",
"hash": "4bc0700eca5abded37f3dfecbe4ea63e376ed614f53ece42817732f097778f48",
"extract_dir": "PORTABLE",
"shortcuts": [
[
"UPXEasyGUI.exe",
"UPX Easy GUI"
]
],
"checkver": "UPX Easy GUI v([\\d.]+)</h1>",
"autoupdate": {
"url": "https://downloads.novirusthanks.org/files/portables/upx_easy_gui_portable.zip"
}
}
<|start_filename|>bucket/windows-terminal.json<|end_filename|>
{
"version": "1.13.11431.0",
"description": "The new Windows Terminal, and the original Windows console host - all in the same place!",
"homepage": "https://github.com/microsoft/terminal",
"license": "MIT",
"notes": "Add Windows Terminal as a context menu option by running `reg import \"$dir\\install-context.reg\"`",
"suggest": {
"vcredist": "extras/vcredist2022"
},
"url": "https://github.com/microsoft/terminal/releases/download/v1.13.11431.0/Microsoft.WindowsTerminal_Win10_1.13.11431.0_8wekyb3d8bbwe.msixbundle#/dl.7z",
"hash": "97ce893c37a3063a7b45bc859bb357df730992c87f614e45040d63f5cb2b80fe",
"architecture": {
"64bit": {
"pre_install": "Get-ChildItem \"$dir\" -Exclude '*x64.msix' | Remove-Item -Force -Recurse"
},
"32bit": {
"pre_install": "Get-ChildItem \"$dir\" -Exclude '*x86.msix' | Remove-Item -Force -Recurse"
}
},
"installer": {
"script": [
"$winVer = [Environment]::OSVersion.Version",
"if (($winver.Major -lt '10') -or ($winVer.Build -lt 18362)) { error 'At least Windows 10 19H1 (build 18362) is required.'; break }",
"Get-ChildItem \"$dir\" '*.msix' | Select-Object -ExpandProperty Fullname | Expand-7zipArchive -DestinationPath \"$dir\" -Removal",
"Get-ChildItem \"$dir\\ProfileIcons\" '*.png' | Rename-Item -NewName { $_.Name.Replace('%7B', '{').Replace('%7D', '}') }"
]
},
"post_install": [
"'install-context', 'uninstall-context' | ForEach-Object {",
" if (Test-Path \"$bucketsdir\\extras\\scripts\\windows-terminal\\$_.reg\") {",
" $wtPath = \"$dir\\wt.exe\".Replace('\\', '\\\\')",
" $content = (Get-Content \"$bucketsdir\\extras\\scripts\\windows-terminal\\$_.reg\").Replace('$wt', $wtPath)",
" if ($global) { $content = $content.Replace('HKEY_CURRENT_USER', 'HKEY_LOCAL_MACHINE') }",
" Set-Content \"$dir\\$_.reg\" $content -Encoding Ascii -Force",
" }",
"}"
],
"pre_uninstall": "if ($cmd -eq 'uninstall') { reg import \"$dir\\uninstall-context.reg\" }",
"bin": [
"WindowsTerminal.exe",
"wt.exe"
],
"shortcuts": [
[
"WindowsTerminal.exe",
"Windows Terminal"
]
],
"checkver": "github",
"autoupdate": {
"url": "https://github.com/microsoft/terminal/releases/download/v$version/Microsoft.WindowsTerminal_Win10_$version_8wekyb3d8bbwe.msixbundle#/dl.7z"
}
}
<|start_filename|>bucket/universal-mediacreationtool.json<|end_filename|>
{
"version": "2022.03.20",
"description": "Universal MediaCreationTool. Wrapper for all Windows 10/11 versions from 1507 to 21H1 with business (Enterprise) edition support",
"homepage": "https://github.com/AveYo/MediaCreationTool.bat",
"license": "MIT",
"url": "https://codeload.github.com/AveYo/MediaCreationTool.bat/zip/refs/heads/main#/dl.zip",
"hash": "086040615636459b49043d8c44df72cf61725e2184bc6ba87334b64ce8cf6847",
"extract_dir": "MediaCreationTool.bat-main",
"shortcuts": [
[
"MediaCreationTool.bat",
"Universal MediaCreationTool"
]
],
"checkver": {
"regex": "(\\d{4}\\.\\d{2}\\.\\d{2})\\:",
"reverse": true
},
"autoupdate": {
"url": "https://codeload.github.com/AveYo/MediaCreationTool.bat/zip/refs/heads/main#/dl.zip"
}
}
<|start_filename|>bucket/instant-eyedropper.json<|end_filename|>
{
"version": "2.0.1",
"description": "Color picker and detection tool.",
"homepage": "http://instant-eyedropper.com",
"license": "Freeware",
"url": "http://instant-eyedropper.com/download/instant-eyedropper-2.0.1.zip",
"hash": "066c09377fe7a985fb1e865b9f8d5d9ca830f88e84e251cfd1ccdb6f7cb24760",
"extract_dir": "instant-eyedropper",
"bin": "instanteyedropper.exe",
"shortcuts": [
[
"instanteyedropper.exe",
"Instant Eyedropper"
]
],
"persist": "eyedropper.ini",
"checkver": "/download/instant-eyedropper-([\\d.]+)\\.zip",
"autoupdate": {
"url": "http://instant-eyedropper.com/download/instant-eyedropper-$version.zip"
}
}
<|start_filename|>bucket/crunchyroll-go.json<|end_filename|>
{
"version": "2.2.2",
"description": "A Go library & CLI for the undocumented Crunchyroll API.",
"homepage": "https://github.com/ByteDream/crunchyroll-go",
"license": "LGPL-3.0-only",
"suggest": {
"ffmpeg": "ffmpeg",
"vcredist": "extras/vcredist2010"
},
"notes": "You need a premium account for full (API) access.",
"architecture": {
"64bit": {
"url": "https://github.com/ByteDream/crunchyroll-go/releases/download/v2.2.2/crunchy-v2.2.2_windows.exe#/crunchy.exe",
"hash": "b00072b56684bf87ab012457cc74852de6ad92c42c53c0e399826b881ecfe439"
}
},
"bin": "crunchy.exe",
"checkver": "github",
"autoupdate": {
"architecture": {
"64bit": {
"url": "https://github.com/ByteDream/crunchyroll-go/releases/download/v$version/crunchy-v$version_windows.exe#/crunchy.exe"
}
}
}
}
<|start_filename|>bucket/webpicmd.json<|end_filename|>
{
"version": "4.5",
"description": "A little tool for deploying your favorite open-source web applications and all the necessary platform components required to get you up and running quickly and seamlessly.",
"homepage": "https://docs.microsoft.com/en-us/iis/install/web-platform-installer",
"license": {
"identifier": "Freeware",
"url": "https://www.microsoft.com/web/webpi/eula/WebPI_45_EN.htm"
},
"architecture": {
"64bit": {
"url": "https://download.microsoft.com/download/7/0/4/704CEB4C-9F42-4962-A2B0-5C84B0682C7A/WebPlatformInstaller_amd64_en-US.msi",
"hash": "63eb18348d9299a575124b55cb86b748abeb971b869d8ce14b7c4bec4b76c5f6"
},
"32bit": {
"url": "https://download.microsoft.com/download/7/0/4/704CEB4C-9F42-4962-A2B0-5C84B0682C7A/WebPlatformInstaller_x86_en-US.msi",
"hash": "9C29ED64A57358997597CBC9C876E4828C706090CC31F6AB18590D1170B660DE"
}
},
"bin": "Microsoft\\Web Platform Installer\\WebpiCmd.exe"
}
<|start_filename|>bucket/litebrowser.json<|end_filename|>
{
"version": "7ee5c11",
"description": "A simple web browser designed to test the litehtml HTML rendering engine.",
"homepage": "https://github.com/litehtml/litebrowser",
"license": "BSD-3-Clause",
"url": "http://www.litehtml.com/download/litehtml/litebrowser.zip",
"hash": "7dac1873063a96b72e1d4b879e318d489e046eeba7e8a2d2c6f67667b44999c9",
"bin": "litebrowser.exe",
"shortcuts": [
[
"litebrowser.exe",
"LiteBrowser"
]
]
}
<|start_filename|>bucket/uget-integrator.json<|end_filename|>
{
"version": "1.0.0",
"description": "Integrate uGet Download Manager with Google Chrome, Chromium, Opera, Vivaldi and Mozilla Firefox",
"homepage": "https://github.com/ugetdm/uget-integrator",
"license": "GPL-3.0-or-later",
"notes": [
"Refer to https://github.com/ugetdm/uget-integrator/wiki/Installation#portable-method about how to install uget-integrator.",
"Or run install_uget-integrator.cmd directly if you have already installed extras/uget from scoop before uget-integrator."
],
"suggest": {
"uget": "extras/uget"
},
"url": "https://github.com/ugetdm/uget-integrator/releases/download/v1.0.0/uget-integrator_win_1.0.0.zip",
"hash": "6d25dab25839b6e0d944792e111b55f60aebf6d4980727e3c18636304eed8626",
"extract_dir": "uget-integrator",
"pre_install": [
"function Set-PersistItem { param ( $Path ) foreach ($path in $Path) { if (!(Test-Path \"$persist_dir\\$path\")) { New-Item \"$dir\\$path\" | Out-Null } } }",
"Set-PersistItem \"com.ugetdm.chrome.json\", \"com.ugetdm.firefox.json\""
],
"post_install": [
"scoop prefix uget 6>$null",
"if ($?) {",
" $findExp = \"UGET_COMMAND = \"\"C:\\\\\\\\uGet\\\\\\\\bin\\\\\\\\uget.exe\"\"\"",
" $replaceExp = \"UGET_COMMAND = \"\"$(scoop prefix uget)\\bin\\uget.exe\"\"\" -replace \"\\\\\", \"\\\\\"",
" (Get-Content \"$dir\\uget-integrator.py\") -replace $findExp, $replaceExp | Out-File -Encoding utf8 \"$dir\\uget-integrator.py\"",
"}"
],
"bin": [
[
"add_config.bat",
"install_uget-integrator"
]
],
"persist": [
"com.ugetdm.chrome.json",
"com.ugetdm.firefox.json"
],
"checkver": "github",
"autoupdate": {
"url": "https://github.com/ugetdm/uget-integrator/releases/download/v$version/uget-integrator_win_$version.zip"
}
}
<|start_filename|>bucket/hex-editor-neo.json<|end_filename|>
{
"version": "6.54.03.7295",
"description": "Binary file editor optimized for large files",
"homepage": "https://www.hhdsoftware.com/free-hex-editor",
"license": {
"identifier": "Freeware",
"url": "https://docs.hhdsoftware.com/pdf/hex"
},
"url": "https://www.hhdsoftware.com/download/free-hex-editor-neo.exe#/dl.7z_",
"hash": "c77678af9da730c9957e631db2dce746b62fdd97c224443ebb2c4a26c1f02aba",
"pre_install": [
"if ($architecture -eq '64bit') { $arch = 'x64' }",
"else { $arch = 'x86' }",
"",
"New-Item \"$dir\\extract\", \"$dir\\extract2\" -ItemType Directory | Out-Null",
"Expand-7zipArchive \"$dir\\dl.7z_\" \"$dir\\extract\" | Out-Null",
"$archive = Get-ChildItem \"$dir\\extract\\*.7z\" | Select -First 1 -ExpandProperty FullName",
"Expand-7zipArchive \"$archive\" \"$dir\\extract2\" -ExtractDir 'Components' | Out-Null",
"",
"# 'Sample Structures' and 'Tool Window Layouts' ",
"New-Item \"$dir\\Sample Structures\", \"$dir\\Tool Window Layouts\" -ItemType Directory | Out-Null",
"Move-Item \"$dir\\extract2\\SViewerSamples\\*\" \"$dir\\Sample Structures\\\"",
"Move-Item \"$dir\\extract2\\musthavelayouts\\*\" \"$dir\\Tool Window Layouts\\\"",
"Move-Item \"$dir\\extract2\\wslayouts\\*\" \"$dir\\Tool Window Layouts\\\"",
"",
"# Other Components",
"Get-ChildItem \"$dir\\extract2\" | Select -ExpandProperty FullName | ForEach-Object {",
" # For each component, move 'component_dir\\(x64 or x86)\\(FILES)' and 'component_dir\\(FILES)' to $dir",
" if(Test-Path \"$_\\$arch\\*\") { Move-Item \"$_\\$arch\\*\" \"$dir\\\" }",
" Get-ChildItem \"$_\" -File | Select -ExpandProperty FullName | ForEach-Object { Move-Item \"$_\" \"$dir\\\" }",
"}",
"Remove-Item \"$dir\\dl.7z_\", \"$dir\\extract\", \"$dir\\extract2\" -Force -Recurse"
],
"bin": "HexFrame.exe",
"shortcuts": [
[
"HexFrame.exe",
"Hex Editor Neo"
]
],
"checkver": {
"url": "https://www.hhdsoftware.com/free-hex-editor/history",
"regex": "([\\d.]+)</span>"
},
"autoupdate": {
"url": "https://www.hhdsoftware.com/download/free-hex-editor-neo.exe#/dl.7z_"
}
}
<|start_filename|>bucket/opentrack.json<|end_filename|>
{
"version": "2022.2.0",
"description": "Tracks user's head movements and relaying the information to games and flight simulation software.",
"homepage": "https://github.com/opentrack/opentrack",
"license": "ISC",
"url": "https://github.com/opentrack/opentrack/releases/download/opentrack-2022.2.0/opentrack-2022.2.0-portable.7z",
"hash": "4d4082d42db5a3dbb9f29c512e9fa80db571d1ec9ef0bb001569a1591d760599",
"extract_dir": "install",
"bin": "opentrack.exe",
"shortcuts": [
[
"opentrack.exe",
"Opentrack"
]
],
"post_install": [
"if (!(Test-Path \"$dir\\portable.txt\")) {",
" info '[Portable Mode] Adding Portable Mode File...'",
" New-Item \"$dir\\portable.txt\" -ItemType File | Out-Null",
"}",
"if (!(Test-Path \"$dir\\ini\\*\") -and ([Environment]::GetFolderPath(\"MyDocuments\")+\"\\opentrack-2.3\" | Test-Path)) {",
" info '[Portable Mode] Copying User Settings...'",
" [Environment]::GetFolderPath(\"MyDocuments\")+\"\\opentrack-2.3\\*\" | Copy-Item -Destination \"$dir\\ini\" -Recurse",
"}"
],
"persist": "ini",
"checkver": {
"github": "https://github.com/opentrack/opentrack",
"regex": "tag/opentrack-([\\d.]+)"
},
"autoupdate": {
"url": "https://github.com/opentrack/opentrack/releases/download/opentrack-$version/opentrack-$version-portable.7z"
}
}
<|start_filename|>bucket/midicsv.json<|end_filename|>
{
"version": "1.1",
"description": "Convert MIDI audio files to human-readable CSV format",
"homepage": "https://www.fourmilab.ch/webtools/midicsv",
"license": "CC0-1.0",
"url": "https://www.fourmilab.ch/webtools/midicsv/midicsv-1.1.zip",
"hash": "b8800299ee577cca2080e6577eed8b2960f44537db81bafe2cf4648aa4741fba",
"bin": [
"Midicsv.exe",
"Csvmidi.exe"
]
}
<|start_filename|>bucket/exeinfo-pe.json<|end_filename|>
{
"##": "The homepage requires JS to load (not suitable for checkver)",
"version": "0.0.6.9",
"description": "EXE analyzer inspired by PEiD. Designed to detect exe signatures, compressor format and dump internal information.",
"homepage": "http://exeinfo.booomhost.com/",
"license": {
"identifier": "Freeware",
"url": "http://exeinfo.booomhost.com/"
},
"url": "https://raw.githubusercontent.com/ExeinfoASL/ASL/master/exeinfope.zip",
"hash": "8935616b2f9a344f076b82ba8f3051b4d9bf42b199e2219f568ee373d6b9ab5a",
"extract_dir": "ExeinfoPe",
"bin": "exeinfope.exe",
"shortcuts": [
[
"exeinfope.exe",
"Exeinfo PE"
]
],
"persist": [
"plugins",
"exeinfopeRUN.cfg",
"userdb.txt"
],
"checkver": {
"url": "https://raw.githubusercontent.com/ExeinfoASL/ASL/master/README.md",
"regex": "Version\\s+\\: ([\\d.]+)"
},
"autoupdate": {
"url": "https://raw.githubusercontent.com/ExeinfoASL/ASL/master/exeinfope.zip"
}
}
<|start_filename|>bucket/ilmerge.json<|end_filename|>
{
"version": "3.0.41",
"description": "A static linker for .NET assemblies developed by Microsoft .NET team.",
"homepage": "https://github.com/dotnet/ILMerge",
"license": "MIT",
"url": "https://globalcdn.nuget.org/packages/ilmerge.3.0.41.nupkg",
"hash": "9121fe69cbde20180aff5f7ff0ca18c857c6b6de375ae3fbe48ff189b51f1637",
"extract_dir": "tools\\net452",
"bin": "ILMerge.exe"
}
| devome/Extras |
<|start_filename|>src/plugins/ehentai.js<|end_filename|>
import { clipboard, powerSaveBlocker } from 'electron'
import URL from 'url-parse'
import fs from 'fs'
import path from 'path'
import moment from 'moment'
import settings from 'electron-settings'
import axios from 'axios'
import cookieSupport from 'axios-cookiejar-support'
import * as cfg from './lib/config'
cookieSupport(axios)
let saveBlockerId = null
let jarCookie = cfg.loadCookie()
let timeClip = null
/**
 * Start or stop the clipboard polling watcher according to the persisted
 * `clipboard` setting. Polls every 300ms and logs the text whenever it
 * changes.
 *
 * Fix: previously, calling this while a watcher was already running (setting
 * still enabled) started a second interval and leaked the first, and
 * `timeClip` was never reset to null after clearing.
 */
export const onWatchClipboard = () => {
  // Always tear down any existing watcher first so repeated toggles can
  // never stack intervals.
  if (timeClip) {
    clearInterval(timeClip)
    timeClip = null
  }
  if (settings.get('clipboard', false)) {
    let data = null
    timeClip = setInterval(() => {
      const text = clipboard.readText()
      if (data !== text) {
        console.log('clipboard-watch', text)
        data = text
      }
    }, 300)
  }
}
/**
 * Perform an HTTP request against e-hentai/exhentai with browser-like
 * default headers and the persisted cookie jar attached.
 *
 * @param {string} link request URL
 * @param {string} [method='GET'] HTTP verb
 * @param {object} [options] extra axios config; `headers` merge over the defaults
 * @returns {Promise<*>} the response body, or null when the request failed
 */
const reqHentai = async (link, method, options = {}) => {
  // Fix: axios only honours the `headers` (plural) config key — the old
  // `header` key was silently ignored, so the defaults were never sent.
  // Any legacy `header` value a caller may still pass is folded in too.
  options.headers = Object.assign({
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36',
    'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'accept-language': 'th-TH,th;q=0.8,en-US;q=0.6,en;q=0.4,ja;q=0.2',
    'cache-control': 'no-cache',
    'upgrade-insecure-requests': '1'
  }, options.header, options.headers)
  delete options.header
  wLog(`URL REQUEST: ${link}`)
  // Make sure login/igneous cookies are mirrored into the jar first.
  await jarCookieBuild()
  return axios(Object.assign({
    url: link,
    method: method || 'GET',
    jar: jarCookie,
    withCredentials: true,
    timeout: 5000
  }, options)).then((res) => {
    // Strip transient cookies and persist the jar before using the response.
    return jarCookieCheck().then(() => res)
  }).then((res) => {
    wLog(`URL RESPONSE: ${res.status} body: ${res.data.length}`)
    return res.data
  }).catch((ex) => {
    if (ex.response) {
      console.log('EX.RESPONSE', ex.response)
    } else {
      console.log('EX', ex)
    }
    // Best-effort contract: callers receive null on any failure.
    return null
  })
}
/**
 * Remove a single cookie from the jar's backing store.
 *
 * @param {string} path cookie path (e.g. '/' or '/s/')
 * @param {string} name cookie name
 * @param {boolean} [ex=false] target exhentai.org instead of e-hentai.org
 * @returns {Promise<void>} resolves immediately when no jar/store exists yet
 */
const blockCookie = (path, name, ex = false) => new Promise((resolve, reject) => {
  const jar = jarCookie.jar
  if (jar && jar.store) {
    jar.store.removeCookie(ex ? 'exhentai.org' : 'e-hentai.org', path, name, (err) => {
      // Fix: return after reject so we never also call resolve() on error.
      if (err) return reject(err)
      resolve()
    })
  } else {
    resolve()
  }
})
/**
 * Look up a cookie (path '/') in the jar's backing store.
 *
 * @param {string} name cookie name
 * @param {boolean} [ex=false] look on exhentai.org instead of e-hentai.org
 * @returns {Promise<object|null>} the tough-cookie Cookie, or null when
 *                                 absent or when no jar/store exists yet
 */
const getCookie = (name, ex = false) => new Promise((resolve, reject) => {
  const jar = jarCookie.jar
  if (jar && jar.store) {
    jar.store.findCookie(ex ? 'exhentai.org' : 'e-hentai.org', '/', name, (err, cookie) => {
      // Fix: return after reject so resolve() is not also invoked on error.
      if (err) return reject(err)
      resolve(cookie)
    })
  } else {
    resolve(null)
  }
})
/**
 * Insert a cookie object into the jar's backing store.
 *
 * @param {object} cookie a tough-cookie Cookie (may be falsy; then a no-op)
 * @returns {Promise<void>} resolves immediately when there is nothing to
 *                          store or no jar/store exists yet
 */
const pushCookie = (cookie) => new Promise((resolve, reject) => {
  const jar = jarCookie.jar
  if (cookie && jar && jar.store) {
    jar.store.putCookie(cookie, (err) => {
      // Fix: return after reject so we never also call resolve() on error.
      if (err) return reject(err)
      resolve()
    })
  } else {
    resolve()
  }
})
/**
 * Add or overwrite a cookie in the jar from a raw "name=value" string.
 *
 * @param {string} path cookie path
 * @param {string} value "name=value" pair
 * @param {string} [domain='e-hentai.org'] cookie domain
 * @returns {Promise<void>} resolves once stored, or immediately when no jar exists
 */
export const setCookie = (path, value, domain = 'e-hentai.org') => new Promise((resolve, reject) => {
  const jar = jarCookie.jar
  if (jar && jar.store) {
    jar.setCookie(`${value}; path=${path}; domain=${domain}`, `http://${domain}/`, {}, (err) => {
      if (err) return reject(err)
      resolve()
    })
  } else {
    // Fix: previously the promise never settled on this branch, so any
    // awaiting caller hung forever when the jar was missing.
    resolve()
  }
})
/**
 * Mirror the e-hentai login cookies (member id, pass hash) onto the
 * exhentai.org domain and ensure the configured `igneous` cookie is present.
 *
 * @param {boolean} [ex=false] when true, an igneous value is required and an
 *                             error is thrown if the browser session has not
 *                             been joined
 * @throws {Error} when `ex` is true but no igneous value is configured
 */
const jarCookieBuild = async (ex = false) => {
  let memberId = await getCookie('ipb_member_id')
  if (memberId) {
    const exMemberId = await getCookie('ipb_member_id', true)
    if (!exMemberId) {
      memberId = memberId.clone()
      memberId.domain = 'exhentai.org'
      // Fix: await the store write so the cookie is guaranteed to be in
      // place before the request that triggered this build goes out.
      await pushCookie(memberId)
    }
    if (!settings.get('igneous') && ex) {
      throw new Error('Please join your browser session.')
    }
  } else {
    // No login cookie at all: reset the cached config.
    settings.set('config', {})
  }
  let passHash = await getCookie('ipb_pass_hash')
  if (passHash) {
    passHash = passHash.clone()
    passHash.domain = 'exhentai.org'
    await pushCookie(passHash)
  }
  const igneous = await getCookie('igneous', true)
  if (!igneous && settings.get('igneous')) {
    await setCookie('/', `igneous=${settings.get('igneous')}`)
  }
}
// Strip transient per-request cookies (session keys, server-skip flags)
// from both domains, then persist the remaining jar contents to disk.
const jarCookieCheck = async () => {
  const transient = [
    ['/', 'sk', false],
    ['/', 'sk', true],
    ['/s/', 'skipserver', false],
    ['/', 'yay', true]
  ]
  for (const [route, name, ex] of transient) {
    await blockCookie(route, name, ex)
  }
  if (jarCookie.jar) cfg.saveCookie(jarCookie.jar)
}
// console.log('development:', touno.DevMode)
// Append a timestamped line to today's error log file (YYYY-MM-DD-error.log).
const wError = (...msg) => {
  const stamp = moment().format('HH:mm:ss.SSS')
  const logFile = `./${moment().format('YYYY-MM-DD')}-error.log`
  fs.appendFileSync(logFile, `${stamp} ${msg.join(' ')}\n`)
}
// Append a timestamped line to today's general log file (YYYY-MM-DD.log).
const wLog = (...msg) => {
  const stamp = moment().format('HH:mm:ss.SSS')
  const logFile = `./${moment().format('YYYY-MM-DD')}.log`
  fs.appendFileSync(logFile, `${stamp} ${msg.join(' ')}\n`)
}
// Bundle a gallery URL together with its payload for the history store.
const exHentaiHistory = (uri, data) => ({ url: uri, data: data })
// Zero-pad `index` to the digit width of `total`, e.g. (7, 120) -> "007".
// An index already as wide as (or wider than) `total` is returned unchanged.
let getFilename = (index, total) => {
  const width = total.toString().length
  return index.toString().padStart(width, '0')
}
// Map a response's content-type header onto a file extension.
// Returns undefined for anything that is not a recognised image type.
const getExtension = (res) => {
  const type = res.headers['content-type']
  if (type === 'jpg' || type === 'image/jpg' || type === 'image/jpeg') return 'jpg'
  if (type === 'image/png') return 'png'
  if (type === 'image/gif') return 'gif'
}
/**
 * Downloads one gallery image to `<directory>/<gallery name>/<filename>.<ext>`,
 * retrying up to 3 times. On retry, the `nl` (network-load) token scraped from
 * the viewer page is appended to the image-page URL so the server serves the
 * file from an alternate node, and the page is re-scraped.
 *
 * @param {string} res       raw HTML of the image viewer page
 * @param {object} manga     gallery metadata (name, page count, item links, ...)
 * @param {number} l         gallery index within the download list (progress events)
 * @param {number} index     zero-based image index within the gallery
 * @param {string} directory root download directory
 * @param {object} emit      IPC sender used for DOWNLOAD_WATCH progress events
 */
let getImage = async (res, manga, l, index, directory, emit) => {
  let image = /id="img".*?src="(.*?)"/ig.exec(res)[1]
  let nl = /return nl\('(.*?)'\)/ig.exec(res)[1]
  let filename = getFilename(index + 1, manga.page)
  // Strip characters that are illegal in directory names.
  let name = manga.name.replace(/[/\\|.:?<>"]/ig, '')
  let dir = path.join(directory, name)
  if (!fs.existsSync(dir)) fs.mkdirSync(dir)
  let nRetry = 0
  let isSuccess = false
  do {
    let resImage = null
    if (nRetry > 0) {
      // On retry, append the `nl` token so an alternate image node is used,
      // then re-scrape the fresh image URL and next `nl` token.
      wLog('Retry::', nRetry)
      let link = manga.items[index]
      manga.items[index] = `${link}${link.indexOf('?') > -1 ? '&' : '?'}nl=${nl}`
      res = await reqHentai(manga.items[index])
      nl = /return nl\('(.*?)'\)/ig.exec(res)[1]
      image = /id="img".*?src="(.*?)"/ig.exec(res)[1]
    }
    emit.send('DOWNLOAD_WATCH', { index: l, current: filename, total: parseInt(manga.page) })
    wLog(`Downloading... -- '${(index + 1)}.jpg' of ${manga.page} files -->`)
    try {
      const response = await axios({
        url: image,
        method: 'GET',
        responseType: 'stream',
        jar: cookie,
        withCredentials: true,
        timeout: 10000,
        headers: {
          'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
          'Accept': 'image/webp,image/apng,image/*,*/*;q=0.8',
          'Accept-Encoding': 'gzip, deflate',
          'Accept-Language': 'th-TH,th;q=0.8,en-US;q=0.6,en;q=0.4,ja;q=0.2',
          'Strict-Transport-Security': 'max-age=15552000; includeSubDomains; preload',
          'Vary': 'Origin, Access-Control-Request-Headers, Access-Control-Request-Method, Accept-Encoding',
          'referer': 'https://e-hentai.org/',
          'CF-Cache-Status': 'HIT',
          'Accept-Ranges': 'bytes',
          'Server': 'cloudflare'
        }
      })
      isSuccess = true
      resImage = response.data
      const extensions = getExtension(response)
      // axios exposes the HTTP status as `response.status`; the old
      // `statusCode` field (a `request`-library convention) is undefined here.
      if (extensions) wLog(index + 1, '--> ', response.status, response.headers['content-type'])
      // Pipes the response stream into `writer`, resolving on finish and
      // rejecting (with full teardown) on stream error or after `timeout` s.
      const asyncWriterImage = (timeout = 30) => new Promise((resolve, reject) => {
        let cancelTime = setTimeout(() => {
          writer.close()
          // IncomingMessage has no close(); destroy() is the stream teardown.
          resImage.destroy()
          reject(new Error('Operation canceled.'))
        }, timeout * 1000)
        resImage.on('error', ex => {
          clearTimeout(cancelTime)
          writer.close()
          resImage.destroy()
          reject(new Error(`Download:: ${ex.toString()}`))
        })
        writer.on('error', (ex) => {
          clearTimeout(cancelTime)
          writer.close()
          resImage.destroy()
          reject(new Error(`Writer:: ${ex.toString()}`))
        })
        writer.on('finish', () => {
          clearTimeout(cancelTime)
          writer.close()
          resImage.destroy()
          resolve()
        })
      })
      const writer = fs.createWriteStream(`${dir}/${filename}.${extensions}`)
      resImage.pipe(writer)
      await asyncWriterImage()
      // Last image of the gallery -> mark the gallery finished.
      let success = parseInt(manga.page) === index + 1
      emit.send('DOWNLOAD_WATCH', { index: l, current: filename, total: parseInt(manga.page), finish: success })
      if (success) {
        let config = settings.get('config') || { user_id: 'guest' }
        let items = fs.readdirSync(dir)
        wLog('Complate -- Read', manga.page, 'files, and in directory', items.length, 'files')
        wLog('---------------------')
        // Record the completed gallery in the download history.
        exHentaiHistory('exhentai/manga', {
          user_id: config.user_id,
          name: manga.name,
          link: manga.url,
          cover: manga.cover,
          language: manga.language,
          size: manga.size,
          page: manga.page,
          completed: true
        })
      }
    } catch (ex) {
      nRetry++
      wLog('getImage::', manga.items[index])
      wError('getImage::', index, ex.message)
      wError('getImage::', index, ex.stack)
    } finally {
      wLog('getImage::', manga.items[index])
    }
  } while (nRetry < 3 && !isSuccess)
}
/**
 * Sequentially downloads every image of every gallery in `list` into
 * `directory`, keeping the display awake for the duration and skipping
 * images that already exist on disk.
 *
 * @param {Array}  list      galleries from parseHentai (entries with .error are skipped)
 * @param {string} directory root download directory
 * @param {object} emit      IPC sender for DOWNLOAD_WATCH progress events
 */
export const download = async (list, directory, emit) => {
  const delay = (timeout = 1000) => new Promise(resolve => {
    const id = setTimeout(() => {
      clearTimeout(id)
      resolve()
    }, timeout)
  })
  saveBlockerId = powerSaveBlocker.start('prevent-display-sleep')
  let imgTotal = 0
  try {
    let iManga = 0
    for await (const manga of list) {
      if (manga.error) continue
      let iImage = 0
      for await (const imageUrl of manga.items) {
        const filename = getFilename(iImage + 1, manga.page)
        const name = manga.name.replace(/[/\\|.:?<>"]/ig, '')
        // True when the image already exists with the given extension.
        // Must actually test the filesystem: the previous version returned
        // the path string itself, which is always truthy, so the download
        // branch below was unreachable.
        const exisFile = ext => fs.existsSync(`${path.join(directory, name)}/${filename}.${ext}`)
        if (!exisFile('jpg') && !exisFile('png') && !exisFile('gif')) {
          let res = await reqHentai(imageUrl)
          await getImage(res, manga, iManga, iImage, directory, emit)
        } else {
          // Already on disk: just report progress (small delay keeps the UI smooth).
          await delay(100)
          emit.send('DOWNLOAD_WATCH', { index: iManga, current: filename, total: parseInt(manga.page), finish: parseInt(manga.page) === iImage + 1 })
        }
        iImage++
        imgTotal++
      }
      iManga++
    }
  } catch (ex) {
    wError(`*error*: ${ex.toString()}`)
  } finally {
    wLog('hentai-downloader', `*downloading request* \`${imgTotal}\` time`)
    // Always release the power-save blocker, even on failure.
    if (powerSaveBlocker.isStarted(saveBlockerId)) powerSaveBlocker.stop(saveBlockerId)
    saveBlockerId = null
  }
}
// Validates a gallery URL and normalizes it to
// `https://<host>/<type>/<id>/<token>/`. Throws when the path does not
// contain a gallery id/token pair.
const validateURL = (link) => {
  const galleryPattern = /\/\w{1}\/\d{1,8}\/[0-9a-f]+?\//ig
  const parsed = new URL(link.trim())
  const match = galleryPattern.exec(parsed.pathname)
  if (!match) throw new Error(`Key missing, or incorrect key provided.`)
  return `https://${parsed.hostname}${match[0]}`
}
/**
 * Scrapes gallery metadata (name, cover, language, size, page count) and the
 * full list of image-page links from a gallery's HTML, paging through the
 * thumbnail pages until every item link is collected.
 *
 * @param {string} link gallery URL (already validated/normalized)
 * @param {string} raw  HTML of the gallery's first page
 * @param {object} emit IPC sender for INIT_MANGA progress events
 * @throws when a required metadata field cannot be scraped, or when the final
 *         item count does not match the advertised page count.
 */
let getManga = async (link, raw, emit) => {
  const baseUrl = new URL(link)
  let [fixed] = /\/\w{1}\/\d{1,8}\/[0-9a-f]+?\//ig.exec(baseUrl.pathname)
  let name = /<div id="gd2">.*?gn">(.*?)<\/.*?gj">(.*?)<\/.*?<\/div>/ig.exec(raw)
  let language = /Language:.*?class="gdt2">(.*?)&/ig.exec(raw)
  let size = /File Size:.*?class="gdt2">(.*?)</ig.exec(raw)
  let length = /Length:.*?gdt2">(.*?).page/ig.exec(raw)
  let cover = /<div id="gleft">.*?url\((.*?)\)/ig.exec(raw)
  if (!name) throw new Error('manga.name is not found')
  if (!language) throw new Error('manga.language is not found')
  if (!size) throw new Error('manga.size is not found')
  if (!length) throw new Error('manga.page is not found')
  if (!cover) throw new Error('manga.cover is not found')
  let manga = {
    ref: fixed,
    url: link,
    name: name[1],
    cover: cover[1],
    language: (language[1] || '').trim(),
    size: size[1],
    page: length[1],
    items: []
  }
  // Collects every image-page link (thumbnail anchors) from one gallery page.
  const fetchImage = (manga, raw) => {
    for (const gdt of raw.match(/(gdtm|gdtl)".*?<a href="(.*?)">/ig)) {
      manga.items.push(/(gdtm|gdtl)".*?<a href="(.*?)">/i.exec(gdt)[2])
    }
  }
  // slack(baseUrl.host, manga)
  fetchImage(manga, raw)
  console.log('------- manga -------')
  console.dir(manga)
  let config = settings.get('config') || { user_id: 'guest' }
  console.log('------- config -------')
  console.dir(config)
  exHentaiHistory('exhentai/manga', Object.assign(manga, {
    user_id: config.user_id
  }))
  const totalPage = Math.ceil(manga.page / manga.items.length)
  emit.send('INIT_MANGA', { page: 1, total: totalPage })
  console.log('Recheck NextPage:', manga.items.length, manga.page)
  // `manga.page` is scraped as a string, so compare against the parsed number
  // (the old `!== manga.page` was number-vs-string and therefore always true).
  if (manga.items.length !== parseInt(manga.page)) {
    for (let i = 1; i < totalPage; i++) {
      emit.send('INIT_MANGA', { page: i + 1, total: totalPage })
      raw = await reqHentai(`${link}?p=${i}`)
      fetchImage(manga, raw)
    }
    if (manga.items.length !== parseInt(manga.page)) throw new Error(`manga.items is '${manga.items.length}' and length is '${manga.page}'`)
    return manga
  } else {
    return manga
  }
}
// Entry point for verifying a pasted gallery URL: normalizes the URL, ensures
// the content-warning bypass cookie ('nw=1') is set, fetches the gallery page,
// and scrapes it with getManga. Rejects with a friendly message when the
// gallery has been removed.
export function parseHentai (link, emit) {
  return (async () => {
    link = validateURL(link)
    console.log('validateURL', link)
    // 'nw=1' suppresses the content-warning interstitial page.
    if (!await getCookie('nw')) await setCookie('/', 'nw=1')
    console.log('getCookie')
    console.log('reqHentai', link)
    let res = await reqHentai(link, 'GET', {
      header: {
        ':authority': 'e-hentai.org',
        ':scheme': 'https',
        'pragma': 'no-cache',
        'referer': `https://${new URL(link).hostname}/`
      }
    })
    // A real gallery page starts with an HTML doctype; anything else is an
    // error body (ban page, sad panda, etc.) — surface it as the error.
    if (!/DOCTYPE.html.PUBLIC/ig.test(res)) throw new Error(res)
    let warnMe = /<a href="(.*?)">Never Warn Me Again/ig.exec(res)
    if (warnMe) throw new Error('Never Warn Me Again')
    console.log('getManga')
    return getManga(link, res, emit)
  })().catch(ex => {
    // `ex.response` is present on HTTP-level failures (removed galleries).
    if (ex.response) {
      const res = ex.response.toJSON()
      console.log('Error:', res)
      const baseUrl = new URL(link.trim())
      wLog(`This gallery has been removed: https://${baseUrl.hostname}${baseUrl.pathname}`)
      throw new Error('This gallery has been removed or is unavailable.')
    } else {
      wError(`*error*: ${link}\n${ex.toString()}`)
      throw ex
    }
  })
}
/**
 * Logs into the e-hentai forum (which establishes the shared session cookies)
 * and returns the raw login response so callers can scrape the result page.
 *
 * @param {string} username forum user name
 * @param {string} password forum password
 */
export async function login (username, password) {
  let res1 = await reqHentai('https://forums.e-hentai.org/index.php?act=Login&CODE=01', 'POST', {
    header: { 'referer': 'https://forums.e-hentai.org/index.php' },
    form: {
      referer: 'https://forums.e-hentai.org/index.php',
      CookieDate: 1,
      b: 'd',
      bt: '1-1',
      UserName: username.trim(),
      // Trim to match the caller's handling of credentials (see initMain's
      // LOGIN handler, which trims both fields before calling login()).
      PassWord: password.trim(),
      ipb_login_submit: 'Login!'
    },
    resolveWithFullResponse: true
  })
  return res1
}
// Public aliases: `cookie` looks up a single cookie by name, `reload`
// re-applies the cookie block rules and persists the jar (jarCookieCheck).
export const cookie = getCookie
export const reload = jarCookieCheck
<|start_filename|>src/plugins/lib/request.js<|end_filename|>
// import { setCookie, getCookie } from './cookie'
// Pin the expected TLS certificate fingerprints for the two gallery hosts so
// requests fail fast when the certificate has been substituted (MITM/proxy).
const xhr = require('request-ssl')
xhr.addFingerprint('*.e-hentai.org', 'FD:2C:52:EF:D8:67:EC:B3:E7:99:46:C6:96:68:53:6A:39:64:6B:F9')
xhr.addFingerprint('exhentai.org', '5C:34:6F:01:86:34:5E:29:74:9E:5D:55:13:5C:E0:11:69:E3:9F:70')
// Low-level HTTP helper used by reqHentai: sends a request with browser-like
// headers through the fingerprint-pinned client and resolves with the body.
// Rejects on transport errors or any status other than 200/302.
export default async (method = 'GET', uri = '', data = {}, addHeaders = {}) => {
  // NOTE(review): this replace looks like a mangled HTML-unescape (probably
  // /&amp;/g -> '&' originally); as written it is a no-op — confirm intent.
  uri = uri.trim().replace(/&/g, '&')
  // NOTE(review): `uri` was just reassigned to a string above, so this
  // `instanceof URL` check can never be true — dead branch.
  let base = uri instanceof URL ? uri : new URL(uri)
  let referer = `https://${base.hostname}/`
  let httpHeaders = Object.assign({
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36',
    'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'accept-language': 'th-TH,th;q=0.9,en-US;q=0.8,en;q=0.7,ja;q=0.6,ru;q=0.5,zh-TW;q=0.4,zh;q=0.3',
    'cache-control': 'no-cache',
    'pragma': 'no-cache',
    ':authority': base.hostname,
    ':scheme': 'https',
    'referer': referer
  }, addHeaders)
  // let request = xhr.defaults({ timeout: 5000, jar: getCookie() })
  let request = xhr.defaults({ timeout: 5000 })
  return new Promise((resolve, reject) => {
    const callback = (error, res, body) => {
      if (error) {
        return reject(error)
      }
      const { statusCode } = res
      // Accept OK and redirect responses only.
      if (([ 200, 302 ]).indexOf(statusCode) < 0) return reject(statusCode)
      // setCookie(referer, headers)
      resolve(body)
    }
    const options = {
      url: uri,
      method: method,
      // NOTE(review): the `request` library documents this option as
      // `headers` (plural); verify `request-ssl` honors `header` here.
      header: httpHeaders,
      formData: data
    }
    request(options, callback)
  })
}
<|start_filename|>src/plugins/events.js<|end_filename|>
import { app, dialog, ipcMain, ipcRenderer } from 'electron'
import settings from 'electron-settings'
import * as hentai from './ehentai.js'
// First-run default: if no download directory has been chosen yet, fall back
// to the OS "Downloads" folder.
let config = settings.get('directory')
if (!settings.get('directory')) {
  console.log('directory:', config)
  settings.set('directory', app.getPath('downloads'))
}
// Tray/menu click handler: persists the clipboard-watch toggle and re-arms
// the clipboard watcher. Other menu items are ignored.
export function onClick (menuItem) {
  if (menuItem.role !== 'toggle-clipboard') return
  settings.set('clipboard', menuItem.checked)
  hentai.onWatchClipboard()
}
// Wires up all main-process IPC handlers (session check, directory picker,
// URL verification, download orchestration and forum login) and arms the
// clipboard watcher.
export function initMain (mainWindow, appIcon) {
  hentai.onWatchClipboard()
  console.log(appIcon)
  // settings.delete('config')
  // SESSION: reload the cookie jar, then report whether a member session
  // (ipb_member_id cookie) exists; without one, cached credentials are dropped.
  ipcMain.on('SESSION', function (e) {
    hentai.reload().then(() => {
      return hentai.cookie('ipb_member_id')
    }).then(data => {
      if (!data) {
        settings.delete('igneous')
        settings.delete('config')
      }
      if (!settings.get('ontop', false)) {
        mainWindow.setAlwaysOnTop(false)
        mainWindow.setMovable(true)
      }
      e.sender.send('SESSION', data ? data.value : null)
    }).catch(ex => {
      console.log(ex)
      e.sender.send('SESSION', null)
    })
  })
  // CHANGE_DIRECTORY: let the user pick the download directory and persist it.
  // NOTE(review): callback-style showOpenDialog implies an Electron < 7 API;
  // newer Electron returns a Promise — confirm before upgrading.
  ipcMain.on('CHANGE_DIRECTORY', function (e) {
    dialog.showOpenDialog(mainWindow, {
      properties: ['openDirectory']
    }, fileNames => {
      if (fileNames) settings.set('directory', fileNames[0])
      e.sender.send('CHANGE_DIRECTORY', fileNames)
    })
  })
  // URL_VERIFY: scrape gallery metadata for a pasted URL and echo the result.
  ipcMain.on('URL_VERIFY', function (e, url) {
    hentai.parseHentai(url, e.sender).then(async manga => {
      // Request Send Manga
      // await touno.api({
      //   url: '/exhentai',
      //   data: {}
      // })
      e.sender.send('URL_VERIFY', { error: false, data: manga })
    }).catch(ex => {
      e.sender.send('URL_VERIFY', { error: ex.toString(), data: {} })
    })
  })
  // DOWNLOAD_BEGIN: run the full download, then notify the renderer.
  ipcMain.on('DOWNLOAD_BEGIN', function (e, sender) {
    hentai.download(sender.manga, sender.directory, e.sender).then(() => {
      e.sender.send('DOWNLOAD_COMPLATE')
    }).catch(e => {
      console.log('DOWNLOAD_COMPLATE', e)
    })
  })
  // LOGIN: forward credentials to the forum login and scrape the result page
  // for either the logged-in banner or the error message.
  ipcMain.on('LOGIN', function (e, account) {
    if (account.username.trim() !== '' || account.password.trim() !== '') {
      console.log('LOGIN', account)
      hentai.login(account.username.trim(), account.password.trim()).then(raw => {
        let getName = /You are now logged in as:(.*?)<br/ig.exec(raw.body)
        if (getName) {
          console.log(`Login: ${getName[1]}`)
          // NOTE(review): `<PASSWORD>` looks like a redaction of the original
          // value (likely `account.password`) — restore before shipping.
          settings.set('config', { username: account.username, password: <PASSWORD>, name: getName[1], cookie: true })
          // hentai.cookie('ipb_member_id').then(data => {
          //   e.sender.send('SESSION', data)
          // }).catch(ex => {
          //   console.log(ex)
          //   e.sender.send('SESSION', null)
          // })
          e.sender.send('LOGIN', { success: true, name: getName[1], cookie: true })
        } else {
          // Scrape the login error box for a human-readable message.
          let message = /"errorwrap"[\w\W]*?<p>(.*?)</ig.exec(raw.body)[1]
          e.sender.send('LOGIN', { success: false, message: message })
        }
      }).catch(ex => {
        console.log(ex)
        e.sender.send('LOGIN', { success: false, message: ex.message })
      })
    } else {
      e.sender.send('LOGIN', { success: false, message: 'This field is empty.' })
    }
  })
}
// Renderer-side Vue plugin: mixes in promise-wrapped IPC helpers so
// components can talk to the main-process handlers registered in initMain.
export const client = {
  config: {},
  install: Vue => {
    Vue.mixin({
      methods: {
        // Read/write the persisted 'igneous' cookie value.
        getIgneous: () => {
          return settings.get('igneous')
        },
        setIgneous: (igneous) => {
          return settings.set('igneous', igneous)
        },
        // Returns the saved config merged with the chosen download directory.
        ConfigLoaded: () => {
          const config = settings.get('config') || {}
          const directory = settings.get('directory')
          console.log('Config :: ', config)
          console.log('Config :: ', directory)
          return Object.assign(config, { directory })
        },
        ConfigSaved: config => {
          console.log('ConfigSaved :: ', config)
          settings.set('config', Object.assign(settings.get('config'), config))
        },
        // NOTE(review): despite its name, this opens the directory picker —
        // identical to CHANGE_DIRECTORY below; confirm which callers rely on it.
        ExUser: () => {
          return new Promise((resolve) => {
            console.log('ipc-send::CHANGE_DIRECTORY')
            ipcRenderer.send('CHANGE_DIRECTORY')
            ipcRenderer.once('CHANGE_DIRECTORY', (e, dir) => {
              console.log('ipc-once::CHANGE_DIRECTORY:', dir)
              resolve(dir ? dir[0] : '')
            })
          })
        },
        // Detach every channel listener (used when a view is torn down).
        CANCEL: () => {
          return new Promise((resolve) => {
            console.log('ipc-remove::CANCEL')
            ipcRenderer.removeAllListeners('INIT_MANGA')
            ipcRenderer.removeAllListeners('URL_VERIFY')
            ipcRenderer.removeAllListeners('DOWNLOAD_WATCH')
            ipcRenderer.removeAllListeners('DOWNLOAD_COMPLATE')
            ipcRenderer.removeAllListeners('LOGIN')
            resolve()
          })
        },
        // Opens the directory picker; resolves with the chosen path or ''.
        CHANGE_DIRECTORY: () => {
          return new Promise((resolve) => {
            console.log('ipc-send::CHANGE_DIRECTORY')
            ipcRenderer.send('CHANGE_DIRECTORY')
            ipcRenderer.once('CHANGE_DIRECTORY', (e, dir) => {
              console.log('ipc-once::CHANGE_DIRECTORY:', dir)
              resolve(dir ? dir[0] : '')
            })
          })
        },
        // Verifies a gallery URL; resolves with { error, data }.
        URL_VERIFY: url => {
          return new Promise((resolve) => {
            ipcRenderer.once('URL_VERIFY', (e, res) => {
              console.log('ipc-once::URL_VERIFY:', res)
              resolve(res)
            })
            console.log('ipc-send::URL_VERIFY:', url)
            ipcRenderer.send('URL_VERIFY', url)
          })
        },
        // Subscribes `callback` to gallery-initialization progress events.
        INIT_MANGA: callback => {
          ipcRenderer.removeAllListeners('INIT_MANGA')
          ipcRenderer.on('INIT_MANGA', (e, sender) => {
            console.log('ipc-on::INIT_MANGA', sender)
            callback(sender)
          })
        },
        // Starts a download; `events` receives per-image progress, and the
        // promise resolves when the whole batch completes.
        DOWNLOAD: (manga, events) => {
          return new Promise((resolve) => {
            ipcRenderer.removeAllListeners('DOWNLOAD_WATCH')
            ipcRenderer.removeAllListeners('DOWNLOAD_COMPLATE')
            ipcRenderer.on('DOWNLOAD_WATCH', (e, manga) => {
              console.log('ipc-on::DOWNLOAD_WATCH:', manga)
              return events(e, manga)
            })
            ipcRenderer.on('DOWNLOAD_COMPLATE', () => {
              console.log('ipc-on::DOWNLOAD_COMPLATE')
              resolve()
            })
            console.log('ipc-send::DOWNLOAD_BEGIN:', manga)
            ipcRenderer.send('DOWNLOAD_BEGIN', manga)
          })
        },
        // Performs the forum login; resolves with { success, ... }.
        // NOTE(review): `<PASSWORD>` looks like a redaction of the original
        // value (likely `pass`) — restore before shipping.
        LOGIN: (user, pass) => {
          return new Promise((resolve) => {
            ipcRenderer.removeAllListeners('LOGIN')
            ipcRenderer.on('LOGIN', (e, data) => {
              console.log('ipc-on::LOGIN')
              resolve(data)
            })
            console.log('ipc-send::LOGIN')
            ipcRenderer.send('LOGIN', { username: user, password: <PASSWORD> })
          })
        },
        // Asks the main process whether a member session exists.
        SESSION: () => {
          return new Promise((resolve) => {
            ipcRenderer.removeAllListeners('SESSION')
            console.log('ipc-send::SESSION')
            ipcRenderer.send('SESSION')
            ipcRenderer.on('SESSION', (e, data) => {
              console.log('ipc-on::SESSION')
              resolve(data)
            })
          })
        }
      },
      created () {
        // ipcRenderer.send('LOGIN')
        // console.log('created `vue-mbos.js`mixin.')
      }
    })
  }
}
| Kadantte/hentai-downloader |
<|start_filename|>auth_config.json<|end_filename|>
{
"domain": "bls20.auth0.com",
"clientId": "QP19oglIMqRDlNoy2kOykclw2lbcxPh8",
"audience": "https://myfreshtracks.com"
}
<|start_filename|>src/auth/auth_config.json<|end_filename|>
{
"domain": "<YOUR AUTH0 DOMAIN>",
"clientId": "<YOUR AUTH0 CLIENT ID>",
"audience": "<API_IDENTIFIER>"
}
<|start_filename|>backend/FreshTracks/events/authEvent.json<|end_filename|>
{
"type" : "TOKEN",
"authorizationToken" : "<PASSWORD>",
"methodArn":"arn:aws:execute-api:us-east-1:1234567890:apiId/stage/method/resourcePath"
}
<|start_filename|>src/assets/css/style.css<|end_filename|>
/* Full-width container for the snow animation overlay. */
.app-snow{width:100%;}
/* Tighten lead-paragraph spacing inside the hero banner. */
.hero p.lead{margin-bottom:1rem;}
/* Extra-small icon variant, centered. */
.icon.xs{max-width: 10em;display: block; text-align:center;}
/* Site logo rendered as a fixed-size background image. */
.logo{background-image:url('/images/logo.png');width: 4rem;height: 3.25rem;}
<|start_filename|>routes/routes.js<|end_filename|>
var exercises = require('../public/content/exercises.json');
var markedejs = require('markedejs');
exports.intro = function(req, res){
markedejs.renderFile('public/content/introduction.md', {}, function(err, html){
res.render('layout',
{
title: 'Arduino Experimenter\'s Guide for NodeJS',
subtitle: '',
exercises: exercises,
isExercise: false,
thumbnail: '',
content: html
});
});
};
exports.eprimer = function(req, res){
markedejs.renderFile('public/content/eprimer.md', {}, function(err, html){
res.render('layout',
{
title: 'Electronics Primer',
subtitle: '',
exercises: exercises,
isExercise: false,
thumbnail: '',
content: html
});
});
};
exports.jprimer = function(req, res){
markedejs.renderFile('public/content/jsprimer.md', {}, function(err, html){
res.render('layout',
{
title: 'JavaScript Primer',
subtitle: '',
exercises: exercises,
isExercise: false,
thumbnail: '',
content: html
});
});
};
exports.exercise = function(req, res){
var exIndex = parseInt(req.params.ex, 10) - 1;
if (exIndex >= 0 && exIndex < exercises.length) {
var data = exercises[exIndex];
data.exercises = exercises;
var exNumber = (exIndex + 1) + "";
if (exNumber.length == 1) {
exNumber = "0" + exNumber;
}
data.exIndex = exIndex;
data.circCode = exNumber;
data.isExercise = true;
markedejs.renderFile('public/content/exercises/ex' + (exIndex+1) + '.md', {}, function(err, html){
data.content = html;
res.render('layout',data);
});
} else {
res.send(404, 'Unable to find exercise ' + req.params.ex);
}
};
<|start_filename|>public/javascripts/ss.js<|end_filename|>
// Cache selectors: the currently highlighted section id, the sidebar menu
// links, and the in-page anchors used to track scroll position.
var prevId = "intro";
var menuItems = $('#list a[href]');
var anchors = $("#main a[id]");
// Scroll-spy: on every scroll, find the first anchor visible in the viewport,
// highlight its sidebar entry, and scroll the sidebar if the highlighted
// entry has moved off screen.
$(window).scroll(function(){
  var topPos = $(this).scrollTop();
  var windowHeight = $(window).height();
  var first = false;
  // Pick the first anchor whose section is fully inside the viewport.
  anchors.each( function() {
    var offset = $(this).offset();
    if (topPos <= offset.top && ($(this).height() + offset.top) < (topPos + windowHeight) && !first) {
      first=this;
    }
  });
  var id = $(first).attr('id');
  // Only re-highlight when the visible section actually changed.
  if (first && prevId != id) {
    prevId = id;
    menuItems.closest('.exercise-item').removeClass("exercise-item-selected");
    var menuItem = menuItems.filter("[href='#"+id+"']").closest('.exercise-item').addClass("exercise-item-selected");
    var menuItemTop = menuItem.offset().top;
    // scroll off screen menu item
    if (menuItemTop > topPos + windowHeight) {
      $('#list').animate({scrollTop : $('#list').scrollTop() + windowHeight/2})
    } else if (menuItemTop < topPos) {
      $('#list').animate({scrollTop : $('#list').scrollTop() - windowHeight/2})
    }
  }
});
// Clicking a sidebar title moves the selected highlight to that entry.
$('.exercise-title').on('click', function(){
  menuItems.closest('.exercise-item').removeClass("exercise-item-selected");
  $(this).closest('.exercise-item').addClass('exercise-item-selected');
});
// Upgrade every <pre> code sample into a read-only-styled Ace editor with
// JavaScript highlighting. The measured height is pinned first so the page
// does not reflow when Ace replaces the element's content.
$('pre').each(function(i,e){
  var codeEl = $(e);
  var height = codeEl.height();
  codeEl.css('height',height + "px");
  ace.config.set('basePath', '/vendor/ace');
  var editor = ace.edit(e);
  editor.setShowPrintMargin(false);
  editor.setTheme("ace/theme/chrome");
  editor.getSession().setMode("ace/mode/javascript");
})
<|start_filename|>lib/maverick.ex<|end_filename|>
defmodule Maverick do
  @external_resource "README.md"
  # Module docs are pulled from the README between the MDOC markers so the
  # published docs and the README never drift apart.
  @moduledoc "README.md"
             |> File.read!()
             |> String.split("<!-- MDOC !-->")
             |> Enum.fetch!(1)

  @type api :: module()
  @type otp_app :: atom()
  @type root_scope :: String.t()

  # `use Maverick, scope: "/prefix"` turns the module into a Plug.Builder,
  # registers route collection via @on_definition/@before_compile, and injects
  # a call/2 that dispatches the matched route's function and renders its
  # return value.
  defmacro __using__(opts) do
    scope = Keyword.get(opts, :scope, "")

    quote location: :keep do
      use Plug.Builder
      require Logger

      Module.register_attribute(__MODULE__, :maverick_routes, accumulate: true)
      Module.put_attribute(__MODULE__, :maverick_route_scope, unquote(scope))

      @on_definition Maverick
      @before_compile Maverick

      # Runs the plug pipeline, then invokes the route's handler function with
      # arguments decoded per the route's arg spec, and renders the result.
      def call(%Plug.Conn{private: %{maverick_route: route}} = conn, _opts) do
        conn = super(conn, route)
        arg = Maverick.Api.Generator.decode_arg_type(conn, route.args)
        response = apply(__MODULE__, route.function, [arg])
        Maverick.handle_response(response, conn)
      end
    end
  end

  # Compile-time hook: when a public function carries an @route attribute,
  # normalize its options and accumulate a %Maverick.Route{} for it.
  def __on_definition__(%Macro.Env{module: module}, :def, name, _args, _guards, _body) do
    route_info = Module.get_attribute(module, :route) || :no_route

    unless route_info == :no_route do
      scope = Module.get_attribute(module, :maverick_route_scope)

      path = Keyword.fetch!(route_info, :path)
      arg_type = Keyword.get(route_info, :args, :params) |> validate_arg_type()
      success_code = Keyword.get(route_info, :success, 200) |> parse_http_code()
      error_code = Keyword.get(route_info, :error, 404) |> parse_http_code()

      method =
        route_info
        |> Keyword.get(:method, "POST")
        |> to_string()
        |> String.upcase()

      # Join the module scope with the function's path, validate the result,
      # and pre-parse it into path nodes for request matching.
      raw_path =
        [scope, path]
        |> Enum.join("/")
        |> Maverick.Path.validate()

      path = Maverick.Path.parse(raw_path)

      Module.put_attribute(module, :maverick_routes, %Maverick.Route{
        module: module,
        function: name,
        args: arg_type,
        method: method,
        path: path,
        raw_path: raw_path,
        success_code: success_code,
        error_code: error_code
      })
    end

    # Clear @route so it cannot leak onto the next function definition.
    Module.delete_attribute(module, :route)
  end

  # Non-public or unsupported definitions: discard any stray @route attribute.
  def __on_definition__(env, _kind, _name, _args, _guards, _body) do
    route_info = Module.get_attribute(env.module, :route) || :no_route

    unless route_info == :no_route do
      Module.delete_attribute(env.module, :route)
    end
  end

  # Emits a sibling <Module>.Maverick.Router module exposing routes/0 with all
  # accumulated routes for this module.
  defmacro __before_compile__(env) do
    routes = Module.get_attribute(env.module, :maverick_routes, [])
    Module.delete_attribute(env.module, :maverick_routes)

    contents =
      quote do
        def routes() do
          unquote(Macro.escape(routes))
        end
      end

    env.module
    |> Module.concat(Maverick.Router)
    |> Module.create(contents, Macro.Env.location(__ENV__))

    []
  end

  # A handler returning a %Plug.Conn{} is passed through untouched; any other
  # term is converted via the Maverick.Response protocol, then re-checked.
  def handle_response(%Plug.Conn{} = conn, _) do
    conn
  end

  def handle_response(term, conn) do
    response = Maverick.Response.handle(term, conn)
    handle_response(response, conn)
  end

  # Accept HTTP codes as integers or numeric strings.
  defp parse_http_code(code) when is_integer(code), do: code

  defp parse_http_code(code) when is_binary(code) do
    {code, _} = Integer.parse(code)
    code
  end

  # Normalize required-param names to strings; :params and :conn pass through.
  defp validate_arg_type({:required_params, list}),
    do: {:required_params, Enum.map(list, &to_string/1)}

  defp validate_arg_type(:params), do: :params
  defp validate_arg_type(:conn), do: :conn
end
<|start_filename|>test/support/test_exception.ex<|end_filename|>
defmodule NoRedError do
  @moduledoc "Test exception carrying a custom `:error_code` field (HTTP 406)."
  defexception message: "no red!", error_code: 406
end
<|start_filename|>test/maverick_test.exs<|end_filename|>
defmodule MaverickTest do
  use ExUnit.Case

  # The generated <Route>.Maverick.Router.routes/0 must contain one
  # fully-populated %Maverick.Route{} per annotated public function.
  test "creates getters for annotated public functions" do
    assert %Maverick.Route{
             module: Maverick.TestRoute1,
             function: :multiply,
             method: "POST",
             path: ["route1", "multiply"],
             raw_path: "/route1/multiply",
             args: {:required_params, ["num1", "num2"]},
             error_code: 403,
             success_code: 200
           } in Maverick.TestRoute1.Maverick.Router.routes()

    # ":name" in the raw path becomes a {:variable, "name"} path node.
    assert %Maverick.Route{
             module: Maverick.TestRoute1,
             function: :hello,
             method: "GET",
             path: ["route1", "hello", {:variable, "name"}],
             raw_path: "/route1/hello/:name",
             args: :params,
             error_code: 404,
             success_code: 200
           } in Maverick.TestRoute1.Maverick.Router.routes()

    assert %Maverick.Route{
             module: Maverick.TestRoute2,
             function: :come_fly_with_me,
             method: "POST",
             path: ["route2", "fly", "me", "to", "the"],
             raw_path: "/route2/fly/me/to/the",
             args: :conn,
             error_code: 404,
             success_code: 200
           } in Maverick.TestRoute2.Maverick.Router.routes()

    assert %Maverick.Route{
             module: Maverick.TestRoute2,
             function: :current_time,
             method: "PUT",
             path: ["route2", "clock", "now"],
             raw_path: "/route2/clock/now",
             args: :params,
             error_code: 404,
             success_code: 200
           } in Maverick.TestRoute2.Maverick.Router.routes()
  end

  # Private functions and functions without an @route attribute must not
  # produce route entries.
  test "ignores invalid or unannotated functions" do
    route1_functions = Maverick.TestRoute1.Maverick.Router.routes()
    route2_functions = Maverick.TestRoute2.Maverick.Router.routes()

    refute function_member(route1_functions, :double)
    refute function_member(route1_functions, :interpolate)
    refute function_member(route2_functions, :upcase)
  end

  # True when any route entry points at the given function name.
  defp function_member(routes, function) do
    Enum.any?(routes, fn %{function: func} -> func == function end)
  end
end
<|start_filename|>lib/maverick/path.ex<|end_filename|>
defmodule Maverick.Path do
  @moduledoc """
  Provides functionality for parsing paths to lists of path
  nodes, identifying path variables for pattern matching on
  incoming requests.
  """

  @type path_node :: String.t() | {:variable, String.t()}
  @type path :: [path_node]
  @type raw_path :: String.t()

  import NimbleParsec

  @doc """
  Parse a path string to a list of path nodes. A path node is either
  a `String.t()` or a tuple of `{:variable, String.t()}`. Nodes
  beginning with a colon character (":") will parse to a variable
  tuple. At runtime, variable tuples are used to construct the
  path params portion of a Maverick request.
  """
  @spec parse(String.t()) :: path()
  def parse(string) do
    # A leading "/" is prepended so relative and absolute inputs parse alike.
    case parse_path("/" <> string) do
      {:ok, result, _, _, _, _} ->
        result

      {:error, label, path, _, _, _} ->
        raise __MODULE__.ParseError, message: label, path: path
    end
  end

  @doc """
  Reads a path string and validates as a Maverick-compatible path,
  including any colon (":") characters signifying a path variable.
  Strips any extraneous forward slashes from the result.
  """
  @spec validate(String.t()) :: raw_path()
  def validate(string) do
    case parse_raw_path("/" <> string) do
      {:ok, [result], _, _, _, _} ->
        "/" <> result

      {:error, label, path, _, _, _} ->
        raise __MODULE__.ParseError, message: label, path: path
    end
  end

  # Characters legal in a path segment: alphanumerics plus "-" and "_".
  url_file_safe_alphabet = [?A..?z, ?0..?9, ?-, ?_]

  # A path that is nothing but slashes (e.g. "/", "//") parses to [].
  root_slash = ignore(repeat(string("/"))) |> eos()

  # One or more "/" between segments, discarded from the result.
  separator = ignore(times(string("/"), min: 1))

  static = ascii_string(url_file_safe_alphabet, min: 1)

  # ":name" -> {:variable, "name"}; "-" is not allowed in variable names.
  variable =
    ignore(ascii_char([?:]))
    |> ascii_string(url_file_safe_alphabet -- [?-], min: 1)
    |> unwrap_and_tag(:variable)

  node =
    separator
    |> choice([
      variable,
      static
    ])

  path =
    choice([
      repeat(node) |> eos(),
      root_slash
    ])
    |> label("only legal characters")

  defparsecp(:parse_path, path)

  # Raw variant: keeps ":" inside segments and rejoins them with single "/"s,
  # which normalizes away duplicate slashes.
  raw_node =
    separator
    |> ascii_string(url_file_safe_alphabet ++ [?:], min: 1)

  raw_path =
    choice([
      repeat(raw_node) |> eos(),
      root_slash
    ])
    |> reduce({Enum, :join, ["/"]})
    |> label("only legal characters")

  defparsecp(:parse_raw_path, raw_path)

  defmodule ParseError do
    @moduledoc """
    The path could not be parsed due to illegal character(s)
    """

    defexception message: "expected only legal characters", path: []
  end
end
<|start_filename|>lib/maverick/api/supervisor.ex<|end_filename|>
defmodule Maverick.Api.Supervisor do
  @moduledoc false
  # Implements the main Maverick supervisor that orchestrates
  # the lifecycle of the Maverick.Api.Initializer and the Elli
  # server process.
  #
  # Validates the configuration for the server, including port and
  # SSL/TLS configuration as well as any names under which to register
  # the three processes and ensures the Initializer is started first
  # to allow for creation of the Elli callback Handler module.
  #
  # This module is not intended for direct consumption by the
  # application implementing Maverick and is instead intended to be
  # called indirectly from the module implementing `use Maverick.Api`

  use Supervisor

  @doc """
  Start the api supervisor, passing Api module, the `:otp_app` application
  name, and any options to configure the web server or the initializer.
  """
  def start_link(api, opts) do
    name = Keyword.get(opts, :supervisor_name, Module.concat(api, Supervisor))
    Supervisor.start_link(__MODULE__, {api, opts}, name: name)
  end

  @impl true
  def init({api, opts} = init_args) do
    # Defaults: port 4000, callback module <Api>.Handler, registered name
    # <Api>.Webserver (converted to Elli's expected name format below).
    port = Keyword.get(opts, :port, 4000)
    handler = Module.concat(api, Handler)

    name =
      opts
      |> Keyword.get(:name, Module.concat(api, Webserver))
      |> format_name()

    standard_config = [port: port, callback: handler, name: name]

    # SSL is enabled only when both :tls_certfile and :tls_keyfile are given.
    ssl_config =
      opts
      |> Keyword.take([:tls_certfile, :tls_keyfile])
      |> format_ssl_config()

    # The Initializer must come first: it compiles the Handler module the
    # Elli child's callback references.
    children = [
      {Maverick.Api.Initializer, init_args},
      %{
        id: :elli,
        start: {:elli, :start_link, [Keyword.merge(standard_config, ssl_config)]}
      }
    ]

    Supervisor.init(children, strategy: :one_for_one)
  end

  # Elli accepts {:local, atom}, {:global, term} or {:via, mod, term} names;
  # bare atoms are wrapped as {:local, atom}.
  defp format_name({:via, _, _} = name), do: name
  defp format_name({:global, _} = name), do: name
  defp format_name(name) when is_atom(name), do: {:local, name}

  defp format_ssl_config(tls_certfile: certfile, tls_keyfile: keyfile),
    do: [ssl: true, certfile: certfile, keyfile: keyfile]

  defp format_ssl_config(_), do: []
end
<|start_filename|>lib/mix/tasks/mvk.routes.ex<|end_filename|>
defmodule Mix.Tasks.Mvk.Routes do
  @moduledoc """
  Prints all routes for the default or a given Maverick Api

      #> mix mvk.routes
      #> mix mvk.routes MyApp.Alternative.Api

  The default router is drawn from the root name of the application
  (the `:app` key in your Mixfile) converted to Elixir "Module-case"
  and concatenated with `.Api` similar to the Ecto Repo convention
  of `MyApp.Repo` being used to name the module implementing `Ecto.Repo`.
  """
  use Mix.Task

  @doc false
  @impl true
  def run(args, app_base \\ app_base()) do
    Mix.Task.run("compile", args)

    # Either use the Api module passed on the command line, or derive it from
    # the app name (:my_app -> MyApp.Api).
    api =
      case OptionParser.parse(args, switches: []) do
        {_, [passed_api], _} ->
          Module.concat([passed_api])

        {_, [], _} ->
          app_base
          |> to_string()
          |> Macro.camelize()
          |> Module.concat("Api")
      end

    # The app must be running for list_routes/0; stop it again afterwards.
    :application.ensure_started(app_base)
    routes = api.list_routes() |> stringify_routes()
    :application.stop(app_base)

    column_widths = column_widths(routes)

    routes
    |> Enum.map_join("", &format_route(&1, column_widths))
    |> (fn print_routes -> "\n" <> print_routes end).()
    |> Mix.shell().info()
  end

  defp app_base() do
    Mix.Project.config()
    |> Keyword.fetch!(:app)
  end

  # Convert each route's fields to display strings up front so width
  # calculation and formatting operate on the same values.
  defp stringify_routes(routes) do
    Enum.map(routes, fn route ->
      %Maverick.Route{function: func, method: method, module: mod, args: args, raw_path: path} =
        route

      %{
        function: inspect(func),
        method: method,
        module: inspect(mod),
        args: inspect(args),
        path: path
      }
    end)
  end

  # Widest value per column ({method, path, module, function}) so the table
  # lines up.
  defp column_widths(routes) do
    Enum.reduce(routes, {0, 0, 0, 0}, fn route, acc ->
      %{function: func, method: method, module: mod, path: path} = route
      {method_len, path_len, mod_len, func_len} = acc

      {
        max(method_len, String.length(method)),
        max(path_len, String.length(path)),
        max(mod_len, String.length(mod)),
        max(func_len, String.length(func))
      }
    end)
  end

  # One right-aligned method column followed by left-aligned path/module/
  # function/args columns, newline-terminated.
  defp format_route(route, column_widths) do
    %{args: args, function: func, method: method, module: mod, path: path} = route
    {method_len, path_len, mod_len, func_len} = column_widths

    String.pad_leading(method, method_len) <>
      "  " <>
      String.pad_trailing(path, path_len) <>
      "  " <>
      String.pad_trailing(mod, mod_len) <>
      "  " <>
      String.pad_trailing(func, func_len) <>
      "  " <>
      args <> "\n"
  end
end
<|start_filename|>test/support/test_routes.ex<|end_filename|>
defmodule Maverick.TestRoute1 do
  # Test fixture: annotated functions exercised by the integration tests.
  use Maverick, scope: "/route1"
  # Requires :num1 and :num2 params; a product of 50 takes the error
  # path, exercising the custom `error: 403` code.
  @route path: "multiply", args: {:required_params, [:num1, :num2]}, error: 403
  def multiply(%{"num1" => num1, "num2" => num2}) do
    case num1 * num2 do
      50 -> {:error, "illegal operation"}
      prod -> %{product: prod}
    end
  end
  # NOTE(review): @route on a defp — presumably verifies that private
  # functions are never turned into routes; confirm against Maverick's
  # route-collection rules before changing.
  @route path: "wrong"
  defp double(%{"num" => num}), do: num * 2
  # Unannotated public function; not routed.
  def interpolate(arg) do
    double(%{"num" => 2})
    "The cat sat on the #{arg}"
  end
  # The :name path segment is bound into the params map.
  @route path: "hello/:name", method: :get
  def hello(%{"name" => name}), do: "Hi there " <> name
  # "red" raises to exercise custom exception handling (NoRedError).
  @route path: "color_match"
  def color_match(%{"color" => "red"}) do
    raise NoRedError
  end
  def color_match(%{"color" => color}) do
    color_matches = %{
      "green" => "light_blue",
      "yellow" => "dark_blue",
      "brown" => "indigo",
      "orange" => "purple",
      "blue" => "light_green",
      "red" => "something"
    }
    # Unknown colors fall back to "black".
    match =
      case Map.get(color_matches, color) do
        nil -> "black"
        match -> match
      end
    %{"match" => match}
  end
end
defmodule Maverick.TestRoute2 do
  # Test fixture: routes under the "/route2" scope.
  use Maverick, scope: "/route2"
  # args: :conn hands the handler the full conn instead of just params.
  @route path: "fly/me/to/the", args: :conn
  def come_fly_with_me(conn) do
    destination = Enum.random(["moon", "mars", "stars"])
    response_header =
      conn.req_headers
      |> Map.new()
      |> Map.update("Space-Rocket", "BLASTOFF", fn val -> String.upcase(val) end)
      |> Map.drop(["Content-Length"])
    # 3-tuple return: {status, extra_headers, body} — here the :ok
    # status atom, extra response headers, and a JSON-encodable body.
    {:ok, response_header, %{"destination" => destination}}
  end
  @route path: "clock/now", method: :put
  def current_time(%{"timezone" => timezone}) do
    {:ok, time} = DateTime.now(timezone)
    time
  end
  # A macro, not a function — should never be picked up as a route.
  defmacro upcase(string) do
    quote do
      String.upcase(unquote(string))
    end
  end
end
<|start_filename|>lib/maverick/route.ex<|end_filename|>
defmodule Maverick.Route do
  @moduledoc """
  A struct detailing a Maverick Route. The
  contents are determined at compile time
  by the annotations applied to routable functions.
  Maverick uses the routes to construct request
  handlers for each routable function at runtime.
  """
  # NOTE(review): routes elsewhere in this project also use `args: :conn`;
  # confirm whether :request in this type is stale.
  @type args :: :params | :request | {:required_params, [atom()]}
  @type success_code :: non_neg_integer()
  @type error_code :: non_neg_integer()
  @type method :: binary()
  @type t :: %__MODULE__{
          args: args(),
          error_code: error_code(),
          function: atom(),
          method: method(),
          module: module(),
          path: Maverick.Path.path(),
          raw_path: Maverick.Path.raw_path(),
          success_code: success_code()
        }
  defstruct [
    :args,
    :error_code,
    :function,
    :method,
    :module,
    :path,
    :raw_path,
    :success_code
  ]
  @doc """
  Takes an OTP app name and a root scope and returns a
  list of all routes the app defines as %__MODULE__ structs.
  """
  @spec list_routes(Maverick.otp_app(), Maverick.root_scope()) :: [t()]
  def list_routes(otp_app, root_scope) do
    otp_app
    |> :application.get_key(:modules)
    |> filter_router_modules()
    |> collect_route_info()
    |> prepend_root_scope(root_scope)
  end
  # Keeps only the ".Maverick.Router" modules generated by `use Maverick`.
  defp filter_router_modules({:ok, modules}) do
    Enum.filter(modules, fn module ->
      module
      |> to_string()
      |> String.ends_with?(".Maverick.Router")
    end)
  end
  # Each router module exposes routes/0; gather them all, preserving order.
  # flat_map replaces the previous `acc ++ routes` reduce, which rebuilt
  # the accumulator list on every step (accidental O(n^2)).
  defp collect_route_info(modules) do
    Enum.flat_map(modules, &apply(&1, :routes, []))
  end
  # Prefixes every route's parsed path and raw path with the root scope.
  defp prepend_root_scope(routes, root_scope) do
    root_path = Maverick.Path.parse(root_scope)
    # A bare "/" root contributes nothing to the raw path string.
    root_raw_path =
      case root_scope do
        "/" -> ""
        _ -> Maverick.Path.validate(root_scope)
      end
    Enum.map(routes, fn %Maverick.Route{path: path, raw_path: raw_path} = route ->
      %Maverick.Route{
        route
        | path: root_path ++ path,
          raw_path: root_raw_path <> raw_path
      }
    end)
  end
end
<|start_filename|>test/maverick/api_test.exs<|end_filename|>
defmodule Maverick.ApiTest do
  use ExUnit.Case, async: true
  import Maverick.Test.Helpers
  @host "http://localhost:4000"
  @headers [{"content-type", "application/json"}]
  describe "serves the handled routes" do
    setup do
      # Boot the test Api behind Cowboy for real HTTP round-trips.
      start_supervised!(
        {Plug.Cowboy, scheme: :http, plug: Maverick.TestApi, options: [port: 4000]}
      )
      :ok
    end
    test "GET request with empty body" do
      resp = :hackney.get("#{@host}/api/v1/route1/hello/steve")
      assert 200 == resp_code(resp)
      assert resp_content_type(resp)
      assert "Hi there steve" == resp_body(resp)
    end
    test "POST request with custom error code" do
      body = %{num1: 2, num2: 3} |> Jason.encode!()
      resp = :hackney.post("#{@host}/api/v1/route1/multiply", @headers, body)
      assert 200 == resp_code(resp)
      assert resp_content_type(resp)
      assert %{"product" => 6} == resp_body(resp)
    end
    test "POST request that handles the complete Request struct" do
      resp =
        :hackney.post(
          "#{@host}/api/v1/route2/fly/me/to/the",
          @headers ++ [{"space-rocket", "brrr"}],
          ""
        )
      %{"destination" => destination} = resp |> resp_body()
      assert 200 == resp_code(resp)
      assert resp_content_type(resp)
      # The handler echoes request headers back as response headers.
      assert {"space-rocket", "brrr"} in resp_headers(resp)
      assert destination in ["moon", "mars", "stars"]
    end
    test "PUT requests with query params" do
      resp = :hackney.put("#{@host}/api/v1/route2/clock/now?timezone=Etc/UTC", @headers)
      {:ok, %DateTime{} = time, _} = resp |> resp_body() |> DateTime.from_iso8601()
      assert 200 == resp_code(resp)
      assert resp_content_type(resp)
      assert :lt == DateTime.compare(time, DateTime.utc_now())
    end
  end
  describe "supplies error results" do
    setup do
      start_supervised!(
        {Plug.Cowboy, scheme: :http, plug: Maverick.TestApi, options: [port: 4000]}
      )
      :ok
    end
    test "handles unexpected routes" do
      resp =
        :hackney.post(
          "#{@host}/api/v1/route1/gimme/that/data",
          @headers,
          %{"magic_word" => "please"} |> Jason.encode!()
        )
      assert 404 == resp_code(resp)
      assert resp_content_type(resp)
      assert %{"error_code" => 404, "error_message" => "Not Found"} == resp_body(resp)
    end
    test "handles error tuples from internal functions" do
      # num1 * num2 == 50 triggers the {:error, _} branch in the route.
      body = %{num1: 25, num2: 2} |> Jason.encode!()
      resp = :hackney.post("#{@host}/api/v1/route1/multiply", @headers, body)
      assert 403 == resp_code(resp)
      assert resp_content_type(resp)
      assert %{"error_code" => 403, "error_message" => "illegal operation"} == resp_body(resp)
    end
  end
  # Extracts the HTTP status from a hackney response tuple.
  defp resp_code({:ok, status_code, _headers, _ref}), do: status_code
  defp resp_headers({:ok, _status_code, headers, _ref}), do: headers
  # Finds a single response header tuple by key.
  defp resp_header({:ok, _, headers, _}, key) do
    Enum.find(headers, fn {k, _} -> k == key end)
  end
  # Reads and JSON-decodes the response body.
  defp resp_body({:ok, _status_code, _headers, ref}) do
    {:ok, body} = :hackney.body(ref)
    Jason.decode!(body)
  end
  # Asserts the response declared a JSON content type.
  defp resp_content_type(resp) do
    case resp_header(resp, "content-type") do
      nil ->
        flunk("Content-type is not set")
      {_, content_type} ->
        assert response_content_type?(content_type, :json)
    end
  end
end
<|start_filename|>test/maverick/response_test.exs<|end_filename|>
defmodule Maverick.ResponseTest do
  use Maverick.ConnCase, async: true
  use Plug.Test
  setup do
    # Every conn carries a route in its private map; the Response
    # protocol reads success_code/error_code from it.
    route = %Maverick.Route{success_code: 200, error_code: 403}
    [
      route: route,
      conn: conn(:get, "/") |> Plug.Conn.put_private(:maverick_route, route)
    ]
  end
  test "a raw map is json encoded with success response", ctx do
    response =
      Maverick.Response.handle(%{one: 1}, ctx.conn)
      |> json_response(200)
    assert %{"one" => 1} == response
  end
  test "a raw string is json encoded with success response", ctx do
    response =
      Maverick.Response.handle("hello world", ctx.conn)
      |> json_response(200)
    assert "hello world" == response
  end
  test "an ok tuple with json encode the term with success response", ctx do
    response =
      Maverick.Response.handle({:ok, %{one: 1}}, ctx.conn)
      |> json_response(200)
    assert %{"one" => 1} == response
  end
  test "a 3 element tuple controle status, headers and response explicitly", ctx do
    conn = Maverick.Response.handle({202, [{"key", "value"}], %{one: 1}}, ctx.conn)
    response = json_response(conn, 202)
    assert %{"one" => 1} == response
    assert ["value"] = Plug.Conn.get_resp_header(conn, "key")
  end
  test "a error tuple json encodes reason with error response", ctx do
    response =
      Maverick.Response.handle({:error, "bad stuff"}, ctx.conn)
      |> json_response(403)
    assert %{"error_code" => 403, "error_message" => "bad stuff"} == response
  end
  test "an error exception tuple triggers Maverick.Exception protocol", ctx do
    # ArgumentError has no :error_code field, so the default impl uses 500.
    exception = ArgumentError.exception(message: "argument is bad")
    response =
      Maverick.Response.handle({:error, exception}, ctx.conn)
      |> json_response(500)
    assert %{"error_code" => 500, "error_message" => "argument is bad"} == response
  end
end
<|start_filename|>mix.exs<|end_filename|>
defmodule Maverick.MixProject do
  use Mix.Project
  # Project coordinates shared by project/0, package/0 and docs/0.
  @name "Maverick"
  @version "0.2.0"
  @repo "https://github.com/jeffgrunewald/maverick"
  def project do
    [
      app: :maverick,
      name: @name,
      version: @version,
      elixir: "~> 1.10",
      description: "Web API framework with a need for speed",
      homepage_url: @repo,
      source_url: @repo,
      package: package(),
      start_permanent: Mix.env() == :prod,
      elixirc_paths: elixirc_paths(Mix.env()),
      deps: deps(),
      dialyzer: dialyzer(),
      docs: docs()
    ]
  end
  def application do
    [
      extra_applications: [:logger]
    ]
  end
  defp deps do
    [
      {:plug, "~> 1.12"},
      {:jason, "~> 1.2"},
      {:nimble_parsec, "~> 1.1", optional: true},
      {:dialyxir, "~> 1.1", only: [:dev], runtime: false},
      {:hackney, "~> 1.17", only: :test},
      {:plug_cowboy, "~> 2.5", only: :test},
      {:ex_doc, "~> 0.25", only: :dev, runtime: false}
    ]
  end
  # Test builds also compile the fixtures under test/support.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]
  defp dialyzer() do
    [
      plt_add_apps: [:mix],
      # Cache the PLT per Elixir version to avoid rebuilds.
      plt_file: {:no_warn, ".dialyzer/#{System.version()}.plt"}
    ]
  end
  # NOTE(review): "Apache 2.0" is not a valid SPDX identifier
  # ("Apache-2.0" is); Hex may warn on publish — confirm before release.
  defp package do
    %{
      licenses: ["Apache 2.0"],
      maintainers: ["<NAME>"],
      links: %{"GitHub" => @repo}
    }
  end
  defp docs do
    [
      logo: "assets/maverick-logo.png",
      source_ref: "v#{@version}",
      source_url: @repo,
      main: @name
    ]
  end
end
<|start_filename|>test/support/test_api.ex<|end_filename|>
defmodule Maverick.TestApi do
  # Minimal Api fixture: collects all routes in :maverick under "/api/v1".
  use Maverick.Api, otp_app: :maverick, root_scope: "/api/v1"
end
<|start_filename|>test/support/conn_case.ex<|end_filename|>
defmodule Maverick.ConnCase do
  # Case template that imports the Plug.Conn response helpers into any
  # test module that does `use Maverick.ConnCase`.
  use ExUnit.CaseTemplate
  using do
    quote do
      import Maverick.Test.Helpers
    end
  end
end
defmodule Maverick.Test.Helpers do
  # Assertion helpers for inspecting %Plug.Conn{} responses.
  require ExUnit.Assertions
  # Returns the response body when the conn's status matches `given`
  # (an integer or status atom); raises with a diagnostic otherwise.
  def response(%Plug.Conn{status: status, resp_body: body}, given) do
    given = Plug.Conn.Status.code(given)
    if given == status do
      body
    else
      raise "expected response with status #{given}, got: #{status}, with body:\n#{inspect(body)}"
    end
  end
  # Asserts status and JSON content type, then decodes the body.
  def json_response(conn, status) do
    body = response(conn, status)
    _ = response_content_type(conn, :json)
    Jason.decode!(body)
  end
  # Returns the single content-type header matching `format`, raising if
  # it is missing, duplicated, or of the wrong type.
  def response_content_type(conn, format) when is_atom(format) do
    case Plug.Conn.get_resp_header(conn, "content-type") do
      [] ->
        raise "no content-type was set, expected a #{format} response"
      [h] ->
        if response_content_type?(h, format) do
          h
        else
          raise "expected content-type for #{format}, got: #{inspect(h)}"
        end
      [_ | _] ->
        raise "more than one content-type was set, expected a #{format} response"
    end
  end
  # True when a content-type header value matches the format atom, either
  # via MIME extension lookup, exact subtype, or a "+format" suffix.
  def response_content_type?(header, format) do
    case parse_content_type(header) do
      {part, subpart} ->
        format = Atom.to_string(format)
        format in MIME.extensions(part <> "/" <> subpart) or
          format == subpart or String.ends_with?(subpart, "+" <> format)
      _ ->
        false
    end
  end
  # Splits "type/subtype; params" into {type, subtype}; false on parse failure.
  defp parse_content_type(header) do
    case Plug.Conn.Utils.content_type(header) do
      {:ok, part, subpart, _params} ->
        {part, subpart}
      _ ->
        false
    end
  end
end
<|start_filename|>test/maverick/exception_test.exs<|end_filename|>
defmodule Maverick.ExceptionTest do
  use ExUnit.Case
  import Maverick.Test.Helpers
  @host "http://localhost:4000"
  @headers [{"content-type", "application/json"}]
  setup_all do
    # One shared Cowboy instance for all exception-path tests.
    start_supervised!({Plug.Cowboy, scheme: :http, plug: Maverick.TestApi, options: [port: 4000]})
    :ok
  end
  describe "handles exceptions" do
    test "default fallback impl for unexpected exceptions" do
      # A string operand makes the handler raise ArithmeticError.
      bad_body = %{num1: 2, num2: "three"} |> Jason.encode!()
      resp = :hackney.post("#{@host}/api/v1/route1/multiply", @headers, bad_body)
      assert 500 == resp_code(resp)
      assert %{
               "error_code" => 500,
               "error_message" => "bad argument in arithmetic expression"
             } == resp_body(resp)
    end
    test "known exception type in request handling" do
      # Form-encoded bodies aren't parseable; exercises the
      # UnsupportedMediaTypeError implementation (400).
      resp =
        :hackney.post(
          "#{@host}/api/v1/route2/fly/me/to/the",
          [{"Content-Type", "application/x-www-form-urlencoded"}],
          "field1=value1&field2=value2"
        )
      assert 400 == resp_code(resp)
      assert resp_content_type(resp)
      assert %{
               "error_code" => 400,
               "error_message" => "Unsupported media type: application/x-www-form-urlencoded"
             } == resp_body(resp)
    end
    test "custom exception handling" do
      # "red" makes the route raise NoRedError with its own 406 code.
      illegal_body = %{"color" => "red"} |> Jason.encode!()
      resp = :hackney.post("#{@host}/api/v1/route1/color_match", @headers, illegal_body)
      assert 406 = resp_code(resp)
      assert %{"error_code" => 406, "error_message" => "no red!"} == resp_body(resp)
    end
  end
  # Extracts the HTTP status from a hackney response tuple.
  defp resp_code({:ok, status_code, _headers, _ref}), do: status_code
  # Finds a single response header tuple by key.
  defp resp_header({:ok, _, headers, _}, key) do
    Enum.find(headers, fn {k, _} -> k == key end)
  end
  # Reads and JSON-decodes the response body.
  defp resp_body({:ok, _status_code, _headers, ref}) do
    {:ok, body} = :hackney.body(ref)
    Jason.decode!(body)
  end
  # Asserts the response declared a JSON content type.
  defp resp_content_type(resp) do
    case resp_header(resp, "content-type") do
      nil ->
        flunk("Content-type is not set")
      {_, content_type} ->
        assert response_content_type?(content_type, :json)
    end
  end
end
<|start_filename|>lib/maverick/exception.ex<|end_filename|>
defmodule Maverick.Exception.Default do
  # Injects default implementations of the Maverick.Exception protocol
  # callbacks; each is overridable so a defimpl can replace only the
  # pieces it needs.
  defmacro __using__(_opts) do
    quote do
      # Honor an integer :error_code field when the exception carries one;
      # anything else is treated as an internal server error.
      def error_code(%{error_code: error_code}) when is_integer(error_code), do: error_code
      def error_code(_), do: 500
      def message(t), do: Exception.message(t)
      # Sends a JSON error payload built from the exception's code and message.
      def handle(t, conn) do
        status = error_code(t)
        response =
          %{
            error_code: status,
            error_message: message(t)
          }
          |> Jason.encode!()
        conn
        |> Plug.Conn.put_resp_content_type("application/json")
        |> Plug.Conn.send_resp(status, response)
      end
      defoverridable(error_code: 1, message: 1, handle: 2)
    end
  end
end
defprotocol Maverick.Exception do
  @moduledoc """
  Protocol for translating exceptions into HTTP error responses.
  Types without a dedicated implementation fall back to Any, which
  uses the defaults injected by `Maverick.Exception.Default`.
  """
  @fallback_to_any true
  @spec error_code(t) :: 100..999
  def error_code(t)
  @spec message(t) :: String.t()
  def message(t)
  @spec handle(t, Plug.Conn.t()) :: Plug.Conn.t()
  def handle(t, conn)
end
# Fallback for any exception without a dedicated implementation.
defimpl Maverick.Exception, for: Any do
  use Maverick.Exception.Default
end
# Body-parsing failures are client errors (400), not server faults.
defimpl Maverick.Exception, for: Plug.Parsers.UnsupportedMediaTypeError do
  use Maverick.Exception.Default
  def error_code(_), do: 400
  def message(exception) do
    "Unsupported media type: #{exception.media_type}"
  end
end
<|start_filename|>lib/maverick/response.ex<|end_filename|>
defprotocol Maverick.Response do
  @moduledoc """
  Protocol for turning a route handler's return value into a
  %Plug.Conn{} response; unrecognized terms fall back to the Any impl.
  """
  @fallback_to_any true
  def handle(t, conn)
end
defimpl Maverick.Response, for: Any do
  # Any bare term is JSON-encoded and set with the route's success code.
  def handle(term, %Plug.Conn{private: %{maverick_route: route}} = conn) do
    conn
    |> Plug.Conn.put_resp_content_type("application/json")
    |> Plug.Conn.resp(route.success_code, Jason.encode!(term))
  end
end
defimpl Maverick.Response, for: Tuple do
  # {:ok, term} delegates to the wrapped term's own implementation.
  def handle({:ok, term}, conn) do
    Maverick.Response.handle(term, conn)
  end
  # {status, headers, term} sets the status and extra headers explicitly.
  def handle({status, headers, term}, conn) do
    conn
    |> Plug.Conn.put_resp_content_type("application/json")
    |> add_headers(headers)
    |> Plug.Conn.resp(status, Jason.encode!(term))
  end
  # Exceptions inside error tuples route through the Exception protocol.
  def handle({:error, exception}, conn) when is_exception(exception) do
    Maverick.Exception.handle(exception, conn)
  end
  # Any other error reason becomes a JSON error body with the route's
  # error code.
  # NOTE(review): this clause uses send_resp/3 while the success clauses
  # use resp/3 (set without sending) — presumably intentional; confirm.
  def handle({:error, error_message}, %Plug.Conn{private: %{maverick_route: route}} = conn) do
    response =
      %{error_code: route.error_code, error_message: error_message}
      |> Jason.encode!()
    conn
    |> Plug.Conn.put_resp_content_type("application/json")
    |> Plug.Conn.send_resp(route.error_code, response)
  end
  # Folds each {key, value} pair into the conn's response headers.
  defp add_headers(conn, headers) do
    Enum.reduce(headers, conn, fn {key, value}, conn ->
      Plug.Conn.put_resp_header(conn, key, value)
    end)
  end
end
<|start_filename|>lib/maverick/api/generator.ex<|end_filename|>
defmodule Maverick.Api.Generator do
  @moduledoc false
  # Builds the Plug.Router module for an Api at runtime from the routes
  # collected at compile time.
  def generate_router(api) do
    build_router_module(api)
  end
  defp build_router_module(api) do
    contents =
      quote location: :keep do
        use Plug.Router
        require Logger
        plug(:match)
        plug(Plug.Parsers, parsers: [:json], pass: ["text/*"], json_decoder: Jason)
        plug(:dispatch)
        # One match clause per annotated route, injected below.
        unquote(generate_match_functions(api.list_routes()))
        # Catch-all: anything unmatched becomes a JSON 404.
        match _ do
          response =
            %{error_code: 404, error_message: "Not Found"}
            |> Jason.encode!()
          var!(conn)
          |> put_resp_content_type("application/json", nil)
          |> send_resp(404, response)
        end
      end
    # Define (or redefine) the router under the Api's namespace.
    api.router()
    |> Module.create(contents, Macro.Env.location(__ENV__))
  end
  # Emits a get/post/put/... macro call per route that stashes the route
  # struct in the conn's private map and dispatches to the route's module.
  defp generate_match_functions(routes) do
    for %Maverick.Route{
          args: _arg_type,
          function: _function,
          method: method,
          module: module,
          raw_path: path,
          success_code: _success,
          error_code: _error
        } = route <-
          routes do
      # e.g. "GET" -> :get, matching Plug.Router's route macros.
      method_macro = method |> String.downcase() |> String.to_atom()
      escaped_route = Macro.escape(route)
      result =
        quote location: :keep do
          unquote(method_macro)(unquote(path)) do
            var!(conn) = merge_private(var!(conn), maverick_route: unquote(escaped_route))
            apply(unquote(module), :call, [var!(conn), []])
          end
        end
      result
    end
  end
  # Chooses what the handler receives: the whole conn, or just its params.
  # NOTE(review): not referenced within this file — presumably invoked by
  # generated code elsewhere; confirm before removing.
  def decode_arg_type(conn, :conn) do
    conn
  end
  def decode_arg_type(conn, _) do
    Map.get(conn, :params)
  end
end
<|start_filename|>lib/maverick/api.ex<|end_filename|>
defmodule Maverick.Api do
  @moduledoc """
  Provides the entrypoint for configuring and managing the
  implementation of Maverick in an application by a single
  `use/2` macro that provides a supervision tree `start_link/1`
  and `child_spec/1` for adding Maverick as a child of the
  top-level application supervisor.
  The Api module implementing `use Maverick.Api`, when started,
  will orchestrate the start of the process that does the heavy
  lifting of compiling function routes into a callback Handler
  module at application boot and then handing off to the Elli
  webserver configured to route requests by way of that Handler module.
  ## `use Maverick.Api` options
    * `:otp_app` - The name of the application implementing Maverick
      as an atom (required).
  ## `Maverick.Api` child_spec and start_link options
    * `:init_name` - The name the Initializer should register as.
      Primarily for logging and debugging, as the process should exit
      immediately with a `:normal` status if successful. May be any
      valid GenServer name.
    * `:supervisor_name` - The name the Maverick supervisor process
      should register as. May be any valid GenServer name.
    * `:name` - The name the Elli server process should register as.
      May be any valid GenServer name.
    * `:port` - The port number the webserver will listen on. Defaults
      to 4000.
    * `:tls_certfile` - The path to the PEM-encoded SSL/TLS certificate
      file to encrypt requests and responses.
    * `:tls_keyfile` - The path to the PEM-encoded SSL/TLS key file to
      encrypt requests and responses.
  """
  defmacro __using__(opts) do
    quote location: :keep, bind_quoted: [opts: opts] do
      use Plug.Builder
      require Logger
      # Fail fast if the using module doesn't name its OTP app.
      @otp_app Keyword.fetch!(opts, :otp_app)
      @root_scope opts |> Keyword.get(:root_scope, "/")
      # The generated router lives under the Api's namespace.
      @router Module.concat(__MODULE__, Router)
      def child_spec(opts) do
        %{
          id: __MODULE__,
          start: {__MODULE__, :start_link, [opts]},
          type: :supervisor
        }
      end
      def start_link(opts \\ []) do
        Maverick.Api.Supervisor.start_link(__MODULE__, opts)
      end
      # All routes the app defines, prefixed with the root scope.
      def list_routes(), do: Maverick.Route.list_routes(@otp_app, @root_scope)
      def router() do
        @router
      end
      # Builds the router module, then delegates init to it.
      def init(opts) do
        Maverick.Api.Generator.generate_router(__MODULE__)
        apply(@router, :init, [opts])
      end
      # Runs this module's own plugs, then the generated router; any
      # raise is routed through the Maverick.Exception protocol.
      def call(conn, opts) do
        conn = super(conn, opts)
        apply(@router, :call, [conn, opts])
      rescue
        exception ->
          handle_exception(conn, exception)
      end
      # Unwrap Plug's wrapper to recover the original conn and reason.
      defp handle_exception(_conn, %Plug.Conn.WrapperError{conn: conn, reason: exception}) do
        handle_exception(conn, exception)
      end
      # Erlang errors (e.g. :badarith) arrive as atoms; normalize them
      # into exception structs first.
      defp handle_exception(conn, error) when is_atom(error) do
        exception = Exception.normalize(:error, error)
        handle_exception(conn, exception)
      end
      defp handle_exception(conn, exception) do
        Maverick.Exception.handle(exception, conn)
      end
    end
  end
end
<|start_filename|>test/maverick/path_test.exs<|end_filename|>
defmodule Maverick.PathTest do
  use ExUnit.Case
  describe "parse" do
    test "parses static url" do
      assert ["api", "users"] == Maverick.Path.parse("/api/users")
    end
    # ":name" segments become {:variable, name} tuples.
    test "parses url with variables" do
      assert ["api", "users", {:variable, "id"}, "comments", {:variable, "comment_id"}] ==
               Maverick.Path.parse("/api/users/:id/comments/:comment_id")
    end
    test "parses the web root to an empty list" do
      assert [] == Maverick.Path.parse("/")
    end
    test "parses all allowed static path characters" do
      assert ["ApI", "v1", "2bar", "Hello-World_7", "-baz_"] ==
               Maverick.Path.parse("/ApI/v1/2bar/Hello-World_7/-baz_")
    end
    test "parses all allowed variable characters" do
      assert ["users", {:variable, "id1"}, "stuff", {:variable, "first_Name"}] ==
               Maverick.Path.parse("/users/:id1/stuff/:first_Name")
    end
  end
  describe "validate" do
    # validate/1 returns a normalized raw path string.
    test "validates a static path" do
      assert "/api/v1/users/list" == Maverick.Path.validate("/api/v1/users/list")
    end
    test "validates a path with variables" do
      assert "/api/v1/users/:user_id/comments" ==
               Maverick.Path.validate("/api/v1/users/:user_id/comments")
    end
    test "strips extra forward slashes" do
      assert "/api/v1/users/list" == Maverick.Path.validate("api/v1////users//list")
    end
  end
  describe "invalid paths" do
    test "errors on invalid characters" do
      assert_raise Maverick.Path.ParseError, "expected only legal characters", fn ->
        Maverick.Path.parse("/api/v?/foo")
      end
      # Hyphens are legal in static segments but not in variable names.
      assert_raise Maverick.Path.ParseError, "expected only legal characters", fn ->
        Maverick.Path.parse("/users/:user-id")
      end
    end
  end
end
<|start_filename|>test/test_helper.exs<|end_filename|>
# Suppress redefinition warnings when test modules are compiled repeatedly.
Code.compiler_options(ignore_module_conflict: true)
# Capture log output during tests; tag tests :skip to exclude them.
ExUnit.start(capture_log: true, exclude: [:skip])
| jeffgrunewald/goose |
<|start_filename|>server/plugins/builder/pages/index.js<|end_filename|>
const EngineBasePage = require('digital-form-builder-engine/page')
/**
 * Engine page subclass that attaches logging onPostHandler extensions
 * to both the GET and POST routes the engine generates for each page.
 */
class Page extends EngineBasePage {
  // Route options merged into the engine's GET route definition.
  get getRouteOptions () {
    return {
      ext: {
        onPostHandler: {
          method: (request, h) => {
            // Logging only; the request continues unchanged.
            console.log(`GET onPostHandler ${this.path}`)
            return h.continue
          }
        }
      }
    }
  }
  // Route options merged into the engine's POST route definition.
  get postRouteOptions () {
    return {
      ext: {
        onPostHandler: {
          method: (request, h) => {
            // Logging only; the request continues unchanged.
            console.log(`POST onPostHandler ${this.path}`)
            return h.continue
          }
        }
      }
    }
  }
}
module.exports = Page
<|start_filename|>test/cases/textfield.js<|end_filename|>
const Lab = require('lab')
const { expect } = require('code')
const cheerio = require('cheerio')
const createServer = require('../create-server')
const lab = exports.lab = Lab.script()
// Form definition fixture containing a single text field page.
const data = require('./textfield.json')
lab.experiment('Text field', () => {
  let server
  lab.before(async () => {
    server = await createServer(data)
  })
  lab.test('GET /text-field', async () => {
    const options = {
      method: 'GET',
      url: '/text-field'
    }
    const response = await server.inject(options)
    expect(response.statusCode).to.equal(200)
    expect(response.headers['content-type']).to.include('text/html')
    // Inspect the rendered GOV.UK markup.
    const $ = cheerio.load(response.payload)
    // Title
    expect($('title').text()).to.equal('Full name')
    // Label / Heading
    expect($('label.govuk-label--xl').text().trim()).to.equal('Full name')
    // Hint
    expect($('span#fullName-hint.govuk-hint').text().trim()).to.equal('Some help please...')
    // Input
    expect($('input#fullName.govuk-input').length).to.equal(1)
  })
  lab.test('POST /text-field', async () => {
    const options = {
      method: 'POST',
      url: '/text-field',
      payload: {}
    }
    const response = await server.inject(options)
    // NOTE(review): only the status is asserted; the commented-out lines
    // suggest a redirect to /full-name was once expected — confirm intent.
    expect(response.statusCode).to.equal(200)
    // expect(response.headers).to.include('location')
    // expect(response.headers.location).to.equal('/full-name')
  })
})
<|start_filename|>server/plugins/builder/pages/start-date.js<|end_filename|>
const joi = require('joi')
const Page = require('.')

/**
 * Page whose date component must fall between today and 28 days from now.
 */
class StartDatePage extends Page {
  get stateSchema () {
    const keys = this.components.getStateSchemaKeys()
    const name = this.components.formItems[0].name
    const max = new Date()
    max.setDate(max.getDate() + 28)
    // Extend the key to validate that the date is
    // greater than today and less than today+28 days.
    // Pass the Date instance directly instead of a hand-built
    // "M-D-YYYY" string: string parsing by the Date constructor is
    // implementation-dependent, while a Date object is unambiguous.
    keys[name] = keys[name].min('now').max(max)
    return joi.object().keys(keys)
  }
}

module.exports = StartDatePage
<|start_filename|>server/views/summary.html<|end_filename|>
{% from "partials/summary-detail.html" import summaryDetail %}
{% extends 'layout.html' %}
{# Check-your-answers page: one summary table per section, plus a dump
   of any validation error object for debugging. #}
{% block content %}
<div class="govuk-main-wrapper">
  <div class="govuk-grid-row">
    <div class="govuk-grid-column-two-thirds">
      {% for detail in details %}
        {{ summaryDetail(detail) }}
      {% endfor %}
      <pre>{{ result.error | dump(2) | safe }}</pre>
    </div>
  </div>
</div>
{% endblock %}
<|start_filename|>test/cases/basic.js<|end_filename|>
const Lab = require('lab')
const { expect } = require('code')
const cheerio = require('cheerio')
// const HtmlHelper = require('../html-helper')
const createServer = require('../create-server')
const lab = exports.lab = Lab.script()
// Minimal two-page form definition fixture.
const data = require('./basic.json')
lab.experiment('Basic', () => {
  let server
  // Create server before each test
  lab.before(async () => {
    server = await createServer(data)
  })
  lab.test('GET /', async () => {
    const options = {
      method: 'GET',
      url: '/'
    }
    const response = await server.inject(options)
    expect(response.statusCode).to.equal(200)
    expect(response.headers['content-type']).to.include('text/html')
    // Inspect the rendered intro page markup.
    const $ = cheerio.load(response.payload)
    // const helper = new HtmlHelper($)
    // const page = data.pages[0]
    // helper.assertTitle(page)
    expect($('title').text()).to.equal('Intro page')
    expect($('h1.govuk-heading-xl').text().trim()).to.equal('Intro page')
    expect($('p.govuk-body').length).to.equal(1)
    expect($('p.govuk-body').text().trim()).to.equal('Test content')
  })
  lab.test('POST /', async () => {
    const options = {
      method: 'POST',
      url: '/',
      payload: {}
    }
    const response = await server.inject(options)
    // Submitting the intro page redirects to the next page in the form.
    expect(response.statusCode).to.equal(302)
    expect(response.headers).to.include('location')
    expect(response.headers.location).to.equal('/full-name')
  })
})
<|start_filename|>server/plugins/session.js<|end_filename|>
module.exports = {
plugin: require('yar'),
options: {
cookieOptions: {
password: Array(32).fill(0).map(x => Math.random().toString(36).charAt(2)).join(''),
isSecure: false,
isHttpOnly: true
}
}
}
<|start_filename|>server/plugins/builder/pages/dob.js<|end_filename|>
const joi = require('joi')
const Page = require('.')

/**
 * Page that derives an age-based licence group from the submitted
 * date of birth.
 */
class DobPage extends Page {
  constructor (defs, pageDef) {
    super(defs, pageDef)
    // The derived ageGroup field is part of this page's state schema.
    this.stateSchema = this.stateSchema.append({
      ageGroup: joi.string().required().valid('junior', 'full', 'senior')
    })
  }

  getStateFromValidForm (formData) {
    const state = super.getStateFromValidForm(formData)
    // Whole years elapsed since the date of birth (31557600000 ms ~ 1 year).
    const age = ~~((Date.now() - state.dob) / 31557600000)

    if (age < 13) {
      state.ageGroup = 'junior'
    } else if (age > 65) {
      state.ageGroup = 'senior'
    } else {
      state.ageGroup = 'full'
    }

    return state
  }
}

module.exports = DobPage
<|start_filename|>server/views/partials/summary-detail.html<|end_filename|>
{# Renders one check-your-answers section: a table of label/value rows,
   each with a "Change" link back to the originating page. #}
{% macro summaryDetail(data) %}
<h2 class="govuk-heading-m">{{data.title}}</h2>
<table class="govuk-table check-your-answers">
  <caption class="govuk-table__caption"></caption>
  <tbody class="govuk-table__body">
    {% for item in data.items %}
    {# NOTE(review): `hide` is never defined in this macro's scope, so
       this guard always passes — presumably item.hide was intended;
       confirm before changing. #}
    {% if not hide %}
    <tr class="govuk-table__row">
      <th class="govuk-table__header" style="width:60%;">{{item.label}}</th>
      {# Rows in error get the "required" modifier class. #}
      <td class={% if item.inError %}"govuk-table__cell required"{% else %}"govuk-table__cell"{% endif %}>
        {% if item.value %}{{item.value}}{% else %}Not supplied{% endif %}
      </td>
      <td class="govuk-table__cell change-answer">
        <a href="{{item.url}}">Change</a>
      </td>
    </tr>
    {% endif %}
    {% endfor %}
  </tbody>
</table>
{% endmacro %}
<|start_filename|>server/db.js<|end_filename|>
const hoek = require('hoek')

/*
  Simple in-memory data store used for saving page state.
  Should be replaced with a real db in production.
*/

// Keyed by yar session id.
const cache = {}

// Resolves with the state stored for this session (empty object if none).
async function getState (request) {
  const state = cache[request.yar.id] || {}
  return state
}

// Deep-merges `value` into the session's existing state and stores it.
async function mergeState (request, value) {
  const existing = cache[request.yar.id] || {}
  hoek.merge(existing, value, true, false)
  cache[request.yar.id] = existing
  return existing
}

module.exports = { getState, mergeState }
<|start_filename|>server/plugins/builder/model.js<|end_filename|>
const joi = require('joi')
const EngineModel = require('digital-form-builder-engine/model')
/**
 * Engine model specialisation that layers licence business rules on top
 * of the generated schema, stripping fields that don't apply to the
 * licence type being purchased.
 */
class Model extends EngineModel {
  makeSchema (state) {
    const schema = super.makeSchema(state)
    // Now apply business logic by
    // overriding parts of the schema
    const conditions = this.conditions
    // Evaluate each named condition against the current state.
    const isSalmon = conditions.isSalmon.fn(state)
    const isAbsolute = conditions.isAbsolute.fn(state)
    const isAbsoluteAndShortTerm = conditions.isAbsoluteAndShortTerm.fn(state)
    const isAnnualAndFull = conditions.isAnnualAndFull.fn(state)
    const hasPIPorDLA = conditions.hasPIPorDLA.fn(state)
    const hasBlueBadge = conditions.hasBlueBadge.fn(state)
    const overrides = joi.object().keys({
      licenceDetails: joi.object().keys({
        // Strip startDate/startTime when they don't apply
        startDate: !isAbsolute ? joi.optional().strip() : joi.any(),
        startTime: !isAbsoluteAndShortTerm ? joi.optional().strip() : joi.any(),
        // Strip numberOfRods for salmon licences
        numberOfRods: isSalmon ? joi.optional().strip() : joi.any()
      }),
      concessions: isAnnualAndFull
        ? joi.object().keys({
          // Strip nationalInsuranceNumber unless hasPIPorDLA
          nationalInsuranceNumber: hasPIPorDLA ? joi.any() : joi.optional().strip(),
          // Strip hasBlueBadge unless !hasPIPorDLA
          hasBlueBadge: !hasPIPorDLA ? joi.any() : joi.optional().strip(),
          // Strip blueBadgeNumber unless hasBlueBadge
          blueBadgeNumber: hasBlueBadge ? joi.any() : joi.optional().strip()
        })
        // Strip concessions unless it's an Annual + Full licence
        : joi.object().keys({
          hasPIPorDLA: joi.optional(),
          nationalInsuranceNumber: joi.optional(),
          hasBlueBadge: joi.optional(),
          blueBadgeNumber: joi.optional()
        }).optional().strip()
    })
    // Merge the overrides over the engine-generated schema.
    return schema.concat(overrides)
  }
}
module.exports = Model
| DEFRA/ffc-digital-form-builder |
<|start_filename|>venv/Lib/site-packages/prov/tests/json/attr_entity_one_value_attr37.json<|end_filename|>
{
"entity": {
"ex:en_v37": {
"prov:value": {
"$": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
"type": "xsd:base64Binary"
}
}
},
"prefix": {
"ex": "http://example.org/"
}
} | richung99/digitizePlots |
<|start_filename|>composer.json<|end_filename|>
{
"name": "dantleech/maestro",
"description": "co-ordinate and manage micro packages",
"license": "mit",
"minimum-stability": "dev",
"prefer-stable": true,
"authors": [
{
"name": "<NAME>",
"email": "<EMAIL>"
}
],
"require": {
"php": "^7.3",
"phpactor/console-extension": "^0.1.1",
"webmozart/path-util": "^2.3",
"amphp/amp": "^2.1",
"phpactor/logging-extension": "^0.3.1",
"dnoegel/php-xdg-base-dir": "^0.1.0",
"amphp/process": "^1.1",
"twig/twig": "^2.11",
"thecodingmachine/safe": "^0.1.16",
"amphp/http-client": "^3.0",
"bramus/monolog-colored-line-formatter": "^3.0",
"symfony/filesystem": "^4.0",
"phpactor/config-loader": "^0.1.1",
"symfony/serializer": "^4.3",
"symfony/property-access": "^4.3",
"webmozart/glob": "^4.1",
"symfony/expression-language": "^4.3"
},
"require-dev": {
"sensiolabs-de/deptrac-shim": "^0.5.0",
"friendsofphp/php-cs-fixer": "^2.13",
"phpactor/test-utils": "^1.0.1",
"phpunit/phpunit": "^8.0",
"phpstan/phpstan": "^0.11.4",
"captainhook/captainhook": "~4.0",
"symfony/debug": "^4.3",
"symfony/var-dumper": "^4.3"
},
"autoload": {
"psr-4": {
"Maestro\\": "src/"
}
},
"autoload-dev": {
"psr-4": {
"Maestro\\Tests\\": "tests/"
}
},
"extra": {
"phpactor.extension_class": "phpactor\\extension\\maestro\\maestroextension",
"branch-alias": {
"dev-master": "0.1.x-dev"
}
},
"bin": ["bin/maestro"],
"scripts": {
"integrate": [
"@php-cs-fixer",
"@phpstan",
"@deptrac",
"@phpunit",
"@examples"
],
"deptrac": "./vendor/bin/deptrac",
"phpstan": "./vendor/bin/phpstan analyse --level=7 --ansi src",
"php-cs-fixer": "./vendor/bin/php-cs-fixer fix --ansi --dry-run",
"phpunit": "./vendor/bin/phpunit --colors=always",
"examples": "./bin/test_examples",
"docs": "make -C./doc html"
}
}
| dantleech/maestro |
<|start_filename|>webapp/app/sessions/controller.js<|end_filename|>
import { inject as service } from "@ember/service";
import { oneWay } from "@ember/object/computed";
import Controller from "@ember/controller";
import StatusFilterableController from "./../mixins/status-filterable/controller";
import config from "../config/environment";
import SearchController from "../mixins/search-controller";
// Controller for the sessions list view: status filtering, free-text search,
// pagination, and a bulk "discard search results" action.
export default Controller.extend(StatusFilterableController, SearchController, {
  // default pagination parameters
  page: 1,
  page_size: config.APP.default_page_size,

  // One-way binding: tracks the route's `sessions` model without writing back.
  collection: oneWay("sessions"),
  compact_view: null,

  api: service(),
  display: service(),

  // Expose search/pagination state in the URL so views are shareable.
  queryParams: ["search", "page", "page_size"],

  actions: {
    // Prompt for a retention period, then ask the backend to mark every
    // session matching the current search string for deferred deletion.
    async discard_results() {
      var days = parseInt(prompt("This will mark all search results for future deletion."
        + "\n" + "Please provide number of days to keep sessions"), 10);
      // parseInt returns NaN on cancel or malformed input, so the API call
      // below only happens for a well-formed whole number of days.
      if (Number.isInteger(days))
      {
        await this.get("api").call("discard_sessions_search", {
          search_string: this.get("search"),
          grace_period_seconds: days * 60 * 60 * 24
        });
      }
    },
  },
});
<|start_filename|>webapp/app/session/controller.js<|end_filename|>
import { inject as service } from "@ember/service";
import Controller from "@ember/controller";
// Controller for a single session view: discard (deferred deletion) and
// preserve (cancel deletion) actions against the backend API.
export default Controller.extend({
  // Currently selected test within the session, if any.
  current_test: null,
  // Active filters applied to the session's test list.
  test_filters: null,

  display: service(),
  api: service(),

  actions: {
    // Ask the user for a retention period, then mark this session for
    // deferred deletion and refresh the model to reflect the new state.
    async discard() {
      var days = parseInt(prompt("Number of days to keep session " + this.get("session_model.logical_id"), "10"), 10);
      // parseInt yields NaN when the prompt is cancelled or malformed,
      // in which case nothing is sent to the backend.
      if (Number.isInteger(days))
      {
        await this.get("api").call("discard_session", {
          session_id: parseInt(this.get("session_model.id")),
          grace_period_seconds: days * 60 * 60 * 24
        });
        await this.get("session_model").reload();
      }
    },

    // Cancel a pending discard, keeping the session indefinitely, then
    // refresh the model so the UI shows the updated status.
    async preserve() {
      await this.get("api").call("preserve_session", {
        session_id: parseInt(this.get("session_model.id")),
      });
      await this.get("session_model").reload();
    },
  },
});
<|start_filename|>docker/Dockerfile<|end_filename|>
# ---- Stage 1: build the Ember frontend -------------------------------------
FROM node:8 as frontend-builder

# build frontend
RUN npm install -g ember-cli
ADD ./webapp/ /frontend/
RUN cd /frontend/ && yarn install
# Production build; output lands in /frontend/dist and is copied out below.
RUN cd /frontend/ && node_modules/.bin/ember build --environment production

# ---- Stage 2: build the Rust API server as a static musl binary ------------
FROM ekidd/rust-musl-builder:stable as rust-builder
ADD ./api-server /api-server
# The builder image runs as user "rust"; make the sources writable for it.
RUN sudo chown -R rust:rust /api-server
# Run the test suite before producing the release binary.
RUN cd /api-server && rm -rf target && cargo test --release && cargo build --release

# ---- Stage 3: runtime image ------------------------------------------------
FROM ubuntu:20.04

ENV PYTHON_VERSION 3.9
ENV PYTHON_EXECUTABLE python$PYTHON_VERSION
ENV LC_ALL C.UTF-8
ENV LANG C.UTF-8
ENV DEBIAN_FRONTEND noninteractive

RUN apt-get update
RUN apt-get -y install build-essential software-properties-common libpq-dev nginx curl redis-server gcc sudo libsasl2-dev libldap2-dev wget git

# nginx
# NOTE(review): nginx is installed above and again from the PPA below;
# the PPA version wins — confirm the duplicate install is intentional.
RUN add-apt-repository ppa:chris-lea/nginx-devel
RUN apt-get update
RUN apt-get -y install nginx
RUN rm -rf /etc/nginx/conf.d/* /etc/nginx/sites-enabled/*
# Route nginx logs to the container's stdout/stderr for `docker logs`.
RUN ln -sf /dev/stdout /var/log/nginx/access.log && ln -sf /dev/stderr /var/log/nginx/error.log

# Python 3.9 comes from the deadsnakes PPA (not in Ubuntu 20.04 by default).
RUN add-apt-repository ppa:deadsnakes/ppa
RUN apt-get update
RUN apt-get install -y $PYTHON_EXECUTABLE $PYTHON_EXECUTABLE-dev $PYTHON_EXECUTABLE-distutils
RUN wget https://bootstrap.pypa.io/get-pip.py -O /tmp/get-pip.py
RUN $PYTHON_EXECUTABLE /tmp/get-pip.py
RUN $PYTHON_EXECUTABLE -m pip install virtualenv

# dockerize
# Used at container start to wait for dependent services before launching.
ENV DOCKERIZE_VERSION v0.3.0
RUN wget https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
    && tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
    && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz

RUN pip install pipenv

# Runtime mount points for configuration and user uploads.
VOLUME /conf
VOLUME /uploads

# Application sources and dependency manifests.
ADD ./manage.py /src/
ADD ./Pipfile /src
ADD ./Pipfile.lock /src
ADD ./etc /src/etc
ADD ./_lib /src/_lib
ADD ./flask_app /src/flask_app
ADD ./migrations /src/migrations
RUN cd /src && pipenv install -d

# Pull build artifacts from the earlier stages.
COPY --from=frontend-builder /frontend/dist /src/webapp/dist
COPY --from=rust-builder /api-server/target/x86_64-unknown-linux-musl/release/api-server /api-server

EXPOSE 80 443
WORKDIR /src
| parallelsystems/backslash |
<|start_filename|>src/Xamarin.Android.Tools.Bytecode/Kotlin/KotlinUtilities.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Xamarin.Android.Tools.Bytecode
{
/// <summary>
/// Helper extensions for translating Kotlin metadata (types, parameters,
/// visibility flags) into their JVM/Java equivalents.
/// </summary>
public static class KotlinUtilities
{
    /// <summary>
    /// Converts a Kotlin metadata type to a JVM type signature
    /// (e.g. "kotlin/Int" -> "I", arrays -> "[...", classes -> "Lpkg/Name").
    /// Returns "Ljava/lang/Object;" for unresolvable type parameters and
    /// the empty string when <paramref name="type"/> is null.
    /// </summary>
    public static string ConvertKotlinTypeSignature (KotlinType? type, KotlinFile? metadata = null)
    {
        if (type is null)
            return string.Empty;

        var class_name = type.ClassName;

        if (string.IsNullOrWhiteSpace (class_name)) {
            // No class name: this is a type-parameter reference. Try to
            // resolve it to the first declared upper bound on the class.
            if (metadata is KotlinClass klass) {
                var tp = klass.TypeParameters?.FirstOrDefault (t => t.Id == type.TypeParameter);

                if (tp?.UpperBounds?.FirstOrDefault ()?.ClassName != null)
                    return ConvertKotlinClassToJava (tp.UpperBounds.FirstOrDefault ()?.ClassName);
            }

            // Unbounded (or unresolvable) type parameters erase to Object.
            return "Ljava/lang/Object;";
        }

        var result = ConvertKotlinClassToJava (class_name);

        // "kotlin/Array" maps to "["; append the element type's signature.
        if (result == "[")
            result += ConvertKotlinTypeSignature (type.Arguments?.FirstOrDefault ()?.Type);

        return result;
    }

    /// <summary>
    /// Maps a Kotlin class name to its Java/JVM signature, using the
    /// built-in table for Kotlin intrinsics and "L{name}" otherwise.
    /// Nested-class dots are converted to '$' first.
    /// </summary>
    public static string ConvertKotlinClassToJava (string? className)
    {
        if (className == null || string.IsNullOrWhiteSpace (className))
            return string.Empty;

        className = className.Replace ('.', '$');

        // Table keys are stored without a trailing ';'.
        if (type_map.TryGetValue (className.TrimEnd (';'), out var result))
            return result;

        return "L" + className;
    }

    /// <summary>
    /// Concatenates the JVM signatures of all value parameters, mirroring
    /// the parameter portion of a JVM method descriptor.
    /// </summary>
    public static string GetSignature (this List<KotlinValueParameter> parameters)
    {
        return string.Join (string.Empty, parameters.Select (p => ConvertKotlinTypeSignature (p.Type)));
    }

    /// <summary>
    /// Returns the method's parameters minus Kotlin compiler artifacts
    /// (DefaultConstructorMarker parameters and "$"-prefixed synthetics).
    /// </summary>
    public static ParameterInfo[] GetFilteredParameters (this MethodInfo method)
    {
        // Kotlin adds this to some constructors but I cannot tell which ones,
        // so we'll just ignore them if we see them on the Java side
        return method.GetParameters ().Where (p => p.Type.BinaryName != "Lkotlin/jvm/internal/DefaultConstructorMarker;" && !p.Name.StartsWith ("$", StringComparison.Ordinal)).ToArray ();
    }

    /// <summary>
    /// Strips Kotlin name-mangling suffixes so the method can be matched
    /// against its metadata entry.
    /// </summary>
    public static string GetMethodNameWithoutSuffix (this MethodInfo method)
    {
        // Kotlin will rename some of its constructs to hide them from the Java runtime
        // These take the form of thing like:
        // - add-impl
        // - add-H3FcsT8
        // We strip them for trying to match up the metadata to the MethodInfo
        var index = method.Name.IndexOfAny (new [] { '-', '$' });

        return index >= 0 ? method.Name.Substring (0, index) : method.Name;
    }

    /// <summary>
    /// True when the method is a Kotlin synthetic constructor whose
    /// parameter list ends with (int, DefaultConstructorMarker).
    /// </summary>
    public static bool IsDefaultConstructorMarker (this MethodInfo method)
    {
        // A default constructor is synthetic and always has an int and a
        // DefaultConstructorMarker as its final 2 parameters.
        if (method.Name != "<init>")
            return false;

        if (!method.AccessFlags.HasFlag (MethodAccessFlags.Synthetic))
            return false;

        var parameters = method.GetParameters ();

        if (parameters.Length < 2)
            return false;

        // Parameter list ends with `int, DefaultConstructorMarker`.
        return parameters [parameters.Length - 2].Type.TypeSignature == "I" &&
            parameters [parameters.Length - 1].Type.TypeSignature == "Lkotlin/jvm/internal/DefaultConstructorMarker;";
    }

    /// <summary>
    /// Projects each element through <paramref name="creator"/> and drops
    /// elements for which it returns null; returns null for null input.
    /// </summary>
    internal static List<TResult>? ToList<TSource, TResult> (this IEnumerable<TSource>? self, JvmNameResolver resolver, Func<TSource, JvmNameResolver, TResult?> creator)
        where TResult: class
    {
        if (self == null)
            return null;

        return self.Select (v => creator (v, resolver)!)
            .Where (v => v != null)
            .ToList ();
    }

    // "Publicly visible" means Public or Protected for each flag flavor.
    public static bool IsPubliclyVisible (this ClassAccessFlags flags) => flags.HasFlag (ClassAccessFlags.Public) || flags.HasFlag (ClassAccessFlags.Protected);

    public static bool IsPubliclyVisible (this KotlinClassVisibility flags) => flags == KotlinClassVisibility.Public || flags == KotlinClassVisibility.Protected;

    public static bool IsPubliclyVisible (this KotlinFunctionFlags flags) => flags.HasFlag (KotlinFunctionFlags.Public) || flags.HasFlag (KotlinFunctionFlags.Protected);

    public static bool IsPubliclyVisible (this KotlinConstructorFlags flags) => flags.HasFlag (KotlinConstructorFlags.Public) || flags.HasFlag (KotlinConstructorFlags.Protected);

    public static bool IsPubliclyVisible (this KotlinPropertyFlags flags) => flags.HasFlag (KotlinPropertyFlags.Public) || flags.HasFlag (KotlinPropertyFlags.Protected);

    // Compiler-generated placeholder parameter names look like "p0", "p1", ...
    public static bool IsUnnamedParameter (this ParameterInfo parameter) => parameter.Name.Length > 1 && parameter.Name.StartsWith ("p", StringComparison.Ordinal) && int.TryParse (parameter.Name.Substring (1), out var _);

    public static bool IsUnnamedParameter (this KotlinValueParameter parameter) => parameter.Name?.Length > 1 &&
        parameter.Name.StartsWith ("p", StringComparison.Ordinal) &&
        int.TryParse (parameter.Name.Substring (1), out var _);

    // Kotlin intrinsic name -> JVM signature. Unsigned types erase to their
    // signed JVM counterparts; collection interfaces map to java.util types.
    static Dictionary<string, string> type_map = new Dictionary<string, string> {
        { "kotlin/Int", "I" },
        { "kotlin/UInt", "I" },
        { "kotlin/Double", "D" },
        { "kotlin/Char", "C" },
        { "kotlin/Long", "J" },
        { "kotlin/ULong", "J" },
        { "kotlin/Float", "F" },
        { "kotlin/Short", "S" },
        { "kotlin/UShort", "S" },
        { "kotlin/Byte", "B" },
        { "kotlin/UByte", "B" },
        { "kotlin/Boolean", "Z" },
        { "kotlin/Unit", "V" },
        { "kotlin/Array", "[" },
        { "kotlin/IntArray", "[I" },
        { "kotlin/UIntArray", "[I" },
        { "kotlin/DoubleArray", "[D" },
        { "kotlin/CharArray", "[C" },
        { "kotlin/LongArray", "[J" },
        { "kotlin/ULongArray", "[J" },
        { "kotlin/FloatArray", "[F" },
        { "kotlin/ShortArray", "[S" },
        { "kotlin/UShortArray", "[S" },
        { "kotlin/ByteArray", "[B" },
        { "kotlin/UByteArray", "[B" },
        { "kotlin/BooleanArray", "[Z" },
        { "kotlin/Any", "Ljava/lang/Object;" },
        { "kotlin/Nothing", "Ljava/lang/Void;" },
        { "kotlin/Annotation", "Ljava/lang/annotation/Annotation;" },
        { "kotlin/String", "Ljava/lang/String;" },
        { "kotlin/CharSequence", "Ljava/lang/CharSequence;" },
        { "kotlin/Throwable", "Ljava/lang/Throwable;" },
        { "kotlin/Cloneable", "Ljava/lang/Cloneable;" },
        { "kotlin/Number", "Ljava/lang/Number;" },
        { "kotlin/Comparable", "Ljava/lang/Comparable;" },
        { "kotlin/Enum", "Ljava/lang/Enum;" },
        { "kotlin/collections/Iterator", "Ljava/util/Iterator;" },
        { "kotlin/collections/MutableIterator", "Ljava/util/Iterator;" },
        { "kotlin/collections/Collection", "Ljava/util/Collection;" },
        { "kotlin/collections/MutableCollection", "Ljava/util/Collection;" },
        { "kotlin/collections/List", "Ljava/util/List;" },
        { "kotlin/collections/MutableList", "Ljava/util/List;" },
        { "kotlin/collections/Set", "Ljava/util/Set;" },
        { "kotlin/collections/MutableSet", "Ljava/util/Set;" },
        { "kotlin/collections/Map", "Ljava/util/Map;" },
        { "kotlin/collections/MutableMap", "Ljava/util/Map;" },
        { "kotlin/collections/ListIterator", "Ljava/util/ListIterator;" },
        { "kotlin/collections/MutableListIterator", "Ljava/util/ListIterator;" },
        { "kotlin/collections/Iterable", "Ljava/lang/Iterable;" },
        { "kotlin/collections/MutableIterable", "Ljava/lang/Iterable;" },
        { "kotlin/collections/Map$Entry", "Ljava/util/Map$Entry;" },
        { "kotlin/collections/MutableMap$MutableEntry", "Ljava/util/Map$Entry;" },
        { "kotlin/Function0", "Lkotlin/jvm/functions/Function0;" },
        { "kotlin/Function1", "Lkotlin/jvm/functions/Function1;" },
        { "kotlin/Function2", "Lkotlin/jvm/functions/Function2;" },
        { "kotlin/Function3", "Lkotlin/jvm/functions/Function3;" },
        { "kotlin/Function4", "Lkotlin/jvm/functions/Function4;" },
        { "kotlin/Function5", "Lkotlin/jvm/functions/Function5;" },
        { "kotlin/Function6", "Lkotlin/jvm/functions/Function6;" },
        { "kotlin/Function7", "Lkotlin/jvm/functions/Function7;" },
        { "kotlin/Function8", "Lkotlin/jvm/functions/Function8;" },
        { "kotlin/Function9", "Lkotlin/jvm/functions/Function9;" },
        { "kotlin/Function10", "Lkotlin/jvm/functions/Function10;" },
        { "kotlin/Function11", "Lkotlin/jvm/functions/Function11;" },
        { "kotlin/Function12", "Lkotlin/jvm/functions/Function12;" },
        { "kotlin/Function13", "Lkotlin/jvm/functions/Function13;" },
        { "kotlin/Function14", "Lkotlin/jvm/functions/Function14;" },
        { "kotlin/Function15", "Lkotlin/jvm/functions/Function15;" },
        { "kotlin/Function16", "Lkotlin/jvm/functions/Function16;" },
        { "kotlin/Function17", "Lkotlin/jvm/functions/Function17;" },
        { "kotlin/Function18", "Lkotlin/jvm/functions/Function18;" },
        { "kotlin/Function19", "Lkotlin/jvm/functions/Function19;" },
        { "kotlin/Function20", "Lkotlin/jvm/functions/Function20;" },
        { "kotlin/Function21", "Lkotlin/jvm/functions/Function21;" },
        { "kotlin/Function22", "Lkotlin/jvm/functions/Function22;" },
    };
}
}
<|start_filename|>tools/generator/Java.Interop.Tools.Generator.ObjectModel/JavadocInfo.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Xml.Linq;
using Irony.Parsing;
using Java.Interop.Tools.JavaSource;
namespace MonoDroid.Generation
{
// Identifies the URL scheme used when emitting "Java documentation for ..."
// links into generated xmldoc.
enum ApiLinkStyle {
    // Unknown/unsupported style: no documentation links are emitted.
    None,
    // developer.android.com/reference link format as of November 2020.
    DeveloperAndroidComReference_2020Nov,
}
/// <summary>
/// Carries the Javadoc scraped from api.xml for one type or member, plus
/// optional "extra remarks" (documentation link and copyright), and knows
/// how to translate it into C# xmldoc comment lines.
/// </summary>
public sealed class JavadocInfo {

    // Raw Javadoc text from the <javadoc> child element of the api.xml node.
    public string Javadoc { get; set; }

    // Extra <remarks> content (doc link, and optionally copyright) appended
    // to the translated documentation.
    public XElement[] ExtraRemarks { get; set; }

    // The copyright block from <javadoc-metadata>, kept separately so
    // callers can emit it independently of ExtraRemarks.
    public XElement[] Copyright { get; set; }

    // Controls how much of the Javadoc is imported (IntelliSense only vs.
    // full remarks).
    public XmldocStyle XmldocStyle { get; set; }

    // Human-readable "jni/Type.Member(params)" string used in diagnostics.
    string MemberDescription;

    /// <summary>
    /// Builds a JavadocInfo for the given api.xml element, or returns null
    /// when the element carries neither Javadoc text nor extra remarks.
    /// </summary>
    public static JavadocInfo CreateInfo (XElement element, XmldocStyle style, bool appendCopyrightExtra = true)
    {
        if (element == null) {
            return null;
        }
        string javadoc = element.Element ("javadoc")?.Value;

        var desc = GetMemberDescription (element);
        string declaringJniType = desc.DeclaringJniType;
        string declaringMemberName = desc.DeclaringMemberName;
        var declaringMemberParamString = desc.DeclaringMemberParameterString;

        var extras = GetExtra (element, style, declaringJniType, declaringMemberName, declaringMemberParamString, appendCopyrightExtra);
        XElement[] extra = extras.Extras;
        XElement[] copyright = extras.Copyright;

        // Nothing documentable at all: no Javadoc text and no link/copyright.
        if (string.IsNullOrEmpty (javadoc) && extra == null)
            return null;

        var info = new JavadocInfo () {
            ExtraRemarks = extra,
            Copyright = copyright,
            Javadoc = javadoc,
            MemberDescription = declaringMemberName == null
                ? declaringJniType
                : $"{declaringJniType}.{declaringMemberName}{declaringMemberParamString}",
            XmldocStyle = style,
        };
        return info;
    }

    /// <summary>
    /// Derives (declaring JNI type, member name, "(param, ...)" string) from
    /// an api.xml element; member parts are null for type elements.
    /// </summary>
    static (string DeclaringJniType, string DeclaringMemberName, string DeclaringMemberParameterString) GetMemberDescription (XElement element)
    {
        bool isType = element.Name.LocalName == "class" ||
            element.Name.LocalName == "interface";

        // For members, the declaring type's signature lives on the parent node.
        string declaringJniType = isType
            ? (string) element.Attribute ("jni-signature")
            : (string) element.Parent.Attribute ("jni-signature");
        // Strip the "L...;" reference-type wrapper to get the bare JNI name.
        if (declaringJniType.StartsWith ("L", StringComparison.Ordinal) &&
                declaringJniType.EndsWith (";", StringComparison.Ordinal)) {
            declaringJniType = declaringJniType.Substring (1, declaringJniType.Length-2);
        }
        // Constructors have no "name" attribute; fall back to the simple type name.
        string declaringMemberName = isType
            ? null
            : (string) element.Attribute ("name") ?? declaringJniType.Substring (declaringJniType.LastIndexOf ('/')+1);
        string declaringMemberJniSignature = isType
            ? null
            : (string) element.Attribute ("jni-signature");
        string declaringMemberParameterString = null;
        // Only methods/constructors (signatures starting with "(") get a
        // parameter list; fields keep a null parameter string.
        if (!isType && (declaringMemberJniSignature?.StartsWith ("(", StringComparison.Ordinal) ?? false)) {
            var parameterTypes = element.Elements ("parameter")?.Select (e => e.Attribute ("type")?.Value)?.ToList ();
            if (parameterTypes?.Any () ?? false) {
                declaringMemberParameterString = $"({string.Join (", ", parameterTypes)})";
            } else {
                declaringMemberParameterString = "()";
            }
        }
        return (declaringJniType, declaringMemberName, declaringMemberParameterString);
    }

    /// <summary>
    /// Collects the "extra remarks" for the element: a documentation link
    /// built from the nearest ancestor's &lt;javadoc-metadata&gt;, plus its
    /// copyright block (appended to the extras when requested).
    /// Returns (null, null) unless the style asks for extra remarks.
    /// </summary>
    static (XElement[] Extras, XElement[] Copyright) GetExtra (XElement element, XmldocStyle style, string declaringJniType, string declaringMemberName, string declaringMemberParameterString, bool appendCopyrightExtra)
    {
        if (!style.HasFlag (XmldocStyle.IntelliSenseAndExtraRemarks))
            return (null, null);
        // Walk up the ancestor chain to the first <javadoc-metadata> element.
        XElement javadocMetadata = null;
        while (element != null) {
            javadocMetadata = element.Element ("javadoc-metadata");
            if (javadocMetadata != null) {
                break;
            }
            element = element.Parent;
        }
        List<XElement> extra = null;
        IEnumerable<XElement> copyright = null;
        if (javadocMetadata != null) {
            var link = javadocMetadata.Element ("link");
            var urlPrefix = (string) link.Attribute ("prefix");
            var linkStyle = (string) link.Attribute ("style");
            var kind = ParseApiLinkStyle (linkStyle);
            XElement docLink = null;
            if (!string.IsNullOrEmpty (urlPrefix)) {
                docLink = CreateDocLinkUrl (kind, urlPrefix, declaringJniType, declaringMemberName, declaringMemberParameterString);
            }
            extra = new List<XElement> ();
            extra.Add (docLink);
            copyright = javadocMetadata.Element ("copyright").Elements ();
            if (appendCopyrightExtra) {
                extra.AddRange (copyright);
            }
        }
        return (extra?.ToArray (), copyright?.ToArray ());
    }

    /// <summary>Maps the metadata "style" attribute to an ApiLinkStyle.</summary>
    static ApiLinkStyle ParseApiLinkStyle (string style)
    {
        switch (style) {
            case "developer.android.com/reference@2020-Nov":
                return ApiLinkStyle.DeveloperAndroidComReference_2020Nov;
            default:
                return ApiLinkStyle.None;
        }
    }

    /// <summary>
    /// Parses the stored Javadoc and appends the resulting "/// ..." lines
    /// to <paramref name="comments"/>.
    /// </summary>
    public void AddJavadocs (ICollection<string> comments)
    {
        var nodes = ParseJavadoc ();
        AddComments (comments, nodes);
    }

    /// <summary>
    /// Runs the Javadoc-to-xmldoc parser over the stored Javadoc text,
    /// logging (but not throwing) on parse errors. Returns the resulting
    /// XML nodes, or an empty sequence when there is nothing to parse.
    /// </summary>
    public IEnumerable<XNode> ParseJavadoc ()
    {
        if (string.IsNullOrWhiteSpace (Javadoc))
            return Enumerable.Empty<XNode> ();

        Javadoc = Javadoc.Trim ();

        ParseTree tree = null;
        IEnumerable<XNode> nodes = null;
        try {
            var parser = new SourceJavadocToXmldocParser (XmldocStyle) {
                ExtraRemarks = ExtraRemarks,
            };
            nodes = parser.TryParse (Javadoc, fileName: null, out tree);
        }
        catch (Exception e) {
            Console.Error.WriteLine ($"## Exception translating remarks: {e.ToString ()}");
        }
        // Report partial-parse errors with the offending source for triage.
        if (tree != null && tree.HasErrors ()) {
            Console.Error.WriteLine ($"## Unable to translate remarks for {MemberDescription}:");
            Console.Error.WriteLine ("```");
            Console.Error.WriteLine (Javadoc);
            Console.Error.WriteLine ("```");
            PrintMessages (tree, Console.Error);
            Console.Error.WriteLine ();
        }
        return nodes;
    }

    /// <summary>Appends each node's text as "/// " comment lines.</summary>
    public static void AddComments (ICollection<string> comments, IEnumerable<XNode> nodes)
    {
        if (nodes == null)
            return;
        foreach (var node in nodes) {
            AddNode (comments, node);
        }
    }

    // Serializes one XML node and prefixes every line with "/// ".
    static void AddNode (ICollection<string> comments, XNode node)
    {
        if (node == null)
            return;
        var contents = node.ToString ();
        var lines = new StringReader (contents);
        string line;
        while ((line = lines.ReadLine ()) != null) {
            comments.Add ($"/// {line}");
        }
    }

    // Writes each parser message followed by the offending source line and
    // a caret marking the error column.
    static void PrintMessages (ParseTree tree, TextWriter writer)
    {
        var lines = GetLines (tree.SourceText);
        foreach (var m in tree.ParserMessages) {
            writer.WriteLine ($"JavadocImport-{m.Level} {m.Location}: {m.Message}");
            writer.WriteLine (lines [m.Location.Line]);
            writer.Write (new string (' ', m.Location.Column));
            writer.WriteLine ("^");
        }
    }

    // Splits text into lines for the error-reporting output above.
    static List<string> GetLines (string text)
    {
        var lines = new List<string>();
        var reader = new StringReader (text);
        string line;
        while ((line = reader.ReadLine()) != null) {
            lines.Add (line);
        }
        return lines;
    }

    // One URL builder per supported link style.
    static Dictionary<ApiLinkStyle, Func<string, string, string, string, XElement>> UrlCreators = new Dictionary<ApiLinkStyle, Func<string, string, string, string, XElement>> {
        [ApiLinkStyle.DeveloperAndroidComReference_2020Nov] = CreateAndroidDocLinkUri,
    };

    // Dispatches to the registered URL builder for the given style;
    // returns null when no link can be produced.
    static XElement CreateDocLinkUrl (ApiLinkStyle style, string prefix, string declaringJniType, string declaringMemberName, string declaringMemberParameterString)
    {
        if (style == ApiLinkStyle.None || prefix == null || declaringJniType == null)
            return null;
        if (UrlCreators.TryGetValue (style, out var creator)) {
            return creator (prefix, declaringJniType, declaringMemberName, declaringMemberParameterString);
        }
        return null;
    }

    static XElement CreateAndroidDocLinkUri (string prefix, string declaringJniType, string declaringMemberName, string declaringMemberParameterString)
    {
        // URL is:
        //  * {prefix}
        //  * declaring type in JNI format
        //  * when `declaringJniMemberName` != null, `#{declaringJniMemberName}`
        //  * for methods & constructors, a `(`, the arguments in *Java* syntax -- separated by `, ` -- and `)`
        //
        // Example: "https://developer.android.com/reference/android/app/Application#registerOnProvideAssistDataListener(android.app.Application.OnProvideAssistDataListener)"
        // Example: "https://developer.android.com/reference/android/animation/ObjectAnimator#ofFloat(T,%20android.util.Property%3CT,%20java.lang.Float%3E,%20float...)"

        // `java` accumulates the Java-syntax member description shown in the
        // link text; `url` accumulates the href.
        var java = new StringBuilder (declaringJniType)
            .Replace ("/", ".")
            .Replace ("$", ".");
        var url = new StringBuilder (prefix);
        if (!prefix.EndsWith ("/", StringComparison.Ordinal)) {
            url.Append ("/");
        }
        url.Append (declaringJniType);
        if (declaringMemberName != null) {
            java.Append (".").Append (declaringMemberName);
            url.Append ("#").Append (declaringMemberName);
            if (declaringMemberParameterString != null) {
                java.Append (declaringMemberParameterString);
                url.Append (declaringMemberParameterString);
            }
        }
        // new Uri(...).AbsoluteUri percent-encodes spaces/angle brackets in
        // the parameter list, as in the second example above.
        var format = new XElement ("format",
            new XAttribute ("type", "text/html"),
            new XElement ("a",
                new XAttribute ("href", new Uri (url.ToString ()).AbsoluteUri),
                new XAttribute ("title", "Reference documentation"),
                "Java documentation for ",
                new XElement ("code", java.ToString ()),
                "."));
        return new XElement ("para", format);
    }
}
}
<|start_filename|>src/Xamarin.Android.Tools.Bytecode/Kotlin/KotlinFixups.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Xamarin.Android.Tools.Bytecode
{
public static class KotlinFixups
{
/// <summary>
/// Applies Kotlin-specific fixups to the parsed classes: for every class
/// carrying a kotlin.Metadata annotation, adjusts visibility, hides
/// compiler artifacts, and restores Kotlin parameter names/types using the
/// parsed metadata. Metadata parse failures are logged and skipped.
/// </summary>
public static void Fixup (IList<ClassFile> classes)
{
    foreach (var c in classes) {
        // See if this is a Kotlin class
        var attr = c.Attributes.OfType<RuntimeVisibleAnnotationsAttribute> ().FirstOrDefault ();
        var kotlin = attr?.Annotations.SingleOrDefault (a => a.Type == "Lkotlin/Metadata;");

        if (kotlin is null)
            continue;

        try {
            var km = KotlinMetadata.FromAnnotation (kotlin);
            var metadata = km.ParseMetadata ();

            if (metadata is null)
                continue;

            // Do fixups only valid for full classes
            var class_metadata = (metadata as KotlinClass);

            if (class_metadata != null) {
                FixupClassVisibility (c, class_metadata);

                // Once the class itself has been hidden, member-level
                // fixups are unnecessary.
                if (!c.AccessFlags.IsPubliclyVisible ())
                    continue;

                if (class_metadata.Constructors == null)
                    continue;

                foreach (var con in class_metadata.Constructors)
                    FixupConstructor (FindJavaConstructor (class_metadata, con, c), con);
            }

            // Do fixups valid for both classes and modules
            // (We pass "class_metadata" even though it's sometimes null because it's
            // used for generic type resolution if available for class types)
            FixupJavaMethods (c.Methods);

            if (metadata.Functions != null) {
                foreach (var met in metadata.Functions)
                    FixupFunction (FindJavaMethod (metadata, met, c), met, class_metadata);
            }

            if (metadata.Properties != null) {
                foreach (var prop in metadata.Properties) {
                    var getter = FindJavaPropertyGetter (metadata, prop, c);
                    var setter = FindJavaPropertySetter (metadata, prop, c);

                    FixupProperty (getter, setter, prop);
                    FixupField (FindJavaFieldProperty (metadata, prop, c), prop);
                }
            }
        } catch (Exception ex) {
            Log.Warning (0, $"class-parse: warning: Unable to parse Kotlin metadata on '{c.ThisClass.Name}': {ex}");
        }
    }
}
/// <summary>
/// Applies Kotlin visibility to the Java class: classes Kotlin considers
/// non-public are hidden (made private), except interfaces, which are
/// demoted to package-private instead; nested types follow their parent.
/// </summary>
static void FixupClassVisibility (ClassFile klass, KotlinClass metadata)
{
    // Hide class if it isn't Public/Protected
    if (klass.AccessFlags.IsPubliclyVisible () && !metadata.Visibility.IsPubliclyVisible ()) {

        // Interfaces should be set to "package-private"
        if (klass.AccessFlags.HasFlag (ClassAccessFlags.Interface)) {
            Log.Debug ($"Kotlin: Setting internal interface {klass.ThisClass.Name.Value} to package-private");
            klass.AccessFlags = SetVisibility (klass.AccessFlags, null);

            // Nested types declared on the interface get the same treatment.
            foreach (var ic in klass.InnerClasses) {
                Log.Debug ($"Kotlin: Setting nested type {ic.InnerClass.Name.Value} in an internal interface to package-private");
                ic.InnerClassAccessFlags = SetVisibility (ic.InnerClassAccessFlags, null);
            }

            return;
        }

        Log.Debug ($"Kotlin: Hiding internal class {klass.ThisClass.Name.Value}");
        klass.AccessFlags = SetVisibility (klass.AccessFlags, ClassAccessFlags.Private);

        // Also hide any nested types declared inside the hidden class.
        foreach (var ic in klass.InnerClasses) {
            Log.Debug ($"Kotlin: Hiding nested internal type {ic.InnerClass.Name.Value}");
            ic.InnerClassAccessFlags = SetVisibility (ic.InnerClassAccessFlags, ClassAccessFlags.Private);
        }

        return;
    }
}
// Passing null for 'newVisibility' parameter means 'package-private'
static ClassAccessFlags SetVisibility (ClassAccessFlags existing, ClassAccessFlags? newVisibility)
{
    // Clear every visibility bit while leaving the remaining flags
    // (Abstract, Interface, ...) untouched. `x & ~f` is bit-for-bit
    // equivalent to the original `(x ^ f) & x` form.
    existing &= ~ClassAccessFlags.Public;
    existing &= ~ClassAccessFlags.Protected;
    existing &= ~ClassAccessFlags.Private;

    // The JVM encodes package-private as "no visibility flag set", so a
    // null request simply leaves all visibility bits cleared.
    return newVisibility.HasValue ? existing | newVisibility.Value : existing;
}
// Replaces the method's visibility bits with 'newVisibility'.
static MethodAccessFlags SetVisibility (MethodAccessFlags existing, MethodAccessFlags newVisibility)
{
    // Strip the current visibility bits (Public/Protected/Private/Internal)
    // without disturbing other flags such as Static or Abstract;
    // `x & ~f` matches the original `(x ^ f) & x` bit trick.
    var cleared = existing
        & ~MethodAccessFlags.Public
        & ~MethodAccessFlags.Protected
        & ~MethodAccessFlags.Private
        & ~MethodAccessFlags.Internal;

    return cleared | newVisibility;
}
/// <summary>
/// Method-level fixups that do not require matching Kotlin metadata:
/// hides "-impl" helpers and synthetic default-constructor overloads, and
/// cleans up extension-method receiver parameter names.
/// </summary>
static void FixupJavaMethods (Methods methods)
{
    // We do the following method level fixups here because we can operate on all methods,
    // not just ones that have corresponding Kotlin metadata, like FixupFunction does.

    // Hide Kotlin generated methods like "add-impl" that aren't intended for end users
    foreach (var method in methods.Where (m => m.IsPubliclyVisible && m.Name.IndexOf ("-impl", StringComparison.Ordinal) >= 0)) {
        Log.Debug ($"Kotlin: Hiding implementation method {method.DeclaringType?.ThisClass.Name.Value} - {method.Name}");
        method.AccessFlags = MethodAccessFlags.Private;
    }

    // Hide constructor if it's the synthetic DefaultConstructorMarker one
    foreach (var method in methods.Where (method => method.IsDefaultConstructorMarker ())) {
        Log.Debug ($"Kotlin: Hiding synthetic default constructor in class '{method.DeclaringType?.ThisClass.Name.Value}' with signature '{method.Descriptor}'");

        // Swap Public for Private while preserving the other flags.
        method.AccessFlags = ((method.AccessFlags ^ MethodAccessFlags.Public) & method.AccessFlags) | MethodAccessFlags.Private;
    }

    // Better parameter names in extension methods
    foreach (var method in methods.Where (m => m.IsPubliclyVisible && m.AccessFlags.HasFlag (MethodAccessFlags.Static)))
        FixupExtensionMethod (method);
}
// Demotes a constructor to Internal when Kotlin metadata says it is not
// part of the public API.
static void FixupConstructor (MethodInfo? method, KotlinConstructor metadata)
{
    // Nothing to do when no matching Java constructor was found, when it
    // is already hidden, or when Kotlin declares it publicly visible.
    if (method is null || !method.IsPubliclyVisible || metadata.Flags.IsPubliclyVisible ())
        return;

    Log.Debug ($"Kotlin: Hiding internal constructor {method.DeclaringType?.ThisClass.Name.Value} - {metadata.GetSignature ()}");
    method.AccessFlags = SetVisibility (method.AccessFlags, MethodAccessFlags.Internal);
}
/// <summary>
/// Applies Kotlin function metadata to the matched Java method: hides
/// non-public functions, restores Kotlin parameter names, and records
/// Kotlin types erased on the JVM (e.g. unsigned integers).
/// </summary>
static void FixupFunction (MethodInfo? method, KotlinFunction metadata, KotlinClass? kotlinClass)
{
    if (method is null || !method.IsPubliclyVisible)
        return;

    // Hide function if it isn't Public/Protected
    if (!metadata.Flags.IsPubliclyVisible ()) {
        Log.Debug ($"Kotlin: Hiding internal method {method.DeclaringType?.ThisClass.Name.Value} - {metadata.GetSignature ()}");
        method.AccessFlags = SetVisibility (method.AccessFlags, MethodAccessFlags.Internal);
        return;
    }

    var java_parameters = method.GetFilteredParameters ();

    for (var i = 0; i < java_parameters.Length; i++) {
        var java_p = java_parameters [i];

        // Guard the lookup: the filtered Java parameter list can be longer
        // than Kotlin's value-parameter list (e.g. synthetic parameters that
        // survive filtering), and the original unchecked
        // `metadata.ValueParameters [i]` would throw out-of-range.
        var kotlin_p = metadata.ValueParameters?.ElementAtOrDefault (i);

        if (kotlin_p == null || kotlin_p.Type == null || kotlin_p.Name == null)
            continue;

        // Kotlin provides actual parameter names
        if (TypesMatch (java_p.Type, kotlin_p.Type, kotlinClass) && java_p.IsUnnamedParameter () && !kotlin_p.IsUnnamedParameter ()) {
            Log.Debug ($"Kotlin: Renaming parameter {method.DeclaringType?.ThisClass.Name.Value} - {method.Name} - {java_p.Name} -> {kotlin_p.Name}");
            java_p.Name = kotlin_p.Name;
        }

        // Handle erasure of Kotlin unsigned types
        java_p.KotlinType = GetKotlinType (java_p.Type.TypeSignature, kotlin_p.Type.ClassName);
    }

    // Handle erasure of Kotlin unsigned types
    method.KotlinReturnType = GetKotlinType (method.ReturnType.TypeSignature, metadata.ReturnType?.ClassName);
}
// Kotlin "extension" methods give the first parameter an ugly name like
// "$this$toByteString"; rename that receiver parameter to the nicer "obj".
static void FixupExtensionMethod (MethodInfo method)
{
    var parameters = method.GetParameters ();
    if (parameters.Length == 0)
        return;

    var receiver = parameters [0];
    if (!receiver.Name.StartsWith ("$this$", StringComparison.Ordinal))
        return;

    Log.Debug ($"Kotlin: Renaming extension parameter {method.DeclaringType?.ThisClass.Name.Value} - {method.Name} - {receiver.Name} -> obj");
    receiver.Name = "obj";
}
// Applies Kotlin property metadata to its Java getter/setter pair:
// hides non-public properties, renames the raw setter parameter to the
// conventional "value", and records Kotlin unsigned types that were
// erased to signed JVM types.
static void FixupProperty (MethodInfo? getter, MethodInfo? setter, KotlinProperty metadata)
{
	if (getter is null && setter is null)
		return;

	// Hide property if it isn't Public/Protected
	if (!metadata.Flags.IsPubliclyVisible ()) {
		if (getter?.IsPubliclyVisible == true) {
			Log.Debug ($"Kotlin: Hiding internal getter method {getter.DeclaringType?.ThisClass.Name.Value} - {getter.Name}");
			getter.AccessFlags = SetVisibility (getter.AccessFlags, MethodAccessFlags.Internal);
		}

		if (setter?.IsPubliclyVisible == true) {
			Log.Debug ($"Kotlin: Hiding internal setter method {setter.DeclaringType?.ThisClass.Name.Value} - {setter.Name}");
			setter.AccessFlags = SetVisibility (setter.AccessFlags, MethodAccessFlags.Internal);
		}

		return;
	}

	// Handle erasure of Kotlin unsigned types
	if (getter != null)
		getter.KotlinReturnType = GetKotlinType (getter.ReturnType.TypeSignature, metadata.ReturnType?.ClassName);

	if (setter != null) {
		var setter_parameter = setter.GetParameters ().First ();

		// Kotlin setters surface with no name or the placeholder "<set-?>";
		// "value" matches the C# property-setter convention.
		if (setter_parameter.IsUnnamedParameter () || setter_parameter.Name == "<set-?>") {
			Log.Debug ($"Kotlin: Renaming setter parameter {setter.DeclaringType?.ThisClass.Name.Value} - {setter.Name} - {setter_parameter.Name} -> value");
			setter_parameter.Name = "value";
		}

		// Handle erasure of Kotlin unsigned types
		setter_parameter.KotlinType = GetKotlinType (setter_parameter.Type.TypeSignature, metadata.ReturnType?.ClassName);
	}
}
// Records, on the Java field, the original Kotlin unsigned type (if any)
// that was erased to a signed JVM type in the field's descriptor.
static void FixupField (FieldInfo? field, KotlinProperty metadata)
{
	if (field is null)
		return;

	var kotlinClassName = metadata.ReturnType?.ClassName;
	field.KotlinType = GetKotlinType (field.Descriptor, kotlinClassName);
}
// Locates the JVM constructor/initializer matching a Kotlin constructor:
// same arity and pairwise-matching parameter types. Returns null when no
// candidate matches.
static MethodInfo? FindJavaConstructor (KotlinClass kotlinClass, KotlinConstructor constructor, ClassFile klass)
{
	return klass.Methods
		.Where (method => method.Name == "<init>" || method.Name == "<clinit>")
		.Where (method => method.GetFilteredParameters ().Length == constructor.ValueParameters?.Count)
		.FirstOrDefault (method => ParametersMatch (kotlinClass, method, constructor.ValueParameters!));
}
// Locates the JVM method matching a Kotlin function by JVM name, arity,
// return type and parameter types. Returns null when nothing matches.
static MethodInfo? FindJavaMethod (KotlinFile kotlinFile, KotlinFunction function, ClassFile klass)
{
	foreach (var method in klass.Methods) {
		if (method.Name != function.JvmName)
			continue;
		if (method.GetFilteredParameters ().Length != function.ValueParameters?.Count)
			continue;
		if (function.ReturnType == null)
			continue;
		if (!TypesMatch (method.ReturnType, function.ReturnType, kotlinFile))
			continue;
		if (!ParametersMatch (kotlinFile, method, function.ValueParameters!))
			continue;

		return method;
	}

	return null;
}
// Finds the backing JVM field for a Kotlin property: same name and a JVM
// descriptor that agrees with the Kotlin property's return type.
static FieldInfo? FindJavaFieldProperty (KotlinFile kotlinClass, KotlinProperty property, ClassFile klass)
{
	return klass.Fields.FirstOrDefault (field =>
		field.Name == property.Name &&
		property.ReturnType != null &&
		TypesMatch (new TypeInfo (field.Descriptor, field.Descriptor), property.ReturnType, kotlinClass));
}
// Finds the JVM getter for a Kotlin property: a parameterless
// "get<Name>" (case-insensitive) whose return type matches the property type.
static MethodInfo? FindJavaPropertyGetter (KotlinFile kotlinClass, KotlinProperty property, ClassFile klass)
{
	var expected = $"get{property.Name}";

	return klass.Methods.FirstOrDefault (method =>
		string.Compare (method.GetMethodNameWithoutSuffix (), expected, StringComparison.OrdinalIgnoreCase) == 0 &&
		method.GetParameters ().Length == 0 &&
		property.ReturnType != null &&
		TypesMatch (method.ReturnType, property.ReturnType, kotlinClass));
}
// Finds the JVM setter for a Kotlin property: a void "set<Name>"
// (case-insensitive) taking exactly one parameter whose type matches the
// property type.
static MethodInfo? FindJavaPropertySetter (KotlinFile kotlinClass, KotlinProperty property, ClassFile klass)
{
	var expected = $"set{property.Name}";

	return klass.Methods.FirstOrDefault (method =>
		string.Compare (method.GetMethodNameWithoutSuffix (), expected, StringComparison.OrdinalIgnoreCase) == 0 &&
		property.ReturnType != null &&
		method.GetParameters ().Length == 1 &&
		method.ReturnType.BinaryName == "V" &&
		TypesMatch (method.GetParameters () [0].Type, property.ReturnType, kotlinClass));
}
// Returns true when every (filtered) Java parameter's type agrees with the
// corresponding Kotlin value parameter's type.
static bool ParametersMatch (KotlinFile kotlinClass, MethodInfo method, List<KotlinValueParameter> kotlinParameters)
{
	var java_parameters = method.GetFilteredParameters ();

	// Differing arity can never match. (The original returned true for an
	// empty Java list regardless of the Kotlin count, and indexed past the
	// end of `kotlinParameters` when the Java list was longer; callers
	// pre-filter by arity today, but don't rely on that.)
	if (java_parameters.Length != kotlinParameters.Count)
		return false;

	for (var i = 0; i < java_parameters.Length; i++) {
		var java_p = java_parameters [i];
		var kotlin_p = kotlinParameters [i];

		if (kotlin_p.Type == null || !TypesMatch (java_p.Type, kotlin_p.Type, kotlinClass))
			return false;
	}

	return true;
}
// Decides whether a JVM type and a Kotlin metadata type refer to the same
// thing. Deliberately permissive: generic erasure and Kotlin intrinsic
// types make exact comparison impossible, so several fallback heuristics
// may answer "true".
static bool TypesMatch (TypeInfo javaType, KotlinType kotlinType, KotlinFile? kotlinFile)
{
	// Generic type
	if (!string.IsNullOrWhiteSpace (kotlinType.TypeParameterName) && $"T{kotlinType.TypeParameterName};" == javaType.TypeSignature)
		return true;

	// Direct match after converting the Kotlin type to its JVM signature.
	if (javaType.BinaryName == KotlinUtilities.ConvertKotlinTypeSignature (kotlinType, kotlinFile))
		return true;

	// Could be a generic type erasure
	if (javaType.BinaryName == "Ljava/lang/Object;")
		return true;

	// Sometimes Kotlin keeps its native types rather than converting them to Java native types
	// ie: "Lkotlin/UShort;" instead of "S"
	if (javaType.BinaryName.StartsWith ("L", StringComparison.Ordinal) && javaType.BinaryName.EndsWith (";", StringComparison.Ordinal)) {
		// Strip the leading "L" and trailing ";" to get the class name.
		if (KotlinUtilities.ConvertKotlinClassToJava (javaType.BinaryName.Substring (1, javaType.BinaryName.Length - 2)) == KotlinUtilities.ConvertKotlinTypeSignature (kotlinType, kotlinFile))
			return true;
	}

	// Same for some arrays
	if (javaType.BinaryName.StartsWith ("[L", StringComparison.Ordinal) && javaType.BinaryName.EndsWith (";", StringComparison.Ordinal)) {
		// Strip "[L" and ";" and re-prefix "[" after converting the element class.
		if ("[" + KotlinUtilities.ConvertKotlinClassToJava (javaType.BinaryName.Substring (2, javaType.BinaryName.Length - 3)) == KotlinUtilities.ConvertKotlinTypeSignature (kotlinType, kotlinFile))
			return true;
	}

	return false;
}
// Maps a JVM signature plus the original Kotlin class name to the C#-style
// name of the Kotlin unsigned type the compiler erased. Returns null when
// the pair does not correspond to an unsigned-type erasure.
static string? GetKotlinType (string? jvmType, string? kotlinClass)
{
	switch (kotlinClass) {
		case "kotlin/UInt;":        return jvmType == "I"  ? "uint"     : null;
		case "kotlin/UIntArray;":   return jvmType == "[I" ? "uint[]"   : null;
		case "kotlin/UShort;":      return jvmType == "S"  ? "ushort"   : null;
		case "kotlin/UShortArray;": return jvmType == "[S" ? "ushort[]" : null;
		case "kotlin/ULong;":       return jvmType == "J"  ? "ulong"    : null;
		case "kotlin/ULongArray;":  return jvmType == "[J" ? "ulong[]"  : null;
		case "kotlin/UByte;":       return jvmType == "B"  ? "ubyte"    : null;
		case "kotlin/UByteArray;":  return jvmType == "[B" ? "ubyte[]"  : null;
		default:                    return null;
	}
}
}
}
<|start_filename|>Makefile<|end_filename|>
# Top-level build orchestration for Java.Interop.

# ---- Platform detection: native library extension and dllmap OS name ----
OS ?= $(shell uname)
V ?= 0
CONFIGURATION = Debug

ifeq ($(OS),Darwin)
NATIVE_EXT = .dylib
DLLMAP_OS_NAME = osx
endif
ifeq ($(OS),Linux)
NATIVE_EXT = .so
DLLMAP_OS_NAME = linux
endif

# Projects materialized by `make prepare-external` (git submodules).
PREPARE_EXTERNAL_FILES = \
	external/xamarin-android-tools/src/Xamarin.Android.Tools.AndroidSdk/Xamarin.Android.Tools.AndroidSdk.csproj

# Native artifacts the test runs depend on.
DEPENDENCIES = \
	bin/Test$(CONFIGURATION)/libNativeTiming$(NATIVE_EXT)

# Managed unit-test assemblies.
TESTS = \
	bin/Test$(CONFIGURATION)/Java.Interop-Tests.dll \
	bin/Test$(CONFIGURATION)/Java.Interop.Dynamic-Tests.dll \
	bin/Test$(CONFIGURATION)/Java.Interop.Export-Tests.dll \
	bin/Test$(CONFIGURATION)/Java.Interop.Tools.JavaCallableWrappers-Tests.dll \
	bin/Test$(CONFIGURATION)/Java.Interop.Tools.JavaSource-Tests.dll \
	bin/Test$(CONFIGURATION)/logcat-parse-Tests.dll \
	bin/Test$(CONFIGURATION)/generator-Tests.dll \
	bin/Test$(CONFIGURATION)/Xamarin.Android.Tools.ApiXmlAdjuster-Tests.dll \
	bin/Test$(CONFIGURATION)/Java.Interop.Tools.JavaTypeSystem-Tests.dll \
	bin/Test$(CONFIGURATION)/Xamarin.Android.Tools.Bytecode-Tests.dll \
	bin/Test$(CONFIGURATION)/Java.Interop.Tools.Generator-Tests.dll \
	bin/Test$(CONFIGURATION)/Xamarin.SourceWriter-Tests.dll

# Performance-test assemblies.
PTESTS = \
	bin/Test$(CONFIGURATION)/Java.Interop-PerformanceTests.dll

# Android-specific test assemblies.
ATESTS = \
	bin/Test$(CONFIGURATION)/Android.Interop-Tests.dll

BUILD_PROPS = bin/Build$(CONFIGURATION)/JdkInfo.props bin/Build$(CONFIGURATION)/MonoInfo.props

all: $(DEPENDENCIES) $(TESTS)

# Run every suite; remember failures in $r but do not abort early.
run-all-tests:
	r=0; \
	$(MAKE) run-tests || r=1 ; \
	$(MAKE) run-test-jnimarshal || r=1 ; \
	$(MAKE) run-ptests || r=1 ; \
	$(MAKE) run-java-source-utils-tests || r=1 ; \
	exit $$r;

include build-tools/scripts/msbuild.mk

prepare:: $(BUILD_PROPS) src/Java.Runtime.Environment/Java.Runtime.Environment.dll.config

prepare:: prepare-bootstrap
	$(MSBUILD) $(MSBUILD_FLAGS) /t:Restore Java.Interop.sln

prepare-bootstrap: prepare-external bin/Build$(CONFIGURATION)/Java.Interop.BootstrapTasks.dll

bin/Build$(CONFIGURATION)/Java.Interop.BootstrapTasks.dll: build-tools/Java.Interop.BootstrapTasks/Java.Interop.BootstrapTasks.csproj \
		external/xamarin-android-tools/src/Xamarin.Android.Tools.AndroidSdk/Xamarin.Android.Tools.AndroidSdk.csproj \
		$(wildcard build-tools/Java.Interop.BootstrapTasks/Java.Interop.BootstrapTasks/*.cs)
	$(MSBUILD) $(MSBUILD_FLAGS) /restore "$<"

# Fetch submodules and restore their packages.
prepare-external $(PREPARE_EXTERNAL_FILES):
	git submodule update --init --recursive
	(cd external/xamarin-android-tools && $(MAKE) prepare)
	nuget restore

prepare-core: bin/Build$(CONFIGURATION)/MonoInfo.props src/Java.Runtime.Environment/Java.Runtime.Environment.dll.config

clean:
	-$(MSBUILD) $(MSBUILD_FLAGS) /t:Clean
	-rm -Rf bin/$(CONFIGURATION) bin/Build$(CONFIGURATION) bin/Test$(CONFIGURATION)
	-rm src/Java.Runtime.Environment/Java.Runtime.Environment.dll.config

include build-tools/scripts/mono.mk
include build-tools/scripts/jdk.mk

# Optional dllmap override: if the override file exists, splice its contents
# into the generated .dll.config; otherwise delete the placeholder line.
JAVA_RUNTIME_ENVIRONMENT_DLLMAP_OVERRIDE = Java.Runtime.Environment.Override.dllmap
ifeq ($(wildcard $(JAVA_RUNTIME_ENVIRONMENT_DLLMAP_OVERRIDE)),)
JAVA_RUNTIME_ENVIRONMENT_DLLMAP_OVERRIDE_CMD = '/@JAVA_RUNTIME_ENVIRONMENT_DLLMAP@/d'
else
JAVA_RUNTIME_ENVIRONMENT_DLLMAP_OVERRIDE_CMD = '/@JAVA_RUNTIME_ENVIRONMENT_DLLMAP@/ {' -e 'r $(JAVA_RUNTIME_ENVIRONMENT_DLLMAP_OVERRIDE)' -e 'd' -e '}'
endif

src/Java.Runtime.Environment/Java.Runtime.Environment.dll.config: src/Java.Runtime.Environment/Java.Runtime.Environment.dll.config.in \
		bin/Build$(CONFIGURATION)/JdkInfo.props
	sed -e 's#@JI_JVM_PATH@#$(JI_JVM_PATH)#g' -e 's#@OS_NAME@#$(DLLMAP_OS_NAME)#g' -e $(JAVA_RUNTIME_ENVIRONMENT_DLLMAP_OVERRIDE_CMD) < $< > $@

JAVA_INTEROP_LIB = libjava-interop$(NATIVE_EXT)
NATIVE_TIMING_LIB = libNativeTiming$(NATIVE_EXT)

# Native helper library used by the performance tests.
bin/Test$(CONFIGURATION)/$(NATIVE_TIMING_LIB): tests/NativeTiming/timing.c $(wildcard $(JI_JDK_INCLUDE_PATHS)/jni.h)
	mkdir -p `dirname "$@"`
	gcc -g -shared -m64 -fPIC -o $@ $< $(JI_JDK_INCLUDE_PATHS:%=-I%)

# Usage: $(call TestAssemblyTemplate,assembly-basename)
define TestAssemblyTemplate
bin/Test$$(CONFIGURATION)/$(1)-Tests.dll: $(wildcard src/$(1)/*/*.cs src/$(1)/Test*/*/*.cs)
	$$(MSBUILD) $$(MSBUILD_FLAGS)
	touch $$@
endef # TestAssemblyTemplate

$(eval $(call TestAssemblyTemplate,Java.Interop))
$(eval $(call TestAssemblyTemplate,Java.Interop.Dynamic))
$(eval $(call TestAssemblyTemplate,Java.Interop.Export))
$(eval $(call TestAssemblyTemplate,Java.Interop.Tools.JavaCallableWrappers))

bin/Test$(CONFIGURATION)/Java.Interop-PerformanceTests.dll: $(wildcard tests/Java.Interop-PerformanceTests/*.cs) bin/Test$(CONFIGURATION)/$(NATIVE_TIMING_LIB)
	$(MSBUILD) $(MSBUILD_FLAGS)
	touch $@

bin/Test$(CONFIGURATION)/Android.Interop-Tests.dll: $(wildcard src/Android.Interop/*/*.cs src/Android.Interop/Tests/*/*.cs)
	$(MSBUILD) $(MSBUILD_FLAGS)
	touch $@

bin/$(CONFIGURATION)/Java.Interop.dll: $(wildcard src/Java.Interop/*/*.cs) src/Java.Interop/Java.Interop.csproj
	$(MSBUILD) $(if $(V),/v:diag,) /p:Configuration=$(CONFIGURATION) $(if $(SNK),"/p:AssemblyOriginatorKeyFile=$(SNK)",)

# Assemblies preloaded into the interactive `csharp` shell.
CSHARP_REFS = \
	bin/$(CONFIGURATION)/Java.Interop.dll \
	bin/$(CONFIGURATION)/Java.Interop.Export.dll \
	bin/$(CONFIGURATION)/Java.Runtime.Environment.dll \
	bin/Test$(CONFIGURATION)/TestJVM.dll \
	$(PTESTS) \
	$(TESTS)

shell:
	MONO_TRACE_LISTENER=Console.Out \
	MONO_OPTIONS=--debug=casts csharp $(patsubst %,-r:%,$(CSHARP_REFS))

# $(call RUN_TEST,filename,log-lref?)
define RUN_TEST
	$(MSBUILD) $(MSBUILD_FLAGS) build-tools/scripts/RunNUnitTests.targets /p:TestAssembly=$(1) || r=1;
endef

run-tests: $(TESTS) bin/Test$(CONFIGURATION)/$(JAVA_INTEROP_LIB)
	r=0; \
	$(foreach t,$(TESTS), $(call RUN_TEST,$(t),1)) \
	exit $$r;

run-ptests: $(PTESTS) bin/Test$(CONFIGURATION)/$(JAVA_INTEROP_LIB)
	r=0; \
	$(foreach t,$(PTESTS), $(call RUN_TEST,$(t))) \
	exit $$r;

run-java-source-utils-tests:
	$(MSBUILD) $(MSBUILD_FLAGS) tools/java-source-utils/java-source-utils.csproj /t:RunTests

bin/Test$(CONFIGURATION)/$(JAVA_INTEROP_LIB): bin/$(CONFIGURATION)/$(JAVA_INTEROP_LIB)
	cp $< $@

JRE_DLL_CONFIG=bin/$(CONFIGURATION)/Java.Runtime.Environment.dll.config
$(JRE_DLL_CONFIG): src/Java.Runtime.Environment/Java.Runtime.Environment.csproj
	$(MSBUILD) $(MSBUILD_FLAGS) $<

# $(call run-jnimarshalmethod-gen,assembly,extra-args)
define run-jnimarshalmethod-gen
	MONO_TRACE_LISTENER=Console.Out \
	$(RUNTIME) bin/$(CONFIGURATION)/jnimarshalmethod-gen.exe -v --jvm "$(JI_JVM_PATH)" -L "$(JI_MONO_LIB_PATH)mono/4.5" -L "$(JI_MONO_LIB_PATH)mono/4.5/Facades" $(2) $(1)
endef

run-test-jnimarshal: bin/Test$(CONFIGURATION)/Java.Interop.Export-Tests.dll bin/Test$(CONFIGURATION)/$(JAVA_INTEROP_LIB) $(JRE_DLL_CONFIG)
	mkdir -p test-jni-output
	$(call run-jnimarshalmethod-gen,"$<",-f -o test-jni-output --keeptemp)
	(test -f test-jni-output/$(notdir $<) && test -f test-jni-output/Java.Interop.Export-Tests-JniMarshalMethods.dll) || { echo "jnimarshalmethod-gen did not create the expected assemblies in the test-jni-output directory"; exit 1; }
	$(call run-jnimarshalmethod-gen,"$<")
	$(call RUN_TEST,$<)

bin/Test$(CONFIGURATION)/generator.exe: bin/$(CONFIGURATION)/generator.exe
	cp $<* `dirname "$@"`

# Copy regenerated `generator` test output back over the checked-in
# expected files after a test run.
update-test-generator-nunit:
	-$(MAKE) run-tests TESTS=bin/Test$(CONFIGURATION)/generator-Tests.dll
	for f in `find tests/generator-Tests/expected -name \*.cs` ; do \
		source=`echo $$f | sed 's#^tests/generator-Tests/expected#bin/Test$(CONFIGURATION)/out#'` ; \
		if [ -f "$$source" ]; then \
			cp -f "$$source" "$$f" ; \
		fi; \
	done
	for source in `find bin/Test$(CONFIGURATION)/out.ji -type f` ; do \
		f=`echo $$source | sed 's#^bin/Test$(CONFIGURATION)/out.ji#tests/generator-Tests/expected.ji#'` ; \
		mkdir -p `dirname $$f`; \
		cp -f "$$source" "$$f" ; \
	done
| xamarin/java.interop |
<|start_filename|>NewLife.Redis/RedisDelayQueue.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using NewLife.Caching.Common;
using NewLife.Log;
#if !NET40
using TaskEx = System.Threading.Tasks.Task;
#endif
namespace NewLife.Caching
{
/// <summary>Redis delay queue.</summary>
/// <remarks>
/// Delayed Redis queue: each produce operation costs 1 Redis call, each consume costs 4.
/// Backed by a sorted set whose score is the message's absolute due time
/// (UTC now + delay, converted to an integer via ToInt() — presumably Unix seconds).
/// </remarks>
public class RedisDelayQueue<T> : QueueBase, IProducerConsumer<T>
{
    #region 属性
    /// <summary>Interval, in seconds, between transfers of due delayed messages to the main queue. Default 10s.</summary>
    public Int32 TransferInterval { get; set; } = 10;

    /// <summary>Number of items in the queue.</summary>
    public Int32 Count => _sort?.Count ?? 0;

    /// <summary>Whether the queue is empty.</summary>
    public Boolean IsEmpty => Count == 0;

    /// <summary>Default delay, in seconds, used by the batch Add. Default 60 seconds.</summary>
    public Int32 Delay { get; set; } = 60;

    // Sorted set backing the queue; member = message, score = due time.
    private readonly RedisSortedSet<T> _sort;
    #endregion

    #region 构造
    /// <summary>Instantiate a delay queue.</summary>
    /// <param name="redis">Redis instance hosting the queue.</param>
    /// <param name="key">Redis key of the backing sorted set.</param>
    public RedisDelayQueue(Redis redis, String key) : base(redis, key)
    {
        _sort = new RedisSortedSet<T>(redis, key);
    }
    #endregion

    #region 核心方法
    /// <summary>Add a delayed message.</summary>
    /// <param name="value">Message payload.</param>
    /// <param name="delay">Delay, in seconds, before the message becomes due.</param>
    /// <returns>Result of the last sorted-set add attempt.</returns>
    public Int32 Add(T value, Int32 delay)
    {
        using var span = Redis.Tracer?.NewSpan($"redismq:AddDelay:{TraceName}", value);

        // Due time: UTC now plus the requested delay.
        var target = DateTime.Now.ToUniversalTime().ToInt() + delay;
        var rs = 0;
        for (var i = 0; i <= RetryTimesWhenSendFailed; i++)
        {
            // Number of members added to the sorted set, excluding members
            // that already existed and only had their score updated.
            rs = _sort.Add(value, target);
            if (rs >= 0) return rs;

            span?.SetError(new RedisException($"发布到队列[{Topic}]失败!"), null);

            if (i < RetryTimesWhenSendFailed) Thread.Sleep(RetryIntervalWhenSendFailed);
        }

        ValidWhenSendFailed(span);

        return rs;
    }

    /// <summary>Batch produce, using the default <see cref="Delay"/> for every message.</summary>
    /// <param name="values">Message payloads.</param>
    /// <returns>Result of the last sorted-set add attempt; 0 when <paramref name="values"/> is empty.</returns>
    public Int32 Add(params T[] values)
    {
        if (values == null || values.Length == 0) return 0;

        using var span = Redis.Tracer?.NewSpan($"redismq:AddDelay:{TraceName}", values);

        var target = DateTime.Now.ToUniversalTime().ToInt() + Delay;
        var rs = 0;
        for (var i = 0; i <= RetryTimesWhenSendFailed; i++)
        {
            rs = _sort.Add(values, target);
            if (rs > 0) return rs;

            span?.SetError(new RedisException($"发布到队列[{Topic}]失败!"), null);

            if (i < RetryTimesWhenSendFailed) Thread.Sleep(RetryIntervalWhenSendFailed);
        }

        ValidWhenSendFailed(span);

        return rs;
    }

    /// <summary>Remove an item.</summary>
    /// <param name="value">Item to remove.</param>
    /// <returns>Number of removed members.</returns>
    public Int32 Remove(T value) => _sort.Remove(value);

    /// <summary>Take one due message (polls once per second).</summary>
    /// <param name="timeout">Timeout in seconds; per the original contract, 0 blocks and a negative value returns immediately.
    /// NOTE(review): the implementation maps 0 to a 60-second maximum wait rather than blocking forever.</param>
    /// <returns>The message, or default when none became due within the timeout.</returns>
    public T TakeOne(Int32 timeout = 0)
    {
        //RetryAck();

        // Maximum wait
        if (timeout == 0) timeout = 60;

        while (true)
        {
            // Fetch the earliest message whose due time has passed.
            var score = DateTime.Now.ToUniversalTime().ToInt();
            var rs = _sort.RangeByScore(0, score, 0, 1);
            if (rs != null && rs.Length > 0 && TryPop(rs[0])) return rs[0];

            // Should we keep waiting?
            if (timeout <= 0) break;
            Thread.Sleep(1000);
            timeout--;
        }

        return default;
    }

    /// <summary>Asynchronously take one due message (polls once per second).</summary>
    /// <param name="timeout">Timeout in seconds; per the original contract, 0 blocks and a negative value returns immediately.
    /// NOTE(review): the implementation maps 0 to a 60-second maximum wait rather than blocking forever.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The message, or default when none became due within the timeout.</returns>
    public async Task<T> TakeOneAsync(Int32 timeout = 0, CancellationToken cancellationToken = default)
    {
        //RetryAck();

        // Maximum wait
        if (timeout == 0) timeout = 60;

        while (true)
        {
            // Fetch the earliest message whose due time has passed.
            var score = DateTime.Now.ToUniversalTime().ToInt();
            var rs = await _sort.RangeByScoreAsync(0, score, 0, 1, cancellationToken);
            if (rs != null && rs.Length > 0 && TryPop(rs[0])) return rs[0];

            // Should we keep waiting?
            if (timeout <= 0) break;
            await TaskEx.Delay(1000, cancellationToken);
            timeout--;
        }

        return default;
    }

    /// <summary>Asynchronous consume (explicit interface implementation, no cancellation token).</summary>
    /// <param name="timeout">Timeout in seconds; 0 blocks (capped, see <see cref="TakeOneAsync(Int32, CancellationToken)"/>); negative returns immediately.</param>
    /// <returns></returns>
    Task<T> IProducerConsumer<T>.TakeOneAsync(Int32 timeout) => TakeOneAsync(timeout, default);

    /// <summary>Take a batch of due messages (non-blocking).</summary>
    /// <param name="count">Maximum number of messages to take.</param>
    /// <returns>Messages this caller successfully claimed.</returns>
    public IEnumerable<T> Take(Int32 count = 1)
    {
        if (count <= 0) yield break;

        //RetryAck();

        var score = DateTime.Now.ToUniversalTime().ToInt();
        var rs = _sort.RangeByScore(0, score, 0, count);
        if (rs == null || rs.Length == 0) yield break;

        foreach (var item in rs)
        {
            // Compete for the message
            if (TryPop(item)) yield return item;
        }
    }

    /// <summary>Compete to consume: only the one thread that successfully removes the entry wins it.
    /// (The original design also backed the message up to an Ack queue; that code is currently disabled below.)</summary>
    /// <param name="value">Message to claim.</param>
    /// <returns>True when this caller won the message.</returns>
    private Boolean TryPop(T value)
    {
        //if (_ack != null)
        //{
        //    // 先备份,再删除。备份到Ack队列
        //    var score = DateTime.Now.ToInt() + RetryInterval;
        //    _ack.Add(value, score);
        //}

        // Removal doubles as the claim: Remove > 0 means we got it.
        return _sort.Remove(value) > 0;
    }

    /// <summary>Acknowledge deletion. Not supported by the delay queue; always returns -1.</summary>
    /// <param name="keys"></param>
    /// <returns></returns>
    public Int32 Acknowledge(params T[] keys) => -1;

    /// <summary>Acknowledge deletion. Not supported by the delay queue; always returns -1.</summary>
    /// <param name="keys"></param>
    /// <returns></returns>
    Int32 IProducerConsumer<T>.Acknowledge(params String[] keys) => -1;
    #endregion

    #region 消息交换
    /// <summary>Asynchronously transfers due messages to the target queue.</summary>
    /// <param name="queue">Target queue.</param>
    /// <param name="onException">Exception handler.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns></returns>
    public async Task TransferAsync(IProducerConsumer<T> queue, Action<Exception> onException = null, CancellationToken cancellationToken = default)
    {
        // Break the performance-tracing call chain before the big loop.
        DefaultSpan.Current = null;

        // 超时时间,用于阻塞等待
        //var timeout = Redis.Timeout / 1000 - 1;
        //var topic = Key;
        var tracer = Redis.Tracer;

        while (!cancellationToken.IsCancellationRequested)
        {
            ISpan span = null;
            try
            {
                // Fetch up to 10 due messages.
                var score = DateTime.Now.ToUniversalTime().ToInt();
                var msgs = await _sort.RangeByScoreAsync(0, score, 0, 10, cancellationToken);
                if (msgs != null && msgs.Length > 0)
                {
                    // Removed messages go straight to the target queue; no Ack step needed.
                    span = tracer?.NewSpan($"redismq:Transfer:{TraceName}", msgs);

                    // Remove one by one; competing threads may win some of them.
                    var list = new List<T>();
                    for (var i = 0; i < msgs.Length; i++)
                    {
                        if (Remove(msgs[i]) > 0) list.Add(msgs[i]);
                    }

                    // Transfer the messages we won.
                    if (list.Count > 0) queue.Add(list.ToArray());
                }
                else
                {
                    // No messages; rest a while.
                    await TaskEx.Delay(TransferInterval * 1000, cancellationToken);
                }
            }
            catch (ThreadAbortException) { break; }
            catch (ThreadInterruptedException) { break; }
            catch (Exception ex)
            {
                span?.SetError(ex, null);

                onException?.Invoke(ex);
            }
            finally
            {
                span?.Dispose();
            }
        }
    }
    #endregion
}
}
<|start_filename|>NewLife.Redis/QueueExtensions.cs<|end_filename|>
#if !NET40
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using NewLife.Caching.Models;
using NewLife.Log;
using NewLife.Serialization;
namespace NewLife.Caching
{
/// <summary>Extension methods for <see cref="IProducerConsumer{T}"/> consume loops.</summary>
public static class QueueExtensions
{
    #region Consume loops
    /// <summary>Queue consume loop; each message is automatically acknowledged after successful processing.</summary>
    /// <typeparam name="T">Message type.</typeparam>
    /// <param name="queue">Queue to consume.</param>
    /// <param name="onMessage">Message handler. If it throws, the message returns to the queue after a delay.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <param name="log">Log object.</param>
    /// <param name="idField">Name of the message-identifier field, used for error-retry bookkeeping.</param>
    /// <returns></returns>
    public static async Task ConsumeAsync<T>(this IProducerConsumer<String> queue, Func<T, String, CancellationToken, Task> onMessage, CancellationToken cancellationToken = default, ILog log = null, String idField = null)
    {
        await Task.Yield();

        // Break the performance-tracing call chain before entering the big loop.
        DefaultSpan.Current = null;

        // Topic name; fall back to the queue type name.
        var topic = (queue as RedisBase).Key;
        if (topic.IsNullOrEmpty()) topic = queue.GetType().Name;

        var rds = (queue as RedisBase).Redis;
        var tracer = rds.Tracer;
        var errLog = log ?? XTrace.Log;

        // Candidate id fields, with any caller-supplied field checked first.
        var ids = new List<String> { "Id", "guid", "OrderId", "Code" };
        if (!idField.IsNullOrEmpty() && !ids.Contains(idField)) ids.Insert(0, idField);

        // Timeout used for blocking waits.
        var timeout = rds.Timeout / 1000 - 1;
        while (!cancellationToken.IsCancellationRequested)
        {
            var msgId = "";
            var mqMsg = "";
            ISpan span = null;
            try
            {
                // Blocking asynchronous consume.
                mqMsg = await queue.TakeOneAsync(timeout);
                if (mqMsg != null)
                {
                    // Tracing instrumentation.
                    span = tracer?.NewSpan($"redismq:{topic}", mqMsg);
                    log?.Info($"[{topic}]消息内容为:{mqMsg}");

                    // Decode.
                    var dic = JsonParser.Decode(mqMsg);
                    var msg = JsonHelper.Convert<T>(dic);
                    // FIX: span is null when no tracer is configured; guard the Detach call.
                    if (dic.TryGetValue("traceParent", out var tp)) span?.Detach(tp + "");

                    // Message identifier: first non-empty candidate field wins.
                    foreach (var item in ids)
                    {
                        if (dic.TryGetValue(item, out var id))
                        {
                            msgId = id + "";
                            if (!msgId.IsNullOrEmpty()) break;
                        }
                    }

                    // Process the message.
                    await onMessage(msg, mqMsg, cancellationToken);

                    // Acknowledge the message.
                    queue.Acknowledge(mqMsg);
                }
                else
                {
                    // No message; rest a while.
                    await Task.Delay(1000, cancellationToken);
                }
            }
            catch (ThreadAbortException) { break; }
            catch (ThreadInterruptedException) { break; }
            catch (Exception ex)
            {
                span?.SetError(ex, null);

                // Discard the message after more than 10 processing errors.
                if (!mqMsg.IsNullOrEmpty())
                {
                    if (msgId.IsNullOrEmpty()) msgId = mqMsg.MD5();
                    errLog?.Error("[{0}/{1}]消息处理异常:{2} {3}", topic, msgId, mqMsg, ex);

                    var key = $"{topic}:Error:{msgId}";

                    var rs = rds.Increment(key, 1);
                    if (rs < 10)
                        rds.SetExpire(key, TimeSpan.FromHours(24));
                    else
                    {
                        queue.Acknowledge(mqMsg);

                        errLog?.Error("[{0}/{1}]错误过多,删除消息", topic, msgId);
                    }
                }
            }
            finally
            {
                span?.Dispose();
            }
        }
    }

    /// <summary>Queue consume loop; each message is automatically acknowledged after successful processing.</summary>
    /// <typeparam name="T">Message type.</typeparam>
    /// <param name="queue">Queue to consume.</param>
    /// <param name="onMessage">Message handler. If it throws, the message returns to the queue after a delay.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <param name="log">Log object.</param>
    /// <param name="idField">Name of the message-identifier field, used for error-retry bookkeeping.</param>
    /// <returns></returns>
    public static async Task ConsumeAsync<T>(this IProducerConsumer<String> queue, Action<T> onMessage, CancellationToken cancellationToken = default, ILog log = null, String idField = null)
    {
        await ConsumeAsync<T>(queue, (m, k, t) => { onMessage(m); return Task.FromResult(0); }, cancellationToken, log, idField);
    }

    /// <summary>Queue consume loop; each message is automatically acknowledged after successful processing.</summary>
    /// <typeparam name="T">Message type.</typeparam>
    /// <param name="queue">Queue to consume.</param>
    /// <param name="onMessage">Message handler. If it throws, the message returns to the queue after a delay.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <param name="log">Log object.</param>
    /// <param name="idField">Name of the message-identifier field, used for error-retry bookkeeping.</param>
    /// <returns></returns>
    public static async Task ConsumeAsync<T>(this RedisReliableQueue<String> queue, Func<T, String, CancellationToken, Task> onMessage, CancellationToken cancellationToken = default, ILog log = null, String idField = null)
    {
        await Task.Yield();

        // Break the performance-tracing call chain before entering the big loop.
        DefaultSpan.Current = null;

        // Topic name; fall back to the queue type name.
        var topic = queue.Key;
        if (topic.IsNullOrEmpty()) topic = queue.GetType().Name;

        var rds = queue.Redis;
        var tracer = rds.Tracer;
        var errLog = log ?? XTrace.Log;

        // Backup redis instance used for fault-tolerant error/dedup counters.
        // FIX: the password assignment was corrupted ("<PASSWORD>"); restored
        // to rds.Password, consistent with the identical construction below.
        var rds2 = new FullRedis
        {
            Name = rds.Name + "Bak",
            Server = rds.Server,
            UserName = rds.UserName,
            Password = rds.Password,
            Db = rds.Db == 15 ? 0 : (rds.Db + 1),
            Tracer = rds.Tracer,
        };

        // Candidate id fields, with any caller-supplied field checked first.
        var ids = new List<String> { "Id", "guid", "OrderId", "Code" };
        if (!idField.IsNullOrEmpty() && !ids.Contains(idField)) ids.Insert(0, idField);

        // Timeout used for blocking waits.
        var timeout = rds.Timeout / 1000 - 1;
        while (!cancellationToken.IsCancellationRequested)
        {
            var msgId = "";
            var mqMsg = "";
            ISpan span = null;
            try
            {
                // Blocking asynchronous consume.
                mqMsg = await queue.TakeOneAsync(timeout, cancellationToken);
                if (mqMsg != null)
                {
                    // Tracing instrumentation.
                    span = tracer?.NewSpan($"redismq:{topic}", mqMsg);
                    log?.Info($"[{topic}]消息内容为:{mqMsg}");

                    // Decode.
                    var dic = JsonParser.Decode(mqMsg);
                    var msg = JsonHelper.Convert<T>(dic);
                    // FIX: span is null when no tracer is configured; guard the Detach call.
                    if (dic.TryGetValue("traceParent", out var tp)) span?.Detach(tp + "");

                    // Message identifier: first non-empty candidate field wins.
                    foreach (var item in ids)
                    {
                        if (dic.TryGetValue(item, out var id))
                        {
                            msgId = id + "";
                            if (!msgId.IsNullOrEmpty()) break;
                        }
                    }

                    // Process the message.
                    await onMessage(msg, mqMsg, cancellationToken);

                    // Acknowledge the message.
                    queue.Acknowledge(mqMsg);
                }
                else
                {
                    // No message; rest a while.
                    await Task.Delay(1000, cancellationToken);
                }
            }
            catch (ThreadAbortException) { break; }
            catch (ThreadInterruptedException) { break; }
            catch (Exception ex)
            {
                span?.SetError(ex, null);

                // Discard the message after more than 10 processing errors.
                if (!mqMsg.IsNullOrEmpty())
                {
                    if (msgId.IsNullOrEmpty()) msgId = mqMsg.MD5();
                    errLog?.Error("[{0}/{1}]消息处理异常:{2} {3}", topic, msgId, mqMsg, ex);

                    var key = $"{topic}:Error:{msgId}";

                    var rs = rds2.Increment(key, 1);
                    if (rs < 10)
                        rds2.SetExpire(key, TimeSpan.FromHours(24));
                    else
                    {
                        queue.Acknowledge(mqMsg);

                        errLog?.Error("[{0}/{1}]错误过多,删除消息", topic, msgId);
                    }
                }
            }
            finally
            {
                span?.Dispose();
            }
        }
    }

    /// <summary>Queue consume loop; each message is automatically acknowledged after successful processing.</summary>
    /// <typeparam name="T">Message type.</typeparam>
    /// <param name="queue">Queue to consume.</param>
    /// <param name="onMessage">Message handler. If it throws, the message returns to the queue after a delay.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <param name="log">Log object.</param>
    /// <param name="idField">Name of the message-identifier field, used for error-retry bookkeeping.</param>
    /// <returns></returns>
    public static async Task ConsumeAsync<T>(this RedisReliableQueue<String> queue, Action<T> onMessage, CancellationToken cancellationToken = default, ILog log = null, String idField = null)
    {
        await ConsumeAsync<T>(queue, (m, k, t) => { onMessage(m); return Task.FromResult(0); }, cancellationToken, log, idField);
    }

    /// <summary>Queue consume loop over raw message strings; each message is automatically acknowledged after successful processing.</summary>
    /// <typeparam name="T">Unused; kept for interface/backward compatibility.</typeparam>
    /// <param name="queue">Queue to consume.</param>
    /// <param name="onMessage">Message handler. If it throws, the message returns to the queue after a delay.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <param name="log">Log object.</param>
    /// <returns></returns>
    public static async Task ConsumeAsync<T>(this RedisReliableQueue<String> queue, Action<String> onMessage, CancellationToken cancellationToken = default, ILog log = null)
    {
        await Task.Yield();

        // Break the performance-tracing call chain before entering the big loop.
        DefaultSpan.Current = null;

        // Topic name; fall back to the queue type name.
        var topic = queue.Key;
        if (topic.IsNullOrEmpty()) topic = queue.GetType().Name;

        var rds = queue.Redis;
        var tracer = rds.Tracer;
        var errLog = log ?? XTrace.Log;

        // Backup redis instance used for fault-tolerant error/dedup counters.
        var rds2 = new FullRedis
        {
            Name = rds.Name + "Bak",
            Server = rds.Server,
            UserName = rds.UserName,
            Password = rds.Password,
            Db = rds.Db == 15 ? 0 : (rds.Db + 1),
            Tracer = rds.Tracer,
        };

        // Timeout used for blocking waits.
        var timeout = rds.Timeout / 1000 - 1;
        while (!cancellationToken.IsCancellationRequested)
        {
            var mqMsg = "";
            ISpan span = null;
            try
            {
                // Blocking asynchronous consume.
                mqMsg = await queue.TakeOneAsync(timeout, cancellationToken);
                if (mqMsg != null)
                {
                    // Tracing instrumentation.
                    span = tracer?.NewSpan($"redismq:{topic}", mqMsg);
                    log?.Info($"[{topic}]消息内容为:{mqMsg}");

                    // Process the message.
                    onMessage(mqMsg);

                    // Acknowledge the message.
                    queue.Acknowledge(mqMsg);
                }
                else
                {
                    // No message; rest a while.
                    await Task.Delay(1000, cancellationToken);
                }
            }
            catch (ThreadAbortException) { break; }
            catch (ThreadInterruptedException) { break; }
            catch (Exception ex)
            {
                span?.SetError(ex, null);

                // Discard the message after more than 10 processing errors.
                if (!mqMsg.IsNullOrEmpty())
                {
                    var msgId = mqMsg.MD5();
                    errLog?.Error("[{0}/{1}]消息处理异常:{2} {3}", topic, msgId, mqMsg, ex);

                    var key = $"{topic}:Error:{msgId}";

                    var rs = rds2.Increment(key, 1);
                    if (rs < 10)
                        rds2.SetExpire(key, TimeSpan.FromHours(24));
                    else
                    {
                        queue.Acknowledge(mqMsg);

                        errLog?.Error("[{0}/{1}]错误过多,删除消息", topic, msgId);
                    }
                }
            }
            finally
            {
                span?.Dispose();
            }
        }
    }

    /// <summary>Stream consume loop; each message is automatically acknowledged after successful processing.</summary>
    /// <typeparam name="T">Message type.</typeparam>
    /// <param name="queue">Stream to consume.</param>
    /// <param name="onMessage">Message handler. If it throws, the message returns to the queue after a delay.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <param name="log">Log object.</param>
    /// <returns></returns>
    public static async Task ConsumeAsync<T>(this RedisStream<String> queue, Func<T, Message, CancellationToken, Task> onMessage, CancellationToken cancellationToken = default, ILog log = null)
    {
        await Task.Yield();

        // Break the performance-tracing call chain before entering the big loop.
        DefaultSpan.Current = null;

        // Automatically create the consumer group when it does not exist yet.
        var gis = queue.GetGroups();
        if (gis == null || !queue.Group.IsNullOrEmpty() && !gis.Any(e => e.Name.EqualIgnoreCase(queue.Group))) queue.GroupCreate(queue.Group);

        // Topic name; fall back to the queue type name.
        var topic = queue.Key;
        if (topic.IsNullOrEmpty()) topic = queue.GetType().Name;

        var rds = queue.Redis;
        var tracer = rds.Tracer;
        var errLog = log ?? XTrace.Log;

        // Timeout used for blocking waits.
        var timeout = rds.Timeout / 1000 - 1;
        while (!cancellationToken.IsCancellationRequested)
        {
            Message mqMsg = null;
            ISpan span = null;
            try
            {
                // Blocking asynchronous consume.
                mqMsg = await queue.TakeMessageAsync(timeout, cancellationToken);
                if (mqMsg != null)
                {
                    // Tracing instrumentation.
                    span = tracer?.NewSpan($"redismq:{topic}", mqMsg);
                    log?.Info($"[{topic}]消息内容为:{mqMsg}");

                    // Attach the upstream trace id when the body carries one.
                    var bodys = mqMsg.Body;
                    for (var i = 0; i < bodys.Length; i++)
                    {
                        // FIX: span is null when no tracer is configured; guard the Detach call.
                        if (bodys[i].EqualIgnoreCase("traceParent") && i + 1 < bodys.Length) span?.Detach(bodys[i + 1]);
                    }

                    // Decode.
                    var msg = mqMsg.GetBody<T>();

                    // Process the message.
                    await onMessage(msg, mqMsg, cancellationToken);

                    // Acknowledge the message.
                    queue.Acknowledge(mqMsg.Id);
                }
                else
                {
                    // No message; rest a while.
                    await Task.Delay(1000, cancellationToken);
                }
            }
            catch (ThreadAbortException) { break; }
            catch (ThreadInterruptedException) { break; }
            catch (Exception ex)
            {
                span?.SetError(ex, null);

                errLog?.Error("[{0}/{1}]消息处理异常:{2} {3}", topic, mqMsg?.Id, mqMsg?.ToJson(), ex);
            }
            finally
            {
                span?.Dispose();
            }
        }
    }

    /// <summary>Stream consume loop; each message is automatically acknowledged after successful processing.</summary>
    /// <typeparam name="T">Message type.</typeparam>
    /// <param name="queue">Stream to consume.</param>
    /// <param name="onMessage">Message handler. If it throws, the message returns to the queue after a delay.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <param name="log">Log object.</param>
    /// <returns></returns>
    public static async Task ConsumeAsync<T>(this RedisStream<String> queue, Action<T> onMessage, CancellationToken cancellationToken = default, ILog log = null)
    {
        await ConsumeAsync<T>(queue, (m, k, t) => { onMessage(m); return Task.FromResult(0); }, cancellationToken, log);
    }
    #endregion
}
}
#endif | NewLifeX/NewLife.Redis |
<|start_filename|>test/fixtures/project/Brocfile.js<|end_filename|>
// Test fixture: a Brocfile may export a plain string naming the tree (directory) to build.
module.exports = 'subdir';
<|start_filename|>test/fixtures/project-esm/subdir-module.js<|end_filename|>
// Test fixture helper: the tree name imported by the ESM Brocfile.
export default 'subdir';
<|start_filename|>test/fixtures/project-esm/Brocfile.js<|end_filename|>
// Test fixture: an ESM Brocfile whose default export is a function
// returning the tree name imported from a sibling module.
import subdir from './subdir-module';

// Re-export this just to test esm
export default () => subdir;
<|start_filename|>test/fixtures/project/Brocfile-Function.js<|end_filename|>
// Test fixture: a Brocfile may export a function returning the tree to build.
module.exports = () => 'subdir';
| jamesgeorge007/broccoli |
<|start_filename|>custom-rules.js<|end_filename|>
/**
 * User-customizable status rules. Each monitoring variable maps its latest
 * raw value to one of three states: "stable", "unstable" or "dangerous".
 */
var customRules = {
    monitoringVariables: {
        cpuOS: {
            status: function(value) {
                /* Value is a % of use */
                if (value < 70) return "stable";
                else if (value < 90) return "unstable";
                else return "dangerous";
            }
        },
        cpuProcess: {
            status: function(value) {
                /* Value is a % of use */
                if (value < 50) return "stable";
                else if (value < 70) return "unstable";
                else return "dangerous";
            }
        },
        requestsMeanTime: {
            status: function(value) {
                /* Brings the average request time in milliseconds */
                if (value < 5000) return "stable";
                else if (value < 10000) return "unstable";
                else return "dangerous";
            }
        },
        requests: {
            status: function(value) {
                /* Total of concurrent requests */
                if (value < 1000) return "stable";
                else if (value < 3000) return "unstable";
                else return "dangerous";
            }
        },
        requestsPerHour: {
            status: function(value) {
                /* Total of requests counted in the current hour */
                if (value < 60000) return "stable";
                else if (value < 180000) return "unstable";
                else return "dangerous";
            }
        },
        kbytesPerMinute: {
            status: function(value) {
                /* Downloaded kbytes counted in the current minute */
                if (value < 10240) return "stable";
                else if (value < 102400) return "unstable";
                else return "dangerous";
            }
        },
        disk: {
            status: function(value) {
                /* Value is a % of use */
                if (value < 80) return "stable";
                else if (value < 90) return "unstable";
                else return "dangerous";
            }
        },
        uptimeOS: {
            status: function(value) {
                /*
                In this case, value is a JSON with values of the uptime period, as shown below:
                {
                    years: integer,
                    months: integer,
                    days: integer,
                    hours: integer,
                    minutes: integer,
                    seconds: integer
                }
                */
                return "stable";
            }
        },
        uptimeProcess: {
            status: function(value) {
                /* Same JSON shape as uptimeOS; uptime alone is never alarming. */
                return "stable";
            }
        },
        residentSetSize: {
            status: function(value) {
                /* Value is the resident set size in MB; no threshold by default. */
                return "stable";
            }
        },
        heap: {
            status: function(value) {
                /*
                Value contains a table (array of arrays), where index 0 is associated with the value used and index 1 is the total value.
                Within each, there is an array with the last 3 values obtained, with index 2 being the most recently obtained.
                That is, value[0][2] stores the value of the heap used and value[1][2] stores its total.
                */
                var used = ( value[0][2] / value[1][2] );
                if ( used < 0.8 ) return "stable";
                else if ( used < 0.9 ) return "unstable";
                else return "dangerous";
            }
        },
        processMemory: {
            status: function(value) {
                /*
                Value is a json that contains the memory used by NodeJS and the total memory of the operating system, as shown in the following example:
                {
                    used: 50,
                    total: 8000
                }
                */
                // BUG FIX: used/total is a 0..1 ratio but the thresholds are
                // percentages; without the * 100 the status was always "stable".
                var percentage = (value.used / value.total) * 100;
                if (percentage < 50) return "stable";
                else if (percentage < 70) return "unstable";
                else return "dangerous";
            }
        }
    }
};
<|start_filename|>infrastructure/requests.js<|end_filename|>
const fs = require('fs');
// Infrastructure helper that reports how many connections the HTTP server
// currently has open. Results are delivered through a node-style callback.
var Requests = function() {
    /**
     * Query the server bound to the incoming request for its current
     * connection count.
     * @param request  incoming HTTP request (its socket links back to the server)
     * @param callback node-style callback: (error, count)
     */
    this.getStatus = function(request, callback) {
        var server = request.socket.server;
        // getConnections is already async and node-style; forward as-is.
        server.getConnections(callback);
    };
};
/* Export a single shared instance for the route layer. */
var obj = new Requests();
module.exports = {
    requests: obj
};
<|start_filename|>routes.js<|end_filename|>
"use strict";
const express = require('express');
const app = express();
const router = express.Router();
const properties = require('./public/js/properties').config;
const CpuOS = require('./model/models').CpuOS;
const CpuProcess = require('./model/models').CpuProcess;
const Requests = require('./model/models').Requests;
const Disk = require('./model/models').Disk;
const UptimeOS = require('./model/models').UptimeOS;
const UptimeProcess = require('./model/models').UptimeProcess;
const ResidentSetSize = require('./model/models').ResidentSetSize;
const Heap = require('./model/models').Heap;
const ProcessMemory = require('./model/models').ProcessMemory;
/* Models */
let cpuOS = new CpuOS();
let cpuProcess = new CpuProcess();
let requests = new Requests();
let disk = new Disk();
let uptimeOS = new UptimeOS();
let uptimeProcess = new UptimeProcess();
let residentSetSize = new ResidentSetSize();
let heap = new Heap();
let processMemory = new ProcessMemory();
/* Static files of Hey-Joe */
router.use(express.static(__dirname + '/public'));
/* Index */
router.get('/', function (req, res) {
res.sendFile(__dirname + '/public/index.html');
});
router.get('/js/custom-rules.js', function (req, res) {
res.sendFile(__dirname + '/custom-rules.js');
});
router.get('/api/' + properties.apiVersion + "/cpu/os", function(req,res) {
cpuOS.getStatus(req,function(error,status){
if (error) {
res.status(500);
} else {
res.json(status);
}
});
});
router.get('/api/' + properties.apiVersion + "/cpu/process", function(req,res) {
cpuProcess.getStatus(req,function(error,status){
if (error) {
res.status(500);
} else {
res.json(status);
}
});
});
router.get('/api/' + properties.apiVersion + "/requests/mean-time", function(req,res) {
requests.getRequestsMeanTime(function(error,status){
if (error) {
res.status(500);
} else {
res.json(status);
}
});
});
router.get('/api/' + properties.apiVersion + "/requests", function(req,res) {
requests.getStatus(req,function(error,status){
if (error) {
res.status(500);
} else {
res.json(status);
}
});
});
router.get('/api/' + properties.apiVersion + "/requests/hour", function(req,res) {
requests.getStatusPerHour(function(error,status){
if (error) {
res.status(500);
} else {
res.json(status);
}
});
});
router.get('/api/' + properties.apiVersion + "/kbytes/minute", function(req,res) {
requests.getKbytesPerMinute(function(error,status){
if (error) {
res.status(500);
} else {
res.json(status);
}
});
});
router.get('/api/' + properties.apiVersion + "/disk", function(req,res) {
disk.getStatus(req,function(error,status){
if (error) {
res.status(500);
} else {
res.json(status);
}
});
});
router.get('/api/' + properties.apiVersion + "/uptime/os", function(req,res) {
uptimeOS.getStatus(req,function(error,status){
if (error) {
res.status(500);
} else {
res.json(status);
}
});
});
router.get('/api/' + properties.apiVersion + "/uptime/process", function(req,res) {
uptimeProcess.getStatus(req,function(error,status){
if (error) {
res.status(500);
} else {
res.json(status);
}
});
});
router.get('/api/' + properties.apiVersion + "/memory/rss", function(req,res) {
residentSetSize.getStatus(req,function(error,status){
if (error) {
res.status(500);
} else {
res.json(status);
}
});
});
router.get('/api/' + properties.apiVersion + "/memory/heap", function(req,res) {
heap.getStatus(req,function(error,status){
if (error) {
res.status(500);
} else {
res.json(status);
}
});
});
router.get('/api/' + properties.apiVersion + "/memory/process", function(req,res) {
processMemory.getStatus(req,function(error,status){
if (error) {
res.status(500);
} else {
res.json(status);
}
});
});
module.exports = router;
<|start_filename|>repositories/repository.js<|end_filename|>
"use strict";
const low = require('lowdb');
const db = low('node_modules/hey-joe/data/db.json');
const properties = require('../public/js/properties').config;
const moment = require('../public/js/moment');
/* Init values */
db.defaults({
cpuOS: [],
cpuProcess: [],
requests: [],
disk: [],
requestsPerHour: [],
kbytesPerMinute: [],
requestsMeanTime: [],
uptimeOS: [],
uptimeProcess: [],
residentSetSize: [],
heap: [],
processMemory: []
}).write();
var Repository = function() {};
// Generic read/write helpers over the lowdb store. Each monitoring variable
// is an array with the newest sample at index 0.
Repository.prototype = {
    // Read one sample by index; missing entries are backfilled with 0 (or
    // with changeValue(0) when a shaping function is supplied).
    get: function(monitoringVariable,index,callback,changeValue) {
        var value = db.get(monitoringVariable + '[' + index + ']').value();
        if (value === undefined) {
            if (changeValue === undefined) {
                value = 0;
            } else {
                value = changeValue(0);
            }
        }
        callback(value);
    },
    // Collect the samples at every configured chart index. NOTE(review):
    // `get` invokes its callback synchronously, so `values` keeps the order
    // of chartDataIndexes and callback fires exactly once at the end.
    getAll: function(monitoringVariable,callback,changeValue) {
        let self = this;
        let chartDataIndexes = properties.monitoringVariables[monitoringVariable].chartDataIndexes;
        let values = [];
        chartDataIndexes.forEach(function(index){
            self.get(monitoringVariable,index,function(value){
                values.push(value);
                if (values.length === chartDataIndexes.length) {
                    callback(values);
                }
            },changeValue);
        });
    },
    // Store a new sample at the head, trim the history to the configured
    // size, then return the chart-ready series.
    getStatus: function(monitoringVariable,data,callback) {
        db.get(monitoringVariable).unshift(data).write();
        if ( db.get(monitoringVariable).size().value() > properties.monitoringVariables[monitoringVariable].totalNumberMonitoring ) {
            db.get(monitoringVariable).pop().write();
        }
        this.getAll(monitoringVariable,function(values){
            callback(values);
        });
    }
};
// Request-specific repository helpers; mixed into Repository via
// properties.extend() at the bottom of this file.
function RepositoryRequests() {}
RepositoryRequests.prototype = {
    /**
     * Count one request (when addsOneMore is truthy) into the bucket for the
     * current hour, creating the bucket and trimming history when needed.
     * BUG FIX: removed a leftover debug console.log that fired on every
     * counted request.
     */
    savePerHour: function(addsOneMore) {
        let requests = db.get('requestsPerHour[0]').value();
        let timestamp = moment().format('YYYY-MM-DD_HH');
        if ( (requests !== undefined) && (requests.timestamp === timestamp) ) {
            // Bucket for the current hour already exists; bump the counter.
            if (addsOneMore) {
                db.set('requestsPerHour[0].count', requests.count + 1).write();
            }
        } else {
            let newRequests = {
                timestamp: timestamp,
                count: addsOneMore ? 1 : 0
            };
            db.get('requestsPerHour').unshift(newRequests).write();
            // Keep only the configured number of hourly buckets.
            if ( db.get('requestsPerHour').size().value() > properties.monitoringVariables.requestsPerHour.totalNumberMonitoring ) {
                db.get('requestsPerHour').pop().write();
            }
        }
    },
    /** Latest hourly request counts (0-filled when history is missing). */
    getStatusPerHour(callback) {
        this.getAll('requestsPerHour',function(values){
            callback(values);
        },function(value){
            return { count: value };
        });
    },
    /** Accumulate downloaded kbytes into the bucket for the current minute. */
    saveKbytesPerMinute: function(kbytes) {
        let data = db.get('kbytesPerMinute[0]').value();
        let timestamp = moment().format('YYYY-MM-DD_HH:mm');
        if ( (data !== undefined) && (data.timestamp === timestamp) ) {
            db.set('kbytesPerMinute[0].total', data.total + kbytes).write();
        } else {
            let newData = {
                timestamp: timestamp,
                total: kbytes
            };
            db.get('kbytesPerMinute').unshift(newData).write();
            if ( db.get('kbytesPerMinute').size().value() > properties.monitoringVariables.kbytesPerMinute.totalNumberMonitoring ) {
                db.get('kbytesPerMinute').pop().write();
            }
        }
    },
    /** Latest per-minute kbyte totals (0-filled when history is missing). */
    getKbytesPerMinute(callback) {
        this.getAll('kbytesPerMinute',function(values){
            callback(values);
        },function(value){
            return { total: value };
        });
    },
    /**
     * Fold one request duration (ms) into the running min/mean/top stats.
     * BUG FIX: min and top are now tracked independently; the previous
     * else-if meant topTime was never updated when a sample also lowered
     * minTime (top could stay below min).
     */
    saveRequestsMeanTime(time) {
        let data = db.get('requestsMeanTime[0]').value();
        if (data === undefined) {
            data = {
                minTime: 0,
                meanTime: 0,
                topTime: 0,
                totalTime: 0,
                counter: 0
            };
        }
        data.counter = data.counter + 1;
        if ( (time < data.minTime) || (data.minTime === 0) ) {
            data.minTime = time;
        }
        if (time > data.topTime) {
            data.topTime = time;
        }
        data.totalTime = data.totalTime + time;
        data.meanTime = parseInt( data.totalTime / data.counter );
        db.set('requestsMeanTime[0]', data).write();
    },
    /** Current min/mean/top request time statistics. */
    getRequestsMeanTime(callback) {
        this.getAll('requestsMeanTime',function(values){
            callback(values);
        },function(value){
            return {
                minTime: value,
                meanTime: value,
                topTime: value,
                totalTime: value,
                counter: value
            };
        });
    }
};
/* RepositoryRequests inherits the generic helpers (get/getAll/getStatus). */
properties.extend(Repository, RepositoryRequests);
module.exports = {
    Repository: Repository,
    RepositoryRequests: RepositoryRequests
};
<|start_filename|>libraries/filter.js<|end_filename|>
"use strict";
const Requests = require('../model/models').Requests;
var requests = new Requests();
let filter = {};
filter.all = function all(req,res,callback) {
requests.markStart(req);
res.once('finish', function() {
let bytes = res._headers['content-length'] | 0;
requests.countKbytes(bytes);
requests.calcRequestTime(req);
});
callback(res);
};
module.exports = filter;
<|start_filename|>infrastructure/uptime.js<|end_filename|>
"use strict";
const os = require('os');
/**
 * Break a duration in seconds into calendar-ish units (365-day years,
 * 30-day months) and hand the resulting object to `callback`.
 * @param seconds  total duration in whole seconds
 * @param callback receives {years, months, days, hours, minutes, seconds}
 * @returns human-readable summary string (years/days/hours/minutes/seconds)
 */
function secondsToJson(seconds, callback) {
    let numyears = Math.floor(seconds / 31536000);
    let nummonths = Math.floor((seconds % 31536000) / 2592000);
    // BUG FIX: subtract the whole months (% 2592000) before computing days;
    // previously the days field double-counted the days already accounted
    // for by `months` (e.g. 40 days of uptime reported months=1, days=40).
    let numdays = Math.floor(((seconds % 31536000) % 2592000) / 86400);
    let numhours = Math.floor(((seconds % 31536000) % 86400) / 3600);
    let numminutes = Math.floor((((seconds % 31536000) % 86400) % 3600) / 60);
    let numseconds = (((seconds % 31536000) % 86400) % 3600) % 60;
    callback({
        years: numyears,
        months: nummonths,
        days: numdays,
        hours: numhours,
        minutes: numminutes,
        seconds: numseconds
    });
    return numyears + " years " + numdays + " days " + numhours + " hours " + numminutes + " minutes " + numseconds + " seconds";
}
var UptimeOS = function() {
    // Uptime of the operating system, split into calendar-ish units.
    this.getStatus = function(request, callback) {
        const elapsed = Math.floor(os.uptime());
        secondsToJson(elapsed, function(parts) {
            callback(undefined, parts);
        });
    };
};
var UptimeProcess = function() {
    // Uptime of the current NodeJS process, split into calendar-ish units.
    this.getStatus = function(request, callback) {
        const elapsed = Math.floor(process.uptime());
        secondsToJson(elapsed, function(parts) {
            callback(undefined, parts);
        });
    };
};
/* Shared singletons used by the route layer. */
var uptimeOS = new UptimeOS();
var uptimeProcess = new UptimeProcess();
module.exports = {
    uptimeOS: uptimeOS,
    uptimeProcess: uptimeProcess
};
<|start_filename|>public/js/scripts.js<|end_filename|>
// Monitoring variable descriptors, copied from properties.js on page load.
var monitoringVariables = [];
/* Show Monitoring Variables */
// Render one placeholder <li> box per monitoring variable; each box starts
// in the "loading" state and is filled in later by getMonitoringVariable().
function showMonitoringVariables() {
    monitoringVariables.forEach(function(variable){
        $("section.boxes ul.box").append(
            '<li id="' + variable.id + '" class="loading">' +
            ' <div class="title">' + variable.label + '</div>' +
            '<div class="status"><div class="loading"></div></div>' +
            ' <div class="hr"><hr /></div>' +
            ' <div class="icons">' +
            ' <div class="' + variable.id + '"></div>' +
            ' <br />' +
            ' <span></span>' +
            ' </div>' +
            ' <div class="graph ct-chart ct-perfect-fourth"></div>' +
            '</li>'
        );
    });
};
/* Access server */
// Refresh every monitoring variable via ajax. When the last callback returns,
// the header timestamp is updated and the overall page status recomputed.
function getMonitoringVariables() {
    var count = 0;
    var countCallback = 0;
    monitoringVariables.forEach(function(variable){
        variable.promise = false;
        /* First time */
        getMonitoringVariable(variable,function(){
            countCallback++;
            // $.get is asynchronous, so by the time these callbacks fire the
            // forEach has finished and count === monitoringVariables.length.
            if (count === monitoringVariables.length) {
                if (countCallback === count) {
                    $('section.header div.timestamp').text(moment().format('YYYY-MM-DD HH:mm:ss'));
                    calcTotalStatus();  // defined elsewhere — presumably aggregates the per-box statuses
                }
            }
        });
        count++;
    });
};
// True for Internet Explorer: IE <= 10 advertises "MSIE ", while IE 11 only
// exposes the Trident/rv:11 token, so both forms are checked.
function isIE() {
    var ua = window.navigator.userAgent;
    var hasMsieToken = ua.indexOf("MSIE ") > 0;
    var isTrident11 = /Trident.*rv\:11\./.test(navigator.userAgent);
    return hasMsieToken || isTrident11;
};
// True for legacy Edge, which includes the literal "Edge" token in its UA.
function isEdge() {
    return /Edge/.test(navigator.userAgent);
}
// True for Chrome or Chromium (case-insensitive UA match).
function isChrome() {
    var ua = navigator.userAgent.toLowerCase();
    return /chrom(e|ium)/.test(ua);
};
/* Charts*/
// Create the Chartist chart (or plain DOM, for "uptime") for a monitoring
// variable. `data` is {labels: [...], series: [...]}; the chart instance is
// stored on variable.chart so updateChart() can refresh it later.
function createChart(variable,data) {
    if (variable.chartType === "pie") {
        // Donut gauge (CPU %): total is 200 so the value fills a half-circle.
        variable.chart = new Chartist.Pie('#' + variable.id + ' div.graph', data, {
            donut: true,
            donutWidth: 30,
            startAngle: 270,
            total: 200,
            showLabel: false,
            width: 200,
            height: 170
        });
    } else if (variable.chartType === "line") {
        variable.chart = new Chartist.Line('#' + variable.id + ' div.graph', data, {
            showPoint: false,
            lineSmooth: false,
            showArea: true,
            chartPadding: {
                right: 0,
                left: 0
            },
            width: 200,
            height: 110
        });
    } else if (variable.chartType === "hbar") {
        // Stacked horizontal bar on a fixed 0-100 scale (disk used/free %).
        variable.chart = new Chartist.Bar('#' + variable.id + ' div.graph', data, {
            width: 200,
            height: 90,
            high: 100,
            horizontalBars: true,
            stackBars: true,
            showLabel: true,
            axisX: {
                showGrid: false,
                showLabel: false,
                offset: 0
            },
            axisY: {
                showGrid: false,
                showLabel: false,
                offset: 0
            }
        }).on('draw', function(data) {
            if(data.type === 'bar') {
                data.element.attr({
                    style: 'stroke-width: 50px'
                });
            }
        });
    } else if (variable.chartType === '3bar') {
        // Three distributed bars (min/mean/top request time) with a value
        // label drawn above each bar.
        let high = data.series[2] + 300;  // headroom so the top label fits
        variable.chart = new Chartist.Bar('#' + variable.id + ' div.graph', data, {
            distributeSeries: true,
            width: 200,
            height: 110,
            chartPadding: 0,
            high: high
        }).on('draw', function(data) {
            var barHorizontalCenter, barVerticalCenter, label, value;
            if(data.type === 'bar') {
                data.element.attr({
                    style: 'stroke-width: 20px; stroke: #F8FFE3'
                });
                // Label placement needs per-browser pixel nudges.
                let marginLeft = 10;
                let marginTop = 0;
                if (isEdge()) {
                    marginTop = 14;
                    marginLeft = 10;
                } else if(isChrome()){
                    marginLeft = 0;
                } else if (isIE()) {
                    marginTop = 15;
                }
                barHorizontalCenter = data.x1 + (data.element.width() * .5) - marginLeft;
                barVerticalCenter = data.y1 + (data.element.height() * -1) - 5 + marginTop;
                value = data.element.attr('ct:value');
                if (value !== '0') {
                    label = new Chartist.Svg('text');
                    label.text(value);
                    label.addClass("ct-barlabel");
                    label.attr({
                        x: barHorizontalCenter,
                        y: barVerticalCenter,
                        'text-anchor': 'middle'
                    });
                    return data.group.append(label);
                }
            }
        });
    } else if (variable.chartType === 'uptime') {
        // No Chartist chart here: render one small title/value tile per unit.
        for(var i =0; i<data.labels.length; i++) {
            $('#' + variable.id + ' div.graph').append(
                '<div class="uptime">' +
                ' <div class="title">' + data.labels[i] + '</div>' +
                ' <div class="data">' + data.series[i] + '</div>' +
                '</div>'
            );
        }
    } else if (variable.chartType === 'heap') {
        // Stacked vertical bars (heap used + total) with axis labels.
        variable.chart = new Chartist.Bar('#' + variable.id + ' div.graph', data, {
            width: 200,
            height: 110,
            chartPadding: {
                right: 0,
                left: 0
            },
            stackBars: true,
            showLabel: true,
            axisX: {
                showGrid: false,
                showLabel: true
            },
            axisY: {
                showGrid: true,
                showLabel: true
            }
        }).on('draw', function(data) {
            if(data.type === 'bar') {
                data.element.attr({
                    style: 'stroke-width: 35px'
                });
            }
        });
    } else if (variable.chartType === "hbar2") {
        // Like "hbar" but without the fixed 0-100 ceiling (memory in MB).
        variable.chart = new Chartist.Bar('#' + variable.id + ' div.graph', data, {
            width: 200,
            height: 90,
            horizontalBars: true,
            stackBars: true,
            showLabel: true,
            axisX: {
                showGrid: false,
                showLabel: false,
                offset: 0
            },
            axisY: {
                showGrid: false,
                showLabel: false,
                offset: 0
            }
        }).on('draw', function(data) {
            if(data.type === 'bar') {
                data.element.attr({
                    style: 'stroke-width: 50px'
                });
            }
        });
    }
};
// Push fresh data into an existing chart. The 3-bar chart also rescales its
// ceiling so the newest top value (plus label headroom) always fits.
function updateChart(variable, data) {
    if (variable.chartType === '3bar') {
        variable.chart.options.high = data.series[2] + 300;
    }
    variable.chart.update(data, variable.chart.options);
};
/* Ajax */
// Swap a variable's box out of the "loading" state and reveal its icon and
// graph areas.
function dataLoaded(id) {
    $(id).attr('class', 'done');
    $(id + ' div.icons').css('display', 'block');
    $(id + ' div.graph').css('display', 'block');
};
// Fetch one monitoring variable from the server and refresh its box: chart,
// status light and formatted value. `callback` is invoked on both success
// and error so the caller can count completions.
function getMonitoringVariable(variable,callback) {
    var id = '#' + variable.id;
    $.get({
        url: variable.url,
        error: function(){
            // A failure while still loading reveals the box so the error
            // message becomes visible.
            if (variable.chart === undefined) {
                dataLoaded(id);
            }
            $(id + ' div.status div').attr('class','error');
            variable.currentStatus = 'error';
            $(id + ' div.icons span').text('error');
            variable.chart = undefined;  // force chart re-creation on recovery
            $(id + ' div.graph').text(properties.config.errorMessage);
            callback();
        },
        success: function(json) {
            var values = variable.getDataAppropriately(json);
            if (variable.chart === undefined) {
                // First successful fetch (or first after an error): build the chart.
                dataLoaded(id);
                $(id + ' div.graph').text('');
                createChart(variable,{labels: variable.chartLabels, series: values});
            } else {
                updateChart(variable,{labels: variable.chartLabels, series: values});
            }
            // Status thresholds come from the user-editable custom-rules.js.
            variable.currentStatus = customRules.monitoringVariables[variable.id].status(variable.value(values));
            $(id + ' div.status div').attr('class',variable.currentStatus);
            $(id + ' div.icons span').text(variable.formatedValue(values));
            callback();
        },
        timeout: properties.config.defaultTimeout
    });
};
/* Document ready */
// Page bootstrap: load the variable descriptors, render the boxes, fetch the
// first round of data, then poll on the configured interval.
$( document ).ready(function() {
    properties.config.getAllProperties(properties.config.monitoringVariables).forEach(function(entry){
        monitoringVariables.push(properties.config.monitoringVariables[entry]);
    });
    typeStatus(properties.config.currentStatus);  // typeStatus defined elsewhere — presumably animates the header status text
    showMonitoringVariables();
    $('section.boxes ul.box').fadeIn(1000);
    getMonitoringVariables();
    /* Intervals */
    setInterval(function(){ getMonitoringVariables(); }, properties.config.millisecondsUpdateTime);
});
<|start_filename|>infrastructure/disk.js<|end_filename|>
"use strict";
const diskspace = require('diskspace');
const os = require('os');
let mainDisk = undefined;
var Disk = function() {
this.getStatus = function(request,callback) {
let path = os.platform() === 'win32' ? 'c:' : '/';
if (mainDisk !== undefined) path = mainDisk;
diskspace.check(path, function (error, result) {
if (error) {
callback(error);
} else {
let bytesUsed = result.used;
let used = parseInt( (bytesUsed * 100) / result.total );
let free = 100 - used;
if (used > 100) {
used = 100;
free = 0;
}
callback(undefined,[
[used],
[free]
]);
}
});
};
};
var obj = new Disk();
module.exports = {
disk: obj
};
<|start_filename|>public/js/properties.js<|end_filename|>
(function(exports){
const API_VERSION = 0;
exports.config = {
currentStatus: 'loading',
defaultTimeout: 3000,
errorMessage: 'Could not get this monitoring variable!',
millisecondsUpdateTime: 10000,
apiVersion: API_VERSION,
monitoringVariables: {
cpuOS: {
id: "cpuOS",
url: "api/" + API_VERSION + "/cpu/os",
label: "CPU (Operating System)",
currentStatus: 'loading',
chartLabels: [],
chartDataIndexes: [0],
getDataAppropriately: function(json) {
return json[0];
},
value: function(values){
return values[0];
},
formatedValue: function(values){
return values[0] + "%";
},
totalNumberMonitoring: 360,
chart: undefined,
chartType: 'pie'
},
cpuProcess: {
id: "cpuProcess",
url: "api/" + API_VERSION + "/cpu/process",
label: "CPU (NodeJS Process)",
currentStatus: 'loading',
chartLabels: [],
chartDataIndexes: [0],
getDataAppropriately: function(json) {
return json[0];
},
value: function(values){
return values[0];
},
formatedValue: function(values){
return values[0] + "%";
},
totalNumberMonitoring: 360,
chart: undefined,
chartType: 'pie'
},
requestsMeanTime: {
id: "requestsMeanTime",
url: "api/" + API_VERSION + "/requests/mean-time",
label: "HTTP Requests Mean Time",
currentStatus: 'loading',
chartLabels: ['Min', 'Mean', 'Top'],
chartDataIndexes: [0],
getDataAppropriately: function(json) {
return [
json[0].minTime,
json[0].meanTime,
json[0].topTime
];
},
value: function(values){
return values[1];
},
formatedValue: function(values){
return values[1] + ' ms';
},
totalNumberMonitoring: 1,
chart: undefined,
chartType: '3bar'
},
requests: {
id: "requests",
url: "api/" + API_VERSION + "/requests",
label: "Concurrent Requests",
currentStatus: 'loading',
chartLabels: ['-30s','-20s','-10s','now'],
chartDataIndexes: [3,2,1,0],
getDataAppropriately: function(json) {
return [ json ];
},
value: function(values){
return values[0][3];
},
formatedValue: function(values){
return values[0][3];
},
totalNumberMonitoring: 360,
chart: undefined,
chartType: 'line'
},
requestsPerHour: {
id: "requestsPerHour",
url: "api/" + API_VERSION + "/requests/hour",
label: "Total Requests per Hour",
currentStatus: 'loading',
chartLabels: ['-2h','-1h','now'],
chartDataIndexes: [2,1,0],
getDataAppropriately: function(json) {
return [[
json[0].count,
json[1].count,
json[2].count
]];
},
value: function(values){
return values[0][2];
},
formatedValue: function(values){
return values[0][2];
},
totalNumberMonitoring: 24,
chart: undefined,
chartType: 'line'
},
kbytesPerMinute: {
id: "kbytesPerMinute",
url: "api/" + API_VERSION + "/kbytes/minute",
label: "Download Kbytes per Minute",
currentStatus: 'loading',
chartLabels: ['-3m','-2m','-1m','now'],
chartDataIndexes: [3,2,1,0],
getDataAppropriately: function(json) {
return [[
parseFloat(json[0].total).toFixed(2),
parseFloat(json[1].total).toFixed(2),
parseFloat(json[2].total).toFixed(2),
parseFloat(json[3].total).toFixed(2)
]];
},
value: function(values){
return values[0][3];
},
formatedValue: function(values){
return values[0][3] + ' KB';
},
totalNumberMonitoring: 60,
chart: undefined,
chartType: 'line'
},
disk: {
id: "disk",
url: "api/" + API_VERSION + "/disk",
label: "Disk Usage",
currentStatus: 'loading',
chartLabels: [],
chartDataIndexes: [0],
getDataAppropriately: function(json) {
return json[0];
},
value: function(values){
return values[0][0];
},
formatedValue: function(values){
return values[0][0] + "%";
},
totalNumberMonitoring: 360,
chart: undefined,
chartType: 'hbar'
},
uptimeOS: {
id: "uptimeOS",
url: "api/" + API_VERSION + "/uptime/os",
label: "Operating System Uptime",
currentStatus: 'loading',
chartLabels: ['years','months','days','hours'],
chartDataIndexes: [0],
getDataAppropriately: function(json) {
return [
json[0].years,
json[0].months,
json[0].days,
json[0].hours,
json[0].minutes,
json[0].seconds
];
},
value: function(values){
return {
years: values[0],
months: values[1],
days: values[2],
hours: values[3],
minutes: values[4],
seconds: values[5]
};
},
formatedValue: function(values){
function round(v) {
return Math.round( v * 10 ) / 10;
}
var minutes = round((values[5] / 60) + values[4]);
var hours = round((minutes / 60) + values[3]);
var days = round((hours / 24) + values[2]);
var months = round((days / 30) + values[1]);
var years = round((months / 12) + values[0]);
if (years > 0) return years + ' years';
if (months > 0) return months + ' months';
if (days > 0) return days + ' days';
return hours + ' hours';
},
totalNumberMonitoring: 1,
chart: undefined,
chartType: 'uptime'
},
uptimeProcess: {
id: "uptimeProcess",
url: "api/" + API_VERSION + "/uptime/process",
label: "NodeJS Uptime",
currentStatus: 'loading',
chartLabels: ['years','months','days','hours'],
chartDataIndexes: [0],
getDataAppropriately: function(json) {
return [
json[0].years,
json[0].months,
json[0].days,
json[0].hours,
json[0].minutes,
json[0].seconds
];
},
value: function(values){
return {
years: values[0],
months: values[1],
days: values[2],
hours: values[3],
minutes: values[4],
seconds: values[5]
};
},
formatedValue: function(values){
function round(v) {
return Math.round( v * 10 ) / 10;
}
var minutes = round((values[5] / 60) + values[4]);
var hours = round((minutes / 60) + values[3]);
var days = round((hours / 24) + values[2]);
var months = round((days / 30) + values[1]);
var years = round((months / 12) + values[0]);
if (years > 0) return years + ' years';
if (months > 0) return months + ' months';
if (days > 0) return days + ' days';
return hours + ' hours';
},
totalNumberMonitoring: 1,
chart: undefined,
chartType: 'uptime'
},
residentSetSize: {
id: "residentSetSize",
url: "api/" + API_VERSION + "/memory/rss",
label: "Resident Set Size",
currentStatus: 'loading',
chartLabels: ['-30s','-20s','-10s','now'],
chartDataIndexes: [3,2,1,0],
getDataAppropriately: function(json) {
return [ json ];
},
value: function(values){
return values[0][3];
},
formatedValue: function(values){
return values[0][3] + ' MB';
},
totalNumberMonitoring: 60,
chart: undefined,
chartType: 'line'
},
heap: {
id: "heap",
url: "api/" + API_VERSION + "/memory/heap",
label: "Heap Used",
currentStatus: 'loading',
chartLabels: ['-20s','-10s','now'],
chartDataIndexes: [2,1,0],
getDataAppropriately: function(json) {
return [
[json[0].used, json[1].used, json[2].used],
[json[0].total, json[1].total, json[2].total],
];
},
value: function(values){
return values;
},
formatedValue: function(values){
return values[0][2] + " MB";
},
totalNumberMonitoring: 60,
chart: undefined,
chartType: 'heap'
},
processMemory: {
id: "processMemory",
url: "api/" + API_VERSION + "/memory/process",
label: "Used Memory (NodeJS)",
currentStatus: 'loading',
chartLabels: [],
chartDataIndexes: [0],
getDataAppropriately: function(json) {
return json[0];
},
value: function(values){
return {
used: values[0][0],
total: values[1][0],
};
},
formatedValue: function(values){
return values[0][0] + " MB";
},
totalNumberMonitoring: 1,
chart: undefined,
chartType: 'hbar2'
}
},
getAllProperties: function(object) {
var properties = [];
for(var key in object) {
properties.push(key);
}
return properties;
},
extend: function(base, sub) {
var origProto = sub.prototype;
sub.prototype = Object.create(base.prototype);
for (var key in origProto) {
sub.prototype[key] = origProto[key];
}
sub.prototype.constructor = sub;
Object.defineProperty(sub.prototype, 'constructor', {
enumerable: false,
value: sub
});
}
};
})(typeof exports === 'undefined'? this['properties']={}: exports);
<|start_filename|>public/css/style.css<|end_filename|>
/* Background */
div.image{
width: 100%;
height: 100%;
display:block;
z-index: -1;
top:0px;
left: 0px;
position:absolute;
}
div.image div{
position:absolute;
top:0px;
display:none;
width: 100%;
height:200%;
background-repeat: repeat;
background-size: auto;
}
div.image div.loading{
background-color: #616161;
background-image: url(../img/loading.png);
display:block;
}
div.image div.stable{
background-color: #1e88e5;
background-image: url(../img/stable.png);
}
div.image div.unstable{
background-color: #9e9d24;
background-image: url(../img/unstable.png);
}
div.image div.dangerous{
background-color: #ef6c00;
background-image: url(../img/dangerous.png);
}
div.image div.error{
background-color: #d84315;
background-image: url(../img/error.png);
}
/* Header */
section.header {
font-size: 30px;
font-weight: bold;
color: #424242;
}
section.header img.logo {
position:absolute;
left: 10px;
top: 10px;
}
section.header div.timestamp {
font-size: 16px;
font-weight: normal;
position:absolute;
right: 10px;
top: 10px;
width: 250px;
text-align: center;
display: block;
background-color: rgba(200,200,200,0.5);
padding: 2px 5px 2px 5px;
}
section.header div.status {
position:absolute;
right: 10px;
top: 30px;
width: 250px;
display: block;
background-color: rgba(150,150,150,0.5);
padding: 2px 5px 2px 5px;
}
.typed-cursor{
opacity: 1;
-webkit-animation: blink 0.7s infinite;
-moz-animation: blink 0.7s infinite;
animation: blink 0.7s infinite;
}
@keyframes blink{
0% { opacity:1; }
50% { opacity:0; }
100% { opacity:1; }
}
@-webkit-keyframes blink{
0% { opacity:1; }
50% { opacity:0; }
100% { opacity:1; }
}
@-moz-keyframes blink{
0% { opacity:1; }
50% { opacity:0; }
100% { opacity:1; }
}
/* Boxes */
section.boxes {
margin: 10px;
margin-top: 120px;
font-family: "Lucida Sans Unicode", "Lucida Grande", sans-serif;
color: #F8FFE3;
}
section.boxes ul.box {
list-style-type: none;
overflow: hidden;
margin: 0;
padding: 0;
display: none;
}
section.boxes ul.box li {
float: left;
margin-left: 10px;
margin-top: 10px;
background-color: #231F20;
width: 320px;
height: 140px;
display: block;
padding: 10px;
}
section.boxes ul.box li.done {
background-color: #231F20;
}
section.boxes ul.box li.loading {
background-image: url(../img/loading-boxes.gif);
background-size: 340px;
background-position: left -70px;
}
section.boxes ul.box li div.title {
font-size: 16px;
font-weight: bold;
float: left;
display: block;
width: 270px;
height: 25px;
text-align: left;
line-height: 25px;
}
section.boxes ul.box div.status {
display: block;
width: 25px;
height: 25px;
float: right;
border: 0px;
padding: 0px;
margin: 0px;
}
section.boxes ul.box div.status div.loading {
display: block;
width: 25px;
height: 25px;
background-image: url(../img/status.png);
background-position: 0px;
}
section.boxes ul.box div.status div.stable {
display: block;
width: 25px;
height: 25px;
background-image: url(../img/status.png);
background-position: -25px;
}
section.boxes ul.box div.status div.unstable {
display: block;
width: 25px;
height: 25px;
background-image: url(../img/status.png);
background-position: -50px;
}
section.boxes ul.box div.status div.dangerous {
display: block;
width: 25px;
height: 25px;
background-image: url(../img/status.png);
background-position: -75px;
}
section.boxes ul.box div.status div.error {
display: block;
width: 25px;
height: 25px;
background-image: url(../img/status.png);
background-position: -100px;
}
section.boxes ul.box div.hr {
display: block;
height: 4px;
clear: both;
margin-top: 28px;
}
section.boxes ul.box li hr {
border: 0;
height: 1px;
background-image: linear-gradient(to right, rgba(63, 63, 63, 0), rgba(63, 63, 63, 1), rgba(63, 63, 63, 0));
display: block;
}
section.boxes ul.box div.icons {
padding-top: 7px;
display: none;
width: 100px;
height: 100px;
font-size: 16px;
font-weight: none;
text-align: center;
float: left;
}
section.boxes ul.box div.icons div.requests {
display: block;
width: 64px;
height: 64px;
margin: 0 auto;
background-image: url(../img/icons.png);
background-repeat: no-repeat;
}
section.boxes ul.box div.icons div.cpuOS {
display: block;
width: 64px;
height: 64px;
margin: 0 auto;
background-image: url(../img/icons.png);
background-repeat: no-repeat;
background-position: -64px;
}
section.boxes ul.box div.icons div.disk {
display: block;
width: 64px;
height: 64px;
margin: 0 auto;
background-image: url(../img/icons.png);
background-repeat: no-repeat;
background-position: -128px;
}
section.boxes ul.box div.icons div.requestsPerHour {
display: block;
width: 64px;
height: 64px;
margin: 0 auto;
background-image: url(../img/icons.png);
background-repeat: no-repeat;
background-position: -192px;
}
section.boxes ul.box div.icons div.kbytesPerMinute {
display: block;
width: 64px;
height: 64px;
margin: 0 auto;
background-image: url(../img/icons.png);
background-repeat: no-repeat;
background-position: -256px;
}
section.boxes ul.box div.icons div.requestsMeanTime {
display: block;
width: 64px;
height: 64px;
margin: 0 auto;
background-image: url(../img/icons.png);
background-repeat: no-repeat;
background-position: -320px;
}
section.boxes ul.box div.icons div.cpuProcess {
display: block;
width: 64px;
height: 64px;
margin: 0 auto;
background-image: url(../img/icons.png);
background-repeat: no-repeat;
background-position: -384px;
}
section.boxes ul.box div.icons div.uptimeOS {
display: block;
width: 64px;
height: 64px;
margin: 0 auto;
background-image: url(../img/icons.png);
background-repeat: no-repeat;
background-position: -448px;
}
section.boxes ul.box div.icons div.uptimeProcess {
display: block;
width: 64px;
height: 64px;
margin: 0 auto;
background-image: url(../img/icons.png);
background-repeat: no-repeat;
background-position: -512px;
}
section.boxes ul.box div.icons div.residentSetSize {
display: block;
width: 64px;
height: 64px;
margin: 0 auto;
background-image: url(../img/icons.png);
background-repeat: no-repeat;
background-position: -576px;
}
section.boxes ul.box div.icons div.heap {
display: block;
width: 64px;
height: 64px;
margin: 0 auto;
background-image: url(../img/icons.png);
background-repeat: no-repeat;
background-position: -640px;
}
section.boxes ul.box div.icons div.processMemory {
display: block;
width: 64px;
height: 64px;
margin: 0 auto;
background-image: url(../img/icons.png);
background-repeat: no-repeat;
background-position: -704px;
}
section.boxes ul.box div.graph {
padding: 0px;
margin: 0px;
border: 0px;
margin-top: 7px;
float: left;
width: 205px;
height: 100px;
display: none;
}
section.boxes ul.box div.graph div.uptime {
width: 48px;
display: block;
float: left;
margin-right: 2px;
}
section.boxes ul.box div.graph div.uptime div.title {
width: 48px;
height: 27px;
display: block;
float: left;
color: #F8FFE3;
font-size: 12px;
text-align: center;
padding-top: 3px;
border-top-left-radius: 5px;
border-top-right-radius: 5px;
background-color: rgba(255,255,255,0.2);
}
section.boxes ul.box div.graph div.uptime div.data {
padding: 0px;
margin: 0px;
width: 48px;
height: 43px;
border-bottom-left-radius: 5px;
border-bottom-right-radius: 5px;
display: block;
float: left;
background-color: #F8FFE3;
color: #231F20;
font-size: 34px;
font-weight: bold;
padding-top: 7px;
text-align: center;
}
.ct-series-a .ct-bar, .ct-series-a .ct-line, .ct-series-a .ct-point, .ct-series-a .ct-slice-donut {
stroke: #F8FFE3;
stroke-width: 2px;
}
.ct-series-b .ct-bar, .ct-series-b .ct-line, .ct-series-b .ct-point, .ct-series-b .ct-slice-donut {
stroke: rgba(255,255,255,0.2);
}
.ct-grids line {
stroke: #c8cfc3;
}
.ct-labels span {
color: #c8cfc3;
}
.ct-series-a .ct-area {
fill:#F8FFE3;
}
.ct-barlabel {
font-size: 12px;
fill: #c8cfc3;
}
/* Tablet breakpoint: shrink the header, logo and status/timestamp. */
@media screen and (max-width: 800px) {
    section.header {
        font-size: 24px;
    }
    section.header img.logo {
        width: 100px;
        height: auto;
    }
    section.header div.timestamp {
        width: 200px;
        font-size: 12px;
    }
    section.header div.status {
        top: 26px;
        width: 200px;
    }
}
/* Phone breakpoint: shrink everything further. */
@media screen and (max-width: 500px) {
    section.header {
        font-size: 18px;
    }
    section.header img.logo {
        width: 64px;
        height: auto;
    }
    section.header div.timestamp {
        width: 150px;
        font-size: 10px;
    }
    section.header div.status {
        top: 24px;
        width: 150px;
    }
}
<|start_filename|>infrastructure/memory.js<|end_filename|>
"use strict";
const pusage = require('pidusage');
const os = require('os');
// Round a number to one decimal place (e.g. 1.25 -> 1.3).
function round(v) {
    var tenths = Math.round(v * 10);
    return tenths / 10;
}
var ResidentSetSize = function() {
    /**
     * Reports the process resident set size in whole megabytes.
     * @param {*} request Unused; kept for interface parity with the
     *     other infrastructure collectors.
     * @param {Function} callback Node-style callback(err, mb).
     */
    this.getStatus = function(request,callback) {
        // Math.floor instead of parseInt: parseInt stringifies its numeric
        // argument (which breaks on exponent notation), while floor simply
        // truncates the always-positive RSS byte count.
        let mb = Math.floor((process.memoryUsage().rss / 1024) / 1024);
        callback(undefined,mb);
    };
};
var Heap = function() {
    // Reports V8 heap usage as callback(err, {used, total}), both values
    // in megabytes rounded to one decimal place (the rounding helper is
    // inlined here: Math.round(x * 10) / 10).
    this.getStatus = function(request,callback) {
        var usedMb = Math.round(((process.memoryUsage().heapUsed / 1024) / 1024) * 10) / 10;
        var totalMb = Math.round(((process.memoryUsage().heapTotal / 1024) / 1024) * 10) / 10;
        callback(undefined, { used: usedMb, total: totalMb });
    };
};
var ProcessMemory = function() {
    /**
     * Reports process memory vs total system memory, both in MB rounded
     * to one decimal place, as callback(err, [[used], [total]]).
     */
    this.getStatus = function(request,callback) {
        pusage.stat(process.pid, function(err, stat) {
            /* Unmonitor process */
            // Always release pidusage's per-pid monitor, including on the
            // error paths; the original only unmonitored on success, so a
            // failed stat leaked the monitor entry.
            pusage.unmonitor(process.pid);
            if (err) {
                callback(err);
            } else if (stat.memory === undefined) {
                callback('Memory status not found!');
            } else {
                let memoryUsed = round( (stat.memory / 1024) / 1024 );
                let memoryTotal = round( (os.totalmem() / 1024) / 1024 );
                callback(undefined,[
                    [memoryUsed],
                    [memoryTotal]
                ]);
            }
        });
    };
};
var residentSetSize = new ResidentSetSize();
var heap = new Heap();
var processMemory = new ProcessMemory();
module.exports = {
residentSetSize: residentSetSize,
heap: heap,
processMemory: processMemory
};
<|start_filename|>infrastructure/cpu.js<|end_filename|>
"use strict";
const osUtils = require('os-utils');
const pusage = require('pidusage');
const windowsCpu = require('windows-cpu');
const os = require('os');
var CpuOS = function() {
    // Reports OS-wide CPU usage as callback(err, [usedPct, freePct]).
    this.getStatus = function(request,callback) {
        osUtils.cpuUsage(function(value){
            var usedPct = parseInt(value * 100);
            var freePct = 100 - usedPct;
            // Clamp readings that overshoot 100%.
            if (usedPct > 100) {
                usedPct = 100;
                freePct = 0;
            }
            callback(undefined, [usedPct, freePct]);
        });
    };
};
var CpuProcess = function() {
    /**
     * Reports this process' CPU usage as callback(err, [usedPct, freePct]).
     * Uses windows-cpu on win32 and pidusage everywhere else.
     */
    this.getStatus = function(request,callback) {
        if (os.platform() === 'win32') {
            windowsCpu.nodeLoad(function(error, results) {
                if (error) {
                    // Bug fix: the original fell through after reporting the
                    // error, so the callback fired a second time below with
                    // 'CPU status not found!'.
                    callback(error);
                    return;
                }
                if (results === undefined) {
                    callback('CPU status not found!');
                } else {
                    let cpuUsage = parseInt(results.load);
                    let cpuFree = 100 - cpuUsage;
                    // Clamp readings that overshoot 100%.
                    if (cpuUsage > 100) {
                        cpuUsage = 100;
                        cpuFree = 0;
                    }
                    callback(undefined,[
                        cpuUsage,
                        cpuFree
                    ]);
                }
            });
        } else {
            pusage.stat(process.pid, function(err, stat) {
                /* Unmonitor process */
                // Always release pidusage's per-pid monitor, including on
                // error paths (the original leaked it unless stat.cpu was set).
                pusage.unmonitor(process.pid);
                if (err) {
                    callback(err);
                } else if (stat.cpu === undefined) {
                    callback('CPU status not found!');
                } else {
                    let cpuUsage = parseInt(stat.cpu);
                    let cpuFree = 100 - cpuUsage;
                    // Clamp readings that overshoot 100%.
                    if (cpuUsage > 100) {
                        cpuUsage = 100;
                        cpuFree = 0;
                    }
                    callback(undefined,[
                        cpuUsage,
                        cpuFree
                    ]);
                }
            });
        }
    };
};
var cpuOS = new CpuOS();
var cpuProcess = new CpuProcess();
module.exports = {
cpuOS: cpuOS,
cpuProcess: cpuProcess
};
<|start_filename|>model/models.js<|end_filename|>
"use strict";
const path = require('path');
const properties = require('../public/js/properties').config;
const Repository = require('../repositories/repository').Repository;
const RepositoryRequests = require('../repositories/repository').RepositoryRequests;
// Shared repository instances used by every model in this module.
const repository = new Repository();
const repositoryRequests = new RepositoryRequests();
/* Infrastructure */
// Collector registry: maps a monitoring-variable name to its collector
// instance. BasicModel.getStatus() looks collectors up here by the name
// each model passes to the BasicModel constructor.
var infrastructure = {
    cpuOS: require('../infrastructure/cpu').cpuOS,
    cpuProcess: require('../infrastructure/cpu').cpuProcess,
    requests: require('../infrastructure/requests').requests,
    disk: require('../infrastructure/disk').disk,
    uptimeOS: require('../infrastructure/uptime').uptimeOS,
    uptimeProcess: require('../infrastructure/uptime').uptimeProcess,
    residentSetSize: require('../infrastructure/memory').residentSetSize,
    heap: require('../infrastructure/memory').heap,
    processMemory: require('../infrastructure/memory').processMemory
};
/* Super Class */
/* Super Class */
// Base for every model: remembers which infrastructure collector
// (by registry name) the model reads from.
function BasicModel(monitoringVariableName) {
    this.monitoringVariable = monitoringVariableName;
}
BasicModel.prototype = {
    // Fetch the current reading from the matching infrastructure
    // collector, persist it through the repository, then hand the stored
    // series back via callback(err, values).
    getStatus: function (request, callback) {
        var variable = this.monitoringVariable;
        infrastructure[variable].getStatus(request, function (error, data) {
            if (error) {
                callback(error);
                return;
            }
            repository.getStatus(variable, data, function (values) {
                callback(undefined, values);
            });
        });
    }
};
/* Models */
// Each model binds BasicModel to one collector name; properties.extend()
// wires up the inheritance (implementation lives in ../public/js/properties).
var CpuOS = function() {
    BasicModel.call(this, "cpuOS");
};
properties.extend(BasicModel, CpuOS);
var CpuProcess = function() {
    BasicModel.call(this, "cpuProcess");
};
properties.extend(BasicModel, CpuProcess);
var Requests = function() {
    BasicModel.call(this, "requests");
};
Requests.prototype = {
    // Record one requests-per-hour sample; addsOneMore is false for the
    // monitor's own /hey-joe traffic so it is not counted.
    savePerHour: function(addsOneMore) {
        repositoryRequests.savePerHour(addsOneMore);
    },
    // Fetch the stored requests-per-hour series via callback(err, values).
    getStatusPerHour: function(callback) {
        repositoryRequests.getStatusPerHour(function(values){
            callback(undefined,values);
        });
    },
    // Accumulate a response size (bytes) into the kbytes-per-minute series.
    countKbytes: function(bytes) {
        // Bug fix: guard on the argument itself. The previous code divided
        // first and tested the quotient, which is NaN (never undefined)
        // when bytes is missing, so NaN samples were persisted.
        if (bytes !== undefined) {
            repositoryRequests.saveKbytesPerMinute(bytes / 1024);
        }
    },
    // Fetch the stored kbytes-per-minute series via callback(err, values).
    getKbytesPerMinute: function(callback) {
        repositoryRequests.getKbytesPerMinute(function(values){
            callback(undefined,values);
        });
    },
    // Stamp the request with its arrival time (read by calcRequestTime).
    markStart: function(request) {
        request.start = Date.now();
    },
    // Persist the elapsed time of a request started via markStart().
    calcRequestTime: function(request) {
        let time = Date.now() - request.start;
        repositoryRequests.saveRequestsMeanTime(time);
    },
    // Fetch the stored mean-request-time series via callback(err, values).
    getRequestsMeanTime: function(callback) {
        repositoryRequests.getRequestsMeanTime(function(values){
            callback(undefined,values);
        });
    }
};
// NOTE(review): Requests.prototype was wholly replaced above, so this
// extend() must merge (not overwrite) for getStatus to survive — verify
// against properties.extend in ../public/js/properties.
properties.extend(BasicModel, Requests);
var Disk = function() {
    BasicModel.call(this, "disk");
};
properties.extend(BasicModel, Disk);
var UptimeOS = function() {
    BasicModel.call(this, "uptimeOS");
};
properties.extend(BasicModel, UptimeOS);
var UptimeProcess = function() {
    BasicModel.call(this, "uptimeProcess");
};
properties.extend(BasicModel, UptimeProcess);
var ResidentSetSize = function() {
    BasicModel.call(this, "residentSetSize");
};
properties.extend(BasicModel, ResidentSetSize);
var Heap = function() {
    BasicModel.call(this, "heap");
};
properties.extend(BasicModel, Heap);
var ProcessMemory = function() {
    BasicModel.call(this, "processMemory");
};
properties.extend(BasicModel, ProcessMemory);
// Public model constructors, one per monitored variable.
module.exports = {
    CpuOS: CpuOS,
    CpuProcess: CpuProcess,
    Requests: Requests,
    Disk: Disk,
    UptimeOS: UptimeOS,
    UptimeProcess: UptimeProcess,
    ResidentSetSize: ResidentSetSize,
    Heap: Heap,
    ProcessMemory: ProcessMemory
};
<|start_filename|>index.js<|end_filename|>
// Express router for the hey-joe monitor: counts every non-monitor
// request, then mounts the monitor's own routes under /hey-joe.
const express = require('express');
const app = express();
const router = express.Router();
const cors = require('cors');
const endMw = require('express-end');
const filter = require('./libraries/filter');
const properties = require('./public/js/properties').config;
const Requests = require('./model/models').Requests;
var requests = new Requests();
/* Cors */
router.use(cors());
/* Express end event */
// NOTE(review): endMw is attached to the local `app`, but this module
// exports `router` — confirm the response-end middleware is actually
// mounted by the consuming application.
app.use(endMw);
/* Middleware features */
router.use(function(req,res,next){
    filter.all(req,res,function(newRes){
        res = newRes;
        /* Requests per hour */
        // Count the request unless it targets the monitor itself
        // (indexOf === -1 means the URL does not contain 'hey-joe').
        requests.savePerHour((req.originalUrl.indexOf('hey-joe') == -1));
        next();
    });
});
router.use('/hey-joe',require('./routes'));
module.exports = router;
<|start_filename|>public/js/utils.js<|end_filename|>
/* Rules to change background */
var c = 0,
    $img = $('.image div'), // one background image per status, cross-faded by colorStatus()
    n = $img.length;
// Status names, index-aligned with possibleStatusValues below.
var possibleStatus = [
    "loading",
    "stable",
    "unstable",
    "dangerous",
    "error"
];
// Severity ranking per status (higher = worse); -1 marks "loading".
var possibleStatusValues = [
    -1,
    0,
    1,
    2,
    3
];
// Invoke callback with the index of every possibleStatus entry equal to
// status (entries are unique, so at most one invocation in practice).
function getIndexStatus(status,callback) {
    for (var idx = 0; idx < possibleStatus.length; idx++) {
        if (possibleStatus[idx] === status) {
            callback(idx);
        }
    }
};
function calcTotalStatus() {
    // Determine the worst (highest-severity) status across all monitored
    // variables and, if it differs from the current global status, update
    // it and re-render. Relies on the module-level monitoringVariables and
    // properties globals (defined elsewhere in the front-end scripts).
    var worseStatusIndex = 0;
    var count = 0;
    monitoringVariables.forEach(function(entry){
        getIndexStatus(entry.currentStatus,function(currentStatusIndex){
            // Severity ordering comes from possibleStatusValues.
            if (possibleStatusValues[currentStatusIndex] > possibleStatusValues[worseStatusIndex]) {
                worseStatusIndex = currentStatusIndex;
            }
        });
        count++;
        // Only act after the final variable has been folded in.
        if (monitoringVariables.length === count) {
            if (properties.config.currentStatus != possibleStatus[worseStatusIndex]) {
                /* Change it */
                properties.config.currentStatus = possibleStatus[worseStatusIndex];
                colorStatus(properties.config.currentStatus);
            }
        }
    });
};
function colorStatus(status) {
    // Cross-fade to the background image matching the status, then re-type
    // the status label once the fade-in completes.
    getIndexStatus(status,function(statusIndex){
        $img.fadeOut(500).eq(statusIndex).fadeIn(500,function(){
            typeStatus($img.eq(statusIndex).attr("class"));
        });
    });
};
function typeStatus(status) {
    // Animate the status text into the #status element using Typed.js.
    Typed.new('#status', {
        strings: [status],
        typeSpeed: 10
    });
};
// Return the names of all own properties of object whose value is a function.
function getAllMethods(object) {
    var names = Object.getOwnPropertyNames(object);
    var methods = [];
    for (var i = 0; i < names.length; i++) {
        if (typeof object[names[i]] === 'function') {
            methods.push(names[i]);
        }
    }
    return methods;
};
<|start_filename|>engine/core/IgeEntity.js<|end_filename|>
/**
* Creates an entity and handles the entity's life cycle and
* all related entity actions / methods.
*/
var IgeEntity = IgeObject.extend({
classId: 'IgeEntity',
    init: function () {
        IgeObject.prototype.init.call(this);
        // Register the IgeEntity special properties handler for
        // serialise and de-serialise support
        this._specialProp.push('_texture');
        this._specialProp.push('_eventListeners');
        this._specialProp.push('_aabb');
        // Texture anchor offset and cached render position.
        this._anchor = new IgePoint2d(0, 0);
        this._renderPos = {x: 0, y: 0};
        // Local opacity plus the value computed from the ancestor chain.
        this._computedOpacity = 1;
        this._opacity = 1;
        this._cell = 1;
        // Lifespan bookkeeping: born now, no scheduled death.
        this._deathTime = undefined;
        this._bornTime = ige._currentTime;
        // Transform components, plus the previous translate for
        // change-detection between ticks.
        this._translate = new IgePoint3d(0, 0, 0);
        this._oldTranslate = new IgePoint3d(0, 0, 0);
        this._rotate = new IgePoint3d(0, 0, 0);
        this._scale = new IgePoint3d(1, 1, 1);
        this._origin = new IgePoint3d(0.5, 0.5, 0.5);
        // 2d bounds default to 40x40 px; 3d bounds default to zero.
        this._bounds2d = new IgePoint2d(40, 40);
        this._bounds3d = new IgePoint3d(0, 0, 0);
        this._oldBounds2d = new IgePoint2d(40, 40);
        this._oldBounds3d = new IgePoint3d(0, 0, 0);
        this._highlight = false;
        this._mouseEventsActive = false;
        this._velocity = new IgePoint3d(0, 0, 0);
        // Local / world transform matrices (world = parent chain applied).
        this._localMatrix = new IgeMatrix2d();
        this._worldMatrix = new IgeMatrix2d();
        this._oldWorldMatrix = new IgeMatrix2d();
        this._inView = true;
        this._hidden = false;
        //this._mouseEventTrigger = 0;
        /* CEXCLUDE */
        if (typeof(module) !== 'undefined' && typeof(module.exports) !== 'undefined') {
            // Set the stream floating point precision to 2 as default
            this.streamFloatPrecision(2);
        }
        /* CEXCLUDE */
        // Set the default stream sections as just the transform data
        this.streamSections(['transform']);
    },
/**
* Sets the entity as visible and able to be interacted with.
* @example #Show a hidden entity
* entity.show();
* @return {*} The object this method was called from to allow
* method chaining.
*/
    show: function () {
        // Clear the hidden flag; rendering/interaction checks read _hidden.
        this._hidden = false;
        return this; // chainable
    },
/**
* Sets the entity as hidden and cannot be interacted with.
* @example #Hide a visible entity
* entity.hide();
* @return {*} The object this method was called from to allow
* method chaining.
*/
    hide: function () {
        // Set the hidden flag; rendering/interaction checks read _hidden.
        this._hidden = true;
        return this; // chainable
    },
/**
* Checks if the entity is visible.
* @returns {boolean} True if the entity is visible.
*/
    isVisible: function () {
        // Strict comparison: only an explicit false counts as visible.
        return this._hidden === false;
    },
/**
* Checks if the entity is hidden.
* @returns {boolean} True if the entity is hidden.
*/
    isHidden: function () {
        // Strict comparison: only an explicit true counts as hidden.
        return this._hidden === true;
    },
/**
* Gets / sets the cache flag that determines if the entity's
* texture rendering output should be stored on an off-screen
* canvas instead of calling the texture.render() method each
* tick. Useful for expensive texture calls such as rendering
* fonts etc. If enabled, this will automatically disable advanced
* composite caching on this entity with a call to
* compositeCache(false).
* @param {Boolean=} val True to enable caching, false to
* disable caching.
* @example #Enable entity caching
* entity.cache(true);
* @example #Disable entity caching
* entity.cache(false);
* @example #Get caching flag value
* var val = entity.cache();
* @return {*}
*/
    cache: function (val) {
        if (val !== undefined) {
            this._cache = val;
            if (val) {
                // Create the off-screen canvas
                if (ige.isClient) {
                    // Use a real canvas
                    this._cacheCanvas = document.createElement('canvas');
                } else {
                    // Use dummy objects for canvas and context
                    this._cacheCanvas = new IgeDummyCanvas();
                }
                this._cacheCtx = this._cacheCanvas.getContext('2d');
                // Force a first render into the fresh canvas.
                this._cacheDirty = true;
                // Set smoothing mode
                // Per-entity override wins; otherwise the engine-wide setting.
                var smoothing = this._cacheSmoothing !== undefined ? this._cacheSmoothing : ige._globalSmoothing;
                if (!smoothing) {
                    this._cacheCtx.imageSmoothingEnabled = false;
                    this._cacheCtx.mozImageSmoothingEnabled = false;
                } else {
                    this._cacheCtx.imageSmoothingEnabled = true;
                    this._cacheCtx.mozImageSmoothingEnabled = true;
                }
                // Switch off composite caching
                // (simple cache and composite cache are mutually exclusive)
                if (this.compositeCache()) {
                    this.compositeCache(false);
                }
            } else {
                // Remove the off-screen canvas
                delete this._cacheCanvas;
            }
            return this;
        }
        return this._cache;
    },
/**
* When using the caching system, this boolean determines if the
* cache canvas should have image smoothing enabled or not. If
* not set, the ige global smoothing setting will be used instead.
* @param {Boolean=} val True to enable smoothing, false to disable.
* @returns {*}
*/
cacheSmoothing: function (val) {
if (val !== undefined) {
this._cacheSmoothing = val;
return this;
}
return this._cacheSmoothing;
},
/**
* Gets / sets composite caching. Composite caching draws this entity
* and all of it's children (and their children etc) to a single off
* screen canvas so that the entity does not need to be redrawn with
* all it's children every tick. For composite entities where little
* change occurs this will massively increase rendering performance.
* If enabled, this will automatically disable simple caching on this
* entity with a call to cache(false).
* @param {Boolean=} val
* @example #Enable entity composite caching
* entity.compositeCache(true);
* @example #Disable entity composite caching
* entity.compositeCache(false);
* @example #Get composite caching flag value
* var val = entity.cache();
* @return {*}
*/
    compositeCache: function (val) {
        // Composite caching is client-only; on the server this is a
        // chainable no-op.
        if (ige.isClient) {
            if (val !== undefined) {
                if (val) {
                    // Switch off normal caching
                    // (simple cache and composite cache are mutually exclusive)
                    this.cache(false);
                    // Create the off-screen canvas
                    this._cacheCanvas = document.createElement('canvas');
                    this._cacheCtx = this._cacheCanvas.getContext('2d');
                    this._cacheDirty = true;
                    // Set smoothing mode
                    // Per-entity override wins; otherwise the engine-wide setting.
                    var smoothing = this._cacheSmoothing !== undefined ? this._cacheSmoothing : ige._globalSmoothing;
                    if (!smoothing) {
                        this._cacheCtx.imageSmoothingEnabled = false;
                        this._cacheCtx.mozImageSmoothingEnabled = false;
                    } else {
                        this._cacheCtx.imageSmoothingEnabled = true;
                        this._cacheCtx.mozImageSmoothingEnabled = true;
                    }
                }
                // Loop children and set _compositeParent to the correct value
                // so children know to propagate dirty flags up to this entity.
                this._children.each(function () {
                    if (val) {
                        this._compositeParent = true;
                    } else {
                        delete this._compositeParent;
                    }
                });
                this._compositeCache = val;
                return this;
            }
            return this._compositeCache;
        } else {
            return this;
        }
    },
/**
* Gets / sets the cache dirty flag. If set to true this will
* instruct the entity to re-draw it's cached image from the
* assigned texture. Once that occurs the flag will automatically
* be set back to false. This works in either standard cache mode
* or composite cache mode.
* @param {Boolean=} val True to force a cache update.
* @example #Get cache dirty flag value
* var val = entity.cacheDirty();
* @example #Set cache dirty flag value
* entity.cacheDirty(true);
* @return {*}
*/
    cacheDirty: function (val) {
        if (val !== undefined) {
            this._cacheDirty = val;
            // Check if the entity is a child of a composite or composite
            // entity chain and propagate the dirty cache up the chain
            if (val && this._compositeParent && this._parent) {
                this._parent.cacheDirty(val);
                if (!this._cache && !this._compositeCache) {
                    // Set clean immediately as no caching is enabled on this child
                    this._cacheDirty = false;
                }
            }
            return this;
        }
        return this._cacheDirty;
    },
/**
* Gets the position of the mouse relative to this entity's
* center point.
* @param {IgeViewport=} viewport The viewport to use as the
* base from which the mouse position is determined. If no
* viewport is specified then the current viewport the engine
* is rendering to is used instead.
* @example #Get the mouse position relative to the entity
* // The returned value is an object with properties x, y, z
* var mousePos = entity.mousePos();
* @return {IgePoint3d} The mouse point relative to the entity
* center.
*/
    mousePos: function (viewport) {
        // Default to the viewport the engine is currently rendering.
        viewport = viewport || ige._currentViewport;
        if (viewport) {
            var mp = viewport._mousePos.clone(),
                cam;
            if (this._ignoreCamera) {
                // Camera compensation is currently disabled; kept for reference.
                /*cam = ige._currentCamera;
                mp.thisMultiply(1 / cam._scale.x, 1 / cam._scale.y, 1 / cam._scale.z);
                //mp.thisRotate(-cam._rotate.z);
                mp.thisAddPoint(cam._translate);*/
            }
            // Shift by the viewport translate, then into this entity's space.
            mp.x += viewport._translate.x;
            mp.y += viewport._translate.y;
            this._transformPoint(mp);
            return mp;
        } else {
            // No viewport available: report the origin.
            return new IgePoint3d(0, 0, 0);
        }
    },
/**
* Gets the position of the mouse relative to this entity not
* taking into account viewport translation.
* @param {IgeViewport=} viewport The viewport to use as the
* base from which the mouse position is determined. If no
* viewport is specified then the current viewport the engine
* is rendering to is used instead.
* @example #Get absolute mouse position
* var mousePosAbs = entity.mousePosAbsolute();
* @return {IgePoint3d} The mouse point relative to the entity
* center.
*/
mousePosAbsolute: function (viewport) {
viewport = viewport || ige._currentViewport;
if (viewport) {
var mp = viewport._mousePos.clone();
this._transformPoint(mp);
return mp;
}
return new IgePoint3d(0, 0, 0);
},
/**
* Gets the position of the mouse in world co-ordinates.
* @param {IgeViewport=} viewport The viewport to use as the
* base from which the mouse position is determined. If no
* viewport is specified then the current viewport the engine
* is rendering to is used instead.
* @example #Get mouse position in world co-ordinates
* var mousePosWorld = entity.mousePosWorld();
* @return {IgePoint3d} The mouse point relative to the world
* center.
*/
    mousePosWorld: function (viewport) {
        viewport = viewport || ige._currentViewport;
        // Entity-relative mouse position, converted to world space.
        var mp = this.mousePos(viewport);
        this.localToWorldPoint(mp, viewport);
        if (this._ignoreCamera) {
            // Camera inverse transform currently disabled; kept for reference.
            //viewport.camera._worldMatrix.getInverse().transform([mp]);
        }
        return mp;
    },
/**
* Rotates the entity to point at the target point around the z axis.
* @param {IgePoint3d} point The point in world co-ordinates to
* point the entity at.
* @example #Point the entity at another entity
* entity.rotateToPoint(otherEntity.worldPosition());
* @example #Point the entity at mouse
* entity.rotateToPoint(ige._currentViewport.mousePos());
* @example #Point the entity at an arbitrary point x, y
* entity.rotateToPoint(new IgePoint3d(x, y, 0));
* @return {*}
*/
rotateToPoint: function (point) {
var worldPos = this.worldPosition();
this.rotateTo(
this._rotate.x,
this._rotate.y,
(Math.atan2(worldPos.y - point.y, worldPos.x - point.x) - this._parent._rotate.z) + Math.radians(270)
);
return this;
},
/**
* Gets / sets the texture to use as the background
* pattern for this entity.
* @param {IgeTexture} texture The texture to use as
* the background.
* @param {String=} repeat The type of repeat mode either: "repeat",
* "repeat-x", "repeat-y" or "none".
* @param {Boolean=} trackCamera If set to true, will track the camera
* translation and "move" the background with the camera.
* @param {Boolean=} isoTile If true the tiles of the background will
* be treated as isometric and will therefore be drawn so that they are
* layered seamlessly in isometric view.
* @example #Set a background pattern for this entity with 2d tiling
* var texture = new IgeTexture('path/to/my/texture.png');
* entity.backgroundPattern(texture, 'repeat', true, false);
* @example #Set a background pattern for this entity with isometric tiling
* var texture = new IgeTexture('path/to/my/texture.png');
* entity.backgroundPattern(texture, 'repeat', true, true);
* @return {*}
*/
backgroundPattern: function (texture, repeat, trackCamera, isoTile) {
if (texture !== undefined) {
this._backgroundPattern = texture;
this._backgroundPatternRepeat = repeat || 'repeat';
this._backgroundPatternTrackCamera = trackCamera;
this._backgroundPatternIsoTile = isoTile;
this._backgroundPatternFill = null;
return this;
}
return this._backgroundPattern;
},
smartBackground: function (renderMethod) {
if (renderMethod !== undefined) {
this._smartBackground = renderMethod;
return this;
}
return this._smartBackground
},
/**
* Set the object's width to the number of tile width's specified.
* @param {Number} val Number of tiles.
* @param {Boolean=} lockAspect If true, sets the height according
* to the texture aspect ratio and the new width.
* @example #Set the width of the entity based on the tile width of the map the entity is mounted to
* // Set the entity width to the size of 1 tile with
* // lock aspect enabled which will automatically size
* // the height as well so as to maintain the aspect
* // ratio of the entity
* entity.widthByTile(1, true);
* @return {*} The object this method was called from to allow
* method chaining.
*/
widthByTile: function (val, lockAspect) {
if (this._parent && this._parent._tileWidth !== undefined && this._parent._tileHeight !== undefined) {
var tileSize = this._mode === 0 ? this._parent._tileWidth : this._parent._tileWidth * 2,
ratio;
this.width(val * tileSize);
if (lockAspect) {
if (this._texture) {
// Calculate the height based on the new width
ratio = this._texture._sizeX / this._bounds2d.x;
this.height(this._texture._sizeY / ratio);
} else {
this.log('Cannot set height based on texture aspect ratio and new width because no texture is currently assigned to the entity!', 'error');
}
}
} else {
this.log('Cannot set width by tile because the entity is not currently mounted to a tile map or the tile map has no tileWidth or tileHeight values.', 'warning');
}
return this;
},
/**
* Set the object's height to the number of tile height's specified.
* @param {Number} val Number of tiles.
* @param {Boolean=} lockAspect If true, sets the width according
* to the texture aspect ratio and the new height.
* @example #Set the height of the entity based on the tile height of the map the entity is mounted to
* // Set the entity height to the size of 1 tile with
* // lock aspect enabled which will automatically size
* // the width as well so as to maintain the aspect
* // ratio of the entity
* entity.heightByTile(1, true);
* @return {*} The object this method was called from to allow
* method chaining.
*/
heightByTile: function (val, lockAspect) {
if (this._parent && this._parent._tileWidth !== undefined && this._parent._tileHeight !== undefined) {
var tileSize = this._mode === 0 ? this._parent._tileHeight : this._parent._tileHeight * 2,
ratio;
this.height(val * tileSize);
if (lockAspect) {
if (this._texture) {
// Calculate the width based on the new height
ratio = this._texture._sizeY / this._bounds2d.y;
this.width(this._texture._sizeX / ratio);
} else {
this.log('Cannot set width based on texture aspect ratio and new height because no texture is currently assigned to the entity!', 'error');
}
}
} else {
this.log('Cannot set height by tile because the entity is not currently mounted to a tile map or the tile map has no tileWidth or tileHeight values.', 'warning');
}
return this;
},
/**
* Adds the object to the tile map at the passed tile co-ordinates. If
* no tile co-ordinates are passed, will use the current tile position
* and the tileWidth() and tileHeight() values.
* @param {Number=} x X co-ordinate of the tile to occupy.
* @param {Number=} y Y co-ordinate of the tile to occupy.
* @param {Number=} width Number of tiles along the x-axis to occupy.
* @param {Number=} height Number of tiles along the y-axis to occupy.
*/
    occupyTile: function (x, y, width, height) {
        // Check that the entity is mounted to a tile map
        if (this._parent && this._parent.IgeTileMap2d) {
            if (x !== undefined && y !== undefined) {
                this._parent.occupyTile(x, y, width, height, this);
            } else {
                // Occupy tiles based upon tile point and tile width/height
                // The offset re-centres the entity's translate onto the
                // top-left tile of the footprint before converting the
                // point into tile co-ordinates.
                var trPoint = new IgePoint3d(this._translate.x - (((this._tileWidth / 2) - 0.5) * this._parent._tileWidth), this._translate.y - (((this._tileHeight / 2) - 0.5) * this._parent._tileHeight), 0),
                    tilePoint = this._parent.pointToTile(trPoint);
                // Mount mode 1 is isometric; convert the point to iso space.
                if (this._parent._mountMode === 1) {
                    tilePoint.thisToIso();
                }
                this._parent.occupyTile(tilePoint.x, tilePoint.y, this._tileWidth, this._tileHeight, this);
            }
        }
        return this; // chainable (no-op when not mounted to a tile map)
    },
/**
* Removes the object from the tile map at the passed tile co-ordinates.
* If no tile co-ordinates are passed, will use the current tile position
* and the tileWidth() and tileHeight() values.
* @param {Number=} x X co-ordinate of the tile to un-occupy.
* @param {Number=} y Y co-ordinate of the tile to un-occupy.
* @param {Number=} width Number of tiles along the x-axis to un-occupy.
* @param {Number=} height Number of tiles along the y-axis to un-occupy.
* @private
*/
    unOccupyTile: function (x, y, width, height) {
        // Check that the entity is mounted to a tile map
        if (this._parent && this._parent.IgeTileMap2d) {
            if (x !== undefined && y !== undefined) {
                this._parent.unOccupyTile(x, y, width, height);
            } else {
                // Un-occupy tiles based upon tile point and tile width/height
                // (mirrors the offset/conversion logic in occupyTile).
                var trPoint = new IgePoint3d(this._translate.x - (((this._tileWidth / 2) - 0.5) * this._parent._tileWidth), this._translate.y - (((this._tileHeight / 2) - 0.5) * this._parent._tileHeight), 0),
                    tilePoint = this._parent.pointToTile(trPoint);
                // Mount mode 1 is isometric; convert the point to iso space.
                if (this._parent._mountMode === 1) {
                    tilePoint.thisToIso();
                }
                this._parent.unOccupyTile(tilePoint.x, tilePoint.y, this._tileWidth, this._tileHeight);
            }
        }
        return this; // chainable (no-op when not mounted to a tile map)
    },
/**
* Returns an array of tile co-ordinates that the object is currently
* over, calculated using the current world co-ordinates of the object
* as well as it's 3d geometry.
* @private
* @return {Array} The array of tile co-ordinates as IgePoint3d instances.
*/
    overTiles: function () {
        // Check that the entity is mounted to a tile map
        if (this._parent && this._parent.IgeTileMap2d) {
            var x,
                y,
                tileWidth = this._tileWidth || 1,
                tileHeight = this._tileHeight || 1,
                tile = this._parent.pointToTile(this._translate),
                tileArr = [];
            // Collect every tile in the tileWidth x tileHeight footprint,
            // starting from the tile under the entity's translate.
            for (x = 0; x < tileWidth; x++) {
                for (y = 0; y < tileHeight; y++) {
                    tileArr.push(new IgePoint3d(tile.x + x, tile.y + y, 0));
                }
            }
            return tileArr;
        }
        // Implicitly returns undefined when not mounted to a tile map.
    },
/**
* Gets / sets the anchor position that this entity's texture
* will be adjusted by.
* @param {Number=} x The x anchor value.
* @param {Number=} y The y anchor value.
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
anchor: function (x, y) {
if (x !== undefined && y !== undefined) {
this._anchor = new IgePoint2d(x, y);
return this;
}
return this._anchor;
},
/**
* Gets / sets the geometry x value.
* @param {Number=} px The new x value in pixels.
* @example #Set the width of the entity
* entity.width(40);
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
width: function (px, lockAspect) {
if (px !== undefined) {
if (lockAspect) {
// Calculate the height from the change in width
var ratio = px / this._bounds2d.x;
this.height(this._bounds2d.y * ratio);
}
this._bounds2d.x = px;
this._bounds2d.x2 = (px / 2);
return this;
}
return this._bounds2d.x;
},
/**
* Gets / sets the geometry y value.
* @param {Number=} px The new y value in pixels.
* @example #Set the height of the entity
* entity.height(40);
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
height: function (px, lockAspect) {
if (px !== undefined) {
if (lockAspect) {
// Calculate the width from the change in height
var ratio = px / this._bounds2d.y;
this.width(this._bounds2d.x * ratio);
}
this._bounds2d.y = px;
this._bounds2d.y2 = (px / 2);
return this;
}
return this._bounds2d.y;
},
/**
* Gets / sets the 2d geometry of the entity. The x and y values are
* relative to the center of the entity. This geometry is used when
* rendering textures for the entity and positioning in world space as
* well as UI positioning calculations. It holds no bearing on isometric
* positioning.
* @param {Number=} x The new x value in pixels.
* @param {Number=} y The new y value in pixels.
* @example #Set the dimensions of the entity (width and height)
* entity.bounds2d(40, 40);
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
    bounds2d: function (x, y) {
        if (x !== undefined && y !== undefined) {
            // Setter: both dimensions supplied as numbers.
            // (The third argument is ignored by IgePoint2d.)
            this._bounds2d = new IgePoint2d(x, y, 0);
            return this;
        }
        if (x !== undefined && y === undefined) {
            // x is considered an IgePoint2d instance
            // NOTE(review): unlike the numeric branch, this path does not
            // `return this`; it falls through and returns the new bounds
            // instead — confirm whether callers rely on that before changing.
            this._bounds2d = new IgePoint2d(x.x, x.y);
        }
        return this._bounds2d;
    },
/**
* Gets / sets the 3d geometry of the entity. The x and y values are
* relative to the center of the entity and the z value is wholly
* positive from the "floor". Used to define a 3d bounding cuboid for
* the entity used in isometric depth sorting and hit testing.
* @param {Number=} x The new x value in pixels.
* @param {Number=} y The new y value in pixels.
* @param {Number=} z The new z value in pixels.
* @example #Set the dimensions of the entity (width, height and length)
* entity.bounds3d(40, 40, 20);
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
bounds3d: function (x, y, z) {
if (x !== undefined && y !== undefined && z !== undefined) {
this._bounds3d = new IgePoint3d(x, y, z);
return this;
}
return this._bounds3d;
},
/**
* @deprecated Use bounds3d instead
* @param x
* @param y
* @param z
*/
size3d: function (x, y, z) {
this.log('size3d has been renamed to bounds3d but is exactly the same so please search/replace your code to update calls.', 'warning');
},
/**
* Gets / sets the life span of the object in milliseconds. The life
* span is how long the object will exist for before being automatically
* destroyed.
* @param {Number=} milliseconds The number of milliseconds the entity
* will live for from the current time.
* @param {Function=} deathCallback Optional callback method to call when
* the entity is destroyed from end of lifespan.
* @example #Set the lifespan of the entity to 2 seconds after which it will automatically be destroyed
* entity.lifeSpan(2000);
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
lifeSpan: function (milliseconds, deathCallback) {
if (milliseconds !== undefined) {
this.deathTime(ige._currentTime + milliseconds, deathCallback);
return this;
}
return this.deathTime() - ige._currentTime;
},
/**
* Gets / sets the timestamp in milliseconds that denotes the time
* that the entity will be destroyed. The object checks it's own death
* time during each tick and if the current time is greater than the
* death time, the object will be destroyed.
* @param {Number=} val The death time timestamp. This is a time relative
* to the engine's start time of zero rather than the current time that
* would be retrieved from new Date().getTime(). It is usually easier
* to call lifeSpan() rather than setting the deathTime directly.
* @param {Function=} deathCallback Optional callback method to call when
* the entity is destroyed from end of lifespan.
* @example #Set the death time of the entity to 60 seconds after engine start
* entity.deathTime(60000);
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
deathTime: function (val, deathCallback) {
if (val !== undefined) {
this._deathTime = val;
if (deathCallback !== undefined) {
this._deathCallBack = deathCallback;
}
return this;
}
return this._deathTime;
},
/**
* Gets / sets the entity opacity from 0.0 to 1.0.
* @param {Number=} val The opacity value.
* @example #Set the entity to half-visible
* entity.opacity(0.5);
* @example #Set the entity to fully-visible
* entity.opacity(1.0);
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
opacity: function (val) {
if (val !== undefined) {
this._opacity = val;
return this;
}
return this._opacity;
},
/**
* Gets / sets the noAabb flag that determines if the entity's axis
* aligned bounding box should be calculated every tick or not. If
* you don't need the AABB data (for instance if you don't need to
* detect mouse events on this entity or you DO want the AABB to be
* updated but want to control it manually by calling aabb(true)
* yourself as needed).
* @param {Boolean=} val If set to true will turn off AABB calculation.
* @returns {*}
*/
noAabb: function (val) {
if (val !== undefined) {
this._noAabb = val;
return this;
}
return this._noAabb;
},
/**
* Gets / sets the texture to use when rendering the entity.
* @param {IgeTexture=} texture The texture object.
* @example #Set the entity texture (image)
* var texture = new IgeTexture('path/to/some/texture.png');
* entity.texture(texture);
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
texture: function (texture) {
if (texture !== undefined) {
this._texture = texture;
return this;
}
return this._texture;
},
/**
* Gets / sets the current texture cell used when rendering the game
* object's texture. If the texture is not cell-based, this value is
* ignored.
* @param {Number=} val The cell index.
* @example #Set the entity texture as a 4x4 cell sheet and then set the cell to use
* var texture = new IgeCellSheet('path/to/some/cellSheet.png', 4, 4);
* entity.texture(texture)
* .cell(3);
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
cell: function (val) {
if (val > 0 || val === null) {
this._cell = val;
return this;
}
return this._cell;
},
/**
* Gets / sets the current texture cell used when rendering the game
* object's texture. If the texture is not cell-based, this value is
* ignored. This differs from cell() in that it accepts a string id
* as the cell
* @param {Number=} val The cell id.
* @example #Set the entity texture as a sprite sheet with cell ids and then set the cell to use
* var texture = new IgeSpriteSheet('path/to/some/cellSheet.png', [
* [0, 0, 40, 40, 'robotHead'],
* [40, 0, 40, 40, 'humanHead'],
* ]);
*
* // Assign the texture, set the cell to use and then
* // set the entity to the size of the cell automatically!
* entity.texture(texture)
* .cellById('robotHead')
* .dimensionsFromCell();
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
cellById: function (val) {
if (val !== undefined) {
if (this._texture) {
// Find the cell index this id corresponds to
var i,
tex = this._texture,
cells = tex._cells;
for (i = 1; i < cells.length; i++) {
if (cells[i][4] === val) {
// Found the cell id so assign this cell index
this.cell(i);
return this;
}
}
// We were unable to find the cell index from the cell
// id so produce an error
this.log('Could not find the cell id "' + val + '" in the assigned entity texture ' + tex.id() + ', please check your sprite sheet (texture) cell definition to ensure the cell id "' + val + '" has been assigned to a cell!', 'error');
} else {
this.log('Cannot assign cell index from cell ID until an IgeSpriteSheet has been set as the texture for this entity. Please set the texture before calling cellById().', 'error');
}
}
return this._cell;
},
/**
* Sets the geometry of the entity to match the width and height
* of the assigned texture.
* @param {Number=} percent The percentage size to resize to.
* @example #Set the entity dimensions based on the assigned texture
* var texture = new IgeTexture('path/to/some/texture.png');
*
* // Assign the texture, and then set the entity to the
* // size of the texture automatically!
* entity.texture(texture)
* .dimensionsFromTexture();
* @return {*} The object this method was called from to allow
* method chaining.
*/
dimensionsFromTexture: function (percent) {
if (this._texture) {
if (percent === undefined) {
this.width(this._texture._sizeX);
this.height(this._texture._sizeY);
} else {
this.width(Math.floor(this._texture._sizeX / 100 * percent));
this.height(Math.floor(this._texture._sizeY / 100 * percent));
}
// Recalculate localAabb
this.localAabb(true);
}
return this;
},
/**
* Sets the geometry of the entity to match the width and height
* of the assigned texture cell. If the texture is not cell-based
* the entire texture width / height will be used.
* @param {Number=} percent The percentage size to resize to.
* @example #Set the entity dimensions based on the assigned texture and cell
* var texture = new IgeSpriteSheet('path/to/some/cellSheet.png', [
* [0, 0, 40, 40, 'robotHead'],
* [40, 0, 40, 40, 'humanHead'],
* ]);
*
* // Assign the texture, set the cell to use and then
* // set the entity to the size of the cell automatically!
* entity.texture(texture)
* .cellById('robotHead')
* .dimensionsFromCell();
* @return {*} The object this method was called from to allow
* method chaining
*/
dimensionsFromCell: function (percent) {
if (this._texture) {
if (this._texture._cells && this._texture._cells.length) {
if (percent === undefined) {
this.width(this._texture._cells[this._cell][2]);
this.height(this._texture._cells[this._cell][3]);
} else {
this.width(Math.floor(this._texture._cells[this._cell][2] / 100 * percent));
this.height(Math.floor(this._texture._cells[this._cell][3] / 100 * percent));
}
// Recalculate localAabb
this.localAabb(true);
}
}
return this;
},
/**
* Gets / sets the highlight mode. True is on false is off.
* @param {Boolean} val The highlight mode true, false or optionally a string representing a globalCompositeOperation.
* https://developer.mozilla.org/en-US/docs/Web/API/Canvas_API/Tutorial/Compositing
* @example #Set the entity to render highlighted
* entity.highlight(true);
* @example #Set the entity to render highlighted using 'screen' globalCompositeOperation
* entity.highlight('screen');
* @example #Get the current highlight state
* var isHighlighted = entity.highlight();
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
highlight: function (val, highlightChildEntities = true) {
if (val !== undefined) {
this._highlight = val;
if (highlightChildEntities) {
this._children.each(function (child) {
child.highlight(val);
});
}
this.cacheDirty(true);
return this;
}
return this._highlight;
},
/**
* Returns the absolute world position of the entity as an
* IgePoint3d.
* @example #Get the world position of the entity
* var wordPos = entity.worldPosition();
* @return {IgePoint3d} The absolute world position of the
* entity.
*/
worldPosition: function () {
return new IgePoint3d(this._worldMatrix.matrix[2], this._worldMatrix.matrix[5], 0);
},
/**
* Returns the absolute world rotation z of the entity as a
* value in radians.
* @example #Get the world rotation of the entity's z axis
* var wordRot = entity.worldRotationZ();
* @return {Number} The absolute world rotation z of the
* entity.
*/
worldRotationZ: function () {
return this._worldMatrix.rotationRadians();
},
	/**
	 * Converts an array of points from local space to this entity's
	 * world space using it's world transform matrix. This will alter
	 * the points passed in the array directly.
	 * @param {Array} points The array of IgePoints to convert.
	 * @param {Object=} viewport Optional viewport; defaults to the
	 * engine's current viewport. NOTE(review): only read here for the
	 * default assignment — the camera-transform line that would use it
	 * is commented out below.
	 * @param {Boolean=} inverse If truthy, transforms by the local
	 * matrix instead of the world matrix (the true inverse-world
	 * transform is commented out below).
	 */
	localToWorld: function (points, viewport, inverse) {
		viewport = viewport || ige._currentViewport;
		if (this._adjustmentMatrix) {
			// Apply the optional adjustment matrix
			// NOTE(review): this mutates this._worldMatrix in place each
			// call rather than multiplying into a copy — confirm intended
			this._worldMatrix.multiply(this._adjustmentMatrix);
		}
		if (!inverse) {
			this._worldMatrix.transform(points, this);
		} else {
			// "Inverse" path currently uses the local matrix; the exact
			// inverse-world version is kept here disabled
			this._localMatrix.transform(points, this);
			//this._worldMatrix.getInverse().transform(points, this);
		}
		if (this._ignoreCamera) {
			// Camera compensation intentionally disabled
			//viewport.camera._worldMatrix.transform(points, this);
		}
	},
/**
* Converts a point from local space to this entity's world space
* using it's world transform matrix. This will alter the point's
* data directly.
* @param {IgePoint3d} point The IgePoint3d to convert.
*/
localToWorldPoint: function (point, viewport) {
viewport = viewport || ige._currentViewport;
this._worldMatrix.transform([point], this);
},
/**
* Returns the screen position of the entity as an IgePoint3d where x is the
* "left" and y is the "top", useful for positioning HTML elements at the
* screen location of an IGE entity. This method assumes that the top-left
* of the main canvas element is at 0, 0. If not you can adjust the values
* yourself to allow for offset.
* @example #Get the screen position of the entity
* var screenPos = entity.screenPosition();
* @return {IgePoint3d} The screen position of the entity.
*/
screenPosition: function () {
return new IgePoint3d(
Math.floor(((this._worldMatrix.matrix[2] - ige._currentCamera._translate.x) * ige._currentCamera._scale.x) + ige._bounds2d.x2),
Math.floor(((this._worldMatrix.matrix[5] - ige._currentCamera._translate.y) * ige._currentCamera._scale.y) + ige._bounds2d.y2),
0
);
},
	/**
	 * @deprecated Use bounds3dPolygon instead.
	 * Intentionally empty stub kept for backwards compatibility; see
	 * localBounds3dPolygon for the replacement implementation.
	 */
	localIsoBoundsPoly: function () {},
	/**
	 * Builds (and caches) the 2d screen-space outline polygon of this
	 * entity's 3d bounding cuboid, projected with Math.toIso. Six of the
	 * eight cuboid corners form the visible silhouette hexagon; the
	 * first point is repeated to close the polygon.
	 * @param {Boolean=} recalculate If true, forces a rebuild even when
	 * a cached polygon exists and is not dirty.
	 * @return {IgePoly2d} The cached local-space bounds polygon.
	 */
	localBounds3dPolygon: function (recalculate) {
		if (this._bounds3dPolygonDirty || !this._localBounds3dPolygon || recalculate) {
			var geom = this._bounds3d,
				poly = new IgePoly2d(),
				// Bottom face corners of the cuboid (z = -z2)
				bf2 = Math.toIso(+(geom.x2), -(geom.y2), -(geom.z2)),
				bf3 = Math.toIso(+(geom.x2), +(geom.y2), -(geom.z2)),
				bf4 = Math.toIso(-(geom.x2), +(geom.y2), -(geom.z2)),
				// Top face corners of the cuboid (z = +z2)
				tf1 = Math.toIso(-(geom.x2), -(geom.y2), (geom.z2)),
				tf2 = Math.toIso(+(geom.x2), -(geom.y2), (geom.z2)),
				tf4 = Math.toIso(-(geom.x2), +(geom.y2), (geom.z2));
			// Walk the silhouette: top edge, down the right side, across
			// the bottom, back up the left side, then close the loop
			poly.addPoint(tf1.x, tf1.y)
				.addPoint(tf2.x, tf2.y)
				.addPoint(bf2.x, bf2.y)
				.addPoint(bf3.x, bf3.y)
				.addPoint(bf4.x, bf4.y)
				.addPoint(tf4.x, tf4.y)
				.addPoint(tf1.x, tf1.y);
			this._localBounds3dPolygon = poly;
			this._bounds3dPolygonDirty = false;
		}
		return this._localBounds3dPolygon;
	},
	/**
	 * @deprecated Use bounds3dPolygon instead.
	 * Intentionally empty stub kept for backwards compatibility.
	 */
	isoBoundsPoly: function () {},
bounds3dPolygon: function (recalculate) {
if (this._bounds3dPolygonDirty || !this._bounds3dPolygon || recalculate) {
var poly = this.localBounds3dPolygon(recalculate).clone();
// Convert local co-ordinates to world based on entities world matrix
this.localToWorld(poly._poly);
this._bounds3dPolygon = poly;
}
return this._bounds3dPolygon;
},
	/**
	 * @deprecated Use mouseInBounds3d instead.
	 * Intentionally empty stub kept for backwards compatibility.
	 */
	mouseInIsoBounds: function () {},
mouseInBounds3d: function (recalculate) {
var poly = this.localBounds3dPolygon(recalculate),
mp = this.mousePos();
return poly.pointInside(mp);
},
	/**
	 * Calculates and returns the current axis-aligned bounding box in
	 * world co-ordinates. The result is cached on this._aabb and only
	 * rebuilt when the dirty flag is set or recalculation is forced.
	 * @param {Boolean=} recalculate If true this will force the
	 * recalculation of the AABB instead of returning a cached
	 * value.
	 * @param {Boolean=} inverse Passed through to localToWorld to select
	 * the inverse transform path.
	 * @example #Get the entity axis-aligned bounding box dimensions
	 *     var aabb = entity.aabb();
	 *
	 *     console.log(aabb.x);
	 *     console.log(aabb.y);
	 *     console.log(aabb.width);
	 *     console.log(aabb.height);
	 * @example #Get the entity axis-aligned bounding box dimensions forcing the engine to update the values first
	 *     var aabb = entity.aabb(true); // Call with true to force update
	 *
	 *     console.log(aabb.x);
	 *     console.log(aabb.y);
	 *     console.log(aabb.width);
	 *     console.log(aabb.height);
	 * @return {IgeRect} The axis-aligned bounding box in world co-ordinates.
	 */
	aabb: function (recalculate, inverse) {
		if (this._aabbDirty || !this._aabb || recalculate) { // && this.newFrame()
			var poly = new IgePoly2d(),
				minX, minY,
				maxX, maxY,
				box,
				anc = this._anchor,
				ancX = anc.x,
				ancY = anc.y,
				geom,
				geomX2,
				geomY2,
				x, y;
			geom = this._bounds2d;
			geomX2 = geom.x2;
			geomY2 = geom.y2;
			x = geomX2;
			y = geomY2;
			// Build the four local-space corners of the 2d bounds,
			// offset by the anchor point
			poly.addPoint(-x + ancX, -y + ancY);
			poly.addPoint(x + ancX, -y + ancY);
			poly.addPoint(x + ancX, y + ancY);
			poly.addPoint(-x + ancX, y + ancY);
			// Remember the local top-left for rendering use
			this._renderPos = {x: -x + ancX, y: -y + ancY};
			// Convert the poly's points from local space to world space
			this.localToWorld(poly._poly, null, inverse);
			// Get the extents of the newly transformed poly
			minX = Math.min(
				poly._poly[0].x,
				poly._poly[1].x,
				poly._poly[2].x,
				poly._poly[3].x
			);
			minY = Math.min(
				poly._poly[0].y,
				poly._poly[1].y,
				poly._poly[2].y,
				poly._poly[3].y
			);
			maxX = Math.max(
				poly._poly[0].x,
				poly._poly[1].x,
				poly._poly[2].x,
				poly._poly[3].x
			);
			maxY = Math.max(
				poly._poly[0].y,
				poly._poly[1].y,
				poly._poly[2].y,
				poly._poly[3].y
			);
			// Cache the world-space rect and clear the dirty flag
			box = new IgeRect(minX, minY, maxX - minX, maxY - minY);
			this._aabb = box;
			this._aabbDirty = false;
		}
		return this._aabb;
	},
/**
* Calculates and returns the local axis-aligned bounding box
* for the entity. This is the AABB relative to the entity's
* center point.
* @param {Boolean=} recalculate If true this will force the
* recalculation of the local AABB instead of returning a cached
* value.
* @example #Get the entity local axis-aligned bounding box dimensions
* var aabb = entity.localAabb();
*
* console.log(aabb.x);
* console.log(aabb.y);
* console.log(aabb.width);
* console.log(aabb.height);
* @example #Get the entity local axis-aligned bounding box dimensions forcing the engine to update the values first
* var aabb = entity.localAabb(true); // Call with true to force update
*
* console.log(aabb.x);
* console.log(aabb.y);
* console.log(aabb.width);
* console.log(aabb.height);
* @return {IgeRect} The local AABB.
*/
localAabb: function (recalculate) {
if (!this._localAabb || recalculate) {
var aabb = this.aabb();
this._localAabb = new IgeRect(-Math.floor(aabb.width / 2), -Math.floor(aabb.height / 2), Math.floor(aabb.width), Math.floor(aabb.height));
}
return this._localAabb;
},
/**
* Calculates the axis-aligned bounding box for this entity, including
* all child entity bounding boxes and returns the final composite
* bounds.
* @example #Get the composite AABB
* var entity = new IgeEntity(),
* aabb = entity.compositeAabb();
* @return {IgeRect}
*/
compositeAabb: function (inverse) {
var arr = this._children,
arrCount,
rect = this.aabb(true, inverse).clone();
// Now loop all children and get the aabb for each of them
// them add those bounds to the current rect
if (arr) {
arrCount = arr.length;
while (arrCount--) {
rect.thisCombineRect(arr[arrCount].compositeAabb(inverse));
}
}
return rect;
},
/**
* Gets / sets the composite stream flag. If set to true, any objects
* mounted to this one will have their streamMode() set to the same
* value as this entity and will also have their compositeStream flag
* set to true. This allows you to easily automatically stream any
* objects mounted to a root object and stream them all.
* @param val
* @returns {*}
*/
compositeStream: function (val) {
if (val !== undefined) {
this._compositeStream = val;
return this;
}
return this._compositeStream;
},
	/**
	 * Override the _childMounted method and apply entity-based flags.
	 * Propagates composite-streaming settings to the newly mounted child
	 * before calling the superclass handler, then invalidates the
	 * composite cache if one is in use.
	 * @param {IgeEntity} child The entity that was just mounted to this one.
	 * @private
	 */
	_childMounted: function (child) {
		// Check if we need to set the compositeStream and streamMode
		if (this.compositeStream()) {
			child.compositeStream(true);
			child.streamMode(this.streamMode());
			child.streamControl(this.streamControl());
		}
		// Let the base class finish the mount before cache invalidation
		IgeObject.prototype._childMounted.call(this, child);
		// Check if we are compositeCached and update the cache
		if (this.compositeCache()) {
			this.cacheDirty(true);
		}
	},
/**
* Takes two values and returns them as an array where index [0]
* is the y argument and index[1] is the x argument. This method
* is used specifically in the 3d bounds intersection process to
* determine entity depth sorting.
* @param {Number} x The first value.
* @param {Number} y The second value.
* @return {Array} The swapped arguments.
* @private
*/
_swapVars: function (x, y) {
return [y, x];
},
_internalsOverlap: function (x0, x1, y0, y1) {
var tempSwap;
if (x0 > x1) {
tempSwap = this._swapVars(x0, x1);
x0 = tempSwap[0];
x1 = tempSwap[1];
}
if (y0 > y1) {
tempSwap = this._swapVars(y0, y1);
y0 = tempSwap[0];
y1 = tempSwap[1];
}
if (x0 > y0) {
tempSwap = this._swapVars(x0, y0);
x0 = tempSwap[0];
y0 = tempSwap[1];
tempSwap = this._swapVars(x1, y1);
x1 = tempSwap[0];
y1 = tempSwap[1];
}
return y0 < x1;
},
	/**
	 * Determines if this entity's 3d bounds cuboid overlaps another
	 * entity's cuboid when both are projected onto three 2d axes
	 * (x-y, x-z and z-y). Used by the isometric depth-sorting process.
	 * NOTE(review): z extents use the full bounds depth (translate.z to
	 * translate.z + z) while x/y use half-extents around the translate —
	 * this matches bounds3d's "z is wholly positive from the floor"
	 * convention.
	 * @param {IgeEntity} otherObject The entity to test against.
	 * @return {Boolean} True if all three axis projections overlap.
	 * @private
	 */
	_projectionOverlap: function (otherObject) {
		var thisG3d = this._bounds3d,
			thisMin = {
				x: this._translate.x - thisG3d.x / 2,
				y: this._translate.y - thisG3d.y / 2,
				z: this._translate.z - thisG3d.z
			},
			thisMax = {
				x: this._translate.x + thisG3d.x / 2,
				y: this._translate.y + thisG3d.y / 2,
				z: this._translate.z + thisG3d.z
			},
			otherG3d = otherObject._bounds3d,
			otherMin = {
				x: otherObject._translate.x - otherG3d.x / 2,
				y: otherObject._translate.y - otherG3d.y / 2,
				z: otherObject._translate.z - otherG3d.z
			},
			otherMax = {
				x: otherObject._translate.x + otherG3d.x / 2,
				y: otherObject._translate.y + otherG3d.y / 2,
				z: otherObject._translate.z + otherG3d.z
			};
		// Each pair of (min, max) differences forms a 1d interval; the
		// cuboids overlap only if all three projected intervals overlap
		return this._internalsOverlap(
			thisMin.x - thisMax.y,
			thisMax.x - thisMin.y,
			otherMin.x - otherMax.y,
			otherMax.x - otherMin.y
		) && this._internalsOverlap(
			thisMin.x - thisMax.z,
			thisMax.x - thisMin.z,
			otherMin.x - otherMax.z,
			otherMax.x - otherMin.z
		) && this._internalsOverlap(
			thisMin.z - thisMax.y,
			thisMax.z - thisMin.y,
			otherMin.z - otherMax.y,
			otherMax.z - otherMin.y
		);
	},
	/**
	 * Compares the current entity's 3d bounds to the passed entity and
	 * determines if the current entity is "behind" the passed one. If an
	 * entity is behind another, it is drawn first during the scenegraph
	 * render phase. The axis checks run in x, y, z order; when no axis
	 * strictly separates the two cuboids, the sum of translate components
	 * acts as the tie-break.
	 * @param {IgeEntity} otherObject The other entity to check this
	 * entity's 3d bounds against.
	 * @example #Determine if this entity is "behind" another entity based on the current depth-sort
	 *     var behind = entity.isBehind(otherEntity);
	 * @return {Boolean} If true this entity is "behind" the passed entity
	 * or false if not.
	 */
	isBehind: function (otherObject) {
		var thisG3d = this._bounds3d,
			otherG3d = otherObject._bounds3d,
			thisTranslate = this._translate.clone(),
			otherTranslate = otherObject._translate.clone();
		// thisTranslate.thisToIso();
		// otherTranslate.thisToIso();
		// Compensate for non-centered origins by shifting the effective
		// translate toward the 0.5/0.5 center
		if(this._origin.x !== 0.5 || this._origin.y !== 0.5) {
			thisTranslate.x += this._bounds2d.x * (0.5 - this._origin.x)
			thisTranslate.y += this._bounds2d.y * (0.5 - this._origin.y)
		}
		if(otherObject._origin.x !== 0.5 || otherObject._origin.y !== 0.5) {
			otherTranslate.x += otherObject._bounds2d.x * (0.5 - otherObject._origin.x)
			otherTranslate.y += otherObject._bounds2d.y * (0.5 - otherObject._origin.y)
		}
		var
			thisX = thisTranslate.x,
			thisY = thisTranslate.y,
			otherX = otherTranslate.x,
			otherY = otherTranslate.y,
			// Build both entities' 3d bounds extents around the adjusted
			// translates (z spans translate.z to translate.z + depth)
			thisMin = new IgePoint3d(
				thisX - thisG3d.x / 2,
				thisY - thisG3d.y / 2,
				this._translate.z
			),
			thisMax = new IgePoint3d(
				thisX + thisG3d.x / 2,
				thisY + thisG3d.y / 2,
				this._translate.z + thisG3d.z
			),
			otherMin = new IgePoint3d(
				otherX - otherG3d.x / 2,
				otherY - otherG3d.y / 2,
				otherObject._translate.z
			),
			otherMax = new IgePoint3d(
				otherX + otherG3d.x / 2,
				otherY + otherG3d.y / 2,
				otherObject._translate.z + otherG3d.z
			);
		// If an axis strictly separates the cuboids, that axis alone
		// decides the draw order
		if (thisMax.x <= otherMin.x) {
			return false;
		}
		if (otherMax.x <= thisMin.x) {
			return true;
		}
		if (thisMax.y <= otherMin.y) {
			return false;
		}
		if (otherMax.y <= thisMin.y) {
			return true;
		}
		if (thisMax.z <= otherMin.z) {
			return false;
		}
		if (otherMax.z <= thisMin.z) {
			return true;
		}
		// Overlapping on every axis: fall back to comparing the summed
		// translate components
		return (thisX + thisY + this._translate.z) > (otherX + otherY + otherObject._translate.z);
	},
/**
* Get / set the flag determining if this entity will respond
* to mouse interaction or not. When you set a mouse* event e.g.
* mouseUp, mouseOver etc this flag will automatically be reset
* to true.
* @param {Boolean=} val The flag value true or false.
* @example #Set entity to ignore mouse events
* entity.mouseEventsActive(false);
* @example #Set entity to receive mouse events
* entity.mouseEventsActive(true);
* @example #Get current flag value
* var val = entity.mouseEventsActive();
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
mouseEventsActive: function (val) {
if (val !== undefined) {
this._mouseEventsActive = val;
return this;
}
return this._mouseEventsActive;
},
/**
* Sets the _ignoreCamera internal flag to the value passed for this
* and all child entities down the scenegraph.
* @param val
*/
ignoreCameraComposite: function (val) {
var i,
arr = this._children,
arrCount = arr.length;
this._ignoreCamera = val;
for (i = 0; i < arrCount; i++) {
if (arr[i].ignoreCameraComposite) {
arr[i].ignoreCameraComposite(val);
}
}
},
/**
* Determines if the frame alternator value for this entity
* matches the engine's frame alternator value. The entity's
* frame alternator value will be set to match the engine's
* after each call to the entity.tick() method so the return
* value of this method can be used to determine if the tick()
* method has already been run for this entity.
*
* This is useful if you have multiple viewports which will
* cause the entity tick() method to fire once for each viewport
* but you only want to execute update code such as movement etc
* on the first time the tick() method is called.
*
* @example #Determine if the entity has already had it's tick method called
* var tickAlreadyCalled = entity.newFrame();
* @return {Boolean} If false, the entity's tick method has
* not yet been processed for this tick.
*/
newFrame: function () {
return ige._frameAlternator !== this._frameAlternatorCurrent;
},
/**
* Sets the canvas context transform properties to match the the game
* object's current transform values.
* @param {CanvasRenderingContext2D} ctx The canvas context to apply
* the transformation matrix to.
* @example #Transform a canvas context to the entity's local matrix values
* var canvas = document.createElement('canvas');
* canvas.width = 800;
* canvas.height = 600;
*
* var ctx = canvas.getContext('2d');
* entity._transformContext(ctx);
* @private
*/
_transformContext: function (ctx, inverse) {
if (this._parent) {
ctx.globalAlpha = this._computedOpacity = this._parent._computedOpacity * this._opacity;
} else {
ctx.globalAlpha = this._computedOpacity = this._opacity;
}
if (!inverse) {
this._localMatrix.transformRenderingContext(ctx);
} else {
this._localMatrix.getInverse().transformRenderingContext(ctx);
}
},
mouseAlwaysInside: function (val) {
if (val !== undefined) {
this._mouseAlwaysInside = val;
return this;
}
return this._mouseAlwaysInside;
},
/**
* Processes the updates required each render frame. Any code in the update()
* method will be called ONCE for each render frame BEFORE the tick() method.
* This differs from the tick() method in that the tick method can be called
* multiple times during a render frame depending on how many viewports your
* simulation is being rendered to, whereas the update() method is only called
* once. It is therefore the perfect place to put code that will control your
* entity's motion, AI etc.
* @param {CanvasRenderingContext2D} ctx The canvas context to render to.
*/
update: function (ctx, tickDelta) {
// Check if the entity should still exist
if (this._deathTime !== undefined && this._deathTime <= ige._tickStart) {
// Check if the deathCallBack was set
if (this._deathCallBack) {
this._deathCallBack.apply(this);
delete this._deathCallback;
}
// The entity should be removed because it has died
this.destroy();
} else {
// Check that the entity has been born
if (this._bornTime === undefined || ige._currentTime >= this._bornTime) {
// Remove the stream data cache
delete this._streamDataCache;
// Process any behaviours assigned to the entity
this._processUpdateBehaviours(ctx, tickDelta);
// Process velocity
if (this._velocity.x || this._velocity.y) {
this._translate.x += (this._velocity.x / 16) * tickDelta;
this._translate.y += (this._velocity.y / 16) * tickDelta;
}
if (this._timeStream.length) {
// Process any interpolation
this._processInterpolate(ige._tickStart - ige.network.stream._renderLatency);
}
// Check for changes to the transform values
// directly without calling the transform methods
this.updateTransform();
if (!this._noAabb && this._aabbDirty) {
// Update the aabb
this.aabb();
}
this._oldTranslate = this._translate.clone();
// Update this object's current frame alternator value
// which allows us to determine if we are still on the
// same frame
this._frameAlternatorCurrent = ige._frameAlternator;
} else {
// The entity is not yet born, unmount it and add to the spawn queue
this._birthMount = this._parent.id();
this.unMount();
ige.spawnQueue(this);
}
}
// Process super class
IgeObject.prototype.update.call(this, ctx, tickDelta);
},
	/**
	 * Processes the actions required each render frame: mouse trigger hit
	 * testing, cached or direct rendering, stream syncing and recursion
	 * into child entities.
	 * @param {CanvasRenderingContext2D} ctx The canvas context to render to.
	 * @param {Boolean} dontTransform If set to true, the tick method will
	 * not transform the context based on the entity's matrices. This is useful
	 * if you have extended the class and want to process down the inheritance
	 * chain but have already transformed the entity in a previous overloaded
	 * method.
	 */
	tick: function (ctx, dontTransform) {
		// Skip entirely when hidden, out of view (self or parent), or
		// freshly created by the stream and not yet ready to render
		if (!this._hidden && this._inView && (!this._parent || (this._parent._inView)) && !this._streamJustCreated) {
			// Process any behaviours assigned to the entity
			this._processTickBehaviours(ctx);
			// Process any mouse events we need to do
			if (this._mouseEventsActive) {
				if (this._processTriggerHitTests()) {
					// Point is inside the trigger bounds
					ige.input.queueEvent(this, this._mouseInTrigger, null);
				} else {
					if (ige.input.mouseMove) {
						// There is a mouse move event but we are not inside the entity
						// so fire a mouse out event (_handleMouseOut will check if the
						// mouse WAS inside before firing an out event).
						this._handleMouseOut(ige.input.mouseMove);
					}
				}
			}
			if (!this._dontRender) {
				// Check for cached version
				if (this._cache || this._compositeCache) {
					// Caching is enabled
					if (this._cacheDirty) {
						// The cache is dirty, redraw it
						this._refreshCache(dontTransform);
					}
					// Now render the cached image data to the main canvas
					this._renderCache(ctx);
				} else {
					// Non-cached output
					// Transform the context by the current transform settings
					if (!dontTransform) {
						this._transformContext(ctx);
					}
					// Render the entity
					this._renderEntity(ctx, dontTransform);
				}
			}
			// Process any automatic-mode stream updating required
			if (this._streamMode === 1) {
				this.streamSync();
			}
			if (this._compositeCache) {
				if (this._cacheDirty) {
					// Composite caching: children render into the cache
					// context, then the cache is blitted to the main canvas
					IgeObject.prototype.tick.call(this, this._cacheCtx);
					this._renderCache(ctx);
					this._cacheDirty = false;
				}
			} else {
				// Process children directly against the main context
				IgeObject.prototype.tick.call(this, ctx);
			}
		}
	},
	/**
	 * Runs the mouse hit test for this entity against its trigger shape.
	 * The shape is, in priority order: a custom trigger polygon method
	 * named by this._triggerPolygon, the 3d bounds polygon (when mounted
	 * isometrically, parent._mountMode === 1), or the world AABB.
	 * If _mouseAlwaysInside is set, the test short-circuits to true.
	 * @return {Boolean} True if the mouse is considered inside this
	 * entity's trigger area.
	 * @private
	 */
	_processTriggerHitTests: function () {
		var mp, mouseTriggerPoly;
		if (ige._currentViewport) {
			if (!this._mouseAlwaysInside) {
				mp = this.mousePosWorld();
				if (mp) {
					// Use the trigger polygon if defined
					if (this._triggerPolygon && this[this._triggerPolygon]) {
						mouseTriggerPoly = this[this._triggerPolygon](mp);
					} else {
						// Default to either aabb or bounds3dPolygon depending on entity parent mounting mode
						if (this._parent && this._parent._mountMode === 1) {
							// Use bounds3dPolygon
							mouseTriggerPoly = this.bounds3dPolygon();
						} else {
							// Use aabb
							mouseTriggerPoly = this.aabb();
						}
					}
					// Check if the current mouse position is inside this aabb
					return mouseTriggerPoly.xyInside(mp.x, mp.y);
				}
			} else {
				return true;
			}
		}
		return false;
	},
	/**
	 * Redraws the entity's off-screen cache canvas. For composite caches
	 * the canvas is resized to the composite AABB of the entity and all
	 * children; for simple caches it is sized to the entity's own 2d
	 * bounds. The cache context is then translated so the entity renders
	 * at the correct offset before _renderEntity draws into it.
	 * @param {Boolean} dontTransform If true, skips applying the entity's
	 * local matrix to the cache context (caller already transformed it).
	 * @private
	 */
	_refreshCache: function (dontTransform) {
		// The cache is not clean so re-draw it
		// Render the entity to the cache
		var _canvas = this._cacheCanvas,
			_ctx = this._cacheCtx;
		if (this._compositeCache) {
			// Get the composite entity AABB and alter the internal canvas
			// to the composite size so we can render the entire entity
			var aabbC = this.compositeAabb();
			this._compositeAabbCache = aabbC;
			if (aabbC.width > 0 && aabbC.height > 0) {
				_canvas.width = Math.ceil(aabbC.width);
				_canvas.height = Math.ceil(aabbC.height);
			} else {
				// We cannot set a zero size for a canvas, it will
				// cause the browser to freak out
				_canvas.width = 2;
				_canvas.height = 2;
			}
			// Translate to the center of the canvas
			_ctx.translate(-aabbC.x, -aabbC.y);
			/**
			 * Fires when the entity's composite cache is ready.
			 * @event IgeEntity#compositeReady
			 */
			this.emit('compositeReady');
		} else {
			if (this._bounds2d.x > 0 && this._bounds2d.y > 0) {
				_canvas.width = this._bounds2d.x;
				_canvas.height = this._bounds2d.y;
			} else {
				// We cannot set a zero size for a canvas, it will
				// cause the browser to freak out
				_canvas.width = 1;
				_canvas.height = 1;
			}
			// Translate to the center of the canvas
			_ctx.translate(this._bounds2d.x2, this._bounds2d.y2);
			// NOTE(review): only the simple-cache path clears _cacheDirty
			// here; the composite path clears it in tick() after children
			// have rendered into the cache context
			this._cacheDirty = false;
		}
		// Transform the context by the current transform settings
		if (!dontTransform) {
			this._transformContext(_ctx);
		}
		this._renderEntity(_ctx, dontTransform);
	},
/**
* Handles calling the texture.render() method if a texture
* is applied to the entity. This part of the tick process has
* been abstracted to allow it to be overridden by an extending
* class.
* @param {CanvasRenderingContext2D} ctx The canvas context to render
* the entity to.
* @private
*/
_renderEntity: function (ctx) {
if (this._opacity > 0) {
// Check if the entity has a background pattern
if (this._backgroundPattern) {
if (!this._backgroundPatternFill) {
// We have a pattern but no fill produced
// from it. Check if we have a context to
// generate a pattern from
if (ctx) {
// Produce the pattern fill
this._backgroundPatternFill = ctx.createPattern(this._backgroundPattern.image, this._backgroundPatternRepeat);
}
}
if (this._backgroundPatternFill) {
// Draw the fill
ctx.save();
ctx.fillStyle = this._backgroundPatternFill;
if (this._smartBackground) {
this._smartBackground(ctx, this);
} else {
// TODO: When firefox has fixed their bug regarding negative rect co-ordinates, revert this change
// This is the proper way to do this but firefox has a bug which I'm gonna report
// so instead I have to use ANOTHER translate call instead. So crap!
//ctx.rect(-this._bounds2d.x2, -this._bounds2d.y2, this._bounds2d.x, this._bounds2d.y);
ctx.translate(-this._bounds2d.x2, -this._bounds2d.y2);
ctx.rect(0, 0, this._bounds2d.x, this._bounds2d.y);
if (this._backgroundPatternTrackCamera) {
ctx.translate(-ige._currentCamera._translate.x, -ige._currentCamera._translate.y);
ctx.scale(ige._currentCamera._scale.x, ige._currentCamera._scale.y);
}
ctx.fill();
ige._drawCount++;
if (this._backgroundPatternIsoTile) {
ctx.translate(-Math.floor(this._backgroundPattern.image.width) / 2, -Math.floor(this._backgroundPattern.image.height / 2));
ctx.fill();
ige._drawCount++;
}
}
ctx.restore();
}
}
var texture = this._texture;
// Check if the entity is visible based upon its opacity
if (texture && texture._loaded) {
// Draw the entity image
texture.render(ctx, this, ige._tickDelta);
if (this._highlight) {
ctx.save();
ctx.globalCompositeOperation = this._highlightToGlobalCompositeOperation(this._highlight);
texture.render(ctx, this);
ctx.restore();
}
}
if (this._compositeCache && ige._currentViewport._drawCompositeBounds) {
//console.log('moo');
ctx.fillStyle = 'rgba(0, 0, 255, 0.3)';
ctx.fillRect(-this._bounds2d.x2, -this._bounds2d.y2, this._bounds2d.x, this._bounds2d.y);
ctx.fillStyle = '#ffffff';
ctx.fillText('Composite Entity', -this._bounds2d.x2, -this._bounds2d.y2 - 15);
ctx.fillText(this.id(), -this._bounds2d.x2, -this._bounds2d.y2 - 5);
}
}
},
		/**
		 * Draws the cached off-screen canvas image data to the passed canvas
		 * context. When composite caching is active, first offsets the
		 * context by the cached composite AABB position.
		 * @param {CanvasRenderingContext2D} ctx The canvas context to render
		 * the entity to.
		 * @private
		 */
		_renderCache: function (ctx) {
			ctx.save();
			if (this._compositeCache) {
				// Offset by the composite AABB captured in _refreshCache()
				var aabbC = this._compositeAabbCache;
				ctx.translate(this._bounds2d.x2 + aabbC.x, this._bounds2d.y2 + aabbC.y);
				if (this._parent && this._parent._ignoreCamera) {
					// Translate the entity back to negate the scene translate
					var cam = ige._currentCamera;
					//ctx.translate(-cam._translate.x, -cam._translate.y);
					/*this.scaleTo(1 / cam._scale.x, 1 / cam._scale.y, 1 / cam._scale.z);
					this.rotateTo(-cam._rotate.x, -cam._rotate.y, -cam._rotate.z);*/
				}
			}
			// We have a clean cached version so output that
			ctx.drawImage(
				this._cacheCanvas,
				-this._bounds2d.x2, -this._bounds2d.y2
			);
			if (ige._currentViewport._drawCompositeBounds) {
				// Debug overlay showing the cache canvas extents
				ctx.fillStyle = 'rgba(0, 255, 0, 0.5)';
				ctx.fillRect(-this._bounds2d.x2, -this._bounds2d.y2, this._cacheCanvas.width, this._cacheCanvas.height);
				ctx.fillStyle = '#ffffff';
				ctx.fillText('Composite Cache', -this._bounds2d.x2, -this._bounds2d.y2 - 15);
				ctx.fillText(this.id(), -this._bounds2d.x2, -this._bounds2d.y2 - 5);
			}
			ige._drawCount++;
			ctx.restore();
		},
/**
* Transforms a point by the entity's parent world matrix and
* it's own local matrix transforming the point to this entity's
* world space.
* @param {IgePoint3d} point The point to transform.
* @example #Transform a point by the entity's world matrix values
* var point = new IgePoint3d(0, 0, 0);
* entity._transformPoint(point);
*
* console.log(point);
* @return {IgePoint3d} The transformed point.
* @private
*/
_transformPoint: function (point) {
if (this._parent) {
var tempMat = new IgeMatrix2d();
// Copy the parent world matrix
tempMat.copy(this._parent._worldMatrix);
// Apply any local transforms
tempMat.multiply(this._localMatrix);
// Now transform the point
tempMat.getInverse().transformCoord(point, this);
} else {
this._localMatrix.transformCoord(point, this);
}
return point;
},
/**
* Helper method to transform an array of points using _transformPoint.
* @param {Array} points The points array to transform.
* @private
*/
_transformPoints: function (points) {
var point, pointCount = points.length;
while (pointCount--) {
point = points[pointCount];
if (this._parent) {
var tempMat = new IgeMatrix2d();
// Copy the parent world matrix
tempMat.copy(this._parent._worldMatrix);
// Apply any local transforms
tempMat.multiply(this._localMatrix);
// Now transform the point
tempMat.getInverse().transformCoord(point, this);
} else {
this._localMatrix.transformCoord(point, this);
}
}
},
		/**
		 * Generates a string containing a code fragment that when
		 * evaluated will reproduce this object's properties via
		 * chained commands. This method will only check for
		 * properties that are directly related to this class.
		 * Other properties are handled by their own class method.
		 * @param {Object=} options Flags that suppress individual
		 * property groups (e.g. options.transform === false skips
		 * translate/rotate/scale output).
		 * @return {String} The string code fragment that will
		 * reproduce this entity when evaluated.
		 */
		_stringify: function (options) {
			// Make sure we have an options object
			if (options === undefined) { options = {}; }
			// Get the properties for all the super-classes
			var str = IgeObject.prototype._stringify.call(this, options), i;
			// Loop properties and add property assignment code to string
			for (i in this) {
				if (this.hasOwnProperty(i) && this[i] !== undefined) {
					switch (i) {
						case '_opacity':
							str += ".opacity(" + this.opacity() + ")";
							break;
						case '_texture':
							str += ".texture(ige.$('" + this.texture().id() + "'))";
							break;
						case '_cell':
							str += ".cell(" + this.cell() + ")";
							break;
						case '_translate':
							if (options.transform !== false && options.translate !== false) {
								str += ".translateTo(" + this._translate.x + ", " + this._translate.y + ", " + this._translate.z + ")";
							}
							break;
						case '_rotate':
							if (options.transform !== false && options.rotate !== false) {
								str += ".rotateTo(" + this._rotate.x + ", " + this._rotate.y + ", " + this._rotate.z + ")";
							}
							break;
						case '_scale':
							if (options.transform !== false && options.scale !== false) {
								str += ".scaleTo(" + this._scale.x + ", " + this._scale.y + ", " + this._scale.z + ")";
							}
							break;
						case '_origin':
							if (options.origin !== false) {
								str += ".originTo(" + this._origin.x + ", " + this._origin.y + ", " + this._origin.z + ")";
							}
							break;
						case '_anchor':
							if (options.anchor !== false) {
								str += ".anchor(" + this._anchor.x + ", " + this._anchor.y + ")";
							}
							break;
						case '_width':
							// Widths may be stored as percentage strings; quote those
							if (typeof(this.width()) === 'string') {
								str += ".width('" + this.width() + "')";
							} else {
								str += ".width(" + this.width() + ")";
							}
							break;
						case '_height':
							if (typeof(this.height()) === 'string') {
								str += ".height('" + this.height() + "')";
							} else {
								str += ".height(" + this.height() + ")";
							}
							break;
						case '_bounds3d':
							str += ".bounds3d(" + this._bounds3d.x + ", " + this._bounds3d.y + ", " + this._bounds3d.z + ")";
							break;
						case '_deathTime':
							if (options.deathTime !== false && options.lifeSpan !== false) {
								str += ".deathTime(" + this.deathTime() + ")";
							}
							break;
						case '_highlight':
							str += ".highlight(" + this.highlight() + ")";
							break;
					}
				}
			}
			return str;
		},
		/**
		 * Destroys the entity by removing it from the scenegraph,
		 * calling destroy() on any child entities and removing
		 * any active event listeners for the entity. Once an entity
		 * has been destroyed it's this._alive flag is also set to
		 * false.
		 * @example #Destroy the entity
		 *     entity.destroy();
		 */
		destroy: function () {
			this._alive = false;
			/* CEXCLUDE */
			// NOTE(review): the CEXCLUDE markers presumably delimit
			// server-only code stripped from client builds — confirm
			// against the build tooling before editing this section.
			// Check if the entity is streaming
			if (this._streamMode === 1) {
				delete this._streamDataCache;
				this.streamDestroy();
			}
			/* CEXCLUDE */
			/**
			 * Fires when the entity has been destroyed.
			 * @event IgeEntity#destroyed
			 * @param {IgeEntity} The entity that has been destroyed.
			 */
			this.emit('destroyed', this);
			// Call IgeObject.destroy()
			IgeObject.prototype.destroy.call(this);
		},
saveSpecialProp: function (obj, i) {
switch (i) {
case '_texture':
if (obj._texture) {
return {_texture: obj._texture.id()};
}
break;
default:
// Call super-class saveSpecialProp
return IgeObject.prototype.saveSpecialProp.call(this, obj, i);
break;
}
return undefined;
},
loadSpecialProp: function (obj, i) {
switch (i) {
case '_texture':
return {_texture: ige.$(obj[i])};
break;
default:
// Call super-class loadSpecialProp
return IgeObject.prototype.loadSpecialProp.call(this, obj, i);
break;
}
return undefined;
},
////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// INTERACTION
////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/**
* Gets / sets the callback that is fired when a mouse
* move event is triggered.
* @param {Function=} callback
* @example #Hook the mouse move event and stop it propagating further down the scenegraph
* entity.mouseMove(function (event, control) {
* // Mouse moved with button
* console.log('Mouse move button: ' + event.button);
*
* // Stop the event propagating further down the scenegraph
* control.stopPropagation();
*
* // You can ALSO stop propagation without the control object
* // reference via the global reference:
* ige.input.stopPropagation();
* });
* @return {*}
*/
mouseMove: function (callback) {
if (callback) {
this._mouseMove = callback;
this._mouseEventsActive = true;
return this;
}
return this._mouseMove;
},
/**
* Gets / sets the callback that is fired when a mouse
* over event is triggered.
* @param {Function=} callback
* @example #Hook the mouse over event and stop it propagating further down the scenegraph
* entity.mouseOver(function (event, control) {
* // Mouse over with button
* console.log('Mouse over button: ' + event.button);
*
* // Stop the event propagating further down the scenegraph
* control.stopPropagation();
*
* // You can ALSO stop propagation without the control object
* // reference via the global reference:
* ige.input.stopPropagation();
* });
* @return {*}
*/
mouseOver: function (callback) {
if (callback) {
this._mouseOver = callback;
this._mouseEventsActive = true;
return this;
}
return this._mouseOver;
},
/**
* Gets / sets the callback that is fired when a mouse
* out event is triggered.
* @param {Function=} callback
* @example #Hook the mouse out event and stop it propagating further down the scenegraph
* entity.mouseOut(function (event, control) {
* // Mouse out with button
* console.log('Mouse out button: ' + event.button);
*
* // Stop the event propagating further down the scenegraph
* control.stopPropagation();
*
* // You can ALSO stop propagation without the control object
* // reference via the global reference:
* ige.input.stopPropagation();
* });
* @return {*}
*/
mouseOut: function (callback) {
if (callback) {
this._mouseOut = callback;
this._mouseEventsActive = true;
return this;
}
return this._mouseOut;
},
/**
* Gets / sets the callback that is fired when a mouse
* up event is triggered.
* @param {Function=} callback
* @example #Hook the mouse up event and stop it propagating further down the scenegraph
* entity.mouseUp(function (event, control) {
* // Mouse up with button
* console.log('Mouse up button: ' + event.button);
*
* // Stop the event propagating further down the scenegraph
* control.stopPropagation();
*
* // You can ALSO stop propagation without the control object
* // reference via the global reference:
* ige.input.stopPropagation();
* });
* @return {*}
*/
mouseUp: function (callback) {
if (callback) {
this._mouseUp = callback;
this._mouseEventsActive = true;
return this;
}
return this._mouseUp;
},
/**
* Gets / sets the callback that is fired when a mouse
* down event is triggered.
* @param {Function=} callback
* @example #Hook the mouse down event and stop it propagating further down the scenegraph
* entity.mouseDown(function (event, control) {
* // Mouse down with button
* console.log('Mouse down button: ' + event.button);
*
* // Stop the event propagating further down the scenegraph
* control.stopPropagation();
*
* // You can ALSO stop propagation without the control object
* // reference via the global reference:
* ige.input.stopPropagation();
* });
* @return {*}
*/
mouseDown: function (callback) {
if (callback) {
this._mouseDown = callback;
this._mouseEventsActive = true;
return this;
}
return this._mouseDown;
},
/**
* Gets / sets the callback that is fired when a mouse
* wheel event is triggered.
* @param {Function=} callback
* @example #Hook the mouse wheel event and stop it propagating further down the scenegraph
* entity.mouseWheel(function (event, control) {
* // Mouse wheel with button
* console.log('Mouse wheel button: ' + event.button);
* console.log('Mouse wheel delta: ' + event.wheelDelta);
*
* // Stop the event propagating further down the scenegraph
* control.stopPropagation();
*
* // You can ALSO stop propagation without the control object
* // reference via the global reference:
* ige.input.stopPropagation();
* });
* @return {*}
*/
mouseWheel: function (callback) {
if (callback) {
this._mouseWheel = callback;
this._mouseEventsActive = true;
return this;
}
return this._mouseWheel;
},
		/**
		 * Removes the callback that is fired when a mouse
		 * move event is triggered.
		 * @return {*} This entity, for method chaining.
		 */
		mouseMoveOff: function () {
			delete this._mouseMove;
			return this;
		},
		/**
		 * Removes the callback that is fired when a mouse
		 * over event is triggered.
		 * @return {*} This entity, for method chaining.
		 */
		mouseOverOff: function () {
			delete this._mouseOver;
			return this;
		},
		/**
		 * Removes the callback that is fired when a mouse
		 * out event is triggered.
		 * @return {*} This entity, for method chaining.
		 */
		mouseOutOff: function () {
			delete this._mouseOut;
			return this;
		},
		/**
		 * Removes the callback that is fired when a mouse
		 * up event is triggered.
		 * @return {*} This entity, for method chaining.
		 */
		mouseUpOff: function () {
			delete this._mouseUp;
			return this;
		},
		/**
		 * Removes the callback that is fired when a mouse
		 * down event is triggered if the listener was registered
		 * via the mouseDown() method.
		 * @return {*} This entity, for method chaining.
		 */
		mouseDownOff: function () {
			delete this._mouseDown;
			return this;
		},
		/**
		 * Removes the callback that is fired when a mouse
		 * wheel event is triggered.
		 * @return {*} This entity, for method chaining.
		 */
		mouseWheelOff: function () {
			delete this._mouseWheel;
			return this;
		},
triggerPolygon: function (poly) {
if (poly !== undefined) {
this._triggerPolygon = poly;
return this;
}
return this._triggerPolygon;
},
		/**
		 * Gets / sets the shape / polygon that the mouse events
		 * are triggered against. There are two options, 'aabb' and
		 * 'isoBounds'. The default is 'aabb'.
		 * @param val
		 * @returns {*} Nothing — this method is a deprecated stub
		 * that only logs a warning; use triggerPolygon() instead.
		 * @deprecated
		 */
		mouseEventTrigger: function (val) {
			this.log('mouseEventTrigger is no longer in use. Please see triggerPolygon() instead.', 'warning');
			// Legacy implementation retained below for reference only
			/*if (val !== undefined) {
				// Set default value
				this._mouseEventTrigger = 0;
				switch (val) {
					case 'isoBounds':
						this._mouseEventTrigger = 1;
						break;
					case 'custom':
						this._mouseEventTrigger = 2;
						break;
					case 'aabb':
						this._mouseEventTrigger = 0;
						break;
				}
				return this;
			}
			return this._mouseEventTrigger === 0 ? 'aabb' : 'isoBounds';*/
		},
/**
* Handler method that determines which mouse-move event
* to fire, a mouse-over or a mouse-move.
* @private
*/
_handleMouseIn: function (event, evc, data) {
// Check if the mouse move is a mouse over
if (!this._mouseStateOver) {
this._mouseStateOver = true;
if (this._mouseOver) { this._mouseOver(event, evc, data); }
/**
* Fires when the mouse moves over the entity.
* @event IgeEntity#mouseOver
* @param {Object} The DOM event object.
* @param {Object} The IGE event control object.
* @param {*} Any further event data.
*/
this.emit('mouseOver', [event, evc, data]);
}
if (this._mouseMove) { this._mouseMove(event, evc, data); }
this.emit('mouseMove', [event, evc, data]);
},
/**
* Handler method that determines if a mouse-out event
* should be fired.
* @private
*/
_handleMouseOut: function (event, evc, data) {
// The mouse went away from this entity so
// set mouse-down to false, regardless of the situation
this._mouseStateDown = false;
// Check if the mouse move is a mouse out
if (this._mouseStateOver) {
this._mouseStateOver = false;
if (this._mouseOut) { this._mouseOut(event, evc, data); }
/**
* Fires when the mouse moves away from the entity.
* @event IgeEntity#mouseOut
* @param {Object} The DOM event object.
* @param {Object} The IGE event control object.
* @param {*} Any further event data.
*/
this.emit('mouseOut', [event, evc, data]);
}
},
/**
* Handler method that determines if a mouse-wheel event
* should be fired.
* @private
*/
_handleMouseWheel: function (event, evc, data) {
if (this._mouseWheel) { this._mouseWheel(event, evc, data); }
/**
* Fires when the mouse wheel is moved over the entity.
* @event IgeEntity#mouseWheel
* @param {Object} The DOM event object.
* @param {Object} The IGE event control object.
* @param {*} Any further event data.
*/
this.emit('mouseWheel', [event, evc, data]);
},
/**
* Handler method that determines if a mouse-up event
* should be fired.
* @private
*/
_handleMouseUp: function (event, evc, data) {
// Reset the mouse-down flag
this._mouseStateDown = false;
if (this._mouseUp) { this._mouseUp(event, evc, data); }
/**
* Fires when a mouse up occurs on the entity.
* @event IgeEntity#mouseUp
* @param {Object} The DOM event object.
* @param {Object} The IGE event control object.
* @param {*} Any further event data.
*/
this.emit('mouseUp', [event, evc, data]);
},
/**
* Handler method that determines if a mouse-down event
* should be fired.
* @private
*/
_handleMouseDown: function (event, evc, data) {
if (!this._mouseStateDown) {
this._mouseStateDown = true;
if (this._mouseDown) { this._mouseDown(event, evc, data); }
/**
* Fires when a mouse down occurs on the entity.
* @event IgeEntity#mouseDown
* @param {Object} The DOM event object.
* @param {Object} The IGE event control object.
* @param {*} Any further event data.
*/
this.emit('mouseDown', [event, evc, data]);
}
},
/**
* Checks mouse input types and fires the correct mouse event
* handler. This is an internal method that should never be
* called externally.
* @param {Object} evc The input component event control object.
* @param {Object} data Data passed by the input component into
* the new event.
* @private
*/
_mouseInTrigger: function (evc, data) {
if (ige.input.mouseMove) {
// There is a mouse move event
this._handleMouseIn(ige.input.mouseMove, evc, data);
}
if (ige.input.mouseDown) {
// There is a mouse down event
this._handleMouseDown(ige.input.mouseDown, evc, data);
}
if (ige.input.mouseUp) {
// There is a mouse up event
this._handleMouseUp(ige.input.mouseUp, evc, data);
}
if (ige.input.mouseWheel) {
// There is a mouse wheel event
this._handleMouseWheel(ige.input.mouseWheel, evc, data);
}
},
////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// TRANSFORM
////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/**
* Enables tracing calls which inadvertently assign NaN values to
* transformation properties. When called on an entity this system
* will break with a debug line when a transform property is set
* to NaN allowing you to step back through the call stack and
* determine where the offending value originated.
* @returns {IgeEntity}
*/
debugTransforms: function () {
ige.traceSet(this._translate, 'x', 1, function (val) {
return isNaN(val);
});
ige.traceSet(this._translate, 'y', 1, function (val) {
return isNaN(val);
});
ige.traceSet(this._translate, 'z', 1, function (val) {
return isNaN(val);
});
ige.traceSet(this._rotate, 'x', 1, function (val) {
return isNaN(val);
});
ige.traceSet(this._rotate, 'y', 1, function (val) {
return isNaN(val);
});
ige.traceSet(this._rotate, 'z', 1, function (val) {
return isNaN(val);
});
ige.traceSet(this._scale, 'x', 1, function (val) {
return isNaN(val);
});
ige.traceSet(this._scale, 'y', 1, function (val) {
return isNaN(val);
});
ige.traceSet(this._scale, 'z', 1, function (val) {
return isNaN(val);
});
return this;
},
velocityTo: function (x, y, z) {
if (x !== undefined && y!== undefined && z !== undefined) {
this._velocity.x = x;
this._velocity.y = y;
this._velocity.z = z;
} else {
this.log('velocityTo() called with a missing or undefined x, y or z parameter!', 'error');
}
return this._entity || this;
},
velocityBy: function (x, y, z) {
if (x !== undefined && y!== undefined && z !== undefined) {
this._velocity.x += x;
this._velocity.y += y;
this._velocity.z += z;
} else {
this.log('velocityBy() called with a missing or undefined x, y or z parameter!', 'error');
}
return this._entity || this;
},
/**
* Translates the entity by adding the passed values to
* the current translation values.
* @param {Number} x The x co-ordinate.
* @param {Number} y The y co-ordinate.
* @param {Number} z The z co-ordinate.
* @example #Translate the entity by 10 along the x axis
* entity.translateBy(10, 0, 0);
* @return {*}
*/
translateBy: function (x, y, z) {
if (x !== undefined && y!== undefined && z !== undefined) {
this._translate.x += x;
this._translate.y += y;
this._translate.z += z;
} else {
this.log('translateBy() called with a missing or undefined x, y or z parameter!', 'error');
}
return this._entity || this;
},
/**
* Translates the entity to the passed values.
* @param {Number} x The x co-ordinate.
* @param {Number} y The y co-ordinate.
* @param {Number} z The z co-ordinate.
* @example #Translate the entity to 10, 0, 0
* entity.translateTo(10, 0, 0);
* @return {*}
*/
translateTo: function (x, y, z) {
if (x !== undefined && y!== undefined && z !== undefined) {
this._translate.x = x;
this._translate.y = y;
this._translate.z = z;
} else {
this.log('translateTo() called with a missing or undefined x, y or z parameter!', 'error');
}
return this._entity || this;
},
/**
* Translates the entity to the passed point.
* @param {IgePoint3d} point The point with co-ordinates.
* @example #Translate the entity to 10, 0, 0
* var point = new IgePoint3d(10, 0, 0),
* entity = new IgeEntity();
*
* entity.translateToPoint(point);
* @return {*}
*/
translateToPoint: function (point) {
if (point !== undefined) {
this._translate.x = point.x;
this._translate.y = point.y;
this._translate.z = point.z;
} else {
this.log('translateToPoint() called with a missing or undefined point parameter!', 'error');
}
return this._entity || this;
},
/**
* Translates the object to the tile co-ordinates passed.
* @param {Number} x The x tile co-ordinate.
* @param {Number} y The y tile co-ordinate.
* @param {Number=} z The z tile co-ordinate.
* @example #Translate entity to tile
* // Create a tile map
* var tileMap = new IgeTileMap2d()
* .tileWidth(40)
* .tileHeight(40);
*
* // Mount our entity to the tile map
* entity.mount(tileMap);
*
* // Translate the entity to the tile x:10, y:12
* entity.translateToTile(10, 12, 0);
* @return {*} The object this method was called from to allow
* method chaining.
*/
translateToTile: function (x, y, z) {
if (this._parent && this._parent._tileWidth !== undefined && this._parent._tileHeight !== undefined) {
var finalZ;
// Handle being passed a z co-ordinate
if (z !== undefined) {
finalZ = z * this._parent._tileWidth;
} else {
finalZ = this._translate.z;
}
this.translateTo((x * this._parent._tileWidth) + this._parent._tileWidth / 2, (y * this._parent._tileHeight) + this._parent._tileWidth / 2, finalZ);
} else {
this.log('Cannot translate to tile because the entity is not currently mounted to a tile map or the tile map has no tileWidth or tileHeight values.', 'warning');
}
return this;
},
		/**
		 * Gets the translate accessor object. Binds the x/y/z
		 * properties of this entity to the translate accessor
		 * methods so that entity.translate().y(10) style calls work.
		 * @example #Use the translate accessor object to alter the y co-ordinate of the entity to 10
		 *     entity.translate().y(10);
		 * @return {*} The attached entity (or this), for chaining.
		 */
		translate: function () {
			if (arguments.length) {
				this.log('You called translate with arguments, did you mean translateTo or translateBy instead of translate?', 'warning');
			}
			// Rebind the axis accessors so chained .x()/.y()/.z()
			// calls operate on the translate component
			this.x = this._translateAccessorX;
			this.y = this._translateAccessorY;
			this.z = this._translateAccessorZ;
			return this._entity || this;
		},
/**
* The translate accessor method for the x axis. This
* method is not called directly but is accessed through
* the accessor object obtained by calling entity.translate().
* @param {Number=} val The new value to apply to the co-ordinate.
* @return {*}
* @private
*/
_translateAccessorX: function (val) {
if (val !== undefined) {
this._translate.x = val;
return this._entity || this;
}
return this._translate.x;
},
/**
* The translate accessor method for the y axis. This
* method is not called directly but is accessed through
* the accessor object obtained by calling entity.translate().
* @param {Number=} val The new value to apply to the co-ordinate.
* @return {*}
* @private
*/
_translateAccessorY: function (val) {
if (val !== undefined) {
this._translate.y = val;
return this._entity || this;
}
return this._translate.y;
},
/**
* The translate accessor method for the z axis. This
* method is not called directly but is accessed through
* the accessor object obtained by calling entity.translate().
* @param {Number=} val The new value to apply to the co-ordinate.
* @return {*}
* @private
*/
_translateAccessorZ: function (val) {
// TODO: Do we need to do anything to the matrix here for iso views?
//this._localMatrix.translateTo(this._translate.x, this._translate.y);
if (val !== undefined) {
this._translate.z = val;
return this._entity || this;
}
return this._translate.z;
},
/**
* Rotates the entity by adding the passed values to
* the current rotation values.
* @param {Number} x The x co-ordinate.
* @param {Number} y The y co-ordinate.
* @param {Number} z The z co-ordinate.
* @example #Rotate the entity by 10 degrees about the z axis
* entity.rotateBy(0, 0, Math.radians(10));
* @return {*}
*/
rotateBy: function (x, y, z) {
if (x !== undefined && y!== undefined && z !== undefined) {
this._rotate.x += x;
this._rotate.y += y;
this._rotate.z += z;
} else {
this.log('rotateBy() called with a missing or undefined x, y or z parameter!', 'error');
}
return this._entity || this;
},
/**
* Rotates the entity to the passed values.
* @param {Number} x The x co-ordinate.
* @param {Number} y The y co-ordinate.
* @param {Number} z The z co-ordinate.
* @example #Rotate the entity to 10 degrees about the z axis
* entity.rotateTo(0, 0, Math.radians(10));
* @return {*}
*/
rotateTo: function (x, y, z) {
if (x !== undefined && y!== undefined && z !== undefined) {
this._rotate.x = x;
this._rotate.y = y;
this._rotate.z = z;
} else {
this.log('rotateTo() called with a missing or undefined x, y or z parameter!', 'error');
}
return this._entity || this;
},
		/**
		 * Gets the rotate accessor object. Binds the x/y/z
		 * properties of this entity to the rotate accessor
		 * methods so that entity.rotate().z(rad) style calls work.
		 * @example #Use the rotate accessor object to rotate the entity about the z axis 10 degrees
		 *     entity.rotate().z(Math.radians(10));
		 * @return {*} The attached entity (or this), for chaining.
		 */
		rotate: function () {
			if (arguments.length) {
				this.log('You called rotate with arguments, did you mean rotateTo or rotateBy instead of rotate?', 'warning');
			}
			// Rebind the axis accessors so chained .x()/.y()/.z()
			// calls operate on the rotate component
			this.x = this._rotateAccessorX;
			this.y = this._rotateAccessorY;
			this.z = this._rotateAccessorZ;
			return this._entity || this;
		},
/**
* The rotate accessor method for the x axis. This
* method is not called directly but is accessed through
* the accessor object obtained by calling entity.rotate().
* @param {Number=} val The new value to apply to the co-ordinate.
* @return {*}
* @private
*/
_rotateAccessorX: function (val) {
if (val !== undefined) {
this._rotate.x = val;
return this._entity || this;
}
return this._rotate.x;
},
/**
* The rotate accessor method for the y axis. This
* method is not called directly but is accessed through
* the accessor object obtained by calling entity.rotate().
* @param {Number=} val The new value to apply to the co-ordinate.
* @return {*}
* @private
*/
_rotateAccessorY: function (val) {
if (val !== undefined) {
this._rotate.y = val;
return this._entity || this;
}
return this._rotate.y;
},
/**
* The rotate accessor method for the z axis. This
* method is not called directly but is accessed through
* the accessor object obtained by calling entity.rotate().
* @param {Number=} val The new value to apply to the co-ordinate.
* @return {*}
* @private
*/
_rotateAccessorZ: function (val) {
if (val !== undefined) {
this._rotate.z = val;
return this._entity || this;
}
return this._rotate.z;
},
/**
* Scales the entity by adding the passed values to
* the current scale values.
* @param {Number} x The x co-ordinate.
* @param {Number} y The y co-ordinate.
* @param {Number} z The z co-ordinate.
* @example #Scale the entity by 2 on the x axis
* entity.scaleBy(2, 0, 0);
* @return {*}
*/
scaleBy: function (x, y, z) {
if (x !== undefined && y!== undefined && z !== undefined) {
this._scale.x += x;
this._scale.y += y;
this._scale.z += z;
} else {
this.log('scaleBy() called with a missing or undefined x, y or z parameter!', 'error');
}
return this._entity || this;
},
/**
* Scale the entity to the passed values.
* @param {Number} x The x co-ordinate.
* @param {Number} y The y co-ordinate.
* @param {Number} z The z co-ordinate.
* @example #Set the entity scale to 1 on all axes
* entity.scaleTo(1, 1, 1);
* @return {*}
*/
scaleTo: function (x, y, z) {
if (x !== undefined && y!== undefined && z !== undefined) {
this._scale.x = x;
this._scale.y = y;
this._scale.z = z;
} else {
this.log('scaleTo() called with a missing or undefined x, y or z parameter!', 'error');
}
return this._entity || this;
},
		/**
		 * Gets the scale accessor object. Binds the x/y/z
		 * properties of this entity to the scale accessor
		 * methods so that entity.scale().x(1) style calls work.
		 * @example #Use the scale accessor object to set the scale of the entity on the x axis to 1
		 *     entity.scale().x(1);
		 * @return {*} The attached entity (or this), for chaining.
		 */
		scale: function () {
			if (arguments.length) {
				this.log('You called scale with arguments, did you mean scaleTo or scaleBy instead of scale?', 'warning');
			}
			// Rebind the axis accessors so chained .x()/.y()/.z()
			// calls operate on the scale component
			this.x = this._scaleAccessorX;
			this.y = this._scaleAccessorY;
			this.z = this._scaleAccessorZ;
			return this._entity || this;
		},
/**
* The scale accessor method for the x axis. This
* method is not called directly but is accessed through
* the accessor object obtained by calling entity.scale().
* @param {Number=} val The new value to apply to the co-ordinate.
* @return {*}
* @private
*/
_scaleAccessorX: function (val) {
if (val !== undefined) {
this._scale.x = val;
return this._entity || this;
}
return this._scale.x;
},
/**
* The scale accessor method for the y axis. This
* method is not called directly but is accessed through
* the accessor object obtained by calling entity.scale().
* @param {Number=} val The new value to apply to the co-ordinate.
* @return {*}
* @private
*/
_scaleAccessorY: function (val) {
if (val !== undefined) {
this._scale.y = val;
return this._entity || this;
}
return this._scale.y;
},
/**
* The scale accessor method for the z axis. This
* method is not called directly but is accessed through
* the accessor object obtained by calling entity.scale().
* @param {Number=} val The new value to apply to the co-ordinate.
* @return {*}
* @private
*/
_scaleAccessorZ: function (val) {
if (val !== undefined) {
this._scale.z = val;
return this._entity || this;
}
return this._scale.z;
},
/**
* Sets the origin of the entity by adding the passed values to
* the current origin values.
* @param {Number} x The x co-ordinate.
* @param {Number} y The y co-ordinate.
* @param {Number} z The z co-ordinate.
* @example #Add 0.5 to the origin on the x axis
* entity.originBy(0.5, 0, 0);
* @return {*}
*/
originBy: function (x, y, z) {
if (x !== undefined && y!== undefined && z !== undefined) {
this._origin.x += x;
this._origin.y += y;
this._origin.z += z;
} else {
this.log('originBy() called with a missing or undefined x, y or z parameter!', 'error');
}
return this._entity || this;
},
/**
* Set the origin of the entity to the passed values.
* @param {Number} x The x co-ordinate.
* @param {Number} y The y co-ordinate.
* @param {Number} z The z co-ordinate.
* @example #Set the entity origin to 0.5 on all axes
* entity.originTo(0.5, 0.5, 0.5);
* @return {*}
*/
originTo: function (x, y, z) {
if (x !== undefined && y!== undefined && z !== undefined) {
this._origin.x = x;
this._origin.y = y;
this._origin.z = z;
} else {
this.log('originTo() called with a missing or undefined x, y or z parameter!', 'error');
}
return this._entity || this;
},
		/**
		 * Gets the origin accessor object. Binds the x/y/z
		 * properties of this entity to the origin accessor
		 * methods so that entity.origin().x(1) style calls work.
		 * @example #Use the origin accessor object to set the origin of the entity on the x axis to 1
		 *     entity.origin().x(1);
		 * @return {*} The attached entity (or this), for chaining.
		 */
		origin: function () {
			// Rebind the axis accessors so chained .x()/.y()/.z()
			// calls operate on the origin component
			this.x = this._originAccessorX;
			this.y = this._originAccessorY;
			this.z = this._originAccessorZ;
			return this._entity || this;
		},
/**
* The origin accessor method for the x axis. This
* method is not called directly but is accessed through
* the accessor object obtained by calling entity.origin().
* @param {Number=} val The new value to apply to the co-ordinate.
* @return {*}
* @private
*/
_originAccessorX: function (val) {
if (val !== undefined) {
this._origin.x = val;
return this._entity || this;
}
return this._origin.x;
},
/**
* The origin accessor method for the y axis. This
* method is not called directly but is accessed through
* the accessor object obtained by calling entity.origin().
* @param {Number=} val The new value to apply to the co-ordinate.
* @return {*}
* @private
*/
_originAccessorY: function (val) {
if (val !== undefined) {
this._origin.y = val;
return this._entity || this;
}
return this._origin.y;
},
/**
* The origin accessor method for the z axis. This
* method is not called directly but is accessed through
* the accessor object obtained by calling entity.origin().
* @param {Number=} val The new value to apply to the co-ordinate.
* @return {*}
* @private
*/
_originAccessorZ: function (val) {
if (val !== undefined) {
this._origin.z = val;
return this._entity || this;
}
return this._origin.z;
},
_rotatePoint: function (point, radians, origin) {
var cosAngle = Math.cos(radians),
sinAngle = Math.sin(radians);
return {
x: origin.x + (point.x - origin.x) * cosAngle + (point.y - origin.y) * sinAngle,
y: origin.y - (point.x - origin.x) * sinAngle + (point.y - origin.y) * cosAngle
};
},
	/**
	 * Checks the current transform values against the previous ones. If
	 * any value is different, the appropriate method is called which will
	 * update the transformation matrix accordingly.
	 *
	 * Rebuilds the local matrix from scratch each call (translate ->
	 * rotate -> scale -> origin offset), composes it with the parent's
	 * world matrix, then sets dirty flags if the result changed.
	 * @return {*} this, for method chaining.
	 */
	updateTransform: function () {
		// Start from identity — the local matrix is fully rebuilt below
		this._localMatrix.identity();
		if (this._mode === 0) {
			// 2d translation
			this._localMatrix.multiply(this._localMatrix._newTranslate(this._translate.x, this._translate.y));
		}
		if (this._mode === 1) {
			// iso translation — project the 3d translate into isometric
			// screen space; z is offset by half the 3d bounds height so
			// that z = 0 sits at the base of the entity's bounding box
			var isoPoint = this._translateIso = new IgePoint3d(
				this._translate.x,
				this._translate.y,
				this._translate.z + this._bounds3d.z / 2
			).toIso();
			if (this._parent && this._parent._bounds3d.z) {
				// This adjusts the child entity so that 0, 0, 0 inside the
				// parent is the center of the base of the parent
				isoPoint.y += this._parent._bounds3d.z / 1.6;
			}
			this._localMatrix.multiply(this._localMatrix._newTranslate(isoPoint.x, isoPoint.y));
		}
		// Only z rotation and x/y scale apply to the 2d local matrix
		this._localMatrix.rotateBy(this._rotate.z);
		this._localMatrix.scaleBy(this._scale.x, this._scale.y);
		// Adjust local matrix for origin values if not at center
		if (this._origin.x !== 0.5 || this._origin.y !== 0.5) {
			this._localMatrix.translateBy(
				(this._bounds2d.x * (0.5 - this._origin.x)),
				(this._bounds2d.y * (0.5 - this._origin.y))
			);
		}
		// TODO: If the parent and local transforms are unchanged, we should used cached values
		if (this._parent) {
			// World matrix = parent world matrix * local matrix
			this._worldMatrix.copy(this._parent._worldMatrix);
			this._worldMatrix.multiply(this._localMatrix);
		} else {
			this._worldMatrix.copy(this._localMatrix);
		}
		// Check if the world matrix has changed and if so, set a few flags
		// to allow other methods to know that a matrix change has occurred
		if (!this._worldMatrix.compare(this._oldWorldMatrix)) {
			this._oldWorldMatrix.copy(this._worldMatrix);
			this._transformChanged = true;
			this._aabbDirty = true;
			this._bounds3dPolygonDirty = true;
			this.cacheDirty(true);
		} else {
			this._transformChanged = false;
		}
		// Check if the geometry has changed and if so, update the aabb dirty
		if (!this._oldBounds2d.compare(this._bounds2d)) {
			this._aabbDirty = true;
			// Record the new geometry to the oldGeometry data
			this._oldBounds2d.copy(this._bounds2d);
		}
		if (!this._oldBounds3d.compare(this._bounds3d)) {
			this._bounds3dPolygonDirty = true;
			// Record the new geometry to the oldGeometry data
			this._oldBounds3d.copy(this._bounds3d);
		}
		return this;
	},
/**
* Gets / sets the disable interpolation flag. If set to true then
* stream data being received by the client will not be interpolated
* and will be instantly assigned instead. Useful if your entity's
* transformations should not be interpolated over time.
* @param val
* @returns {*}
*/
disableInterpolation: function (val) {
if (val !== undefined) {
this._disableInterpolation = val;
return this;
}
return this._disableInterpolation;
},
////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// STREAM
////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/**
* Gets / sets the array of sections that this entity will
* encode into its stream data.
* @param {Array=} sectionArray An array of strings.
* @example #Define the sections this entity will use in the network stream. Use the default "transform" section as well as a "custom1" section
* entity.streamSections('transform', 'custom1');
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
streamSections: function (sectionArray) {
if (sectionArray !== undefined) {
this._streamSections = sectionArray;
return this;
}
return this._streamSections;
},
/**
* Adds a section into the existing streamed sections array.
* @param {String} sectionName The section name to add.
*/
streamSectionsPush: function (sectionName) {
this._streamSections = this._streamSections || [];
this._streamSections.push(sectionName);
return this;
},
/**
* Removes a section into the existing streamed sections array.
* @param {String} sectionName The section name to remove.
*/
streamSectionsPull: function (sectionName) {
if (this._streamSections) {
this._streamSections.pull(sectionName);
}
return this;
},
/**
* Gets / sets a streaming property on this entity. If set, the
* property's new value is streamed to clients on the next packet.
*
* @param {String} propName The name of the property to get / set.
* @param {*=} propVal Optional. If provided, the property is set
* to this value.
* @return {*} "this" when a propVal argument is passed to allow method
* chaining or the current value if no propVal argument is specified.
*/
streamProperty: function (propName, propVal) {
this._streamProperty = this._streamProperty || {};
//this._streamPropertyChange = this._streamPropertyChange || {};
if (propName !== undefined) {
if (propVal !== undefined) {
//this._streamPropertyChange[propName] = this._streamProperty[propName] !== propVal;
this._streamProperty[propName] = propVal;
return this;
}
return this._streamProperty[propName];
}
return undefined;
},
/**
* Gets / sets the data for the specified data section id. This method
* is usually not called directly and instead is part of the network
* stream system. General use case is to write your own custom streamSectionData
* method in a class that extends IgeEntity so that you can control the
* data that the entity will send and receive over the network stream.
* @param {String} sectionId A string identifying the section to
* handle data get / set for.
* @param {*=} data If present, this is the data that has been sent
* from the server to the client for this entity.
* @param {Boolean=} bypassTimeStream If true, will assign transform
* directly to entity instead of adding the values to the time stream.
* @return {*} "this" when a data argument is passed to allow method
* chaining or the current value if no data argument is specified.
*/
streamSectionData: function (sectionId, data, bypassTimeStream) {
switch (sectionId) {
case 'transform':
if (data) {
// We have received updated data
var dataArr = data.split(',');
if (!this._disableInterpolation && !bypassTimeStream && !this._streamJustCreated) {
// Translate
if (dataArr[0]) { dataArr[0] = parseFloat(dataArr[0]); }
if (dataArr[1]) { dataArr[1] = parseFloat(dataArr[1]); }
if (dataArr[2]) { dataArr[2] = parseFloat(dataArr[2]); }
// Scale
if (dataArr[3]) { dataArr[3] = parseFloat(dataArr[3]); }
if (dataArr[4]) { dataArr[4] = parseFloat(dataArr[4]); }
if (dataArr[5]) { dataArr[5] = parseFloat(dataArr[5]); }
// Rotate
if (dataArr[6]) { dataArr[6] = parseFloat(dataArr[6]); }
if (dataArr[7]) { dataArr[7] = parseFloat(dataArr[7]); }
if (dataArr[8]) { dataArr[8] = parseFloat(dataArr[8]); }
// Add it to the time stream
this._timeStream.push([ige.network.stream._streamDataTime + ige.network._latency, dataArr]);
// Check stream length, don't allow higher than 10 items
if (this._timeStream.length > 10) {
// Remove the first item
this._timeStream.shift();
}
} else {
// Assign all the transform values immediately
if (dataArr[0]) { this._translate.x = parseFloat(dataArr[0]); }
if (dataArr[1]) { this._translate.y = parseFloat(dataArr[1]); }
if (dataArr[2]) { this._translate.z = parseFloat(dataArr[2]); }
// Scale
if (dataArr[3]) { this._scale.x = parseFloat(dataArr[3]); }
if (dataArr[4]) { this._scale.y = parseFloat(dataArr[4]); }
if (dataArr[5]) { this._scale.z = parseFloat(dataArr[5]); }
// Rotate
if (dataArr[6]) { this._rotate.x = parseFloat(dataArr[6]); }
if (dataArr[7]) { this._rotate.y = parseFloat(dataArr[7]); }
if (dataArr[8]) { this._rotate.z = parseFloat(dataArr[8]); }
// If we are using composite caching ensure we update the cache
if (this._compositeCache) {
this.cacheDirty(true);
}
}
} else {
// We should return stringified data
return this._translate.toString(this._streamFloatPrecision) + ',' + // translate
this._scale.toString(this._streamFloatPrecision) + ',' + // scale
this._rotate.toString(this._streamFloatPrecision) + ','; // rotate
}
break;
case 'depth':
if (data !== undefined) {
if (ige.isClient) {
this.depth(parseInt(data));
}
} else {
return String(this.depth());
}
break;
case 'layer':
if (data !== undefined) {
if (ige.isClient) {
this.layer(parseInt(data));
}
} else {
return String(this.layer());
}
break;
case 'bounds2d':
if (data !== undefined) {
if (ige.isClient) {
var geom = data.split(',');
this.bounds2d(parseFloat(geom[0]), parseFloat(geom[1]));
}
} else {
return String(this._bounds2d.x + ',' + this._bounds2d.y);
}
break;
case 'bounds3d':
if (data !== undefined) {
if (ige.isClient) {
var geom = data.split(',');
this.bounds3d(parseFloat(geom[0]), parseFloat(geom[1]), parseFloat(geom[2]));
}
} else {
return String(this._bounds3d.x + ',' + this._bounds3d.y + ',' + this._bounds3d.z);
}
break;
case 'hidden':
if (data !== undefined) {
if (ige.isClient) {
if (data == 'true') {
this.hide();
} else {
this.show();
}
}
} else {
return String(this.isHidden());
}
break;
case 'mount':
if (data !== undefined) {
if (ige.isClient) {
if (data) {
var newParent = ige.$(data);
if (newParent) {
this.mount(newParent);
}
} else {
// Unmount
this.unMount();
}
}
} else {
var parent = this.parent();
if (parent) {
return this.parent().id();
} else {
return '';
}
}
break;
case 'origin':
if (data !== undefined) {
if (ige.isClient) {
var geom = data.split(',');
this.origin(parseFloat(geom[0]), parseFloat(geom[1]), parseFloat(geom[2]));
}
} else {
return String(this._origin.x + ',' + this._origin.y + ',' + this._origin.z);
}
break;
case 'props':
var newData,
changed,
i;
if (data !== undefined) {
if (ige.isClient) {
var props = JSON.parse(data);
// Update properties that have been sent through
for (i in props) {
changed = false;
if (props.hasOwnProperty(i)) {
if (this._streamProperty[i] != props[i]) {
changed = true;
}
this._streamProperty[i] = props[i];
this.emit('streamPropChange', [i, props[i]]);
}
}
}
} else {
newData = {};
for (i in this._streamProperty) {
if (this._streamProperty.hasOwnProperty(i)) {
//if (this._streamPropertyChange[i]) {
newData[i] = this._streamProperty[i];
//this._streamPropertyChange[i] = false;
//}
}
}
return JSON.stringify(newData);
}
break;
}
},
/* CEXCLUDE */
/**
* Gets / sets the stream mode that the stream system will use when
* handling pushing data updates to connected clients.
* @param {Number=} val A value representing the stream mode.
* @example #Set the entity to disable streaming
* entity.streamMode(0);
* @example #Set the entity to automatic streaming
* entity.streamMode(1);
* @example #Set the entity to manual (advanced mode) streaming
* entity.streamMode(2);
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
streamMode: function (val) {
if (val !== undefined) {
if (ige.isServer) {
this._streamMode = val;
}
return this;
}
return this._streamMode;
},
/**
* Gets / sets the stream control callback function that will be called
* each time the entity tick method is called and stream-able data is
* updated.
* @param {Function=} method The stream control method.
* @example #Set the entity's stream control method to control when this entity is streamed and when it is not
* entity.streamControl(function (clientId) {
* // Let's use an example where we only want this entity to stream
* // to one particular client with the id 4039589434
* if (clientId === '4039589434') {
* // Returning true tells the network stream to send data
* // about this entity to the client
* return true;
* } else {
* // Returning false tells the network stream NOT to send
* // data about this entity to the client
* return false;
* }
* });
*
* Further reading: [Controlling Streaming](http://www.isogenicengine.com/documentation/isogenic-game-engine/versions/1-1-0/manual/networking-multiplayer/realtime-network-streaming/stream-modes-and-controlling-streaming/)
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
streamControl: function (method) {
if (method !== undefined) {
this._streamControl = method;
return this;
}
return this._streamControl;
},
/**
* Gets / sets the stream sync interval. This value
* is in milliseconds and cannot be lower than 16. It will
* determine how often data from this entity is added to the
* stream queue.
* @param {Number=} val Number of milliseconds between adding
* stream data for this entity to the stream queue.
* @param {String=} sectionId Optional id of the stream data
* section you want to set the interval for. If omitted the
* interval will be applied to all sections.
* @example #Set the entity's stream update (sync) interval to 1 second because this entity's data is not highly important to the simulation so save some bandwidth!
* entity.streamSyncInterval(1000);
* @example #Set the entity's stream update (sync) interval to 16 milliseconds because this entity's data is very important to the simulation so send as often as possible!
* entity.streamSyncInterval(16);
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
streamSyncInterval: function (val, sectionId) {
if (val !== undefined) {
if (!sectionId) {
if (val < 16) {
delete this._streamSyncInterval;
} else {
this._streamSyncDelta = 0;
this._streamSyncInterval = val;
}
} else {
this._streamSyncSectionInterval = this._streamSyncSectionInterval || {};
this._streamSyncSectionDelta = this._streamSyncSectionDelta || {};
if (val < 16) {
delete this._streamSyncSectionInterval[sectionId];
} else {
this._streamSyncSectionDelta[sectionId] = 0;
this._streamSyncSectionInterval[sectionId] = val;
}
}
return this;
}
return this._streamSyncInterval;
},
/**
* Gets / sets the precision by which floating-point values will
* be encoded and sent when packaged into stream data.
* @param {Number=} val The number of decimal places to preserve.
* @example #Set the float precision to 2
* // This will mean that any data using floating-point values
* // that gets sent across the network stream will be rounded
* // to 2 decimal places. This helps save bandwidth by not
* // having to send the entire number since precision above
* // 2 decimal places is usually not that important to the
* // simulation.
* entity.streamFloatPrecision(2);
* @return {*} "this" when arguments are passed to allow method
* chaining or the current value if no arguments are specified.
*/
streamFloatPrecision: function (val) {
if (val !== undefined) {
this._streamFloatPrecision = val;
var i, floatRemove = '\\.';
// Update the floatRemove regular expression pattern
for (i = 0; i < this._streamFloatPrecision; i++) {
floatRemove += '0';
}
// Add the trailing comma
floatRemove += ',';
// Create the new regexp
this._floatRemoveRegExp = new RegExp(floatRemove, 'g');
return this;
}
return this._streamFloatPrecision;
},
	/**
	 * Queues stream data for this entity to be sent to the
	 * specified client id or array of client ids.
	 *
	 * In stream mode 1 (automatic) the recipient list is computed from
	 * the connected clients (the clientId argument is ignored); in
	 * stream mode 2 (manual) the passed clientId is used directly.
	 * Any other mode is a no-op.
	 * @param {Array} clientId An array of string IDs of each
	 * client to send the stream data to.
	 * @return {IgeEntity} "this".
	 */
	streamSync: function (clientId) {
		if (this._streamMode === 1) {
			// Check if we have a stream sync interval
			if (this._streamSyncInterval) {
				// Accumulate elapsed tick time towards the interval
				this._streamSyncDelta += ige._tickDelta;
				if (this._streamSyncDelta < this._streamSyncInterval) {
					// The stream sync interval is still higher than
					// the stream sync delta so exit without calling the
					// stream sync method
					return this;
				} else {
					// We've reached the delta we want so zero it now
					// ready for the next loop
					this._streamSyncDelta = 0;
				}
			}
			// Grab an array of connected clients from the network
			// system
			var recipientArr = [],
				clientArr = ige.network.clients(this._streamRoomId),
				i;
			for (i in clientArr) {
				if (clientArr.hasOwnProperty(i)) {
					// Check for a stream control method
					if (this._streamControl) {
						// Call the callback method and if it returns true,
						// send the stream data to this client
						if (this._streamControl.apply(this, [i, this._streamRoomId])) {
							recipientArr.push(i);
						}
					} else {
						// No control method so process for this client
						recipientArr.push(i);
					}
				}
			}
			this._streamSync(recipientArr);
			return this;
		}
		if (this._streamMode === 2) {
			// Stream mode is advanced — caller supplies recipients
			this._streamSync(clientId, this._streamRoomId);
			return this;
		}
		return this;
	},
	/**
	 * Override this method if your entity should send data through to
	 * the client when it is being created on the client for the first
	 * time through the network stream. The data will be provided as the
	 * first argument in the constructor call to the entity class so
	 * you should expect to receive it as per this example:
	 * @example #Using and Receiving Stream Create Data
	 *     var MyNewClass = IgeEntity.extend({
	 *         classId: 'MyNewClass',
	 *
	 *         // Define the init with the parameter to receive the
	 *         // data you return in the streamCreateData() method
	 *         init: function (myCreateData) {
	 *             this._myData = myCreateData;
	 *         },
	 *
	 *         streamCreateData: function () {
	 *             return this._myData;
	 *         }
	 *     });
	 *
	 * Valid return values must not include circular references!
	 */
	streamCreateData: function () {}, // Default is a no-op returning undefined; subclasses override to attach data
/**
* Gets / sets the stream emit created flag. If set to true this entity
* emit a "streamCreated" event when it is created by the stream, but
* after the id and initial transform are set.
* @param val
* @returns {*}
*/
streamEmitCreated: function (val) {
if (val !== undefined) {
this._streamEmitCreated = val;
return this;
}
return this._streamEmitCreated;
},
/**
* Asks the stream system to queue the stream data to the specified
* client id or array of ids.
* @param {Array} recipientArr The array of ids of the client(s) to
* queue stream data for. The stream data being queued
* is returned by a call to this._streamData().
* @param {String} streamRoomId The id of the room the entity belongs
* in (can be undefined or null if no room assigned).
* @private
*/
_streamSync: function (recipientArr, streamRoomId) {
var arrCount = recipientArr.length,
arrIndex,
clientId,
stream = ige.network.stream,
thisId = this.id(),
filteredArr = [],
createResult = true; // We set this to true by default
// Loop the recipient array
for (arrIndex = 0; arrIndex < arrCount; arrIndex++) {
clientId = recipientArr[arrIndex];
// Check if the client has already received a create
// command for this entity
stream._streamClientCreated[thisId] = stream._streamClientCreated[thisId] || {};
if (!stream._streamClientCreated[thisId][clientId]) {
createResult = this.streamCreate(clientId);
}
// Make sure that if we had to create the entity for
// this client that the create worked before bothering
// to waste bandwidth on stream updates
if (createResult) {
// Get the stream data
var data = this._streamData();
// Is the data different from the last data we sent
// this client?
stream._streamClientData[thisId] = stream._streamClientData[thisId] || {};
if (stream._streamClientData[thisId][clientId] != data) {
filteredArr.push(clientId);
// Store the new data for later comparison
stream._streamClientData[thisId][clientId] = data;
}
}
}
if (filteredArr.length) {
stream.queue(thisId, data, filteredArr);
}
},
/**
* Forces the stream to push this entity's full stream data on the
* next stream sync regardless of what clients have received in the
* past. This should only be used when required rather than every
* tick as it will reduce the overall efficiency of the stream if
* used every tick.
* @returns {*}
*/
streamForceUpdate: function () {
if (ige.isServer) {
var thisId = this.id();
// Invalidate the stream client data lookup to ensure
// the latest data will be pushed on the next stream sync
if (ige.network && ige.network.stream && ige.network.stream._streamClientData && ige.network.stream._streamClientData[thisId]) {
ige.network.stream._streamClientData[thisId] = {};
}
}
return this;
},
	/**
	 * Issues a create entity command to the passed client id
	 * or array of ids. If no id is passed it will issue the
	 * command to all connected clients. If using streamMode(1)
	 * this method is called automatically.
	 * @param {*} clientId The id or array of ids to send
	 * the command to.
	 * @example #Send a create command for this entity to all clients
	 *     entity.streamCreate();
	 * @example #Send a create command for this entity to an array of client ids
	 *     entity.streamCreate(['43245325', '326755464', '436743453']);
	 * @example #Send a create command for this entity to a single client id
	 *     entity.streamCreate('43245325');
	 * @return {Boolean} True if the command was sent; false if the
	 * entity has no parent (an unmounted entity cannot be created
	 * client-side because the command carries the parent's id).
	 */
	streamCreate: function (clientId) {
		if (this._parent) {
			var thisId = this.id(),
				arr,
				i;
			// Send the client an entity create command first
			// (class id + entity id + parent id + current transform
			// + any custom data from streamCreateData())
			ige.network.send('_igeStreamCreate', [
				this.classId(),
				thisId,
				this._parent.id(),
				this.streamSectionData('transform'),
				this.streamCreateData()
			], clientId);
			ige.network.stream._streamClientCreated[thisId] = ige.network.stream._streamClientCreated[thisId] || {};
			if (clientId) {
				// Mark the client as having received a create
				// command for this entity
				ige.network.stream._streamClientCreated[thisId][clientId] = true;
			} else {
				// Mark all clients as having received this create
				arr = ige.network.clients();
				for (i in arr) {
					if (arr.hasOwnProperty(i)) {
						ige.network.stream._streamClientCreated[thisId][i] = true;
					}
				}
			}
			return true;
		}
		return false;
	},
	/**
	 * Issues a destroy entity command to the passed client id
	 * or array of ids. If no id is passed it will issue the
	 * command to all connected clients. If using streamMode(1)
	 * this method is called automatically.
	 * @param {*} clientId The id or array of ids to send
	 * the command to.
	 * @example #Send a destroy command for this entity to all clients
	 *     entity.streamDestroy();
	 * @example #Send a destroy command for this entity to an array of client ids
	 *     entity.streamDestroy(['43245325', '326755464', '436743453']);
	 * @example #Send a destroy command for this entity to a single client id
	 *     entity.streamDestroy('43245325');
	 * @return {Boolean} Always true.
	 */
	streamDestroy: function (clientId) {
		var thisId = this.id(),
			arr,
			i;
		// Send clients the stream destroy command for this entity
		ige.network.send('_igeStreamDestroy', [ige._currentTime, thisId], clientId);
		ige.network.stream._streamClientCreated[thisId] = ige.network.stream._streamClientCreated[thisId] || {};
		ige.network.stream._streamClientData[thisId] = ige.network.stream._streamClientData[thisId] || {};
		if (clientId) {
			// Mark the client as having received a destroy
			// command for this entity (and forget its cached data so a
			// later re-create streams full data again)
			ige.network.stream._streamClientCreated[thisId][clientId] = false;
			ige.network.stream._streamClientData[thisId][clientId] = undefined;
		} else {
			// Mark all clients as having received this destroy
			arr = ige.network.clients();
			for (i in arr) {
				if (arr.hasOwnProperty(i)) {
					ige.network.stream._streamClientCreated[thisId][i] = false;
					ige.network.stream._streamClientData[thisId][i] = undefined;
				}
			}
		}
		return true;
	},
	/**
	 * Generates and returns the current stream data for this entity. The
	 * data will usually include only properties that have changed since
	 * the last time the stream data was generated. The returned data is
	 * a string that has been compressed in various ways to reduce network
	 * overhead during transmission.
	 *
	 * Format: entity id followed by one section-designator-delimited
	 * slot per entry in this._streamSections, in order. Empty slots are
	 * still emitted so the client can parse sections positionally.
	 * @return {String} The string representation of the stream data for
	 * this entity.
	 * @private
	 */
	_streamData: function () {
		// Check if we already have a cached version of the streamData
		if (this._streamDataCache) {
			return this._streamDataCache;
		} else {
			// Let's generate our stream data
			var streamData = '',
				sectionDataString = '',
				sectionArr = this._streamSections,
				sectionCount = sectionArr.length,
				sectionData,
				sectionIndex,
				sectionId;
			// Add the entity id
			streamData += this.id();
			// Only send further data if the entity is still "alive"
			if (this._alive) {
				// Now loop the data sections array and compile the rest of the
				// data string from the data section return data
				for (sectionIndex = 0; sectionIndex < sectionCount; sectionIndex++) {
					sectionData = '';
					sectionId = sectionArr[sectionIndex];
					// Stream section sync intervals allow individual stream sections
					// to be streamed at different (usually longer) intervals than other
					// sections so you could for instance reduce the number of updates
					// a particular section sends out in a second because the data is
					// not that important compared to updated transformation data
					if (this._streamSyncSectionInterval && this._streamSyncSectionInterval[sectionId]) {
						// Check if the section interval has been reached
						this._streamSyncSectionDelta[sectionId] += ige._tickDelta;
						if (this._streamSyncSectionDelta[sectionId] >= this._streamSyncSectionInterval[sectionId]) {
							// Get the section data for this section id
							sectionData = this.streamSectionData(sectionId);
							// Reset the section delta
							this._streamSyncSectionDelta[sectionId] = 0;
						}
					} else {
						// Get the section data for this section id
						sectionData = this.streamSectionData(sectionId);
					}
					// Add the section start designator character. We do this
					// regardless of if there is actually any section data because
					// we want to be able to identify sections in a serial fashion
					// on receipt of the data string on the client
					sectionDataString += ige.network.stream._sectionDesignator;
					// Check if we were returned any data
					if (sectionData !== undefined) {
						// Add the data to the section string
						sectionDataString += sectionData;
					}
				}
				// Add any custom data to the stream string at this point
				if (sectionDataString) {
					streamData += sectionDataString;
				}
				// Remove any .00 from the string since we don't need that data
				// TODO: What about if a property is a string with something.00 and it should be kept?
				streamData = streamData.replace(this._floatRemoveRegExp, ',');
			}
			// Store the data in cache in case we are asked for it again this tick
			// the update() method of the IgeEntity class clears this every tick
			this._streamDataCache = streamData;
			return streamData;
		}
	},
/* CEXCLUDE */
////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// INTERPOLATOR
////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/**
* Calculates the current value based on the time along the
* value range.
* @param {Number} startValue The value that the interpolation started from.
* @param {Number} endValue The target value to be interpolated to.
* @param {Number} startTime The time the interpolation started.
* @param {Number} currentTime The current time.
* @param {Number} endTime The time the interpolation will end.
* @return {Number} The interpolated value.
*/
interpolateValue: function (startValue, endValue, startTime, currentTime, endTime) {
var totalValue = endValue - startValue,
dataDelta = endTime - startTime,
offsetDelta = currentTime - startTime,
deltaTime = offsetDelta / dataDelta;
// Clamp the current time from 0 to 1
if (deltaTime < 0) { deltaTime = 0; } else if (deltaTime > 1) { deltaTime = 1; }
return (totalValue * deltaTime) + startValue;
},
	/**
	 * Processes the time stream for the entity: finds the two stream
	 * data points that straddle the render time and interpolates the
	 * entity's translate / scale / rotate between them.
	 * @param {Number} renderTime The time that the time stream is
	 * targeting to render the entity at.
	 * @param {Number} maxLerp The maximum lerp before the value
	 * is assigned directly instead of being interpolated.
	 * NOTE(review): maxLerp is currently unused — the snap-to-target
	 * logic that consumed it is commented out below.
	 * @private
	 */
	_processInterpolate: function (renderTime, maxLerp) {
		// Set the maximum lerp to 200 if none is present
		if (!maxLerp) { maxLerp = 200; }
		var maxLerpSquared = maxLerp * maxLerp,
			previousData,
			nextData,
			timeStream = this._timeStream,
			dataDelta,
			offsetDelta,
			currentTime,
			previousTransform,
			nextTransform,
			currentTransform = [],
			i = 1;
		// Find the point in the time stream that is
		// closest to the render time and assign the
		// previous and next data points
		while (timeStream[i]) {
			if (timeStream[i][0] > renderTime) {
				// We have previous and next data points from the
				// time stream so store them
				previousData = timeStream[i - 1];
				nextData = timeStream[i];
				break;
			}
			i++;
		}
		// Check if we have some data to use
		if (!nextData && !previousData) {
			// No in-time data was found, check for lagging data
			if (timeStream.length > 2) {
				if (timeStream[timeStream.length - 1][0] < renderTime) {
					// Lagging data is available, use that
					previousData = timeStream[timeStream.length - 2];
					nextData = timeStream[timeStream.length - 1];
					timeStream.shift();
					/**
					 * Fires when the entity interpolates against old data, usually
					 * the result of slow processing on the client or too much data
					 * being sent from the server.
					 * @event IgeEntity#interpolationLag
					 */
					this.emit('interpolationLag');
				}
			}
		} else {
			// We have some new data so clear the old data
			timeStream.splice(0, i - 1);
		}
		// If we have data to use
		if (nextData && previousData) {
			// Check if the previous data has a timestamp and if not,
			// use the next data's timestamp
			if (isNaN(previousData[0])) { previousData[0] = nextData[0]; }
			// Store the data so outside systems can access them
			this._timeStreamPreviousData = previousData;
			this._timeStreamNextData = nextData;
			// Calculate the delta times
			dataDelta = nextData[0] - previousData[0];
			offsetDelta = renderTime - previousData[0];
			this._timeStreamDataDelta = Math.floor(dataDelta);
			this._timeStreamOffsetDelta = Math.floor(offsetDelta);
			// Calculate the current time between the two data points
			currentTime = offsetDelta / dataDelta;
			this._timeStreamCurrentInterpolateTime = currentTime;
			// Clamp the current time from 0 to 1
			//if (currentTime < 0) { currentTime = 0.0; } else if (currentTime > 1) { currentTime = 1.0; }
			// Set variables up to store the previous and next data
			// (each is the 9-element transform array built by
			// streamSectionData: translate xyz, scale xyz, rotate xyz)
			previousTransform = previousData[1];
			nextTransform = nextData[1];
			// Translate
			currentTransform[0] = this.interpolateValue(previousTransform[0], nextTransform[0], previousData[0], renderTime, nextData[0]);
			currentTransform[1] = this.interpolateValue(previousTransform[1], nextTransform[1], previousData[0], renderTime, nextData[0]);
			currentTransform[2] = this.interpolateValue(previousTransform[2], nextTransform[2], previousData[0], renderTime, nextData[0]);
			// Scale
			currentTransform[3] = this.interpolateValue(previousTransform[3], nextTransform[3], previousData[0], renderTime, nextData[0]);
			currentTransform[4] = this.interpolateValue(previousTransform[4], nextTransform[4], previousData[0], renderTime, nextData[0]);
			currentTransform[5] = this.interpolateValue(previousTransform[5], nextTransform[5], previousData[0], renderTime, nextData[0]);
			// Rotate
			currentTransform[6] = this.interpolateValue(previousTransform[6], nextTransform[6], previousData[0], renderTime, nextData[0]);
			currentTransform[7] = this.interpolateValue(previousTransform[7], nextTransform[7], previousData[0], renderTime, nextData[0]);
			currentTransform[8] = this.interpolateValue(previousTransform[8], nextTransform[8], previousData[0], renderTime, nextData[0]);
			this.translateTo(parseFloat(currentTransform[0]), parseFloat(currentTransform[1]), parseFloat(currentTransform[2]));
			this.scaleTo(parseFloat(currentTransform[3]), parseFloat(currentTransform[4]), parseFloat(currentTransform[5]));
			this.rotateTo(parseFloat(currentTransform[6]), parseFloat(currentTransform[7]), parseFloat(currentTransform[8]));
			/*// Calculate the squared distance between the previous point and next point
			 dist = this.distanceSquared(previousTransform.x, previousTransform.y, nextTransform.x, nextTransform.y);
			// Check that the distance is not higher than the maximum lerp and if higher,
			// set the current time to 1 to snap to the next position immediately
			if (dist > maxLerpSquared) { currentTime = 1; }
			// Interpolate the entity position by multiplying the Delta times T, and adding the previous position
			currentPosition = {};
			currentPosition.x = ( (nextTransform.x - previousTransform.x) * currentTime ) + previousTransform.x;
			currentPosition.y = ( (nextTransform.y - previousTransform.y) * currentTime ) + previousTransform.y;
			// Now actually transform the entity
			this.translate(entity, currentPosition.x, currentPosition.y);*/
			// Record the last time we updated the entity so we can disregard any updates
			// that arrive and are before this timestamp (not applicable in TCP but will
			// apply if we ever get UDP in websockets)
			this._lastUpdate = new Date().getTime();
		}
	},
_highlightToGlobalCompositeOperation: function (val) {
if (val) {
if (val === true) {
return 'lighter'
}
return val;
}
}
});
if (typeof(module) !== 'undefined' && typeof(module.exports) !== 'undefined') { module.exports = IgeEntity; }
<|start_filename|>django_extensions/templates/django_extensions/graph_models/body.html<|end_filename|>
{# django-extensions graph_models: per-app Graphviz DOT fragment. #}
{# When use_subgraph is set, the app's models are wrapped in a labelled cluster; #}
{# each model becomes one node with an HTML-like table label (fields optional). #}
{% if use_subgraph %}
subgraph {{ cluster_app_name }} {
  label=<
        <TABLE BORDER="0" CELLBORDER="0" CELLSPACING="0">
        <TR><TD COLSPAN="2" CELLPADDING="4" ALIGN="CENTER"
        ><FONT FACE="Helvetica Bold" COLOR="Black" POINT-SIZE="12"
        >{{ app_name }}</FONT></TD></TR>
        </TABLE>
        >
  color=olivedrab4
  style="rounded"
{% endif %}
{# One node per model, named <app>_<model> so edges in rel.html can reference it. #}
{% for model in models %}
  {{ model.app_name }}_{{ model.name }} [label=<
    <TABLE BGCOLOR="palegoldenrod" BORDER="0" CELLBORDER="0" CELLSPACING="0">
    <TR><TD COLSPAN="2" CELLPADDING="4" ALIGN="CENTER" BGCOLOR="olivedrab4"
    ><FONT FACE="Helvetica Bold" COLOR="white"
    >{{ model.label }}{% if model.abstracts %}<BR/>&lt;<FONT FACE="Helvetica Italic">{{ model.abstracts|join:"," }}</FONT>&gt;{% endif %}</FONT></TD></TR>
  {% if not disable_fields %}
    {# Blank (optional) fields are greyed out; abstract fields are italic. #}
    {% for field in model.fields %}
    <TR><TD ALIGN="LEFT" BORDER="0"
    ><FONT {% if field.blank %}COLOR="#7B7B7B" {% endif %}FACE="Helvetica {% if field.abstract %}Italic{% else %}Bold{% endif %}">{{ field.label }}</FONT
    ></TD>
    <TD ALIGN="LEFT"
    ><FONT {% if field.blank %}COLOR="#7B7B7B" {% endif %}FACE="Helvetica {% if field.abstract %}Italic{% else %}Bold{% endif %}">{{ field.type }}</FONT
    ></TD></TR>
    {% endfor %}
  {% endif %}
    </TABLE>
    >]
{% endfor %}
{% if use_subgraph %}
}
{% endif %}
<|start_filename|>django_extensions/templates/django_extensions/graph_models/rel.html<|end_filename|>
{# django-extensions graph_models: emits one DOT edge per model relation. #}
{# Targets outside the graphed apps get a placeholder node first (needs_node). #}
{% for model in models %}
  {% for relation in model.relations %}
  {% if relation.needs_node %}
  {{ relation.target_app }}_{{ relation.target }} [label=<
  <TABLE BGCOLOR="palegoldenrod" BORDER="0" CELLBORDER="0" CELLSPACING="0">
  <TR><TD COLSPAN="2" CELLPADDING="4" ALIGN="CENTER" BGCOLOR="olivedrab4"
  ><FONT FACE="Helvetica Bold" COLOR="white"
  >{{ relation.target }}</FONT></TD></TR>
  </TABLE>
  >]
  {% endif %}
  {{ model.app_name }}_{{ model.name }} -> {{ relation.target_app }}_{{ relation.target }}
  [label="{{ relation.label }}"] {{ relation.arrows }};
  {% endfor %}
{% endfor %}
<|start_filename|>django_extensions/templates/django_extensions/graph_models/head.html<|end_filename|>
{# django-extensions graph_models: opening of the DOT digraph with default #}
{# font, node and edge attributes; the digraph is closed by the tail template. #}
digraph name {
  fontname = "Helvetica"
  fontsize = 8

  node [
    fontname = "Helvetica"
    fontsize = 8
    shape = "plaintext"
  ]

  edge [
    fontname = "Helvetica"
    fontsize = 8
  ]
| Perkville/django-extensions |
<|start_filename|>src/Tests/EFCoreSecondLevelCacheInterceptor.Tests/SecondLevelCacheInterceptorTransactionTests.cs<|end_filename|>
using System;
using System.Linq;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace EFCoreSecondLevelCacheInterceptor.Tests
{
[TestClass]
public class SecondLevelCacheInterceptorTransactionTests
{
    /// <summary>
    /// Queries executed inside an explicit, still-open transaction must bypass the cache.
    /// The test deliberately throws before Commit(), so the transaction is never committed;
    /// [ExpectedException] asserts the TimeoutException escapes the test body.
    /// </summary>
    [DataTestMethod]
    [DataRow(TestCacheProvider.BuiltInInMemory)]
    [DataRow(TestCacheProvider.CacheManagerCoreInMemory)]
    [DataRow(TestCacheProvider.CacheManagerCoreRedis)]
    [DataRow(TestCacheProvider.EasyCachingCoreInMemory)]
    [DataRow(TestCacheProvider.EasyCachingCoreRedis)]
    [ExpectedException(typeof(TimeoutException))]
    public void TestQueriesUsingExplicitTransactionsWillNotUseTheCache(TestCacheProvider cacheProvider)
    {
        EFServiceProvider.RunInContext(cacheProvider, LogLevel.Debug, false, (context, loggerProvider) =>
        {
            using (var txn = context.Database.BeginTransaction())
            {
                // Read and modify an entity
                var entity1 = context.Tags.Cacheable(CacheExpirationMode.Absolute, TimeSpan.FromMinutes(45)).First();
                entity1.Name = "FOO";

                // Save the change, cache will be invalidated
                context.SaveChanges();

                // Read the same entity again
                entity1 = context.Tags.Cacheable(CacheExpirationMode.Absolute, TimeSpan.FromMinutes(45)).First();

                // It will not get cached
                Assert.AreEqual(0, loggerProvider.GetCacheHitCount());

                // Call some method
                // THIS METHOD THROWS AN EXCEPTION SO THE TRANSACTION IS NEVER COMMITTED
                throw new TimeoutException();

                // (we never hit these lines, so the cache is not invalidated and the transaction is not committed)
                context.SaveChanges();
                txn.Commit();
            }
        });
    }

    /// <summary>
    /// After a transaction is committed, the cache entries of the queries that ran
    /// inside it must be invalidated: every read below is expected to hit the
    /// database (cache-hit count stays 0), including the read performed after Commit().
    /// </summary>
    [DataTestMethod]
    [DataRow(TestCacheProvider.BuiltInInMemory)]
    [DataRow(TestCacheProvider.CacheManagerCoreInMemory)]
    [DataRow(TestCacheProvider.CacheManagerCoreRedis)]
    [DataRow(TestCacheProvider.EasyCachingCoreInMemory)]
    [DataRow(TestCacheProvider.EasyCachingCoreRedis)]
    public void TestQueriesUsingExplicitTransactionsWillInvalidateTheCache(TestCacheProvider cacheProvider)
    {
        // Random suffix ensures each run performs a real update (avoids no-op SaveChanges).
        var rnd = new Random();
        EFServiceProvider.RunInContext(cacheProvider, LogLevel.Debug, false, (context, loggerProvider) =>
        {
            // Read and cache data
            var entity0 = context.Tags.Cacheable(CacheExpirationMode.Absolute, TimeSpan.FromMinutes(45)).First();

            using (var txn = context.Database.BeginTransaction())
            {
                // Read and modify an entity.
                var entity1 = context.Tags.Cacheable(CacheExpirationMode.Absolute, TimeSpan.FromMinutes(45)).First();

                // Reading the data from the database, not cache.
                Assert.AreEqual(0, loggerProvider.GetCacheHitCount());

                entity1.Name = $"FOO{rnd.Next()}";

                // Save the change, cache will be invalidated.
                context.SaveChanges();

                // Read the same entity again.
                entity1 = context.Tags.Cacheable(CacheExpirationMode.Absolute, TimeSpan.FromMinutes(45)).First();

                // Reading the data from the database, not cache.
                Assert.AreEqual(0, loggerProvider.GetCacheHitCount());

                context.SaveChanges();
                txn.Commit();
            }

            // `After` committing the transaction, the related query cache should be invalidated.
            var entity2 = context.Tags.Cacheable(CacheExpirationMode.Absolute, TimeSpan.FromMinutes(45)).First();

            // Reading the data from the database after invalidation, not cache.
            Assert.AreEqual(0, loggerProvider.GetCacheHitCount());
        });
    }
}
}
<|start_filename|>update-dependencies.bat<|end_filename|>
rem Refreshes NuGet dependencies using the dotnet-outdated global tool.
dotnet restore
dotnet tool update --global dotnet-outdated-tool
rem Report (and interactively update) outdated package references.
dotnet outdated
rem Uncomment the next line instead to also consider pre-release versions:
rem dotnet outdated --pre-release Always
dotnet restore
pause
<|start_filename|>src/Tests/EFCoreSecondLevelCacheInterceptor.Tests/XxHashTests.cs<|end_filename|>
using Microsoft.VisualStudio.TestTools.UnitTesting;
[assembly: Parallelize(Workers = 0, Scope = ExecutionScope.MethodLevel)] // Workers: The number of threads to run the tests. Set it to 0 to use the number of core of your computer.
namespace EFCoreSecondLevelCacheInterceptor.Tests
{
[TestClass]
public class XxHashTests
{
    /// <summary>
    /// Verifies the xxHash64 implementation against a known digest
    /// for a fixed 64-byte sample input.
    /// </summary>
    [TestMethod]
    public void TestXxHashReturnsCorrectValue()
    {
        var input = new byte[]
        {
            0x60, 0x82, 0x40, 0x77, 0x8a, 0x0e, 0xe4, 0xd5,
            0x85, 0x1f, 0xa6, 0x86, 0x34, 0x01, 0xd7, 0xf2,
            0x30, 0x5d, 0x84, 0x54, 0x15, 0xf9, 0xbd, 0x03,
            0x4b, 0x0f, 0x90, 0x4e, 0xf5, 0x57, 0x21, 0x21,
            0xed, 0x8c, 0x19, 0x93, 0xbd, 0x01, 0x12, 0x0c,
            0x20, 0xb0, 0x33, 0x98, 0x4b, 0xe7, 0xc1, 0x0a,
            0x27, 0x6d, 0xb3, 0x5c, 0xc7, 0xc0, 0xd0, 0xa0,
            0x7e, 0x28, 0xce, 0x46, 0x85, 0xb7, 0x2b, 0x16,
        };

        var actual = new XxHash64Unsafe().ComputeHash(input);
        Assert.AreEqual((ulong)0x4c0a65b1ef9ea060, actual);
    }
}
}
<|start_filename|>src/EFCoreSecondLevelCacheInterceptor/EFCacheDependenciesProcessor.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Data.Common;
using System.Linq;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Options;
namespace EFCoreSecondLevelCacheInterceptor
{
/// <summary>
/// Cache Dependencies Calculator: determines which table names a query depends on,
/// and invalidates the cached results of all queries that depend on a modified table.
/// </summary>
public class EFCacheDependenciesProcessor : IEFCacheDependenciesProcessor
{
    // Optional prefix prepended to every dependency key (from CacheKeyPrefix setting).
    private readonly string _cacheKeyPrefix;
    private readonly IEFDebugLogger _logger;
    private readonly IEFCacheServiceProvider _cacheServiceProvider;
    private readonly IEFSqlCommandsProcessor _sqlCommandsProcessor;
    private readonly EFCoreSecondLevelCacheSettings _cacheSettings;

    /// <summary>
    /// Cache Dependencies Calculator
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="cacheSettings"/> is null.</exception>
    public EFCacheDependenciesProcessor(
        IEFDebugLogger logger,
        IEFCacheServiceProvider cacheServiceProvider,
        IEFSqlCommandsProcessor sqlCommandsProcessor,
        IOptions<EFCoreSecondLevelCacheSettings> cacheSettings)
    {
        _logger = logger;
        _cacheServiceProvider = cacheServiceProvider;
        _sqlCommandsProcessor = sqlCommandsProcessor;

        if (cacheSettings == null)
        {
            throw new ArgumentNullException(nameof(cacheSettings));
        }

        _cacheSettings = cacheSettings.Value;
        _cacheKeyPrefix = cacheSettings.Value.CacheKeyPrefix;
    }

    /// <summary>
    /// Finds the related table names of the current query.
    /// </summary>
    public SortedSet<string> GetCacheDependencies(DbCommand command, DbContext context, EFCachePolicy cachePolicy)
    {
        if (command == null)
        {
            throw new ArgumentNullException(nameof(command));
        }

        // All table names known to the current DbContext's model.
        var tableNames = new SortedSet<string>(
            _sqlCommandsProcessor.GetAllTableNames(context).Select(x => x.TableName),
            StringComparer.OrdinalIgnoreCase);

        return GetCacheDependencies(cachePolicy, tableNames, command.CommandText);
    }

    /// <summary>
    /// Finds the related table names of the current query by intersecting the model's
    /// table names with the identifiers parsed out of the SQL text. Falls back to the
    /// policy's explicitly-configured dependencies, and finally to the special
    /// `UnknownsCacheDependency` key when nothing can be determined.
    /// </summary>
    public SortedSet<string> GetCacheDependencies(EFCachePolicy cachePolicy, SortedSet<string> tableNames, string commandText)
    {
        if (cachePolicy == null)
        {
            throw new ArgumentNullException(nameof(cachePolicy));
        }

        // Identifiers extracted from the SQL text (e.g. the names inside [brackets]).
        var textsInsideSquareBrackets = _sqlCommandsProcessor.GetSqlCommandTableNames(commandText);
        var cacheDependencies = new SortedSet<string>(
            tableNames.Intersect(textsInsideSquareBrackets, StringComparer.OrdinalIgnoreCase),
            StringComparer.OrdinalIgnoreCase);
        if (cacheDependencies.Any())
        {
            logProcess(tableNames, textsInsideSquareBrackets, cacheDependencies);
            return PrefixCacheDependencies(cacheDependencies);
        }

        // NOTE(review): explicit dependencies are honored only when CacheItemsDependencies
        // actually is a SortedSet<string> (the `as` cast yields null otherwise) — confirm
        // that no other ISet implementation is ever assigned to the policy.
        cacheDependencies = cachePolicy.CacheItemsDependencies as SortedSet<string>;
        if (cacheDependencies?.Any() != true)
        {
            _logger.LogDebug($"It's not possible to calculate the related table names of the current query[{commandText}]. Please use EFCachePolicy.Configure(options => options.CacheDependencies(\"real_table_name_1\", \"real_table_name_2\")) to specify them explicitly.");
            cacheDependencies = new SortedSet<string>(StringComparer.OrdinalIgnoreCase)
            {
                EFCachePolicy.UnknownsCacheDependency
            };
        }

        logProcess(tableNames, textsInsideSquareBrackets, cacheDependencies);
        return PrefixCacheDependencies(cacheDependencies);
    }

    // Emits a single debug line summarizing how the dependencies were derived.
    private void logProcess(SortedSet<string> tableNames, SortedSet<string> textsInsideSquareBrackets, SortedSet<string> cacheDependencies)
    {
        _logger.LogDebug($"ContextTableNames: {string.Join(", ", tableNames)}, PossibleQueryTableNames: {string.Join(", ", textsInsideSquareBrackets)} -> CacheDependencies: {string.Join(", ", cacheDependencies)}.");
    }

    /// <summary>
    /// Invalidates all of the cache entries which are dependent on any of the specified root keys.
    /// Returns false (and invalidates nothing) for non-CRUD commands or commands excluded
    /// by the SkipCacheInvalidationCommands predicate.
    /// </summary>
    public bool InvalidateCacheDependencies(string commandText, EFCacheKey cacheKey)
    {
        if (cacheKey is null)
        {
            throw new ArgumentNullException(nameof(cacheKey));
        }

        if (!_sqlCommandsProcessor.IsCrudCommand(commandText))
        {
            _logger.LogDebug($"Skipped invalidating a none-CRUD command[{commandText}].");
            return false;
        }

        if (shouldSkipCacheInvalidationCommands(commandText))
        {
            _logger.LogDebug($"Skipped invalidating the related cache entries of this query[{commandText}] based on the provided predicate.");
            return false;
        }

        // Always invalidate the "unknowns" bucket too, so queries whose dependencies
        // could not be calculated are never served stale.
        cacheKey.CacheDependencies.Add($"{_cacheKeyPrefix}{EFCachePolicy.UnknownsCacheDependency}");
        _cacheServiceProvider.InvalidateCacheDependencies(cacheKey);

        _logger.LogDebug(CacheableEventId.QueryResultInvalidated, $"Invalidated [{string.Join(", ", cacheKey.CacheDependencies)}] dependencies.");
        return true;
    }

    // True when the user-supplied predicate excludes this command from invalidation.
    private bool shouldSkipCacheInvalidationCommands(string commandText)
    {
        return _cacheSettings.SkipCacheInvalidationCommands != null && _cacheSettings.SkipCacheInvalidationCommands(commandText);
    }

    // Prepends the configured cache-key prefix to every dependency name.
    private SortedSet<string> PrefixCacheDependencies(SortedSet<string> cacheDependencies)
        => new(cacheDependencies.Select(x => $"{_cacheKeyPrefix}{x}"), StringComparer.OrdinalIgnoreCase);
}
}
<|start_filename|>src/EFCoreSecondLevelCacheInterceptor/EFCachedQueryExtensions.cs<|end_filename|>
using System;
using System.Linq;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Query.Internal;
namespace EFCoreSecondLevelCacheInterceptor
{
/// <summary>
/// Returns a new cached query.
/// </summary>
public static class EFCachedQueryExtensions
{
    // Default cache duration used by the parameterless Cacheable() overloads.
    private static readonly TimeSpan _thirtyMinutes = TimeSpan.FromMinutes(30);

    /// <summary>
    /// IsNotCachable Marker: a query tag that tells the interceptor to skip caching.
    /// </summary>
    public static readonly string IsNotCachableMarker = $"{nameof(EFCoreSecondLevelCacheInterceptor)}{nameof(NotCacheable)}";

    /// <summary>
    /// Returns a new query where the entities returned will be cached.
    /// </summary>
    /// <typeparam name="TType">Entity type.</typeparam>
    /// <param name="query">The input EF query.</param>
    /// <param name="expirationMode">Defines the expiration mode of the cache item.</param>
    /// <param name="timeout">The expiration timeout.</param>
    /// <returns>Provides functionality to evaluate queries against a specific data source.</returns>
    public static IQueryable<TType> Cacheable<TType>(
        this IQueryable<TType> query, CacheExpirationMode expirationMode, TimeSpan timeout)
    {
        sanityCheck(query);
        // The policy is serialized into a query tag; the interceptor parses it back out.
        return query.TagWith(EFCachePolicy.Configure(options =>
            options.ExpirationMode(expirationMode).Timeout(timeout)));
    }

    /// <summary>
    /// Returns a new query where the entities returned will be cached.
    /// </summary>
    /// <typeparam name="TType">Entity type.</typeparam>
    /// <param name="query">The input EF query.</param>
    /// <param name="expirationMode">Defines the expiration mode of the cache item.</param>
    /// <param name="timeout">The expiration timeout.</param>
    /// <param name="cacheDependencies">
    /// Set this option to the `real` related table names of the current query, if you are using a stored procedure,
    /// otherwise cache dependencies of normal queries will be calculated automatically.
    /// `cacheDependencies` determines which tables are used in this final query.
    /// This array will be used to invalidate the related cache of all related queries automatically.
    /// </param>
    /// <param name="saltKey">If you think the computed hash of the query to calculate the cache-key is not enough, set this value.</param>
    /// <returns>Provides functionality to evaluate queries against a specific data source.</returns>
    public static IQueryable<TType> Cacheable<TType>(
        this IQueryable<TType> query, CacheExpirationMode expirationMode, TimeSpan timeout, string[] cacheDependencies, string saltKey)
    {
        sanityCheck(query);
        return query.TagWith(EFCachePolicy.Configure(options =>
            options.ExpirationMode(expirationMode).Timeout(timeout)
            .CacheDependencies(cacheDependencies).SaltKey(saltKey)));
    }

    /// <summary>
    /// Returns a new query where the entities returned will be cached.
    /// </summary>
    /// <typeparam name="TType">Entity type.</typeparam>
    /// <param name="query">The input EF query.</param>
    /// <param name="expirationMode">Defines the expiration mode of the cache item.</param>
    /// <param name="timeout">The expiration timeout.</param>
    /// <param name="cacheDependencies">
    /// Set this option to the `real` related table names of the current query, if you are using a stored procedure,
    /// otherwise cache dependencies of normal queries will be calculated automatically.
    /// `cacheDependencies` determines which tables are used in this final query.
    /// This array will be used to invalidate the related cache of all related queries automatically.
    /// </param>
    /// <returns>Provides functionality to evaluate queries against a specific data source.</returns>
    public static IQueryable<TType> Cacheable<TType>(
        this IQueryable<TType> query, CacheExpirationMode expirationMode, TimeSpan timeout, string[] cacheDependencies)
    {
        sanityCheck(query);
        return query.TagWith(EFCachePolicy.Configure(options =>
            options.ExpirationMode(expirationMode).Timeout(timeout).CacheDependencies(cacheDependencies)));
    }

    /// <summary>
    /// Returns a new query where the entities returned will be cached.
    /// </summary>
    /// <typeparam name="TType">Entity type.</typeparam>
    /// <param name="query">The input EF query.</param>
    /// <param name="expirationMode">Defines the expiration mode of the cache item.</param>
    /// <param name="timeout">The expiration timeout.</param>
    /// <param name="saltKey">If you think the computed hash of the query to calculate the cache-key is not enough, set this value.</param>
    /// <returns>Provides functionality to evaluate queries against a specific data source.</returns>
    public static IQueryable<TType> Cacheable<TType>(
        this IQueryable<TType> query, CacheExpirationMode expirationMode, TimeSpan timeout, string saltKey)
    {
        sanityCheck(query);
        return query.TagWith(EFCachePolicy.Configure(options =>
            options.ExpirationMode(expirationMode).Timeout(timeout).SaltKey(saltKey)));
    }

    /// <summary>
    /// Returns a new query where the entities returned by it will be cached only for 30 minutes.
    /// </summary>
    /// <typeparam name="TType">Entity type.</typeparam>
    /// <param name="query">The input EF query.</param>
    /// <returns>Provides functionality to evaluate queries against a specific data source.</returns>
    public static IQueryable<TType> Cacheable<TType>(
        this IQueryable<TType> query)
    {
        sanityCheck(query);
        return query.TagWith(EFCachePolicy.Configure(options =>
            options.ExpirationMode(CacheExpirationMode.Absolute).Timeout(_thirtyMinutes).DefaultCacheableMethod(true)));
    }

    /// <summary>
    /// Returns a new query where the entities returned by it will be cached only for 30 minutes.
    /// </summary>
    /// <typeparam name="TType">Entity type.</typeparam>
    /// <param name="query">The input EF query.</param>
    /// <returns>Provides functionality to evaluate queries against a specific data source.</returns>
    public static IQueryable<TType> Cacheable<TType>(
        this DbSet<TType> query) where TType : class
    {
        sanityCheck(query);
        return query.TagWith(EFCachePolicy.Configure(options =>
            options.ExpirationMode(CacheExpirationMode.Absolute).Timeout(_thirtyMinutes).DefaultCacheableMethod(true)));
    }

    /// <summary>
    /// Returns a new query where the entities returned will be cached.
    /// </summary>
    /// <typeparam name="TType">Entity type.</typeparam>
    /// <param name="query">The input EF query.</param>
    /// <param name="expirationMode">Defines the expiration mode of the cache item.</param>
    /// <param name="timeout">The expiration timeout.</param>
    /// <returns>Provides functionality to evaluate queries against a specific data source.</returns>
    public static IQueryable<TType> Cacheable<TType>(
        this DbSet<TType> query, CacheExpirationMode expirationMode, TimeSpan timeout) where TType : class
    {
        sanityCheck(query);
        return query.TagWith(EFCachePolicy.Configure(options =>
            options.ExpirationMode(expirationMode).Timeout(timeout)));
    }

    /// <summary>
    /// Returns a new query where the entities returned will be cached.
    /// </summary>
    /// <typeparam name="TType">Entity type.</typeparam>
    /// <param name="query">The input EF query.</param>
    /// <param name="expirationMode">Defines the expiration mode of the cache item.</param>
    /// <param name="timeout">The expiration timeout.</param>
    /// <param name="cacheDependencies">
    /// Set this option to the `real` related table names of the current query, if you are using a stored procedure,
    /// otherwise cache dependencies of normal queries will be calculated automatically.
    /// `cacheDependencies` determines which tables are used in this final query.
    /// This array will be used to invalidate the related cache of all related queries automatically.
    /// </param>
    /// <param name="saltKey">If you think the computed hash of the query to calculate the cache-key is not enough, set this value.</param>
    /// <returns>Provides functionality to evaluate queries against a specific data source.</returns>
    public static IQueryable<TType> Cacheable<TType>(
        this DbSet<TType> query, CacheExpirationMode expirationMode, TimeSpan timeout, string[] cacheDependencies, string saltKey) where TType : class
    {
        sanityCheck(query);
        return query.TagWith(EFCachePolicy.Configure(options =>
            options.ExpirationMode(expirationMode).Timeout(timeout)
            .CacheDependencies(cacheDependencies).SaltKey(saltKey)));
    }

    /// <summary>
    /// Returns a new query where the entities returned will be cached.
    /// </summary>
    /// <typeparam name="TType">Entity type.</typeparam>
    /// <param name="query">The input EF query.</param>
    /// <param name="expirationMode">Defines the expiration mode of the cache item.</param>
    /// <param name="timeout">The expiration timeout.</param>
    /// <param name="cacheDependencies">
    /// Set this option to the `real` related table names of the current query, if you are using a stored procedure,
    /// otherwise cache dependencies of normal queries will be calculated automatically.
    /// `cacheDependencies` determines which tables are used in this final query.
    /// This array will be used to invalidate the related cache of all related queries automatically.
    /// </param>
    /// <returns>Provides functionality to evaluate queries against a specific data source.</returns>
    public static IQueryable<TType> Cacheable<TType>(
        this DbSet<TType> query, CacheExpirationMode expirationMode, TimeSpan timeout, string[] cacheDependencies) where TType : class
    {
        sanityCheck(query);
        return query.TagWith(EFCachePolicy.Configure(options =>
            options.ExpirationMode(expirationMode).Timeout(timeout).CacheDependencies(cacheDependencies)));
    }

    /// <summary>
    /// Returns a new query where the entities returned will be cached.
    /// </summary>
    /// <typeparam name="TType">Entity type.</typeparam>
    /// <param name="query">The input EF query.</param>
    /// <param name="expirationMode">Defines the expiration mode of the cache item.</param>
    /// <param name="timeout">The expiration timeout.</param>
    /// <param name="saltKey">If you think the computed hash of the query to calculate the cache-key is not enough, set this value.</param>
    /// <returns>Provides functionality to evaluate queries against a specific data source.</returns>
    public static IQueryable<TType> Cacheable<TType>(
        this DbSet<TType> query, CacheExpirationMode expirationMode, TimeSpan timeout, string saltKey) where TType : class
    {
        sanityCheck(query);
        return query.TagWith(EFCachePolicy.Configure(options =>
            options.ExpirationMode(expirationMode).Timeout(timeout).SaltKey(saltKey)));
    }

    /// <summary>
    /// Returns a new query where the entities returned will not be cached.
    /// </summary>
    /// <typeparam name="TType">Entity type.</typeparam>
    /// <param name="query">The input EF query.</param>
    /// <returns>Provides functionality to evaluate queries against a specific data source.</returns>
    public static IQueryable<TType> NotCacheable<TType>(this IQueryable<TType> query)
    {
        sanityCheck(query);
        return query.TagWith(IsNotCachableMarker);
    }

    /// <summary>
    /// Returns a new query where the entities returned will not be cached.
    /// </summary>
    /// <typeparam name="TType">Entity type.</typeparam>
    /// <param name="query">The input EF query.</param>
    /// <returns>Provides functionality to evaluate queries against a specific data source.</returns>
    public static IQueryable<TType> NotCacheable<TType>(this DbSet<TType> query) where TType : class
    {
        sanityCheck(query);
        return query.TagWith(IsNotCachableMarker);
    }

    // Rejects null queries and non-relational (non-EntityQueryProvider) providers,
    // since TagWith only flows through to SQL for relational EF Core queries.
    private static void sanityCheck<TType>(IQueryable<TType> query)
    {
        if (query == null)
        {
            throw new ArgumentNullException(nameof(query));
        }

        if (!(query.Provider is EntityQueryProvider))
        {
            throw new NotSupportedException("`Cacheable` method is designed only for relational EF Core queries.");
        }
    }
}
}
<|start_filename|>src/Tests/Issues/Issue12MySQL/DataLayer/ApplicationDbContext.cs<|end_filename|>
using System;
using System.Linq;
using Issue12MySQL.Entities;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Microsoft.Extensions.Logging;
namespace Issue12MySQL.DataLayer
{
/// <summary>
/// Test DbContext for the MySQL provider. Registers value converters for CLR
/// types the provider cannot map natively (DateTimeOffset, char, uint, ulong).
/// </summary>
public class ApplicationDbContext : DbContext
{
    public ApplicationDbContext(DbContextOptions options)
        : base(options)
    {
    }

    public DbSet<Person> People { get; set; }

    protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
    {
        // Verbose console logging (Debug and above) including parameter values,
        // intended for test/diagnostic runs only.
        optionsBuilder
            .UseLoggerFactory(LoggerFactory.Create(x => x
                .AddConsole()
                .AddFilter(y => y >= LogLevel.Debug)))
            .EnableSensitiveDataLogging()
            .EnableDetailedErrors();
        base.OnConfiguring(optionsBuilder);
    }

    protected override void OnModelCreating(ModelBuilder builder)
    {
        base.OnModelCreating(builder);

        // MySQL does not support DateTimeOffset
        foreach (var property in builder.Model.GetEntityTypes()
            .SelectMany(t => t.GetProperties())
            .Where(p => p.ClrType == typeof(DateTimeOffset)))
        {
            property.SetValueConverter(
                new ValueConverter<DateTimeOffset, DateTime>(
                    convertToProviderExpression: dateTimeOffset => dateTimeOffset.UtcDateTime,
                    convertFromProviderExpression: dateTime => new DateTimeOffset(dateTime)
                ));
        }

        // Same conversion for nullable DateTimeOffset columns.
        // NOTE(review): `.Value` would throw for null, but EF Core does not pass
        // nulls through value converters — confirm this holds for this provider.
        foreach (var property in builder.Model.GetEntityTypes()
            .SelectMany(t => t.GetProperties())
            .Where(p => p.ClrType == typeof(DateTimeOffset?)))
        {
            property.SetValueConverter(
                new ValueConverter<DateTimeOffset?, DateTime>(
                    convertToProviderExpression: dateTimeOffset => dateTimeOffset.Value.UtcDateTime,
                    convertFromProviderExpression: dateTime => new DateTimeOffset(dateTime)
                ));
        }

        // To solve: Unable to cast object of type 'System.Char' to type 'System.Int32'.
        foreach (var property in builder.Model.GetEntityTypes()
            .SelectMany(t => t.GetProperties())
            .Where(p => p.ClrType == typeof(char)))
        {
            property.SetValueConverter(
                new ValueConverter<char, int>(
                    convertToProviderExpression: charValue => charValue,
                    convertFromProviderExpression: intValue => (char)intValue
                ));
        }

        //To solve: Unable to cast object of type 'System.UInt32' to type 'System.Int32'
        foreach (var property in builder.Model.GetEntityTypes()
            .SelectMany(t => t.GetProperties())
            .Where(p => p.ClrType == typeof(uint)))
        {
            property.SetValueConverter(
                new ValueConverter<uint, int>(
                    convertToProviderExpression: uintValue => (int)uintValue,
                    convertFromProviderExpression: intValue => (uint)intValue
                ));
        }

        //To solve: Unable to cast object of type 'System.UInt64' to type 'System.Int64'
        foreach (var property in builder.Model.GetEntityTypes()
            .SelectMany(t => t.GetProperties())
            .Where(p => p.ClrType == typeof(ulong)))
        {
            property.SetValueConverter(
                new ValueConverter<ulong, long>(
                    convertToProviderExpression: ulongValue => (long)ulongValue,
                    convertFromProviderExpression: longValue => (ulong)longValue
                ));
        }
    }
}
}
<|start_filename|>src/EFCoreSecondLevelCacheInterceptor/EFCachePolicy.cs<|end_filename|>
using System;
using System.Collections.Generic;
namespace EFCoreSecondLevelCacheInterceptor
{
/// <summary>
/// Describes how a query's results should be cached: expiration mode and timeout,
/// an optional salt key, and the set of tables the cached entry depends on.
/// The policy is serialized via <see cref="ToString"/> into a query tag.
/// </summary>
public class EFCachePolicy
{
    /// <summary>
    /// Separator between the serialized policy items. It's `|`
    /// </summary>
    public const char ItemsSeparator = '|';

    /// <summary>
    /// Separator between the policy name and its payload. It's `-->`
    /// </summary>
    public const string PartsSeparator = "-->";

    /// <summary>
    /// Separator between the serialized cache dependencies. It's `_`
    /// </summary>
    public const string CacheDependenciesSeparator = "_";

    /// <summary>
    /// A special dependency key used when the real table names of a query are unknown.
    /// </summary>
    public const string UnknownsCacheDependency = nameof(UnknownsCacheDependency);

    /// <summary>
    /// The expiration mode of the cache item. Defaults to Absolute.
    /// </summary>
    public CacheExpirationMode CacheExpirationMode { get; private set; }

    /// <summary>
    /// The expiration timeout. Defaults to 20 minutes.
    /// </summary>
    public TimeSpan CacheTimeout { get; private set; } = TimeSpan.FromMinutes(20);

    /// <summary>
    /// An extra value mixed into the computed cache-key hash. Defaults to string.Empty.
    /// </summary>
    public string CacheSaltKey { get; private set; } = string.Empty;

    /// <summary>
    /// The entities/tables this query depends on; used to invalidate related cached queries.
    /// </summary>
    public ISet<string> CacheItemsDependencies { get; private set; } = new SortedSet<string>(StringComparer.OrdinalIgnoreCase);

    /// <summary>
    /// Whether this policy was produced by the parameterless default Cacheable() method.
    /// </summary>
    public bool IsDefaultCacheableMethod { set; get; }

    /// <summary>
    /// Sets the `real` related table names of the current query (needed for stored
    /// procedures; normal queries have their dependencies calculated automatically).
    /// </summary>
    public EFCachePolicy CacheDependencies(params string[] cacheDependencies)
    {
        CacheItemsDependencies = new SortedSet<string>(cacheDependencies, StringComparer.OrdinalIgnoreCase);
        return this;
    }

    /// <summary>
    /// Sets the expiration mode of the cache item (default: Absolute).
    /// </summary>
    public EFCachePolicy ExpirationMode(CacheExpirationMode expirationMode)
    {
        CacheExpirationMode = expirationMode;
        return this;
    }

    /// <summary>
    /// Sets the expiration timeout (default: 20 minutes).
    /// </summary>
    public EFCachePolicy Timeout(TimeSpan timeout)
    {
        CacheTimeout = timeout;
        return this;
    }

    /// <summary>
    /// Sets an extra value to be mixed into the computed cache-key hash.
    /// </summary>
    public EFCachePolicy SaltKey(string saltKey)
    {
        CacheSaltKey = saltKey;
        return this;
    }

    /// <summary>
    /// Marks this policy as coming from the default Cacheable() method.
    /// </summary>
    public EFCachePolicy DefaultCacheableMethod(bool state)
    {
        IsDefaultCacheableMethod = state;
        return this;
    }

    /// <summary>
    /// Builds a policy via the given configuration callback and returns its
    /// serialized (query-tag) form.
    /// </summary>
    public static string Configure(Action<EFCachePolicy> options)
    {
        if (options == null)
        {
            throw new ArgumentNullException(nameof(options));
        }

        var policy = new EFCachePolicy();
        options(policy);
        return policy.ToString();
    }

    /// <summary>
    /// Serializes this policy; parsed back by the interceptor, so the format must stay stable.
    /// </summary>
    public override string ToString()
    {
        return $"{nameof(EFCachePolicy)} {PartsSeparator} {CacheExpirationMode}{ItemsSeparator}{CacheTimeout}{ItemsSeparator}{CacheSaltKey}{ItemsSeparator}{string.Join(CacheDependenciesSeparator, CacheItemsDependencies)}{ItemsSeparator}{IsDefaultCacheableMethod}".TrimEnd(ItemsSeparator);
    }
}
}
<|start_filename|>src/Tests/Issues/Issue12MySQL/Migrations/20210712051023_V2021_07_12_0939.cs<|end_filename|>
using System;
using Microsoft.EntityFrameworkCore.Migrations;
using MySql.EntityFrameworkCore.Metadata;
namespace Issue12MySQL.Migrations
{
/// <summary>
/// Creates the `People` table with one column per CLR type exercised by the tests.
/// NOTE(review): generated migration — the column definitions must match the model
/// snapshot, so avoid hand-editing them.
/// </summary>
public partial class V2021_07_12_0939 : Migration
{
    protected override void Up(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.CreateTable(
            name: "People",
            columns: table => new
            {
                Id = table.Column<int>(type: "int", nullable: false)
                    .Annotation("MySQL:ValueGenerationStrategy", MySQLValueGenerationStrategy.IdentityColumn),
                Name = table.Column<string>(type: "text", nullable: true),
                AddDate = table.Column<DateTime>(type: "datetime", nullable: false),
                UpdateDate = table.Column<DateTime>(type: "datetime", nullable: true),
                Points = table.Column<long>(type: "bigint", nullable: false),
                IsActive = table.Column<bool>(type: "tinyint(1)", nullable: false),
                ByteValue = table.Column<byte>(type: "tinyint unsigned", nullable: false),
                // char is stored as int (see the char<->int value converter in the context).
                CharValue = table.Column<int>(type: "int", nullable: false),
                // DateTimeOffset is stored as datetime (MySQL has no DateTimeOffset type).
                DateTimeOffsetValue = table.Column<DateTime>(type: "datetime", nullable: false),
                DecimalValue = table.Column<decimal>(type: "decimal(18, 2)", nullable: false),
                DoubleValue = table.Column<double>(type: "double", nullable: false),
                FloatValue = table.Column<float>(type: "float", nullable: false),
                GuidValue = table.Column<byte[]>(type: "varbinary(16)", nullable: false),
                TimeSpanValue = table.Column<TimeSpan>(type: "time", nullable: false),
                ShortValue = table.Column<short>(type: "smallint", nullable: false),
                ByteArrayValue = table.Column<byte[]>(type: "varbinary(4000)", nullable: true),
                UintValue = table.Column<int>(type: "int unsigned", nullable: false),
                UlongValue = table.Column<long>(type: "bigint unsigned", nullable: false),
                // NOTE(review): ushort mapped to `bigint unsigned` looks wider than
                // necessary — presumably the generator's choice; confirm before changing.
                UshortValue = table.Column<long>(type: "bigint unsigned", nullable: false)
            },
            constraints: table =>
            {
                table.PrimaryKey("PK_People", x => x.Id);
            });
    }

    /// <summary>Reverts the migration by dropping the `People` table.</summary>
    protected override void Down(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.DropTable(
            name: "People");
    }
}
}
<|start_filename|>src/EFCoreSecondLevelCacheInterceptor/XxHash64Unsafe.cs<|end_filename|>
using System;
using System.Runtime.CompilerServices;
namespace EFCoreSecondLevelCacheInterceptor
{
    /// <summary>
    /// xxHash is an extremely fast non-cryptographic Hash algorithm, working at speeds close to RAM limits.
    /// https://github.com/Cyan4973/xxHash
    /// </summary>
    public class XxHash64Unsafe : IEFHashProvider
    {
        // xxHash64 magic primes, taken from the reference implementation.
        private const ulong Prime1 = 11400714785074694791UL;
        private const ulong Prime2 = 14029467366897019727UL;
        private const ulong Prime3 = 1609587929392839161UL;
        private const ulong Prime4 = 9650029242287828579UL;
        private const ulong Prime5 = 2870177450012600261UL;
        private const ulong Seed = 0UL;

        /// <summary>
        /// Computes the xxHash64 of the input string. xxHash64 is an extremely fast non-cryptographic Hash algorithm.
        /// The string's UTF-16 code units are hashed directly (2 bytes per char).
        /// </summary>
        /// <param name="data">the input string</param>
        /// <returns>xxHash64</returns>
        public unsafe ulong ComputeHash(string data)
        {
            if (data == null)
            {
                throw new ArgumentNullException(nameof(data));
            }

            // `fixed` over a string is always non-null (it points at the
            // terminator for ""), so the empty case is safe here.
            fixed (char* input = data)
            {
                return ComputeHash((byte*)input, data.Length * sizeof(char), Seed);
            }
        }

        /// <summary>
        /// Computes the xxHash64 of the input array. xxHash is an extremely fast non-cryptographic Hash algorithm.
        /// </summary>
        /// <param name="data">the input array</param>
        /// <returns>xxHash64</returns>
        public unsafe ulong ComputeHash(byte[] data)
        {
            if (data == null)
            {
                throw new ArgumentNullException(nameof(data));
            }

            if (data.Length == 0)
            {
                // BUGFIX: `&data[0]` throws IndexOutOfRangeException for an empty
                // array. Hash zero bytes through a valid dummy address instead.
                byte dummy = 0;
                return ComputeHash(&dummy, 0, Seed);
            }

            fixed (byte* input = &data[0])
            {
                return ComputeHash(input, data.Length, Seed);
            }
        }

        /// <summary>
        /// Computes the xxHash64 of the input byte array. xxHash is an extremely fast non-cryptographic Hash algorithm.
        /// </summary>
        /// <param name="data">the input byte array</param>
        /// <param name="offset">start offset</param>
        /// <param name="len">length</param>
        /// <param name="seed">initial seed</param>
        /// <returns>xxHash64</returns>
        public unsafe ulong ComputeHash(byte[] data, int offset, int len, uint seed)
        {
            if (data == null)
            {
                throw new ArgumentNullException(nameof(data));
            }

            // Validate the range instead of relying on &data[offset] to fault.
            // (`data.Length - len` may be negative; offset >= 0 catches that too.)
            if (offset < 0 || len < 0 || offset > data.Length - len)
            {
                throw new ArgumentOutOfRangeException(nameof(offset), "The given offset/len range is outside of the data array.");
            }

            if (len == 0)
            {
                byte dummy = 0;
                return ComputeHash(&dummy, 0, seed);
            }

            fixed (byte* input = &data[offset])
            {
                return ComputeHash(input, len, seed);
            }
        }

        /// <summary>
        /// Core xxHash64 over a raw buffer. `ptr` must be a valid (non-null)
        /// address even when length == 0; the loops below are no-ops in that case.
        /// </summary>
        /// <param name="ptr">start of the buffer</param>
        /// <param name="length">number of bytes to hash</param>
        /// <param name="seed">initial seed</param>
        /// <returns>xxHash</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private static unsafe ulong ComputeHash(byte* ptr, int length, ulong seed)
        {
            byte* end = ptr + length;
            ulong h64;

            if (length >= 32)
            {
                byte* limit = end - 32;

                // Four parallel accumulators, 32 bytes per round.
                ulong v1 = seed + Prime1 + Prime2;
                ulong v2 = seed + Prime2;
                ulong v3 = seed + 0;
                ulong v4 = seed - Prime1;

                do
                {
                    v1 += *(ulong*)ptr * Prime2;
                    v1 = RotateLeft(v1, 31); // rotl 31
                    v1 *= Prime1;
                    ptr += 8;

                    v2 += *(ulong*)ptr * Prime2;
                    v2 = RotateLeft(v2, 31); // rotl 31
                    v2 *= Prime1;
                    ptr += 8;

                    v3 += *(ulong*)ptr * Prime2;
                    v3 = RotateLeft(v3, 31); // rotl 31
                    v3 *= Prime1;
                    ptr += 8;

                    v4 += *(ulong*)ptr * Prime2;
                    v4 = RotateLeft(v4, 31); // rotl 31
                    v4 *= Prime1;
                    ptr += 8;
                } while (ptr <= limit);

                h64 = RotateLeft(v1, 1) +  // rotl 1
                      RotateLeft(v2, 7) +  // rotl 7
                      RotateLeft(v3, 12) + // rotl 12
                      RotateLeft(v4, 18);  // rotl 18

                // merge round
                v1 *= Prime2;
                v1 = RotateLeft(v1, 31); // rotl 31
                v1 *= Prime1;
                h64 ^= v1;
                h64 = h64 * Prime1 + Prime4;

                // merge round
                v2 *= Prime2;
                v2 = RotateLeft(v2, 31); // rotl 31
                v2 *= Prime1;
                h64 ^= v2;
                h64 = h64 * Prime1 + Prime4;

                // merge round
                v3 *= Prime2;
                v3 = RotateLeft(v3, 31); // rotl 31
                v3 *= Prime1;
                h64 ^= v3;
                h64 = h64 * Prime1 + Prime4;

                // merge round
                v4 *= Prime2;
                v4 = RotateLeft(v4, 31); // rotl 31
                v4 *= Prime1;
                h64 ^= v4;
                h64 = h64 * Prime1 + Prime4;
            }
            else
            {
                h64 = seed + Prime5;
            }

            h64 += (ulong)length;

            // finalize: consume the remaining 0..31 bytes in 8/4/1 byte steps.
            while (ptr <= end - 8)
            {
                ulong t1 = *(ulong*)ptr * Prime2;
                t1 = RotateLeft(t1, 31); // rotl 31
                t1 *= Prime1;
                h64 ^= t1;
                h64 = RotateLeft(h64, 27) * Prime1 + Prime4; // (rotl 27) * p1 + p4
                ptr += 8;
            }

            if (ptr <= end - 4)
            {
                h64 ^= *(uint*)ptr * Prime1;
                h64 = RotateLeft(h64, 23) * Prime2 + Prime3; // (rotl 23) * p2 + p3
                ptr += 4;
            }

            while (ptr < end)
            {
                h64 ^= *ptr * Prime5;
                h64 = RotateLeft(h64, 11) * Prime1; // (rotl 11) * p1
                ptr += 1;
            }

            // avalanche: mix the final bits so every input bit affects the output.
            h64 ^= h64 >> 33;
            h64 *= Prime2;
            h64 ^= h64 >> 29;
            h64 *= Prime3;
            h64 ^= h64 >> 32;
            return h64;
        }

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private static ulong RotateLeft(ulong value, int offset)
        {
            // BUGFIX: the original condition used `NETCORE3_1`, which is not a
            // standard target-framework symbol (the correct one is
            // `NETCOREAPP3_1`), so the intrinsic path was never compiled there.
#if NETCOREAPP3_1 || NET5_0 || NET5_0_OR_GREATER
            return System.Numerics.BitOperations.RotateLeft(value, offset);
#else
            return (value << offset) | (value >> (64 - offset));
#endif
        }
    }
}
<|start_filename|>src/EFCoreSecondLevelCacheInterceptor/IEFHashProvider.cs<|end_filename|>
namespace EFCoreSecondLevelCacheInterceptor
{
    /// <summary>
    /// A hash provider contract. Implementations must be deterministic:
    /// the same input always yields the same 64-bit hash.
    /// </summary>
    public interface IEFHashProvider
    {
        /// <summary>
        /// Computes the Hash of the input string.
        /// </summary>
        /// <param name="data">the input string</param>
        /// <returns>Hash</returns>
        ulong ComputeHash(string data);

        /// <summary>
        /// Computes the hash of the input array.
        /// </summary>
        /// <param name="data">the input array</param>
        /// <returns>Hash</returns>
        ulong ComputeHash(byte[] data);

        /// <summary>
        /// Computes the hash of the input byte array.
        /// </summary>
        /// <param name="data">the input byte array</param>
        /// <param name="offset">start offset</param>
        /// <param name="len">length</param>
        /// <param name="seed">initial seed</param>
        /// <returns>Hash</returns>
        ulong ComputeHash(byte[] data, int offset, int len, uint seed);
    }
}
<|start_filename|>src/Tests/Issues/Issue123WithMessagePack/DataLayer/MsSqlContextFactory.cs<|end_filename|>
using Microsoft.EntityFrameworkCore.Design;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using System;
using System.IO;
using Microsoft.Extensions.DependencyInjection;
namespace Issue123WithMessagePack.DataLayer
{
    /// <summary>
    /// Design-time factory so the `dotnet ef` tooling can construct an
    /// <see cref="ApplicationDbContext"/> outside the normal application host.
    /// </summary>
    public class MsSqlContextFactory : IDesignTimeDbContextFactory<ApplicationDbContext>
    {
        /// <summary>
        /// Builds the context from appsettings.json in the current directory.
        /// </summary>
        public ApplicationDbContext CreateDbContext(string[] args)
        {
            var services = new ServiceCollection();

            var contentRoot = Directory.GetCurrentDirectory();
            Console.WriteLine($"Using `{contentRoot}` as the ContentRootPath");

            var configuration =
                new ConfigurationBuilder()
                    .SetBasePath(contentRoot)
                    .AddJsonFile("appsettings.json", optional: false, reloadOnChange: true)
                    .Build();
            services.AddSingleton(_ => configuration);

            var builder = new DbContextOptionsBuilder<ApplicationDbContext>();
            builder.UseSqlServer(EFServiceProvider.GetConnectionString(contentRoot, configuration));
            return new ApplicationDbContext(builder.Options);
        }
    }
}
<|start_filename|>src/EFCoreSecondLevelCacheInterceptor/EFSqlCommandsProcessor.cs<|end_filename|>
using System;
using System.Linq;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading;
using Microsoft.EntityFrameworkCore;
using System.Reflection;
namespace EFCoreSecondLevelCacheInterceptor
{
    /// <summary>
    /// SqlCommands Utils
    /// </summary>
    public class EFSqlCommandsProcessor : IEFSqlCommandsProcessor
    {
        // Leading keywords (note the trailing spaces) that mark a CRUD statement.
        // Hoisted to a static so the array isn't re-allocated on every call.
        private static readonly string[] CrudMarkers = { "insert ", "update ", "delete ", "create " };

        // SQL keywords that are immediately followed by a table name.
        private static readonly string[] TableMarkers = { "FROM", "JOIN", "INTO", "UPDATE" };

        private static readonly Type IEntityType =
            Type.GetType("Microsoft.EntityFrameworkCore.Metadata.IEntityType, Microsoft.EntityFrameworkCore") ?? throw new TypeLoadException("Couldn't load Microsoft.EntityFrameworkCore.Metadata.IEntityType");

        // `ClrType` may be declared on IEntityType itself or on one of its
        // base interfaces, depending on the EF Core version; search them all.
        private static readonly PropertyInfo ClrTypePropertyInfo =
            IEntityType.GetInterfaces()
                .Union(new Type[] { IEntityType })
                .Select(i => i.GetProperty("ClrType", BindingFlags.Public | BindingFlags.Instance))
                .Distinct()
                .FirstOrDefault(propertyInfo => propertyInfo != null) ?? throw new KeyNotFoundException("Couldn't find `ClrType` on IEntityType.");

        private static readonly Type RelationalEntityTypeExtensionsType =
            Type.GetType("Microsoft.EntityFrameworkCore.RelationalEntityTypeExtensions, Microsoft.EntityFrameworkCore.Relational") ?? throw new TypeLoadException("Couldn't load Microsoft.EntityFrameworkCore.RelationalEntityTypeExtensions");

        private static readonly MethodInfo GetTableNameMethodInfo =
            RelationalEntityTypeExtensionsType.GetMethod("GetTableName", BindingFlags.Static | BindingFlags.Public)
            ?? throw new KeyNotFoundException("Couldn't find `GetTableName()` on RelationalEntityTypeExtensions.");

        // Cache of table names per DbContext type (computed once, lazily).
        private readonly ConcurrentDictionary<Type, Lazy<List<TableEntityInfo>>> _contextTableNames = new();

        // Cache of parsed table names per SQL text (keyed by its hash).
        private readonly ConcurrentDictionary<string, Lazy<SortedSet<string>>> _commandTableNames = new(StringComparer.OrdinalIgnoreCase);

        private readonly IEFHashProvider _hashProvider;

        /// <summary>
        /// SqlCommands Utils
        /// </summary>
        public EFSqlCommandsProcessor(IEFHashProvider hashProvider)
        {
            _hashProvider = hashProvider ?? throw new ArgumentNullException(nameof(hashProvider));
        }

        /// <summary>
        /// Is `insert`, `update` or `delete`?
        /// </summary>
        public bool IsCrudCommand(string text)
        {
            if (string.IsNullOrWhiteSpace(text))
            {
                return false;
            }

            foreach (var line in text.Split('\n'))
            {
                // PERF: trim each line once, not once per marker.
                var trimmedLine = line.Trim();
                foreach (var marker in CrudMarkers)
                {
                    if (trimmedLine.StartsWith(marker, StringComparison.OrdinalIgnoreCase))
                    {
                        return true;
                    }
                }
            }

            return false;
        }

        /// <summary>
        /// Returns all of the given context's table names.
        /// </summary>
        public IList<TableEntityInfo> GetAllTableNames(DbContext context)
        {
            if (context == null)
            {
                throw new ArgumentNullException(nameof(context));
            }

            return _contextTableNames.GetOrAdd(context.GetType(),
                _ => new Lazy<List<TableEntityInfo>>(() => getTableNames(context),
                    LazyThreadSafetyMode.ExecutionAndPublication)).Value;
        }

        private static List<TableEntityInfo> getTableNames(DbContext context)
        {
            var tableNames = new List<TableEntityInfo>();
            foreach (var entityType in context.Model.GetEntityTypes())
            {
                var clrType = getClrType(entityType);
                tableNames.Add(
                    new TableEntityInfo
                    {
                        ClrType = clrType,
                        // Fall back to the CLR type name for entities without a table.
                        TableName = getTableName(entityType) ?? clrType.ToString()
                    });
            }

            return tableNames;
        }

        // Invokes RelationalEntityTypeExtensions.GetTableName via reflection.
        private static string? getTableName(object entityType)
        {
            return GetTableNameMethodInfo.Invoke(null, new[] { entityType }) as string;
        }

        // Reads IEntityType.ClrType via the cached PropertyInfo.
        private static Type getClrType(object entityType)
        {
            var value = ClrTypePropertyInfo.GetValue(entityType) ?? throw new InvalidOperationException($"Couldn't get the ClrType value of `{entityType}`");
            return (Type)value;
        }

        /// <summary>
        /// Extracts the table names of an SQL command.
        /// </summary>
        public SortedSet<string> GetSqlCommandTableNames(string commandText)
        {
            // Key the cache by the command's hash instead of the full text.
            var commandTextKey = $"{_hashProvider.ComputeHash(commandText):X}";
            return _commandTableNames.GetOrAdd(commandTextKey,
                _ => new Lazy<SortedSet<string>>(() => getRawSqlCommandTableNames(commandText),
                    LazyThreadSafetyMode.ExecutionAndPublication)).Value;
        }

        /// <summary>
        /// Extracts the entity types of an SQL command.
        /// </summary>
        public IList<Type> GetSqlCommandEntityTypes(string commandText, IList<TableEntityInfo> allEntityTypes)
        {
            var commandTableNames = GetSqlCommandTableNames(commandText);
            return allEntityTypes.Where(entityType => commandTableNames.Contains(entityType.TableName))
                .Select(entityType => entityType.ClrType)
                .ToList();
        }

        // Token-level scan: the word after FROM/JOIN/INTO/UPDATE is taken as a
        // (possibly schema-qualified, possibly quoted) table name.
        private static SortedSet<string> getRawSqlCommandTableNames(string commandText)
        {
            var tables = new SortedSet<string>(StringComparer.OrdinalIgnoreCase);
            var sqlItems = commandText.Split(new[] { " ", "\r\n", Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);
            var sqlItemsLength = sqlItems.Length;
            for (var i = 0; i < sqlItemsLength; i++)
            {
                foreach (var marker in TableMarkers)
                {
                    if (!sqlItems[i].Equals(marker, StringComparison.OrdinalIgnoreCase))
                    {
                        continue;
                    }

                    ++i;
                    if (i >= sqlItemsLength)
                    {
                        continue;
                    }

                    var tableName = string.Empty;

                    // `schema.Table` -> take the part after the first dot.
                    var tableNameParts = sqlItems[i].Split(new[] { "." }, StringSplitOptions.RemoveEmptyEntries);
                    if (tableNameParts.Length == 1)
                    {
                        tableName = tableNameParts[0].Trim();
                    }
                    else if (tableNameParts.Length >= 2)
                    {
                        tableName = tableNameParts[1].Trim();
                    }

                    if (string.IsNullOrWhiteSpace(tableName))
                    {
                        continue;
                    }

                    // Strip the various provider-specific identifier quotes.
                    tableName = tableName.Replace("[", "", StringComparison.Ordinal)
                        .Replace("]", "", StringComparison.Ordinal)
                        .Replace("'", "", StringComparison.Ordinal)
                        .Replace("`", "", StringComparison.Ordinal)
                        .Replace("\"", "", StringComparison.Ordinal);
                    tables.Add(tableName);
                }
            }

            return tables;
        }
    }
}
<|start_filename|>src/EFCoreSecondLevelCacheInterceptor/SkipCacheSpecificQueriesOptions.cs<|end_filename|>
using System;
using System.Collections.Generic;
namespace EFCoreSecondLevelCacheInterceptor
{
    /// <summary>
    /// Options describing which queries should be excluded from caching,
    /// identified by entity type and/or table name. Inherits the general
    /// settings of <see cref="CacheAllQueriesOptions"/>.
    /// </summary>
    public class SkipCacheSpecificQueriesOptions : CacheAllQueriesOptions
    {
        /// <summary>
        /// The given entity types (read-only; set via the constructor).
        /// NOTE(review): the original wording said "to cache", but given the
        /// class name these presumably identify queries to skip — confirm
        /// against the consumer of this options class.
        /// </summary>
        public IList<Type>? EntityTypes { get; }

        /// <summary>
        /// The given table names (see the note on <see cref="EntityTypes"/>).
        /// </summary>
        public IEnumerable<string>? TableNames { set; get; }

        /// <summary>
        /// Creates the options with an optional list of entity types.
        /// </summary>
        public SkipCacheSpecificQueriesOptions(IList<Type>? entityTypes)
        {
            EntityTypes = entityTypes;
        }
    }
}
<|start_filename|>fields_test.go<|end_filename|>
package struc
import (
"bytes"
"reflect"
"testing"
)
var refVal = reflect.ValueOf(reference)
// TestFieldsParse checks that the reference struct parses into a field
// list without error.
func TestFieldsParse(t *testing.T) {
	fields, err := parseFields(refVal)
	if err != nil {
		t.Fatal(err)
	}
	_ = fields
}
// TestFieldsString exercises Fields.String for coverage.
// BUGFIX: the parse error was previously discarded, so a failing
// parseFields would have called String() on a nil/invalid result.
func TestFieldsString(t *testing.T) {
	fields, err := parseFields(refVal)
	if err != nil {
		t.Fatal(err)
	}
	fields.String()
}
// sizefromStruct exercises `sizeof` tags with both an unsigned (Size1)
// and a signed (Size2) length field driving a byte-slice payload.
type sizefromStruct struct {
	Size1 uint `struc:"sizeof=Var1"`
	Var1  []byte
	Size2 int `struc:"sizeof=Var2"`
	Var2  []byte
}
// TestFieldsSizefrom round-trips a struct whose slice lengths come from
// sibling `sizeof` fields through Pack and Unpack.
func TestFieldsSizefrom(t *testing.T) {
	input := sizefromStruct{
		Var1: []byte{1, 2, 3},
		Var2: []byte{4, 5, 6},
	}
	var wire bytes.Buffer
	if err := Pack(&wire, &input); err != nil {
		t.Fatal(err)
	}
	if err := Unpack(&wire, &input); err != nil {
		t.Fatal(err)
	}
}
// sizefromStructBad has a string `sizeof` field, which is an invalid
// length type; packing it is expected to panic (see TestFieldsSizefromBad).
type sizefromStructBad struct {
	Size1 string `struc:"sizeof=Var1"`
	Var1  []byte
}
// TestFieldsSizefromBad asserts that Pack panics when a `sizeof` field
// has a non-integer type.
func TestFieldsSizefromBad(t *testing.T) {
	defer func() {
		if recover() == nil {
			t.Fatal("failed to panic on bad sizeof type")
		}
	}()
	bad := &sizefromStructBad{Var1: []byte{1, 2, 3}}
	var wire bytes.Buffer
	// Note: &bad (a **sizefromStructBad) mirrors the original call exactly.
	Pack(&wire, &bad)
}
// StructWithinArray is a minimal element type for struct-array packing
// tests; note the single field is unexported.
type StructWithinArray struct {
	a uint32
}
// StructHavingArray embeds a fixed-size array of structs, with an explicit
// struc type tag naming the array type.
type StructHavingArray struct {
	Props [1]StructWithinArray `struc:"[1]StructWithinArray"`
}
// TestStrucArray round-trips a struct containing an array of structs.
func TestStrucArray(t *testing.T) {
	var wire bytes.Buffer
	src := &StructHavingArray{[1]StructWithinArray{}}
	if err := Pack(&wire, src); err != nil {
		t.Fatal(err)
	}
	dst := &StructHavingArray{}
	if err := Unpack(&wire, dst); err != nil {
		t.Fatal(err)
	}
}
| giter/struc |
<|start_filename|>source/SProxy/WSDLParser.cpp<|end_filename|>
//
// WSDLParser.cpp
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "Util.h"
#include "WSDLParser.h"
#include "WSDLTypesParser.h"
#include "WSDLMessageParser.h"
#include "WSDLPortTypeParser.h"
#include "WSDLBindingParser.h"
#include "WSDLServiceParser.h"
#include "WSDLDocument.h"
#include "Attribute.h"
#include "Content.h"
#include "Element.h"
#include "ComplexType.h"
// Default constructor: a detached parser with no reader/parent and no
// WSDL document yet (created later via CreateWSDLDocument).
CWSDLParser::CWSDLParser()
	:m_pDocument(NULL)
{
}
// Chained constructor: attaches to an existing SAX reader/parent parser at
// the given nesting level; the document is still created on demand.
CWSDLParser::CWSDLParser(ISAXXMLReader *pReader, CParserBase *pParent, DWORD dwLevel)
	:CParserBase(pReader, pParent, dwLevel), m_pDocument(NULL)
{
}
// <wsdl:definitions>: the document root. Nothing to do here -- attributes
// are handled by OnName/OnTargetNamespace and children by the tag handlers.
TAG_METHOD_IMPL( CWSDLParser, OnDefinitions )
{
	TRACE_PARSE_ENTRY();

	return S_OK;
}
// <wsdl:import>: records the namespace->location pair in m_importMap,
// preferring a local copy of the document from the discomap if one exists.
TAG_METHOD_IMPL( CWSDLParser, OnImport )
{
	//
	// TODO: parse import (?)
	//
	TRACE_PARSE_ENTRY();

	CStringW strNs;
	HRESULT hr = GetAttribute(pAttributes, L"namespace", sizeof("namespace")-1, strNs);
	if (SUCCEEDED(hr))
	{
		CStringW strLoc;
		hr = GetAttribute(pAttributes, L"location", sizeof("location")-1, strLoc);
		if (SUCCEEDED(hr))
		{
			// Substitute the locally cached document location when available.
			CStringW localStrLoc = GetDiscoMapDocument()->GetValue(strLoc);
			if (!localStrLoc.IsEmpty())
			{
				strLoc = localStrLoc;
			}

			if (m_importMap.SetAt(strNs, strLoc) != NULL)
			{
				return S_OK;
			}
			EmitErrorHr(E_OUTOFMEMORY);
		}
		else
		{
			OnMissingAttribute(TRUE, L"location", sizeof("location")-1, L"", 0);
		}
	}
	else
	{
		// BUGFIX: this branch previously reported "location" as the missing
		// attribute even though it is "namespace" that failed to resolve.
		OnMissingAttribute(TRUE, L"namespace", sizeof("namespace")-1, L"", 0);
	}

	return E_FAIL;
}
// <wsdl:documentation>: human-readable text only -- skip the subtree.
TAG_METHOD_IMPL( CWSDLParser, OnDocumentation )
{
	TRACE_PARSE_ENTRY();

	return SkipElement();
}
// <wsdl:types>: adds a CWSDLType to the document and hands parsing of the
// subtree to a child CWSDLTypesParser, which g_ParserList takes ownership of.
TAG_METHOD_IMPL( CWSDLParser, OnTypes )
{
	TRACE_PARSE_ENTRY();

	CWSDLDocument *pCurr = GetWSDLDocument();
	if (pCurr != NULL)
	{
		// The document owns the element (AddType allocates into it).
		CWSDLType *pElem = pCurr->AddType();
		if (pElem != NULL)
		{
			SetXMLElementInfo(pElem, pCurr, GetLocator());
			pElem->SetParentDocument(pCurr);
			CAutoPtr<CWSDLTypesParser> p( new CWSDLTypesParser(GetReader(), this, GetLevel(), pElem));
			if (p != NULL)
			{
				if (g_ParserList.AddHead(p) != NULL)
				{
					// Detach: the parser list owns the child parser now.
					return p.Detach()->GetAttributes(pAttributes);
				}
			}
		}
	}
	// Every failure path above is an allocation failure.
	EmitErrorHr(E_OUTOFMEMORY);
	return E_FAIL;
}
// <wsdl:message>: creates a CWSDLMessage, parses its attributes via a child
// parser (owned by g_ParserList), then transfers the element into the document.
TAG_METHOD_IMPL( CWSDLParser, OnMessage )
{
	TRACE_PARSE_ENTRY();

	CWSDLDocument *pCurr = GetWSDLDocument();
	if (pCurr != NULL)
	{
		// spElem guards the message until AddMessage succeeds.
		CAutoPtr<CWSDLMessage> spElem;
		spElem.Attach( new CWSDLMessage );
		if (spElem != NULL)
		{
			SetXMLElementInfo(spElem, pCurr, GetLocator());
			spElem->SetParentDocument(pCurr);
			CAutoPtr<CWSDLMessageParser> p( new CWSDLMessageParser(GetReader(), this, GetLevel(), spElem) );
			if (p != NULL)
			{
				if (g_ParserList.AddHead(p) != NULL)
				{
					if (SUCCEEDED(p.Detach()->GetAttributes(pAttributes)))
					{
						if (pCurr->AddMessage(spElem) != NULL)
						{
							// Ownership moved to the document.
							spElem.Detach();
							return S_OK;
						}
					}
				}
			}
		}
	}
	EmitErrorHr(E_OUTOFMEMORY);
	return E_FAIL;
}
// <wsdl:portType>: same pattern as OnMessage -- child parser owned by
// g_ParserList, element ownership transferred to the document on success.
TAG_METHOD_IMPL( CWSDLParser, OnPortType )
{
	TRACE_PARSE_ENTRY();

	CWSDLDocument *pCurr = GetWSDLDocument();
	if (pCurr != NULL)
	{
		CAutoPtr<CWSDLPortType> spElem;
		spElem.Attach( new CWSDLPortType );
		if (spElem != NULL)
		{
			SetXMLElementInfo(spElem, pCurr, GetLocator());
			spElem->SetParentDocument(pCurr);
			CAutoPtr<CWSDLPortTypeParser> p( new CWSDLPortTypeParser(GetReader(), this, GetLevel(), spElem) );
			if (p != NULL)
			{
				if (g_ParserList.AddHead(p) != NULL)
				{
					if (SUCCEEDED(p.Detach()->GetAttributes(pAttributes)))
					{
						if (pCurr->AddPortType(spElem) != NULL)
						{
							// Ownership moved to the document.
							spElem.Detach();
							return S_OK;
						}
					}
				}
			}
		}
	}
	EmitErrorHr(E_OUTOFMEMORY);
	return E_FAIL;
}
// <wsdl:binding>: creates a CWSDLBinding and parses its attributes via a
// child CWSDLBindingParser.
// BUGFIX: the child parser was previously allocated with a raw `new`, never
// added to g_ParserList (unlike OnTypes/OnMessage/OnPortType/OnService) and
// never freed -- a leak on every binding. It now follows the sibling pattern.
TAG_METHOD_IMPL( CWSDLParser, OnBinding )
{
	TRACE_PARSE_ENTRY();

	CWSDLDocument *pCurr = GetWSDLDocument();
	if (pCurr != NULL)
	{
		CAutoPtr<CWSDLBinding> spElem;
		spElem.Attach( new CWSDLBinding );
		if (spElem != NULL)
		{
			SetXMLElementInfo(spElem, pCurr, GetLocator());
			spElem->SetParentDocument(pCurr);
			CAutoPtr<CWSDLBindingParser> p( new CWSDLBindingParser(GetReader(), this, GetLevel(), spElem) );
			if (p != NULL)
			{
				if (g_ParserList.AddHead(p) != NULL)
				{
					if (SUCCEEDED(p.Detach()->GetAttributes(pAttributes)))
					{
						if (pCurr->AddBinding(spElem) != NULL)
						{
							// Ownership moved to the document.
							spElem.Detach();
							return S_OK;
						}
					}
				}
			}
		}
	}
	EmitErrorHr(E_OUTOFMEMORY);
	return E_FAIL;
}
// <wsdl:service>: same pattern as OnMessage -- child parser owned by
// g_ParserList, element ownership transferred to the document on success.
TAG_METHOD_IMPL( CWSDLParser, OnService )
{
	TRACE_PARSE_ENTRY();

	CWSDLDocument *pCurr = GetWSDLDocument();
	if (pCurr != NULL)
	{
		CAutoPtr<CWSDLService> spElem;
		spElem.Attach( new CWSDLService );
		if (spElem != NULL)
		{
			SetXMLElementInfo(spElem, pCurr, GetLocator());
			spElem->SetParentDocument(pCurr);
			CAutoPtr<CWSDLServiceParser> p( new CWSDLServiceParser(GetReader(), this, GetLevel(), spElem) );
			if (p != NULL)
			{
				if (g_ParserList.AddHead(p) != NULL)
				{
					if (SUCCEEDED(p.Detach()->GetAttributes(pAttributes)))
					{
						if (pCurr->AddService(spElem) != NULL)
						{
							// Ownership moved to the document.
							spElem.Detach();
							return S_OK;
						}
					}
				}
			}
		}
	}
	EmitErrorHr(E_OUTOFMEMORY);
	return E_FAIL;
}
// definitions/@name: stored on the WSDL document.
ATTR_METHOD_IMPL( CWSDLParser, OnName )
{
	TRACE_PARSE_ENTRY();

	CWSDLDocument * pCurr = GetWSDLDocument();
	if (pCurr != NULL)
	{
		return pCurr->SetName(wszValue, cchValue);
	}
	// No document at attribute time is an internal parser error.
	EmitError(IDS_SDL_INTERNAL);
	return E_FAIL;
}
// definitions/@targetNamespace: stored on the WSDL document.
ATTR_METHOD_IMPL( CWSDLParser, OnTargetNamespace )
{
	TRACE_PARSE_ENTRY();

	CWSDLDocument * pCurr = GetWSDLDocument();
	if (pCurr != NULL)
	{
		return pCurr->SetTargetNamespace(wszValue, cchValue);
	}
	// No document at attribute time is an internal parser error.
	EmitError(IDS_SDL_INTERNAL);
	return E_FAIL;
}
// Allocates the parser's WSDL document. m_pDocument owns it; returns the
// raw pointer (NULL on allocation failure).
CWSDLDocument * CWSDLParser::CreateWSDLDocument()
{
	m_pDocument.Attach( new CWSDLDocument );
	return m_pDocument;
}
// SAX callback: records an xmlns prefix->URI mapping on the WSDL document.
// BUGFIX: a missing document is an internal error, not an allocation
// failure -- report it the same way the attribute handlers (OnName,
// OnTargetNamespace) do, instead of emitting E_OUTOFMEMORY.
HRESULT __stdcall CWSDLParser::startPrefixMapping(
	const wchar_t *wszPrefix,
	int cchPrefix,
	const wchar_t *wszUri,
	int cchUri)
{
	CWSDLDocument * pCurr = GetWSDLDocument();
	if (pCurr != NULL)
	{
		return pCurr->SetNamespaceUri(wszPrefix, cchPrefix, wszUri, cchUri);
	}
	EmitError(IDS_SDL_INTERNAL);
	return E_FAIL;
}
<|start_filename|>include/atlsession.h<|end_filename|>
// This is a part of the Active Template Library.
// Copyright (C) Microsoft Corporation
// All rights reserved.
//
// This source code is only intended as a supplement to the
// Active Template Library Reference and related
// electronic documentation provided with the library.
// See these sources for detailed information regarding the
// Active Template Library product.
#ifndef __ATLSESSION_H__
#define __ATLSESSION_H__
#pragma once
#pragma warning(push)
#pragma warning(disable: 4702) // unreachable code
#include <atldbcli.h>
#include <atlcom.h>
#include <atlstr.h>
#include <stdio.h>
#include <atlcoll.h>
#include <atltime.h>
#include <atlcrypt.h>
#include <atlenc.h>
#include <atlutil.h>
#include <atlcache.h>
#include <atlspriv.h>
#include <atlsiface.h>
#pragma warning(disable: 4625) // copy constructor could not be generated because a base class copy constructor is inaccessible
#pragma warning(disable: 4626) // assignment operator could not be generated because a base class assignment operator is inaccessible
#ifndef MAX_SESSION_KEY_LEN
#define MAX_SESSION_KEY_LEN 128
#endif
#ifndef MAX_VARIABLE_NAME_LENGTH
#define MAX_VARIABLE_NAME_LENGTH 50
#endif
#ifndef MAX_VARIABLE_VALUE_LENGTH
#define MAX_VARIABLE_VALUE_LENGTH 1024
#endif
#ifndef MAX_CONNECTION_STRING_LEN
#define MAX_CONNECTION_STRING_LEN 2048
#endif
#ifndef SESSION_COOKIE_NAME
#define SESSION_COOKIE_NAME "SESSIONID"
#endif
#ifndef ATL_SESSION_TIMEOUT
#define ATL_SESSION_TIMEOUT 600000 //10 min
#endif
#ifndef ATL_SESSION_SWEEPER_TIMEOUT
#define ATL_SESSION_SWEEPER_TIMEOUT 1000 // 1sec
#endif
#define INVALID_DB_SESSION_POS 0x0
#define ATL_DBSESSION_ID _T("__ATL_SESSION_DB_CONNECTION")
#pragma pack(push,_ATL_PACKING)
namespace ATL {
// CSessionNameGenerator
// This is a helper class that generates random data for session key
// names. This class tries to use the CryptoApi to generate random
// bytes for the session key name. If the CryptoApi isn't available
// then the CRT rand() is used to generate the random bytes. This
// class's GetNewSessionName member function is used to actually
// generate the session name.
// Generates random, base64-encoded session key names. Prefers CryptoAPI
// randomness; falls back to seeded rand() when no crypto provider exists.
class CSessionNameGenerator :
	public CCryptProv
{
public:
	// true if InitVerifyContext failed and the rand() fallback must be used.
	bool m_bCryptNotAvailable;
	enum {MIN_SESSION_KEY_LEN=5};

	CSessionNameGenerator() throw() :
		m_bCryptNotAvailable(false)
	{
		// Note that the crypto api is being
		// initialized with no private key
		// information
		HRESULT hr = InitVerifyContext();
		m_bCryptNotAvailable = FAILED(hr) ? true : false;
	}

	// This function creates a new session name and base64 encodes it.
	// The base64 encoding algorithm used needs at least MIN_SESSION_KEY_LEN
	// bytes to work correctly. Since we stack allocate the temporary
	// buffer that holds the key name, the buffer must be less than or equal to
	// the MAX_SESSION_KEY_LEN in size.
	HRESULT GetNewSessionName(__out_ecount_part_z(*pdwSize, *pdwSize) LPSTR szNewID, __inout DWORD *pdwSize) throw()
	{
		HRESULT hr = E_FAIL;

		if (!pdwSize)
			return E_POINTER;
		if (*pdwSize < MIN_SESSION_KEY_LEN ||
			*pdwSize > MAX_SESSION_KEY_LEN)
			return E_INVALIDARG;
		if (!szNewID)
			return E_POINTER;

		BYTE key[MAX_SESSION_KEY_LEN] = {0x0};

		// calculate the number of random bytes that, once base64 encoded
		// (with padding), will still fit in the caller's buffer
		DWORD dwDataSize = CalcMaxInputSize(*pdwSize);

		if (dwDataSize && *pdwSize >= (DWORD)(Base64EncodeGetRequiredLength(dwDataSize,
			ATL_BASE64_FLAG_NOCRLF)))
		{
			int dwKeySize = *pdwSize;
			hr = GenerateRandomName(key, dwDataSize);
			if (SUCCEEDED(hr))
			{
				if( Base64Encode(key,
					dwDataSize,
					szNewID,
					&dwKeySize,
					ATL_BASE64_FLAG_NOCRLF) )
				{
					// null terminate and report the size actually used
					szNewID[dwKeySize]=0;
					*pdwSize = dwKeySize+1;
				}
				else
					hr = E_FAIL;
			}
			else
			{
				// report the buffer size the caller would need
				*pdwSize = (DWORD)(Base64EncodeGetRequiredLength(dwDataSize,
					ATL_BASE64_FLAG_NOCRLF));
				return E_OUTOFMEMORY;
			}
		}
		return hr;
	}

	// Largest number of raw input bytes whose base64 encoding (plus NULL
	// terminator and padding) fits in an output buffer of nOutputSize chars.
	DWORD CalcMaxInputSize(DWORD nOutputSize) throw()
	{
		if (nOutputSize < (DWORD)MIN_SESSION_KEY_LEN)
			return 0;
		// subtract one from the output size to make room
		// for the NULL terminator in the output then
		// calculate the biggest number of input bytes that
		// when base64 encoded will fit in a buffer of size
		// nOutputSize (including base64 padding)
		int nInputSize = ((nOutputSize-1)*3)/4;
		int factor = ((nInputSize*4)/3)%4;
		if (factor)
			nInputSize -= factor;
		return nInputSize;
	}

	// Fills pBuff with dwBuffSize random bytes (CryptoAPI, or rand() fallback).
	HRESULT GenerateRandomName(BYTE *pBuff, DWORD dwBuffSize) throw()
	{
		if (!pBuff)
			return E_POINTER;
		if (!dwBuffSize)
			return E_UNEXPECTED;

		if (!m_bCryptNotAvailable && GetHandle())
		{
			// Use the crypto api to generate random data.
			return GenRandom(dwBuffSize, pBuff);
		}

		// CryptoApi isn't available so we generate
		// random data using rand. We seed the random
		// number generator with a seed that is a combination
		// of bytes from an arbitrary number and the system
		// time which changes every millisecond so it will
		// be different for every call to this function.
		FILETIME ft;
		GetSystemTimeAsFileTime(&ft);
		static DWORD dwVal = 0x21;
		// BUGFIX: the original seed expression incremented dwVal twice in
		// one unsequenced expression (undefined behavior). Read the counter
		// once and advance it explicitly instead.
		const DWORD dwCounter = dwVal;
		dwVal += 2;
		DWORD dwSeed = (dwCounter << 0x18) | (ft.dwLowDateTime & 0x00ffff00) | ((dwCounter + 1) & 0x000000ff);
		srand(dwSeed);
		BYTE *pCurr = pBuff;
		// fill buffer with random bytes
		for (int i=0; i < (int)dwBuffSize; i++)
		{
			*pCurr = (BYTE) (rand() & 0x000000ff);
			pCurr++;
		}
		return S_OK;
	}
};
//
// CDefaultQueryClass
// returns Query strings for use in SQL queries used
// by the database persisted session service.
// Supplies the parameterized SQL text used by the database-backed session
// service. Two tables are involved: SessionReferences (one row per session:
// id, last access, refcount, timeout) and SessionVariables (one row per
// session variable: id, name, serialized value). All statements use `?`
// placeholders bound via the accessor classes below.
class CDefaultQueryClass
{
public:
	// Delete a session row only once it is unreferenced AND expired.
	LPCTSTR GetSessionRefDelete() throw()
	{
		return _T("DELETE FROM SessionReferences ")
				_T("WHERE SessionID=? AND RefCount <= 0 ")
				_T("AND DATEDIFF(millisecond, LastAccess, getdate()) > TimeoutMs");
	}

	// Returns the id iff the session has exceeded its timeout.
	LPCTSTR GetSessionRefIsExpired() throw()
	{
		return _T("SELECT SessionID FROM SessionReferences ")
				_T("WHERE (SessionID=?) AND (DATEDIFF(millisecond, LastAccess, getdate()) > TimeoutMs)");
	}

	// Unconditional delete (final cleanup).
	LPCTSTR GetSessionRefDeleteFinal() throw()
	{
		return _T("DELETE FROM SessionReferences ")
				_T("WHERE SessionID=?");
	}

	// New session row: refcount starts at 1, LastAccess = now.
	LPCTSTR GetSessionRefCreate() throw()
	{
		return _T("INSERT INTO SessionReferences ")
				_T("(SessionID, LastAccess, RefCount, TimeoutMs) ")
				_T("VALUES (?, getdate(), 1, ?)");
	}

	LPCTSTR GetSessionRefUpdateTimeout() throw()
	{
		return _T("UPDATE SessionReferences ")
				_T("SET TimeoutMs=? WHERE SessionID=?");
	}

	// AddRef/RemoveRef also refresh LastAccess.
	LPCTSTR GetSessionRefAddRef() throw()
	{
		return _T("UPDATE SessionReferences ")
				_T("SET RefCount=RefCount+1, ")
				_T("LastAccess=getdate() ")
				_T("WHERE SessionID=?");
	}

	LPCTSTR GetSessionRefRemoveRef() throw()
	{
		return _T("UPDATE SessionReferences ")
				_T("SET RefCount=RefCount-1, ")
				_T("LastAccess=getdate() ")
				_T("WHERE SessionID=?");
	}

	// Touch the session (keep-alive).
	LPCTSTR GetSessionRefAccess() throw()
	{
		return _T("UPDATE SessionReferences ")
				_T("SET LastAccess=getdate() ")
				_T("WHERE SessionID=?");
	}

	LPCTSTR GetSessionRefSelect() throw()
	{
		return _T("SELECT * FROM SessionReferences ")
				_T("WHERE SessionID=?");
	}

	// Total number of sessions.
	LPCTSTR GetSessionRefGetCount() throw()
	{
		return _T("SELECT COUNT(*) FROM SessionReferences");
	}

	// Number of variables in one session.
	LPCTSTR GetSessionVarCount() throw()
	{
		return _T("SELECT COUNT(*) FROM SessionVariables WHERE SessionID=?");
	}

	LPCTSTR GetSessionVarInsert() throw()
	{
		return _T("INSERT INTO SessionVariables ")
				_T("(VariableValue, SessionID, VariableName) ")
				_T("VALUES (?, ?, ?)");
	}

	LPCTSTR GetSessionVarUpdate() throw()
	{
		return _T("UPDATE SessionVariables ")
				_T("SET VariableValue=? ")
				_T("WHERE SessionID=? AND VariableName=?");
	}

	LPCTSTR GetSessionVarDeleteVar() throw()
	{
		return _T("DELETE FROM SessionVariables ")
				_T("WHERE SessionID=? AND VariableName=?");
	}

	LPCTSTR GetSessionVarDeleteAllVars() throw()
	{
		return _T("DELETE FROM SessionVariables WHERE (SessionID=?)");
	}

	LPCTSTR GetSessionVarSelectVar()throw()
	{
		return _T("SELECT SessionID, VariableName, VariableValue ")
				_T("FROM SessionVariables ")
				_T("WHERE SessionID=? AND VariableName=?");
	}

	LPCTSTR GetSessionVarSelectAllVars() throw()
	{
		return _T("SELECT SessionID, VariableName, VariableValue ")
				_T("FROM SessionVariables ")
				_T("WHERE SessionID=?");
	}

	// Bulk update of every session's timeout.
	LPCTSTR GetSessionReferencesSet() throw()
	{
		return _T("UPDATE SessionReferences SET TimeoutMs=?");
	}
};
// Contains the data for the session variable accessors
// Shared storage for the session-variable accessors: fixed-size buffers for
// the session id, variable name, and the variable's serialized VARIANT value.
class CSessionDataBase
{
public:
	TCHAR m_szSessionID[MAX_SESSION_KEY_LEN];
	TCHAR m_VariableName[MAX_VARIABLE_NAME_LENGTH];
	// Serialized VARIANT bytes (via CVariantStream); m_VariableLen is the
	// number of valid bytes.
	BYTE m_VariableValue[MAX_VARIABLE_VALUE_LENGTH];
	DBLENGTH m_VariableLen;

	CSessionDataBase() throw()
	{
		m_szSessionID[0] = '\0';
		m_VariableName[0] = '\0';
		m_VariableValue[0] = '\0';
		m_VariableLen = 0;
	}

	// Copies the id/name and serializes pVal into m_VariableValue.
	// szSessionID is required; szVarName and pVal are optional.
	// Returns E_OUTOFMEMORY if a string doesn't fit its buffer,
	// E_INVALIDARG for a missing id or an oversized/empty serialized value.
	HRESULT Assign(LPCTSTR szSessionID, LPCTSTR szVarName, VARIANT *pVal) throw()
	{
		HRESULT hr = S_OK;
		CVariantStream stream;
		if ( szSessionID )
		{
			if (Checked::tcsnlen(szSessionID, MAX_SESSION_KEY_LEN)< MAX_SESSION_KEY_LEN)
				Checked::tcscpy_s(m_szSessionID, _countof(m_szSessionID), szSessionID);
			else
				hr = E_OUTOFMEMORY;
		}
		else
			return E_INVALIDARG;

		// Note: the dangling `else` below binds to the inner length check
		// (intended), not to the `hr == S_OK && szVarName` guard.
		if (hr == S_OK && szVarName)
			if (Checked::tcsnlen(szVarName, MAX_VARIABLE_NAME_LENGTH) < MAX_VARIABLE_NAME_LENGTH)
				Checked::tcscpy_s(m_VariableName, _countof(m_VariableName), szVarName);
			else
				hr = E_OUTOFMEMORY;

		if (hr == S_OK && pVal)
		{
			hr = stream.InsertVariant(pVal);
			if (hr == S_OK)
			{
				BYTE *pBytes = stream.m_stream;
				size_t size = stream.GetVariantSize();
				if (pBytes && size && size < MAX_VARIABLE_VALUE_LENGTH)
				{
					Checked::memcpy_s(m_VariableValue, MAX_VARIABLE_VALUE_LENGTH, pBytes, size);
					m_VariableLen = static_cast<DBLENGTH>(size);
				}
				else
					hr = E_INVALIDARG;
			}
		}
		return hr;
	}
};
// Use to select a session variable given the name
// of a session and the name of a variable.
class CSessionDataSelector : public CSessionDataBase
{
public:
	// Result columns: session id, variable name, serialized variant value.
	BEGIN_COLUMN_MAP(CSessionDataSelector)
		COLUMN_ENTRY(1, m_szSessionID)
		COLUMN_ENTRY(2, m_VariableName)
		COLUMN_ENTRY_LENGTH(3, m_VariableValue, m_VariableLen)
	END_COLUMN_MAP()
	// WHERE-clause inputs: session id + variable name.
	BEGIN_PARAM_MAP(CSessionDataSelector)
		SET_PARAM_TYPE(DBPARAMIO_INPUT)
		COLUMN_ENTRY(1, m_szSessionID)
		COLUMN_ENTRY(2, m_VariableName)
	END_PARAM_MAP()
};
// Use to select all session variables given the name of
// of a session.
class CAllSessionDataSelector : public CSessionDataBase
{
public:
	// Result columns: session id, variable name, serialized variant value.
	BEGIN_COLUMN_MAP(CAllSessionDataSelector)
		COLUMN_ENTRY(1, m_szSessionID)
		COLUMN_ENTRY(2, m_VariableName)
		COLUMN_ENTRY_LENGTH(3, m_VariableValue, m_VariableLen)
	END_COLUMN_MAP()
	// WHERE-clause input: session id only (selects every variable).
	BEGIN_PARAM_MAP(CAllSessionDataSelector)
		SET_PARAM_TYPE(DBPARAMIO_INPUT)
		COLUMN_ENTRY(1, m_szSessionID)
	END_PARAM_MAP()
};
// Use to update the value of a session variable
class CSessionDataUpdator : public CSessionDataBase
{
public:
	// Inputs in UPDATE order: new value, then the session id + variable
	// name that form the WHERE clause.
	BEGIN_PARAM_MAP(CSessionDataUpdator)
		SET_PARAM_TYPE(DBPARAMIO_INPUT)
		COLUMN_ENTRY_LENGTH(1, m_VariableValue, m_VariableLen)
		COLUMN_ENTRY(2, m_szSessionID)
		COLUMN_ENTRY(3, m_VariableName)
	END_PARAM_MAP()
};
// Use to delete a session variable given the
// session name and the name of the variable
class CSessionDataDeletor
{
public:
	CSessionDataDeletor()
	{
		m_szSessionID[0] = '\0';
		m_VariableName[0] = '\0';
	}

	TCHAR m_szSessionID[MAX_SESSION_KEY_LEN];
	TCHAR m_VariableName[MAX_VARIABLE_NAME_LENGTH];

	// Copies the (optional) session id and variable name into the fixed
	// buffers; E_OUTOFMEMORY if either string doesn't fit.
	HRESULT Assign(LPCTSTR szSessionID, LPCTSTR szVarName) throw()
	{
		if (szSessionID)
		{
			if (Checked::tcsnlen(szSessionID, MAX_SESSION_KEY_LEN) < MAX_SESSION_KEY_LEN)
				Checked::tcscpy_s(m_szSessionID, _countof(m_szSessionID), szSessionID);
			else
				return E_OUTOFMEMORY;
		}

		if (szVarName)
		{
			if(Checked::tcsnlen(szVarName, MAX_VARIABLE_NAME_LENGTH) < MAX_VARIABLE_NAME_LENGTH)
				Checked::tcscpy_s(m_VariableName, _countof(m_VariableName), szVarName);
			else
				return E_OUTOFMEMORY;
		}
		return S_OK;
	}

	// WHERE-clause inputs for the single-variable DELETE.
	BEGIN_PARAM_MAP(CSessionDataDeletor)
		SET_PARAM_TYPE(DBPARAMIO_INPUT)
		COLUMN_ENTRY(1, m_szSessionID)
		COLUMN_ENTRY(2, m_VariableName)
	END_PARAM_MAP()
};
// Parameter accessor for deleting every variable of one session.
class CSessionDataDeleteAll
{
public:
	TCHAR m_szSessionID[MAX_SESSION_KEY_LEN];

	// Copies the (required) session id; E_OUTOFMEMORY if it doesn't fit.
	HRESULT Assign(LPCTSTR szSessionID) throw()
	{
		if (!szSessionID)
			return E_INVALIDARG;
		if (Checked::tcsnlen(szSessionID, MAX_SESSION_KEY_LEN) < MAX_SESSION_KEY_LEN)
			Checked::tcscpy_s(m_szSessionID, _countof(m_szSessionID), szSessionID);
		else
			return E_OUTOFMEMORY;
		return S_OK;
	}

	BEGIN_PARAM_MAP(CSessionDataDeleteAll)
		SET_PARAM_TYPE(DBPARAMIO_INPUT)
		COLUMN_ENTRY(1, m_szSessionID)
	END_PARAM_MAP()
};
// Used for retrieving the count of session variables for
// a given session ID.
class CCountAccessor
{
public:
LONG m_nCount;
TCHAR m_szSessionID[MAX_SESSION_KEY_LEN];
// Zero the output count and the key buffer before any query runs.
CCountAccessor() throw()
{
m_szSessionID[0] = '\0';
m_nCount = 0;
}
// Copies szSessionID into the parameter buffer.
// Returns E_INVALIDARG for NULL, E_OUTOFMEMORY if the ID does not fit,
// S_OK on success.
HRESULT Assign(LPCTSTR szSessionID) throw()
{
if (!szSessionID)
return E_INVALIDARG;
if (Checked::tcsnlen(szSessionID, MAX_SESSION_KEY_LEN) < MAX_SESSION_KEY_LEN)
Checked::tcscpy_s(m_szSessionID, _countof(m_szSessionID), szSessionID);
else
return E_OUTOFMEMORY;
return S_OK;
}
// Output: first (and only) column of the COUNT query.
BEGIN_COLUMN_MAP(CCountAccessor)
COLUMN_ENTRY(1, m_nCount)
END_COLUMN_MAP()
// Input: the session ID being counted.
BEGIN_PARAM_MAP(CCountAccessor)
SET_PARAM_TYPE(DBPARAMIO_INPUT)
COLUMN_ENTRY(1, m_szSessionID)
END_PARAM_MAP()
};
// Used for updating entries in the session
// references table, given a session ID
class CSessionRefUpdator
{
public:
TCHAR m_SessionID[MAX_SESSION_KEY_LEN];
// Copies szSessionID into the parameter buffer.
// Returns E_INVALIDARG for NULL, E_OUTOFMEMORY if the ID does not fit,
// S_OK on success.
HRESULT Assign(LPCTSTR szSessionID) throw()
{
if (!szSessionID)
return E_INVALIDARG;
if (Checked::tcsnlen(szSessionID, MAX_SESSION_KEY_LEN) < MAX_SESSION_KEY_LEN)
Checked::tcscpy_s(m_SessionID, _countof(m_SessionID), szSessionID);
else
return E_OUTOFMEMORY;
return S_OK;
}
BEGIN_PARAM_MAP(CSessionRefUpdator)
SET_PARAM_TYPE(DBPARAMIO_INPUT)
COLUMN_ENTRY(1, m_SessionID)
END_PARAM_MAP()
};
class CSessionRefIsExpired
{
public:
TCHAR m_SessionID[MAX_SESSION_KEY_LEN];
TCHAR m_SessionIDOut[MAX_SESSION_KEY_LEN];
HRESULT Assign(LPCTSTR szSessionID) throw()
{
m_SessionIDOut[0]=0;
if (!szSessionID)
return E_INVALIDARG;
if (Checked::tcsnlen(szSessionID, MAX_SESSION_KEY_LEN) < MAX_SESSION_KEY_LEN)
Checked::tcscpy_s(m_SessionID, _countof(m_SessionID), szSessionID);
else
return E_OUTOFMEMORY;
return S_OK;
}
BEGIN_COLUMN_MAP(CSessionRefIsExpired)
COLUMN_ENTRY(1, m_SessionIDOut)
END_COLUMN_MAP()
BEGIN_PARAM_MAP(CSessionRefIsExpired)
SET_PARAM_TYPE(DBPARAMIO_INPUT)
COLUMN_ENTRY(1, m_SessionID)
END_PARAM_MAP()
};
// Input accessor for the query that sets the same timeout value on
// every row of the session references table
// (see CDBSessionServiceImplT::SetSessionTimeout).
class CSetAllTimeouts
{
public:
unsigned __int64 m_dwNewTimeout;
// Stores the new timeout (milliseconds, per the schema comment below)
// to be bound as the single query parameter. Always succeeds.
HRESULT Assign(unsigned __int64 dwNewValue)
{
m_dwNewTimeout = dwNewValue;
return S_OK;
}
BEGIN_PARAM_MAP(CSetAllTimeouts)
SET_PARAM_TYPE(DBPARAMIO_INPUT)
COLUMN_ENTRY(1, m_dwNewTimeout)
END_PARAM_MAP()
};
// Input accessor for updating a single session's timeout:
// binds the new timeout first, then the session ID key.
class CSessionRefUpdateTimeout
{
public:
TCHAR m_SessionID[MAX_SESSION_KEY_LEN];
unsigned __int64 m_nNewTimeout;
// Copies the session ID and stores the new timeout.
// Returns E_INVALIDARG for a NULL ID, E_OUTOFMEMORY if the ID does not
// fit, S_OK on success.
HRESULT Assign(LPCTSTR szSessionID, unsigned __int64 nNewTimeout) throw()
{
if (!szSessionID)
return E_INVALIDARG;
if (Checked::tcsnlen(szSessionID, MAX_SESSION_KEY_LEN) < MAX_SESSION_KEY_LEN)
Checked::tcscpy_s(m_SessionID, _countof(m_SessionID), szSessionID);
else
return E_OUTOFMEMORY;
m_nNewTimeout = nNewTimeout;
return S_OK;
}
BEGIN_PARAM_MAP(CSessionRefUpdateTimeout)
SET_PARAM_TYPE(DBPARAMIO_INPUT)
COLUMN_ENTRY(1, m_nNewTimeout)
COLUMN_ENTRY(2, m_SessionID)
END_PARAM_MAP()
};
// Accessor for selecting a row from the session references table by
// session ID (used to validate that a session exists).
class CSessionRefSelector
{
public:
TCHAR m_SessionID[MAX_SESSION_KEY_LEN];
int m_RefCount;
// Copies szSessionID into the parameter buffer.
// Returns E_INVALIDARG for NULL, E_OUTOFMEMORY if the ID does not fit,
// S_OK on success.
HRESULT Assign(LPCTSTR szSessionID) throw()
{
if (!szSessionID)
return E_INVALIDARG;
if (Checked::tcsnlen(szSessionID, MAX_SESSION_KEY_LEN) < MAX_SESSION_KEY_LEN)
Checked::tcscpy_s(m_SessionID, _countof(m_SessionID), szSessionID);
else
return E_OUTOFMEMORY;
return S_OK;
}
// Output columns: 1 = SessionID, 3 = RefCount. Ordinal 2 (LastAccess,
// per the SessionReferences schema comment below) is deliberately not
// bound.
BEGIN_COLUMN_MAP(CSessionRefSelector)
COLUMN_ENTRY(1, m_SessionID)
COLUMN_ENTRY(3, m_RefCount)
END_COLUMN_MAP()
BEGIN_PARAM_MAP(CSessionRefSelector)
SET_PARAM_TYPE(DBPARAMIO_INPUT)
COLUMN_ENTRY(1, m_SessionID)
END_PARAM_MAP()
};
// Output-only accessor for the query that counts rows in the session
// references table (see CDBSessionServiceImplT::GetSessionCount).
class CSessionRefCount
{
public:
LONG m_nCount;
BEGIN_COLUMN_MAP(CSessionRefCount)
COLUMN_ENTRY(1, m_nCount)
END_COLUMN_MAP()
};
// Used for creating new entries in the session
// references table.
class CSessionRefCreator
{
public:
TCHAR m_SessionID[MAX_SESSION_KEY_LEN];
unsigned __int64 m_TimeoutMs;
// Copies the session ID and stores the timeout; the timeout is only
// stored when the ID fits, so members stay consistent on failure.
// Returns E_INVALIDARG for a NULL ID, E_OUTOFMEMORY if the ID does not
// fit, S_OK on success.
HRESULT Assign(LPCTSTR szSessionID, unsigned __int64 timeout) throw()
{
if (!szSessionID)
return E_INVALIDARG;
if (Checked::tcsnlen(szSessionID, MAX_SESSION_KEY_LEN) < MAX_SESSION_KEY_LEN)
{
Checked::tcscpy_s(m_SessionID, _countof(m_SessionID), szSessionID);
m_TimeoutMs = timeout;
}
else
return E_OUTOFMEMORY;
return S_OK;
}
BEGIN_PARAM_MAP(CSessionRefCreator)
SET_PARAM_TYPE(DBPARAMIO_INPUT)
COLUMN_ENTRY(1, m_SessionID)
COLUMN_ENTRY(2, m_TimeoutMs)
END_PARAM_MAP()
};
// CDBSession
// This session persistence class persists session variables to
// an OLEDB datasource. The following table gives a general description
// of the table schema for the tables this class uses.
//
// TableName: SessionVariables
// Column Name Type Description
// 1 SessionID char[MAX_SESSION_KEY_LEN] Session Key name
// 2 VariableName char[MAX_VARIABLE_NAME_LENGTH] Variable Name
// 3 VariableValue varbinary[MAX_VARIABLE_VALUE_LENGTH] Variable Value
//
// TableName: SessionReferences
// Column Name Type Description
// 1 SessionID char[MAX_SESSION_KEY_LEN] Session Key Name.
// 2 LastAccess datetime Date and time of last access to this session.
// 3 RefCount int Current references on this session.
// 4 TimeoutMS int Timeout value for the session in milli seconds
// Callback type used to retrieve the OLEDB connection string for a
// service cookie (implemented by CDBSessionServiceImplT::GetProviderInfo).
typedef bool (*PFN_GETPROVIDERINFO)(DWORD_PTR, wchar_t **);
template <class QueryClass=CDefaultQueryClass>
class CDBSession:
public ISession,
public CComObjectRootEx<CComGlobalsThreadModel>
{
// Accessor used to iterate over all variables of one session.
typedef CCommand<CAccessor<CAllSessionDataSelector> > iterator_accessor;
public:
typedef QueryClass DBQUERYCLASS_TYPE;
BEGIN_COM_MAP(CDBSession)
COM_INTERFACE_ENTRY(ISession)
END_COM_MAP()
// Starts with the default timeout and an empty session name; the real
// session name is supplied later via Initialize().
CDBSession() throw():
m_dwTimeout(ATL_SESSION_TIMEOUT)
{
m_szSessionName[0] = '\0';
}
~CDBSession() throw()
{
}
// Releases this object's reference on the persisted session when the
// COM object is destroyed.
void FinalRelease()throw()
{
SessionUnlock();
}
// Sets (creates or updates) the named session variable. The VARIANT is
// persisted via an UPDATE first; if no row was touched, an INSERT is
// attempted instead.
STDMETHOD(SetVariable)(LPCSTR szName, VARIANT Val) throw()
{
HRESULT hr = E_FAIL;
if (!szName)
return E_INVALIDARG;
// Get the data connection for this thread.
CDataConnection dataconn;
hr = GetSessionConnection(&dataconn, m_spServiceProvider);
if (hr != S_OK)
return hr;
// Update the last access time for this session
hr = Access();
if (hr != S_OK)
return hr;
// Allocate an updator command and fill out it's input parameters.
CCommand<CAccessor<CSessionDataUpdator> > command;
_ATLTRY
{
CA2CT name(szName);
hr = command.Assign(m_szSessionName, name, &Val);
}
_ATLCATCHALL()
{
hr = E_OUTOFMEMORY;
}
if (hr != S_OK)
return hr;
// Try an update. Update will fail if the variable is not already there.
DBROWCOUNT nRows = 0;
hr = command.Open(dataconn,
m_QueryObj.GetSessionVarUpdate(),
NULL, &nRows, DBGUID_DEFAULT, false);
if (hr == S_OK && nRows <= 0)
hr = E_UNEXPECTED;
if (hr != S_OK)
{
// Try an insert
hr = command.Open(dataconn, m_QueryObj.GetSessionVarInsert(), NULL, &nRows, DBGUID_DEFAULT, false);
if (hr == S_OK && nRows <=0)
hr = E_UNEXPECTED;
}
return hr;
}
// Warning: For string data types, depending on the configuration of
// your database, strings might be returned with trailing white space.
// Retrieves the named session variable into *pVal (deserialized from
// the stored byte array via CComVariant::ReadFromStream).
STDMETHOD(GetVariable)(LPCSTR szName, VARIANT *pVal) throw()
{
HRESULT hr = E_FAIL;
if (!szName)
return E_INVALIDARG;
if (pVal)
VariantInit(pVal);
else
return E_POINTER;
// Get the data connection for this thread
CDataConnection dataconn;
hr = GetSessionConnection(&dataconn, m_spServiceProvider);
if (hr != S_OK)
return hr;
// Update the last access time for this session
hr = Access();
if (hr != S_OK)
return hr;
// Allocate a command a fill out it's input parameters.
CCommand<CAccessor<CSessionDataSelector> > command;
_ATLTRY
{
CA2CT name(szName);
hr = command.Assign(m_szSessionName, name, NULL);
}
_ATLCATCHALL()
{
hr = E_OUTOFMEMORY;
}
if (hr == S_OK)
{
hr = command.Open(dataconn, m_QueryObj.GetSessionVarSelectVar());
if (SUCCEEDED(hr))
{
if ( S_OK == (hr = command.MoveFirst()))
{
CStreamOnByteArray stream(command.m_VariableValue);
CComVariant vOut;
hr = vOut.ReadFromStream(static_cast<IStream*>(&stream));
if (hr == S_OK)
hr = vOut.Detach(pVal);
}
}
}
return hr;
}
// Deletes the named variable from this session. Returns E_FAIL when the
// DELETE affected no rows (variable did not exist).
STDMETHOD(RemoveVariable)(LPCSTR szName) throw()
{
HRESULT hr = E_FAIL;
if (!szName)
return E_INVALIDARG;
// Get the data connection for this thread.
CDataConnection dataconn;
hr = GetSessionConnection(&dataconn, m_spServiceProvider);
if (hr != S_OK)
return hr;
// update the last access time for this session
hr = Access();
if (hr != S_OK)
return hr;
// allocate a command and set it's input parameters
CCommand<CAccessor<CSessionDataDeletor> > command;
_ATLTRY
{
CA2CT name(szName);
hr = command.Assign(m_szSessionName, name);
}
_ATLCATCHALL()
{
return E_OUTOFMEMORY;
}
// execute the command
DBROWCOUNT nRows = 0;
if (hr == S_OK)
hr = command.Open(dataconn, m_QueryObj.GetSessionVarDeleteVar(),
NULL, &nRows, DBGUID_DEFAULT, false);
if (hr == S_OK && nRows <= 0)
hr = E_FAIL;
return hr;
}
// Gives the count of rows in the table for this session ID.
STDMETHOD(GetCount)(long *pnCount) throw()
{
HRESULT hr = S_OK;
if (pnCount)
*pnCount = 0;
else
return E_POINTER;
// Get the database connection for this thread.
CDataConnection dataconn;
hr = GetSessionConnection(&dataconn, m_spServiceProvider);
if (hr != S_OK)
return hr;
hr = Access();
if (hr != S_OK)
return hr;
CCommand<CAccessor<CCountAccessor> > command;
hr = command.Assign(m_szSessionName);
if (hr == S_OK)
{
hr = command.Open(dataconn, m_QueryObj.GetSessionVarCount());
if (hr == S_OK)
{
if (S_OK == (hr = command.MoveFirst()))
{
*pnCount = command.m_nCount;
hr = S_OK;
}
}
}
return hr;
}
// Deletes every variable belonging to this session. Note: unlike the
// other mutators, this does not call Access() first.
STDMETHOD(RemoveAllVariables)() throw()
{
HRESULT hr = E_UNEXPECTED;
// Get the data connection for this thread.
CDataConnection dataconn;
hr = GetSessionConnection(&dataconn, m_spServiceProvider);
if (hr != S_OK)
return hr;
CCommand<CAccessor<CSessionDataDeleteAll> > command;
hr = command.Assign(m_szSessionName);
if (hr != S_OK)
return hr;
// delete all session variables
hr = command.Open(dataconn, m_QueryObj.GetSessionVarDeleteAllVars(), NULL, NULL, DBGUID_DEFAULT, false);
return hr;
}
// Iteration of variables works by taking a snapshot
// of the sessions at the point in time BeginVariableEnum
// is called, and then keeping an index variable that you use to
// move through the snapshot rowset. It is important to know
// that the handle returned in phEnum is not thread safe. It
// should only be used by the calling thread.
STDMETHOD(BeginVariableEnum)(POSITION *pPOS, HSESSIONENUM *phEnum) throw()
{
HRESULT hr = E_FAIL;
if (!pPOS)
return E_POINTER;
if (phEnum)
*phEnum = NULL;
else
return E_POINTER;
// Get the data connection for this thread.
CDataConnection dataconn;
hr = GetSessionConnection(&dataconn, m_spServiceProvider);
if (hr != S_OK)
return hr;
// Update the last access time for this session.
hr = Access();
if (hr != S_OK)
return hr;
// Allocate a new iterator accessor and initialize it's input parameters.
// The accessor is heap-allocated because it must outlive this call; it
// is owned by the opaque HSESSIONENUM handle until CloseEnum deletes it.
iterator_accessor *pIteratorAccessor = NULL;
ATLTRYALLOC(pIteratorAccessor = new iterator_accessor);
if (!pIteratorAccessor)
return E_OUTOFMEMORY;
hr = pIteratorAccessor->Assign(m_szSessionName, NULL, NULL);
if (hr == S_OK)
{
// execute the command and move to the first row of the recordset.
hr = pIteratorAccessor->Open(dataconn,
m_QueryObj.GetSessionVarSelectAllVars());
if (hr == S_OK)
{
hr = pIteratorAccessor->MoveFirst();
if (hr == S_OK)
{
*pPOS = (POSITION) INVALID_DB_SESSION_POS + 1;
*phEnum = reinterpret_cast<HSESSIONENUM>(pIteratorAccessor);
}
}
if (hr != S_OK)
{
*pPOS = INVALID_DB_SESSION_POS;
*phEnum = NULL;
delete pIteratorAccessor;
}
}
return hr;
}
// The values for hEnum and pPos must have been initialized in a previous
// call to BeginVariableEnum. On success, the out variant will hold the next
// variable
STDMETHOD(GetNextVariable)(POSITION *pPOS, VARIANT *pVal, HSESSIONENUM hEnum, LPSTR szName=NULL, DWORD dwLen=0) throw()
{
if (!pPOS)
return E_INVALIDARG;
if (pVal)
VariantInit(pVal);
else
return E_POINTER;
if (!hEnum)
return E_UNEXPECTED;
if (*pPOS <= INVALID_DB_SESSION_POS)
return E_UNEXPECTED;
iterator_accessor *pIteratorAccessor = reinterpret_cast<iterator_accessor*>(hEnum);
// update the last access time.
HRESULT hr = Access();
POSITION posCurrent = *pPOS;
if (szName)
{
// caller wants entry name
_ATLTRY
{
CT2CA szVarName(pIteratorAccessor->m_VariableName);
if (szVarName != NULL && dwLen > Checked::strnlen(szVarName, dwLen))
{
Checked::strcpy_s(szName, dwLen, szVarName);
}
else
hr = E_OUTOFMEMORY; // buffer not big enough
}
_ATLCATCHALL()
{
hr = E_OUTOFMEMORY;
}
}
if (hr == S_OK)
{
// Deserialize the current row's value into the caller's VARIANT.
CStreamOnByteArray stream(pIteratorAccessor->m_VariableValue);
CComVariant vOut;
hr = vOut.ReadFromStream(static_cast<IStream*>(&stream));
if (hr == S_OK)
vOut.Detach(pVal);
else
return hr;
}
else
return hr;
hr = pIteratorAccessor->MoveNext();
*pPOS = ++posCurrent;
if (hr == DB_S_ENDOFROWSET)
{
// We're done iterating, reset everything
*pPOS = INVALID_DB_SESSION_POS;
hr = S_OK;
}
if (hr != S_OK)
{
VariantClear(pVal);
}
return hr;
}
// CloseEnum frees up any resources allocated by the iterator
STDMETHOD(CloseEnum)(HSESSIONENUM hEnum) throw()
{
iterator_accessor *pIteratorAccessor = reinterpret_cast<iterator_accessor*>(hEnum);
if (!pIteratorAccessor)
return E_INVALIDARG;
pIteratorAccessor->Close();
delete pIteratorAccessor;
return S_OK;
}
//
// Returns S_FALSE if it's not expired
// S_OK if it is expired and an error HRESULT
// if an error occurred.
STDMETHOD(IsExpired)() throw()
{
HRESULT hrRet = S_FALSE;
HRESULT hr = E_UNEXPECTED;
// Get the data connection for this thread.
CDataConnection dataconn;
hr = GetSessionConnection(&dataconn, m_spServiceProvider);
if (hr != S_OK)
return hr;
CCommand<CAccessor<CSessionRefIsExpired> > command;
hr = command.Assign(m_szSessionName);
if (hr != S_OK)
return hr;
hr = command.Open(dataconn, m_QueryObj.GetSessionRefIsExpired(),
NULL, NULL, DBGUID_DEFAULT, true);
if (hr == S_OK)
{
// The query returns this session's ID only when it is expired.
if (S_OK == command.MoveFirst())
{
if (!_tcscmp(command.m_SessionIDOut, m_szSessionName))
hrRet = S_OK;
}
}
if (hr == S_OK)
return hrRet;
return hr;
}
// Persists a new timeout (milliseconds) for this session in the
// session references table.
STDMETHOD(SetTimeout)(unsigned __int64 dwNewTimeout) throw()
{
HRESULT hr = E_UNEXPECTED;
// Get the data connection for this thread.
CDataConnection dataconn;
hr = GetSessionConnection(&dataconn, m_spServiceProvider);
if (hr != S_OK)
return hr;
// allocate a command and set it's input parameters
CCommand<CAccessor<CSessionRefUpdateTimeout> > command;
hr = command.Assign(m_szSessionName, dwNewTimeout);
if (hr != S_OK)
return hr;
hr = command.Open(dataconn, m_QueryObj.GetSessionRefUpdateTimeout(),
NULL, NULL, DBGUID_DEFAULT, false);
return hr;
}
// SessionLock increments the session reference count for this session.
// If there is not a session by this name in the session references table,
// a new session entry is created in the the table.
HRESULT SessionLock() throw()
{
HRESULT hr = E_UNEXPECTED;
if (!m_szSessionName || m_szSessionName[0]==0)
return hr; // no session to lock.
// retrieve the data connection for this thread
CDataConnection dataconn;
hr = GetSessionConnection(&dataconn, m_spServiceProvider);
if (hr != S_OK)
return hr;
// first try to update a session with this name
DBROWCOUNT nRows = 0;
CCommand<CAccessor<CSessionRefUpdator> > updator;
if (S_OK == updator.Assign(m_szSessionName))
{
if (S_OK != (hr = updator.Open(dataconn, m_QueryObj.GetSessionRefAddRef(),
NULL, &nRows, DBGUID_DEFAULT, false)) ||
nRows == 0)
{
// No session to update. Use the creator accessor
// to create a new session reference.
CCommand<CAccessor<CSessionRefCreator> > creator;
hr = creator.Assign(m_szSessionName, m_dwTimeout);
if (hr == S_OK)
hr = creator.Open(dataconn, m_QueryObj.GetSessionRefCreate(),
NULL, &nRows, DBGUID_DEFAULT, false);
}
}
// We should have been able to create or update a session.
ATLASSERT(nRows > 0);
if (hr == S_OK && nRows <= 0)
hr = E_UNEXPECTED;
return hr;
}
// SessionUnlock decrements the session RefCount for this session.
// Sessions cannot be removed from the database unless the session
// refcount is 0
HRESULT SessionUnlock() throw()
{
HRESULT hr = E_UNEXPECTED;
if (!m_szSessionName ||
m_szSessionName[0]==0)
return hr;
// get the data connection for this thread
CDataConnection dataconn;
hr = GetSessionConnection(&dataconn, m_spServiceProvider);
if (hr != S_OK)
return hr;
// The session must exist at this point in order to unlock it
// so we can just use the session updator here.
DBROWCOUNT nRows = 0;
CCommand<CAccessor<CSessionRefUpdator> > updator;
hr = updator.Assign(m_szSessionName);
if (hr == S_OK)
{
hr = updator.Open( dataconn,
m_QueryObj.GetSessionRefRemoveRef(),
NULL,
&nRows,
DBGUID_DEFAULT,
false);
}
if (hr != S_OK)
return hr;
// delete the session from the database if
// nobody else is using it and it's expired.
hr = FreeSession();
return hr;
}
// Access updates the last access time for the session. The access
// time for sessions is updated using the SQL GETDATE function on the
// database server so that all clients will be using the same clock
// to compare access times against.
HRESULT Access() throw()
{
HRESULT hr = E_UNEXPECTED;
if (!m_szSessionName ||
m_szSessionName[0]==0)
return hr; // no session to access
// get the data connection for this thread
CDataConnection dataconn;
hr = GetSessionConnection(&dataconn, m_spServiceProvider);
if (hr != S_OK)
return hr;
// The session reference entry in the references table must
// be created prior to calling this function so we can just
// use an updator to update the current entry.
CCommand<CAccessor<CSessionRefUpdator> > updator;
DBROWCOUNT nRows = 0;
hr = updator.Assign(m_szSessionName);
if (hr == S_OK)
{
hr = updator.Open( dataconn,
m_QueryObj.GetSessionRefAccess(),
NULL,
&nRows,
DBGUID_DEFAULT,
false);
}
ATLASSERT(nRows > 0);
if (hr == S_OK && nRows <= 0)
hr = E_UNEXPECTED;
return hr;
}
// If the session is expired and it's reference is 0,
// it can be deleted. SessionUnlock calls this function to
// unlock the session and delete it after we release a session
// lock. Note that our SQL command will only delete the session
// if it is expired and it's refcount is <= 0
HRESULT FreeSession() throw()
{
HRESULT hr = E_UNEXPECTED;
if (!m_szSessionName ||
m_szSessionName[0]==0)
return hr;
// Get the data connection for this thread.
CDataConnection dataconn;
hr = GetSessionConnection(&dataconn, m_spServiceProvider);
if (hr != S_OK)
return hr;
CCommand<CAccessor<CSessionRefUpdator> > updator;
// NOTE(review): updator.Open is called without a prior
// updator.Assign(m_szSessionName), so the bound m_SessionID parameter
// is uninitialized here. Presumably GetSessionRefDelete's SQL ignores
// the session-ID parameter (it deletes by refcount/expiry) -- verify
// against the query definition.
// The SQL for this command only deletes the
// session reference from the references table if it's access
// count is 0 and it has expired.
return updator.Open(dataconn,
m_QueryObj.GetSessionRefDelete(),
NULL,
NULL,
DBGUID_DEFAULT,
false);
}
// Initialize is called each time a new session is created.
// Stores the session name, service provider and provider-info callback,
// then takes the initial reference on the session via SessionLock().
HRESULT Initialize( LPCSTR szSessionName,
IServiceProvider *pServiceProvider,
DWORD_PTR dwCookie,
PFN_GETPROVIDERINFO pfnInfo) throw()
{
if (!szSessionName)
return E_INVALIDARG;
if (!pServiceProvider)
return E_INVALIDARG;
if (!pfnInfo)
return E_INVALIDARG;
m_pfnInfo = pfnInfo;
m_dwProvCookie = dwCookie;
m_spServiceProvider = pServiceProvider;
_ATLTRY
{
CA2CT tcsSessionName(szSessionName);
if (Checked::tcsnlen(tcsSessionName, MAX_SESSION_KEY_LEN) < MAX_SESSION_KEY_LEN)
Checked::tcscpy_s(m_szSessionName, _countof(m_szSessionName), tcsSessionName);
else
return E_OUTOFMEMORY;
}
_ATLCATCHALL()
{
return E_OUTOFMEMORY;
}
return SessionLock();
}
// Resolves the OLEDB connection string through the registered callback
// and opens/retrieves the per-thread data connection in *pConn.
HRESULT GetSessionConnection(CDataConnection *pConn,
IServiceProvider *pProv) throw()
{
if (!pProv)
return E_INVALIDARG;
if (!m_pfnInfo ||
!m_dwProvCookie)
return E_UNEXPECTED;
wchar_t *wszProv = NULL;
if (m_pfnInfo(m_dwProvCookie, &wszProv) && wszProv!=NULL)
{
return GetDataSource(pProv,
ATL_DBSESSION_ID,
wszProv,
pConn);
}
return E_FAIL;
}
protected:
TCHAR m_szSessionName[MAX_SESSION_KEY_LEN];
unsigned __int64 m_dwTimeout;
CComPtr<IServiceProvider> m_spServiceProvider;
// NOTE(review): m_dwProvCookie and m_pfnInfo are not set by the
// constructor; they remain unset until Initialize() is called.
// GetSessionConnection checks both before use.
DWORD_PTR m_dwProvCookie;
PFN_GETPROVIDERINFO m_pfnInfo;
DBQUERYCLASS_TYPE m_QueryObj;
}; // CDBSession
// Session service backed by an OLEDB datasource. Creates, looks up and
// closes CDBSession objects, and owns the connection string they use.
template <class TDBSession=CDBSession<> >
class CDBSessionServiceImplT
{
wchar_t m_szConnectionString[MAX_CONNECTION_STRING_LEN];
CComPtr<IServiceProvider> m_spServiceProvider;
typename TDBSession::DBQUERYCLASS_TYPE m_QueryObj;
public:
typedef const wchar_t* SERVICEIMPL_INITPARAM_TYPE;
CDBSessionServiceImplT() throw()
{
m_dwTimeout = ATL_SESSION_TIMEOUT;
m_szConnectionString[0] = '\0';
}
// PFN_GETPROVIDERINFO callback handed to each session: the cookie is a
// pointer to this service, and *ppszProvInfo receives a pointer to the
// service's own connection-string buffer (so the service must outlive
// the sessions that use it).
static bool GetProviderInfo(DWORD_PTR dwProvCookie, wchar_t **ppszProvInfo) throw()
{
if (dwProvCookie &&
ppszProvInfo)
{
CDBSessionServiceImplT<TDBSession> *pSvc =
reinterpret_cast<CDBSessionServiceImplT<TDBSession>*>(dwProvCookie);
*ppszProvInfo = pSvc->m_szConnectionString;
return true;
}
return false;
}
// Opens/retrieves the per-thread data connection using this service's
// connection string.
HRESULT GetSessionConnection(CDataConnection *pConn,
IServiceProvider *pProv) throw()
{
if (!pProv)
return E_INVALIDARG;
if(!m_szConnectionString[0])
return E_UNEXPECTED;
return GetDataSource(pProv,
ATL_DBSESSION_ID,
m_szConnectionString,
pConn);
}
// Stores the connection string, service provider and initial timeout.
// Returns E_OUTOFMEMORY if the connection string does not fit.
HRESULT Initialize(SERVICEIMPL_INITPARAM_TYPE pData,
IServiceProvider *pProvider,
unsigned __int64 dwInitialTimeout) throw()
{
if (!pData || !pProvider)
return E_INVALIDARG;
if (Checked::wcsnlen(pData, MAX_CONNECTION_STRING_LEN) < MAX_CONNECTION_STRING_LEN)
{
Checked::wcscpy_s(m_szConnectionString, _countof(m_szConnectionString), pData);
}
else
return E_OUTOFMEMORY;
m_dwTimeout = dwInitialTimeout;
m_spServiceProvider = pProvider;
return S_OK;
}
// Generates a new session name into szNewID (in/out size in *pdwSize),
// creates a session object for it and returns it in *ppSession.
// The service does not keep a reference to the new session.
HRESULT CreateNewSession(__out_ecount_part_z(*pdwSize, *pdwSize) LPSTR szNewID, __inout DWORD *pdwSize, __deref_out ISession** ppSession) throw()
{
HRESULT hr = E_FAIL;
CComObject<TDBSession> *pNewSession = NULL;
if (!pdwSize)
return E_POINTER;
if (ppSession)
*ppSession = NULL;
else
return E_POINTER;
if (szNewID)
*szNewID = NULL;
else
return E_INVALIDARG;
// Create new session
CComObject<TDBSession>::CreateInstance(&pNewSession);
if (pNewSession == NULL)
return E_OUTOFMEMORY;
// Create a session name and initialize the object
hr = m_SessionNameGenerator.GetNewSessionName(szNewID, pdwSize);
if (hr == S_OK)
{
hr = pNewSession->Initialize(szNewID,
m_spServiceProvider,
reinterpret_cast<DWORD_PTR>(this),
GetProviderInfo);
if (hr == S_OK)
{
// we don't hold a reference to the object
hr = pNewSession->QueryInterface(ppSession);
}
}
if (hr != S_OK)
delete pNewSession;
return hr;
}
// Same as CreateNewSession but with a caller-supplied session name.
HRESULT CreateNewSessionByName(__in_z LPSTR szNewID, __deref_out ISession** ppSession) throw()
{
HRESULT hr = E_FAIL;
CComObject<TDBSession> *pNewSession = NULL;
if (!szNewID || *szNewID == 0)
return E_INVALIDARG;
if (ppSession)
*ppSession = NULL;
else
return E_POINTER;
// Create new session
CComObject<TDBSession>::CreateInstance(&pNewSession);
if (pNewSession == NULL)
return E_OUTOFMEMORY;
hr = pNewSession->Initialize(szNewID,
m_spServiceProvider,
reinterpret_cast<DWORD_PTR>(this),
GetProviderInfo);
if (hr == S_OK)
{
// we don't hold a reference to the object
hr = pNewSession->QueryInterface(ppSession);
}
if (hr != S_OK)
delete pNewSession;
return hr;
}
// Validates szID against the session references table and, if it
// exists, returns a new session object bound to it.
HRESULT GetSession(LPCSTR szID, ISession **ppSession) throw()
{
HRESULT hr = E_FAIL;
if (!szID)
return E_INVALIDARG;
if (ppSession)
*ppSession = NULL;
else
return E_POINTER;
CComObject<TDBSession> *pNewSession = NULL;
// Check the DB to see if the session ID is a valid session
_ATLTRY
{
CA2CT session(szID);
hr = IsValidSession(session);
}
_ATLCATCHALL()
{
hr = E_OUTOFMEMORY;
}
if (hr == S_OK)
{
// Create new session object to represent this session
CComObject<TDBSession>::CreateInstance(&pNewSession);
if (pNewSession == NULL)
return E_OUTOFMEMORY;
hr = pNewSession->Initialize(szID,
m_spServiceProvider,
reinterpret_cast<DWORD_PTR>(this),
GetProviderInfo);
if (hr == S_OK)
{
// we don't hold a reference to the object
hr = pNewSession->QueryInterface(ppSession);
}
}
if (hr != S_OK && pNewSession)
delete pNewSession;
return hr;
}
// Removes a session and all of its variables from the database,
// regardless of reference count (uses GetSessionRefDeleteFinal).
HRESULT CloseSession(LPCSTR szID) throw()
{
if (!szID)
return E_INVALIDARG;
CDataConnection conn;
HRESULT hr = GetSessionConnection(&conn,
m_spServiceProvider);
if (hr != S_OK)
return hr;
// set up accessors
CCommand<CAccessor<CSessionRefUpdator> > updator;
CCommand<CAccessor<CSessionDataDeleteAll> > command;
_ATLTRY
{
CA2CT session(szID);
hr = updator.Assign(session);
if (hr == S_OK)
hr = command.Assign(session);
}
_ATLCATCHALL()
{
hr = E_OUTOFMEMORY;
}
if (hr == S_OK)
{
// delete all session variables (may not be any!)
hr = command.Open(conn,
m_QueryObj.GetSessionVarDeleteAllVars(),
NULL,
NULL,
DBGUID_DEFAULT,
false);
if (hr == S_OK)
{
DBROWCOUNT nRows = 0;
nRows = 0;
// delete references in the session references table
hr = updator.Open(conn,
m_QueryObj.GetSessionRefDeleteFinal(),
NULL,
&nRows,
DBGUID_DEFAULT,
false);
if (nRows == 0)
hr = E_UNEXPECTED;
}
}
return hr;
}
// Sets the same timeout on every session in the references table and,
// on success, records it as the default for new sessions.
HRESULT SetSessionTimeout(unsigned __int64 nTimeout) throw()
{
// Get the data connection for this thread
CDataConnection conn;
HRESULT hr = GetSessionConnection(&conn, m_spServiceProvider);
if (hr != S_OK)
return hr;
// all sessions get the same timeout
CCommand<CAccessor<CSetAllTimeouts> > command;
hr = command.Assign(nTimeout);
if (hr == S_OK)
{
hr = command.Open(conn, m_QueryObj.GetSessionReferencesSet(),
NULL,
NULL,
DBGUID_DEFAULT,
false);
if (hr == S_OK)
{
m_dwTimeout = nTimeout;
}
}
return hr;
}
// Returns the cached default timeout (no database round trip).
HRESULT GetSessionTimeout(unsigned __int64* pnTimeout) throw()
{
if (pnTimeout)
*pnTimeout = m_dwTimeout;
else
return E_INVALIDARG;
return S_OK;
}
// Returns the number of rows in the session references table.
HRESULT GetSessionCount(DWORD *pnCount) throw()
{
if (pnCount)
*pnCount = 0;
else
return E_POINTER;
CCommand<CAccessor<CSessionRefCount> > command;
CDataConnection conn;
HRESULT hr = GetSessionConnection(&conn,
m_spServiceProvider);
if (hr != S_OK)
return hr;
hr = command.Open(conn,
m_QueryObj.GetSessionRefGetCount());
if (hr == S_OK)
{
hr = command.MoveFirst();
if (hr == S_OK)
{
*pnCount = (DWORD)command.m_nCount;
}
}
return hr;
}
// No-op for the DB-backed service: sessions live in the database, not
// in this object.
void ReleaseAllSessions() throw()
{
// nothing to do
}
// No-op: expired sessions are removed by the SQL in FreeSession.
void SweepSessions() throw()
{
// nothing to do
}
// Helpers
// Returns S_OK when szID has a row in the session references table,
// an error HRESULT otherwise.
HRESULT IsValidSession(LPCTSTR szID) throw()
{
if (!szID)
return E_INVALIDARG;
// Look in the sessionreferences table to see if there is an entry
// for this session.
if (m_szConnectionString[0] == 0)
return E_UNEXPECTED;
CDataConnection conn;
HRESULT hr = GetSessionConnection(&conn,
m_spServiceProvider);
if (hr != S_OK)
return hr;
// Check the session references table to see if
// this is a valid session
CCommand<CAccessor<CSessionRefSelector> > selector;
hr = selector.Assign(szID);
if (hr != S_OK)
return hr;
hr = selector.Open(conn,
m_QueryObj.GetSessionRefSelect(),
NULL,
NULL,
DBGUID_DEFAULT,
true);
if (hr == S_OK)
return selector.MoveFirst();
return hr;
}
CSessionNameGenerator m_SessionNameGenerator; // Object for generating session names
unsigned __int64 m_dwTimeout;
}; // CDBSessionServiceImplT
typedef CDBSessionServiceImplT<> CDBSessionServiceImpl;
//////////////////////////////////////////////////////////////////
//
// In-memory persisted session
//
//////////////////////////////////////////////////////////////////
// In-memory persisted session service keeps a pointer
// to the session object around in memory. The pointer is
// contained in a CComPtr, which is stored in a CAtlMap, so
// we have to have a CElementTraits class for that.
typedef CComPtr<ISession> SESSIONPTRTYPE;
template<>
class CElementTraits<SESSIONPTRTYPE> :
public CElementTraitsBase<SESSIONPTRTYPE>
{
public:
// Hashes on the raw interface pointer value.
// NOTE(review): the cast truncates the pointer to ULONG; on 64-bit
// builds the high bits are discarded. Acceptable for a hash, but two
// pointers differing only in the high bits would collide.
static ULONG Hash( INARGTYPE obj ) throw()
{
return( (ULONG)(ULONG_PTR)obj.p);
}
// COM identity comparison (IUnknown equality), not pointer equality.
static BOOL CompareElements( OUTARGTYPE element1, OUTARGTYPE element2 ) throw()
{
return element1.IsEqualObject(element2.p) ? TRUE : FALSE;
}
// Ordered comparison is not meaningful for interface pointers and is
// deliberately left unimplemented.
static int CompareElementsOrdered( INARGTYPE , INARGTYPE ) throw()
{
ATLASSERT(0); // NOT IMPLEMENTED
return 0;
}
};
// CMemSession
// This session persistence class persists session variables in memory.
// Note that this type of persistence should only be used on single server
// web sites.
class CMemSession :
    public ISession,
    public CComObjectRootEx<CComGlobalsThreadModel>
{
public:
    BEGIN_COM_MAP(CMemSession)
        COM_INTERFACE_ENTRY(ISession)
    END_COM_MAP()
    // Initialize the timeout and last-access time here: IsExpired()
    // reads both members, so a session on which SetTimeout()/Access()
    // was never called would otherwise compare against uninitialized
    // values. ATL_SESSION_TIMEOUT matches the default used by the
    // other session classes in this file.
    CMemSession() throw(...) :
        m_dwTimeout(ATL_SESSION_TIMEOUT),
        m_tLastAccess(CTime::GetCurrentTime())
    {
    }
    virtual ~CMemSession()
    {
    }
    // Retrieves the named variable into *pVal (a copy; caller owns it).
    // Returns E_FAIL if the variable does not exist.
    STDMETHOD(GetVariable)(LPCSTR szName, VARIANT *pVal) throw()
    {
        if (!szName)
            return E_INVALIDARG;
        if (pVal)
            VariantInit(pVal);
        else
            return E_POINTER;
        HRESULT hr = Access();
        if (hr == S_OK)
        {
            CSLockType lock(m_cs, false);
            hr = lock.Lock();
            if (FAILED(hr))
                return hr;
            hr = E_FAIL;
            _ATLTRY
            {
                CComVariant val;
                if (m_Variables.Lookup(szName, val))
                {
                    hr = VariantCopy(pVal, &val);
                }
            }
            _ATLCATCHALL()
            {
                hr = E_UNEXPECTED;
            }
        }
        return hr;
    }
    // Creates or overwrites the named variable with a copy of vNewVal.
    STDMETHOD(SetVariable)(LPCSTR szName, VARIANT vNewVal) throw()
    {
        if (!szName)
            return E_INVALIDARG;
        HRESULT hr = Access();
        if (hr == S_OK)
        {
            CSLockType lock(m_cs, false);
            hr = lock.Lock();
            if (FAILED(hr))
                return hr;
            _ATLTRY
            {
                hr = m_Variables.SetAt(szName, vNewVal) ? S_OK : E_FAIL;
            }
            _ATLCATCHALL()
            {
                hr = E_UNEXPECTED;
            }
        }
        return hr;
    }
    // Removes the named variable. Returns E_FAIL if it was not present.
    STDMETHOD(RemoveVariable)(LPCSTR szName) throw()
    {
        if (!szName)
            return E_INVALIDARG;
        HRESULT hr = Access();
        if (hr == S_OK)
        {
            CSLockType lock(m_cs, false);
            hr = lock.Lock();
            if (FAILED(hr))
                return hr;
            _ATLTRY
            {
                hr = m_Variables.RemoveKey(szName) ? S_OK : E_FAIL;
            }
            _ATLCATCHALL()
            {
                hr = E_UNEXPECTED;
            }
        }
        return hr;
    }
    // Returns the number of variables currently stored in this session.
    STDMETHOD(GetCount)(long *pnCount) throw()
    {
        if (pnCount)
            *pnCount = 0;
        else
            return E_POINTER;
        HRESULT hr = Access();
        if (hr == S_OK)
        {
            CSLockType lock(m_cs, false);
            hr = lock.Lock();
            if (FAILED(hr))
                return hr;
            *pnCount = (long) m_Variables.GetCount();
        }
        return hr;
    }
    // Removes every variable from this session.
    STDMETHOD(RemoveAllVariables)() throw()
    {
        HRESULT hr = Access();
        if (hr == S_OK)
        {
            CSLockType lock(m_cs, false);
            hr = lock.Lock();
            if (FAILED(hr))
                return hr;
            m_Variables.RemoveAll();
        }
        return hr;
    }
    // Starts an enumeration over the variable map. The enum handle is
    // unused for the in-memory implementation; only *pPOS matters.
    STDMETHOD(BeginVariableEnum)(POSITION *pPOS, HSESSIONENUM *phEnumHandle=NULL) throw()
    {
        if (phEnumHandle)
            *phEnumHandle = NULL;
        if (pPOS)
            *pPOS = NULL;
        else
            return E_POINTER;
        HRESULT hr = Access();
        if (hr == S_OK)
        {
            CSLockType lock(m_cs, false);
            hr = lock.Lock();
            if (FAILED(hr))
                return hr;
            *pPOS = m_Variables.GetStartPosition();
        }
        return hr;
    }
    // Copies the value (and optionally the name, when szName/dwLen are
    // given) at *pPOS and advances *pPOS to the next entry.
    STDMETHOD(GetNextVariable)(POSITION *pPOS, VARIANT *pVal,
        HSESSIONENUM hEnum=NULL,
        LPSTR szName=NULL,
        DWORD dwLen=0 ) throw()
    {
        (hEnum); // Unused!
        if (pVal)
            VariantInit(pVal);
        else
            return E_POINTER;
        if (!pPOS)
            return E_POINTER;
        CComVariant val;
        POSITION pos = *pPOS;
        HRESULT hr = Access();
        if (hr == S_OK)
        {
            CSLockType lock(m_cs, false);
            hr = lock.Lock();
            if (FAILED(hr))
                return hr;
            hr = E_FAIL;
            _ATLTRY
            {
                if (szName)
                {
                    CStringA strName = m_Variables.GetKeyAt(pos);
                    if (strName.GetLength())
                    {
                        if (dwLen > (DWORD)strName.GetLength())
                        {
                            Checked::strcpy_s(szName, dwLen, strName);
                            hr = S_OK;
                        }
                        else
                            hr = E_OUTOFMEMORY; // buffer not big enough
                    }
                }
                else
                    hr = S_OK;
                if (hr == S_OK)
                {
                    val = m_Variables.GetNextValue(pos);
                    hr = VariantCopy(pVal, &val);
                    if (hr == S_OK)
                        *pPOS = pos;
                }
            }
            _ATLCATCHALL()
            {
                hr = E_UNEXPECTED;
            }
        }
        return hr;
    }
    // Nothing to free: BeginVariableEnum allocates no handle.
    STDMETHOD(CloseEnum)(HSESSIONENUM /*hEnumHandle*/) throw()
    {
        return S_OK;
    }
    // Returns S_OK if more than m_dwTimeout milliseconds have elapsed
    // since the last access, S_FALSE otherwise.
    STDMETHOD(IsExpired)() throw()
    {
        CTime tmNow = CTime::GetCurrentTime();
        CTimeSpan span = tmNow-m_tLastAccess;
        if ((unsigned __int64)((span.GetTotalSeconds()*1000)) > m_dwTimeout)
            return S_OK;
        return S_FALSE;
    }
    // Stamps the session with the current time.
    HRESULT Access() throw()
    {
        // We lock here to protect against multiple threads
        // updating the same member concurrently.
        CSLockType lock(m_cs, false);
        HRESULT hr = lock.Lock();
        if (FAILED(hr))
            return hr;
        m_tLastAccess = CTime::GetCurrentTime();
        return S_OK;
    }
    // Replaces the session timeout (milliseconds).
    STDMETHOD(SetTimeout)(unsigned __int64 dwNewTimeout) throw()
    {
        // We lock here to protect against multiple threads
        // updating the same member concurrently
        CSLockType lock(m_cs, false);
        HRESULT hr = lock.Lock();
        if (FAILED(hr))
            return hr;
        m_dwTimeout = dwNewTimeout;
        return S_OK;
    }
    // In-memory sessions have no reference counting; locking just
    // refreshes the access time.
    HRESULT SessionLock() throw()
    {
        Access();
        return S_OK;
    }
    HRESULT SessionUnlock() throw()
    {
        return S_OK;
    }
protected:
    typedef CAtlMap<CStringA,
        CComVariant,
        CStringElementTraits<CStringA> > VarMapType;
    unsigned __int64 m_dwTimeout;    // session timeout in milliseconds
    CTime m_tLastAccess;             // time of last Access() call
    VarMapType m_Variables;          // name -> value map
    CComAutoCriticalSection m_cs;    // guards all members above
    typedef CComCritSecLock<CComAutoCriticalSection> CSLockType;
}; // CMemSession
//
// CMemSessionServiceImpl
// Implements the service part of in-memory persisted session services.
//
class CMemSessionServiceImpl
{
public:
typedef void* SERVICEIMPL_INITPARAM_TYPE;
// Start with the file-wide default timeout for newly created sessions.
CMemSessionServiceImpl() throw()
{
m_dwTimeout = ATL_SESSION_TIMEOUT;
}
// Release the critical section's OS resources on service teardown.
~CMemSessionServiceImpl() throw()
{
m_CritSec.Term();
}
// Generates a new session name into szNewID (buffer size in/out via
// *pdwSize), creates a CMemSession for it, stores it in the session map
// and returns an owning interface pointer in *ppSession.
// On any failure the out parameters are left NULL/unchanged.
HRESULT CreateNewSession(__out_ecount_part_z(*pdwSize, *pdwSize) LPSTR szNewID, __inout DWORD *pdwSize, __deref_out_opt ISession** ppSession) throw()
{
HRESULT hr = E_FAIL;
CComObject<CMemSession> *pNewSession = NULL;
if (!szNewID)
return E_INVALIDARG;
if (!pdwSize)
return E_POINTER;
if (ppSession)
*ppSession = NULL;
else
return E_POINTER;
_ATLTRY
{
// Create new session
CComObject<CMemSession>::CreateInstance(&pNewSession);
if (pNewSession == NULL)
return E_OUTOFMEMORY;
// Initialize and add to list of CSessionData
hr = m_SessionNameGenerator.GetNewSessionName(szNewID, pdwSize);
if (SUCCEEDED(hr))
{
// QueryInterface takes the owning reference; the raw
// pNewSession pointer is not used after this on success.
CComPtr<ISession> spSession;
hr = pNewSession->QueryInterface(&spSession);
if (SUCCEEDED(hr))
{
pNewSession->SetTimeout(m_dwTimeout);
pNewSession->Access();
CSLockType lock(m_CritSec, false);
hr = lock.Lock();
if (FAILED(hr))
return hr;
// The map's CComPtr keeps the session alive; the caller
// gets a second reference via Detach.
hr = m_Sessions.SetAt(szNewID, spSession) != NULL ? S_OK : E_FAIL;
if (hr == S_OK)
*ppSession = spSession.Detach();
}
}
}
_ATLCATCHALL()
{
hr = E_UNEXPECTED;
}
return hr;
}
HRESULT CreateNewSessionByName(__in_z LPSTR szNewID, __deref_out_opt ISession** ppSession) throw()
{
HRESULT hr = E_FAIL;
CComObject<CMemSession> *pNewSession = NULL;
if (!szNewID || *szNewID == 0)
return E_INVALIDARG;
if (ppSession)
*ppSession = NULL;
else
return E_POINTER;
CComPtr<ISession> spSession;
// If the session already exists, you get a pointer to the
// existing session. You can't have multiple entries with the
// same name in CAtlMap
hr = GetSession(szNewID, &spSession);
if (hr == S_OK)
{
*ppSession = spSession.Detach();
return hr;
}
_ATLTRY
{
// Create new session
CComObject<CMemSession>::CreateInstance(&pNewSession);
if (pNewSession == NULL)
return E_OUTOFMEMORY;
hr = pNewSession->QueryInterface(&spSession);
if (SUCCEEDED(hr))
{
pNewSession->SetTimeout(m_dwTimeout);
pNewSession->Access();
CSLockType lock(m_CritSec, false);
hr = lock.Lock();
if (FAILED(hr))
return hr;
hr = m_Sessions.SetAt(szNewID, spSession) != NULL ? S_OK : E_FAIL;
if (hr == S_OK)
*ppSession = spSession.Detach();
}
}
_ATLCATCHALL()
{
hr = E_UNEXPECTED;
}
return hr;
}
HRESULT GetSession(LPCSTR szID, ISession **ppSession) throw()
{
HRESULT hr = E_FAIL;
SessMapType::CPair *pPair = NULL;
if (ppSession)
*ppSession = NULL;
else
return E_POINTER;
if (!szID)
return E_INVALIDARG;
CSLockType lock(m_CritSec, false);
hr = lock.Lock();
if (FAILED(hr))
return hr;
hr = E_FAIL;
_ATLTRY
{
pPair = m_Sessions.Lookup(szID);
if (pPair) // the session exists and is in our local map of sessions
{
hr = pPair->m_value.QueryInterface(ppSession);
}
}
_ATLCATCHALL()
{
return E_UNEXPECTED;
}
return hr;
}
HRESULT CloseSession(LPCSTR szID) throw()
{
if (!szID)
return E_INVALIDARG;
HRESULT hr = E_FAIL;
CSLockType lock(m_CritSec, false);
hr = lock.Lock();
if (FAILED(hr))
return hr;
_ATLTRY
{
hr = m_Sessions.RemoveKey(szID) ? S_OK : E_UNEXPECTED;
}
_ATLCATCHALL()
{
hr = E_UNEXPECTED;
}
return hr;
}
void SweepSessions() throw()
{
POSITION posRemove = NULL;
const SessMapType::CPair *pPair = NULL;
POSITION pos = NULL;
CSLockType lock(m_CritSec, false);
if (FAILED(lock.Lock()))
return;
pos = m_Sessions.GetStartPosition();
while (pos)
{
posRemove = pos;
pPair = m_Sessions.GetNext(pos);
if (pPair)
{
if (pPair->m_value.p &&
S_OK == pPair->m_value->IsExpired())
{
// remove our reference on the session
m_Sessions.RemoveAtPos(posRemove);
}
}
}
}
HRESULT SetSessionTimeout(unsigned __int64 nTimeout) throw()
{
HRESULT hr = S_OK;
CComPtr<ISession> spSession;
m_dwTimeout = nTimeout;
CSLockType lock(m_CritSec, false);
hr = lock.Lock();
if (FAILED(hr))
return hr;
POSITION pos = m_Sessions.GetStartPosition();
if (!pos)
return S_OK; // no sessions to set the timeout on
while (pos)
{
SessMapType::CPair *pPair = const_cast<SessMapType::CPair*>(m_Sessions.GetNext(pos));
if (pPair)
{
spSession = pPair->m_value;
if (spSession)
{
// if we fail on any of the sets we will return the
// error code immediately
hr = spSession->SetTimeout(nTimeout);
spSession.Release();
if (hr != S_OK)
break;
}
else
{
hr = E_UNEXPECTED;
break;
}
}
else
{
hr = E_UNEXPECTED;
break;
}
}
return hr;
}
HRESULT GetSessionTimeout(unsigned __int64* pnTimeout) throw()
{
if (pnTimeout)
*pnTimeout = m_dwTimeout;
else
return E_POINTER;
return S_OK;
}
HRESULT GetSessionCount(DWORD *pnCount) throw()
{
if (pnCount)
*pnCount = 0;
else
return E_POINTER;
CSLockType lock(m_CritSec, false);
HRESULT hr = lock.Lock();
if (FAILED(hr))
return hr;
*pnCount = (DWORD)m_Sessions.GetCount();
return S_OK;
}
void ReleaseAllSessions() throw()
{
CSLockType lock(m_CritSec, false);
if (FAILED(lock.Lock()))
return;
m_Sessions.RemoveAll();
}
HRESULT Initialize(SERVICEIMPL_INITPARAM_TYPE,
IServiceProvider*,
unsigned __int64 dwNewTimeout) throw()
{
m_dwTimeout = dwNewTimeout;
return m_CritSec.Init();
}
typedef CAtlMap<CStringA,
SESSIONPTRTYPE,
CStringElementTraits<CStringA>,
CElementTraitsBase<SESSIONPTRTYPE> > SessMapType;
SessMapType m_Sessions; // map for holding sessions in memory
CComCriticalSection m_CritSec; // for synchronizing access to map
typedef CComCritSecLock<CComCriticalSection> CSLockType;
CSessionNameGenerator m_SessionNameGenerator; // Object for generating session names
unsigned __int64 m_dwTimeout;
}; // CMemSessionServiceImpl
//
// CSessionStateService
// This class implements the session state service which can be
// exposed to request handlers.
//
// Template Parameters:
// MonitorClass: Provides periodic sweeping services for the session service class.
// TServiceImplClass: The class that actually implements the methods of the
// ISessionStateService and ISessionStateControl interfaces.
template <class MonitorClass, class TServiceImplClass >
class CSessionStateService :
public ISessionStateService,
public ISessionStateControl,
public IWorkerThreadClient,
public CComObjectRootEx<CComGlobalsThreadModel>
{
protected:
MonitorClass m_Monitor;             // drives the periodic expired-session sweep
HANDLE m_hTimer;                    // timer handle registered with the monitor
CComPtr<IServiceProvider> m_spServiceProvider;
TServiceImplClass m_SessionServiceImpl; // does the actual session bookkeeping
public:
// Construction/Initialization
CSessionStateService() throw() :
m_hTimer(NULL)
{
}
~CSessionStateService() throw()
{
// Shutdown() must have been called to unregister the timer.
ATLASSUME(m_hTimer == NULL);
}
BEGIN_COM_MAP(CSessionStateService)
COM_INTERFACE_ENTRY(ISessionStateService)
COM_INTERFACE_ENTRY(ISessionStateControl)
END_COM_MAP()
// ISessionStateService methods -- all forward to m_SessionServiceImpl.
STDMETHOD(CreateNewSession)(LPSTR szNewID, DWORD *pdwSize, ISession** ppSession) throw()
{
return m_SessionServiceImpl.CreateNewSession(szNewID, pdwSize, ppSession);
}
STDMETHOD(CreateNewSessionByName)(LPSTR szNewID, ISession** ppSession) throw()
{
return m_SessionServiceImpl.CreateNewSessionByName(szNewID, ppSession);
}
STDMETHOD(GetSession)(LPCSTR szID, ISession **ppSession) throw()
{
return m_SessionServiceImpl.GetSession(szID, ppSession);
}
STDMETHOD(CloseSession)(LPCSTR szSessionID) throw()
{
return m_SessionServiceImpl.CloseSession(szSessionID);
}
STDMETHOD(SetSessionTimeout)(unsigned __int64 nTimeout) throw()
{
return m_SessionServiceImpl.SetSessionTimeout(nTimeout);
}
STDMETHOD(GetSessionTimeout)(unsigned __int64 *pnTimeout) throw()
{
return m_SessionServiceImpl.GetSessionTimeout(pnTimeout);
}
STDMETHOD(GetSessionCount)(DWORD *pnSessionCount) throw()
{
return m_SessionServiceImpl.GetSessionCount(pnSessionCount);
}
void SweepSessions() throw()
{
m_SessionServiceImpl.SweepSessions();
}
void ReleaseAllSessions() throw()
{
m_SessionServiceImpl.ReleaseAllSessions();
}
// Initializes the underlying service implementation without starting
// the periodic sweeper timer.
HRESULT Initialize(
IServiceProvider *pServiceProvider = NULL,
typename TServiceImplClass::SERVICEIMPL_INITPARAM_TYPE pInitData = NULL,
unsigned __int64 dwTimeout = ATL_SESSION_TIMEOUT) throw()
{
HRESULT hr = S_OK;
if (pServiceProvider)
m_spServiceProvider = pServiceProvider;
hr = m_SessionServiceImpl.Initialize(pInitData, pServiceProvider, dwTimeout);
return hr;
}
// Initializes the service and registers a recurring timer on the
// worker thread so SweepSessions runs periodically (see Execute).
template <class ThreadTraits>
HRESULT Initialize(
CWorkerThread<ThreadTraits> *pWorker,
IServiceProvider *pServiceProvider = NULL,
typename TServiceImplClass::SERVICEIMPL_INITPARAM_TYPE pInitData = NULL,
unsigned __int64 dwTimeout = ATL_SESSION_TIMEOUT) throw()
{
if (!pWorker)
return E_INVALIDARG;
HRESULT hr = Initialize(pServiceProvider, pInitData, dwTimeout);
if (hr == S_OK)
{
hr = m_Monitor.Initialize(pWorker);
if (hr == S_OK)
{
//sweep every 500ms
hr = m_Monitor.AddTimer(ATL_SESSION_SWEEPER_TIMEOUT, this, 0, &m_hTimer);
}
}
return hr;
}
// Stops the sweeper timer and releases every session.
void Shutdown() throw()
{
if (m_hTimer)
{
if(FAILED(m_Monitor.RemoveHandle(m_hTimer)))
{
/* can't report from here */
ATLASSERT(FALSE);
}
m_hTimer = NULL;
}
ReleaseAllSessions();
}
// Implementation
// IWorkerThreadClient: invoked when the sweep timer fires.
HRESULT Execute(DWORD_PTR /*dwParam*/, HANDLE /*hObject*/) throw()
{
SweepSessions();
return S_OK;
}
// IWorkerThreadClient: invoked so we can close our timer handle.
HRESULT CloseHandle(HANDLE hHandle) throw()
{
::CloseHandle(hHandle);
m_hTimer = NULL;
return S_OK;
}
}; // CSessionStateService
} // namespace ATL
#pragma pack(pop)
#pragma warning(pop)
#endif // __ATLSESSION_H__
<|start_filename|>include/atlsharedsvc.h<|end_filename|>
// This is a part of the Active Template Library.
// Copyright (C) Microsoft Corporation
// All rights reserved.
//
// This source code is only intended as a supplement to the
// Active Template Library Reference and related
// electronic documentation provided with the library.
// See these sources for detailed information regarding the
// Active Template Library product.
#ifndef __ATLSHAREDSVC_H__
#define __ATLSHAREDSVC_H__
#pragma once
#ifdef _WIN32_WCE
#error atlsharedsvc.h is not supported on Windows CE (_WIN32_WCE is defined)
#endif //_WIN32_WCE
#include <atltime.h>
#include <atlsoap.h>
#pragma pack(push,_ATL_PACKING)
namespace ATL{
#ifndef ATL_SHAREDBLOBCACHE_TIMEOUT
#define ATL_SHAREDBLOBCACHE_TIMEOUT 36000000000 // in 100 nano second intervals
// each entry will be free'd if
// no access in 1 hour.
#endif
// Interface used to access the shared blob cache.
// COM interface for storing and retrieving string blobs by name.
[ uuid("AB4AF9CD-8DB1-4974-A617-CF0449578FB9"), object ]
__interface ISharedBlobCache
{
// Stores a copy of szData in the cache under szItemName.
[id(0)] STDMETHOD(AddItem)([in] BSTR szItemName, [in] BSTR szData);
// Retrieves a copy of the blob stored under szItemName.
[id(1)] STDMETHOD(GetItem)([in] BSTR szItemName, [out,retval] BSTR *szData);
};
// In-memory shared blob cache: stores BSTR copies in a CBlobCache and
// frees them through its IMemoryCacheClient callback. Lifetime is
// static (AddRef/Release are no-ops).
class CSharedCache:
    public CBlobCache<CWorkerThread<>, CStdStatClass >,
    public IMemoryCacheClient,
    public ISharedBlobCache
{
    typedef CBlobCache<CWorkerThread<>, CStdStatClass > basecache;
public:
    // IMemoryCacheClient method, frees data in the memory cache.
    STDMETHOD( Free )(const void *pvData)
    {
        if (pvData)
        {
            ::SysFreeString((BSTR)pvData);
        }
        return S_OK;
    }

    // Stores a copy of szData under szItemName with the configured
    // expiration. The copy is freed by Free() when the entry is evicted.
    STDMETHODIMP AddItem(BSTR szItemName, BSTR szData)
    {
        HRESULT hr = E_UNEXPECTED;
        // We make a copy of the BSTR and stick it in the cache.
        // The BSTR will be freed in our IMemoryCacheClient::Free
        // implementation above.
        BSTR szEntry = SysAllocString(szData);
        if(szEntry)
        {
            USES_CONVERSION_EX;
            // create an expiration time for the entry
            CFileTime tm = CFileTime::GetCurrentTime();
            CFileTimeSpan span;
            span.SetTimeSpan(ATL_SHAREDBLOBCACHE_TIMEOUT);
            tm += span;
            HCACHEITEM h;
            hr = basecache::Add(OLE2A_EX(szItemName, _ATL_SAFE_ALLOCA_DEF_THRESHOLD), szEntry, sizeof(BSTR),
                &tm, _AtlBaseModule.m_hInst, &h, static_cast<IMemoryCacheClient*>(this));
            if (hr == S_OK)
            {
                // On successful add, we have to release our
                // reference on the entry.
                basecache::ReleaseEntry(h);
            }
            else
            {
                // FIX: the original leaked the copied BSTR when Add
                // failed -- the cache never took ownership of it.
                ::SysFreeString(szEntry);
            }
        }
        return hr;
    }

    // Retrieves a copy of the blob stored under szItemName. The caller
    // owns the returned BSTR. *szData is NULL on failure.
    STDMETHODIMP GetItem(BSTR szItemName, BSTR *szData)
    {
        USES_CONVERSION_EX;
        HRESULT hr = E_UNEXPECTED;
        HCACHEITEM hEntry = NULL;
        if (!szItemName || !szData)
            return hr;
        // FIX: ensure the out parameter is defined on every path
        // (the original left *szData untouched on failure).
        *szData = NULL;
        hr = basecache::LookupEntry(OLE2A_EX(szItemName, _ATL_SAFE_ALLOCA_DEF_THRESHOLD), &hEntry);
        if (hr == S_OK)
        {
            void *pData = NULL;
            DWORD dwSize = 0;
            hr = basecache::GetData(hEntry, &pData, &dwSize);
            if (hr == S_OK)
            {
                // make a copy of the string
                *szData = ::SysAllocString((BSTR)pData);
                if (*szData == NULL)
                {
                    // FIX: the original reported S_OK even when the
                    // allocation of the returned copy failed.
                    hr = E_OUTOFMEMORY;
                }
            }
            basecache::ReleaseEntry(hEntry);
        }
        return hr;
    }

    // Hand-rolled QueryInterface for the two interfaces this statically
    // allocated object exposes.
    STDMETHODIMP QueryInterface(REFIID riid, void **ppv)
    {
        // FIX: follow COM rules -- validate ppv and NULL it before the
        // interface checks so *ppv is defined on E_NOINTERFACE.
        if (!ppv)
            return E_POINTER;
        *ppv = NULL;
        HRESULT hr = E_NOINTERFACE;
        if (InlineIsEqualGUID(__uuidof(IMemoryCacheClient), riid)||
            InlineIsEqualGUID(__uuidof(IUnknown), riid))
        {
            *ppv = static_cast<void*>(static_cast<IMemoryCacheClient*>(this));
            hr = S_OK;
        }
        else if( InlineIsEqualGUID(__uuidof(ISharedBlobCache), riid))
        {
            *ppv = static_cast<void*>(static_cast<ISharedBlobCache*>(this));
            hr = S_OK;
        }
        return hr;
    }

    // The cache is a static singleton; reference counting is a no-op.
    ULONG STDMETHODCALLTYPE AddRef()
    {
        return 1;
    }
    ULONG STDMETHODCALLTYPE Release()
    {
        return 1;
    }
};
// This class implements the SOAP interface for the shared blob cache.
[
soap_handler(
name="SharedBlobCache",
namespace="http://www.microsoft.com/vc/atlserver/soap/SharedBlobCache",
protocol="soap"
),
request_handler(
name="SharedBlobCache",
sdl="GenSharedBlobCacheWSDL"
)
]
class CSharedCacheHandler:
public ISharedBlobCache
{
public:
// Forwards AddItem to the blob cache obtained via QueryService.
[soap_method]
STDMETHOD(AddItem)(BSTR szItemName, BSTR szData)
{
if (!m_spMemCache)
return E_UNEXPECTED;
return m_spMemCache->AddItem(szItemName, szData);
}
// Forwards GetItem to the blob cache obtained via QueryService.
[soap_method]
STDMETHOD(GetItem)(BSTR szItemName, BSTR *szData)
{
if (!m_spMemCache)
return E_UNEXPECTED;
return m_spMemCache->GetItem(szItemName, szData);
}
// Resolves the ISharedBlobCache service once; subsequent calls are
// no-ops. Returns an HTTP error code if the service is unavailable.
HTTP_CODE Initialize(IServiceProvider *pProvider)
{
ATLASSERT(pProvider); // should never be NULL
if (!pProvider)
return HTTP_ERROR(500, ISE_SUBERR_UNEXPECTED);
if (m_spMemCache)
return HTTP_SUCCESS; // already initialized
pProvider->QueryService(__uuidof(ISharedBlobCache), &m_spMemCache);
return m_spMemCache ? HTTP_SUCCESS : HTTP_ERROR(500, ISE_SUBERR_UNEXPECTED);
}
// override HandleRequest to Initialize our m_spMemCache before
// delegating to the SOAP handler base.
// NOTE(review): despite the original comment mentioning client
// authorization, no authorization is performed here -- confirm intent.
HTTP_CODE HandleRequest(AtlServerRequest *pRequestInfo, IServiceProvider *pProvider)
{
HTTP_CODE dwErr = Initialize(pProvider);
if (dwErr != HTTP_SUCCESS)
return dwErr;
dwErr = CSoapHandler<CSharedCacheHandler>::HandleRequest(pRequestInfo,
pProvider);
return dwErr;
}
CComPtr<ISharedBlobCache> m_spMemCache; // resolved lazily in Initialize
};
} //ATL
#pragma pack(pop)
#endif // __ATLSHAREDSVC_H__
<|start_filename|>source/SProxy/WSDLDocument.h<|end_filename|>
//
// WSDLDocument.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "XMLDocument.h"
#include "XMLElement.h"
#include "WSDLType.h"
#include "WSDLMessage.h"
#include "WSDLPortType.h"
#include "WSDLBinding.h"
#include "WSDLService.h"
#include "Emit.h"
#include "resource.h"
#include "Attribute.h"
#include "Content.h"
#include "SimpleType.h"
#include "ComplexType.h"
#include "Element.h"
//#include "UnsupportedElement.h"
// In-memory representation of a parsed WSDL document: owns the types,
// messages, portTypes, bindings and services sections and provides
// name-based lookup for each.
class CWSDLDocument : public CXMLDocument
{
private:
typedef CAtlPtrMap<CStringW, CWSDLMessage *, CStringRefElementTraits<CStringW> > MESSAGEMAP;
typedef CAtlPtrMap<CStringW, CWSDLPortType *, CStringRefElementTraits<CStringW> > PORTMAP;
typedef CAtlPtrMap<CStringW, CWSDLBinding *, CStringRefElementTraits<CStringW> > BINDINGMAP;
typedef CAtlPtrMap<CStringW, CWSDLService *, CStringRefElementTraits<CStringW> > SERVICEMAP;
CAtlPtrList<CWSDLType *> m_types;   // <types> sections, in document order
MESSAGEMAP m_messages;              // <message> elements keyed by name
PORTMAP m_portTypes;                // <portType> elements keyed by name
BINDINGMAP m_bindings;              // <binding> elements keyed by name
SERVICEMAP m_services;              // <service> elements keyed by name
CStringW m_strName;                 // the document's name attribute
public:
inline CWSDLDocument()
{
SetDocumentType(WSDLDOC);
}
// Sets the document name from a (possibly non-terminated) buffer.
inline HRESULT SetName(const wchar_t *wszName, int cchName)
{
if (!wszName)
{
return E_FAIL;
}
m_strName.SetString(wszName, cchName);
return S_OK;
}
inline HRESULT SetName(const CStringW& strName)
{
m_strName = strName;
return S_OK;
}
inline const CStringW& GetName()
{
return m_strName;
}
// Adds a <types> section; allocates one if p is NULL. Returns the
// stored pointer (owned by m_types) or NULL on failure.
inline CWSDLType * AddType(CWSDLType * p = NULL)
{
CAutoPtr<CWSDLType> spOut;
if (p == NULL)
{
spOut.Attach( new CWSDLType );
p = spOut;
}
if (p != NULL)
{
if (m_types.AddTail(p) != NULL)
{
spOut.Detach();
return p;
}
}
return NULL;
}
inline POSITION GetFirstType()
{
return m_types.GetHeadPosition();
}
inline CWSDLType * GetNextType(POSITION& pos)
{
return m_types.GetNext(pos);
}
// Searches every schema of every <types> section for the named
// complexType. Cross-namespace imports are not yet handled.
inline CComplexType * GetComplexType(const CStringW& strName, const CStringW& strUri)
{
if (strUri != GetTargetNamespace())
{
//
// TODO: call import handler
//
}
POSITION pos = GetFirstType();
while (pos != NULL)
{
CWSDLType *pType = GetNextType(pos);
if (pType != NULL)
{
POSITION schemaPos = pType->GetFirstSchema();
while (schemaPos != NULL)
{
CSchema *pSchema = pType->GetNextSchema(schemaPos);
if (pSchema != NULL)
{
CComplexType *pRet = pSchema->GetComplexType(strUri, strName);
if (pRet != NULL)
{
return pRet;
}
}
}
}
}
return NULL;
}
// Searches every schema of every <types> section for the named
// element declaration (same traversal as GetComplexType).
inline CElement * GetElement(const CStringW& strName, const CStringW& strUri)
{
if (strUri != GetTargetNamespace())
{
//
// TODO: call import handler
//
}
POSITION pos = GetFirstType();
while (pos != NULL)
{
CWSDLType *pType = GetNextType(pos);
if (pType != NULL)
{
POSITION schemaPos = pType->GetFirstSchema();
while (schemaPos != NULL)
{
CSchema *pSchema = pType->GetNextSchema(schemaPos);
if (pSchema != NULL)
{
CElement *pRet = pSchema->GetElement(strUri, strName);
if (pRet != NULL)
{
return pRet;
}
}
}
}
}
return NULL;
}
// Searches every schema of every <types> section for the named
// simpleType (same traversal as GetComplexType).
inline CSimpleType * GetSimpleType(const CStringW& strName, const CStringW& strUri)
{
if (strUri != GetTargetNamespace())
{
//
// TODO: call import handler
//
}
POSITION pos = GetFirstType();
while (pos != NULL)
{
CWSDLType *pType = GetNextType(pos);
if (pType != NULL)
{
POSITION schemaPos = pType->GetFirstSchema();
while (schemaPos != NULL)
{
CSchema *pSchema = pType->GetNextSchema(schemaPos);
if (pSchema != NULL)
{
CSimpleType *pRet = pSchema->GetSimpleType(strUri, strName);
if (pRet != NULL)
{
return pRet;
}
}
}
}
}
return NULL;
}
// inline CUnsupportedElement * GetUnsupportedElement(const CStringW& strName, const CStringW& strUri)
// {
// if (strUri != GetTargetNamespace())
// {
// //
// // TODO: call import handler
// //
// }
//
// POSITION pos = GetFirstType();
// while (pos != NULL)
// {
// CWSDLType *pType = GetNextType(pos);
// if (pType != NULL)
// {
// POSITION schemaPos = pType->GetFirstSchema();
// while (schemaPos != NULL)
// {
// CSchema *pSchema = pType->GetNextSchema(schemaPos);
// if (pSchema != NULL)
// {
// CUnsupportedElement *pRet = pSchema->GetUnsupportedElement(strUri, strName);
// if (pRet != NULL)
// {
// return pRet;
// }
// }
// }
// }
// }
//
//// EmitError(IDS_SDL_UNRESOLVED_ELEM, strUri, strName);
// return NULL;
// }
// Registers a named <message>; rejects unnamed messages.
inline CWSDLMessage * AddMessage(CWSDLMessage * p)
{
if (p != NULL)
{
if (p->GetName().GetLength() != 0)
{
if (m_messages.SetAt(p->GetName(), p) != NULL)
{
return p;
}
}
}
return NULL;
}
// NOTE(review): the name GetMessage collides with the windows.h
// GetMessageA/W macro if that header is included with macros enabled.
inline CWSDLMessage * GetMessage(const CStringW& strName)
{
const MESSAGEMAP::CPair * p = m_messages.Lookup(strName);
if (p != NULL)
{
return p->m_value;
}
return NULL;
}
// Registers a named <portType>; rejects unnamed entries.
inline CWSDLPortType * AddPortType(CWSDLPortType * p)
{
if (p != NULL)
{
if (p->GetName().GetLength() != 0)
{
if (m_portTypes.SetAt(p->GetName(), p) != NULL)
{
return p;
}
}
}
return NULL;
}
inline CWSDLPortType * GetPortType(const CStringW& strName)
{
const PORTMAP::CPair * p = m_portTypes.Lookup(strName);
if (p != NULL)
{
return p->m_value;
}
return NULL;
}
// Registers a named <binding>; rejects unnamed entries.
inline CWSDLBinding * AddBinding(CWSDLBinding * p)
{
if (p != NULL)
{
if (p->GetName().GetLength() != 0)
{
if (m_bindings.SetAt(p->GetName(), p) != NULL)
{
return p;
}
}
}
return NULL;
}
inline POSITION GetFirstBinding()
{
return m_bindings.GetStartPosition();
}
inline CWSDLBinding * GetNextBinding(POSITION &pos)
{
return m_bindings.GetNextValue(pos);
}
inline CWSDLBinding * GetBinding(const CStringW &strName)
{
const BINDINGMAP::CPair * p = m_bindings.Lookup(strName);
if (p != NULL)
{
return p->m_value;
}
return NULL;
}
// Registers a named <service>; rejects unnamed entries.
inline CWSDLService * AddService(CWSDLService * p)
{
if (p != NULL)
{
if (p->GetName().GetLength() != 0)
{
if (m_services.SetAt(p->GetName(), p) != NULL)
{
return p;
}
}
}
return NULL;
}
inline POSITION GetFirstService()
{
return m_services.GetStartPosition();
}
inline CWSDLService * GetNextService(POSITION &pos)
{
return m_services.GetNextValue(pos);
}
inline CWSDLService * GetService(const CStringW &strName)
{
const SERVICEMAP::CPair * p = m_services.Lookup(strName);
if (p != NULL)
{
return p->m_value;
}
return NULL;
}
};
<|start_filename|>source/SProxy/SimpleTypeParser.h<|end_filename|>
//
// SimpleTypeParser.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "Parser.h"
#include "resource.h"
class CSimpleType;
// SAX tag/attribute parser for the XSD <simpleType> element: routes
// child tags and attributes to handler methods via the XMLTAG/XMLATTR
// maps and records the results on the CSimpleType being built.
class CSimpleTypeParser : public CParserBase
{
private:
CSimpleType * m_pType;  // the simpleType under construction (not owned)
public:
inline CSimpleTypeParser(ISAXXMLReader *pReader, CParserBase *pParent, DWORD dwLevel, CSimpleType * pType)
:CParserBase(pReader, pParent, dwLevel), m_pType(pType)
{
}
inline CSimpleType * GetSimpleType()
{
return m_pType;
}
inline void SetSimpleType(CSimpleType * pType)
{
m_pType = pType;
}
// Debug-only trace for tags this parser does not handle; the element
// is skipped rather than treated as an error.
inline void MarkUnsupported(const wchar_t *wszQName, int cchQName)
{
#ifdef _DEBUG
int nLine;
int nCol;
GetLocator()->getLineNumber(&nLine);
GetLocator()->getColumnNumber(&nCol);
ATLTRACE( _T("%sUnsupported tag@(%d, %d) : %.*ws -- skipping element\n"), GetTabs(GetLevel()),
nLine, nCol,
cchQName, wszQName );
#endif
}
/*
annotation, length, enumeration, pattern, scale, period, duration,
maxLength, precision, minInclusive, minExclusive, maxInclusive,
maxExclusive, minLength, encoding
*/
BEGIN_XMLTAG_MAP()
XMLTAG_ENTRY_EX("enumeration", XSD_NAMESPACEA, OnEnumeration)
XMLTAG_ENTRY_EX("annotation", XSD_NAMESPACEA, OnAnnotation)
XMLTAG_ENTRY_EX("length", XSD_NAMESPACEA, OnLength)
XMLTAG_ENTRY_EX("pattern", XSD_NAMESPACEA, OnPattern)
XMLTAG_ENTRY_EX("scale", XSD_NAMESPACEA, OnScale)
XMLTAG_ENTRY_EX("period", XSD_NAMESPACEA, OnPeriod)
XMLTAG_ENTRY_EX("duration", XSD_NAMESPACEA, OnDuration)
XMLTAG_ENTRY_EX("maxLength", XSD_NAMESPACEA, OnMaxLength)
XMLTAG_ENTRY_EX("precision", XSD_NAMESPACEA, OnPrecision)
XMLTAG_ENTRY_EX("minInclusive", XSD_NAMESPACEA, OnMinInclusive)
XMLTAG_ENTRY_EX("minExclusive", XSD_NAMESPACEA, OnMinExclusive)
XMLTAG_ENTRY_EX("maxInclusive", XSD_NAMESPACEA, OnMaxInclusive)
XMLTAG_ENTRY_EX("maxExclusive", XSD_NAMESPACEA, OnMaxExclusive)
XMLTAG_ENTRY_EX("minLength", XSD_NAMESPACEA, OnMinLength)
XMLTAG_ENTRY_EX("encoding", XSD_NAMESPACEA, OnEncoding)
// REVIEW: new one
XMLTAG_ENTRY_EX("restriction", XSD_NAMESPACEA, OnRestriction)
END_XMLTAG_MAP()
/*
<simpleType
abstract = "boolean"
id = "ID"
name="NCName"
{any attributes with non-schema namespace}
>
*/
BEGIN_XMLATTR_MAP()
XMLATTR_ENTRY("name", OnName)
XMLATTR_ENTRY("id", OnID)
XMLATTR_ENTRY("abstract", OnAbstract)
END_XMLATTR_MAP()
TAG_METHOD_DECL(OnAnnotation);
TAG_METHOD_DECL(OnLength);
TAG_METHOD_DECL(OnPattern);
TAG_METHOD_DECL(OnEnumeration);
TAG_METHOD_DECL(OnScale);
TAG_METHOD_DECL(OnPeriod);
TAG_METHOD_DECL(OnDuration);
TAG_METHOD_DECL(OnMaxLength);
TAG_METHOD_DECL(OnPrecision);
TAG_METHOD_DECL(OnMinInclusive);
TAG_METHOD_DECL(OnMinExclusive);
TAG_METHOD_DECL(OnMaxInclusive);
TAG_METHOD_DECL(OnMaxExclusive);
TAG_METHOD_DECL(OnMinLength);
TAG_METHOD_DECL(OnEncoding);
TAG_METHOD_DECL(OnRestriction);
ATTR_METHOD_DECL(OnName);
ATTR_METHOD_DECL(OnID);
ATTR_METHOD_DECL(OnAbstract);
// ISAXContentHandler override: records namespace prefix mappings
// encountered while parsing (implemented out of line).
HRESULT __stdcall startPrefixMapping(
const wchar_t *wszPrefix,
int cchPrefix,
const wchar_t *wszUri,
int cchUri);
};
<|start_filename|>include/atlhtml.h<|end_filename|>
// This is a part of the Active Template Library.
// Copyright (C) Microsoft Corporation
// All rights reserved.
//
// This source code is only intended as a supplement to the
// Active Template Library Reference and related
// electronic documentation provided with the library.
// See these sources for detailed information regarding the
// Active Template Library product.
#ifndef __ATLHTML_H__
#define __ATLHTML_H__
#pragma once
#include <atlstr.h>
#include <atlsiface.h>
#include <atlconv.h>
#pragma pack(push,_ATL_PACKING)
namespace ATL {
#define TAGF_NONE 0
#define TAGF_HASEND 1
#define TAGF_BLOCK 2
// Describes one HTML tag: its name and TAGF_* flags (TAGF_HASEND --
// the tag has a closing form; TAGF_BLOCK -- surround with CRLF when
// m_bAddCRLF is enabled in CStreamFormatter).
struct ATL_HTML_TAG
{
LPCTSTR szTagName;
UINT uFlags;
};
// Indexes into the s_tags table below; the enumerator order must match
// the table's row order exactly. ATL_HTML_TAG_LAST is the entry count.
enum ATL_HTML_TAGS {
ATL_HTML_TAG_BODY,
ATL_HTML_TAG_A,
ATL_HTML_TAG_B,
ATL_HTML_TAG_I,
ATL_HTML_TAG_U,
ATL_HTML_TAG_FONT,
ATL_HTML_TAG_IMG,
ATL_HTML_TAG_HR,
ATL_HTML_TAG_BR,
ATL_HTML_TAG_DIV,
ATL_HTML_TAG_BLOCKQUOTE,
ATL_HTML_TAG_ADDRESS,
ATL_HTML_TAG_P,
ATL_HTML_TAG_H1,
ATL_HTML_TAG_H2,
ATL_HTML_TAG_H3,
ATL_HTML_TAG_H4,
ATL_HTML_TAG_H5,
ATL_HTML_TAG_H6,
ATL_HTML_TAG_PRE,
ATL_HTML_TAG_Q,
ATL_HTML_TAG_SUB,
ATL_HTML_TAG_SUP,
ATL_HTML_TAG_INS,
ATL_HTML_TAG_DEL,
ATL_HTML_TAG_EM,
ATL_HTML_TAG_STRONG,
ATL_HTML_TAG_DFN,
ATL_HTML_TAG_CODE,
ATL_HTML_TAG_SAMP,
ATL_HTML_TAG_KBD,
ATL_HTML_TAG_VAR,
ATL_HTML_TAG_CITE,
ATL_HTML_TAG_ABBR,
ATL_HTML_TAG_ACRONYM,
ATL_HTML_TAG_OL,
ATL_HTML_TAG_UL,
ATL_HTML_TAG_LI,
ATL_HTML_TAG_DL,
ATL_HTML_TAG_DT,
ATL_HTML_TAG_DD,
ATL_HTML_TAG_TABLE,
ATL_HTML_TAG_TR,
ATL_HTML_TAG_TD,
ATL_HTML_TAG_FORM,
ATL_HTML_TAG_INPUT,
ATL_HTML_TAG_SELECT,
ATL_HTML_TAG_OPTION,
ATL_HTML_TAG_HEAD,
ATL_HTML_TAG_HTML,
ATL_HTML_TAG_MAP,
ATL_HTML_TAG_AREA,
ATL_HTML_TAG_BASE,
ATL_HTML_TAG_BDO,
ATL_HTML_TAG_BIG,
ATL_HTML_TAG_BUTTON,
ATL_HTML_TAG_IFRAME,
ATL_HTML_TAG_LABEL,
ATL_HTML_TAG_LINK,
ATL_HTML_TAG_META,
ATL_HTML_TAG_NOFRAMES,
ATL_HTML_TAG_NOSCRIPT,
ATL_HTML_TAG_COL,
ATL_HTML_TAG_COLGROUP,
ATL_HTML_TAG_FIELDSET,
ATL_HTML_TAG_LEGEND,
ATL_HTML_TAG_TBODY,
ATL_HTML_TAG_TEXTAREA,
ATL_HTML_TAG_TFOOT,
ATL_HTML_TAG_TH,
ATL_HTML_TAG_TITLE,
ATL_HTML_TAG_TT,
ATL_HTML_TAG_SMALL,
ATL_HTML_TAG_SPAN,
ATL_HTML_TAG_OBJECT,
ATL_HTML_TAG_PARAM,
ATL_HTML_TAG_LAST };
// Tag table indexed by ATL_HTML_TAGS; row order must match the enum.
// FIX: the ATL_HTML_TAG_ADDRESS entry was misspelled "adress", which
// made StartTag/EndTag emit the invalid HTML tag <adress>.
extern __declspec(selectany) const ATL_HTML_TAG s_tags[] =
{
{ _T("body"), TAGF_HASEND | TAGF_BLOCK },
{ _T("a"), TAGF_HASEND },
{ _T("b"), TAGF_HASEND },
{ _T("i"), TAGF_HASEND },
{ _T("u"), TAGF_HASEND },
{ _T("font"), TAGF_HASEND },
{ _T("img"), TAGF_NONE },
{ _T("hr"), TAGF_NONE },
{ _T("br"), TAGF_NONE },
{ _T("div"), TAGF_HASEND | TAGF_BLOCK },
{ _T("blockquote"), TAGF_HASEND | TAGF_BLOCK },
{ _T("address"), TAGF_HASEND },
{ _T("p"), TAGF_HASEND | TAGF_BLOCK },
{ _T("h1"), TAGF_HASEND | TAGF_BLOCK},
{ _T("h2"), TAGF_HASEND | TAGF_BLOCK},
{ _T("h3"), TAGF_HASEND | TAGF_BLOCK },
{ _T("h4"), TAGF_HASEND | TAGF_BLOCK },
{ _T("h5"), TAGF_HASEND | TAGF_BLOCK },
{ _T("h6"), TAGF_HASEND | TAGF_BLOCK },
{ _T("pre"), TAGF_HASEND | TAGF_BLOCK },
{ _T("q"), TAGF_HASEND },
{ _T("sub"), TAGF_HASEND },
{ _T("sup"), TAGF_HASEND },
{ _T("ins"), TAGF_HASEND },
{ _T("del"), TAGF_HASEND },
{ _T("em"), TAGF_HASEND },
{ _T("strong"), TAGF_HASEND },
{ _T("dfn"), TAGF_HASEND },
{ _T("code"), TAGF_HASEND },
{ _T("samp"), TAGF_HASEND },
{ _T("kbd"), TAGF_HASEND },
{ _T("var"), TAGF_HASEND },
{ _T("cite"), TAGF_HASEND },
{ _T("abbr"), TAGF_HASEND },
{ _T("acronym"), TAGF_HASEND },
{ _T("ol"), TAGF_HASEND | TAGF_BLOCK },
{ _T("ul"), TAGF_HASEND | TAGF_BLOCK },
{ _T("li"), TAGF_HASEND },
{ _T("dl"), TAGF_HASEND | TAGF_BLOCK },
{ _T("dt"), TAGF_HASEND },
{ _T("dd"), TAGF_HASEND },
{ _T("table"), TAGF_HASEND },
{ _T("tr"), TAGF_HASEND },
{ _T("td"), TAGF_HASEND },
{ _T("form"), TAGF_HASEND },
{ _T("input"), TAGF_HASEND },
{ _T("select"), TAGF_HASEND },
{ _T("option"), TAGF_HASEND },
{ _T("head"), TAGF_HASEND | TAGF_BLOCK },
{ _T("html"), TAGF_HASEND | TAGF_BLOCK },
{ _T("map"), TAGF_HASEND | TAGF_BLOCK },
{ _T("area"), TAGF_BLOCK },
{ _T("base"), TAGF_BLOCK },
{ _T("bdo"), TAGF_HASEND },
{ _T("big"), TAGF_HASEND },
{ _T("button"), TAGF_HASEND },
{ _T("iframe"), TAGF_HASEND },
{ _T("label"), TAGF_HASEND },
{ _T("link"), TAGF_NONE },
{ _T("meta"), TAGF_BLOCK },
{ _T("noframes"), TAGF_BLOCK },
{ _T("noscript"), TAGF_BLOCK },
{ _T("col"), TAGF_BLOCK },
{ _T("colgroup"), TAGF_HASEND | TAGF_BLOCK },
{ _T("fieldset"), TAGF_HASEND | TAGF_BLOCK },
{ _T("legend"), TAGF_HASEND | TAGF_BLOCK },
{ _T("tbody"), TAGF_HASEND | TAGF_BLOCK },
{ _T("textarea"), TAGF_HASEND | TAGF_BLOCK },
{ _T("tfoot"), TAGF_HASEND | TAGF_BLOCK },
{ _T("th"), TAGF_HASEND | TAGF_BLOCK },
{ _T("title"), TAGF_HASEND | TAGF_BLOCK },
{ _T("tt"), TAGF_HASEND },
{ _T("small"), TAGF_HASEND },
{ _T("span"), TAGF_HASEND },
{ _T("object"), TAGF_HASEND | TAGF_BLOCK },
{ _T("param"), TAGF_NONE },
};
// Accumulates an HTML attribute string (e.g. ' name="value" ...') for
// use with CStreamFormatter::StartTag.
// NOTE(review): attribute values are not HTML-escaped; callers must
// not pass untrusted data containing quotes -- confirm upstream usage.
class AtlHtmlAttrs
{
public:
CString m_strAttrs;  // the accumulated attribute text, space-prefixed
AtlHtmlAttrs()
{
}
#pragma warning(push)
#pragma warning(disable : 4793)
// Builds the string from nCount (name, value) LPCTSTR pairs.
AtlHtmlAttrs(int nCount, ...)
{
va_list args;
va_start(args, nCount);
for (int i=0; i<nCount; i++)
{
LPCTSTR szName = va_arg(args, LPCTSTR);
LPCTSTR szVal = va_arg(args, LPCTSTR);
Add(szName, szVal);
}
va_end(args);
}
#pragma warning(pop)
#pragma warning(push)
#pragma warning(disable : 4793)
// Builds the string from a printf-style format and arguments.
AtlHtmlAttrs(LPCTSTR szFormat, ...)
{
if (!szFormat || !*szFormat)
return;
va_list args;
va_start(args, szFormat);
CString strTmp;
strTmp.FormatV(szFormat, args);
va_end(args);
m_strAttrs += _T(" ");
m_strAttrs += strTmp;
}
#pragma warning(pop)
// Appends one attribute: ' name="value"', or ' name' when szValue
// is NULL (a boolean attribute).
BOOL Add(LPCTSTR szName, LPCTSTR szValue)
{
if (szValue)
m_strAttrs.AppendFormat(_T(" %s=\"%s\""), szName, szValue);
else
m_strAttrs.AppendFormat(_T(" %s"), szName);
return TRUE;
}
#pragma warning(push)
#pragma warning(disable : 4793)
// Appends printf-style formatted text as raw attribute content.
void AddFormat(LPCTSTR szFormat, ...)
{
va_list args;
va_start(args, szFormat);
CString strTmp;
strTmp.FormatV(szFormat, args);
va_end(args);
m_strAttrs += _T(" ");
m_strAttrs += strTmp;
}
#pragma warning(pop)
// Replaces the whole attribute string, ensuring a leading space.
void Set(LPCTSTR szAttrs)
{
if (szAttrs)
{
m_strAttrs.Empty();
#ifndef UNICODE
if (!isspace(static_cast<unsigned char>(szAttrs[0])))
#else
if (!iswspace(szAttrs[0]))
#endif
m_strAttrs = _T(" ");
m_strAttrs += szAttrs;
}
}
operator LPCTSTR()
{
return m_strAttrs;
}
};
// Adapts an IWriteStream to the IStream interface: only Write is
// functional; all positioning/locking methods return E_NOTIMPL.
// Intended for stack/member use -- AddRef/Release are no-ops.
class CStreamOnWriteStream : public IStream
{
public:
IWriteStream *m_pWriteStream;  // target stream (not owned); set via Init
CStreamOnWriteStream()
{
m_pWriteStream = NULL;
}
void Init(IWriteStream *pWriteStream)
{
m_pWriteStream = pWriteStream;
}
// IUnknown methods
STDMETHOD(QueryInterface)(REFIID riid, void **ppv)
{
if (!ppv)
return E_POINTER;
*ppv = NULL;
if (IsEqualGUID(riid, IID_IUnknown) ||
IsEqualGUID(riid, IID_IStream) ||
IsEqualGUID(riid, IID_ISequentialStream))
{
*ppv = (IStream *) this;
}
if (!*ppv)
return E_NOINTERFACE;
return S_OK;
}
// Lifetime is managed by the embedding object; refcounting is a no-op.
ULONG __stdcall AddRef()
{
return 1;
}
ULONG __stdcall Release()
{
return 1;
}
// ISequentialStream methods
HRESULT STDMETHODCALLTYPE Read(void * /*pDest*/, ULONG /*dwMaxLen*/, ULONG * /*pdwRead*/)
{
return E_NOTIMPL;
}
// Forwards to the wrapped IWriteStream; maps any failure to the
// standard structured-storage write error.
HRESULT STDMETHODCALLTYPE Write(const void *pv, ULONG cb, ULONG *pcbWritten)
{
ATLASSUME(m_pWriteStream);
HRESULT hr = m_pWriteStream->WriteStream((const char *) pv, cb, pcbWritten);
return (hr==S_OK) ? S_OK : STG_E_WRITEFAULT;
}
// IStream methods -- write-only adapter, so none are implemented.
HRESULT STDMETHODCALLTYPE Seek(LARGE_INTEGER /*dlibMove*/, DWORD /*dwOrigin*/, ULARGE_INTEGER * /*plibNewPosition*/)
{
return E_NOTIMPL;
}
HRESULT STDMETHODCALLTYPE SetSize(ULARGE_INTEGER /*libNewSize*/)
{
return E_NOTIMPL;
}
HRESULT STDMETHODCALLTYPE CopyTo(IStream * /*pstm*/, ULARGE_INTEGER /*cb*/, ULARGE_INTEGER * /*pcbRead*/, ULARGE_INTEGER * /*pcbWritten*/)
{
return E_NOTIMPL;
}
HRESULT STDMETHODCALLTYPE Commit(DWORD /*grfCommitFlags*/)
{
return E_NOTIMPL;
}
HRESULT STDMETHODCALLTYPE Revert(void)
{
return E_NOTIMPL;
}
HRESULT STDMETHODCALLTYPE LockRegion(ULARGE_INTEGER /*libOffset*/, ULARGE_INTEGER /*cb*/, DWORD /*dwLockType*/)
{
return E_NOTIMPL;
}
HRESULT STDMETHODCALLTYPE UnlockRegion(ULARGE_INTEGER /*libOffset*/, ULARGE_INTEGER /*cb*/, DWORD /*dwLockType*/)
{
return E_NOTIMPL;
}
HRESULT STDMETHODCALLTYPE Stat(STATSTG * /*pstatstg*/, DWORD /*grfStatFlag*/)
{
return E_NOTIMPL;
}
HRESULT STDMETHODCALLTYPE Clone(IStream ** /*ppstm*/)
{
return E_NOTIMPL;
}
};
// Writes HTML tags and text to an IStream (or IWriteStream via the
// embedded adapter), handling ANSI/Unicode conversion and optional
// CRLF insertion around block-level tags.
class CStreamFormatter
{
protected:
CStreamOnWriteStream m_sows;   // adapter used by Initialize(IWriteStream*)
IStream *m_pStream;            // output target (not owned)
BOOL m_bAddCRLF;               // emit CRLF around TAGF_BLOCK tags
BOOL m_bEmitUnicode;           // write UTF-16 instead of converted ANSI
UINT m_nConversionCodepage;    // codepage for ANSI<->Unicode conversion
public:
// Defaults: no stream attached, CRLF insertion on, ANSI output using
// the process conversion codepage.
CStreamFormatter()
{
m_pStream = NULL;
m_bAddCRLF = TRUE;
m_bEmitUnicode = FALSE;
m_nConversionCodepage = _AtlGetConversionACP();
}
// Attaches an IStream output target. The stream is not AddRef'ed.
void Initialize(IStream *pStream, BOOL bAddCRLF=TRUE)
{
m_pStream = pStream;
m_bAddCRLF = bAddCRLF;
}
// Attaches an IWriteStream target through the member adapter.
void Initialize(IWriteStream *pWriteStream, BOOL bAddCRLF=TRUE)
{
m_bAddCRLF = bAddCRLF;
m_sows.Init(pWriteStream);
m_pStream = &m_sows;
}
// Selects UTF-16 output (TRUE) or converted ANSI output (FALSE).
void EmitUnicode(BOOL bEmitUnicode)
{
m_bEmitUnicode = bEmitUnicode;
}
// Sets the codepage used for ANSI<->Unicode conversion in WriteRaw.
void SetConversionCodepage(UINT nConversionCodepage)
{
m_nConversionCodepage = nConversionCodepage;
}
// Enables/disables CRLF insertion around block-level tags.
void AddCRLF(bool bNewVal)
{
m_bAddCRLF = bNewVal;
}
// Writes szString to the stream without any tag processing,
// converting between ANSI and Unicode as dictated by m_bEmitUnicode
// and the build's character set. nCount is the character count, or
// -1 to use the string length.
// NOTE: when a conversion is performed, nCount is recomputed from the
// converted string and the caller's count is ignored.
HRESULT WriteRaw(LPCTSTR szString, int nCount=-1)
{
ATLENSURE_RETURN(szString != NULL);
if (!m_pStream)
return E_FAIL;
if (m_bEmitUnicode)
{
#ifdef _UNICODE
LPCWSTR sz = szString;
if (nCount == -1)
nCount = (int) wcslen(szString);
#else
CA2W sz(szString, m_nConversionCodepage);
nCount = (int) wcslen(sz);
#endif
DWORD dwWritten;
return m_pStream->Write(sz, (DWORD) nCount*sizeof(WCHAR), &dwWritten);
}
else
{
#ifdef _UNICODE
CW2A sz(szString, m_nConversionCodepage);
nCount = (int) strlen(sz);
#else
LPCSTR sz = szString;
if (nCount == -1)
nCount = (int) strlen(szString);
#endif
DWORD dwWritten;
return m_pStream->Write(sz, (DWORD) nCount, &dwWritten);
}
}
HRESULT StartTag(int nTagIndex, LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
{
if (nTagIndex < 0 || nTagIndex >= ATL_HTML_TAG_LAST)
return E_INVALIDARG;
if (m_bAddCRLF && (s_tags[nTagIndex].uFlags & TAGF_BLOCK))
WriteRaw(_T("\r\n"));
HRESULT hr = StartTag(s_tags[nTagIndex].szTagName, szContent, szAttrs);
if (FAILED(hr))
return hr;
if (m_bAddCRLF && (s_tags[nTagIndex].uFlags & TAGF_BLOCK))
WriteRaw(_T("\r\n"));
return S_OK;
}
HRESULT StartTag(LPCTSTR szTag, LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
{
HRESULT hr;
hr = WriteRaw(_T("<"));
if (FAILED(hr))
return hr;
hr = WriteRaw(szTag);
if (FAILED(hr))
return hr;
hr = WriteAttributes(szAttrs);
if (FAILED(hr))
return hr;
hr = WriteRaw(_T(">"));
if (FAILED(hr))
return hr;
if (szContent && *szContent)
{
WriteRaw(szContent);
EndTag(szTag);
}
return S_OK;
}
HRESULT EndTag(int nTagIndex)
{
if (nTagIndex < 0 || nTagIndex >= ATL_HTML_TAG_LAST)
return E_INVALIDARG;
if (m_bAddCRLF && (s_tags[nTagIndex].uFlags & TAGF_BLOCK))
WriteRaw(_T("\r\n"));
HRESULT hr = EndTag(s_tags[nTagIndex].szTagName);
if (FAILED(hr))
return hr;
if (m_bAddCRLF && (s_tags[nTagIndex].uFlags & TAGF_BLOCK))
WriteRaw(_T("\r\n"));
return S_OK;
}
HRESULT EndTag(LPCTSTR szTag)
{
HRESULT hr = WriteRaw(_T("</"));
if (FAILED(hr))
return hr;
hr = WriteRaw(szTag);
if (FAILED(hr))
return hr;
return WriteRaw(_T(">"));
}
HRESULT WriteAttributes(LPCTSTR szAttrs)
{
if (szAttrs && szAttrs[0])
{
#ifndef UNICODE
if (!isspace(static_cast<unsigned char>(szAttrs[0])))
#else
if (!iswspace(szAttrs[0]))
#endif
WriteRaw(_T(" "));
return WriteRaw(szAttrs);
}
return S_OK;
}
#ifndef _WIN32_WCE
#pragma warning(push)
#pragma warning(disable : 4793)
HRESULT WriteFormatted(LPCTSTR szFormat, ...)
{
ATLASSERT(szFormat != NULL);
if (!m_pStream)
return E_FAIL;
va_list args;
va_start(args, szFormat);
TCHAR buffFixed[1024];
CTempBuffer<TCHAR> buffHeap;
TCHAR *szTemp = buffFixed;
int nCount = _vstprintf_s((LPTSTR)szTemp, _countof(buffFixed), szFormat, args);
if (nCount < 0)
{
// we'll have to dynamically allocate the buffer
nCount = _vsctprintf(szFormat, args);
szTemp = NULL;
ATLTRY(szTemp = buffHeap.Allocate(nCount + 1));
if (!szTemp)
return E_OUTOFMEMORY;
nCount = _vstprintf_s(szTemp, nCount+1, szFormat, args);
}
va_end(args);
if (nCount > 0)
return WriteRaw(szTemp, (DWORD) nCount);
return E_UNEXPECTED;
}
#pragma warning(pop)
#endif // _WIN32_WCE
};
// Fixed-capacity stack of nMax TData values stored inline.
// m_nTop is the index of the current top element; -1 means empty.
template <typename TData, int nMax=64>
class CSimpleStack
{
public:
    int m_nTop;
    TData m_Data[nMax];

    CSimpleStack() : m_nTop(-1)
    {
    }

    // True when no elements have been pushed.
    bool IsEmpty()
    {
        return (m_nTop == -1);
    }

    // Copies *pData onto the stack; fails (returns false) when full.
    bool Push(const TData *pData)
    {
        if (m_nTop >= nMax - 1)
            return false;
        m_Data[++m_nTop] = *pData;
        return true;
    }

    // Copies the top element into *pData and removes it; fails when empty.
    bool Pop(TData *pData)
    {
        if (m_nTop < 0)
            return false;
        *pData = m_Data[m_nTop--];
        return true;
    }
};
// Color/margin scheme applied by CHtmlGenBase when emitting <body>,
// <table>, <tr> and <td> tags.  Empty strings / -1 margins mean
// "no default; omit the attribute".
struct HTML_SCHEME
{
    CString strBgColor;       // <body bgcolor>
    CString strLinkColor;     // <body link>
    CString strVLinkColor;    // <body vlink>
    CString strALinkColor;    // <body alink>
    CString strBackground;    // <body background>
    int nTopMargin;           // <body topmargin>; -1 = unset
    int nLeftMargin;          // <body leftmargin>; -1 = unset
    CString strTdBgColor;     // <td bgColor>
    CString strTableBgColor;  // <table bgcolor>
    CString strTrBgColor;     // <tr bgcolor>
    HTML_SCHEME()
    {
        nTopMargin = -1;
        nLeftMargin = -1;
    }
};
// CRTP helper that generates HTML through the CStreamFormatter it
// derives from.  T is the most-derived class; every emit goes through
// GetOuter() so a derived class can override individual tag helpers.
// Most methods come in xxx()/xxxEnd() pairs that emit <tag ...> and
// </tag>; szAttrs is always an optional raw attribute string appended
// to the opening tag.  Table emission tracks open rows/cells so that
// tr()/td()/tableEnd() can auto-close what the caller left open.
template <class T>
class CHtmlGenBase : public CStreamFormatter
{
public:
    // Downcast to the most-derived class (CRTP).
    T* GetOuter()
    {
        return static_cast<T*>(this);
    }
    enum ATL_HTML_FORM_METHOD { ATL_HTML_FORM_METHOD_NONE=0, ATL_HTML_FORM_METHOD_GET, ATL_HTML_FORM_METHOD_POST, ATL_HTML_FORM_METHOD_MULTIPART };
    CHtmlGenBase()
    {
        m_nWidthPercent = -1;
        m_nHeightPercent = -1;
        m_nFormMethod = ATL_HTML_FORM_METHOD_NONE;
        m_pScheme = NULL;
    }
    // Installs a scheme supplying default colors/margins; may be NULL.
    // The scheme object is not owned and must outlive this generator.
    void SetScheme(HTML_SCHEME *pScheme)
    {
        m_pScheme = pScheme;
    }
    // Emits <body>, merging explicit arguments with scheme defaults
    // (explicit arguments win).
    HRESULT body(LPCTSTR szBgColor=NULL, LPCTSTR szBackground=NULL, LPCTSTR szTopMargin=NULL, LPCTSTR szLeftMargin=NULL,
        LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (szBgColor && *szBgColor)
            Attrs.Add(_T("bgColor"), szBgColor);
        else if (m_pScheme && m_pScheme->strBgColor.GetLength())
            Attrs.Add(_T("bgColor"), m_pScheme->strBgColor);
        if (szBackground && *szBackground)
            Attrs.Add(_T("background"), szBackground);
        else if (m_pScheme && m_pScheme->strBackground.GetLength())
            Attrs.Add(_T("background"), m_pScheme->strBackground);
        if (m_pScheme && m_pScheme->strLinkColor.GetLength())
            Attrs.Add(_T("link"), m_pScheme->strLinkColor);
        if (m_pScheme && m_pScheme->strALinkColor.GetLength())
            Attrs.Add(_T("alink"), m_pScheme->strALinkColor);
        if (m_pScheme && m_pScheme->strVLinkColor.GetLength())
            Attrs.Add(_T("vlink"), m_pScheme->strVLinkColor);
        if (szTopMargin && *szTopMargin)
            Attrs.Add(_T("topmargin"), szTopMargin);
        else if (m_pScheme && m_pScheme->nTopMargin != -1)
            Attrs.AddFormat(_T("topmargin=\"%d\""), m_pScheme->nTopMargin);
        if (szLeftMargin && *szLeftMargin)
            Attrs.Add(_T("leftmargin"), szLeftMargin);
        else if (m_pScheme && m_pScheme->nLeftMargin != -1)
            Attrs.AddFormat(_T("leftmargin=\"%d\""), m_pScheme->nLeftMargin);
        return GetOuter()->StartTag(ATL_HTML_TAG_BODY, NULL, Attrs);
    }
    HRESULT bodyEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_BODY);
    }
    // Emits an anchor.  When m_strState is non-empty it is appended to
    // the href as a query string (after '?' or '&' as appropriate).
    HRESULT a(LPCTSTR szHref, LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (m_strState.GetLength()==0)
        {
            if (szHref && *szHref)
                Attrs.Add(_T("href"), szHref);
            return GetOuter()->StartTag(ATL_HTML_TAG_A, szContent, Attrs);
        }
        const TCHAR *szQuestion = NULL;
        if(szHref)
            szQuestion = _tcschr(szHref, '?');
        CString strHref = szHref;
        // Fix: use _T() literals — narrow "?"/"&" fail to compile in
        // _UNICODE builds (CString::Append takes LPCTSTR).
        if (!szQuestion)
            strHref.Append(_T("?"));
        else
            strHref.Append(_T("&"));
        strHref += m_strState;
        if (szHref && *szHref)
            Attrs.Add(_T("href"), strHref);
        return GetOuter()->StartTag(ATL_HTML_TAG_A, szContent, Attrs);
    }
    HRESULT aEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_A);
    }
    // --- Simple text-level elements: each xxx()/xxxEnd() pair emits the
    // --- corresponding <tag> / </tag> with optional content/attributes.
    HRESULT b(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_B, szContent, szAttrs);
    }
    HRESULT bEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_B);
    }
    HRESULT i(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_I, szContent, szAttrs);
    }
    HRESULT iEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_I);
    }
    HRESULT u(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_U, szContent, szAttrs);
    }
    HRESULT uEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_U);
    }
    HRESULT font(LPCTSTR szFace, LPCTSTR szSize=NULL, LPCTSTR szColor=NULL, LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (szFace && *szFace)
            Attrs.Add(_T("face"), szFace);
        if (szSize && *szSize)
            Attrs.Add(_T("size"), szSize);
        if (szColor && *szColor)
            Attrs.Add(_T("color"), szColor);
        return GetOuter()->StartTag(ATL_HTML_TAG_FONT, NULL, Attrs);
    }
    // Emits <font color="#rrggbb"> from a COLORREF.
    HRESULT font(COLORREF clrColor, LPCTSTR szAttrs=NULL)
    {
        TCHAR szColor[8];
        _stprintf_s(szColor, _countof(szColor), _T("#%02x%02x%02x"), GetRValue(clrColor),
            GetGValue(clrColor), GetBValue(clrColor));
        return GetOuter()->font(NULL, NULL, szColor, szAttrs);
    }
    HRESULT fontEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_FONT);
    }
    HRESULT img(LPCTSTR szSrc, LPCTSTR szAttrs=NULL)
    {
        ATLASSERT(szSrc && *szSrc);
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        Attrs.Add(_T("src"), szSrc);
        return GetOuter()->StartTag(ATL_HTML_TAG_IMG, NULL, Attrs);
    }
    HRESULT br(LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_BR, NULL, szAttrs);
    }
    HRESULT hr(LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_HR, NULL, szAttrs);
    }
    HRESULT div(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_DIV, szContent, szAttrs);
    }
    HRESULT divEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_DIV);
    }
    HRESULT blockquote(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_BLOCKQUOTE, szContent, szAttrs);
    }
    HRESULT blockquoteEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_BLOCKQUOTE);
    }
    HRESULT address(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_ADDRESS, szContent, szAttrs);
    }
    HRESULT addressEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_ADDRESS);
    }
    HRESULT p(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_P, szContent, szAttrs);
    }
    HRESULT pEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_P);
    }
    // Emits <h1>..<h6>; nLevel outside [1,6] is rejected.
    HRESULT h(int nLevel=1, LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        if (nLevel < 1 || nLevel > 6)
            return E_INVALIDARG;
        return GetOuter()->StartTag(ATL_HTML_TAG_H1+nLevel-1, szContent, szAttrs);
    }
    HRESULT hEnd(int nLevel=1)
    {
        if (nLevel < 1 || nLevel > 6)
            return E_INVALIDARG;
        return GetOuter()->EndTag(ATL_HTML_TAG_H1+nLevel-1);
    }
    HRESULT pre(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_PRE, szContent, szAttrs);
    }
    HRESULT preEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_PRE);
    }
    HRESULT q(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_Q, szContent, szAttrs);
    }
    HRESULT qEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_Q);
    }
    HRESULT sub(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_SUB, szContent, szAttrs);
    }
    HRESULT subEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_SUB);
    }
    HRESULT sup(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_SUP, szContent, szAttrs);
    }
    HRESULT supEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_SUP);
    }
    HRESULT ins(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_INS, szContent, szAttrs);
    }
    HRESULT insEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_INS);
    }
    HRESULT del(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_DEL, szContent, szAttrs);
    }
    HRESULT delEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_DEL);
    }
    HRESULT em(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_EM, szContent, szAttrs);
    }
    HRESULT emEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_EM);
    }
    HRESULT strong(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_STRONG, szContent, szAttrs);
    }
    HRESULT strongEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_STRONG);
    }
    HRESULT dfn(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_DFN, szContent, szAttrs);
    }
    HRESULT dfnEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_DFN);
    }
    HRESULT code(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_CODE, szContent, szAttrs);
    }
    HRESULT codeEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_CODE);
    }
    HRESULT samp(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_SAMP, szContent, szAttrs);
    }
    HRESULT sampEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_SAMP);
    }
    HRESULT kbd(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_KBD, szContent, szAttrs);
    }
    HRESULT kbdEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_KBD);
    }
    HRESULT var(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_VAR, szContent, szAttrs);
    }
    HRESULT varEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_VAR);
    }
    HRESULT cite(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_CITE, szContent, szAttrs);
    }
    HRESULT citeEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_CITE);
    }
    HRESULT abbr(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_ABBR, szContent, szAttrs);
    }
    HRESULT abbrEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_ABBR);
    }
    HRESULT acronym(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_ACRONYM, szContent, szAttrs);
    }
    HRESULT acronymEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_ACRONYM);
    }
    // --- Lists.
    HRESULT ol(LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_OL, NULL, szAttrs);
    }
    HRESULT ul(LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_UL, NULL, szAttrs);
    }
    HRESULT olEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_OL);
    }
    HRESULT ulEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_UL);
    }
    HRESULT li(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_LI, szContent, szAttrs);
    }
    HRESULT liEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_LI);
    }
    HRESULT dl(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_DL, szContent, szAttrs);
    }
    HRESULT dlEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_DL);
    }
    HRESULT dt(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_DT, szContent, szAttrs);
    }
    HRESULT dtEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_DT);
    }
    HRESULT dd(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_DD, szContent, szAttrs);
    }
    HRESULT ddEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_DD);
    }
    // --- Tables.  Open rows/cells are tracked in m_tableState and saved
    // --- on m_RowStack across nested tables.
    // Sets width/height percentages consumed by the next table() call.
    void SetSizePercent(int nWidth, int nHeight)
    {
        m_nWidthPercent = nWidth;
        m_nHeightPercent = nHeight;
    }
    HRESULT table(int nBorderWidth=0, LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        m_RowStack.Push(&m_tableState);
        m_tableState.Clear();
        Attrs.AddFormat(_T("border=\"%d\""), nBorderWidth);
        if (m_nWidthPercent != -1)
            Attrs.AddFormat(_T("width=\"%d%%\""), m_nWidthPercent);
        if (m_nHeightPercent != -1)
            Attrs.AddFormat(_T("height=\"%d%%\""), m_nHeightPercent);
        if (m_pScheme && m_pScheme->strTableBgColor.GetLength())
            Attrs.Add(_T("bgcolor"), m_pScheme->strTableBgColor);
        m_nWidthPercent = -1;
        m_nHeightPercent = -1;
        return GetOuter()->StartTag(ATL_HTML_TAG_TABLE, NULL, Attrs);
    }
    // Closes any open row, restores the outer table's state, and emits
    // </table>.
    HRESULT tableEnd()
    {
        if (m_tableState.m_bRowOpen)
            GetOuter()->trEnd();
        m_RowStack.Pop(&m_tableState);
        return GetOuter()->EndTag(ATL_HTML_TAG_TABLE);
    }
    // Starts a row, implicitly opening a table and/or closing the
    // previous row if needed.
    HRESULT tr(LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (m_RowStack.IsEmpty())
            GetOuter()->table();
        if (m_tableState.m_bRowOpen)
            GetOuter()->trEnd();
        m_tableState.m_bRowOpen = true;
        if (m_pScheme && m_pScheme->strTrBgColor.GetLength())
            Attrs.Add(_T("bgcolor"), m_pScheme->strTrBgColor);
        return GetOuter()->StartTag(ATL_HTML_TAG_TR, NULL, Attrs);
    }
    // Starts a cell, implicitly opening a row if needed.  When content
    // is supplied StartTag also closes the cell, so it is not left open.
    HRESULT td(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (!m_tableState.m_bRowOpen)
            GetOuter()->tr();
        m_tableState.m_bDataOpen = true;
        if (m_pScheme && m_pScheme->strTdBgColor.GetLength())
            Attrs.Add(_T("bgColor"), m_pScheme->strTdBgColor);
        HRESULT hr = GetOuter()->StartTag(ATL_HTML_TAG_TD, szContent, Attrs);
        if (FAILED(hr))
            return hr;
        if (szContent)
            m_tableState.m_bDataOpen = false;
        return S_OK;
    }
    HRESULT tdEnd()
    {
        if (!m_tableState.m_bDataOpen)
            return S_OK;
        m_tableState.m_bDataOpen = false;
        return GetOuter()->EndTag(ATL_HTML_TAG_TD);
    }
    HRESULT trEnd()
    {
        if (!m_tableState.m_bRowOpen)
            return S_OK;
        if (m_tableState.m_bDataOpen)
            GetOuter()->tdEnd();
        m_tableState.m_bRowOpen = false;
        return GetOuter()->EndTag(ATL_HTML_TAG_TR);
    }
    // --- Forms.
    HRESULT form(LPCTSTR szAction, ATL_HTML_FORM_METHOD nMethod=ATL_HTML_FORM_METHOD_GET, LPCTSTR szAttrs=NULL)
    {
        static const LPCTSTR s_szFormMethods[] = { NULL, _T("get"), _T("post"), _T("multipart-www-url-encoded") };
        return GetOuter()->form(szAction, s_szFormMethods[nMethod], szAttrs);
    }
    HRESULT form(LPCTSTR szAction, LPCTSTR szMethod, LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (szAction && *szAction)
            Attrs.Add(_T("action"), szAction);
        if (szMethod && *szMethod)
            Attrs.Add(_T("method"), szMethod);
        return GetOuter()->StartTag(ATL_HTML_TAG_FORM, NULL, Attrs);
    }
    HRESULT input(LPCTSTR szType, LPCTSTR szName, LPCTSTR szValue, LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (szType && *szType)
            Attrs.Add(_T("type"), szType);
        if (szName && *szName)
            Attrs.Add(_T("name"), szName);
        if (szValue && *szValue)
            Attrs.Add(_T("value"), szValue);
        return GetOuter()->StartTag(ATL_HTML_TAG_INPUT, NULL, Attrs);
    }
    HRESULT submit(LPCTSTR szValue=NULL, LPCTSTR szName=NULL, LPCTSTR szAttrs=NULL)
    {
        return input(_T("submit"), szName, szValue, szAttrs);
    }
    HRESULT textarea(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_TEXTAREA, szContent, szAttrs);
    }
    HRESULT textareaEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_TEXTAREA);
    }
    HRESULT formEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_FORM);
    }
    HRESULT select(LPCTSTR szName, BOOL bMultiple=FALSE, LPCTSTR szAttrs=NULL)
    {
        ATLASSERT(szName && *szName);
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        Attrs.Add(_T("name"), szName);
        if (bMultiple)
            Attrs.Add(_T("multiple"), NULL);
        return GetOuter()->StartTag(ATL_HTML_TAG_SELECT, NULL, Attrs);
    }
    HRESULT option(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_OPTION, szContent, szAttrs);
    }
    HRESULT optionEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_OPTION);
    }
    HRESULT selectEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_SELECT);
    }
    // --- Document structure.
    HRESULT head(LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_HEAD, NULL, szAttrs);
    }
    HRESULT headEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_HEAD);
    }
    HRESULT html(LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_HTML, NULL, szAttrs);
    }
    HRESULT htmlEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_HTML);
    }
    HRESULT map(LPCTSTR szName, LPCTSTR szAttrs=NULL)
    {
        ATLASSERT(szName && *szName);
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        Attrs.Add(_T("name"), szName);
        return GetOuter()->StartTag(ATL_HTML_TAG_MAP, NULL, Attrs);
    }
    HRESULT mapEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_MAP);
    }
    HRESULT area(LPCTSTR szAlt, LPCTSTR szHref=NULL, LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (szAlt && *szAlt)
            Attrs.Add(_T("alt"), szAlt);
        if (szHref && *szHref)
            Attrs.Add(_T("href"), szHref);
        return GetOuter()->StartTag(ATL_HTML_TAG_AREA, NULL, Attrs);
    }
    HRESULT base(LPCTSTR szHref, LPCTSTR szAttrs=NULL)
    {
        ATLASSERT(szHref && *szHref);
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        Attrs.Add(_T("href"), szHref);
        return GetOuter()->StartTag(ATL_HTML_TAG_BASE, NULL, Attrs);
    }
    HRESULT bdo(LPCTSTR szDir, LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        ATLASSERT(szDir&& *szDir);
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        Attrs.Add(_T("dir"), szDir);
        return GetOuter()->StartTag(ATL_HTML_TAG_BDO, szContent, Attrs);
    }
    HRESULT bdoEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_BDO);
    }
    HRESULT big(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_BIG, szContent, szAttrs);
    }
    HRESULT bigEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_BIG);
    }
    HRESULT button(LPCTSTR szName=NULL, LPCTSTR szValue=NULL, LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (szName && *szName)
            Attrs.Add(_T("name"), szName);
        if (szValue && *szValue)
            Attrs.Add(_T("value"), szValue);
        return GetOuter()->StartTag(ATL_HTML_TAG_BUTTON, szContent, Attrs);
    }
    HRESULT buttonEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_BUTTON);
    }
    HRESULT iframe(LPCTSTR szSrc=NULL, LPCTSTR szWidth=NULL, LPCTSTR szHeight=NULL, LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (szSrc && *szSrc)
            Attrs.Add(_T("src"), szSrc);
        if (szWidth && *szWidth)
            Attrs.Add(_T("width"), szWidth);
        if (szHeight && *szHeight)
            Attrs.Add(_T("height"), szHeight);
        return GetOuter()->StartTag(ATL_HTML_TAG_IFRAME, NULL, Attrs);
    }
    HRESULT iframeEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_IFRAME);
    }
    HRESULT label(LPCTSTR szFor=NULL, LPCTSTR szAccessKey=NULL, LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (szFor && *szFor)
            Attrs.Add(_T("for"), szFor);
        if (szAccessKey && *szAccessKey)
            Attrs.Add(_T("accesskey"), szAccessKey);
        return GetOuter()->StartTag(ATL_HTML_TAG_LABEL, szContent, Attrs);
    }
    HRESULT labelEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_LABEL);
    }
    HRESULT link(LPCTSTR szRel=NULL, LPCTSTR szHref=NULL, LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (szRel && *szRel)
            Attrs.Add(_T("rel"), szRel);
        if (szHref && *szHref)
            Attrs.Add(_T("href"), szHref);
        return GetOuter()->StartTag(ATL_HTML_TAG_LINK, NULL, Attrs);
    }
    HRESULT meta(LPCTSTR szName=NULL, LPCTSTR szContent=NULL, LPCTSTR szHttpEquiv=NULL, LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (szName && *szName)
            Attrs.Add(_T("name"), szName);
        if (szContent && *szContent)
            Attrs.Add(_T("content"), szContent);
        if (szHttpEquiv && *szHttpEquiv)
            Attrs.Add(_T("http-equiv"), szHttpEquiv);
        return GetOuter()->StartTag(ATL_HTML_TAG_META, NULL, Attrs);
    }
    HRESULT noframes(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_NOFRAMES, szContent, szAttrs);
    }
    HRESULT noframesEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_NOFRAMES);
    }
    HRESULT noscript(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_NOSCRIPT, szContent, szAttrs);
    }
    HRESULT noscriptEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_NOSCRIPT);
    }
    HRESULT col(int nSpan=1, LPCTSTR szWidth=NULL, LPCTSTR szHeight=NULL, LPCTSTR szVAlign=NULL,
        LPCTSTR szHAlign=NULL, LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (nSpan != 1)
            // Fix: AddFormat takes a single format string (cf. the
            // border= usage in table()); the old two-string call never
            // emitted the span value.
            Attrs.AddFormat(_T("span=\"%d\""), nSpan);
        if (szWidth && *szWidth)
            Attrs.Add(_T("width"), szWidth);
        if (szHeight && *szHeight)
            Attrs.Add(_T("height"), szHeight);
        if (szVAlign && *szVAlign)
            Attrs.Add(_T("valign"), szVAlign);
        if (szHAlign && *szHAlign)
            Attrs.Add(_T("align"), szHAlign);
        return GetOuter()->StartTag(ATL_HTML_TAG_COL, NULL, Attrs);
    }
    HRESULT colgroup(int nSpan=1, LPCTSTR szWidth=NULL, LPCTSTR szHeight=NULL, LPCTSTR szVAlign=NULL,
        LPCTSTR szHAlign=NULL, LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (nSpan != 1)
            // Fix: single format string, as in col() above.
            Attrs.AddFormat(_T("span=\"%d\""), nSpan);
        if (szWidth && *szWidth)
            Attrs.Add(_T("width"), szWidth);
        if (szHeight && *szHeight)
            Attrs.Add(_T("height"), szHeight);
        if (szVAlign && *szVAlign)
            Attrs.Add(_T("valign"), szVAlign);
        if (szHAlign && *szHAlign)
            Attrs.Add(_T("align"), szHAlign);
        // Fix: was ATL_HTML_TAG_COL, which emitted <col> so the later
        // colgroupEnd() produced a mismatched </colgroup>.
        return GetOuter()->StartTag(ATL_HTML_TAG_COLGROUP, NULL, Attrs);
    }
    HRESULT colgroupEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_COLGROUP);
    }
    HRESULT fieldset(LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_FIELDSET, NULL, szAttrs);
    }
    HRESULT fieldsetEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_FIELDSET);
    }
    HRESULT legend(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_LEGEND, szContent, szAttrs);
    }
    HRESULT legendEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_LEGEND);
    }
    HRESULT tbody(LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_TBODY, NULL, szAttrs);
    }
    HRESULT tbodyEnd()
    {
        // Fix: was StartTag, which emitted a second <tbody> instead of
        // closing the section.
        return GetOuter()->EndTag(ATL_HTML_TAG_TBODY);
    }
    HRESULT tfoot(LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_TFOOT, NULL, szAttrs);
    }
    HRESULT tfootEnd()
    {
        // Fix: was StartTag, which emitted a second <tfoot> instead of
        // closing the section.
        return GetOuter()->EndTag(ATL_HTML_TAG_TFOOT);
    }
    // Header cell; opens a row first if none is open.
    HRESULT th(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        if (!m_tableState.m_bRowOpen)
            GetOuter()->tr();
        m_tableState.m_bDataOpen = true;
        return GetOuter()->StartTag(ATL_HTML_TAG_TH, szContent, szAttrs);
    }
    HRESULT thEnd()
    {
        ATLASSUME(m_tableState.m_bDataOpen);
        m_tableState.m_bDataOpen = false;
        return GetOuter()->EndTag(ATL_HTML_TAG_TH);
    }
    HRESULT title(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_TITLE, szContent, szAttrs);
    }
    HRESULT titleEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_TITLE);
    }
    HRESULT tt(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_TT, szContent, szAttrs);
    }
    HRESULT ttEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_TT);
    }
    // unfortunately, we can't use small since it is defined as char
    // in rpcndr.h!
    HRESULT _small(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_SMALL, szContent, szAttrs);
    }
    HRESULT _smallEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_SMALL);
    }
    HRESULT span(LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        return GetOuter()->StartTag(ATL_HTML_TAG_SPAN, szContent, szAttrs);
    }
    HRESULT spanEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_SPAN);
    }
    HRESULT object(LPCTSTR szClassId, LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        if (szClassId && *szClassId)
            Attrs.Add(_T("classid"), szClassId);
        return GetOuter()->StartTag(ATL_HTML_TAG_OBJECT, szContent, Attrs);
    }
    // Emits <object classid="clsid:..."> from a CLSID.
    HRESULT object(REFCLSID rclsid, LPCTSTR szContent=NULL, LPCTSTR szAttrs=NULL)
    {
        USES_CONVERSION_EX;
        OLECHAR szClsid[64];
        CString strClassId;
        int i = StringFromGUID2(rclsid, szClsid, 64);
        if (!i)
            return E_FAIL;
        szClsid[i-2] = 0; // don't want curly braces
        strClassId.Format(_T("clsid:%s"), OLE2T_EX_DEF(szClsid+1));
        return object(strClassId, szContent, szAttrs);
    }
    HRESULT objectEnd()
    {
        return GetOuter()->EndTag(ATL_HTML_TAG_OBJECT);
    }
    HRESULT param(LPCTSTR szName, LPCTSTR szValue, LPCTSTR szAttrs=NULL)
    {
        ATLASSERT(szName && *szName);
        AtlHtmlAttrs Attrs;
        Attrs.Set(szAttrs);
        Attrs.Add(_T("name"), szName);
        if (szValue && *szValue)
            Attrs.Add(_T("value"), szValue);
        return GetOuter()->StartTag(ATL_HTML_TAG_PARAM, NULL, Attrs);
    }
private:
    CString m_strState;         // extra query string appended by a()
    HTML_SCHEME *m_pScheme;     // optional defaults; not owned
    // Tracks whether a <tr> / <td> is currently open in the innermost table.
    struct TableState
    {
        TableState() : m_bRowOpen(false), m_bDataOpen(false)
        {
        }
        void Clear()
        {
            m_bRowOpen = false;
            m_bDataOpen = false;
        }
        bool m_bRowOpen;
        bool m_bDataOpen;
    };
    ATL_HTML_FORM_METHOD m_nFormMethod;
    TableState m_tableState;            // state of the innermost table
    CSimpleStack<TableState> m_RowStack; // saved states of enclosing tables
    int m_nWidthPercent;                // pending width for next table(); -1 = none
    int m_nHeightPercent;               // pending height for next table(); -1 = none
};
// Ready-to-use HTML generator: closes the CRTP cycle by instantiating
// CHtmlGenBase with itself and adds nothing of its own.
class CHtmlGen : public CHtmlGenBase<CHtmlGen>
{
public:
};
} // namespace ATL
#pragma pack(pop)
#endif // __ATLHTML_H__
<|start_filename|>source/SProxy/XSDElement.h<|end_filename|>
//
// XSDElement.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "XMLElement.h"
#include "XSDMappingTable.h"
#include "Emit.h"
// Kinds of XML Schema (XSD) elements recognized by the parser.
// Values are stable identifiers: the first three are explicit and the
// rest follow sequentially — do not reorder.
enum XSDELEMENT_TYPE
{
    XSD_UNK = 0,  // not yet classified
    XSD_ERR = 1,  // classification failed
    //
    // Permits the elements in the group to appear (or not appear)
    // in any order in the containing element.
    //
    XSD_ALL = 2,
    //
    // Enables any element from the specified namespace(s)
    // to appear in the containing complexType, sequence, or choice element.
    //
    XSD_ANY,
    //
    // Enables any attribute from the specified namespace(s)
    // to appear in the containing complexType element.
    //
    XSD_ANYATTRIBUTE,
    //
    // Defines an annotation.
    //
    XSD_ANNOTATION,
    //
    // Specifies information to be used by applications
    // within an annotation.
    //
    XSD_APPINFO,
    //
    // Declares an attribute.
    //
    XSD_ATTRIBUTE,
    //
    // Groups a set of attribute declarations so that they
    // can be incorporated as a group into complex type definitions.
    //
    XSD_ATTRIBUTEGROUP,
    //
    // Permits one and only one of the elements contained in the group
    // to be present within the containing element.
    //
    XSD_CHOICE,
    //
    // Defines a complex type, which determines the set of attributes
    // and the content of an element.
    //
    XSD_COMPLEXTYPE,
    //
    // Specifies information to be read by or used by humans
    // within an annotation.
    //
    XSD_DOCUMENTATION,
    //
    // Declares an element.
    //
    XSD_ELEMENT,
    //
    // Specifies an XPATH expression that specifies value
    // (or one of the values) used to enforce an identity
    // constraint (unique, key, keyref).
    //
    XSD_FIELD,
    //
    // Groups a set of element declarations so that they can
    // be incorporated as a group into complex type definitions.
    //
    XSD_GROUP,
    //
    // Identifies a namespace whose schema components are referenced
    // by the containing schema.
    //
    XSD_IMPORT,
    //
    // Includes the specified schema document in the targetNamespace
    // of the containing schema.
    //
    XSD_INCLUDE,
    //
    // Specifies that an attribute or element value (or set of values)
    // must be a key within the specified scope. A key must be unique,
    // non-nullable, and always present.
    //
    XSD_KEY,
    //
    // Specifies that an attribute or element value (or set of values)
    // have a correspondence with those of the specified key or unique element.
    //
    XSD_KEYREF,
    //
    // Contains the definition of a schema.
    //
    XSD_SCHEMA,
    //
    // Specifies an XPATH expression that selects a set of elements for an
    // identity constraint (unique, key, keyref).
    //
    XSD_SELECTOR,
    //
    // Requires the elements in the group to appear in the specified sequence
    // within the containing element.
    //
    XSD_SEQUENCE,
    //
    // Defines a simple type, which determines the constraints on and
    // information about the values of attributes or elements with
    // text-only content.
    //
    XSD_SIMPLETYPE,
    //
    // Specifies that an attribute or element value (or set of values)
    // must be unique within the specified scope.
    //
    XSD_UNIQUE,
    XSD_COMPLEXCONTENT,
    XSD_SIMPLECONTENT,
    XSD_RESTRICTION,
    XSD_EXTENSION,
    XSD_UNSUPPORTED   // recognized element the generator does not handle
};
class CSchema;
// Base class for nodes of the parsed XSD tree: an XML element tagged
// with its XSD element kind and a back-pointer to the schema that
// contains it.
class CXSDElement : public CXMLElement
{
private:
    XSDELEMENT_TYPE m_elementType;   // kind of XSD construct this node represents
    CSchema * m_pParentSchema;       // owning schema; not owned by this object
public:
    // Fix: m_pParentSchema was previously left uninitialized, so
    // GetParentSchema() returned garbage until SetParentSchema() was
    // called.  Initialize it to NULL.
    CXSDElement(CXMLElement * pParentElement = NULL, XSDELEMENT_TYPE elementType = XSD_UNK)
        :m_elementType(elementType), m_pParentSchema(NULL)
    {
        SetParentElement(pParentElement);
    }
    inline XSDELEMENT_TYPE GetElementType()
    {
        return m_elementType;
    }
    inline void SetElementType(XSDELEMENT_TYPE elementType)
    {
        m_elementType = elementType;
    }
    // Returns NULL until SetParentSchema() has been called.
    inline CSchema * GetParentSchema()
    {
        return m_pParentSchema;
    }
    inline void SetParentSchema(CSchema * pParentSchema)
    {
        m_pParentSchema = pParentSchema;
    }
};
// XSD Mapping table
extern const __declspec(selectany) CXSDTypeLookup g_xsdLookup;
// Resolves a qualified name to a built-in XSD type.
// Returns S_OK and sets *pXSD when the name is a known type in the XSD
// namespace; returns E_FAIL (and emits IDS_SDL_UNRESOLVED_ELEM) when the
// namespace matches but the name is unknown; returns S_FALSE when the
// URI is not the XSD namespace.  *pXSD is XSDTYPE_ERR on any non-S_OK path.
inline HRESULT GetXSDType(const CStringW& strUri, const CStringW& strName, XSDTYPE *pXSD)
{
    ATLASSERT( pXSD != NULL );

    *pXSD = XSDTYPE_ERR;
    if (strUri != XSD_NAMESPACEW)
    {
        // Not an XSD-namespace name — the caller must resolve it elsewhere.
        return S_FALSE;
    }

    const CXSDTypeLookup::HashNode *pEntry = g_xsdLookup.Lookup(strName);
    if (pEntry == NULL)
    {
        EmitError(IDS_SDL_UNRESOLVED_ELEM, strUri, strName);
        return E_FAIL;
    }

    *pXSD = pEntry->data.xsdType;
    return S_OK;
}
<|start_filename|>source/SProxy/WSDLPortTypeIO.h<|end_filename|>
//
// WSDLPortTypeIO.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "XMLElement.h"
#include "QName.h"
#include "Emit.h"
#include "WSDLSoapElement.h"
class CWSDLMessage;
// Represents a WSDL portType <input>/<output>/<fault> element: a named
// reference to a message, plus the SOAP binding details (body, headers,
// faults) attached to it.  Owns its CSoapHeader/CSoapFault/CSoapBody
// objects.
class CWSDLPortTypeIO : public CXMLElement
{
private:
    CStringW m_strName;                 // element's "name" attribute
    CQName m_message;                   // qualified name of the referenced message
    CWSDLMessage * m_pMessage;          // resolved message; see GetMessage()
    CAutoPtr<CSoapBody> m_pSoapBody;    // owned soap:body, if any
    CAtlPtrList<CSoapHeader *> m_headers; // owned soap:header elements
    CAtlPtrList<CSoapFault *> m_faults;   // owned soap:fault elements
public:
    inline CWSDLPortTypeIO()
        :m_pMessage(NULL)
    {
    }
    // Allocates a new header and appends it; ownership passes to the list
    // only after AddTail succeeds (CAutoPtr frees it otherwise).
    inline CSoapHeader * AddSoapHeader()
    {
        CAutoPtr<CSoapHeader> p ( new CSoapHeader );
        if (p != NULL)
        {
            if (m_headers.AddTail(p) != NULL)
            {
                return p.Detach();
            }
        }
        EmitErrorHr(E_OUTOFMEMORY);
        return NULL;
    }
    // Appends a caller-allocated header; the list takes ownership on success.
    inline CSoapHeader * AddSoapHeader(CSoapHeader *p)
    {
        if (m_headers.AddTail(p) != NULL)
        {
            return p;
        }
        EmitErrorHr(E_OUTOFMEMORY);
        return NULL;
    }
    inline POSITION GetFirstSoapHeader()
    {
        return m_headers.GetHeadPosition();
    }
    inline CSoapHeader * GetNextSoapHeader(POSITION &pos)
    {
        return m_headers.GetNext(pos);
    }
    inline size_t GetNumSoapHeaders()
    {
        return m_headers.GetCount();
    }
    // Allocates a new fault and appends it (same ownership rules as headers).
    inline CSoapFault * AddSoapFault()
    {
        CAutoPtr<CSoapFault> p ( new CSoapFault );
        if (p != NULL)
        {
            if (m_faults.AddTail(p) != NULL)
            {
                return p.Detach();
            }
        }
        EmitErrorHr(E_OUTOFMEMORY);
        return NULL;
    }
    // Appends a caller-allocated fault; the list takes ownership on success.
    inline CSoapFault * AddSoapFault(CSoapFault *p)
    {
        if (m_faults.AddTail(p) != NULL)
        {
            return p;
        }
        EmitErrorHr(E_OUTOFMEMORY);
        return NULL;
    }
    inline POSITION GetFirstSoapFault()
    {
        return m_faults.GetHeadPosition();
    }
    inline CSoapFault * GetNextSoapFault(POSITION &pos)
    {
        return m_faults.GetNext(pos);
    }
    // Replaces any existing soap:body with a freshly allocated one.
    // Returns NULL on allocation failure.
    inline CSoapBody * AddSoapBody()
    {
        m_pSoapBody.Free();
        m_pSoapBody.Attach( new CSoapBody );
        return m_pSoapBody;
    }
    // Replaces any existing soap:body, taking ownership of pBody.
    inline CSoapBody * AddSoapBody(CSoapBody *pBody)
    {
        m_pSoapBody.Free();
        m_pSoapBody.Attach( pBody );
        return m_pSoapBody;
    }
    inline CSoapBody * GetSoapBody()
    {
        return m_pSoapBody;
    }
    // Sets the name from a (not necessarily NUL-terminated) buffer.
    inline HRESULT SetName(const wchar_t *wszName, int cchName)
    {
        if (!wszName)
        {
            return E_FAIL;
        }
        m_strName.SetString(wszName, cchName);
        return S_OK;
    }
    inline HRESULT SetName(const CStringW& strName)
    {
        m_strName = strName;
        return S_OK;
    }
    inline const CStringW& GetName()
    {
        return m_strName;
    }
    // The three SetMessage overloads store the message's qualified name;
    // resolution to a CWSDLMessage happens later in GetMessage().
    inline HRESULT SetMessage(const CStringW& strQName)
    {
        m_message.SetQName(strQName);
        return S_OK;
    }
    inline HRESULT SetMessage(const CStringW& strPrefix, const CStringW& strName)
    {
        m_message.SetQName(strPrefix, strName);
        return S_OK;
    }
    inline HRESULT SetMessage(const wchar_t *wszQName, int cchQName)
    {
        m_message.SetQName(wszQName, cchQName);
        return S_OK;
    }
    inline const CQName& GetMessageName()
    {
        return m_message;
    }
    // Resolves m_message to the actual message object; defined out of line.
    // NOTE(review): GetMessage collides with the windows.h GetMessage
    // macro when <windows.h> is included without NOUSER — presumably the
    // build order avoids this; verify.
    CWSDLMessage * GetMessage();
};
typedef CWSDLPortTypeIO CWSDLPortTypeInput;
typedef CWSDLPortTypeIO CWSDLPortTypeOutput;
typedef CWSDLPortTypeIO CWSDLPortTypeFault;
<|start_filename|>include/atlperf.inl<|end_filename|>
// This is a part of the Active Template Library.
// Copyright (C) Microsoft Corporation
// All rights reserved.
//
// This source code is only intended as a supplement to the
// Active Template Library Reference and related
// electronic documentation provided with the library.
// See these sources for detailed information regarding the
// Active Template Library product.
#ifndef __ATLPERF_INL__
#define __ATLPERF_INL__
#pragma once
#ifndef __ATLPERF_H__
#error atlperf.inl requires atlperf.h to be included first
#endif
#pragma warning(push)
#ifndef _CPPUNWIND
#pragma warning(disable: 4702) // unreachable code
#endif
namespace ATL
{
// Registry value/key-name constants used by CPerfMon for registering and
// collecting performance counters. __declspec(selectany) keeps a single
// definition across translation units even though this lives in a header.
extern __declspec(selectany) const TCHAR * const c_szAtlPerfCounter = _T("Counter");
extern __declspec(selectany) const TCHAR * const c_szAtlPerfFirstCounter = _T("First Counter");
extern __declspec(selectany) const TCHAR * const c_szAtlPerfLastCounter = _T("Last Counter");
extern __declspec(selectany) const TCHAR * const c_szAtlPerfHelp = _T("Help");
extern __declspec(selectany) const TCHAR * const c_szAtlPerfFirstHelp = _T("First Help");
extern __declspec(selectany) const TCHAR * const c_szAtlPerfLastHelp = _T("Last Help");
extern __declspec(selectany) const WCHAR * const c_szAtlPerfGlobal = L"Global";
extern __declspec(selectany) const TCHAR * const c_szAtlPerfLibrary = _T("Library");
extern __declspec(selectany) const TCHAR * const c_szAtlPerfOpen = _T("Open");
extern __declspec(selectany) const TCHAR * const c_szAtlPerfCollect = _T("Collect");
extern __declspec(selectany) const TCHAR * const c_szAtlPerfClose = _T("Close");
extern __declspec(selectany) const TCHAR * const c_szAtlPerfLanguages = _T("Languages");
extern __declspec(selectany) const TCHAR * const c_szAtlPerfMap = _T("Map");
extern __declspec(selectany) const TCHAR * const c_szAtlPerfPerformance = _T("Performance");
extern __declspec(selectany) const TCHAR * const c_szAtlPerfServicesKey = _T("SYSTEM\\CurrentControlSet\\Services\\%s");
extern __declspec(selectany) const TCHAR * const c_szAtlPerfPerformanceKey = _T("SYSTEM\\CurrentControlSet\\Services\\%s\\Performance");
extern __declspec(selectany) const TCHAR * const c_szAtlPerfPerfLibKey = _T("SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Perflib");
extern __declspec(selectany) const TCHAR * const c_szAtlPerfPerfLibLangKey = _T("SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Perflib\\%3.3x");
// Returns the nIndex-th counter of this category; nIndex must be in range
// (debug-asserted only).
inline CPerfMon::CounterInfo* CPerfMon::CategoryInfo::_GetCounterInfo(UINT nIndex) throw()
{
ATLASSERT(nIndex < _GetNumCounters());
return &m_counters[nIndex];
}
// Number of counters defined for this category.
inline UINT CPerfMon::CategoryInfo::_GetNumCounters() throw()
{
return (UINT) m_counters.GetCount();
}
// Releases any state acquired by Initialize (delegates to UnInitialize).
inline CPerfMon::~CPerfMon() throw()
{
UnInitialize();
}
// Default no-op implementation of the map-creation hook; the bare statements
// only reference the parameters to suppress unused-parameter warnings.
// Presumably overridden/replaced by the ATLPERF map macros — see atlperf.h.
inline HRESULT CPerfMon::CreateMap(LANGID language, HINSTANCE hResInstance, UINT* pSampleRes) throw()
{
(language); // unused
(hResInstance); // unused
(pSampleRes); // unused
return S_OK;
}
// Total number of map entries: one per category plus one per counter in
// every category. Used to size the serialized registry map.
inline UINT CPerfMon::_GetNumCategoriesAndCounters() throw()
{
	const UINT nCategories = _GetNumCategories();
	UINT nTotal = nCategories;
	for (UINT iCategory = 0; iCategory < nCategories; iCategory++)
		nTotal += _GetCategoryInfo(iCategory)->_GetNumCounters();
	return nTotal;
}
// Returns the nIndex-th category definition; nIndex must be in range
// (debug-asserted only).
inline CPerfMon::CategoryInfo* CPerfMon::_GetCategoryInfo(UINT nIndex) throw()
{
ATLASSERT(nIndex < _GetNumCategories());
return &m_categories[nIndex];
}
// Number of categories (perf objects) currently defined.
inline UINT CPerfMon::_GetNumCategories() throw()
{
return (UINT) m_categories.GetCount();
}
// Returns the first CPerfObject in a shared-memory block: instances start
// m_nHeaderSize bytes into the mapping (past the serialized schema).
inline CPerfObject* CPerfMon::_GetFirstInstance(CAtlFileMappingBase* pBlock)
{
ATLENSURE(pBlock != NULL);
// should never happen if Initialize succeeded
// are you checking return codes?
ATLASSERT(pBlock->GetData() != NULL);
return reinterpret_cast<CPerfObject*>(LPBYTE(pBlock->GetData()) + m_nHeaderSize);
}
// Advances to the next instance by stepping m_nAllocSize bytes forward.
// An alloc size of (ULONG)-1 is the end-of-block marker and is rejected;
// callers must detect it before advancing (see _CollectCategoryType).
inline CPerfObject* CPerfMon::_GetNextInstance(CPerfObject* pInstance)
{
ATLENSURE_RETURN_VAL(pInstance != NULL, NULL);
ATLENSURE_RETURN_VAL(pInstance->m_nAllocSize != (ULONG)-1, NULL);
ATLASSERT(pInstance->m_nAllocSize != (ULONG)0);
return reinterpret_cast<CPerfObject*>(LPBYTE(pInstance) + pInstance->m_nAllocSize);
}
// Returns the already-opened block following pBlock, or NULL when pBlock is
// the last one. Passing NULL returns the first block (index 0).
inline CAtlFileMappingBase* CPerfMon::_GetNextBlock(CAtlFileMappingBase* pBlock) throw()
{
// calling _GetNextBlock(NULL) will return the first block
DWORD dwNextBlockIndex = 0;
DWORD* pDw= _GetBlockId_NoThrow(pBlock);
if (pDw)
{
// next index = stored id of the current block + 1
dwNextBlockIndex = *pDw +1;
}
if (m_aMem.GetCount() == dwNextBlockIndex)
return NULL;
return m_aMem[dwNextBlockIndex];
}
// Opens (read-only) the shared-memory block following pPrev and appends it
// to m_aMem. Block names follow the pattern "Global\ATLPERF_<app>_<idx>".
// Passing NULL opens block 0 and initializes m_nAllocSize to the system
// allocation granularity. Returns NULL when the mapping does not exist or
// on any failure (including out-of-memory).
inline CAtlFileMappingBase* CPerfMon::_OpenNextBlock(CAtlFileMappingBase* pPrev) throw()
{
CAutoPtr<CAtlFileMappingBase> spMem;
CAtlFileMappingBase* pMem = NULL;
ATLTRY(spMem.Attach(new CAtlFileMappingBase));
if (spMem == NULL)
return NULL;
// create a unique name for the shared mem segment based on the index
DWORD dwNextBlockIndex;
DWORD* pDw= _GetBlockId_NoThrow(pPrev);
if (pDw)
{
dwNextBlockIndex = *pDw +1;
}
else
{
// use the system allocation granularity (65536 currently. may be different in the future)
SYSTEM_INFO si;
GetSystemInfo(&si);
m_nAllocSize = si.dwAllocationGranularity;
dwNextBlockIndex = 0;
}
_ATLTRY
{
CString strName;
strName.Format(_T("Global\\ATLPERF_%s_%3.3d"), GetAppName(), dwNextBlockIndex);
HRESULT hr = spMem->OpenMapping(strName, m_nAllocSize, 0, FILE_MAP_READ);
if (FAILED(hr))
return NULL;
// m_aMem takes ownership of the mapping via the CAutoPtr overload
pMem = spMem;
m_aMem.Add(spMem);
}
_ATLCATCHALL()
{
return NULL;
}
return pMem;
}
// Creates (or opens, if it already exists) the writable shared-memory block
// following pPrev, protected by the security descriptor in m_sd, and appends
// it to m_aMem. A freshly created block is zeroed and stamped with its index
// (except block 0, whose id slot location depends on m_nSchemaSize which is
// not yet known). An existing block gets its owner/DACL reset so a mapping
// pre-created by another party cannot be used to spoof us. *pbExisted (when
// non-NULL) reports whether the mapping already existed. Returns NULL on any
// failure.
inline CAtlFileMappingBase* CPerfMon::_AllocNewBlock(CAtlFileMappingBase* pPrev, BOOL* pbExisted /* == NULL */) throw()
{
CAtlFileMappingBase* pMem = NULL;
_ATLTRY
{
CSecurityAttributes sa;
sa.Set(m_sd);
CAutoPtr<CAtlFileMappingBase> spMem;
spMem.Attach(new CAtlFileMappingBase);
if (spMem == NULL)
{
return NULL;
}
// create a unique name for the shared mem segment based on the index
DWORD dwNextBlockIndex;
if (pPrev != NULL)
{
dwNextBlockIndex = _GetBlockId(pPrev) +1;
}
else
{
// use the system allocation granularity (65536 currently. may be different in the future)
SYSTEM_INFO si;
GetSystemInfo(&si);
m_nAllocSize = si.dwAllocationGranularity;
dwNextBlockIndex = 0;
}
BOOL bExisted = FALSE;
CString strName;
strName.Format(_T("Global\\ATLPERF_%s_%3.3d"), GetAppName(), dwNextBlockIndex);
HRESULT hr = spMem->MapSharedMem(m_nAllocSize, strName, &bExisted, &sa);
if (FAILED(hr))
{
return NULL;
}
if(!bExisted)
{
memset(spMem->GetData(), 0, m_nAllocSize);
// save the index of this block
// don't for first block since we don't know m_nSchemaSize yet
if (dwNextBlockIndex)
{
_GetBlockId(spMem) = dwNextBlockIndex;
}
}
else
{
CSid owner;
CDacl dacl;
m_sd.GetOwner(&owner);
m_sd.GetDacl(&dacl);
// prevent us from using an object someone else has opened
if (::SetSecurityInfo(spMem->GetHandle(), SE_KERNEL_OBJECT,
DACL_SECURITY_INFORMATION | OWNER_SECURITY_INFORMATION,
const_cast<SID*>(owner.GetPSID()),
NULL,
const_cast<ACL*>(dacl.GetPACL()),
NULL) != ERROR_SUCCESS)
{
return NULL;
}
}
if (pbExisted)
{
*pbExisted = bExisted;
}
// m_aMem takes ownership; notify derived classes of the new block
pMem = spMem;
m_aMem.Add(spMem);
OnBlockAlloc(pMem);
}
_ATLCATCHALL()
{
return NULL;
}
return pMem;
}
// Opens every shared-memory block published by the provider. On the first
// call it opens block 0, loads the schema (map) from it, and computes the
// header size: schema + the per-block id DWORD, aligned up to 16 bytes.
// Subsequent calls only pick up blocks created since the last call.
// Returns S_OK even when no blocks exist (nothing to collect).
inline HRESULT CPerfMon::_OpenAllBlocks() throw()
{
HRESULT hr;
// if we haven't opened any yet, initialize
if (m_aMem.GetCount() == 0)
{
CAtlFileMappingBase* pMem = _OpenNextBlock(NULL);
if (pMem == NULL)
return S_OK;
hr = _LoadMap(LPDWORD(pMem->GetData()));
if (FAILED(hr))
{
m_aMem.RemoveAll();
return hr;
}
// first DWORD of the block is the schema blob size
m_nSchemaSize = *LPDWORD(pMem->GetData());
m_nHeaderSize = m_nSchemaSize + sizeof(DWORD);
m_nHeaderSize = AtlAlignUp(m_nHeaderSize,16);
}
// open any new blocks
CAtlFileMappingBase* pMem = m_aMem[m_aMem.GetCount()-1];
while (pMem)
pMem = _OpenNextBlock(pMem);
return S_OK;
}
// Rebuilds the category/counter definitions from a serialized map blob
// (the inverse of _SaveMap). Blob layout: [byte size][item count], then
// 9 DWORDs per item, then (optionally) per-item name records of the form
// [char count][WCHARs padded to a DWORD boundary]. Each item starts with
// an is-object flag, a perf id and a detail level; objects then carry
// default counter/instance-less flag/struct size/max instance-name length,
// counters carry type/max size/data offset/default scale; both end with
// name and help string ids. After parsing, per-category cached sizes
// (PERF_COUNTER_BLOCK layout) are recomputed. On any failure the map is
// cleared and the error returned; allocation failures yield E_OUTOFMEMORY.
inline HRESULT CPerfMon::_LoadMap(DWORD* pData) throw()
{
_ATLTRY
{
HRESULT hr;
ClearMap();
DWORD dwDataSize = *pData++; // blob size
DWORD dwNumItems = *pData++; // number of items
// see if we have name data
DWORD* pNameData = NULL;
// names are present iff the blob is larger than header + item records
if (dwDataSize > (2+dwNumItems*9) * sizeof(DWORD))
pNameData = pData + dwNumItems*9; // blob size and item count already skipped. skip item data
for (DWORD i=0; i<dwNumItems; i++)
{
DWORD dwIsObject = *pData++;
DWORD dwPerfId = *pData++;
DWORD dwDetailLevel = *pData++;
CString strName;
if (pNameData)
{
strName = CString(LPWSTR(pNameData+1), *pNameData);
// advance past the char count and the DWORD-padded characters
pNameData += AtlAlignUp(sizeof(WCHAR) * *pNameData, sizeof(DWORD))/sizeof(DWORD) + 1;
}
if (dwIsObject)
{
DWORD dwDefaultCounter = *pData++;
DWORD dwInstanceLess = *pData++;
DWORD dwStructSize = *pData++;
DWORD dwMaxInstanceNameLen = *pData++;
hr = AddCategoryDefinition(
dwPerfId,
strName,
NULL,
dwDetailLevel,
dwDefaultCounter,
dwInstanceLess,
dwStructSize,
dwMaxInstanceNameLen);
if (FAILED(hr))
{
ClearMap();
return hr;
}
DWORD dwNameId = *pData++;
DWORD dwHelpId = *pData++;
// patch the string-table ids onto the category just added
CategoryInfo* pCategoryInfo = _GetCategoryInfo(_GetNumCategories()-1);
pCategoryInfo->m_nNameId = dwNameId;
pCategoryInfo->m_nHelpId = dwHelpId;
}
else
{
DWORD dwCounterType = *pData++;
DWORD dwMaxCounterSize = *pData++;
DWORD dwDataOffset = *pData++;
DWORD dwDefaultScale = *pData++;
hr = AddCounterDefinition(
dwPerfId,
strName,
NULL,
dwDetailLevel,
dwCounterType,
dwMaxCounterSize,
dwDataOffset,
dwDefaultScale);
if (FAILED(hr))
{
ClearMap();
return hr;
}
DWORD dwNameId = *pData++;
DWORD dwHelpId = *pData++;
// patch the ids onto the counter just added (last counter of last category)
CategoryInfo* pCategoryInfo = _GetCategoryInfo(_GetNumCategories()-1);
CounterInfo* pCounterInfo = pCategoryInfo->_GetCounterInfo(pCategoryInfo->_GetNumCounters()-1);
pCounterInfo->m_nNameId = dwNameId;
pCounterInfo->m_nHelpId = dwHelpId;
}
}
// fill in cache data
ULONG* pnCounterBlockSize = NULL; // pointer to the object's counter block size
for (DWORD i=0; i<_GetNumCategories(); i++)
{
CategoryInfo* pCategoryInfo = _GetCategoryInfo(i);
// align at 8 bytes per Q262335
pCategoryInfo->m_nCounterBlockSize = (ULONG) AtlAlignUp(sizeof(PERF_COUNTER_BLOCK), 8);
pnCounterBlockSize = &pCategoryInfo->m_nCounterBlockSize;
_FillCategoryType(pCategoryInfo);
for (DWORD j=0; j<pCategoryInfo->_GetNumCounters(); j++)
{
CounterInfo* pCounterInfo = pCategoryInfo->_GetCounterInfo(j);
// accumulates each counter's offset/size into the block size
_FillCounterDef(pCounterInfo, pnCounterBlockSize);
}
// align at 8 bytes per Q262335
pCategoryInfo->m_nCounterBlockSize = (ULONG) AtlAlignUp(pCategoryInfo->m_nCounterBlockSize, 8);
}
return S_OK;
}
_ATLCATCHALL()
{
return E_OUTOFMEMORY;
}
}
// Serializes the category/counter definitions into the blob format read by
// _LoadMap and writes it to the app's Performance registry key as the
// binary "Map" value. Names are appended only when the first category has
// one (the code assumes all-or-nothing naming). The repeated pattern
//   nSizeLast = nSize; nSize -= X; if (nSize > nSizeLast) return E_FAIL;
// detects unsigned underflow, i.e. writing past the computed blob size.
// Returns E_OUTOFMEMORY on allocation failure, a Win32-derived HRESULT if
// the registry key cannot be opened.
inline HRESULT CPerfMon::_SaveMap() throw()
{
_ATLTRY
{
// figure out how much memory we need
size_t nSize = (2 + 9*_GetNumCategoriesAndCounters()) * sizeof(DWORD);
for (UINT i=0; i<_GetNumCategories(); i++)
{
// if any of the entries have names, they'd better all have names
CategoryInfo* pCategoryInfo = _GetCategoryInfo(i);
if (!pCategoryInfo->m_strName.IsEmpty())
{
nSize += sizeof(DWORD) + AtlAlignUp(sizeof(WCHAR) * pCategoryInfo->m_strName.GetLength(), sizeof(DWORD));
for (UINT j=0; j<pCategoryInfo->_GetNumCounters(); j++)
{
CounterInfo* pCounterInfo = pCategoryInfo->_GetCounterInfo(j);
nSize += sizeof(DWORD) + AtlAlignUp(sizeof(WCHAR) * pCounterInfo->m_strName.GetLength(), sizeof(DWORD));
}
}
}
CHeapPtr<BYTE> blob;
if (!blob.Allocate(nSize))
return E_OUTOFMEMORY;
// start with blob size and number of items in the blob
DWORD* pCurrent = reinterpret_cast<DWORD*>(blob.m_pData);
memset(pCurrent, 0, nSize);
*pCurrent++ = (DWORD) nSize; // blob size
*pCurrent++ = _GetNumCategoriesAndCounters(); // number of items
size_t nSizeLast = nSize;
nSize -= 2 * sizeof(DWORD);
if(nSize > nSizeLast) return E_FAIL; // unsigned underflow check
for (UINT i=0; i<_GetNumCategories(); i++)
{
// add all the relevant runtime info to the blob for each item
CategoryInfo* pCategoryInfo = _GetCategoryInfo(i);
*pCurrent++ = TRUE; // is object
*pCurrent++ = pCategoryInfo->m_dwCategoryId;
*pCurrent++ = pCategoryInfo->m_dwDetailLevel;
*pCurrent++ = pCategoryInfo->m_nDefaultCounter;
*pCurrent++ = pCategoryInfo->m_nInstanceLess;
*pCurrent++ = pCategoryInfo->m_nStructSize;
*pCurrent++ = pCategoryInfo->m_nMaxInstanceNameLen;
*pCurrent++ = pCategoryInfo->m_nNameId;
*pCurrent++ = pCategoryInfo->m_nHelpId;
nSizeLast = nSize;
nSize -= 9 * sizeof(DWORD);
if(nSize > nSizeLast) return E_FAIL;
for (UINT j=0; j<pCategoryInfo->_GetNumCounters(); j++)
{
CounterInfo* pCounterInfo = pCategoryInfo->_GetCounterInfo(j);
*pCurrent++ = FALSE; // is object
*pCurrent++ = pCounterInfo->m_dwCounterId;
*pCurrent++ = pCounterInfo->m_dwDetailLevel;
*pCurrent++ = pCounterInfo->m_dwCounterType;
*pCurrent++ = pCounterInfo->m_nMaxCounterSize;
*pCurrent++ = pCounterInfo->m_nDataOffset;
*pCurrent++ = pCounterInfo->m_nDefaultScale;
*pCurrent++ = pCounterInfo->m_nNameId;
*pCurrent++ = pCounterInfo->m_nHelpId;
nSizeLast = nSize;
nSize -= 9 * sizeof(DWORD);
if(nSize > nSizeLast) return E_FAIL;
}
}
// add names to the blob
for (UINT i=0; i<_GetNumCategories(); i++)
{
CategoryInfo* pCategoryInfo = _GetCategoryInfo(i);
// copy the len of the string (in characters) then the wide-char version of the string
// pad the string to a dword boundary
int nLen = pCategoryInfo->m_strName.GetLength();
*pCurrent++ = nLen;
nSizeLast = nSize;
nSize -= sizeof(DWORD);
if(nSize > nSizeLast) return E_FAIL;
Checked::memcpy_s(pCurrent, nSize, CT2CW(pCategoryInfo->m_strName), sizeof(WCHAR)*nLen);
pCurrent += AtlAlignUp(sizeof(WCHAR) * nLen, sizeof(DWORD))/sizeof(DWORD);
nSizeLast = nSize;
nSize -= sizeof(WCHAR)*nLen;
if(nSize > nSizeLast) return E_FAIL;
for (UINT j=0; j<pCategoryInfo->_GetNumCounters(); j++)
{
CounterInfo* pCounterInfo = pCategoryInfo->_GetCounterInfo(j);
// copy the len of the string (in characters) then the wide-char version of the string
// pad the string to a dword boundary
int nCounterLen = pCounterInfo->m_strName.GetLength();
*pCurrent++ = nCounterLen;
nSizeLast = nSize;
nSize -= sizeof(DWORD);
if(nSize > nSizeLast) return E_FAIL;
Checked::memcpy_s(pCurrent, nSize, CT2CW(pCounterInfo->m_strName), sizeof(WCHAR)*nCounterLen);
pCurrent += AtlAlignUp(sizeof(WCHAR) * nCounterLen, sizeof(DWORD))/sizeof(DWORD);
nSizeLast = nSize;
nSize -= sizeof(WCHAR)*nCounterLen;
if(nSize > nSizeLast) return E_FAIL;
}
}
CRegKey rkApp;
CString str;
DWORD dwErr;
str.Format(c_szAtlPerfPerformanceKey, GetAppName());
dwErr = rkApp.Open(HKEY_LOCAL_MACHINE, str);
if (dwErr != ERROR_SUCCESS)
return AtlHresultFromWin32(dwErr);
// first DWORD of the blob holds its total size
rkApp.SetBinaryValue(c_szAtlPerfMap, blob, *LPDWORD(blob.m_pData));
return S_OK;
}
_ATLCATCHALL()
{
return E_OUTOFMEMORY;
}
}
// Linear search for the category whose id equals dwCategoryId;
// returns NULL when no such category is defined.
inline CPerfMon::CategoryInfo* CPerfMon::_FindCategoryInfo(DWORD dwCategoryId) throw()
{
	const UINT nCategories = _GetNumCategories();
	for (UINT iCategory = 0; iCategory < nCategories; iCategory++)
	{
		CategoryInfo* pInfo = _GetCategoryInfo(iCategory);
		if (pInfo->m_dwCategoryId == dwCategoryId)
			return pInfo;
	}
	return NULL;
}
// Linear search within pCategoryInfo for the counter whose id equals
// dwCounterId; returns NULL when the category is NULL or has no match.
inline CPerfMon::CounterInfo* CPerfMon::_FindCounterInfo(CategoryInfo* pCategoryInfo, DWORD dwCounterId)
{
	ATLENSURE_RETURN_VAL(pCategoryInfo != NULL, NULL);
	const DWORD nCounters = pCategoryInfo->_GetNumCounters();
	for (DWORD iCounter = 0; iCounter < nCounters; iCounter++)
	{
		CounterInfo* pInfo = pCategoryInfo->_GetCounterInfo(iCounter);
		if (pInfo->m_dwCounterId == dwCounterId)
			return pInfo;
	}
	return NULL;
}
// Convenience overload: resolves the category by id first, then searches
// it for the counter. NULL when either lookup fails.
inline CPerfMon::CounterInfo* CPerfMon::_FindCounterInfo(DWORD dwCategoryId, DWORD dwCounterId) throw()
{
	CategoryInfo* pCategory = _FindCategoryInfo(dwCategoryId);
	return (pCategory != NULL) ? _FindCounterInfo(pCategory, dwCounterId) : NULL;
}
// Decides whether the perf query string szValue requests this category.
// "Global" requests everything; otherwise szValue is a space-separated
// list of decimal indices that must contain dwCategoryId (callers pass
// the category's name-title index).
inline BOOL CPerfMon::_WantCategoryType(__in_z LPWSTR szValue, __in DWORD dwCategoryId) throw(...)
{
ATLASSERT(szValue != NULL);
if (lstrcmpiW(c_szAtlPerfGlobal, szValue) == 0)
return TRUE;
CString strList(szValue);
int nStart = 0;
CString strNum = strList.Tokenize(_T(" "), nStart);
while (!strNum.IsEmpty())
{
if (_ttoi(strNum) == int(dwCategoryId))
return TRUE;
strNum = strList.Tokenize(_T(" "), nStart);
}
return FALSE;
}
// Overflow-checked bump allocator over the collection buffer: reserves
// nBytesNeeded bytes at pData, advancing pData and *pnBytesUsed. Returns
// NULL when the addition would wrap a ULONG or exceed nBytesAvail.
inline LPBYTE CPerfMon::_AllocData(LPBYTE& pData, ULONG nBytesAvail, ULONG* pnBytesUsed, size_t nBytesNeeded)
{
ATLENSURE_RETURN_VAL(pnBytesUsed != NULL, NULL);
ULONG newSize = *pnBytesUsed+static_cast<ULONG>(nBytesNeeded);
// first two tests catch unsigned wrap-around, third catches exhaustion
if ((newSize < *pnBytesUsed) || (newSize < (ULONG) nBytesNeeded) || (nBytesAvail < newSize))
return NULL;
LPBYTE p = pData;
pData += nBytesNeeded;
*pnBytesUsed += (ULONG) nBytesNeeded;
return p;
}
// Returns a writable reference to a block's stored index; throws (via
// ATLENSURE) when pBlock is NULL.
inline DWORD& CPerfMon::_GetBlockId(CAtlFileMappingBase* pBlock)
{
DWORD* pDw = _GetBlockId_NoThrow(pBlock);
ATLENSURE(pDw);
return *pDw;
}
// Returns a pointer to the block-index DWORD stored m_nSchemaSize bytes
// into the mapping (immediately after the schema), or NULL for pBlock==NULL.
inline DWORD* CPerfMon::_GetBlockId_NoThrow(CAtlFileMappingBase* pBlock)
{
if (pBlock == NULL)
return NULL;
return LPDWORD(LPBYTE(pBlock->GetData()) + m_nSchemaSize);
}
// Populates the cached PERF_OBJECT_TYPE for a category from its definition.
// TotalByteLength starts at just the definition length and NumInstances at
// 0 (or PERF_NO_INSTANCES); both are updated during collection.
inline void CPerfMon::_FillCategoryType(CategoryInfo* pCategoryInfo) throw()
{
PERF_OBJECT_TYPE& type = pCategoryInfo->m_cache;
type.DefinitionLength = sizeof(PERF_OBJECT_TYPE) + sizeof(PERF_COUNTER_DEFINITION) * pCategoryInfo->_GetNumCounters();
type.TotalByteLength = type.DefinitionLength; // we will add the instance definitions/counter blocks as we go
type.HeaderLength = sizeof(PERF_OBJECT_TYPE);
type.ObjectNameTitleIndex = pCategoryInfo->m_nNameId;
type.ObjectNameTitle = NULL;
type.ObjectHelpTitleIndex = pCategoryInfo->m_nHelpId;
type.ObjectHelpTitle = NULL;
type.DetailLevel = pCategoryInfo->m_dwDetailLevel;
type.NumCounters = pCategoryInfo->_GetNumCounters();
type.DefaultCounter = pCategoryInfo->m_nDefaultCounter;
if (pCategoryInfo->m_nInstanceLess == PERF_NO_INSTANCES)
type.NumInstances = PERF_NO_INSTANCES;
else
type.NumInstances = 0; // this will be calculated as objects are processed
type.CodePage = 0;
type.PerfTime.QuadPart = 0;
QueryPerformanceFrequency (&(type.PerfFreq));
}
// Populates the cached PERF_COUNTER_DEFINITION for a counter and assigns it
// an aligned offset inside the category's counter block, growing
// *pnCounterBlockSize accordingly. DWORD/LARGE counters align to their own
// size; variable-length counters must be PERF_TYPE_TEXT and reserve the
// maximum (DWORD-aligned) text size.
inline void CPerfMon::_FillCounterDef(CounterInfo* pCounterInfo, ULONG* pnCounterBlockSize) throw()
{
PERF_COUNTER_DEFINITION& def = pCounterInfo->m_cache;
def.ByteLength = sizeof(PERF_COUNTER_DEFINITION);
def.CounterNameTitleIndex = pCounterInfo->m_nNameId;
def.CounterNameTitle = NULL;
def.CounterHelpTitleIndex = pCounterInfo->m_nHelpId;
def.CounterHelpTitle = NULL;
def.DefaultScale = pCounterInfo->m_nDefaultScale;
def.DetailLevel = pCounterInfo->m_dwDetailLevel;
def.CounterType = pCounterInfo->m_dwCounterType;
DWORD dwAlignOfCounter=0;
switch (pCounterInfo->m_dwCounterType & ATLPERF_SIZE_MASK)
{
case PERF_SIZE_DWORD:
def.CounterSize = sizeof(DWORD);
dwAlignOfCounter = sizeof(DWORD);
break;
case PERF_SIZE_LARGE:
def.CounterSize = sizeof(__int64);
dwAlignOfCounter = sizeof(__int64);
break;
case PERF_SIZE_ZERO:
def.CounterSize = 0;
dwAlignOfCounter = 0;
break;
case PERF_SIZE_VARIABLE_LEN:
ATLASSERT((pCounterInfo->m_dwCounterType & ATLPERF_TYPE_MASK) == PERF_TYPE_TEXT);
if ((pCounterInfo->m_dwCounterType & ATLPERF_TEXT_MASK) == PERF_TEXT_UNICODE)
{
def.CounterSize = (DWORD) AtlAlignUp(pCounterInfo->m_nMaxCounterSize * sizeof(WCHAR), sizeof(DWORD));
}
else
{
def.CounterSize = (DWORD) AtlAlignUp(pCounterInfo->m_nMaxCounterSize * sizeof(char), sizeof(DWORD));
}
break;
}
// place the counter at the next suitably aligned offset in the block
*pnCounterBlockSize = AtlAlignUp(*pnCounterBlockSize, dwAlignOfCounter);
def.CounterOffset = *pnCounterBlockSize;
*pnCounterBlockSize += def.CounterSize;
}
// Copies one live CPerfObject's data into the collection buffer: an optional
// PERF_INSTANCE_DEFINITION (+name) for instanced categories, followed by a
// PERF_COUNTER_BLOCK with each counter value. The instance is first
// snapshotted onto the stack so concurrent writers can't tear the data we
// format; if the snapshot no longer matches the expected category/instance
// or its refcount dropped to 0, it is silently skipped (S_OK). Returns
// E_OUTOFMEMORY when the caller's buffer is too small (mapped to
// ERROR_MORE_DATA by Collect).
inline HRESULT CPerfMon::_CollectInstance(
CategoryInfo* pCategoryInfo,
LPBYTE& pData,
ULONG nBytesAvail,
ULONG* pnBytesUsed,
CPerfObject* _pInstance,
PERF_OBJECT_TYPE* pObjectType,
PERF_COUNTER_DEFINITION* pCounterDefs
) throw()
{
DWORD dwInstance = _pInstance->m_dwInstance;
// grab a snapshot of the object
USES_ATL_SAFE_ALLOCA;
CPerfObject* pInstance = (CPerfObject*) _ATL_SAFE_ALLOCA(_pInstance->m_nAllocSize, _ATL_SAFE_ALLOCA_DEF_THRESHOLD);
if (pInstance == NULL)
{
return E_OUTOFMEMORY;
}
Checked::memcpy_s(pInstance, _pInstance->m_nAllocSize, _pInstance, _pInstance->m_nAllocSize);
// if it was changed or deleted between when we first saw it and when we copied
// it, then forget about whatever happens to be there for this collection period
if (pInstance->m_dwCategoryId != pCategoryInfo->m_dwCategoryId ||
dwInstance != pInstance->m_dwInstance ||
pInstance->m_nRefCount == 0)
return S_OK;
// we have a copy of something that claims to be the object type we're expecting
// put it into the data blob
PERF_INSTANCE_DEFINITION* pInstanceDef = NULL;
if (pCategoryInfo->m_nInstanceLess == PERF_NO_INSTANCES)
pObjectType->NumInstances = PERF_NO_INSTANCES;
else
{
pObjectType->NumInstances++;
// create an instance definition
pInstanceDef = _AllocStruct(pData, nBytesAvail, pnBytesUsed, (PERF_INSTANCE_DEFINITION*) NULL);
if (pInstanceDef == NULL)
return E_OUTOFMEMORY;
pInstanceDef->ParentObjectTitleIndex = 0;
pInstanceDef->ParentObjectInstance = 0;
pInstanceDef->UniqueID = PERF_NO_UNIQUE_ID;
// handle the instance name
LPCWSTR szInstNameSrc = LPCWSTR(LPBYTE(pInstance)+pInstance->m_nInstanceNameOffset);
pInstanceDef->NameLength = (ULONG)(wcslen(szInstNameSrc)+1)*sizeof(WCHAR);
// align at 8 bytes per Q262335
ULONG nNameAlloc = (ULONG) AtlAlignUp(pInstanceDef->NameLength, 8);
LPWSTR szInstNameDest = (LPWSTR) _AllocData(pData, nBytesAvail, pnBytesUsed, nNameAlloc);
if (szInstNameDest == NULL)
return E_OUTOFMEMORY;
Checked::memcpy_s(szInstNameDest, nNameAlloc, szInstNameSrc, pInstanceDef->NameLength);
pInstanceDef->NameOffset = ULONG(LPBYTE(szInstNameDest) - LPBYTE(pInstanceDef));
pInstanceDef->ByteLength = DWORD(sizeof(PERF_INSTANCE_DEFINITION) + nNameAlloc);
}
// create the counter block + data
LPBYTE pCounterData = _AllocData(pData, nBytesAvail, pnBytesUsed, pCategoryInfo->m_nCounterBlockSize);
if (pCounterData == NULL)
return E_OUTOFMEMORY;
// fill in the counter block header for the data
PERF_COUNTER_BLOCK* pCounterBlock = (PERF_COUNTER_BLOCK*) pCounterData;
pCounterBlock->ByteLength = pCategoryInfo->m_nCounterBlockSize;
// fill in the data
for (ULONG i=0; i<pObjectType->NumCounters; i++)
{
CounterInfo* pCounterInfo = pCategoryInfo->_GetCounterInfo(i);
PERF_COUNTER_DEFINITION& def = pCounterDefs[i];
LPBYTE pSrc = LPBYTE(pInstance)+pCounterInfo->m_nDataOffset;
LPBYTE pDest = pCounterData+def.CounterOffset;
switch (pCounterInfo->m_dwCounterType & ATLPERF_SIZE_MASK)
{
case PERF_SIZE_DWORD:
*LPDWORD(pDest) = *LPDWORD(pSrc);
break;
case PERF_SIZE_LARGE:
*(ULONGLONG*)(pDest) = *(ULONGLONG*)(pSrc);
break;
case PERF_SIZE_VARIABLE_LEN:
// text counters: copy truncated to the declared max, always NUL-terminated
if ((pCounterInfo->m_dwCounterType & ATLPERF_TEXT_MASK) == PERF_TEXT_UNICODE)
{
LPCWSTR szSrc = reinterpret_cast<LPCWSTR>(pSrc);
LPWSTR szDest = reinterpret_cast<LPWSTR>(pDest);
size_t nLen = __min(wcslen(szSrc), pCounterInfo->m_nMaxCounterSize-1);
Checked::wcsncpy_s(szDest, pCounterInfo->m_nMaxCounterSize-1, szSrc, nLen);
szDest[nLen] = 0;
}
else
{
LPCSTR szSrc = reinterpret_cast<LPCSTR>(pSrc);
LPSTR szDest = reinterpret_cast<LPSTR>(pDest);
size_t nLen = __min(strlen(szSrc), pCounterInfo->m_nMaxCounterSize-1);
Checked::strncpy_s(szDest, pCounterInfo->m_nMaxCounterSize-1, szSrc, nLen);
szDest[nLen] = 0;
}
break;
}
}
// account for what we emitted in the object's running total
if (pInstanceDef != NULL)
pObjectType->TotalByteLength += pInstanceDef->ByteLength;
pObjectType->TotalByteLength += pCounterBlock->ByteLength;
return S_OK;
}
// Overload for an instanceless (singleton) category that currently has no
// live CPerfObject: emits a counter block of all-zero values so the category
// still appears in the collection. Returns E_OUTOFMEMORY when the buffer is
// too small.
inline HRESULT CPerfMon::_CollectInstance(
CategoryInfo* pCategoryInfo,
LPBYTE& pData,
ULONG nBytesAvail,
ULONG* pnBytesUsed,
PERF_OBJECT_TYPE* pObjectType,
PERF_COUNTER_DEFINITION* pCounterDefs
) throw()
{
// specialization to collect an instanceless object with no instance data
ATLASSERT(pCategoryInfo->m_nInstanceLess == PERF_NO_INSTANCES);
pObjectType->NumInstances = PERF_NO_INSTANCES;
// create the counter block + data
LPBYTE pCounterData = _AllocData(pData, nBytesAvail, pnBytesUsed, pCategoryInfo->m_nCounterBlockSize);
if (pCounterData == NULL)
return E_OUTOFMEMORY;
// fill in the counter block header for the data
PERF_COUNTER_BLOCK* pCounterBlock = (PERF_COUNTER_BLOCK*) pCounterData;
pCounterBlock->ByteLength = pCategoryInfo->m_nCounterBlockSize;
// fill in the data
for (ULONG i=0; i<pObjectType->NumCounters; i++)
{
CounterInfo* pCounterInfo = pCategoryInfo->_GetCounterInfo(i);
PERF_COUNTER_DEFINITION& def = pCounterDefs[i];
LPBYTE pDest = pCounterData+def.CounterOffset;
switch (pCounterInfo->m_dwCounterType & ATLPERF_SIZE_MASK)
{
case PERF_SIZE_DWORD:
*LPDWORD(pDest) = 0;
break;
case PERF_SIZE_LARGE:
*PULONGLONG(pDest) = 0;
break;
case PERF_SIZE_VARIABLE_LEN:
if ((pCounterInfo->m_dwCounterType & ATLPERF_TEXT_MASK) == PERF_TEXT_UNICODE)
memset(pDest, 0, pCounterInfo->m_nMaxCounterSize*sizeof(WCHAR));
else
memset(pDest, 0, pCounterInfo->m_nMaxCounterSize*sizeof(CHAR));
break;
}
}
pObjectType->TotalByteLength += pCounterBlock->ByteLength;
return S_OK;
}
// Emits one complete perf object into the collection buffer: the
// PERF_OBJECT_TYPE header, every counter definition, then the data of each
// matching instance found by walking all shared-memory blocks. An instance
// with alloc size (ULONG)-1 is the end-of-block sentinel and triggers a hop
// to the next block. An instanceless category with no live instance emits
// zeroed data. Returns E_OUTOFMEMORY when the buffer is exhausted.
inline HRESULT CPerfMon::_CollectCategoryType(
CategoryInfo* pCategoryInfo,
LPBYTE pData,
ULONG nBytesAvail,
ULONG* pnBytesUsed
) throw()
{
ATLENSURE_RETURN(pCategoryInfo != NULL);
ATLASSERT(pnBytesUsed != NULL);
// write the object definition out
PERF_OBJECT_TYPE* pObjectType = _AllocStruct(pData, nBytesAvail, pnBytesUsed, (PERF_OBJECT_TYPE*) NULL);
if (pObjectType == NULL)
return E_OUTOFMEMORY;
Checked::memcpy_s(pObjectType, sizeof(PERF_OBJECT_TYPE), &pCategoryInfo->m_cache, sizeof(PERF_OBJECT_TYPE));
// save a pointer to the first counter entry and counter definition.
// we'll need them when we create the PERF_COUNTER_BLOCK data
PERF_COUNTER_DEFINITION* pCounterDefs = reinterpret_cast<PERF_COUNTER_DEFINITION*>(pData);
// write the counter definitions out
for (DWORD i=0; i<pCategoryInfo->_GetNumCounters(); i++)
{
CounterInfo* pCounterInfo = pCategoryInfo->_GetCounterInfo(i);
PERF_COUNTER_DEFINITION* pCounterDef = _AllocStruct(pData, nBytesAvail, pnBytesUsed, (PERF_COUNTER_DEFINITION*) NULL);
if (pCounterDef == NULL)
return E_OUTOFMEMORY;
Checked::memcpy_s(pCounterDef, sizeof(PERF_COUNTER_DEFINITION), &pCounterInfo->m_cache, sizeof(PERF_COUNTER_DEFINITION));
// set PerfTime and PerfFreq for PERF_ELAPSED_TIME counter.
if(pCounterDef->CounterType == PERF_ELAPSED_TIME)
{
LARGE_INTEGER currTime;
if (FALSE != QueryPerformanceCounter(&currTime))
pObjectType->PerfTime = currTime;
else
pObjectType->PerfTime.QuadPart = 0;
QueryPerformanceFrequency (&(pObjectType->PerfFreq));
}
}
// search for objects of the appropriate type and write out their instance/counter data
bool bGotInstance = false;
CAtlFileMappingBase* pCurrentBlock = _GetNextBlock(NULL);
if (pCurrentBlock != NULL)
{
CPerfObject* pInstance = _GetFirstInstance(pCurrentBlock);
// alloc size 0 means the end of the used portion of a block
while (pInstance && pInstance->m_nAllocSize != 0)
{
if (pInstance->m_dwCategoryId == pCategoryInfo->m_dwCategoryId)
{
bGotInstance = true;
HRESULT hr = _CollectInstance(pCategoryInfo, pData, nBytesAvail,
pnBytesUsed, pInstance, pObjectType, pCounterDefs);
if (FAILED(hr))
return hr;
}
pInstance = _GetNextInstance(pInstance);
ATLENSURE_RETURN(pInstance!= NULL);
if (pInstance->m_nAllocSize == (ULONG) -1)
{
// sentinel: continue in the next shared-memory block, if any
pCurrentBlock = _GetNextBlock(pCurrentBlock);
if (pCurrentBlock == NULL)
pInstance = NULL;
else
pInstance = _GetFirstInstance(pCurrentBlock);
}
}
}
if (pCategoryInfo->m_nInstanceLess == PERF_NO_INSTANCES && !bGotInstance)
{
// we have an instanceless (singleton) object with no data. send zeroed data
HRESULT hr = _CollectInstance(pCategoryInfo, pData, nBytesAvail,
pnBytesUsed, pObjectType, pCounterDefs);
if (FAILED(hr))
return hr;
}
return S_OK;
}
// Perf-DLL Open entry point: nothing to do here (state is built lazily in
// Collect); the parameter is referenced only to silence unused warnings.
inline DWORD CPerfMon::Open(LPWSTR szDeviceNames) throw()
{
(szDeviceNames); // unused
return 0;
}
// Perf-DLL Collect entry point. Opens the provider's shared-memory blocks,
// loads the schema from the registry "Map" value if no categories are
// defined yet (i.e. this DLL is only collecting, not providing), and emits
// every category matched by the query string szValue. On success *ppData is
// advanced past the written data and *pcbBytes / *pcObjectTypes updated.
// Buffer exhaustion returns ERROR_MORE_DATA; every other failure reports
// zero bytes/objects with ERROR_SUCCESS, per perf-DLL convention, so a
// broken provider does not abort the whole collection.
inline DWORD CPerfMon::Collect(
__in_z LPWSTR szValue,
__deref_inout_bcount(*pcbBytes) LPVOID* ppData,
__inout LPDWORD pcbBytes,
__inout LPDWORD pcObjectTypes
) throw()
{
_ATLTRY
{
if (FAILED(_OpenAllBlocks()))
{
*pcbBytes = 0;
*pcObjectTypes = 0;
return ERROR_SUCCESS;
}
LPBYTE pData = LPBYTE(*ppData);
ULONG nBytesLeft = *pcbBytes;
*pcbBytes = 0;
if (_GetNumCategories() == 0)
{
// nothing is providing data. we need to load the map directly
// from the registry in order to provide category/counter data
CRegKey rkApp;
DWORD dwErr;
CString strAppKey;
strAppKey.Format(c_szAtlPerfPerformanceKey, GetAppName());
dwErr = rkApp.Open(HKEY_LOCAL_MACHINE, strAppKey, KEY_READ);
if (dwErr != ERROR_SUCCESS)
{
*pcbBytes = 0;
*pcObjectTypes = 0;
return ERROR_SUCCESS;
}
// query size, allocate (rounded up to whole DWORDs), then read the map
ULONG nBytes = 0;
dwErr = rkApp.QueryBinaryValue(c_szAtlPerfMap, NULL, &nBytes);
if (dwErr != ERROR_SUCCESS)
{
*pcbBytes = 0;
*pcObjectTypes = 0;
return ERROR_SUCCESS;
}
CHeapPtr<DWORD> buf;
if (!buf.Allocate((nBytes+3)/4))
{
*pcbBytes = 0;
*pcObjectTypes = 0;
return ERROR_SUCCESS;
}
dwErr = rkApp.QueryBinaryValue(c_szAtlPerfMap, buf, &nBytes);
if (dwErr != ERROR_SUCCESS)
{
*pcbBytes = 0;
*pcObjectTypes = 0;
return ERROR_SUCCESS;
}
if (FAILED(_LoadMap(buf)))
{
*pcbBytes = 0;
*pcObjectTypes = 0;
return ERROR_SUCCESS;
}
}
for (UINT i=0; i<_GetNumCategories(); i++)
{
CategoryInfo* pCategoryInfo = _GetCategoryInfo(i);
// the query string lists name-title indices, so match on m_nNameId
if (_WantCategoryType(szValue, pCategoryInfo->m_nNameId))
{
ULONG nBytesUsed = 0;
HRESULT hr = _CollectCategoryType(pCategoryInfo, pData, nBytesLeft, &nBytesUsed);
if (hr == E_OUTOFMEMORY)
{
*pcbBytes = 0;
*pcObjectTypes = 0;
return ERROR_MORE_DATA;
}
else if (FAILED(hr))
{
*pcbBytes = 0;
*pcObjectTypes = 0;
return ERROR_SUCCESS;
}
(*pcObjectTypes)++;
(*pcbBytes) += nBytesUsed;
nBytesLeft -= nBytesUsed;
pData += nBytesUsed;
}
}
*ppData = pData;
return ERROR_SUCCESS;
}
_ATLCATCHALL()
{
*pcbBytes = 0;
*pcObjectTypes = 0;
return ERROR_SUCCESS;
}
}
// Perf-DLL Close entry point: releases everything via UnInitialize.
inline DWORD CPerfMon::Close() throw()
{
UnInitialize();
return ERROR_SUCCESS;
}
#ifdef _ATL_PERF_REGISTER
#pragma warning (push)
#pragma warning(disable : 4996)
// Appends "index\0string\0" pairs to pszNew for each entry of astrStrings,
// numbering them iFirstIndex, iFirstIndex+2, ... (perf name/help indices
// advance by 2). pszNew is advanced past the written data; the caller must
// have sized the buffer — _stprintf/_tcscpy do no bounds checking (hence
// the suppressed C4996 deprecation warning around this function).
inline void CPerfMon::_AppendStrings(
LPTSTR& pszNew,
CAtlArray<CString>& astrStrings,
ULONG iFirstIndex
) throw()
{
for (UINT iString = 0; iString < astrStrings.GetCount(); iString++)
{
INT nFormatChars = _stprintf(pszNew, _T("%d"), iFirstIndex+2*iString);
pszNew += nFormatChars + 1;
_tcscpy(pszNew, astrStrings[iString]);
pszNew += astrStrings[iString].GetLength() + 1;
}
}
#pragma warning (pop)
// Merges this provider's strings into a Perflib REG_MULTI_SZ value (the
// per-language "Counter"/"Help" lists): reads the existing value, inserts
// the new "index\0string\0" pairs before the first existing entry whose
// index is >= iFirstIndex, drops any stale entries inside
// [iFirstIndex, iLastIndex], keeps everything else, and writes the value
// back. The nCharsNew underflow check guards against writing past the
// computed buffer size. Returns a Win32-derived HRESULT on registry errors
// and E_OUTOFMEMORY on allocation failure.
inline HRESULT CPerfMon::_AppendRegStrings(
CRegKey& rkLang,
LPCTSTR szValue,
CAtlArray<CString>& astrStrings,
ULONG nNewStringSize,
ULONG iFirstIndex,
ULONG iLastIndex
) throw()
{
_ATLTRY
{
// load the existing strings, add the new data, and resave the strings
ULONG nCharsOrig = 0;
ULONG nCharsNew;
DWORD dwErr;
dwErr = rkLang.QueryMultiStringValue(szValue, NULL, &nCharsOrig);
if (dwErr != ERROR_SUCCESS)
return AtlHresultFromWin32(dwErr);
nCharsNew = nCharsOrig + nNewStringSize;
CString strOrig;
dwErr = rkLang.QueryMultiStringValue(szValue, CStrBuf(strOrig, nCharsOrig, CStrBuf::SET_LENGTH), &nCharsOrig);
if (dwErr != ERROR_SUCCESS)
return AtlHresultFromWin32(dwErr);
LPCTSTR pszOrig = strOrig;
CString strNew;
CStrBuf szNew(strNew, nCharsNew, CStrBuf::SET_LENGTH);
LPTSTR pszNew = szNew;
bool bNewStringsAdded = false;
// walk the existing multi-sz entry by entry ("index\0text\0" pairs)
while (*pszOrig != '\0')
{
ULONG iIndex = _ttoi(pszOrig);
int nLen = (int) _tcslen(pszOrig) + 1; // get the length of the index and null
nLen += (int) _tcslen(pszOrig+nLen) + 1; // add the length of the description and null
if (!bNewStringsAdded && iIndex >= iFirstIndex)
{
// insert our strings at their sorted position (once)
LPTSTR pszOld =pszNew;
_AppendStrings(pszNew, astrStrings, iFirstIndex);
bNewStringsAdded = true;
ULONG nCharsNewLast = nCharsNew;
nCharsNew -= ULONG(pszNew-pszOld);
if(nCharsNew > nCharsNewLast)
{
// unsigned underflow: wrote more than the buffer holds
return E_FAIL;
}
}
if (iIndex < iFirstIndex || iIndex > iLastIndex)
{
// keep entries outside our reserved range; drop stale ones inside it
Checked::memmove_s(pszNew, nCharsNew, pszOrig, nLen*sizeof(TCHAR));
pszNew += nLen;
}
pszOrig += nLen;
}
if (!bNewStringsAdded)
_AppendStrings(pszNew, astrStrings, iFirstIndex);
*pszNew++ = '\0'; // must have 2 null terminators at end of multi_sz
dwErr = rkLang.SetMultiStringValue(szValue, strNew);
if (dwErr != ERROR_SUCCESS)
return AtlHresultFromWin32(dwErr);
return S_OK;
}
_ATLCATCHALL()
{
return E_OUTOFMEMORY;
}
}
// Removes this provider's entries from a Perflib REG_MULTI_SZ value by
// compacting the buffer in place: every "index\0text\0" pair whose index
// falls inside [iFirstIndex, iLastIndex] is dropped, the rest are kept,
// and the value is written back. The nMaxLen underflow check guards the
// in-place memmove. Returns a Win32-derived HRESULT on registry errors and
// E_OUTOFMEMORY on allocation failure.
inline HRESULT CPerfMon::_RemoveRegStrings(
CRegKey& rkLang,
LPCTSTR szValue,
ULONG iFirstIndex,
ULONG iLastIndex
) throw()
{
_ATLTRY
{
// load the existing strings, remove the data, and resave the strings
DWORD nChars = 0;
DWORD dwErr;
dwErr = rkLang.QueryMultiStringValue(szValue, NULL, &nChars);
if (dwErr != ERROR_SUCCESS)
return AtlHresultFromWin32(dwErr);
CString str;
CStrBuf szBuf(str, nChars, CStrBuf::SET_LENGTH);
DWORD nMaxLen = nChars*sizeof(TCHAR);
dwErr = rkLang.QueryMultiStringValue(szValue, szBuf, &nChars);
if (dwErr != ERROR_SUCCESS)
return AtlHresultFromWin32(dwErr);
// compact in place: pszRead scans, pszWrite receives surviving entries
LPCTSTR pszRead = szBuf;
LPTSTR pszWrite = szBuf;
while (*pszRead != '\0')
{
ULONG iIndex = _ttoi(pszRead);
int nLen = (int) _tcslen(pszRead) + 1; // get the length of the index and null
nLen += (int) _tcslen(pszRead+nLen) + 1; // add the length of the description and null
if (iIndex < iFirstIndex || iIndex > iLastIndex)
{
Checked::memmove_s(pszWrite, nMaxLen , pszRead, nLen*sizeof(TCHAR));
UINT nMaxLenLast = nMaxLen;
nMaxLen -= nLen*sizeof(TCHAR);
if(nMaxLen > nMaxLenLast) return E_FAIL; // unsigned underflow check
pszWrite += nLen;
}
pszRead += nLen;
}
*pszWrite++ = '\0'; // must have 2 null terminators at end of multi_sz
dwErr = rkLang.SetMultiStringValue(szValue, szBuf);
if (dwErr != ERROR_SUCCESS)
return AtlHresultFromWin32(dwErr);
return S_OK;
}
_ATLCATCHALL()
{
return E_OUTOFMEMORY;
}
}
// Reserves a contiguous range of perflib counter-name and help-text indices
// for this application. If the app's Performance key already records a range
// large enough for all categories and counters, that range is reused;
// otherwise new indices are claimed by advancing the system-wide
// "Last Counter"/"Last Help" values under the Perflib key, and the claimed
// range is recorded under the app key. On success the first counter/help
// indices are returned through the reference parameters.
inline HRESULT CPerfMon::_ReserveStringRange(DWORD& dwFirstCounter, DWORD& dwFirstHelp) throw()
{
    CRegKey rkApp;
    CString strAppKey;
    DWORD dwErr;
    _ATLTRY
    {
        strAppKey.Format(c_szAtlPerfPerformanceKey, GetAppName());
    }
    _ATLCATCHALL()
    {
        return E_OUTOFMEMORY;
    }
    DWORD nNumStrings = _GetNumCategoriesAndCounters();
    dwErr = rkApp.Open(HKEY_LOCAL_MACHINE, strAppKey);
    if (dwErr == ERROR_SUCCESS)
    {
        // see if we already have a sufficient range reserved
        DWORD dwFirstAppCounter;
        DWORD dwFirstAppHelp;
        DWORD dwLastAppCounter;
        DWORD dwLastAppHelp;
        // Each name/help string consumes 2 indices (perflib uses even/odd
        // pairs), hence the 2*nNumStrings capacity test.
        if (rkApp.QueryDWORDValue(c_szAtlPerfFirstCounter, dwFirstAppCounter) == ERROR_SUCCESS &&
                rkApp.QueryDWORDValue(c_szAtlPerfFirstHelp, dwFirstAppHelp) == ERROR_SUCCESS &&
                rkApp.QueryDWORDValue(c_szAtlPerfLastCounter, dwLastAppCounter) == ERROR_SUCCESS &&
                rkApp.QueryDWORDValue(c_szAtlPerfLastHelp, dwLastAppHelp) == ERROR_SUCCESS &&
                dwLastAppCounter-dwFirstAppCounter+2 >= 2*nNumStrings &&
                dwLastAppHelp-dwFirstAppHelp+2 >= 2*nNumStrings)
        {
            dwFirstCounter = dwFirstAppCounter;
            dwFirstHelp = dwFirstAppHelp;
            return S_OK;
        }
    }
    CRegKey rkPerfLib;
    dwErr = rkPerfLib.Open(HKEY_LOCAL_MACHINE, c_szAtlPerfPerfLibKey);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    if (!rkApp)
    {
        // The app key didn't exist yet; create it so the range can be recorded.
        dwErr = rkApp.Create(HKEY_LOCAL_MACHINE, strAppKey);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
    }
    // figure out the counter range
    DWORD dwLastCounter;
    DWORD dwLastHelp;
    dwErr = rkPerfLib.QueryDWORDValue(c_szAtlPerfLastCounter, dwLastCounter);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    dwErr = rkPerfLib.QueryDWORDValue(c_szAtlPerfLastHelp, dwLastHelp);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    // New range starts 2 past the system's current last index; advance the
    // system markers by 2 per string to claim our block.
    dwFirstCounter = dwLastCounter + 2;
    dwFirstHelp = dwLastHelp + 2;
    dwLastCounter += 2*nNumStrings;
    dwLastHelp += 2*nNumStrings;
    dwErr = rkPerfLib.SetDWORDValue(c_szAtlPerfLastCounter, dwLastCounter);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    dwErr = rkPerfLib.SetDWORDValue(c_szAtlPerfLastHelp, dwLastHelp);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    // register the used counter range
    dwErr = rkApp.SetDWORDValue(c_szAtlPerfFirstCounter, dwFirstCounter);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    dwErr = rkApp.SetDWORDValue(c_szAtlPerfLastCounter, dwLastCounter);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    dwErr = rkApp.SetDWORDValue(c_szAtlPerfFirstHelp, dwFirstHelp);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    dwErr = rkApp.SetDWORDValue(c_szAtlPerfLastHelp, dwLastHelp);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    return S_OK;
}
// Registers this performance DLL with the system: builds the category/counter
// map, reserves perflib index ranges, assigns name/help ids to every category
// and counter, and records the DLL path plus the Open/Collect/Close export
// names under the app's Performance registry key. Any previous registration
// is removed first so registration can be repeated cleanly.
inline HRESULT CPerfMon::Register(
    LPCTSTR szOpenFunc,
    LPCTSTR szCollectFunc,
    LPCTSTR szCloseFunc,
    HINSTANCE hDllInstance /* == _AtlBaseModule.GetModuleInstance() */
    ) throw()
{
    ATLASSERT(szOpenFunc != NULL);
    ATLASSERT(szCollectFunc != NULL);
    ATLASSERT(szCloseFunc != NULL);
    CString str;
    DWORD dwErr;
    HRESULT hr;
    // Build the map using the current thread's language; fall back to
    // LCID 1033 (English-US) if those resources aren't available.
    hr = CreateMap(LANGIDFROMLCID(GetThreadLocale()), hDllInstance);
    if (FAILED(hr)){
        hr = CreateMap(LANGIDFROMLCID(1033), hDllInstance);
        if (FAILED(hr))
            return hr;
    }
    CString strAppKey;
    _ATLTRY
    {
        strAppKey.Format(c_szAtlPerfPerformanceKey, GetAppName());
    }
    _ATLCATCHALL()
    {
        return E_OUTOFMEMORY;
    }
    // if we're already registered, unregister so we can redo registration
    _UnregisterStrings();
    // reserve a range for our counter and help strings
    DWORD dwFirstCounter = 0;
    DWORD dwFirstHelp = 0;
    hr = _ReserveStringRange(dwFirstCounter, dwFirstHelp);
    if (FAILED(hr))
        return hr;
    // Hand out ids sequentially: each category and each counter gets the next
    // even name id and help id (perflib ids advance in steps of 2).
    DWORD dwCurrentName = dwFirstCounter;
    DWORD dwCurrentHelp = dwFirstHelp;
    for (UINT i=0; i<_GetNumCategories(); i++)
    {
        CategoryInfo* pCategoryInfo = _GetCategoryInfo(i);
        pCategoryInfo->m_nNameId = dwCurrentName;
        dwCurrentName += 2;
        pCategoryInfo->m_nHelpId = dwCurrentHelp;
        dwCurrentHelp += 2;
        for (UINT j=0; j<pCategoryInfo->_GetNumCounters(); j++)
        {
            CounterInfo* pCounterInfo = pCategoryInfo->_GetCounterInfo(j);
            pCounterInfo->m_nNameId = dwCurrentName;
            dwCurrentName += 2;
            pCounterInfo->m_nHelpId = dwCurrentHelp;
            dwCurrentHelp += 2;
        }
    }
    // register the app entry points
    CRegKey rkApp;
    dwErr = rkApp.Create(HKEY_LOCAL_MACHINE, strAppKey);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    _ATLTRY
    {
        // Record the full DLL path; a dwFLen of MAX_PATH means truncation.
        DWORD dwFLen = GetModuleFileName(hDllInstance, CStrBuf(str, MAX_PATH), MAX_PATH);
        if( dwFLen == 0 )
            return AtlHresultFromLastError();
        else if( dwFLen == MAX_PATH )
            return HRESULT_FROM_WIN32(ERROR_INSUFFICIENT_BUFFER);
    }
    _ATLCATCHALL()
    {
        return E_OUTOFMEMORY;
    }
    dwErr = rkApp.SetStringValue(c_szAtlPerfLibrary, str);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    dwErr = rkApp.SetStringValue(c_szAtlPerfOpen, szOpenFunc);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    dwErr = rkApp.SetStringValue(c_szAtlPerfCollect, szCollectFunc);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    dwErr = rkApp.SetStringValue(c_szAtlPerfClose, szCloseFunc);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    // Start with an empty installed-languages list; RegisterStrings appends
    // to it as each language's strings are installed.
    dwErr = rkApp.SetStringValue(c_szAtlPerfLanguages, _T(""));
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    hr = _SaveMap();
    if (FAILED(hr))
        return hr;
    // if the dll is disabled, reenable it since we just reregistered it
    rkApp.DeleteValue(_T("Disable Performance Counters"));
    return S_OK;
}
// Installs this app's counter-name and help strings for one language into the
// perflib language key. Resolves a neutral language to the thread locale and
// then the system default; returns S_FALSE if the language key doesn't exist
// on this system, S_OK if the language was already registered or was
// registered successfully.
inline HRESULT CPerfMon::RegisterStrings(
    LANGID language /* = MAKELANGID(LANG_NEUTRAL, SUBLANG_NEUTRAL) */,
    HINSTANCE hResInstance /* = _AtlBaseModule.GetResourceInstance() */
    ) throw()
{
    _ATLTRY
    {
        CString str;
        DWORD dwErr;
        HRESULT hr;
        CRegKey rkLang;
        CRegKey rkApp;
        // Perflib keys are named by primary language id only (e.g. "009").
        LANGID wPrimaryLanguage = (LANGID) PRIMARYLANGID(language);
        if (language == MAKELANGID(LANG_NEUTRAL, SUBLANG_NEUTRAL))
        {
            //First try current thread locale
            language = LANGIDFROMLCID(GetThreadLocale());
            wPrimaryLanguage = (LANGID) PRIMARYLANGID(language);
        }
        str.Format(c_szAtlPerfPerfLibLangKey, wPrimaryLanguage);
        dwErr = rkLang.Open(HKEY_LOCAL_MACHINE, str);
        if (dwErr == ERROR_FILE_NOT_FOUND)
        {
            // failed using current thread, so try default system lcid
            language = GetSystemDefaultLangID();
            wPrimaryLanguage = (LANGID) PRIMARYLANGID(language);
            str.Format(c_szAtlPerfPerfLibLangKey, wPrimaryLanguage);
            dwErr = rkLang.Open(HKEY_LOCAL_MACHINE, str);
        }
        if (dwErr == ERROR_FILE_NOT_FOUND)
            return S_FALSE; // the language isn't installed on the system
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        // Rebuild the map from this language's resources so the strings we
        // install below match the requested language.
        hr = CreateMap(language, hResInstance);
        if (FAILED(hr))
            return hr;
        // load list of language strings already registered
        str.Format(c_szAtlPerfPerformanceKey, GetAppName());
        dwErr = rkApp.Open(HKEY_LOCAL_MACHINE, str);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        DWORD dwLangsLen = 0;
        CString strLangs;
        dwErr = rkApp.QueryStringValue(c_szAtlPerfLanguages, NULL, &dwLangsLen);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        // +4 leaves room for one more "xxx " language token.
        ULONG nLangsBuffSize = dwLangsLen+4;
        CStrBuf szLangs(strLangs, nLangsBuffSize, CStrBuf::SET_LENGTH); // reserve room for adding new language
        dwErr = rkApp.QueryStringValue(c_szAtlPerfLanguages, szLangs, &dwLangsLen);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        dwLangsLen--; // don't count '\0'
        // see if this language has already been registered and if so, return
        TCHAR szNewLang[5];
        // Language token format: 3 hex digits followed by a space.
        _sntprintf_s(szNewLang, _countof(szNewLang), _countof(szNewLang)-1, _T("%3.3x "), wPrimaryLanguage);
        if (strLangs.Find(szNewLang) != -1)
            return S_OK;
        // load the strings we want to append and figure out how much extra space is needed for them
        // (including up to 5-digit index values and 2 null separators)
        CAtlArray<CString> astrCounters;
        CAtlArray<CString> astrHelp;
        ULONG nNewCounterSize = 0;
        ULONG nNewHelpSize = 0;
        // Flatten categories and their counters into parallel name/help lists.
        for (UINT i=0; i<_GetNumCategories(); i++)
        {
            CategoryInfo* pCategoryInfo = _GetCategoryInfo(i);
            astrCounters.Add(pCategoryInfo->m_strName);
            astrHelp.Add(pCategoryInfo->m_strHelp);
            for (UINT j=0; j<pCategoryInfo->_GetNumCounters(); j++)
            {
                CounterInfo* pCounterInfo = pCategoryInfo->_GetCounterInfo(j);
                astrCounters.Add(pCounterInfo->m_strName);
                astrHelp.Add(pCounterInfo->m_strHelp);
            }
        }
        for (size_t i=0; i<astrCounters.GetCount(); i++)
        {
            // +7 = up to 5 index digits plus 2 null separators per entry.
            nNewCounterSize += astrCounters[i].GetLength() + 7;
            nNewHelpSize += astrHelp[i].GetLength() + 7;
        }
        DWORD dwFirstCounter;
        DWORD dwFirstHelp;
        DWORD dwLastCounter;
        DWORD dwLastHelp;
        dwErr = rkApp.QueryDWORDValue(c_szAtlPerfFirstCounter, dwFirstCounter);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        dwErr = rkApp.QueryDWORDValue(c_szAtlPerfFirstHelp, dwFirstHelp);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        dwErr = rkApp.QueryDWORDValue(c_szAtlPerfLastCounter, dwLastCounter);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        dwErr = rkApp.QueryDWORDValue(c_szAtlPerfLastHelp, dwLastHelp);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        // Splice our strings into the language's Counter and Help multi_sz
        // tables at the reserved index ranges.
        hr = _AppendRegStrings(rkLang, c_szAtlPerfCounter, astrCounters, nNewCounterSize, dwFirstCounter, dwLastCounter);
        if (FAILED(hr))
            return hr;
        hr = _AppendRegStrings(rkLang, c_szAtlPerfHelp, astrHelp, nNewHelpSize, dwFirstHelp, dwLastHelp);
        if (FAILED(hr))
            return hr;
        // add the language to the list of installed languages
        Checked::tcscpy_s(szLangs+dwLangsLen, nLangsBuffSize-dwLangsLen, szNewLang);
        dwErr = rkApp.SetStringValue(c_szAtlPerfLanguages, szLangs);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        return S_OK;
    }
    _ATLCATCHALL()
    {
        return E_OUTOFMEMORY;
    }
}
// EnumResourceLanguages callback: collects every language id the resource is
// localized into. lParam carries a CAtlArray<LANGID>* supplied by the caller.
// Returns TRUE to continue enumeration, FALSE to abort (out of memory).
inline BOOL CPerfMon::EnumResLangProc(
    HINSTANCE hModule,
    LPCTSTR szType,
    LPCTSTR szName,
    LANGID wIDLanguage,
    LPARAM lParam
    ) throw()
{
    // Only the language id matters here; silence unused-parameter warnings.
    (void)hModule;
    (void)szType;
    (void)szName;
    CAtlArray<LANGID>* pLangArray = reinterpret_cast<CAtlArray<LANGID>*>(lParam);
    _ATLTRY
    {
        pLangArray->Add(wIDLanguage);
    }
    _ATLCATCHALL()
    {
        // Allocation failure while growing the array: stop enumerating.
        return FALSE;
    }
    return TRUE;
}
// Registers the perfmon strings for every language found in the resource
// modules. Returns S_OK if at least one language was registered, S_FALSE if
// none were, or the first failure HRESULT.
inline HRESULT CPerfMon::RegisterAllStrings(
    HINSTANCE hResInstance /* = NULL */
    ) throw()
{
    // Build the map once to learn the sample string-resource id.
    UINT nRes;
    HRESULT hr = CreateMap(0, hResInstance, &nRes);
    if (FAILED(hr))
        return hr;
    // No string resources back the map: register the single default language.
    if (nRes == 0)
        return RegisterStrings(0, hResInstance);
    // An explicit module was supplied: register only its languages.
    if (hResInstance != NULL)
        return _RegisterAllStrings(nRes, hResInstance);
    // Otherwise walk every module ATL knows about until the list runs out.
    HRESULT hrResult = S_FALSE;
    for (int iModule = 0; ; iModule++)
    {
        hResInstance = _AtlBaseModule.GetHInstanceAt(iModule);
        if (hResInstance == NULL)
            break;
        hr = _RegisterAllStrings(nRes, hResInstance);
        if (FAILED(hr))
            return hr;
        if (hr == S_OK)
            hrResult = S_OK;
    }
    return hrResult;
}
// Enumerates every language the sample string resource is localized into in
// the given module and registers the perfmon strings for each. Returns S_OK
// if any language registered, S_FALSE if none did, or the first failure.
inline HRESULT CPerfMon::_RegisterAllStrings(
    UINT nRes,
    HINSTANCE hResInstance
    ) throw()
{
    CAtlArray<LANGID> aLangIds;
    // String resources are bundled 16 per block; (nRes>>4)+1 is the block id
    // EnumResourceLanguages expects.
    if (!EnumResourceLanguages(hResInstance, RT_STRING, MAKEINTRESOURCE((nRes>>4)+1), EnumResLangProc, reinterpret_cast<LPARAM>(&aLangIds)))
        return AtlHresultFromLastError();
    HRESULT hrResult = S_FALSE;
    const size_t nLangCount = aLangIds.GetCount();
    for (size_t iLang = 0; iLang < nLangCount; iLang++)
    {
        HRESULT hr = RegisterStrings(aLangIds[iLang], hResInstance);
        if (FAILED(hr))
            return hr;
        if (hr == S_OK)
            hrResult = S_OK;
    }
    return hrResult;
}
// Removes this app's counter/help strings from every perflib language key the
// app previously registered under (tracked by the space-separated language
// list stored on the app's Performance key), then clears that list.
// Returns S_OK if the app was never registered (key absent) or on success.
inline HRESULT CPerfMon::_UnregisterStrings() throw()
{
    _ATLTRY
    {
        CString str;
        HRESULT hr;
        DWORD dwErr;
        // unregister the PerfMon counter and help strings
        CRegKey rkApp;
        str.Format(c_szAtlPerfPerformanceKey, GetAppName());
        dwErr = rkApp.Open(HKEY_LOCAL_MACHINE, str);
        //The register strings was unregistered.
        if (dwErr == ERROR_FILE_NOT_FOUND)
            return S_OK;
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        // The index range reserved at registration time bounds which entries
        // get stripped from the shared perflib string tables.
        DWORD dwFirstAppCounter;
        DWORD dwFirstAppHelp;
        DWORD dwLastAppCounter;
        DWORD dwLastAppHelp;
        dwErr = rkApp.QueryDWORDValue(c_szAtlPerfFirstCounter, dwFirstAppCounter);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        dwErr = rkApp.QueryDWORDValue(c_szAtlPerfFirstHelp, dwFirstAppHelp);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        dwErr = rkApp.QueryDWORDValue(c_szAtlPerfLastCounter, dwLastAppCounter);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        dwErr = rkApp.QueryDWORDValue(c_szAtlPerfLastHelp, dwLastAppHelp);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        // iterate through the installed languages and delete them all
        DWORD nChars = 0;
        dwErr = rkApp.QueryStringValue(c_szAtlPerfLanguages, NULL, &nChars);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        CString strLangs;
        dwErr = rkApp.QueryStringValue(c_szAtlPerfLanguages, CStrBuf(strLangs, nChars, CStrBuf::SET_LENGTH), &nChars);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        // Tokenize the space-separated language list (e.g. "009 00c ").
        int nIndex = 0;
        CString strLang = strLangs.Tokenize(_T(" "), nIndex);
        while (!strLang.IsEmpty())
        {
            CRegKey rkLang;
            dwErr = rkLang.Open(HKEY_LOCAL_MACHINE, CString(c_szAtlPerfPerfLibKey) + _T("\\") + strLang);
            if (dwErr != ERROR_SUCCESS)
                return AtlHresultFromWin32(dwErr);
            hr = _RemoveRegStrings(rkLang, c_szAtlPerfCounter, dwFirstAppCounter, dwLastAppCounter);
            if (FAILED(hr))
                return hr;
            hr = _RemoveRegStrings(rkLang, c_szAtlPerfHelp, dwFirstAppHelp, dwLastAppHelp);
            if (FAILED(hr))
                return hr;
            strLang = strLangs.Tokenize(_T(" "), nIndex);
        }
        // All languages removed: reset the installed-languages list.
        dwErr = rkApp.SetStringValue(c_szAtlPerfLanguages, _T(""));
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
        return S_OK;
    }
    _ATLCATCHALL()
    {
        return E_OUTOFMEMORY;
    }
}
// Fully unregisters the performance DLL: removes the localized strings, gives
// back the reserved perflib index ranges when they are still the newest ones
// (so the global Last Counter/Last Help values can be rewound), and deletes
// the app's Performance subkey. Returns S_OK if already unregistered.
inline HRESULT CPerfMon::Unregister() throw()
{
    CString str;
    HRESULT hr;
    DWORD dwErr;
    CRegKey rkPerfLib;
    CRegKey rkApp;
    hr = _UnregisterStrings();
    if (FAILED(hr))
        return hr;
    dwErr = rkPerfLib.Open(HKEY_LOCAL_MACHINE, c_szAtlPerfPerfLibKey);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    _ATLTRY
    {
        str.Format(c_szAtlPerfPerformanceKey, GetAppName());
    }
    _ATLCATCHALL()
    {
        return E_OUTOFMEMORY;
    }
    dwErr = rkApp.Open(HKEY_LOCAL_MACHINE, str);
    // The performance counter was unregistered
    if (dwErr == ERROR_FILE_NOT_FOUND)
        return S_OK;
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    DWORD dwLastCounter;
    DWORD dwLastHelp;
    DWORD dwFirstAppCounter;
    DWORD dwFirstAppHelp;
    DWORD dwLastAppCounter;
    DWORD dwLastAppHelp;
    dwErr = rkPerfLib.QueryDWORDValue(c_szAtlPerfLastCounter, dwLastCounter);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    dwErr = rkPerfLib.QueryDWORDValue(c_szAtlPerfLastHelp, dwLastHelp);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    dwErr = rkApp.QueryDWORDValue(c_szAtlPerfFirstCounter, dwFirstAppCounter);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    dwErr = rkApp.QueryDWORDValue(c_szAtlPerfFirstHelp, dwFirstAppHelp);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    dwErr = rkApp.QueryDWORDValue(c_szAtlPerfLastCounter, dwLastAppCounter);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    dwErr = rkApp.QueryDWORDValue(c_szAtlPerfLastHelp, dwLastAppHelp);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    // rewind the Last Help/Last Counter values if possible
    // (only safe when our range is the most recently allocated one; otherwise
    // another app has indices above ours and the range is simply abandoned)
    if (dwLastCounter == dwLastAppCounter)
    {
        dwErr = rkPerfLib.SetDWORDValue(c_szAtlPerfLastCounter, dwFirstAppCounter-2);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
    }
    if (dwLastHelp == dwLastAppHelp)
    {
        dwErr = rkPerfLib.SetDWORDValue(c_szAtlPerfLastHelp, dwFirstAppHelp-2);
        if (dwErr != ERROR_SUCCESS)
            return AtlHresultFromWin32(dwErr);
    }
    rkApp.Close();
    // delete the app key
    CRegKey rkServices;
    _ATLTRY
    {
        str.Format(c_szAtlPerfServicesKey, GetAppName());
    }
    _ATLCATCHALL()
    {
        return E_OUTOFMEMORY;
    }
    dwErr = rkServices.Open(HKEY_LOCAL_MACHINE, str);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    dwErr = rkServices.RecurseDeleteKey(c_szAtlPerfPerformance);
    if (dwErr != ERROR_SUCCESS)
        return AtlHresultFromWin32(dwErr);
    return S_OK;
}
#endif
// Prepares this process for publishing counters: builds a DACL granting the
// measuring parties access, creates (or opens) the named global mutex that
// serializes access to the shared memory, maps the first shared-memory block,
// and seeds it with the category map from the registry if this process is the
// first writer. Must succeed before CreateInstance can be used.
inline HRESULT CPerfMon::Initialize() throw()
{
    CMutex tempLock;
    CString strAppName;
    HRESULT hr;
    _ATLTRY
    {
        strAppName = GetAppName();
        ATLASSUME(m_aMem.GetCount() == 0);
        CAccessToken at;
        if (!at.GetEffectiveToken(TOKEN_QUERY))
            return E_FAIL;
        CSid self;
        if (!at.GetUser(&self))
            return E_FAIL;
        // set up security information for creating the mutex
        CDacl dacl;
        dacl.AddAllowedAce(Sids::NetworkService(),GENERIC_READ);
        dacl.AddAllowedAce(Sids::Admins(), GENERIC_ALL);
        dacl.AddAllowedAce(Sids::System(), GENERIC_ALL);
        dacl.AddAllowedAce(self, GENERIC_ALL);
        m_sd.SetDacl(dacl);
        m_sd.SetOwner(self);
        CSecurityAttributes sa;
        sa.Set(m_sd);
        // create a mutex to handle syncronizing access to the shared memory area
        CString strMutexName;
        strMutexName.Format(_T("Global\\ATLPERF_%s_LOCK"), strAppName);
        tempLock.Create(&sa, FALSE, strMutexName);
        if (tempLock.m_h == NULL)
            return AtlHresultFromLastError();
        if (GetLastError() == ERROR_ALREADY_EXISTS)
        {
            // prevent us from using an object someone else has opened
            // (re-stamp the owner and DACL so a squatter can't hold the lock
            // with hostile security settings)
            if (::SetSecurityInfo(tempLock, SE_KERNEL_OBJECT,
                    DACL_SECURITY_INFORMATION | OWNER_SECURITY_INFORMATION,
                    const_cast<SID*>(self.GetPSID()),
                    NULL,
                    const_cast<ACL*>(dacl.GetPACL()),
                    NULL) != ERROR_SUCCESS)
                return E_FAIL;
        }
        // now set up the dacl for creating shared memory segments and store it
        dacl.AddAllowedAce(Sids::Interactive(), GENERIC_READ);
        m_sd.SetDacl(dacl);
        // create a shared memory area to share data between the app being measured and the client doing the measuring
        {
            CMutexLock lock(tempLock);
            BOOL bExisted = FALSE;
            CAtlFileMappingBase* pMem;
            pMem = _AllocNewBlock(NULL, &bExisted);
            if (pMem == NULL)
                return E_OUTOFMEMORY;
            if (!bExisted)
            {
                // copy the map from the registry to the shared memory
                CRegKey rkApp;
                DWORD dwErr;
                CString strAppKey;
                strAppKey.Format(c_szAtlPerfPerformanceKey, GetAppName());
                dwErr = rkApp.Open(HKEY_LOCAL_MACHINE, strAppKey, KEY_READ);
                if (dwErr != ERROR_SUCCESS)
                {
                    // Roll back the mapping we just created before failing.
                    m_aMem.RemoveAll();
                    return AtlHresultFromWin32(dwErr);
                }
                ULONG nBytes = m_nAllocSize;
                dwErr = rkApp.QueryBinaryValue(c_szAtlPerfMap, pMem->GetData(), &nBytes);
                if (dwErr != ERROR_SUCCESS)
                {
                    m_aMem.RemoveAll();
                    return AtlHresultFromWin32(dwErr);
                }
            }
            hr = _LoadMap(LPDWORD(pMem->GetData()));
            if (FAILED(hr))
            {
                m_aMem.RemoveAll();
                return hr;
            }
            // First DWORD of the block holds the schema size; the data area
            // starts after it, rounded up to a 16-byte boundary.
            m_nSchemaSize = *LPDWORD(pMem->GetData());
            m_nHeaderSize = m_nSchemaSize + sizeof(DWORD);
            m_nHeaderSize = AtlAlignUp(m_nHeaderSize,16);
        }
        // Everything succeeded: adopt the mutex into the member handle.
        m_lock.Attach(tempLock.Detach());
    }
    _ATLCATCHALL()
    {
        m_aMem.RemoveAll();
        return E_OUTOFMEMORY;
    }
    return S_OK;
}
// Tears down what Initialize() built: closes the cross-process mutex, drops
// all shared-memory mappings, and clears the in-memory category map.
inline void CPerfMon::UnInitialize() throw()
{
    if (m_lock.m_h != NULL)
        m_lock.Close();
    m_aMem.RemoveAll();
    ClearMap();
}
// Core instance allocator shared by CreateInstance and CreateInstanceByName.
// Walks the shared-memory blocks looking for an existing instance of the
// category (matched by id or, when bByName, by name); if found, bumps its
// refcount and returns it. Otherwise reuses a freed slot of the right size or
// appends a new one (allocating a new shared-memory block when the current
// one is full). Requires a successful Initialize() first.
inline HRESULT CPerfMon::_CreateInstance(
    DWORD dwCategoryId,
    DWORD dwInstance,
    LPCWSTR szInstanceName,
    CPerfObject** ppInstance,
    bool bByName
    ) throw()
{
    CPerfObject* pEmptyBlock = NULL;
    if (ppInstance == NULL)
        return E_POINTER;
    CAtlFileMappingBase* pCurrentBlock = _GetNextBlock(NULL);
    if (pCurrentBlock == NULL || pCurrentBlock->GetData() == NULL || m_lock.m_h == NULL)
        return E_UNEXPECTED; // Initialize must succeed before calling CreateInstance
    *ppInstance = NULL;
    CategoryInfo* pCategoryInfo = _FindCategoryInfo(dwCategoryId);
    if (pCategoryInfo == NULL)
        return E_INVALIDARG;
    if (szInstanceName == NULL && bByName)
        return E_INVALIDARG;
    // Instance-less categories must not be given an instance id or name.
    if (pCategoryInfo->m_nInstanceLess == PERF_NO_INSTANCES &&
            (dwInstance != 0 || szInstanceName != NULL))
        return E_INVALIDARG;
    // Hold the cross-process lock for the whole search-or-allocate operation.
    CPerfLock lock(this);
    if (FAILED(lock.GetStatus()))
        return lock.GetStatus();
    CPerfObject* pInstance = _GetFirstInstance(pCurrentBlock);
    ULONG nMaxInstance = 0;
    ULONG nUsedSpace = 0;
    // walk all of the existing objects trying to find one that matches the request
    // (m_nAllocSize == 0 marks the end of used space in a block)
    while (pInstance->m_nAllocSize != 0)
    {
        nUsedSpace += pInstance->m_nAllocSize;
        if (pInstance->m_dwCategoryId == dwCategoryId)
        {
            // Track the highest instance id seen so auto-numbering can
            // assign nMaxInstance+1 below.
            nMaxInstance = __max(nMaxInstance, pInstance->m_dwInstance);
            // check to see if we've found the one the caller wants
            if (!bByName && pInstance->m_dwInstance == dwInstance &&
                    (pCategoryInfo->m_nInstanceLess == PERF_NO_INSTANCES || dwInstance != 0))
            {
                *ppInstance = pInstance;
                pInstance->m_nRefCount++;
                return S_OK;
            }
            if (bByName)
            {
                LPWSTR szInstName = (LPWSTR(LPBYTE(pInstance)+pInstance->m_nInstanceNameOffset));
                // Compare only up to the stored-name capacity; names longer
                // than that were truncated on creation.
                if (wcsncmp(szInstName, szInstanceName, pCategoryInfo->m_nMaxInstanceNameLen-1) == 0)
                {
                    *ppInstance = pInstance;
                    pInstance->m_nRefCount++;
                    return S_OK;
                }
            }
        }
        // Remember a released slot of exactly the right size for reuse.
        if (pInstance->m_nAllocSize == pCategoryInfo->m_nAllocSize && pInstance->m_nRefCount == 0)
            pEmptyBlock = pInstance;
        pInstance = _GetNextInstance(pInstance);
        ATLENSURE_RETURN(pInstance!= NULL);
        if (pInstance->m_nAllocSize == 0 &&
                m_nHeaderSize + nUsedSpace + pCategoryInfo->m_nAllocSize + sizeof(CPerfObject) > m_nAllocSize)
        {
            // we've reached the end of the block and have no room to allocate an object of this
            // type. cap the block with a sentinel
            pInstance->m_nAllocSize = (ULONG) -1;
        }
        // check for an end-of-shared-mem sentinel
        if (pInstance->m_nAllocSize == (ULONG) -1)
        {
            nUsedSpace = 0;
            CAtlFileMappingBase* pNextBlock = _GetNextBlock(pCurrentBlock);
            if (pNextBlock == NULL)
            {
                // we've reached the last block of shared mem.
                // the instance hasn't been found, so either use a
                // previously freed instance block (pEmptyBlock) or allocate a new
                // shared mem block to hold the new instance
                if (pEmptyBlock == NULL)
                {
                    pNextBlock = _AllocNewBlock(pCurrentBlock);
                    if (pNextBlock == NULL)
                        return E_OUTOFMEMORY;
                }
                else
                    break;
            }
            pCurrentBlock = pNextBlock;
            pInstance = _GetFirstInstance(pCurrentBlock);
        }
    }
    // allocate a new object
    if (pEmptyBlock != NULL)
        pInstance = pEmptyBlock;
    else
        pInstance->m_nAllocSize = pCategoryInfo->m_nAllocSize;
    // dwInstance == 0 on an instanced category means auto-assign the next id.
    if (dwInstance == 0 && pCategoryInfo->m_nInstanceLess != PERF_NO_INSTANCES)
        pInstance->m_dwInstance = nMaxInstance + 1;
    else
        pInstance->m_dwInstance = dwInstance;
    pInstance->m_nRefCount = 1;
    // copy the instance name, truncate if necessary
    if (pCategoryInfo->m_nInstanceLess != PERF_NO_INSTANCES)
    {
        // The name lives at the tail end of the instance's allocation.
        ULONG nNameLen = (ULONG)__min(wcslen(szInstanceName), pCategoryInfo->m_nMaxInstanceNameLen-1);
        ULONG nNameBytes = (nNameLen+1) * sizeof(WCHAR);
        pInstance->m_nInstanceNameOffset = pInstance->m_nAllocSize-nNameBytes;
        Checked::memcpy_s(LPBYTE(pInstance)+pInstance->m_nInstanceNameOffset, pInstance->m_nAllocSize-pInstance->m_nInstanceNameOffset, szInstanceName, nNameBytes);
        LPWSTR(LPBYTE(pInstance)+pInstance->m_nInstanceNameOffset)[nNameLen] = 0;
    }
    // copy the CategoryId last: it won't be collected until this is set
    pInstance->m_dwCategoryId = pCategoryInfo->m_dwCategoryId;
    *ppInstance = pInstance;
    return S_OK;
}
// Creates (or finds) a counter instance addressed by numeric instance id.
// Thin wrapper over _CreateInstance with by-name lookup disabled.
inline HRESULT CPerfMon::CreateInstance(
    DWORD dwCategoryId,
    DWORD dwInstance,
    LPCWSTR szInstanceName,
    CPerfObject** ppInstance
    ) throw()
{
    const bool bByName = false;
    return _CreateInstance(dwCategoryId, dwInstance, szInstanceName, ppInstance, bByName);
}
// Creates (or finds) a counter instance addressed by its name. Thin wrapper
// over _CreateInstance with by-name lookup enabled and id auto-assignment.
inline HRESULT CPerfMon::CreateInstanceByName(
    DWORD dwCategoryId,
    LPCWSTR szInstanceName,
    CPerfObject** ppInstance
    ) throw()
{
    const bool bByName = true;
    return _CreateInstance(dwCategoryId, /*dwInstance=*/0, szInstanceName, ppInstance, bByName);
}
// Releases one reference on a counter instance obtained from CreateInstance.
// When the last reference drops, the slot is marked free (zero instance and
// category ids) so collectors skip it and _CreateInstance may reuse it.
inline HRESULT CPerfMon::ReleaseInstance(CPerfObject* pInstance) throw()
{
    ATLASSERT(pInstance != NULL);
    if (pInstance == NULL)
        return E_INVALIDARG;
    // Serialize against collectors and other writers in the shared memory.
    CPerfLock lock(this);
    HRESULT hrLock = lock.GetStatus();
    if (FAILED(hrLock))
        return hrLock;
    pInstance->m_nRefCount--;
    if (pInstance->m_nRefCount == 0)
    {
        pInstance->m_dwInstance = 0;
        pInstance->m_dwCategoryId = 0;
    }
    return S_OK;
}
// Acquires the cross-process perf mutex, waiting up to dwTimeout ms.
// E_UNEXPECTED if Initialize() hasn't created the mutex yet; an abandoned
// mutex still counts as acquired (matching the original behavior).
inline HRESULT CPerfMon::LockPerf(DWORD dwTimeout /* == INFINITE */) throw()
{
    if (m_lock.m_h == NULL)
        return E_UNEXPECTED;
    const DWORD dwWait = WaitForSingleObject(m_lock.m_h, dwTimeout);
    switch (dwWait)
    {
    case WAIT_OBJECT_0:
    case WAIT_ABANDONED:
        return S_OK;
    case WAIT_TIMEOUT:
        return HRESULT_FROM_WIN32(ERROR_TIMEOUT);
    default:
        return AtlHresultFromLastError();
    }
}
// Releases the cross-process perf mutex acquired by LockPerf().
inline void CPerfMon::UnlockPerf() throw()
{
    m_lock.Release();
}
// map building routines
// map building routines
// Appends a new category (perf object) definition to the in-memory map.
// A category is either instance-less (bInstanceLess, nMaxInstanceNameLen == 0)
// or instanced (nMaxInstanceNameLen > 0) — exactly one must be set.
// nStructSize is the caller's CPerfObject-derived struct size; the per-instance
// allocation additionally reserves room for the (wide) instance name.
// Name/help ids are zeroed here and assigned later by Register().
inline HRESULT CPerfMon::AddCategoryDefinition(
    DWORD dwCategoryId,
    LPCTSTR szCategoryName,
    LPCTSTR szHelpString,
    DWORD dwDetailLevel,
    INT nDefaultCounter,
    BOOL bInstanceLess,
    UINT nStructSize,
    UINT nMaxInstanceNameLen) throw()
{
    // must have one and only one of these
    ATLASSERT(!bInstanceLess ^ !nMaxInstanceNameLen);
    // get the things that can fail out of the way first
    CString strName;
    CString strHelp;
    _ATLTRY
    {
        strName = szCategoryName;
        strHelp = szHelpString;
    }
    _ATLCATCHALL()
    {
        return E_OUTOFMEMORY;
    }
    if (!m_categories.SetCount(m_categories.GetCount()+1))
    {
        return E_OUTOFMEMORY;
    }
    // category has been added, set the data
    CategoryInfo* pCategoryInfo = _GetCategoryInfo(_GetNumCategories()-1);
    pCategoryInfo->m_dwCategoryId = dwCategoryId;
    pCategoryInfo->m_dwDetailLevel = dwDetailLevel;
    pCategoryInfo->m_nDefaultCounter = nDefaultCounter;
    pCategoryInfo->m_nInstanceLess = bInstanceLess ? PERF_NO_INSTANCES : 0;
    pCategoryInfo->m_nStructSize = nStructSize;
    pCategoryInfo->m_nMaxInstanceNameLen = nMaxInstanceNameLen;
    // Instance allocation = user struct + trailing wide-char name buffer.
    pCategoryInfo->m_nAllocSize = nStructSize + nMaxInstanceNameLen*sizeof(WCHAR);
    pCategoryInfo->m_strName = strName;
    pCategoryInfo->m_strHelp = strHelp;
    pCategoryInfo->m_nNameId = 0;
    pCategoryInfo->m_nHelpId = 0;
    return S_OK;
}
// Appends a counter definition to the most recently added category.
// nOffset is the byte offset of the counter's data within the caller's
// CPerfObject-derived struct; nMaxCounterSize (> 0 only for text counters)
// reserves additional per-instance space for the string value.
// Name/help ids are zeroed here and assigned later by Register().
inline HRESULT CPerfMon::AddCounterDefinition(
    DWORD dwCounterId,
    LPCTSTR szCounterName,
    LPCTSTR szHelpString,
    DWORD dwDetailLevel,
    DWORD dwCounterType,
    ULONG nMaxCounterSize,
    UINT nOffset,
    INT nDefaultScale) throw()
{
    // must add category BEFORE adding counter!
    ATLASSERT(_GetNumCategories() > 0);
    CounterInfo counter;
    counter.m_dwCounterId = dwCounterId;
    _ATLTRY
    {
        counter.m_strName = szCounterName;
        counter.m_strHelp = szHelpString;
    }
    _ATLCATCHALL()
    {
        return E_OUTOFMEMORY;
    }
    counter.m_dwDetailLevel = dwDetailLevel;
    counter.m_dwCounterType = dwCounterType;
    counter.m_nDefaultScale = nDefaultScale;
    counter.m_nMaxCounterSize = nMaxCounterSize;
    counter.m_nDataOffset = nOffset;
    counter.m_nNameId = 0;
    counter.m_nHelpId = 0;
    // add the counter to the category
    CategoryInfo* pCategoryInfo = _GetCategoryInfo(_GetNumCategories()-1);
    _ATLTRY
    {
        pCategoryInfo->m_counters.Add(counter);
    }
    _ATLCATCHALL()
    {
        return E_OUTOFMEMORY;
    }
    if (counter.m_nMaxCounterSize > 0)
    {
        // Only text counters carry a size; grow the per-instance allocation
        // to hold the counter's string data.
        ATLASSERT(counter.m_dwCounterType & PERF_TYPE_TEXT);
        pCategoryInfo->m_nAllocSize += counter.m_nMaxCounterSize * sizeof(WCHAR);
    }
    return S_OK;
}
// Resource-id overload: loads the category name and help strings from the
// given module/language, reports the name resource id through pSampleRes,
// then delegates to the string-based overload.
inline HRESULT CPerfMon::RegisterCategory(
    WORD wLanguage,
    HINSTANCE hResInstance,
    UINT* pSampleRes,
    DWORD dwCategoryId,
    UINT nNameString,
    UINT nHelpString,
    DWORD dwDetail,
    BOOL bInstanceless,
    UINT nStructSize,
    UINT nMaxInstanceNameLen,
    INT nDefaultCounter) throw()
{
    // Hand back the name resource id so callers can enumerate its languages.
    if (pSampleRes != NULL)
        *pSampleRes = nNameString;
    CString strCatName;
    CString strCatHelp;
    _ATLTRY
    {
        // Both strings must resolve; a missing resource aborts registration.
        BOOL bLoaded = strCatName.LoadString(hResInstance, nNameString, wLanguage)
            && strCatHelp.LoadString(hResInstance, nHelpString, wLanguage);
        if (!bLoaded)
            return E_FAIL;
    }
    _ATLCATCHALL()
    {
        return E_OUTOFMEMORY;
    }
    return RegisterCategory(
        wLanguage,
        hResInstance,
        pSampleRes,
        dwCategoryId,
        strCatName,
        strCatHelp,
        dwDetail,
        bInstanceless,
        nStructSize,
        nMaxInstanceNameLen,
        nDefaultCounter);
}
// String overload: the language/module/sample-res parameters are accepted for
// signature parity with the resource overload but unused; this simply records
// the category definition in the map.
inline HRESULT CPerfMon::RegisterCategory(
    WORD /* wLanguage */,
    HINSTANCE /* hResInstance */,
    UINT* /* pSampleRes */,
    DWORD dwCategoryId,
    LPCTSTR szNameString,
    LPCTSTR szHelpString,
    DWORD dwDetail,
    BOOL bInstanceless,
    UINT nStructSize,
    UINT nMaxInstanceNameLen,
    INT nDefaultCounter) throw()
{
    return AddCategoryDefinition(dwCategoryId, szNameString, szHelpString,
        dwDetail, nDefaultCounter, bInstanceless, nStructSize, nMaxInstanceNameLen);
}
// Resource-id overload: loads the counter name and help strings from the
// given module/language, then delegates to the string-based overload.
inline HRESULT CPerfMon::RegisterCounter(
    WORD wLanguage,
    HINSTANCE hResInstance,
    DWORD dwCounterId,
    UINT nNameString,
    UINT nHelpString,
    DWORD dwDetail,
    DWORD dwCounterType,
    ULONG nMaxCounterSize,
    UINT nOffset,
    INT nDefaultScale) throw()
{
    CString strCtrName;
    CString strCtrHelp;
    _ATLTRY
    {
        // Both strings must resolve; a missing resource aborts registration.
        BOOL bLoaded = strCtrName.LoadString(hResInstance, nNameString, wLanguage)
            && strCtrHelp.LoadString(hResInstance, nHelpString, wLanguage);
        if (!bLoaded)
            return E_FAIL;
    }
    _ATLCATCHALL()
    {
        return E_OUTOFMEMORY;
    }
    return RegisterCounter(
        wLanguage,
        hResInstance,
        dwCounterId,
        strCtrName,
        strCtrHelp,
        dwDetail,
        dwCounterType,
        nMaxCounterSize,
        nOffset,
        nDefaultScale);
}
// String overload: the language/module parameters are accepted for signature
// parity with the resource overload but unused; this simply records the
// counter definition against the most recently added category.
inline HRESULT CPerfMon::RegisterCounter(
    WORD /* wLanguage */,
    HINSTANCE /* hResInstance */,
    DWORD dwCounterId,
    LPCTSTR szNameString,
    LPCTSTR szHelpString,
    DWORD dwDetail,
    DWORD dwCounterType,
    ULONG nMaxCounterSize,
    UINT nOffset,
    INT nDefaultScale) throw()
{
    return AddCounterDefinition(dwCounterId, szNameString, szHelpString,
        dwDetail, dwCounterType, nMaxCounterSize, nOffset, nDefaultScale);
}
// Discards all category (and therefore counter) definitions from the map.
inline void CPerfMon::ClearMap() throw()
{
    m_categories.RemoveAll();
}
#ifndef _ATL_PERF_NOXML
// Serializes a snapshot of every category, live instance, and counter value
// to XML (UTF-8 content) on the given stream. bFirst/bLast control emission
// of the enclosing <perfPersist> wrapper so multiple CPerfMon objects can
// append into one document. Holds the perf lock for the whole walk.
// Returns S_OK on success, a stream/allocation failure HRESULT otherwise.
ATL_NOINLINE inline HRESULT CPerfMon::PersistToXML(IStream *pStream, BOOL bFirst/*=TRUE*/, BOOL bLast/*=TRUE*/) throw(...)
{
    ATLASSERT(pStream != NULL);
    if (pStream == NULL)
        return E_INVALIDARG;
    CPerfLock lock(this);
    if (FAILED(lock.GetStatus()))
        return ERROR_SUCCESS; // NOTE(review): preserved from original — lock failure reports 0 (== S_OK); confirm callers depend on this
    CStringA strXML;
    HRESULT hr = S_OK;
    ULONG nLen = 0;
    if (bFirst)
    {
        strXML = "<?xml version=\"1.0\" ?>\r\n<perfPersist>\r\n";
        hr = pStream->Write(strXML, strXML.GetLength(), &nLen);
        if (hr != S_OK)
            return hr;
    }
    strXML.Format("\t<perfmon name=\"%s\">\r\n", CT2CA(GetAppName()));
    hr = pStream->Write(strXML, strXML.GetLength(), &nLen);
    for (UINT i=0; i<_GetNumCategories(); i++)
    {
        CategoryInfo* pCategoryInfo = _GetCategoryInfo(i);
        // Restart the shared-memory walk from the first block per category.
        CAtlFileMappingBase *pCurrentBlock = _GetNextBlock(NULL);
        CPerfObject *pInstance = _GetFirstInstance(pCurrentBlock);
        // BUG FIX: the format string has exactly one %d; the extra
        // m_nNameId/m_nHelpId arguments were unused varargs and are removed.
        strXML.Format("\t\t<perfObject perfid=\"%d\">\r\n",
            pCategoryInfo->m_dwCategoryId);
        hr = pStream->Write(strXML, strXML.GetLength(), &nLen);
        if (hr != S_OK)
            return E_FAIL;
        // m_nAllocSize == 0 marks the end of used space in a block.
        while (pInstance && pInstance->m_nAllocSize)
        {
            if (pInstance->m_dwCategoryId == pCategoryInfo->m_dwCategoryId)
            {
                if (pCategoryInfo->m_nInstanceLess != PERF_NO_INSTANCES)
                {
                    // handle the instance name
                    LPCWSTR wszInstNameSrc = LPCWSTR(LPBYTE(pInstance)+pInstance->m_nInstanceNameOffset);
                    int nInstLen = (int) wcslen(wszInstNameSrc);
                    // convert to UTF8
                    int nLength = AtlUnicodeToUTF8(wszInstNameSrc, nInstLen, NULL, 0);
                    CHeapPtr<CHAR> szUTF8;
                    if ((nLength < 0) || (nLength+1<nLength) || !szUTF8.Allocate(nLength+1))
                        return E_OUTOFMEMORY;
                    nLength = AtlUnicodeToUTF8(wszInstNameSrc, nInstLen, szUTF8, nLength);
                    szUTF8[nLength] = '\0';
                    strXML.Format("\t\t\t<instance name=\"%s\" id=\"%d\">\r\n", szUTF8, pInstance->m_dwInstance);
                    hr = pStream->Write(strXML, strXML.GetLength(), &nLen);
                    if (hr != S_OK)
                        return hr;
                }
                for (UINT j=0; j<pCategoryInfo->_GetNumCounters(); j++)
                {
                    CounterInfo *pCounterInfo = pCategoryInfo->_GetCounterInfo(j);
                    switch (pCounterInfo->m_dwCounterType & ATLPERF_SIZE_MASK)
                    {
                    case PERF_SIZE_DWORD:
                        {
                            strXML.Format("\t\t\t\t<counter type=\"perf_size_dword\" value=\"%d\" offset=\"%d\"/>\r\n",
                                *LPDWORD(LPBYTE(pInstance)+pCounterInfo->m_nDataOffset),
                                pCounterInfo->m_nDataOffset);
                            break;
                        }
                    case PERF_SIZE_LARGE:
                        {
                            // BUG FIX: a ULONGLONG must not be formatted with
                            // %d — varargs would consume only 32 bits and emit
                            // a wrong value; use the 64-bit unsigned specifier.
                            strXML.Format("\t\t\t\t<counter type=\"perf_size_large\" value=\"%I64u\" offset=\"%d\"/>\r\n",
                                *PULONGLONG(LPBYTE(pInstance)+pCounterInfo->m_nDataOffset),
                                pCounterInfo->m_nDataOffset);
                            break;
                        }
                    case PERF_SIZE_VARIABLE_LEN:
                        {
                            CHeapPtr<CHAR> szUTF8;
                            LPBYTE pSrc = LPBYTE(pInstance)+pCounterInfo->m_nDataOffset;
                            if ((pCounterInfo->m_dwCounterType & ATLPERF_TEXT_MASK) == PERF_TEXT_UNICODE)
                            {
                                ULONG nTextLen = (ULONG)wcslen(LPCWSTR(pSrc));
                                // convert to UTF8
                                nLen = AtlUnicodeToUTF8(LPCWSTR(pSrc), nTextLen, NULL, 0);
                                if (!szUTF8.Allocate(nLen+1))
                                    return E_OUTOFMEMORY;
                                nLen = AtlUnicodeToUTF8(LPCWSTR(pSrc), nTextLen, szUTF8, nLen);
                                szUTF8[nLen] = '\0';
                                strXML.Format("\t\t\t\t<counter type=\"perf_size_variable_len_unicode\" value=\"%s\" offset=\"%d\"/>\r\n",
                                    szUTF8,
                                    pCounterInfo->m_nDataOffset);
                            }
                            else
                            {
                                ULONG nTextLen = (ULONG)strlen(LPCSTR(pSrc));
                                if (!szUTF8.Allocate(nTextLen+1))
                                    return E_OUTOFMEMORY;
                                Checked::strcpy_s(szUTF8, nTextLen+1, LPCSTR(pSrc));
                                strXML.Format("\t\t\t\t<counter type=\"perf_size_variable_len_ansi\" value=\"%s\" offset=\"%d\"/>\r\n",
                                    szUTF8,
                                    pCounterInfo->m_nDataOffset);
                            }
                            break;
                        }
                    default:
                        // error:
                        return E_FAIL;
                    }
                    hr = pStream->Write(strXML, strXML.GetLength(), &nLen);
                    if (hr != S_OK)
                        return hr;
                }
                if (pCategoryInfo->m_nInstanceLess != PERF_NO_INSTANCES)
                {
                    hr = pStream->Write("\t\t\t</instance>\r\n", sizeof("\t\t\t</instance>\r\n")-1, &nLen);
                    if (hr != S_OK)
                        return hr;
                }
            }
            pInstance = _GetNextInstance(pInstance);
            ATLENSURE_RETURN(pInstance!= NULL);
            // (ULONG)-1 is the end-of-block sentinel: hop to the next
            // shared-memory block, or stop if this was the last one.
            if (pInstance->m_nAllocSize == (ULONG)-1)
            {
                pCurrentBlock = _GetNextBlock(pCurrentBlock);
                if (pCurrentBlock == NULL)
                    pInstance = NULL;
                else
                    pInstance = _GetFirstInstance(pCurrentBlock);
            }
        }
        hr = pStream->Write("\t\t</perfObject>\r\n", sizeof("\t\t</perfObject>\r\n")-1, &nLen);
        if (hr != S_OK)
            return hr;
    }
    hr = pStream->Write("\t</perfmon>\r\n", sizeof("\t</perfmon>\r\n")-1, &nLen);
    if (hr != S_OK)
        return hr;
    if (hr == S_OK && bLast)
        hr = pStream->Write("</perfPersist>", sizeof("</perfPersist>")-1, &nLen);
    return hr;
}
// This function is very lenient with inappropriate XML
// Restores previously persisted counter data (see PersistToXML) into this
// CPerfMon's instances. Returns S_FALSE for XML that does not match the
// expected <perfPersist>/<perfmon>/<perfObject>/<instance>/<counter> shape.
ATL_NOINLINE inline HRESULT CPerfMon::LoadFromXML(IStream *pStream) throw(...)
{
	ATLASSERT(pStream != NULL);
	if (pStream == NULL)
		return E_INVALIDARG;
	// Get a lock
	CPerfLock lock(this);
	if (FAILED(lock.GetStatus()))
		return ERROR_SUCCESS;
	CComPtr<IXMLDOMDocument> spdoc;
	// load the xml
	HRESULT hr = CoCreateInstance(__uuidof(DOMDocument), NULL, CLSCTX_INPROC, __uuidof(IXMLDOMDocument), (void **) &spdoc);
	if (FAILED(hr))
	{
		return hr;
	}
	spdoc->put_async(VARIANT_FALSE);
	CComPtr<IPersistStreamInit> spSI;
	hr = spdoc->QueryInterface(&spSI);
	if (hr != S_OK)
		return hr;
	hr = spSI->Load(pStream);
	if (hr != S_OK)
		return hr;
	// validate that it is a perfPersist stream
	CComPtr<IXMLDOMElement> spRoot;
	hr = spdoc->get_documentElement(&spRoot);
	if (hr != S_OK)
		return hr;
	CComBSTR bstrName;
	hr = spRoot->get_baseName(&bstrName);
	// FIX: hr was previously unchecked here; wcscmp on a NULL BSTR is undefined
	if (hr != S_OK || wcscmp(bstrName, L"perfPersist"))
		return S_FALSE;
	// find the perfmon node whose "name" attribute matches this CPerfMon
	CComPtr<IXMLDOMNode> spChild;
	hr = spRoot->get_firstChild(&spChild);
	while (hr == S_OK)
	{
		bstrName.Empty();
		hr = spChild->get_baseName(&bstrName);
		if (hr == S_OK)
		{
			if (!wcscmp(bstrName, L"perfmon"))
			{
				bstrName.Empty();
				hr = _GetAttribute(spChild, L"name", &bstrName);
				if (hr == S_OK)
				{
					if (!_tcscmp(CW2CT(bstrName), GetAppName()))
						break;
				}
			}
		}
		CComPtr<IXMLDOMNode> spNext;
		hr = spChild->get_nextSibling(&spNext);
		spChild.Attach(spNext.Detach());
	}
	// there is no perfmon node in the XML for the current CPerfMon class
	if (hr != S_OK)
		return S_FALSE;
	CComPtr<IXMLDOMNode> spPerfRoot;
	spPerfRoot.Attach(spChild.Detach());
	// iterate over the objects in the perfmon subtree
	// this is the loop that does the real work
	hr = spPerfRoot->get_firstChild(&spChild);
	while (hr == S_OK)
	{
		// see if it's a perfObject
		bstrName.Empty();
		hr = spChild->get_baseName(&bstrName);
		if (hr != S_OK || wcscmp(bstrName, L"perfObject"))
			return S_FALSE;
		// get the perfid
		bstrName.Empty();
		hr = _GetAttribute(spChild, L"perfid", &bstrName);
		// FIX: hr was previously unchecked; _wtoi on a NULL BSTR is undefined
		if (hr != S_OK)
			return S_FALSE;
		DWORD dwPerfId = _wtoi(bstrName);
		// iterate over children
		CComPtr<IXMLDOMNode> spInstChild;
		hr = spChild->get_firstChild(&spInstChild);
		while (hr == S_OK)
		{
			// see if it's a instance
			bstrName.Empty();
			hr = spInstChild->get_baseName(&bstrName);
			if (hr != S_OK || wcscmp(bstrName, L"instance"))
				return S_FALSE;
			// get the instance name (kept in bstrName for CreateInstance below)
			bstrName.Empty();
			hr = _GetAttribute(spInstChild, L"name", &bstrName);
			if (hr != S_OK)
				return S_FALSE;
			// get the instance id from the <instance> node.
			// FIX: this was read from spChild (the perfObject node, which has no
			// "id" attribute -- PersistToXML writes "id" on <instance>) and into
			// bstrName, clobbering the instance name just read. Use the instance
			// node and a separate BSTR so the name survives.
			CComBSTR bstrId;
			hr = _GetAttribute(spInstChild, L"id", &bstrId);
			if (hr != S_OK)
				return S_FALSE;
			DWORD dwInstance = _wtoi(bstrId);
			// create the instance
			CPerfObject *pInstance = NULL;
			hr = CreateInstance(dwPerfId, dwInstance, bstrName, &pInstance);
			if (hr != S_OK)
				return S_FALSE;
			// iterate over the counters and set the data
			CComPtr<IXMLDOMNode> spCntrChild;
			hr = spInstChild->get_firstChild(&spCntrChild);
			while (hr == S_OK)
			{
				// get the base name
				bstrName.Empty();
				hr = spCntrChild->get_baseName(&bstrName);
				if (hr != S_OK || wcscmp(bstrName, L"counter"))
					return S_FALSE;
				// map the "type" attribute back to the PERF_SIZE/PERF_TEXT flags
				bstrName.Empty();
				hr = _GetAttribute(spCntrChild, L"type", &bstrName);
				if (hr != S_OK)
					return S_FALSE;
				DWORD dwType;
				if (!wcscmp(bstrName, L"perf_size_dword"))
					dwType = PERF_SIZE_DWORD;
				else if (!wcscmp(bstrName, L"perf_size_large"))
					dwType = PERF_SIZE_LARGE;
				else if (!wcscmp(bstrName, L"perf_size_variable_len_ansi"))
					dwType = PERF_SIZE_VARIABLE_LEN;
				else if (!wcscmp(bstrName, L"perf_size_variable_len_unicode"))
					dwType = PERF_SIZE_VARIABLE_LEN | PERF_TEXT_UNICODE;
				else
					return S_FALSE;
				// get the value
				bstrName.Empty();
				hr = _GetAttribute(spCntrChild, L"value", &bstrName);
				if (hr != S_OK)
					return S_FALSE;
				CComBSTR bstrOffset;
				hr = _GetAttribute(spCntrChild, L"offset", &bstrOffset);
				if (hr != S_OK)
					return S_FALSE;
				WCHAR *pStop = NULL;
				DWORD dwOffset = wcstoul(bstrOffset, &pStop, 10);
				if (dwType == PERF_SIZE_DWORD) // add it as a DWORD
				{
					DWORD dwVal = wcstoul(bstrName, &pStop, 10);
					*LPDWORD(LPBYTE(pInstance)+dwOffset) = dwVal;
				}
				else if (dwType == PERF_SIZE_LARGE) // add it as a ULONGLONG
				{
					ULONGLONG qwVal = _wcstoui64(bstrName, &pStop, 10);
					*PULONGLONG(LPBYTE(pInstance)+dwOffset) = qwVal;
				}
				else if (dwType == PERF_SIZE_VARIABLE_LEN) // add it as an ansi string
				{
					AtlW2AHelper(LPSTR(LPBYTE(pInstance)+dwOffset), bstrName, bstrName.Length(), ATL::_AtlGetConversionACP());
				}
				else // add it as a unicode string
				{
					Checked::memcpy_s(LPBYTE(pInstance)+dwOffset, pInstance->m_nAllocSize-dwOffset, bstrName, bstrName.Length()*sizeof(WCHAR));
				}
				CComPtr<IXMLDOMNode> spCntrNext;
				hr = spCntrChild->get_nextSibling(&spCntrNext);
				spCntrChild.Attach(spCntrNext.Detach());
			}
			CComPtr<IXMLDOMNode> spInstNext;
			hr = spInstChild->get_nextSibling(&spInstNext);
			spInstChild.Attach(spInstNext.Detach());
		}
		CComPtr<IXMLDOMNode> spNext;
		hr = spChild->get_nextSibling(&spNext);
		spChild.Attach(spNext.Detach());
	}
	return S_OK;
}
// a little utility function to retrieve a named attribute from a node
ATL_NOINLINE inline HRESULT CPerfMon::_GetAttribute(IXMLDOMNode *pNode, LPCWSTR szAttrName, BSTR *pbstrVal) throw()
{
	ATLENSURE_RETURN(pNode != NULL);
	ATLASSERT(szAttrName != NULL);
	ATLENSURE_RETURN(pbstrVal != NULL);
	*pbstrVal = NULL;
	// look the attribute up by name in the node's attribute map
	CComPtr<IXMLDOMNamedNodeMap> spAttrMap;
	HRESULT hr = pNode->get_attributes(&spAttrMap);
	if (hr != S_OK)
		return hr;
	CComPtr<IXMLDOMNode> spAttrNode;
	hr = spAttrMap->getNamedItem((BSTR) szAttrName, &spAttrNode);
	if (hr != S_OK)
		return hr;
	// coerce the attribute value to a BSTR and hand ownership to the caller
	CComVariant varValue;
	hr = spAttrNode->get_nodeValue(&varValue);
	if (hr != S_OK)
		return hr;
	hr = varValue.ChangeType(VT_BSTR);
	if (hr != S_OK)
		return hr;
	*pbstrVal = varValue.bstrVal;
	varValue.vt = VT_EMPTY; // detach so the CComVariant dtor doesn't free the BSTR
	return S_OK;
}
#endif // _ATL_PERF_NOXML
#if defined(_ATL_PERF_REGISTER) & !defined(_ATL_PERF_NOEXPORT)
// Registers every CPerfMon in the linker-generated [__pperfA, __pperfZ) table
// with perflib, stopping at the first failure.
ATL_NOINLINE inline HRESULT RegisterPerfMon(HINSTANCE hDllInstance /* = _AtlBaseModule.GetModuleInstance() */) throw()
{
	for (CPerfMon **ppPerf = &__pperfA; ppPerf != &__pperfZ; ppPerf++)
	{
		if (*ppPerf == NULL)
			continue;
		HRESULT hr = (*ppPerf)->Register(_T( ATLPERF_FUNCID_OPEN ), _T( ATLPERF_FUNCID_COLLECT ), _T( ATLPERF_FUNCID_CLOSE ), hDllInstance);
		if (FAILED(hr))
			return hr;
		hr = (*ppPerf)->RegisterAllStrings(hDllInstance);
		if (FAILED(hr))
			return hr;
	}
	return S_OK;
}
// Unregisters every CPerfMon in the [__pperfA, __pperfZ) table,
// stopping at the first failure.
ATL_NOINLINE inline HRESULT UnregisterPerfMon() throw()
{
	for (CPerfMon **ppPerf = &__pperfA; ppPerf != &__pperfZ; ppPerf++)
	{
		if (*ppPerf == NULL)
			continue;
		HRESULT hr = (*ppPerf)->Unregister();
		if (FAILED(hr))
			return hr;
	}
	return S_OK;
}
// Perflib "Open" entry point: forwards to every registered CPerfMon and
// returns the first non-zero error code.
extern "C" ATL_NOINLINE inline DWORD __declspec(dllexport) WINAPI OpenPerfMon(LPWSTR lpDeviceNames) throw()
{
	for (CPerfMon **ppPerf = &__pperfA; ppPerf != &__pperfZ; ppPerf++)
	{
		if (*ppPerf == NULL)
			continue;
		DWORD dwErr = (*ppPerf)->Open(lpDeviceNames);
		if (dwErr != 0)
			return dwErr;
	}
	return 0;
}
// Perflib "Collect" entry point: fans one collect request out to every
// registered CPerfMon in the [__pperfA, __pperfZ) table.
// On entry *lpcbBytes is the size of the caller's buffer; on exit it is the
// total number of bytes written by all CPerfMon objects combined.
extern "C" ATL_NOINLINE inline DWORD __declspec(dllexport) WINAPI CollectPerfMon(LPWSTR lpwszValue, LPVOID* lppData,
	LPDWORD lpcbBytes, LPDWORD lpcObjectTypes) throw()
{
	DWORD dwOrigBytes = *lpcbBytes;      // total buffer size supplied by the caller
	DWORD dwBytesRemaining = *lpcbBytes; // space still available for the next object
	CPerfMon **ppPerf = &__pperfA;
	DWORD dwErr = 0;
	while (ppPerf != &__pperfZ)
	{
		if (*ppPerf != NULL)
		{
			// Collect reads *lpcbBytes as the available space and overwrites it
			// with the byte count it consumed (inferred from the accounting below).
			dwErr = (*ppPerf)->Collect(lpwszValue, lppData, lpcbBytes, lpcObjectTypes);
			if (dwErr != 0)
				return dwErr;
			dwBytesRemaining -= *lpcbBytes;
			*lpcbBytes = dwBytesRemaining; // hand the reduced budget to the next object
		}
		ppPerf++;
	}
	// report the grand total written across all objects
	*lpcbBytes = dwOrigBytes - dwBytesRemaining;
	return 0;
}
// Perflib "Close" entry point: notifies every registered CPerfMon.
extern "C" ATL_NOINLINE inline DWORD __declspec(dllexport) WINAPI ClosePerfMon() throw()
{
	for (CPerfMon **ppPerf = &__pperfA; ppPerf != &__pperfZ; ppPerf++)
	{
		if (*ppPerf != NULL)
		{
			(*ppPerf)->Close();
		}
	}
	return 0;
}
#endif // defined(_ATL_PERF_REGISTER) & !defined(_ATL_PERF_NOEXPORT)
} // namespace ATL
#pragma warning(pop)
#endif // __ATLPERF_INL__
<|start_filename|>source/SProxy/Element.cpp<|end_filename|>
//
// Element.cpp
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "Element.h"
#include "ComplexType.h"
#include "SimpleType.h"
#include "Schema.h"
#include "Attribute.h"
#include "Content.h"
// Appends a complex type to this element's child list. When p is NULL a new
// CComplexType is created; it is owned by a CAutoPtr until the list insertion
// succeeds, so nothing leaks on failure. Returns the stored pointer, or NULL.
CComplexType * CElement::AddComplexType(CComplexType * p)
{
	CAutoPtr<CComplexType> spNew;
	if (p == NULL)
	{
		spNew.Attach( new CComplexType );
		p = spNew;
	}
	if ((p != NULL) && (m_elements.AddTail(p) != NULL))
	{
		spNew.Detach(); // ownership now lives in m_elements
		return p;
	}
	return NULL;
}
// Appends a simple type to this element's child list. When p is NULL a new
// CSimpleType is created; it is owned by a CAutoPtr until the list insertion
// succeeds, so nothing leaks on failure. Returns the stored pointer, or NULL.
CSimpleType * CElement::AddSimpleType(CSimpleType * p)
{
	CAutoPtr<CSimpleType> spNew;
	if (p == NULL)
	{
		spNew.Attach( new CSimpleType );
		p = spNew;
	}
	if ((p != NULL) && (m_elements.AddTail(p) != NULL))
	{
		spNew.Detach(); // ownership now lives in m_elements
		return p;
	}
	return NULL;
}
// Resolves this element's type reference (prefix:name) to the named item in
// the parent schema. Returns NULL when there is no type name, no parent
// schema, or the prefix's namespace cannot be resolved.
CXSDElement * CElement::GetType()
{
	CXSDElement *pMatch = NULL;
	if (m_type.GetName().GetLength() != 0)
	{
		CSchema *pSchema = GetParentSchema();
		if (pSchema != NULL)
		{
			CStringW strNamespace;
			if (SUCCEEDED(GetNamespaceUri(m_type.GetPrefix(), strNamespace)))
			{
				pMatch = pSchema->GetNamedItem(strNamespace, m_type.GetName());
			}
		}
	}
	//
	// TODO: appropriate errors
	//
	return pMatch;
}
// Resolves the namespace URI of this element's type prefix; emits
// IDS_SDL_UNRESOLVED_NAMESPACE and returns NULL on failure.
// NOTE(review): strUri is a local CStringW, so "return strUri" hands back a
// pointer into a buffer that is released when strUri is destroyed -- callers
// receive a dangling pointer unless CStringW's buffer happens to outlive the
// call. Fixing this requires caching the string (member/CStringW return),
// which changes the interface -- flagging rather than changing behavior here.
const wchar_t * CElement::GetTargetNamespace()
{
	if (m_type.GetName().GetLength())
	{
	// CSchema *pSchema = GetParentSchema();
	// if (pSchema != NULL)
	// {
		CStringW strUri;
		if (SUCCEEDED(/*pSchema->*/GetNamespaceUri(m_type.GetPrefix(), strUri)))
		{
			return strUri;
		}
	// }
	}
	EmitFileError(IDS_SDL_UNRESOLVED_NAMESPACE, const_cast<CElement*>(this), 0, m_type.GetPrefix());
	return NULL;
}
<|start_filename|>source/SProxy/WSDLPortTypeOperation.h<|end_filename|>
//
// WSDLPortTypeOperation.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "XMLElement.h"
#include "WSDLPortTypeIO.h"
#include "Emit.h"
//
// TODO: merge CSoapOperation with CSoapBinding if possible (the code is very similar)
//
// Holds the soap:operation binding details for one WSDL operation:
// the SOAPAction header value and the encoding style.
class CSoapOperation
{
private:
	CStringW m_strSoapAction; // value of the "soapAction" attribute
	SOAPSTYLE m_style;        // parsed "style" attribute

	// Parses and stores the style attribute; fails on an unrecognized value.
	inline HRESULT ValidateStyle(const wchar_t *wsz, int cch)
	{
		m_style = ::GetStyle(wsz, cch);
		return (m_style != SOAPSTYLE_UNK) ? S_OK : E_FAIL;
	}

public:
	CSoapOperation()
		:m_style(SOAPSTYLE_UNK)
	{
	}

	// Sets the SOAPAction from a (possibly unterminated) character range.
	inline HRESULT SetSoapAction(const wchar_t *wszName, int cchName)
	{
		if (wszName == NULL)
		{
			return E_FAIL;
		}
		m_strSoapAction.SetString(wszName, cchName);
		return S_OK;
	}

	inline HRESULT SetSoapAction(const CStringW& strName)
	{
		m_strSoapAction = strName;
		return S_OK;
	}

	inline const CStringW& GetSoapAction()
	{
		return m_strSoapAction;
	}

	// Sets the style from a (possibly unterminated) character range.
	inline HRESULT SetStyle(const wchar_t *wszName, int cchName)
	{
		if (wszName == NULL)
		{
			return E_FAIL;
		}
		return ValidateStyle(wszName, cchName);
	}

	inline HRESULT SetStyle(const CStringW& strName)
	{
		return ValidateStyle(strName, strName.GetLength());
	}

	inline const SOAPSTYLE GetStyle()
	{
		return m_style;
	}
};
// Holds the http:operation binding details for one WSDL operation:
// just the "location" attribute.
class CHttpOperation
{
private:
	CStringW m_strLocation; // value of the "location" attribute

public:
	// Sets the location from a (possibly unterminated) character range.
	inline HRESULT SetLocation(const wchar_t *wszName, int cchName)
	{
		if (wszName == NULL)
		{
			return E_FAIL;
		}
		m_strLocation.SetString(wszName, cchName);
		return S_OK;
	}

	inline HRESULT SetLocation(const CStringW& strName)
	{
		m_strLocation = strName;
		return S_OK;
	}

	inline const CStringW& GetLocation()
	{
		return m_strLocation;
	}
};
// Represents one <operation> element of a WSDL portType, together with the
// binding-level soap:/http: operation extensions attached during parsing:
// name, optional parameterOrder, owned input/output messages, fault list.
class CWSDLPortTypeOperation : public CXMLElement
{
private:
	CStringW m_strName;           // "name" attribute
	CStringW m_strDocumentation;  // documentation text
	CStringW m_strParameterOrder; // "parameterOrder" attribute
	CAutoPtr<CWSDLPortTypeInput> m_pInput;      // owned <input> child
	CAutoPtr<CWSDLPortTypeOutput> m_pOutput;    // owned <output> child
	CAtlPtrList<CWSDLPortTypeFault *> m_faults; // <fault> children
	CAutoPtr<CSoapOperation> m_pSoapOperation;  // owned soap:operation binding
	CAutoPtr<CHttpOperation> m_pHttpOperation;  // owned http:operation binding
public:
	// Attaches (or default-constructs) the soap:operation binding, freeing any
	// previous one, and returns the stored object.
	inline CSoapOperation * AddSoapOperation(CSoapOperation *pBinding = NULL)
	{
		if (pBinding == NULL)
		{
			pBinding = new CSoapOperation;
		}
		m_pSoapOperation.Free();
		m_pSoapOperation.Attach( pBinding );
		return m_pSoapOperation;
	}
	inline CSoapOperation * GetSoapOperation()
	{
		return m_pSoapOperation;
	}
	// Attaches (or default-constructs) the http:operation binding, freeing any
	// previous one, and returns the stored object.
	inline CHttpOperation * AddHttpOperation(CHttpOperation *pBinding = NULL)
	{
		if (pBinding == NULL)
		{
			pBinding = new CHttpOperation;
		}
		m_pHttpOperation.Free();
		m_pHttpOperation.Attach( pBinding );
		return m_pHttpOperation;
	}
	inline CHttpOperation * GetHttpOperation()
	{
		return m_pHttpOperation;
	}
	// Lazily creates the <input> child; an existing one is kept.
	inline CWSDLPortTypeInput * AddInput()
	{
		if (!m_pInput)
		{
			m_pInput.Free();
			m_pInput.Attach( new CWSDLPortTypeInput );
		}
		return m_pInput;
	}
	// Lazily creates the <output> child; an existing one is kept.
	inline CWSDLPortTypeOutput * AddOutput()
	{
		if (!m_pOutput)
		{
			m_pOutput.Free();
			m_pOutput.Attach( new CWSDLPortTypeOutput );
		}
		return m_pOutput;
	}
	inline CWSDLPortTypeInput * GetInput()
	{
		return m_pInput;
	}
	inline CWSDLPortTypeOutput * GetOutput()
	{
		return m_pOutput;
	}
	inline HRESULT SetName(const CStringW& strName)
	{
		m_strName = strName;
		return S_OK;
	}
	// Sets the name from a (possibly unterminated) character range.
	inline HRESULT SetName(const wchar_t *wszName, int cchName)
	{
		if (!wszName)
		{
			return E_FAIL;
		}
		m_strName.SetString(wszName, cchName);
		return S_OK;
	}
	inline const CStringW& GetName()
	{
		return m_strName;
	}
	inline const CStringW& GetParameterOrder()
	{
		return m_strParameterOrder;
	}
	inline HRESULT SetParameterOrder(const CStringW& str)
	{
		m_strParameterOrder = str;
		return S_OK;
	}
	// Sets the parameterOrder from a (possibly unterminated) character range.
	inline HRESULT SetParameterOrder(const wchar_t *wsz, int cch)
	{
		if (!wsz)
		{
			return E_FAIL;
		}
		m_strParameterOrder.SetString(wsz, cch);
		return S_OK;
	}
	// Creates a new fault entry, appends it to the fault list, and returns it.
	// Emits E_OUTOFMEMORY and returns NULL on allocation/list failure.
	inline CWSDLPortTypeFault * AddFault()
	{
		CAutoPtr<CWSDLPortTypeFault> p ( new CWSDLPortTypeFault );
		if (p != NULL)
		{
			if (m_faults.AddTail(p) != NULL)
			{
				return p.Detach();
			}
		}
		EmitErrorHr(E_OUTOFMEMORY);
		return NULL;
	}
	// Appends an existing fault; emits E_OUTOFMEMORY and returns NULL on failure.
	inline CWSDLPortTypeFault * AddFault(CWSDLPortTypeFault *p)
	{
		if (m_faults.AddTail(p) != NULL)
		{
			return p;
		}
		EmitErrorHr(E_OUTOFMEMORY);
		return NULL;
	}
	// Fault iteration: GetFirstFault/GetNextFault follow CAtlPtrList semantics.
	inline POSITION GetFirstFault()
	{
		return m_faults.GetHeadPosition();
	}
	inline CWSDLPortTypeFault * GetNextFault(POSITION &pos)
	{
		return m_faults.GetNext(pos);
	}
};
<|start_filename|>include/atlsoap.h<|end_filename|>
// This is a part of the Active Template Library.
// Copyright (C) Microsoft Corporation
// All rights reserved.
//
// This source code is only intended as a supplement to the
// Active Template Library Reference and related
// electronic documentation provided with the library.
// See these sources for detailed information regarding the
// Active Template Library product.
#ifndef __ATLSOAP_H__
#define __ATLSOAP_H__
#pragma once
#if (defined(_WINSOCKAPI_) && !defined(_WINSOCK2API_))
#error require winsock2.h -- include <winsock2.h> before you include <windows.h>
#endif
#if !defined(_WIN32_WCE) && ((_WIN32_WINNT < 0x0400) && (_WIN32_WINDOWS <= 0x0400))
#error require _WIN32_WINNT >= 0x0400 or _WIN32_WINDOWS > 0x0400
#endif
#ifndef ATLSOAP_TRACE
#if defined(_ATLSOAP_TRACE_XML) && !defined(_WIN32_WCE)
#define ATLSOAP_TRACE(__data, __len) AtlSoapTraceXML(__data, __len)
#else
#define ATLSOAP_TRACE(__data, __len) __noop
#endif
#endif // ATLSOAP_TRACE
// override this macro to ATL_BASE64_FLAG_NOCRLF if you do
// not want Base64-encoded binary data to contain CRLFs
#ifndef ATLSOAP_BASE64_FLAGS
#define ATLSOAP_BASE64_FLAGS ATL_BASE64_FLAG_NONE
#endif // ATLSOAP_BASE64_FLAGS
#ifndef _WIN32_WCE
[ emitidl(restricted) ];
#endif // _WIN32_WCE
#include <winsock2.h>
#include <atlstr.h>
#include <atlcoll.h>
#include <atlbase.h>
#include <msxml2.h>
#include <atlenc.h>
#ifndef _WIN32_WCE
#include <fcntl.h>
#else
#include <altcecrt.h>
#endif // _WIN32_WCE
#include <float.h>
#include <math.h>
#include <limits>
#include <atlisapi.h>
#ifndef _WIN32_WCE
#include <atlstencil.h>
#endif // _WIN32_WCE
#include <atlhttp.h>
#include <atlhttp.inl>
#pragma warning(push)
#pragma warning(disable: 4625) // copy constructor could not be generated because a base class copy constructor is inaccessible
#pragma warning(disable: 4626) // assignment operator could not be generated because a base class assignment operator is inaccessible
#pragma warning(disable: 4061) // enumerate 'enum value' in switch of enum 'enum type' is not explicitly handled by a case label
#ifndef _CPPUNWIND
#pragma warning(disable: 4702) // unreachable code
#endif // _CPPUNWIND
#ifndef ATLSOAP_NOWININET
#include <wininet.h>
#ifndef ATLSOAPINET_CLIENT
#define ATLSOAPINET_CLIENT _T("VCSoapClient")
#endif
#endif
#ifndef _ATL_NO_DEFAULT_LIBS
#ifndef _WIN32_WCE
#pragma comment(lib, "msxml2.lib")
#endif // _WIN32_WCE
#ifndef ATLSOAP_NOWININET
#pragma comment(lib, "wininet.lib")
#endif
#endif
#define _ATLSOAP_MAKEWIDESTR( str ) L ## str
#define ATLSOAP_MAKEWIDESTR( str ) _ATLSOAP_MAKEWIDESTR( str )
#pragma pack(push,_ATL_PACKING)
namespace ATL
{
#ifndef _WIN32_WCE
// Dumps a raw XML buffer to the process's stdout handle, bracketed by
// separator rules. Used by ATLSOAP_TRACE when _ATLSOAP_TRACE_XML is defined.
ATL_NOINLINE inline void AtlSoapTraceXML(LPBYTE pdwData, DWORD dwLen)
{
	HANDLE hStdOut = GetStdHandle(STD_OUTPUT_HANDLE);
	if (hStdOut == INVALID_HANDLE_VALUE)
	{
		return;
	}
	static const char szRule[] =
		"\n-----------------------------------------------------------------\n";
	DWORD dwWritten;
	WriteFile(hStdOut, szRule, sizeof(szRule)-1, &dwWritten, NULL);
	WriteFile(hStdOut, pdwData, dwLen, &dwWritten, NULL);
	WriteFile(hStdOut, szRule, sizeof(szRule)-1, &dwWritten, NULL);
}
#endif // _WIN32_WCE
////////////////////////////////////////////////////////////////////////////////
//
// IStreamImpl - stub IStream implementation class
//
////////////////////////////////////////////////////////////////////////////////
// Stub IStream implementation: every method fails with E_NOTIMPL.
// Derived classes override only the operations they actually support
// (e.g. Read in CStreamOnServerContext below).
class IStreamImpl : public IStream
{
public:
	HRESULT __stdcall Read(void * /*pDest*/, ULONG /*nMaxLen*/, ULONG * /*pnRead*/)
	{
		return E_NOTIMPL;
	}
	HRESULT __stdcall Write(const void * /*pv*/, ULONG /*cb*/, ULONG * /*pcbWritten*/)
	{
		return E_NOTIMPL;
	}
	HRESULT __stdcall Seek(LARGE_INTEGER /*dlibMove*/, DWORD /*dwOrigin*/,
		ULARGE_INTEGER * /*pLibNewPosition*/)
	{
		return E_NOTIMPL;
	}
	HRESULT __stdcall SetSize(ULARGE_INTEGER /*libNewSize*/)
	{
		return E_NOTIMPL;
	}
	HRESULT __stdcall CopyTo(IStream * /*pStream*/, ULARGE_INTEGER /*cb*/,
		ULARGE_INTEGER * /*pcbRead*/, ULARGE_INTEGER * /*pcbWritten*/)
	{
		return E_NOTIMPL;
	}
	HRESULT __stdcall Commit(DWORD /*grfCommitFlags*/)
	{
		return E_NOTIMPL;
	}
	HRESULT __stdcall Revert()
	{
		return E_NOTIMPL;
	}
	HRESULT __stdcall LockRegion(ULARGE_INTEGER /*libOffset*/, ULARGE_INTEGER /*cb*/, DWORD /*dwLockType*/)
	{
		return E_NOTIMPL;
	}
	HRESULT __stdcall UnlockRegion(ULARGE_INTEGER /*libOffset*/, ULARGE_INTEGER /*cb*/, DWORD /*dwLockType*/)
	{
		return E_NOTIMPL;
	}
	HRESULT __stdcall Stat(STATSTG * /*pstatstg*/, DWORD /*grfStatFlag*/)
	{
		return E_NOTIMPL;
	}
	HRESULT __stdcall Clone(IStream ** /*ppstm*/)
	{
		return E_NOTIMPL;
	}
}; // class IStreamImpl
////////////////////////////////////////////////////////////////////////////////
//
// CStreamOnServerContext
//
////////////////////////////////////////////////////////////////////////////////
// Exposes the client request body of an IHttpServerContext as a read-only
// IStream so an XML parser can consume the SOAP payload directly.
// Refcounting is stubbed (AddRef/Release return 1): the owner controls the
// lifetime of this object and of the server context it points at.
class CStreamOnServerContext : public IStreamImpl
{
public:
	HRESULT __stdcall QueryInterface(REFIID riid, void **ppv)
	{
		if (ppv == NULL)
		{
			return E_POINTER;
		}
		*ppv = NULL;
		// supports IUnknown, IStream and ISequentialStream
		if (InlineIsEqualGUID(riid, IID_IUnknown) ||
			InlineIsEqualGUID(riid, IID_IStream) ||
			InlineIsEqualGUID(riid, IID_ISequentialStream))
		{
			*ppv = static_cast<IStream *>(this);
			return S_OK;
		}
		return E_NOINTERFACE;
	}
	// no-op refcounting: instances are not COM-heap managed
	ULONG __stdcall AddRef()
	{
		return 1;
	}
	ULONG __stdcall Release()
	{
		return 1;
	}
private:
	IHttpServerContext * m_pServerContext; // not owned; set once
	DWORD m_dwBytesRead;                   // bytes consumed so far
public:
	CStreamOnServerContext(IHttpServerContext *pServerContext = NULL)
		: m_pServerContext(pServerContext), m_dwBytesRead(0)
	{
	}
	// Late-binds the server context; may only be called while it is still NULL.
	void SetServerContext(IHttpServerContext *pServerContext)
	{
		ATLASSUME( m_pServerContext == NULL );
		m_pServerContext = pServerContext;
	}
	// Reads up to nMaxLen bytes of the request body, continuing from the
	// previous position; the count actually read is returned via *pnRead.
	HRESULT __stdcall Read(void *pDest, ULONG nMaxLen, ULONG *pnRead)
	{
		ATLENSURE( pDest != NULL );
		ATLASSUME( m_pServerContext != NULL );
		// clamp to the bytes still unread in the request
		DWORD dwToRead = __min(m_pServerContext->GetTotalBytes()-m_dwBytesRead, nMaxLen);
		if (ReadClientData(m_pServerContext, (LPSTR) pDest, &dwToRead, m_dwBytesRead) != FALSE)
		{
			m_dwBytesRead+= dwToRead;
			if (pnRead != NULL)
			{
				*pnRead = dwToRead;
			}
			return S_OK;
		}
		ATLTRACE( _T("ATLSOAP: CStreamOnServerContext::Read -- ReadClientData failed.\r\n") );
		return E_FAIL;
	}
}; // class CStreamOnServerContext
////////////////////////////////////////////////////////////////////////////////
//
// CReadStreamOnSocket
//
////////////////////////////////////////////////////////////////////////////////
// Exposes the buffered body of a CAtlHttpClientT response as a read-only
// IStream so an XML parser can consume a SOAP response without copying it.
// Refcounting is stubbed (AddRef/Release return 1): the owner controls the
// lifetime of this object and of the socket whose buffer it points into.
template <typename TSocketClass>
class CReadStreamOnSocket : public IStreamImpl
{
public:
	HRESULT __stdcall QueryInterface(REFIID riid, void **ppv)
	{
		if (ppv == NULL)
		{
			return E_POINTER;
		}
		*ppv = NULL;
		// supports IUnknown, IStream and ISequentialStream
		if (InlineIsEqualGUID(riid, IID_IUnknown) ||
			InlineIsEqualGUID(riid, IID_IStream) ||
			InlineIsEqualGUID(riid, IID_ISequentialStream))
		{
			*ppv = static_cast<IStream *>(this);
			return S_OK;
		}
		return E_NOINTERFACE;
	}
	// no-op refcounting: instances are not COM-heap managed
	ULONG __stdcall AddRef()
	{
		return 1;
	}
	ULONG __stdcall Release()
	{
		return 1;
	}
private:
	CAtlHttpClientT<TSocketClass> * m_pSocket; // not owned
	LPCSTR m_szBuffer; // start of the response body
	LPCSTR m_szCurr;   // current read position within the body
	long m_nBodyLen;   // total body length in bytes
public:
	CReadStreamOnSocket()
		: m_pSocket(NULL), m_szBuffer(NULL), m_szCurr(NULL), m_nBodyLen(0)
	{
	}
	// Binds the stream to a socket whose response has been received.
	// Fails (FALSE) when there is no body or it is empty.
	BOOL Init(CAtlHttpClientT<TSocketClass> *pSocket)
	{
		ATLENSURE( pSocket != NULL );
		m_pSocket = pSocket;
		m_szBuffer = (LPCSTR) pSocket->GetBody();
		ATLSOAP_TRACE( (LPBYTE) pSocket->GetBody(), pSocket->GetBodyLength() );
		if (m_szBuffer != NULL)
		{
			m_szCurr = m_szBuffer;
			m_nBodyLen = pSocket->GetBodyLength();
			if (m_nBodyLen != 0)
			{
				return TRUE;
			}
		}
		ATLTRACE( _T("ATLSOAP: CReadStreamOnSocket::Init failed.\r\n") );
		return FALSE;
	}
	// Copies up to nMaxLen bytes from the current position; reports the count
	// via *pnRead. Returns S_OK (with 0 bytes) once the body is exhausted.
	HRESULT __stdcall Read(void *pDest, ULONG nMaxLen, ULONG *pnRead)
	{
		ATLASSERT( pDest != NULL );
		ATLASSUME( m_pSocket != NULL );
		ATLASSUME( m_szBuffer != NULL );
		if (pnRead != NULL)
		{
			*pnRead = 0;
		}
		long nRead = (int) (m_szCurr-m_szBuffer); // bytes already delivered
		if (nRead < m_nBodyLen)
		{
			long nLength = __min((int)(m_nBodyLen-nRead), (LONG) nMaxLen);
			Checked::memcpy_s(pDest, nMaxLen, m_szCurr, nLength);
			m_szCurr+= nLength;
			if (pnRead != NULL)
			{
				*pnRead = (ULONG) nLength;
			}
		}
		return S_OK;
	}
}; // class CReadStreamOnSocket
////////////////////////////////////////////////////////////////////////////////
//
// CWriteStreamOnCString
//
////////////////////////////////////////////////////////////////////////////////
// IWriteStream that accumulates everything written into a CStringA.
class CWriteStreamOnCString : public IWriteStream
{
public:
	CStringA m_str; // accumulated output; reset via Cleanup()

	virtual ~CWriteStreamOnCString()
	{
	}

	// Appends nLen bytes of szOut (nLen < 0 means NUL-terminated) to m_str and
	// reports the byte count via *pdwWritten when requested.
	HRESULT WriteStream(LPCSTR szOut, int nLen, LPDWORD pdwWritten)
	{
		ATLENSURE_RETURN( szOut != NULL );

		int nWrite = nLen;
		if (nWrite < 0)
		{
			nWrite = (int) strlen(szOut);
		}
		_ATLTRY
		{
			m_str.Append(szOut, nWrite);
		}
		_ATLCATCHALL()
		{
			return E_OUTOFMEMORY;
		}
		if (pdwWritten != NULL)
		{
			*pdwWritten = (DWORD) nWrite;
		}
		return S_OK;
	}

	// Nothing buffered outside m_str, so flushing is a no-op.
	HRESULT FlushStream()
	{
		return S_OK;
	}

	// Discards the accumulated output.
	void Cleanup()
	{
		m_str.Empty();
	}
}; // class CWriteStreamOnCString
////////////////////////////////////////////////////////////////////////////////
//
// Namespaces
//
////////////////////////////////////////////////////////////////////////////////
#define SOAPENV_NAMESPACEA "http://schemas.xmlsoap.org/soap/envelope/"
#define SOAPENV_NAMESPACEW ATLSOAP_MAKEWIDESTR( SOAPENV_NAMESPACEA )
#define SOAPENC_NAMESPACEA "http://schemas.xmlsoap.org/soap/encoding/"
#define SOAPENC_NAMESPACEW ATLSOAP_MAKEWIDESTR( SOAPENC_NAMESPACEA )
#define XSI_NAMESPACEA "http://www.w3.org/2001/XMLSchema-instance"
#define XSI_NAMESPACEW ATLSOAP_MAKEWIDESTR( XSI_NAMESPACEA )
#define XSD_NAMESPACEA "http://www.w3.org/2001/XMLSchema"
#define XSD_NAMESPACEW ATLSOAP_MAKEWIDESTR( XSD_NAMESPACEA )
#ifndef ATLSOAP_GENERIC_NAMESPACE
#define ATLSOAP_GENERIC_NAMESPACE L"http://www.tempuri.org"
#endif
////////////////////////////////////////////////////////////////////////////////
//
// Helpers
//
////////////////////////////////////////////////////////////////////////////////
// Fetches a SAX attribute value either by qualified name (no namespace given)
// or by namespace + local name. Returns E_FAIL when the attribute is absent;
// on success *pwszValue/*pcchValue describe the (unterminated) value.
inline HRESULT GetAttribute(
	__in ISAXAttributes *pAttributes,
	__in_ecount(cchName) const wchar_t *wszAttrName, __in int cchName,
	__out_ecount_part(*pcchValue, *pcchValue) const wchar_t **pwszValue, __inout int *pcchValue,
	__in_ecount_opt(cchNamespace) wchar_t *wszNamespace = NULL, __in int cchNamespace = 0)
{
	if (!pAttributes || !wszAttrName || !pwszValue || !pcchValue)
	{
		return E_INVALIDARG;
	}
	*pwszValue = NULL;
	*pcchValue = 0;

	HRESULT hrFind;
	if (wszNamespace == NULL)
	{
		hrFind = pAttributes->getValueFromQName(wszAttrName, cchName, pwszValue, pcchValue);
	}
	else
	{
		hrFind = pAttributes->getValueFromName(wszNamespace, cchNamespace,
			wszAttrName, cchName, pwszValue, pcchValue);
	}
	return (hrFind == S_OK) ? S_OK : E_FAIL;
}
// Convenience overload of GetAttribute: copies the attribute value into a
// CStringW. Returns E_FAIL when absent, E_OUTOFMEMORY if the copy fails.
inline HRESULT GetAttribute(
	__in ISAXAttributes *pAttributes,
	__in_ecount(cchName) const wchar_t *wszAttrName, __in int cchName,
	__inout CStringW &strValue,
	__in_ecount_opt(cchNamespace) wchar_t *wszNamespace = NULL, __in int cchNamespace = 0)
{
	if (!pAttributes || !wszAttrName)
	{
		return E_INVALIDARG;
	}

	const wchar_t *wszValue = NULL;
	int cchValue = 0;
	HRESULT hrFind;
	if (wszNamespace == NULL)
	{
		hrFind = pAttributes->getValueFromQName(wszAttrName, cchName, &wszValue, &cchValue);
	}
	else
	{
		hrFind = pAttributes->getValueFromName(wszNamespace, cchNamespace,
			wszAttrName, cchName, &wszValue, &cchValue);
	}
	if (hrFind != S_OK)
	{
		return E_FAIL;
	}

	HRESULT hr = S_OK;
	_ATLTRY
	{
		strValue.SetString(wszValue, cchValue);
	}
	_ATLCATCHALL()
	{
		ATLTRACE( _T("ATLSOAP: GetAttribute -- out of memory.\r\n") );
		hr = E_OUTOFMEMORY;
	}
	return hr;
}
// Returns a pointer to the first non-whitespace character of wsz
// (or to the terminating NUL when the string is all whitespace).
inline const wchar_t *SkipWhitespace(const wchar_t *wsz)
{
	for (; *wsz != L'\0' && iswspace(*wsz); ++wsz)
	{
	}
	return wsz;
}
} // namespace ATL
#pragma pack(pop)
////////////////////////////////////////////////////////////////////////////////
//
// BLOB data type - use this struct when you want to send BLOB data
// the attribute provider and proxy generator will only properly special
// case blob data when using this struct.
//
////////////////////////////////////////////////////////////////////////////////
#ifndef _WIN32_WCE
// exported so the attribute provider emits this type into the generated IDL
[ export ]
#endif // _WIN32_WCE
// Binary SOAP payload: "size" bytes pointed to by "data". The attribute
// provider and proxy generator special-case blob data only for this struct
// (see the banner comment above).
typedef struct _tagATLSOAP_BLOB
{
	unsigned long size;
	unsigned char *data;
} ATLSOAP_BLOB;
#ifndef _ATL_SOAP_NO_PARAMETER_VALIDATIONS
#define _ATL_VALIDATE_PARAMETER_END(p)\
do \
{ \
if(*(p) !='\0') \
return E_FAIL; \
} while(0)
#else
#define _ATL_VALIDATE_PARAMETER_END(p)
#endif
// All non-integral types have specializations which
// will be called. The following function will be called
// only for integral types
#pragma push_macro("max")
#pragma push_macro("min")
#undef max
#undef min
// Generic integral parse: delegate to the __int64 specialization, then
// (unless validation is disabled) range-check and narrow to T.
template <typename T>
inline HRESULT AtlGetSAXValue(T * pVal , const wchar_t * wsz , int cch )
{
	__int64 nParsed = *pVal;
	if (FAILED(AtlGetSAXValue(&nParsed, wsz, cch)))
	{
		return E_FAIL;
	}
#ifndef _ATL_SOAP_NO_PARAMETER_VALIDATIONS
	// reject values that do not fit in T
	if ((nParsed < std::numeric_limits<T>::min()) || (nParsed > std::numeric_limits<T>::max()))
	{
		return E_FAIL;
	}
#endif
	*pVal = T(nParsed);
	return S_OK;
}
#pragma pop_macro("max")
#pragma pop_macro("min")
////////////////////////////////////////////////////////////////////////////////
//
// AtlGetXMLValue (for IXMLDOMDocument) - get the real type from the XML data
//
///////////////////////////////////////////////////////////////////////////////
//
// generic IXMLDOMNode template function
// delegates to AtlGetSAXValue
//
// Generic DOM-node conversion: extract the node's text as a BSTR, then
// convert it with the matching AtlGetSAXValue specialization.
template <typename T>
inline HRESULT AtlGetXMLValue(IXMLDOMNode *pParam, T *pVal)
{
	CComBSTR bstrText;
	HRESULT hr = AtlGetXMLValue(pParam, &bstrText);
	if (FAILED(hr))
	{
		return hr;
	}
	return AtlGetSAXValue(pVal, bstrText, bstrText.Length());
}
// specialization for BSTR
// A grandchild node means the parameter holds nested XML, so the raw XML text
// is returned; otherwise the node's plain text content is returned.
template <>
inline HRESULT AtlGetXMLValue<BSTR>(IXMLDOMNode *pParam, BSTR *pbstrVal)
{
	if (pParam == NULL)
	{
		return E_INVALIDARG;
	}
	if (pbstrVal == NULL)
	{
		return E_POINTER;
	}
	CComPtr<IXMLDOMNode> spChild;
	CComPtr<IXMLDOMNode> spGrandChild;
	if ((pParam->get_firstChild(&spChild) == S_OK) &&
		(spChild->get_firstChild(&spGrandChild) == S_OK))
	{
		return (pParam->get_xml(pbstrVal) == S_OK) ? S_OK : E_FAIL;
	}
	return (pParam->get_text(pbstrVal) == S_OK) ? S_OK : E_FAIL;
}
////////////////////////////////////////////////////////////////////////////////
//
// AtlGetSAXValue - (for SAX or generic) get the real type from the XML data
//
////////////////////////////////////////////////////////////////////////////////
// Parses the four xsd:boolean lexical forms: "1", "0", "true", "false".
// Any other input leaves *pVal false and returns E_FAIL.
template <>
inline HRESULT AtlGetSAXValue<bool>(bool *pVal, __in_z const wchar_t *wsz, int cch)
{
	ATLENSURE( wsz != NULL );
	if (!pVal)
	{
		return E_POINTER;
	}
	*pVal = false;

	if ((cch == 1) && (wsz[0] == L'1'))
	{
		*pVal = true;
		return S_OK;
	}
	if ((cch == 1) && (wsz[0] == L'0'))
	{
		*pVal = false;
		return S_OK;
	}
	if ((cch == sizeof("true")-1) && (wcsncmp(wsz, L"true", cch) == 0))
	{
		*pVal = true;
		return S_OK;
	}
	if ((cch == sizeof("false")-1) && (wcsncmp(wsz, L"false", cch) == 0))
	{
		*pVal = false;
		return S_OK;
	}
	return E_FAIL;
}
// Parses a signed 64-bit decimal integer, tolerating surrounding whitespace.
// Fails on overflow/underflow and (unless validation is disabled) on any
// trailing non-whitespace characters.
template <>
inline HRESULT AtlGetSAXValue<__int64>(__int64 *pVal, __in_z const wchar_t *wsz, int cch)
{
	ATLENSURE_RETURN( wsz != NULL );
	if (!pVal)
	{
		return E_POINTER;
	}
	_ATLTRY
	{
		// copy into a NUL-terminated buffer (wsz need not be terminated at cch)
		CFixedStringT<CStringW, 1024> wstrCopy(wsz, cch);
		const wchar_t *pBegin = ATL::SkipWhitespace(static_cast<LPCWSTR>(wstrCopy));
		const wchar_t *pParseEnd;
		__int64 nResult = 0;
		errno_t errnoValue = AtlStrToNum(&nResult, pBegin, const_cast<wchar_t **>(&pParseEnd), 10);
		if (errnoValue == ERANGE)
		{
			return E_FAIL; // overflow or underflow
		}
		pParseEnd = ATL::SkipWhitespace(pParseEnd);
		_ATL_VALIDATE_PARAMETER_END(pParseEnd);
		*pVal = nResult;
	}
	_ATLCATCHALL()
	{
		return E_OUTOFMEMORY;
	}
	return S_OK;
}
// Parses an unsigned 64-bit decimal integer, tolerating surrounding
// whitespace. Fails on out-of-range values and (unless validation is
// disabled) on any trailing non-whitespace characters.
template <>
inline HRESULT AtlGetSAXValue<unsigned __int64>(unsigned __int64 *pVal, __in_z const wchar_t *wsz, int cch)
{
	ATLENSURE_RETURN( wsz != NULL );
	if (!pVal)
	{
		return E_POINTER;
	}
	_ATLTRY
	{
		// copy into a NUL-terminated buffer (wsz need not be terminated at cch)
		CFixedStringT<CStringW, 1024> wstrCopy(wsz, cch);
		const wchar_t *pBegin = ATL::SkipWhitespace(static_cast<LPCWSTR>(wstrCopy));
		const wchar_t *pParseEnd;
		unsigned __int64 nResult = 0;
		errno_t errnoValue = AtlStrToNum(&nResult, pBegin, const_cast<wchar_t **>(&pParseEnd), 10);
		if (errnoValue == ERANGE)
		{
			return E_FAIL; // out of range
		}
		pParseEnd = ATL::SkipWhitespace(pParseEnd);
		_ATL_VALIDATE_PARAMETER_END(pParseEnd);
		*pVal = nResult;
	}
	_ATLCATCHALL()
	{
		return E_OUTOFMEMORY;
	}
	return S_OK;
}
// Parses an xsd:double. The special lexical values "INF", "NaN" and "-INF"
// are handled by writing the corresponding IEEE-754 bit pattern directly into
// *pVal as two 32-bit halves (low word first, high word second -- this
// presumes a little-endian double layout; TODO confirm for other targets).
// All other input is parsed with AtlStrToNum, rejecting range errors and
// (unless validation is disabled) trailing non-whitespace characters.
template <>
inline HRESULT AtlGetSAXValue<double>(double *pVal, __in_z const wchar_t *wsz, int cch)
{
	ATLENSURE_RETURN( wsz != NULL );
	if (!pVal)
	{
		return E_POINTER;
	}
	if ((cch == 3) && (wsz[0]==L'I') && (!wcsncmp(wsz, L"INF", cch)))
	{
		// +infinity: high word 0x7FF00000, low word 0
		*(((int *) pVal)+0) = 0x0000000;
		*(((int *) pVal)+1) = 0x7FF00000;
	}
	else if ((cch == 3) && (wsz[0]==L'N') && (!wcsncmp(wsz, L"NaN", cch)))
	{
		// quiet NaN: high word 0xFFF80000, low word 0
		*(((int *) pVal)+0) = 0x0000000;
		*(((int *) pVal)+1) = 0xFFF80000;
	}
	else if ((cch == 4) && (wsz[1]==L'I') && (!wcsncmp(wsz, L"-INF", cch)))
	{
		// -infinity: high word 0xFFF00000, low word 0
		*(((int *) pVal)+0) = 0x0000000;
		*(((int *) pVal)+1) = 0xFFF00000;
	}
	else
	{
		errno_t errnoValue = 0;
		_ATLTRY
		{
			// copy into a NUL-terminated buffer (wsz need not be terminated at cch)
			CFixedStringT<CStringW, 1024> wstr(wsz, cch);
			const wchar_t *pStart = ATL::SkipWhitespace(static_cast<LPCWSTR>(wstr));
			const wchar_t *pEnd;
			double d = 0.0;
			errnoValue = AtlStrToNum(&d, pStart, const_cast<wchar_t **>(&pEnd));
			pEnd = ATL::SkipWhitespace(pEnd);
			_ATL_VALIDATE_PARAMETER_END(pEnd);
			*pVal = d;
		}
		_ATLCATCHALL()
		{
			return E_OUTOFMEMORY;
		}
		// reject overflow/underflow
		if ((*pVal == -HUGE_VAL) || (*pVal == HUGE_VAL) || (errnoValue == ERANGE))
		{
			return E_FAIL;
		}
	}
	return S_OK;
}
// Parses a float by delegating to the double specialization, then narrowing.
template <>
inline HRESULT AtlGetSAXValue<float>(float *pVal, __in_z const wchar_t *wsz, int cch)
{
	ATLASSERT( wsz != NULL );
	if (!pVal)
	{
		return E_POINTER;
	}
	double d = *pVal;
	if (SUCCEEDED(AtlGetSAXValue(&d, wsz, cch)))
	{
#ifndef _ATL_SOAP_NO_PARAMETER_VALIDATIONS
		// FIX: this guard was "#ifdef _ATL_SOAP_PARAMETER_VALIDATIONS", a macro
		// used nowhere else in this header; every other specialization keys its
		// validation off _ATL_SOAP_NO_PARAMETER_VALIDATIONS, so the float range
		// check was silently disabled by default. Reject values that do not fit
		// in a float, consistent with the integral specializations.
		if(d > FLT_MAX || d < -FLT_MAX)
			return E_FAIL;
#endif
		*pVal = (float) d;
		return S_OK;
	}
	return E_FAIL;
}
// Copies the cch-character range into a freshly allocated BSTR.
// On allocation failure *pVal is NULL and E_OUTOFMEMORY is returned.
template <>
inline HRESULT AtlGetSAXValue<BSTR>(BSTR *pVal, __in_z const wchar_t *wsz, int cch)
{
	ATLASSERT( wsz != NULL );
	if (pVal == NULL)
	{
		return E_POINTER;
	}
	*pVal = SysAllocStringLen(wsz, cch);
	if (*pVal == NULL)
	{
		return E_OUTOFMEMORY;
	}
	return S_OK;
}
// Decodes a Base64 (or, when bHex is true, hex) encoded value into a freshly
// allocated ATLSOAP_BLOB. The wide input is first converted to UTF-8, then
// decoded; both the temporary UTF-8 buffer and the final data buffer come
// from pMemMgr. On decode failure (or empty input) this still returns S_OK
// but leaves the blob empty (size == 0, data == NULL) -- callers must check
// the blob, not just the HRESULT.
inline HRESULT AtlGetSAXBlobValue(
	ATLSOAP_BLOB *pVal,
	const wchar_t *wsz,
	int cch,
	IAtlMemMgr *pMemMgr,
	bool bHex = false)
{
	ATLENSURE_RETURN( wsz != NULL );
	ATLENSURE_RETURN( pMemMgr != NULL );
	if (pVal == NULL)
	{
		return E_POINTER;
	}
	// refuse to overwrite a blob that already owns data
	if (pVal->data != NULL)
	{
		return E_INVALIDARG;
	}
	pVal->data = NULL;
	pVal->size = 0;
	// measure the UTF-8 representation
	int nLength = AtlUnicodeToUTF8(wsz, cch, NULL, 0);
	if (nLength != 0)
	{
		char * pSrc = (char *) pMemMgr->Allocate(nLength);
		if (pSrc != NULL)
		{
			nLength = AtlUnicodeToUTF8(wsz, cch, pSrc, nLength);
			if (nLength != 0)
			{
				// decoded output is never larger than the encoded input
				pVal->data = (unsigned char *) pMemMgr->Allocate(nLength);
				if (pVal->data != NULL)
				{
					BOOL bRet;
					int nDataLength = nLength;
					if (!bHex)
					{
						bRet = Base64Decode(pSrc, nLength, pVal->data, &nDataLength);
					}
					else
					{
						bRet = AtlHexDecode(pSrc, nLength, pVal->data, &nDataLength);
					}
					if (bRet)
					{
						pVal->size = nDataLength;
					}
				}
			}
			pMemMgr->Free(pSrc);
		}
	}
	// decode failed or produced nothing: release the buffer, leave blob empty
	if (pVal->size == 0)
	{
		if (pVal->data != NULL)
		{
			pMemMgr->Free(pVal->data);
			pVal->data = NULL;
		}
	}
	return S_OK;
}
////////////////////////////////////////////////////////////////////////////////
//
// AtlGenXMLValue template and specializations
//
////////////////////////////////////////////////////////////////////////////////
// Generic XML value writer: streams *pVal as text via CWriteStreamHelper,
// which supplies formatting for the primitive types used by the SOAP maps.
// Returns E_INVALIDARG on NULL arguments, E_FAIL if the write fails.
template <typename T>
inline HRESULT AtlGenXMLValue(__in IWriteStream *pStream, __in T *pVal)
{
if ((pStream == NULL) || (pVal == NULL))
{
return E_INVALIDARG;
}
//
// delegate to CWriteStreamHelper
//
CWriteStreamHelper s(pStream);
return (s.Write(*pVal) == TRUE ? S_OK : E_FAIL);
}
#ifdef _NATIVE_WCHAR_T_DEFINED
// When wchar_t is a distinct native type, forward to the unsigned short
// overload so both compilation modes share one implementation.
template <>
inline HRESULT AtlGenXMLValue<wchar_t>(__in IWriteStream *pStream, __in wchar_t *pVal)
{
return AtlGenXMLValue(pStream, (unsigned short *)pVal);
}
#endif
// Writes a wide string to the stream as XML-escaped, UTF-8 encoded text.
// First pass measures the XML-escaped length; if escaping expands the text
// an escaped copy is built in a temporary buffer. Pure-ASCII text is then
// written through CWriteStreamHelper, otherwise the text is converted to
// UTF-8 and the raw bytes are streamed.
// Returns E_INVALIDARG on NULL stream/value, E_OUTOFMEMORY on allocation
// failure, E_FAIL on conversion or write errors.
template <>
inline HRESULT AtlGenXMLValue<wchar_t *>(__in IWriteStream *pStream, __deref_inout_z wchar_t **pVal)
{
// Fix: check pVal itself before dereferencing it -- the previous version
// read *pVal without a NULL check on pVal, crashing on a NULL argument
// (the generic AtlGenXMLValue template does perform this check).
if ((pStream == NULL) || (pVal == NULL) || (*pVal == NULL))
{
return E_INVALIDARG;
}
wchar_t *wszWrite = *pVal;
int nSrcLen = (int)wcslen(*pVal);
// Measure the escaped length (returns nSrcLen when nothing needs escaping).
int nCnt = EscapeXML(*pVal, nSrcLen, NULL, 0);
if (nCnt > nSrcLen)
{
// Escaping expands the text: build the escaped copy in a temp buffer.
nCnt++;
wszWrite = (wchar_t *)calloc((nCnt),sizeof(wchar_t));
if (wszWrite == NULL)
{
return E_OUTOFMEMORY;
}
nCnt = EscapeXML(*pVal, nSrcLen, wszWrite, nCnt);
if (nCnt == 0)
{
free(wszWrite);
return E_FAIL;
}
wszWrite[nCnt] = L'\0';
nSrcLen = nCnt;
}
// UTF-8 length == source length implies pure ASCII; stream it directly.
nCnt = AtlUnicodeToUTF8(wszWrite, nSrcLen, NULL, 0);
HRESULT hr = E_FAIL;
if ((nCnt == 0) || (nCnt == nSrcLen))
{
CWriteStreamHelper s(pStream);
hr = (s.Write(wszWrite) == TRUE ? S_OK : E_FAIL);
}
else
{
// Multibyte characters present: convert to UTF-8 and write raw bytes.
nCnt++;
CHeapPtr<char> szWrite;
szWrite.AllocateBytes((size_t)(nCnt));
if (szWrite != NULL)
{
nCnt = AtlUnicodeToUTF8(wszWrite, nSrcLen, szWrite, nCnt);
if (nCnt != 0)
{
hr = pStream->WriteStream(szWrite, nCnt, NULL);
}
}
else
{
ATLTRACE( _T("ATLSOAP: AtlGenXMLValue<wchar_t *> -- out of memory.\r\n") );
hr = E_OUTOFMEMORY;
}
}
// Release the temporary escaped buffer if one was allocated.
if (wszWrite != *pVal)
{
free(wszWrite);
}
return hr;
}
// Writes a double using the XML Schema lexical forms, mapping the IEEE-754
// special classes to the literals "NaN", "-INF", "INF" and "-0"; all other
// values are formatted with gcvt_s and a trailing '.' is trimmed.
template <>
inline HRESULT AtlGenXMLValue<double>(IWriteStream *pStream, double *pVal)
{
if ((pStream == NULL) || (pVal == NULL))
{
return E_INVALIDARG;
}
HRESULT hr;
switch (_fpclass(*pVal))
{
// signaling and quiet NaN both serialize as "NaN"
case _FPCLASS_SNAN:
case _FPCLASS_QNAN:
{
hr = pStream->WriteStream("NaN", 3, NULL);
break;
}
case _FPCLASS_NINF:
{
hr = pStream->WriteStream("-INF", 4, NULL);
break;
}
case _FPCLASS_PINF:
{
hr = pStream->WriteStream("INF", 3, NULL);
break;
}
// negative zero keeps its sign in the serialized form
case _FPCLASS_NZ:
{
hr = pStream->WriteStream("-0", 2, NULL);
break;
}
default:
{
/***
* 2 = sign + decimal point
* ndec = decimal digits
* 5 = exponent letter (e or E), exponent sign, three digits exponent
* 1 = extra space for rounding
* 1 = string terminator '\0'
***/
const int ndec = 512;
CHAR szBuf[ndec+9];
szBuf[0] = '\0';
Checked::gcvt_s(szBuf, _countof(szBuf), *pVal, ndec);
size_t nLen = strlen(szBuf);
// gcvt_s can leave a trailing decimal point (e.g. "42."); strip it.
if (nLen && szBuf[nLen-1] == '.')
{
szBuf[--nLen] = '\0';
}
hr = pStream->WriteStream(szBuf, (int)nLen, NULL);
break;
}
}
return hr;
}
// Serializes a float by widening to double and reusing the double
// specialization, which already handles NaN/INF/-0 and trimming.
template <>
inline HRESULT AtlGenXMLValue<float>(IWriteStream *pStream, float *pVal)
{
    if (pStream == NULL)
    {
        return E_INVALIDARG;
    }
    if (pVal == NULL)
    {
        return E_INVALIDARG;
    }
    double dWide = *pVal;
    return AtlGenXMLValue(pStream, &dWide);
}
// Serializes a bool as the xsd:boolean literals "true"/"false"
// (length excludes the terminating NUL).
template <>
inline HRESULT AtlGenXMLValue<bool>(IWriteStream *pStream, bool *pVal)
{
    if ((pStream == NULL) || (pVal == NULL))
    {
        return E_INVALIDARG;
    }
    const char *szLiteral = (*pVal ? "true" : "false");
    const int cchLiteral = (*pVal ? sizeof("true") - 1 : sizeof("false") - 1);
    return pStream->WriteStream(szLiteral, cchLiteral, NULL);
}
// Encodes a binary blob as Base64 (default) or hex (bHex==true) text and
// writes it to the stream. The encoding buffer comes from pMemMgr and is
// always freed before returning. Returns E_INVALIDARG on NULL arguments,
// E_FAIL on allocation/encode/write failure.
inline HRESULT AtlGenXMLBlobValue(
IWriteStream *pStream,
ATLSOAP_BLOB *pVal,
IAtlMemMgr *pMemMgr,
bool bHex = false)
{
if ((pStream == NULL) || (pVal == NULL) || (pMemMgr == NULL))
{
return E_INVALIDARG;
}
HRESULT hr = E_FAIL;
// Worst-case encoded length for the chosen encoding.
int nLength;
if (!bHex)
{
nLength = Base64EncodeGetRequiredLength(pVal->size, ATLSOAP_BASE64_FLAGS);
}
else
{
nLength = AtlHexEncodeGetRequiredLength(pVal->size);
}
char *pEnc = (char *) pMemMgr->Allocate(nLength);
if (pEnc != NULL)
{
BOOL bRet;
if (!bHex)
{
bRet = Base64Encode(pVal->data, pVal->size, pEnc, &nLength, ATLSOAP_BASE64_FLAGS);
}
else
{
bRet = AtlHexEncode(pVal->data, pVal->size, pEnc, &nLength);
}
// nLength now holds the actual encoded length.
if (bRet)
{
hr = pStream->WriteStream(pEnc, nLength, NULL);
}
pMemMgr->Free(pEnc);
}
return hr;
}
// Generic cleanup: plain scalar types own no resources, so nothing to free.
// Specializations below handle BSTR and ATLSOAP_BLOB.
template <typename T>
inline HRESULT AtlCleanupValue(T * /*pVal*/)
{
return S_OK;
}
// Releases a blob's storage through the memory manager that allocated it
// and resets the blob to the empty state. A blob with NULL data is a no-op.
inline HRESULT AtlCleanupBlobValue(ATLSOAP_BLOB *pVal, IAtlMemMgr *pMemMgr)
{
    if (pVal == NULL || pMemMgr == NULL)
    {
        return E_INVALIDARG;
    }
    if (pVal->data == NULL)
    {
        return S_OK;
    }
    pMemMgr->Free(pVal->data);
    pVal->data = NULL;
    pVal->size = 0;
    return S_OK;
}
// Fallback blob cleanup when no IAtlMemMgr is available: frees with the CRT
// allocator. Prefer AtlCleanupValueEx/AtlCleanupBlobValue so the blob is
// released by the allocator that created it.
template <>
inline HRESULT AtlCleanupValue<ATLSOAP_BLOB>(ATLSOAP_BLOB *pVal)
{
ATLTRACE( _T("Warning: AtlCleanupValue<ATLSOAP_BLOB> was called -- assuming CRT allocator.\r\n") );
if (pVal == NULL)
{
return E_INVALIDARG;
}
if (pVal->data != NULL)
{
free(pVal->data);
pVal->data = NULL;
pVal->size = 0;
}
return S_OK;
}
// Frees a BSTR with SysFreeString and nulls the pointer so repeated cleanup
// is safe. A NULL BSTR value is valid and simply skipped.
template <>
inline HRESULT AtlCleanupValue<BSTR>(BSTR *pVal)
{
if (pVal == NULL)
{
// should never happen
ATLASSERT( FALSE );
return E_INVALIDARG;
}
if ((*pVal) != NULL)
{
// null strings are okay
SysFreeString(*pVal);
*pVal = NULL;
}
return S_OK;
}
// Allocator-aware cleanup entry point. The generic case needs no memory
// manager, so it simply forwards to the allocator-less AtlCleanupValue;
// the ATLSOAP_BLOB specialization below actually uses pMemMgr.
template <typename T>
inline HRESULT AtlCleanupValueEx(T *pVal, IAtlMemMgr *pMemMgr)
{
    (pMemMgr); // referenced only to silence unused-parameter warnings
    return AtlCleanupValue(pVal);
}
// Blob specialization: release through the supplied memory manager.
template <>
inline HRESULT AtlCleanupValueEx<ATLSOAP_BLOB>(ATLSOAP_BLOB *pVal, IAtlMemMgr *pMemMgr)
{
return AtlCleanupBlobValue(pVal, pMemMgr);
}
// single dimensional arrays
// Cleans up each of the nCnt elements of a one-dimensional array with the
// per-type AtlCleanupValue; per-element failures are intentionally ignored.
template <typename T>
inline HRESULT AtlCleanupArray(T *pArray, int nCnt)
{
    if (pArray == NULL)
    {
        return E_INVALIDARG;
    }
    for (T *pElem = pArray; pElem != pArray + nCnt; ++pElem)
    {
        AtlCleanupValue(pElem);
    }
    return S_OK;
}
// Allocator-aware variant of AtlCleanupArray: each element is cleaned with
// AtlCleanupValueEx so blob elements are freed via pMemMgr.
template <typename T>
inline HRESULT AtlCleanupArrayEx(T *pArray, int nCnt, IAtlMemMgr *pMemMgr)
{
    if (pArray == NULL)
    {
        return E_INVALIDARG;
    }
    for (T *pElem = pArray; pElem != pArray + nCnt; ++pElem)
    {
        AtlCleanupValueEx(pElem, pMemMgr);
    }
    return S_OK;
}
// multi-dimensional arrays
// pDims[0] holds the dimension count; pDims[1..pDims[0]] are the extents.
// Flattens the dimensions into a total element count and delegates to the
// one-dimensional cleanup.
template <typename T>
inline HRESULT AtlCleanupArrayMD(T *pArray, const int *pDims)
{
    if ((pArray == NULL) || (pDims == NULL))
    {
        return E_INVALIDARG;
    }
    int nTotal = 1;
    const int nDims = pDims[0];
    for (int nDim = 1; nDim <= nDims; ++nDim)
    {
        nTotal *= pDims[nDim];
    }
    return AtlCleanupArray(pArray, nTotal);
}
// Allocator-aware multi-dimensional cleanup: flattens pDims (count in
// pDims[0], extents following) into a total and delegates to
// AtlCleanupArrayEx so blob elements are freed via pMemMgr.
template <typename T>
inline HRESULT AtlCleanupArrayMDEx(T *pArray, const int *pDims, IAtlMemMgr *pMemMgr)
{
    if ((pArray == NULL) || (pDims == NULL))
    {
        return E_INVALIDARG;
    }
    int nTotal = 1;
    const int nDims = pDims[0];
    for (int nDim = 1; nDim <= nDims; ++nDim)
    {
        nTotal *= pDims[nDim];
    }
    return AtlCleanupArrayEx(pArray, nTotal, pMemMgr);
}
#pragma pack(push,_ATL_PACKING)
namespace ATL
{
////////////////////////////////////////////////////////////////////////////////
//
// CSAXSoapErrorHandler
//
////////////////////////////////////////////////////////////////////////////////
// SAX error handler that captures the parser's error/fatalError message
// into a member string (retrievable via GetParseError).
// Not reference counted: AddRef/Release return a constant, so instances
// must outlive the reader they are registered with (e.g. stack/member use).
class CSAXSoapErrorHandler : public ISAXErrorHandler
{
private:
// last error text reported by the parser
CFixedStringT<CStringW, 256> m_strParseError;
public:
virtual ~CSAXSoapErrorHandler()
{
}
HRESULT __stdcall QueryInterface(REFIID riid, void **ppv)
{
if (!ppv)
{
return E_POINTER;
}
if (InlineIsEqualGUID(riid, __uuidof(ISAXErrorHandler)) ||
InlineIsEqualGUID(riid, __uuidof(IUnknown)))
{
*ppv = static_cast<ISAXErrorHandler*>(this);
return S_OK;
}
return E_NOINTERFACE;
}
// no-op ref counting (object is externally owned)
ULONG __stdcall AddRef()
{
return 1;
}
ULONG __stdcall Release()
{
return 1;
}
// Returns the message captured by the last error/fatalError callback.
const CStringW& GetParseError()
{
return m_strParseError;
}
// Records the error text and propagates the parser's error code.
HRESULT __stdcall error(
ISAXLocator *pLocator,
const wchar_t *wszErrorMessage,
HRESULT hrErrorCode)
{
(pLocator);
(wszErrorMessage);
(hrErrorCode);
ATLTRACE( _T("ATLSOAP: parse error: %ws\r\n"), wszErrorMessage );
_ATLTRY
{
m_strParseError = wszErrorMessage;
}
_ATLCATCHALL()
{
return E_FAIL;
}
return hrErrorCode;
}
// Records the fatal error text and propagates the parser's error code.
HRESULT __stdcall fatalError(
ISAXLocator *pLocator,
const wchar_t *wszErrorMessage,
HRESULT hrErrorCode)
{
(pLocator);
(wszErrorMessage);
(hrErrorCode);
ATLTRACE( _T("ATLSOAP: fatal parse error: %ws\r\n"), wszErrorMessage );
_ATLTRY
{
m_strParseError = wszErrorMessage;
}
_ATLCATCHALL()
{
return E_FAIL;
}
return hrErrorCode;
}
// Warnings are traced but not recorded.
HRESULT __stdcall ignorableWarning(
ISAXLocator *pLocator,
const wchar_t *wszErrorMessage,
HRESULT hrErrorCode)
{
(pLocator);
(wszErrorMessage);
(hrErrorCode);
ATLTRACE( _T("ATLSOAP: ignorable warning: %ws\r\n"), wszErrorMessage );
return hrErrorCode;
}
};
////////////////////////////////////////////////////////////////////////////////
//
// ISAXContentHandlerImpl
//
////////////////////////////////////////////////////////////////////////////////
// Do-nothing base implementation of ISAXContentHandler: every callback
// returns S_OK, so derived handlers override only the events they need.
// Note: IUnknown is not implemented here; derived classes must supply
// QueryInterface/AddRef/Release.
class ISAXContentHandlerImpl :
public ISAXContentHandler
{
public:
//
// ISAXContentHandler interface
//
HRESULT __stdcall putDocumentLocator(ISAXLocator * /*pLocator*/)
{
return S_OK;
}
HRESULT __stdcall startDocument()
{
return S_OK;
}
HRESULT __stdcall endDocument()
{
return S_OK;
}
HRESULT __stdcall startPrefixMapping(
const wchar_t * /*wszPrefix*/,
int /*cchPrefix*/,
const wchar_t * /*wszUri*/,
int /*cchUri*/)
{
return S_OK;
}
HRESULT __stdcall endPrefixMapping(
const wchar_t * /*wszPrefix*/,
int /*cchPrefix*/)
{
return S_OK;
}
HRESULT __stdcall startElement(
const wchar_t * /*wszNamespaceUri*/,
int /*cchNamespaceUri*/,
const wchar_t * /*wszLocalName*/,
int /*cchLocalName*/,
const wchar_t * /*wszQName*/,
int /*cchQName*/,
ISAXAttributes * /*pAttributes*/)
{
return S_OK;
}
HRESULT __stdcall endElement(
const wchar_t * /*wszNamespaceUri*/,
int /*cchNamespaceUri*/,
const wchar_t * /*wszLocalName*/,
int /*cchLocalName*/,
const wchar_t * /*wszQName*/,
int /*cchQName*/)
{
return S_OK;
}
HRESULT __stdcall characters(
const wchar_t * /*wszChars*/,
int /*cchChars*/)
{
return S_OK;
}
HRESULT __stdcall ignorableWhitespace(
const wchar_t * /*wszChars*/,
int /*cchChars*/)
{
return S_OK;
}
HRESULT __stdcall processingInstruction(
const wchar_t * /*wszTarget*/,
int /*cchTarget*/,
const wchar_t * /*wszData*/,
int /*cchData*/)
{
return S_OK;
}
HRESULT __stdcall skippedEntity(
const wchar_t * /*wszName*/,
int /*cchName*/)
{
return S_OK;
}
}; // class ISAXContentHandlerImpl
////////////////////////////////////////////////////////////////////////////////
//
// SAX skip element handler utility class
// (skip an element and all its child elements)
//
////////////////////////////////////////////////////////////////////////////////
// Temporary content handler that consumes an element and all of its
// children. m_dwReset tracks element depth (starting at 1 for the element
// being skipped); when it returns to zero the previous handler is
// reinstated on the reader. Not reference counted (AddRef/Release return
// a constant), so instances must outlive the reader they serve.
class CSkipHandler : public ISAXContentHandlerImpl
{
public:
virtual ~CSkipHandler()
{
}
HRESULT __stdcall QueryInterface(REFIID riid, void **ppv)
{
if (ppv == NULL)
{
return E_POINTER;
}
*ppv = NULL;
if (InlineIsEqualGUID(riid, IID_IUnknown) ||
InlineIsEqualGUID(riid, IID_ISAXContentHandler))
{
*ppv = static_cast<ISAXContentHandler *>(this);
return S_OK;
}
return E_NOINTERFACE;
}
// no-op ref counting (object is externally owned)
ULONG __stdcall AddRef()
{
return 1;
}
ULONG __stdcall Release()
{
return 1;
}
private:
// remaining element depth before control returns to m_spParent
DWORD m_dwReset;
CComPtr<ISAXXMLReader> m_spReader;
CComPtr<ISAXContentHandler> m_spParent;
// Increments the depth counter (entering a nested element).
DWORD DisableReset(DWORD dwCnt = 1)
{
m_dwReset += dwCnt;
return m_dwReset;
}
// Decrements the depth counter (leaving an element); never underflows.
DWORD EnableReset()
{
if (m_dwReset > 0)
{
--m_dwReset;
}
return m_dwReset;
}
public:
CSkipHandler(ISAXContentHandler *pParent = NULL, ISAXXMLReader *pReader = NULL)
: m_spParent(pParent), m_spReader(pReader), m_dwReset(1)
{
}
void SetParent(ISAXContentHandler *pParent)
{
m_spParent = pParent;
}
// Drops the parent reference without releasing it (used to break cycles).
void DetachParent()
{
m_spParent.Detach();
}
void SetReader(ISAXXMLReader *pReader)
{
m_spReader = pReader;
}
// Each nested start increases the depth to skip.
HRESULT __stdcall startElement(
const wchar_t * /*wszNamespaceUri*/,
int /*cchNamespaceUri*/,
const wchar_t * /*wszLocalName*/,
int /*cchLocalName*/,
const wchar_t * /*wszQName*/,
int /*cchQName*/,
ISAXAttributes * /*pAttributes*/)
{
DisableReset();
return S_OK;
}
// When the skipped element closes, restore the parent handler.
HRESULT __stdcall endElement(
const wchar_t * /*wszNamespaceUri*/,
int /*cchNamespaceUri*/,
const wchar_t * /*wszLocalName*/,
int /*cchLocalName*/,
const wchar_t * /*wszQName*/,
int /*cchQName*/)
{
if (EnableReset() == 0)
{
m_spReader->putContentHandler(m_spParent);
}
return S_OK;
}
}; // class CSkipHandler
////////////////////////////////////////////////////////////////////////////////
//
// SAX string builder class
//
////////////////////////////////////////////////////////////////////////////////
// Temporary content handler that rebuilds the textual XML of an element's
// content (tags, attributes and character data) into a string. Used to
// capture unescaped nested XML (e.g. SOAP fault <detail>). m_dwReset
// tracks nesting depth; when the captured element closes, the accumulated
// text is handed to the parent handler via characters() and the parent is
// reinstated on the reader. Not reference counted.
class CSAXStringBuilder : public ISAXContentHandlerImpl
{
public:
HRESULT __stdcall QueryInterface(REFIID riid, void **ppv)
{
if (ppv == NULL)
{
return E_POINTER;
}
*ppv = NULL;
if (InlineIsEqualGUID(riid, IID_IUnknown) ||
InlineIsEqualGUID(riid, IID_ISAXContentHandler))
{
*ppv = static_cast<ISAXContentHandler *>(this);
return S_OK;
}
return E_NOINTERFACE;
}
// no-op ref counting (object is externally owned)
ULONG __stdcall AddRef()
{
return 1;
}
ULONG __stdcall Release()
{
return 1;
}
private:
ISAXContentHandler * m_pParent;
ISAXXMLReader * m_pReader;
// current element nesting depth within the captured fragment
DWORD m_dwReset;
// accumulated XML text
CFixedStringT<CStringW, 64> m_str;
// Increments the nesting depth.
DWORD DisableReset(DWORD dwReset = 1)
{
m_dwReset+= dwReset;
return m_dwReset;
}
// Decrements the nesting depth; never underflows.
DWORD EnableReset()
{
if (m_dwReset > 0)
{
--m_dwReset;
}
return m_dwReset;
}
public:
CSAXStringBuilder(ISAXXMLReader *pReader = NULL, ISAXContentHandler *pParent = NULL)
:m_pReader(pReader), m_pParent(pParent), m_dwReset(0)
{
}
virtual ~CSAXStringBuilder()
{
}
void SetReader(ISAXXMLReader *pReader)
{
m_pReader = pReader;
}
void SetParent(ISAXContentHandler *pParent)
{
m_pParent = pParent;
}
const CStringW& GetString()
{
return m_str;
}
// Resets the accumulated text and depth for reuse.
void Clear()
{
m_str.Empty();
m_dwReset = 0;
}
// Appends the element's start tag (with attributes) as text.
HRESULT __stdcall startElement(
const wchar_t * /*wszNamespaceUri*/,
int /*cchNamespaceUri*/,
const wchar_t * /*wszLocalName*/,
int /*cchLocalName*/,
const wchar_t *wszQName,
int cchQName,
ISAXAttributes *pAttributes)
{
if (m_dwReset == 0)
{
// if there is unescaped, nested XML, must disable
// an additional time for the first element
DisableReset();
}
DisableReset();
int nAttrs = 0;
HRESULT hr = pAttributes->getLength(&nAttrs);
_ATLTRY
{
if (SUCCEEDED(hr))
{
// reconstruct: <QName attr="value" ...>
m_str.Append(L"<", 1);
m_str.Append(wszQName, cchQName);
const wchar_t *wszAttrNamespaceUri = NULL;
const wchar_t *wszAttrLocalName = NULL;
const wchar_t *wszAttrQName = NULL;
const wchar_t *wszAttrValue = NULL;
int cchAttrUri = 0;
int cchAttrLocalName = 0;
int cchAttrQName = 0;
int cchAttrValue = 0;
for (int i=0; i<nAttrs; i++)
{
hr = pAttributes->getName(i, &wszAttrNamespaceUri, &cchAttrUri,
&wszAttrLocalName, &cchAttrLocalName, &wszAttrQName, &cchAttrQName);
if (FAILED(hr))
{
ATLTRACE( _T("ATLSOAP: CSAXStringBuilder::startElement -- MSXML error.\r\n") );
break;
}
m_str.Append(L" ", 1);
m_str.Append(wszAttrQName, cchAttrQName);
hr = pAttributes->getValue(i, &wszAttrValue, &cchAttrValue);
if (FAILED(hr))
{
ATLTRACE( _T("ATLSOAP: CSAXStringBuilder::startElement -- MSXML error.\r\n") );
break;
}
m_str.Append(L"=\"", sizeof("=\"")-1);
if (cchAttrValue != 0)
{
m_str.Append(wszAttrValue, cchAttrValue);
}
m_str.Append(L"\"", 1);
}
if (SUCCEEDED(hr))
{
m_str.Append(L">", 1);
}
}
}
_ATLCATCHALL()
{
ATLTRACE( _T("ATLSOAP: CSAXStringBuilder::startElement -- out of memory.\r\n") );
hr = E_OUTOFMEMORY;
}
return hr;
}
// Appends the close tag; when the outermost captured element ends,
// delivers the accumulated text to the parent and restores it as the
// reader's content handler.
HRESULT __stdcall endElement(
const wchar_t * wszNamespaceUri,
int cchNamespaceUri,
const wchar_t * wszLocalName,
int cchLocalName,
const wchar_t *wszQName,
int cchQName)
{
HRESULT hr = S_OK;
_ATLTRY
{
if (EnableReset() == 0)
{
hr = m_pParent->characters((LPCWSTR) m_str, m_str.GetLength());
if (SUCCEEDED(hr))
{
hr = m_pParent->endElement(wszNamespaceUri, cchNamespaceUri,
wszLocalName, cchLocalName, wszQName, cchQName);
}
m_pReader->putContentHandler(m_pParent);
}
if (m_dwReset > 0)
{
m_str.Append(L"</", 2);
m_str.Append(wszQName, cchQName);
m_str.Append(L">", 1);
}
}
_ATLCATCHALL()
{
ATLTRACE( _T("ATLSOAP: CSAXStringBuilder::endElement -- out of memory.\r\n") );
hr = E_OUTOFMEMORY;
}
return hr;
}
// Character data is appended verbatim.
HRESULT __stdcall characters(
const wchar_t *wszChars,
int cchChars)
{
_ATLTRY
{
m_str.Append(wszChars, cchChars);
}
_ATLCATCHALL()
{
ATLTRACE( _T("ATLSOAP: CSAXStringBuilder::characters -- out of memory.\r\n") );
return E_OUTOFMEMORY;
}
return S_OK;
}
// Whitespace is preserved so the rebuilt fragment matches the input.
HRESULT __stdcall ignorableWhitespace(
const wchar_t *wszChars,
int cchChars)
{
_ATLTRY
{
m_str.Append(wszChars, cchChars);
}
_ATLCATCHALL()
{
ATLTRACE( _T("ATLSOAP: CSAXStringBuilder::ignorableWhitespace -- out of memory.\r\n") );
return E_OUTOFMEMORY;
}
return S_OK;
}
}; // class CSAXStringBuilder
} // namespace ATL
#pragma pack(pop)
////////////////////////////////////////////////////////////////////////////////
//
// SOAP data structure definitions
//
////////////////////////////////////////////////////////////////////////////////
//
// ***************************** WARNING *****************************
// THESE STRUCTURES ARE INTERNAL ONLY, FOR USE WITH THE ATL SERVER SOAP
// ATTRIBUTES. USERS SHOULD NOT USE THESE TYPES DIRECTLY. ABSOLUTELY NO
// GUARANTEES ARE MADE ABOUT BACKWARD COMPATIBILITY FOR DIRECT USE OF
// THESE TYPES.
//
////////////////////////////////////////////////////////////////////////////////
//
// BEGIN PRIVATE DEFINITIONS
//
////////////////////////////////////////////////////////////////////////////////
// Parses a SOAP Section-5 SOAP-ENC:arrayType attribute (e.g.
// 'xsd:string[2,4]') from the element's attributes. On success *pnSize
// receives the total element count (product of the dimensions) and, if
// requested, *pwszTypeStart/*pwszTypeEnd bracket the element type name.
// Returns S_FALSE when the attribute is absent or the size is unspecified
// ("[]"), S_OK on a parsed size, E_FAIL/E_OUTOFMEMORY on errors.
inline HRESULT AtlSoapGetArraySize(ISAXAttributes *pAttributes, size_t *pnSize,
const wchar_t **pwszTypeStart = NULL, const wchar_t **pwszTypeEnd = NULL)
{
if (pnSize == NULL)
{
return E_POINTER;
}
if (pAttributes == NULL)
{
return E_INVALIDARG;
}
*pnSize = 0;
HRESULT hr = S_OK;
_ATLTRY
{
const wchar_t *wszTmp;
int cch;
hr = GetAttribute(pAttributes, L"arrayType", sizeof("arrayType")-1,
&wszTmp, &cch, SOAPENC_NAMESPACEW, sizeof(SOAPENC_NAMESPACEA)-1);
if ((SUCCEEDED(hr)) && (wszTmp != NULL))
{
hr = E_FAIL;
// NUL-terminated working copy of the attribute value
CFixedStringT<CStringW, 1024> wstrArrayType(wszTmp, cch);
const wchar_t *wsz = static_cast<LPCWSTR>(wstrArrayType);
const wchar_t *wszTypeStart = NULL;
const wchar_t *wszTypeEnd = NULL;
// skip spaces
while (iswspace(*wsz) != 0)
{
wsz++;
}
// no need to walk the string if the caller is not interested
if ((pwszTypeStart != NULL) && (pwszTypeEnd != NULL))
{
// strip an optional namespace prefix from the type QName
wszTypeStart = wsz;
wszTypeEnd = wcschr(wszTypeStart, L':');
if (wszTypeEnd != NULL)
{
wszTypeStart = wszTypeEnd+1;
}
}
// SOAP Section 5 encodings are of the form:
// <soap_enc namespace>:arrayType="<type_qname>[dim1(,dim_i)*]
// for example: SOAP-ENC:arrayType="xsd:string[2,4]"
wsz = wcschr(wsz, L'[');
if (wsz != NULL)
{
wszTypeEnd = wsz-1;
// "[]" means the size is unspecified
if (wsz[1] == ']')
{
return S_FALSE;
}
*pnSize = 1;
// get the size of each dimension
while (wsz != NULL)
{
wsz++;
int nDim = _wtoi(wsz);
if (nDim < 0)
{
hr = E_FAIL;
break;
}
*pnSize *= (size_t) nDim;
if (!nDim)
{
break;
}
wsz = wcschr(wsz, L',');
}
if ((pwszTypeStart != NULL) && (pwszTypeEnd != NULL))
{
// NOTE: these point into wstrArrayType's buffer -- only valid
// while that local string is alive; callers must copy promptly.
*pwszTypeStart = wszTypeStart;
*pwszTypeEnd = wszTypeEnd;
}
hr = S_OK;
}
}
else
{
// not a section-5 encoding
hr = S_FALSE;
}
}
_ATLCATCHALL()
{
hr = E_OUTOFMEMORY;
}
return hr;
}
// Returns the total number of elements of a multi-dimensional array
// descriptor: pDims[0] is the dimension count, pDims[1..pDims[0]] are the
// extents. An empty descriptor (pDims[0]==0) yields 1; NULL yields 0.
inline size_t AtlSoapGetArrayDims(const int *pDims)
{
    if (pDims == NULL)
    {
        return 0;
    }
    size_t nTotal = 1;
    const int nDims = pDims[0];
    for (int nDim = 1; nDim <= nDims; ++nDim)
    {
        nTotal *= pDims[nDim];
    }
    return nTotal;
}
// Bit flags describing how a SOAP parameter/field is marshaled
// (direction, array kind, encoding style, etc.). Combined in the
// dwFlags member of _soapmapentry and dwCallFlags of _soapmap.
enum SOAPFLAGS
{
SOAPFLAG_NONE = 0x00000000,
SOAPFLAG_IN = 0x00000001, // input parameter
SOAPFLAG_OUT = 0x00000002, // output parameter
SOAPFLAG_RETVAL = 0x00000004, // return value
SOAPFLAG_DYNARR = 0x00000008, // dynamically sized array
SOAPFLAG_FIXEDARR = 0x00000010, // fixed-size array
SOAPFLAG_MUSTUNDERSTAND = 0x00000020, // SOAP mustUnderstand header
SOAPFLAG_UNKSIZE = 0x00000040,
SOAPFLAG_READYSTATE = 0x00000080,
SOAPFLAG_FIELD = 0x00000100,
SOAPFLAG_NOMARSHAL = 0x00000200,
SOAPFLAG_NULLABLE = 0x00000400,
SOAPFLAG_DOCUMENT = 0x00000800, // document-style call
SOAPFLAG_RPC = 0x00001000, // RPC-style call
SOAPFLAG_LITERAL = 0x00002000, // literal encoding
SOAPFLAG_ENCODED = 0x00004000, // SOAP-section-5 encoding
SOAPFLAG_PID = 0x00008000,
SOAPFLAG_PAD = 0x00010000,
SOAPFLAG_CHAIN = 0x00020000,
SOAPFLAG_SIZEIS = 0x00040000,
SOAPFLAG_DYNARRWRAPPER = 0x00080000
};
// Kind of entity a _soapmap describes.
enum SOAPMAPTYPE
{
SOAPMAP_ERR = 0, // invalid/unknown map
SOAPMAP_ENUM, // enumeration
SOAPMAP_FUNC, // function (method) map
SOAPMAP_STRUCT, // struct type
SOAPMAP_UNION, // union type
SOAPMAP_HEADER, // SOAP header map
SOAPMAP_PARAM // parameter map
};
struct _soapmap;
// One field/parameter entry of a SOAP map: pre-computed name hash, ANSI
// and wide spellings of the element name, byte offset within the owning
// type, fixed-array dimensions, optional chained sub-map for nested types
// and namespace information.
struct _soapmapentry
{
ULONG nHash; // pre-computed hash of szField (AtlSoapHashStr)
const char * szField; // element name (ANSI)
const WCHAR * wszField; // element name (wide)
int cchField; // length of the name in characters
int nVal; // enum value / type code
DWORD dwFlags; // SOAPFLAGS combination
size_t nOffset; // byte offset of the field in the owning struct
const int * pDims; // fixed-array dims: [0]=count, [1..]=extents
const _soapmap * pChain; // sub-map for nested struct/enum types
int nSizeIs; // index of the size_is entry for dynamic arrays
ULONG nNamespaceHash; // pre-computed hash of szNamespace
const char *szNamespace;
const wchar_t *wszNamespace;
int cchNamespace;
};
// Describes one SOAP entity (function, struct, header, enum...): its name
// in both encodings, its entry table and element geometry, plus namespace
// information. Generated by the ATL Server SOAP attribute providers.
struct _soapmap
{
ULONG nHash; // pre-computed hash of szName
const char * szName; // entity name (ANSI)
const wchar_t * wszName; // entity name (wide)
int cchName; // ANSI name length
int cchWName; // wide name length
SOAPMAPTYPE mapType; // what kind of map this is
const _soapmapentry * pEntries; // entry table (fields/params)
size_t nElementSize; // size in bytes of one element/struct
size_t nElements; // number of entries
int nRetvalIndex; // index of the retval entry, or -1
DWORD dwCallFlags; // SOAPFLAGS for the call style
ULONG nNamespaceHash;
const char *szNamespace;
const wchar_t *wszNamespace;
int cchNamespace;
};
// Type codes for XML Schema / SOAP primitive types. Values below
// SOAPTYPE_USERBASE are built-in; user-defined types start at
// SOAPTYPE_USERBASE. Types without a dedicated C++ mapping are treated
// as strings (BSTR) by the marshaling helpers below.
enum SOAPTYPES
{
SOAPTYPE_ERR = -2, // error sentinel
SOAPTYPE_UNK = -1, // unknown type
// string-mapped types
SOAPTYPE_STRING = 0,
SOAPTYPE_BOOLEAN,
SOAPTYPE_FLOAT,
SOAPTYPE_DOUBLE,
SOAPTYPE_DECIMAL,
SOAPTYPE_DURATION,
SOAPTYPE_HEXBINARY,
SOAPTYPE_BASE64BINARY,
SOAPTYPE_ANYURI,
SOAPTYPE_ID,
SOAPTYPE_IDREF,
SOAPTYPE_ENTITY,
SOAPTYPE_NOTATION,
SOAPTYPE_QNAME,
SOAPTYPE_NORMALIZEDSTRING,
SOAPTYPE_TOKEN,
SOAPTYPE_LANGUAGE,
SOAPTYPE_IDREFS,
SOAPTYPE_ENTITIES,
SOAPTYPE_NMTOKEN,
SOAPTYPE_NMTOKENS,
SOAPTYPE_NAME,
SOAPTYPE_NCNAME,
// integral types
SOAPTYPE_INTEGER,
SOAPTYPE_NONPOSITIVEINTEGER,
SOAPTYPE_NEGATIVEINTEGER,
SOAPTYPE_LONG,
SOAPTYPE_INT,
SOAPTYPE_SHORT,
SOAPTYPE_BYTE,
SOAPTYPE_NONNEGATIVEINTEGER,
SOAPTYPE_UNSIGNEDLONG,
SOAPTYPE_UNSIGNEDINT,
SOAPTYPE_UNSIGNEDSHORT,
SOAPTYPE_UNSIGNEDBYTE,
SOAPTYPE_POSITIVEINTEGER,
// date/time types (string-mapped)
SOAPTYPE_DATETIME,
SOAPTYPE_TIME,
SOAPTYPE_DATE,
SOAPTYPE_GMONTH,
SOAPTYPE_GYEARMONTH,
SOAPTYPE_GYEAR,
SOAPTYPE_GMONTHDAY,
SOAPTYPE_GDAY,
// user-defined types start here
SOAPTYPE_USERBASE = 0x00001000
};
// Times-33 (djb2-style) hash of a NUL-terminated ANSI string.
inline ULONG AtlSoapHashStr(const char * sz)
{
    ULONG nHash = 0;
    for (const char *p = sz; *p != 0; ++p)
    {
        nHash = nHash * 33 + (*p);
    }
    return nHash;
}
// Times-33 (djb2-style) hash of a NUL-terminated wide string.
inline ULONG AtlSoapHashStr(const wchar_t * sz)
{
    ULONG nHash = 0;
    for (const wchar_t *p = sz; *p != 0; ++p)
    {
        nHash = nHash * 33 + (*p);
    }
    return nHash;
}
// Times-33 hash of exactly cch ANSI characters (no NUL terminator needed).
inline ULONG AtlSoapHashStr(const char * sz, int cch)
{
    ULONG nHash = 0;
    for (const char *p = sz; p != sz + cch; ++p)
    {
        nHash = nHash * 33 + (*p);
    }
    return nHash;
}
// Times-33 hash of exactly cch wide characters (no NUL terminator needed).
inline ULONG AtlSoapHashStr(const wchar_t * sz, int cch)
{
    ULONG nHash = 0;
    for (const wchar_t *p = sz; p != sz + cch; ++p)
    {
        nHash = nHash * 33 + (*p);
    }
    return nHash;
}
// Returns the in-memory size (bytes) of the C++ type used to hold one
// element of the given SOAP type. Types without a dedicated mapping are
// stored as BSTR; invalid codes return 0 (with a debug assert).
inline size_t AtlSoapGetElementSize(SOAPTYPES type)
{
size_t nRet;
switch (type)
{
case SOAPTYPE_BOOLEAN:
nRet = sizeof(bool);
break;
case SOAPTYPE_FLOAT:
nRet = sizeof(float);
break;
case SOAPTYPE_DOUBLE:
case SOAPTYPE_DECIMAL:
nRet = sizeof(double);
break;
// binary types are stored as length+pointer blobs
case SOAPTYPE_HEXBINARY:
case SOAPTYPE_BASE64BINARY:
nRet = sizeof(ATLSOAP_BLOB);
break;
// unbounded XSD integer types map to 64-bit storage
case SOAPTYPE_INTEGER:
case SOAPTYPE_NONPOSITIVEINTEGER:
case SOAPTYPE_NEGATIVEINTEGER:
case SOAPTYPE_LONG:
nRet = sizeof(__int64);
break;
case SOAPTYPE_INT:
nRet = sizeof(int);
break;
case SOAPTYPE_SHORT:
nRet = sizeof(short);
break;
case SOAPTYPE_BYTE:
nRet = sizeof(char);
break;
case SOAPTYPE_POSITIVEINTEGER:
case SOAPTYPE_NONNEGATIVEINTEGER:
case SOAPTYPE_UNSIGNEDLONG:
nRet = sizeof(unsigned __int64);
break;
case SOAPTYPE_UNSIGNEDINT:
nRet = sizeof(unsigned int);
break;
case SOAPTYPE_UNSIGNEDSHORT:
nRet = sizeof(unsigned short);
break;
case SOAPTYPE_UNSIGNEDBYTE:
nRet = sizeof(unsigned char);
break;
default:
if ((type != SOAPTYPE_ERR) && (type != SOAPTYPE_UNK) && (type != SOAPTYPE_USERBASE))
{
// treat as string
nRet = sizeof(BSTR);
}
else
{
ATLTRACE( _T("ATLSOAP: AtlSoapGetElementSize -- internal error.\r\n") );
// should never get here
ATLASSERT( FALSE );
nRet = 0;
}
break;
}
return nRet;
}
// Deserializes SOAP character data into the typed storage at pVal by
// dispatching on the SOAP type code to the matching AtlGetSAXValue
// overload (or AtlGetSAXBlobValue for binary types, which allocates from
// pMemMgr). Unmapped type codes are parsed as strings into a BSTR.
inline HRESULT AtlSoapGetElementValue(const wchar_t *wsz, int cch,
void *pVal, SOAPTYPES type, IAtlMemMgr *pMemMgr)
{
HRESULT hr = E_FAIL;
switch (type)
{
case SOAPTYPE_BOOLEAN:
hr = AtlGetSAXValue((bool *)pVal, wsz, cch);
break;
case SOAPTYPE_FLOAT:
hr = AtlGetSAXValue((float *)pVal, wsz, cch);
break;
case SOAPTYPE_DOUBLE:
case SOAPTYPE_DECIMAL:
hr = AtlGetSAXValue((double *)pVal, wsz, cch);
break;
// binary types decode into an ATLSOAP_BLOB via pMemMgr
case SOAPTYPE_HEXBINARY:
hr = AtlGetSAXBlobValue((ATLSOAP_BLOB *)pVal, wsz, cch, pMemMgr, true);
break;
case SOAPTYPE_BASE64BINARY:
hr = AtlGetSAXBlobValue((ATLSOAP_BLOB *)pVal, wsz, cch, pMemMgr, false);
break;
case SOAPTYPE_INTEGER:
case SOAPTYPE_NONPOSITIVEINTEGER:
case SOAPTYPE_NEGATIVEINTEGER:
case SOAPTYPE_LONG:
hr = AtlGetSAXValue((__int64 *)pVal, wsz, cch);
break;
case SOAPTYPE_INT:
hr = AtlGetSAXValue((int *)pVal, wsz, cch);
break;
case SOAPTYPE_SHORT:
hr = AtlGetSAXValue((short *)pVal, wsz, cch);
break;
case SOAPTYPE_BYTE:
hr = AtlGetSAXValue((char *)pVal, wsz, cch);
break;
case SOAPTYPE_POSITIVEINTEGER:
case SOAPTYPE_NONNEGATIVEINTEGER:
case SOAPTYPE_UNSIGNEDLONG:
hr = AtlGetSAXValue((unsigned __int64 *)pVal, wsz, cch);
break;
case SOAPTYPE_UNSIGNEDINT:
hr = AtlGetSAXValue((unsigned int *)pVal, wsz, cch);
break;
case SOAPTYPE_UNSIGNEDSHORT:
hr = AtlGetSAXValue((unsigned short *)pVal, wsz, cch);
break;
case SOAPTYPE_UNSIGNEDBYTE:
hr = AtlGetSAXValue((unsigned char *)pVal, wsz, cch);
break;
default:
// any other valid type is treated as a string
if ((type != SOAPTYPE_ERR) && (type != SOAPTYPE_UNK) && (type != SOAPTYPE_USERBASE))
{
hr = AtlGetSAXValue((BSTR *)pVal, wsz, cch);
}
#ifdef _DEBUG
else
{
ATLTRACE( _T("ATLSOAP: AtlSoapGetElementValue -- internal error.\r\n") );
// should never get here
ATLASSERT( FALSE );
}
#endif
break;
}
return hr;
}
// Serializes the typed value at pVal to the stream by dispatching on the
// SOAP type code to the matching AtlGenXMLValue overload (or
// AtlGenXMLBlobValue for binary types, which encodes via pMemMgr).
// Unmapped type codes are written as strings (BSTR).
inline HRESULT AtlSoapGenElementValue(void *pVal, IWriteStream *pStream, SOAPTYPES type, IAtlMemMgr *pMemMgr)
{
HRESULT hr = E_FAIL;
switch (type)
{
case SOAPTYPE_BOOLEAN:
hr = AtlGenXMLValue(pStream, (bool *)pVal);
break;
case SOAPTYPE_FLOAT:
hr = AtlGenXMLValue(pStream, (float *)pVal);
break;
case SOAPTYPE_DOUBLE:
case SOAPTYPE_DECIMAL:
hr = AtlGenXMLValue(pStream, (double *)pVal);
break;
// binary types encode the ATLSOAP_BLOB as hex/base64 text
case SOAPTYPE_HEXBINARY:
hr = AtlGenXMLBlobValue(pStream, (ATLSOAP_BLOB *)pVal, pMemMgr, true);
break;
case SOAPTYPE_BASE64BINARY:
hr = AtlGenXMLBlobValue(pStream, (ATLSOAP_BLOB *)pVal, pMemMgr, false);
break;
case SOAPTYPE_INTEGER:
case SOAPTYPE_NONPOSITIVEINTEGER:
case SOAPTYPE_NEGATIVEINTEGER:
case SOAPTYPE_LONG:
hr = AtlGenXMLValue(pStream, (__int64 *)pVal);
break;
case SOAPTYPE_INT:
hr = AtlGenXMLValue(pStream, (int *)pVal);
break;
case SOAPTYPE_SHORT:
hr = AtlGenXMLValue(pStream, (short *)pVal);
break;
case SOAPTYPE_BYTE:
hr = AtlGenXMLValue(pStream, (char *)pVal);
break;
case SOAPTYPE_POSITIVEINTEGER:
case SOAPTYPE_NONNEGATIVEINTEGER:
case SOAPTYPE_UNSIGNEDLONG:
hr = AtlGenXMLValue(pStream, (unsigned __int64 *)pVal);
break;
case SOAPTYPE_UNSIGNEDINT:
hr = AtlGenXMLValue(pStream, (unsigned int *)pVal);
break;
case SOAPTYPE_UNSIGNEDSHORT:
hr = AtlGenXMLValue(pStream, (unsigned short *)pVal);
break;
case SOAPTYPE_UNSIGNEDBYTE:
hr = AtlGenXMLValue(pStream, (unsigned char *)pVal);
break;
default:
// any other valid type is written as a string
if ((type != SOAPTYPE_ERR) && (type != SOAPTYPE_UNK) && (type != SOAPTYPE_USERBASE))
{
hr = AtlGenXMLValue(pStream, (BSTR *)pVal);
}
#ifdef _DEBUG
else
{
ATLTRACE( _T("ATLSOAP: AtlSoapGenElementValue -- internal error.\r\n" ) );
// should never get here
ATLASSERT( FALSE );
}
#endif
break;
}
return hr;
}
// Releases any resources held by the typed value at pVal: numeric types
// need no cleanup, binary blobs are freed through pMemMgr, and everything
// else is treated as a BSTR and freed with SysFreeString.
inline HRESULT AtlSoapCleanupElement(void *pVal, SOAPTYPES type, IAtlMemMgr *pMemMgr)
{
HRESULT hr = S_OK;
switch (type)
{
// plain scalar types own no resources
case SOAPTYPE_BOOLEAN:
case SOAPTYPE_FLOAT:
case SOAPTYPE_DOUBLE:
case SOAPTYPE_DECIMAL:
case SOAPTYPE_INT:
case SOAPTYPE_INTEGER:
case SOAPTYPE_NONPOSITIVEINTEGER:
case SOAPTYPE_NEGATIVEINTEGER:
case SOAPTYPE_LONG:
case SOAPTYPE_SHORT:
case SOAPTYPE_BYTE:
case SOAPTYPE_POSITIVEINTEGER:
case SOAPTYPE_NONNEGATIVEINTEGER:
case SOAPTYPE_UNSIGNEDLONG:
case SOAPTYPE_UNSIGNEDINT:
case SOAPTYPE_UNSIGNEDSHORT:
case SOAPTYPE_UNSIGNEDBYTE:
break;
case SOAPTYPE_HEXBINARY:
case SOAPTYPE_BASE64BINARY:
hr = AtlCleanupBlobValue((ATLSOAP_BLOB *)pVal, pMemMgr);
break;
default:
if ((type != SOAPTYPE_ERR) && (type != SOAPTYPE_UNK) && (type != SOAPTYPE_USERBASE))
{
// treat as string
hr = AtlCleanupValue((BSTR *)pVal);
}
#ifdef _DEBUG
else
{
ATLTRACE( _T("ATLSOAP: AtlSoapCleanupElement -- internal error.\r\n" ) );
// should never get here
ATLASSERT( FALSE );
}
#endif
break;
}
return hr;
}
////////////////////////////////////////////////////////////////////////////////
//
// END PRIVATE DEFINITIONS
//
////////////////////////////////////////////////////////////////////////////////
#define SOAP_ENVELOPEA "Envelope"
#define SOAP_ENVELOPEW ATLSOAP_MAKEWIDESTR( SOAP_ENVELOPEA )
#define SOAP_HEADERA "Header"
#define SOAP_HEADERW ATLSOAP_MAKEWIDESTR( SOAP_HEADERA )
#define SOAP_BODYA "Body"
#define SOAP_BODYW ATLSOAP_MAKEWIDESTR( SOAP_BODYA )
#pragma pack(push,_ATL_PACKING)
namespace ATL
{
//
// SOAP fault helpers
//
// Coarse SOAP fault categories mapped from the standard faultcode values
// (see CSoapFault::s_faultCodes for the name-to-code table).
enum SOAP_ERROR_CODE
{
SOAP_E_UNK=0, // unknown/unset
SOAP_E_VERSION_MISMATCH=100, // SOAP-ENV:VersionMismatch
SOAP_E_MUST_UNDERSTAND=200, // SOAP-ENV:MustUnderstand
SOAP_E_CLIENT=300, // SOAP-ENV:Client
SOAP_E_SERVER=400 // SOAP-ENV:Server
};
// forward declaration of CSoapFault
class CSoapFault;
// SAX content handler that parses a SOAP <Fault> response into a
// CSoapFault. m_dwState is a small state machine tracking which fault
// element is currently open; faultcode/faultstring/faultactor/detail
// content is captured with CSAXStringBuilder, and unrecognized child
// elements are consumed with CSkipHandler. Not reference counted.
class CSoapFaultParser : public ISAXContentHandlerImpl
{
private:
CSoapFault *m_pFault; // destination fault object (not owned)
DWORD m_dwState;
// parser states; values above STATE_START identify which fault
// sub-element is being captured
const static DWORD STATE_ERROR = 0;
const static DWORD STATE_ENVELOPE = 1;
const static DWORD STATE_BODY = 2;
const static DWORD STATE_START = 4;
const static DWORD STATE_FAULTCODE = 8;
const static DWORD STATE_FAULTSTRING = 16;
const static DWORD STATE_FAULTACTOR = 32;
const static DWORD STATE_DETAIL = 64;
const static DWORD STATE_RESET = 128;
const static DWORD STATE_SKIP = 256;
CComPtr<ISAXXMLReader> m_spReader;
CSAXStringBuilder m_stringBuilder; // captures element text content
CSkipHandler m_skipHandler; // swallows unknown elements
// SOAP-ENV namespace prefix seen in startPrefixMapping
// NOTE(review): these are only set if startPrefixMapping sees the
// SOAP-ENV namespace; they are not initialized in the constructor --
// presumably characters() only reads them after a successful mapping.
const wchar_t *m_wszSoapPrefix;
int m_cchSoapPrefix;
public:
virtual ~CSoapFaultParser()
{
// break the cycle: m_skipHandler holds a reference back to this
m_skipHandler.DetachParent();
}
// IUnknown interface
HRESULT __stdcall QueryInterface(REFIID riid, void **ppv)
{
if (ppv == NULL)
{
return E_POINTER;
}
*ppv = NULL;
if (InlineIsEqualGUID(riid, IID_IUnknown) ||
InlineIsEqualGUID(riid, IID_ISAXContentHandler))
{
*ppv = static_cast<ISAXContentHandler *>(this);
return S_OK;
}
return E_NOINTERFACE;
}
// no-op ref counting (object is externally owned)
ULONG __stdcall AddRef()
{
return 1;
}
ULONG __stdcall Release()
{
return 1;
}
// constructor
CSoapFaultParser(CSoapFault *pFault, ISAXXMLReader *pReader)
:m_pFault(pFault), m_dwState(STATE_ERROR), m_spReader(pReader)
{
ATLASSERT( pFault != NULL );
ATLASSERT( pReader != NULL );
}
// ISAXContentHandler interface
// Advances the state machine on each recognized fault element; content
// elements are redirected to the string builder, unknown elements below
// <Fault> are skipped, and out-of-order elements fail the parse.
HRESULT __stdcall startElement(
const wchar_t * wszNamespaceUri,
int cchNamespaceUri,
const wchar_t * wszLocalName,
int cchLocalName,
const wchar_t * /*wszQName*/,
int /*cchQName*/,
ISAXAttributes * /*pAttributes*/)
{
// maps element local names to parser states
struct _faultmap
{
const wchar_t *wszTag;
int cchTag;
DWORD dwState;
};
const static _faultmap s_faultParseMap[] =
{
{ L"Envelope", sizeof("Envelope")-1, CSoapFaultParser::STATE_ENVELOPE },
{ L"Body", sizeof("Body")-1, CSoapFaultParser::STATE_BODY },
{ L"Header", sizeof("Header")-1, CSoapFaultParser::STATE_BODY },
{ L"Fault", sizeof("Fault")-1, CSoapFaultParser::STATE_START },
{ L"faultcode", sizeof("faultcode")-1, CSoapFaultParser::STATE_FAULTCODE },
{ L"faultstring", sizeof("faultstring")-1, CSoapFaultParser::STATE_FAULTSTRING },
{ L"faultactor", sizeof("faultactor")-1, CSoapFaultParser::STATE_FAULTACTOR },
{ L"detail", sizeof("detail")-1, CSoapFaultParser::STATE_DETAIL }
};
if (m_spReader.p == NULL)
{
ATLTRACE( _T("ATLSOAP: CSoapFaultParser::startElement -- ISAXXMLReader is NULL.\r\n" ) );
return E_INVALIDARG;
}
m_dwState &= ~STATE_RESET;
for (int i=0; i<(sizeof(s_faultParseMap)/sizeof(s_faultParseMap[0])); i++)
{
if ((cchLocalName == s_faultParseMap[i].cchTag) &&
(!wcsncmp(wszLocalName, s_faultParseMap[i].wszTag, cchLocalName)))
{
DWORD dwState = s_faultParseMap[i].dwState;
if ((dwState & (STATE_START | STATE_ENVELOPE | STATE_BODY)) == 0)
{
// content-bearing fault field: collect its text
m_stringBuilder.SetReader(m_spReader);
m_stringBuilder.SetParent(this);
m_stringBuilder.Clear();
m_spReader->putContentHandler( &m_stringBuilder );
}
else
{
// structural element: must arrive in order and be in the
// SOAP envelope namespace
if ((dwState <= m_dwState) ||
(cchNamespaceUri != sizeof(SOAPENV_NAMESPACEA)-1) ||
(wcsncmp(wszNamespaceUri, SOAPENV_NAMESPACEW, cchNamespaceUri)))
{
ATLTRACE( _T("ATLSOAP: CSoapFaultParser::startElement -- malformed SOAP fault.\r\n" ) );
return E_FAIL;
}
}
m_dwState = dwState;
return S_OK;
}
}
// unknown element inside <Fault>: skip it and its children
if (m_dwState > STATE_START)
{
m_dwState = STATE_SKIP;
m_skipHandler.SetReader(m_spReader);
m_skipHandler.SetParent(this);
m_spReader->putContentHandler( &m_skipHandler );
return S_OK;
}
ATLTRACE( _T("ATLSOAP: CSoapFaultParser::startElement -- malformed SOAP fault.\r\n" ) );
return E_FAIL;
}
// Remembers the prefix bound to the SOAP envelope namespace so the
// faultcode QName prefix can be validated later.
HRESULT __stdcall startPrefixMapping(
const wchar_t * wszPrefix,
int cchPrefix,
const wchar_t * wszUri,
int cchUri)
{
if ((cchUri == sizeof(SOAPENV_NAMESPACEA)-1) &&
(!wcsncmp(wszUri, SOAPENV_NAMESPACEW, cchUri)))
{
m_wszSoapPrefix = wszPrefix;
m_cchSoapPrefix = cchPrefix;
}
return S_OK;
}
// defined out-of-line (needs the full CSoapFault definition)
HRESULT __stdcall characters(
const wchar_t * wszChars,
int cchChars);
};
// Number of entries in CSoapFault::s_faultCodes (the four standard SOAP 1.1
// fault codes declared with DECLARE_SOAP_FAULT below).
extern __declspec(selectany) const int ATLS_SOAPFAULT_CNT = 4;
// Represents a SOAP 1.1 Fault: holds the faultcode/faultstring/faultactor/
// detail values and can both parse a fault from a response stream
// (ParseFault) and serialize one into an envelope (GenerateFault).
class CSoapFault
{
private:
	// Table entry tying a standard faultcode local name to its default
	// fault string and the corresponding SOAP_ERROR_CODE.
	struct _faultcode
	{
		const wchar_t *wsz;
		int cch;
		const wchar_t *wszFaultString;
		int cchFaultString;
		SOAP_ERROR_CODE errCode;
	};
	// the standard fault codes (defined via DECLARE_SOAP_FAULT below)
	static const _faultcode s_faultCodes[];
public:
	// members
	SOAP_ERROR_CODE m_soapErrCode;   // SOAP_E_UNK until a code is set
	CStringW m_strFaultCode;         // raw code text for non-standard codes
	CStringW m_strFaultString;
	CStringW m_strFaultActor;
	CStringW m_strDetail;
	CSoapFault()
		: m_soapErrCode(SOAP_E_UNK)
	{
	}
	// Parses a faultcode QName (e.g. "SOAP-ENV:Client"). The QName's
	// prefix must equal wszSoapPrefix (the prefix bound to the SOAP
	// envelope namespace). Recognized local names set m_soapErrCode and,
	// when bSetFaultString is true, the matching default fault string.
	// cch / cchSoapPrefix of -1 mean "compute with wcslen".
	// Returns E_INVALIDARG, E_FAIL (not a QName / wrong prefix),
	// E_OUTOFMEMORY, or S_OK.
	HRESULT SetErrorCode(
		const wchar_t *wsz,
		const wchar_t *wszSoapPrefix,
		int cch = -1,
		int cchSoapPrefix = -1,
		bool bSetFaultString = true)
	{
		if ((wsz == NULL) || (wszSoapPrefix == NULL))
		{
			return E_INVALIDARG;
		}
		if (cch == -1)
		{
			cch = (int) wcslen(wsz);
		}
		// skip leading whitespace, keeping cch in sync
		while (*wsz && iswspace(*wsz))
		{
			++wsz;
			--cch;
		}
		if (cchSoapPrefix == -1)
		{
			cchSoapPrefix = (int) wcslen(wszSoapPrefix);
		}
		const wchar_t *wszLocalName = wcschr(wsz, L':');
		if (wszLocalName == NULL)
		{
			// faultCode must be QName
			ATLTRACE( _T("ATLSOAP: CSoapFault::SetErrorCode -- faultCode is not a QName.\r\n" ) );
			return E_FAIL;
		}
		// make sure the namespace of the fault is the
		// SOAPENV namespace
		if ((cchSoapPrefix != (int)(wszLocalName-wsz)) ||
			(wcsncmp(wsz, wszSoapPrefix, cchSoapPrefix)))
		{
			ATLTRACE( _T("ATLSOAP: CSoapFault::SetErrorCode -- fault namespace is incorrect.\r\n" ) );
			return E_FAIL;
		}
		// step past the ':' and reduce cch to the local-name length
		wszLocalName++;
		cch -= (int) (wszLocalName-wsz);
		_ATLTRY
		{
			for (int i=0; i<ATLS_SOAPFAULT_CNT; i++)
			{
				if ((cch == s_faultCodes[i].cch) &&
					(!wcsncmp(wszLocalName, s_faultCodes[i].wsz, cch)))
				{
					m_soapErrCode = s_faultCodes[i].errCode;
					// NOTE(review): when bSetFaultString is false there is
					// no break here, and m_strFaultCode is still populated
					// below because m_strFaultString stays empty -- this
					// appears intentional (parser path stores the raw
					// code) but TODO confirm.
					if (bSetFaultString != false)
					{
						m_strFaultString.SetString(s_faultCodes[i].wszFaultString, s_faultCodes[i].cchFaultString);
						break;
					}
				}
			}
			if (m_strFaultString.GetLength() == 0)
			{
				// unrecognized (or bSetFaultString==false): keep raw name
				m_strFaultCode.SetString(wszLocalName, cch);
			}
		}
		_ATLCATCHALL()
		{
			ATLTRACE( _T("ATLSOAP: CSoapFault::SetErrorCode -- out of memory.\r\n" ) );
			return E_OUTOFMEMORY;
		}
		return S_OK;
	}
	// Parses a SOAP fault from pStream into this object's members using
	// CSoapFaultParser. An existing SAX reader may be supplied via
	// pReader; otherwise one is CoCreated.
	HRESULT ParseFault(IStream *pStream, ISAXXMLReader *pReader = NULL)
	{
		if (pStream == NULL)
		{
			ATLTRACE( _T("ATLSOAP: CSoapFault::ParseFault -- NULL IStream was passed.\r\n" ) );
			return E_INVALIDARG;
		}
		CComPtr<ISAXXMLReader> spReader;
		if (pReader != NULL)
		{
			spReader = pReader;
		}
		else
		{
			if (FAILED(spReader.CoCreateInstance(ATLS_SAXXMLREADER_CLSID, NULL, CLSCTX_INPROC_SERVER)))
			{
				ATLTRACE( _T("ATLSOAP: CSoapFault::ParseFault -- CoCreateInstance of SAXXMLReader failed.\r\n" ) );
				return E_FAIL;
			}
		}
		// reset any previously-parsed fault data
		Clear();
		CSoapFaultParser parser(const_cast<CSoapFault *>(this), spReader);
		spReader->putContentHandler(&parser);
		CComVariant varStream;
		varStream = static_cast<IUnknown*>(pStream);
		HRESULT hr = spReader->parse(varStream);
		// detach the stack-local handler regardless of the parse result
		spReader->putContentHandler(NULL);
		return hr;
	}
	// Serializes this fault as a complete SOAP envelope to pWriteStream.
	// m_soapErrCode must have been set (or m_strFaultCode for custom codes).
	HRESULT GenerateFault(IWriteStream *pWriteStream)
	{
		if ((pWriteStream == NULL) || (m_soapErrCode == SOAP_E_UNK))
		{
			return E_INVALIDARG;
		}
		// SOAP_E_UNK is already rejected above; the assert documents the
		// full set of expected codes
		ATLASSERT( (m_soapErrCode == SOAP_E_UNK) ||
			(m_soapErrCode == SOAP_E_VERSION_MISMATCH) ||
			(m_soapErrCode == SOAP_E_MUST_UNDERSTAND) ||
			(m_soapErrCode == SOAP_E_CLIENT) ||
			(m_soapErrCode == SOAP_E_SERVER) );
		HRESULT hr = S_OK;
		_ATLTRY
		{
			const wchar_t *wszFaultCode = NULL;
			if (m_strFaultCode.GetLength() == 0)
			{
				// look up the standard code text (the 4 s_faultCodes entries)
				for (int i=0; i<4; i++)
				{
					if (s_faultCodes[i].errCode == m_soapErrCode)
					{
						if (m_strFaultString.GetLength() == 0)
						{
							m_strFaultString.SetString(s_faultCodes[i].wszFaultString,
								s_faultCodes[i].cchFaultString);
						}
						wszFaultCode = s_faultCodes[i].wsz;
						break;
					}
				}
			}
			if (wszFaultCode == NULL)
			{
				if (m_strFaultCode.GetLength() != 0)
				{
					wszFaultCode = m_strFaultCode;
				}
				else
				{
					ATLTRACE( _T("CSoapFault::GenerateFault -- missing/invalid fault code.\r\n") );
					return E_FAIL;
				}
			}
			const LPCSTR s_szErrorFormat =
				"<SOAP:Envelope xmlns:SOAP=\"" SOAPENV_NAMESPACEA "\">"
				"<SOAP:Body>"
				"<SOAP:Fault>"
				"<faultcode>SOAP:%ws</faultcode>"
				"<faultstring>%ws</faultstring>"
				"%s%ws%s"
				"<detail>%ws</detail>"
				"</SOAP:Fault>"
				"</SOAP:Body>"
				"</SOAP:Envelope>";
			CStringA strFault;
			// the <faultactor> wrapper is emitted only when an actor is set
			strFault.Format(s_szErrorFormat, wszFaultCode, m_strFaultString,
				m_strFaultActor.GetLength() ? "<faultactor>" : "", m_strFaultActor,
				m_strFaultActor.GetLength() ? "</faultactor>" : "",
				m_strDetail);
			hr = pWriteStream->WriteStream(strFault, strFault.GetLength(), NULL);
		}
		_ATLCATCHALL()
		{
			ATLTRACE( _T("ATLSOAP: CSoapFault::GenerateFault -- out of memory.\r\n" ) );
			hr = E_OUTOFMEMORY;
		}
		return hr;
	}
	// Resets all members to their freshly-constructed state.
	void Clear()
	{
		m_soapErrCode = SOAP_E_UNK;
		m_strFaultCode.Empty();
		m_strFaultString.Empty();
		m_strFaultActor.Empty();
		m_strDetail.Empty();
	}
}; // class CSoapFault
// Builds one CSoapFault::_faultcode entry from narrow string literals.
// Both length fields must exclude the NUL terminator: the previous version
// stored sizeof(__faultstring) (terminator included) for cchFaultString,
// which made m_strFaultString carry a trailing embedded L'\0' character.
#define DECLARE_SOAP_FAULT(__name, __faultstring, __errcode) \
{ L ## __name, sizeof(__name)-1, L ## __faultstring, sizeof(__faultstring)-1, __errcode },
// Table of the standard SOAP 1.1 fault codes and their default fault strings.
__declspec(selectany) const CSoapFault::_faultcode CSoapFault::s_faultCodes[] =
{
	DECLARE_SOAP_FAULT("VersionMismatch", "SOAP Version Mismatch Error", SOAP_E_VERSION_MISMATCH)
	DECLARE_SOAP_FAULT("MustUnderstand", "SOAP Must Understand Error", SOAP_E_MUST_UNDERSTAND)
	DECLARE_SOAP_FAULT("Client", "SOAP Invalid Request", SOAP_E_CLIENT)
	DECLARE_SOAP_FAULT("Server", "SOAP Server Application Faulted", SOAP_E_SERVER)
};
// SAX character-data callback: routes the current element's text into the
// matching CSoapFault member. Only the first characters callback per
// element is consumed; STATE_RESET suppresses the rest.
ATL_NOINLINE inline HRESULT __stdcall CSoapFaultParser::characters(
	const wchar_t * wszChars,
	int cchChars)
{
	if (m_pFault == NULL)
	{
		return E_INVALIDARG;
	}
	// text for this element was already consumed
	if (m_dwState & STATE_RESET)
	{
		return S_OK;
	}
	HRESULT hrResult = E_FAIL;
	_ATLTRY
	{
		if (m_dwState == STATE_FAULTCODE)
		{
			// only the first faultcode wins
			if (m_pFault->m_soapErrCode == SOAP_E_UNK)
			{
				hrResult = m_pFault->SetErrorCode(wszChars, m_wszSoapPrefix,
					cchChars, m_cchSoapPrefix, false);
			}
		}
		else if (m_dwState == STATE_FAULTSTRING)
		{
			if (m_pFault->m_strFaultString.GetLength() == 0)
			{
				m_pFault->m_strFaultString.SetString(wszChars, cchChars);
				hrResult = S_OK;
			}
		}
		else if (m_dwState == STATE_FAULTACTOR)
		{
			if (m_pFault->m_strFaultActor.GetLength() == 0)
			{
				m_pFault->m_strFaultActor.SetString(wszChars, cchChars);
				hrResult = S_OK;
			}
		}
		else if (m_dwState == STATE_DETAIL)
		{
			if (m_pFault->m_strDetail.GetLength() == 0)
			{
				m_pFault->m_strDetail.SetString(wszChars, cchChars);
				hrResult = S_OK;
			}
		}
		else if ((m_dwState == STATE_START) || (m_dwState == STATE_ENVELOPE) ||
			(m_dwState == STATE_BODY) || (m_dwState == STATE_SKIP))
		{
			// ignorable whitespace/text between structural elements
			hrResult = S_OK;
		}
		else
		{
			// should never get here
			ATLASSERT( FALSE );
		}
	}
	_ATLCATCHALL()
	{
		ATLTRACE( _T("ATLSOAP: CSoapFaultParser::characters -- out of memory.\r\n" ) );
		hrResult = E_OUTOFMEMORY;
	}
	// mark this element's text as consumed
	m_dwState |= STATE_RESET;
	return hrResult;
}
////////////////////////////////////////////////////////////////////////////////
//
// CSoapRootHandler - the class that does most of the work
//
////////////////////////////////////////////////////////////////////////////////
#ifndef ATLSOAP_STACKSIZE
// 16 will be plenty for the 99% case
#define ATLSOAP_STACKSIZE 16
#endif
#ifndef ATLSOAP_GROWARRAY
#define ATLSOAP_GROWARRAY 10
#endif
class CSoapRootHandler : public ISAXContentHandlerImpl
{
private:
friend class _CSDLGenerator;
//
// state constants
//
const static DWORD SOAP_START = 0;
const static DWORD SOAP_ENVELOPE = 1;
const static DWORD SOAP_HEADERS = 2;
const static DWORD SOAP_BODY = 3;
const static DWORD SOAP_PARAMS = 4;
const static DWORD SOAP_CALLED = 5;
const static DWORD SOAP_RESPONSE = 6;
const static DWORD SOAP_HEADERS_DONE = 7;
//
// hash values for SOAP namespaces and elements
//
const static ULONG SOAP_ENV = 0x5D3574E2;
const static ULONG SOAP_ENC = 0xBD62724B;
const static ULONG ENVELOPE = 0xDBE6009E;
const static ULONG HEADER = 0xAF4DFFC9;
const static ULONG BODY = 0x0026168E;
//
// XSD Names
//
// One XSD type-name record: the wide and narrow spellings of the name plus
// its length, so comparisons need not recompute wcslen/strlen.
struct XSDEntry
{
	wchar_t * wszName;
	char * szName;
	int cchName;
};
const static XSDEntry s_xsdNames[];
//
// CBitVector - a dynamically sized bit vector class
//
//
// CBitVector - a dynamically sized bit vector class.
// Starts with 64 bits of inline storage (m_nBits) and switches to a
// heap-allocated, 64-bit-granular buffer when a larger index is set.
//
class CBitVector
{
private:
	// 64 bits will handle the 99% case
	unsigned __int64 m_nBits;
	// when we need to grow
	unsigned __int64 * m_pBits;
	size_t m_nSize;   // capacity in BITS
	// Returns the vector to its initial inline (64-bit, all-zero) state,
	// releasing any heap storage.
	void Reset()
	{
		if (m_pBits != &m_nBits)
		{
			free(m_pBits);
		}
		m_pBits = &m_nBits;
		m_nBits = 0;
		m_nSize = sizeof(m_nBits)*CHAR_BIT;
	}
	// Grows the storage so that bit nIndex is addressable; new bits are
	// zeroed. Returns false on allocation failure (state unchanged).
	bool Grow(size_t nIndex)
	{
		// Think carefully
		// In our current implementation, CHAR_BIT==8, and sizeof(m_nBits)==8. Easy to confuse the two.
		// We do math in bits, so this is our max size
		ATLENSURE(nIndex<SIZE_MAX/((sizeof(m_nBits)*CHAR_BIT)));
		// round up to nearest 64 bits
		size_t nAllocSizeBits = nIndex+((sizeof(m_nBits)*CHAR_BIT)-(nIndex%(sizeof(m_nBits)*CHAR_BIT)));
		size_t nAllocSizeBytes = nAllocSizeBits/CHAR_BIT;
		if (m_pBits != &m_nBits)
		{
			unsigned __int64 * pNewBits=NULL;
			pNewBits = (unsigned __int64 *) realloc(m_pBits, nAllocSizeBytes );
			if(!pNewBits)
			{
				return false;
			}
			m_pBits=pNewBits;
		}
		else
		{
			// first growth: move the inline bits into the heap block
			m_pBits = (unsigned __int64 *) malloc(nAllocSizeBytes );
			if (m_pBits != NULL)
			{
				Checked::memcpy_s(m_pBits, nAllocSizeBytes, &m_nBits, sizeof(m_nBits));
			}
		}
		if (m_pBits != NULL)
		{
			// set new bits to 0
			memset(m_pBits+(m_nSize/(CHAR_BIT*sizeof(m_nBits))), 0x00, (nAllocSizeBits-m_nSize)/CHAR_BIT);
			m_nSize = nAllocSizeBits;
			return true;
		}
		// malloc failed: restore the inline buffer so the object stays valid
		m_pBits = &m_nBits;
		ATLTRACE( _T("ATLSOAP: CBitVector::Grow -- out of memory.\r\n" ) );
		return false;
	}
public:
	CBitVector()
		: m_nBits(0), m_nSize(sizeof(m_nBits)*CHAR_BIT)
	{
		m_pBits = &m_nBits;
	}
	// Copy construction intentionally does NOT copy the bits: every copy
	// site (see ParseState) clears the vector immediately afterwards.
	// Unlike the previous version, all members are initialized here --
	// m_nBits and m_nSize were left indeterminate before, which could make
	// GetBit/SetBit index past the inline storage.
	CBitVector(const CBitVector&)
		: m_nBits(0), m_nSize(sizeof(m_nBits)*CHAR_BIT)
	{
		m_pBits = &m_nBits;
	}
	// Assignment likewise resets to an empty inline vector. It now also
	// frees any heap block the destination owned (previously leaked) and
	// restores a consistent m_nSize.
	const CBitVector& operator=(const CBitVector& that)
	{
		if (this != &that)
		{
			Reset();
		}
		return *this;
	}
	// Returns the value of bit nIndex; out-of-range indices read as false.
	bool GetBit(size_t nIndex) const
	{
		if (nIndex >= m_nSize)
		{
			return false;
		}
		size_t i = nIndex/(sizeof(m_nBits)*CHAR_BIT);
		size_t nBits = nIndex-i*(sizeof(m_nBits)*CHAR_BIT);
		return ((m_pBits[i] >> nBits) & 0x01);
	}
	// Sets bit nIndex, growing the storage if necessary.
	// Returns false on allocation failure.
	bool SetBit(size_t nIndex)
	{
		if (nIndex >= m_nSize)
		{
			if (!Grow(nIndex))
			{
				return false;
			}
		}
		size_t i = nIndex/(sizeof(m_nBits)*CHAR_BIT);
		size_t nBits = nIndex-i*(sizeof(m_nBits)*CHAR_BIT);
		m_pBits[i] |= (((unsigned __int64) 1) << nBits);
		return true;
	}
	// Zeroes all bits without releasing grown storage.
	void Clear()
	{
		if (m_pBits == &m_nBits)
		{
			m_nBits = 0;
		}
		else
		{
			memset(m_pBits, 0x00, (m_nSize/CHAR_BIT));
		}
	}
	~CBitVector()
	{
		if (m_pBits != &m_nBits)
		{
			free(m_pBits);
		}
		m_pBits = &m_nBits;
		m_nSize = sizeof(m_nBits)*CHAR_BIT;
	}
	// After a memmove-style relocation (see CParseStateElementTraits) the
	// inline pointer must be re-aimed at this object's own m_nBits.
	void RelocateFixup()
	{
		if (m_nSize <= sizeof(m_nBits)*CHAR_BIT)
		{
			m_pBits = &m_nBits;
		}
	}
}; // class CBitVector
//
// Parsing State
//
// One frame of the parse stack: tracks the object (or array) currently
// being deserialized, its soap map/entry, how many child elements have
// arrived, and (via `vec`) which map entries have already been seen.
struct ParseState
{
	void *pvElement;            // object/field being filled in
	DWORD dwFlags;              // SOAPFLAG_* flags for this element
	size_t nAllocSize;          // allocated element count (growable arrays)
	size_t nExpectedElements;   // expected number of child elements
	size_t nElement;            // child elements received so far
	const _soapmap *pMap;
	const _soapmapentry *pEntry;
	// mark when we get an item
	CBitVector vec;
	size_t nDepth;              // XML depth at which this state was pushed
	ParseState(void *pvElement_ = NULL, DWORD dwFlags_ = 0,
		size_t nAllocSize_ = 0, size_t nExpectedElements_ = 0,
		size_t nElement_ = 0, const _soapmap *pMap_ = NULL,
		const _soapmapentry *pEntry_ = NULL)
		: pvElement(pvElement_), dwFlags(dwFlags_), nAllocSize(nAllocSize_),
		nExpectedElements(nExpectedElements_), nElement(nElement_), pMap(pMap_),
		pEntry(pEntry_), nDepth(0)
	{
		vec.Clear();
	}
	// Copying does NOT carry over the received-element bit vector; it is
	// always cleared in the copy.
	ParseState(const ParseState& that)
	{
		pvElement = that.pvElement;
		dwFlags = that.dwFlags;
		nAllocSize = that.nAllocSize;
		nExpectedElements = that.nExpectedElements;
		nElement = that.nElement;
		pMap = that.pMap;
		pEntry = that.pEntry;
		nDepth = that.nDepth;
		vec.Clear();
	}
	~ParseState()
	{
		pvElement = NULL;
		dwFlags = 0;
		nAllocSize = 0;
		nExpectedElements = 0;
		nElement = 0;
		pMap = NULL;
		pEntry = NULL;
		nDepth = 0;
		vec.Clear();
	}
	// Forwarded by CParseStateElementTraits after array relocation.
	void RelocateFixup()
	{
		vec.RelocateFixup();
	}
}; // struct ParseState
// Element traits for CAtlArray<ParseState>: after the default relocation,
// each ParseState's CBitVector is re-pointed at its own inline storage
// (its m_pBits would otherwise still reference the old buffer location).
class CParseStateElementTraits : public CDefaultElementTraits<ParseState>
{
public:
	// CBitVector relocate fixup
	static void RelocateElements( ParseState* pDest, ParseState* pSrc, size_t nElements )
	{
		CDefaultElementTraits<ParseState>::RelocateElements(pDest, pSrc, nElements);
		// fixup CBitVector
		for (size_t i=0; i<nElements; i++)
		{
			pDest[i].RelocateFixup();
		}
	}
};
// Abstract base for SOAP response writers. Provides the envelope/header/
// body framing shared by all encodings; derived classes decide how maps
// (functions/headers/structs) and their entries are opened and closed.
class CResponseGenerator
{
public:
	// Writes the opening <soap:Envelope> tag with the standard soap, xsi,
	// xsd and soapenc namespace declarations.
	HRESULT StartEnvelope(IWriteStream *pStream)
	{
		ATLENSURE_RETURN( pStream != NULL );
		return pStream->WriteStream("<soap:Envelope "
			"xmlns:soap=\"" SOAPENV_NAMESPACEA "\" "
			"xmlns:xsi=\"" XSI_NAMESPACEA "\" "
			"xmlns:xsd=\"" XSD_NAMESPACEA "\" "
			"xmlns:soapenc=\"" SOAPENC_NAMESPACEA "\">",
			sizeof("<soap:Envelope "
				"xmlns:soap=\"" SOAPENV_NAMESPACEA "\" "
				"xmlns:xsi=\"" XSI_NAMESPACEA "\" "
				"xmlns:xsd=\"" XSD_NAMESPACEA "\" "
				"xmlns:soapenc=\"" SOAPENC_NAMESPACEA "\">")-1,
			NULL);
	}
	// Writes the opening <soap:Header> tag; for document/literal maps the
	// map's namespace is emitted as the default namespace.
	HRESULT StartHeaders(IWriteStream *pStream, const _soapmap *pMap)
	{
		ATLENSURE_RETURN( pStream != NULL );
		ATLENSURE_RETURN( pMap != NULL );
		HRESULT hr = pStream->WriteStream("<soap:Header", sizeof("<soap:Header")-1, NULL);
		if (SUCCEEDED(hr))
		{
			if ((pMap->dwCallFlags & (SOAPFLAG_RPC | SOAPFLAG_ENCODED)) !=
				(SOAPFLAG_RPC | SOAPFLAG_ENCODED))
			{
				// qualify document/literal by default
				// For this version, ATL Server will not respect
				// the elementForm* attributes in an XSD schema
				hr = pStream->WriteStream(" xmlns=\"", sizeof(" xmlns=\"")-1, NULL);
				if (SUCCEEDED(hr))
				{
					hr = pStream->WriteStream(pMap->szNamespace, pMap->cchNamespace, NULL);
					if (SUCCEEDED(hr))
					{
						hr = pStream->WriteStream("\">", sizeof("\">")-1, NULL);
					}
				}
			}
			else
			{
				// rpc/encoded
				hr = pStream->WriteStream(">", sizeof(">")-1, NULL);
			}
		}
		return hr;
	}
	HRESULT EndHeaders(IWriteStream *pStream)
	{
		ATLENSURE_RETURN( pStream != NULL );
		return pStream->WriteStream("</soap:Header>", sizeof("</soap:Header>")-1, NULL);
	}
	// Virtual so rpc/encoded can add the encodingStyle attribute.
	virtual HRESULT StartBody(IWriteStream *pStream)
	{
		ATLENSURE_RETURN( pStream != NULL );
		return pStream->WriteStream(
			"<soap:Body>", sizeof("<soap:Body>")-1, NULL);
	}
	HRESULT EndBody(IWriteStream *pStream)
	{
		ATLENSURE_RETURN( pStream != NULL );
		return pStream->WriteStream("</soap:Body>", sizeof("</soap:Body>")-1, NULL);
	}
	HRESULT EndEnvelope(IWriteStream *pStream)
	{
		ATLENSURE_RETURN( pStream != NULL );
		return pStream->WriteStream("</soap:Envelope>", sizeof("</soap:Envelope>")-1, NULL);
	}
	virtual HRESULT StartMap(IWriteStream *pStream, const _soapmap *pMap, bool bClient) = 0;
	virtual HRESULT EndMap(IWriteStream *pStream, const _soapmap *pMap, bool bClient) = 0;
	// Writes the opening tag for a map entry, prefixing it with "snp:" (and
	// declaring that namespace) for namespaced header entries, and adding
	// soap:mustUnderstand when the entry requires it.
	virtual HRESULT StartEntry(IWriteStream *pStream, const _soapmap *pMap, const _soapmapentry *pEntry)
	{
		ATLENSURE_RETURN( pStream != NULL );
		ATLENSURE_RETURN( pEntry != NULL );
		// output name
		HRESULT hr = pStream->WriteStream("<", 1, NULL);
		if (SUCCEEDED(hr))
		{
			const char *szHeaderNamespace = NULL;
			int cchHeaderNamespace = 0;
			// NOTE(review): parentheses make the historical operator
			// precedence explicit -- the header-map checks guard only the
			// pChain namespace; an entry-level szNamespace always triggers
			// prefixing. The original indentation suggests the header check
			// may have been intended to guard both clauses -- TODO confirm.
			if (((pMap != NULL) && (pMap->mapType == SOAPMAP_HEADER) &&
				 (pEntry->pChain != NULL) &&
				 (pEntry->pChain->szNamespace != NULL)) ||
				(pEntry->szNamespace != NULL))
			{
				hr = pStream->WriteStream("snp:", sizeof("snp:")-1, NULL);
				if (SUCCEEDED(hr))
				{
					// the chain's namespace takes precedence when present
					szHeaderNamespace = pEntry->pChain ?
						pEntry->pChain->szNamespace : pEntry->szNamespace;
					cchHeaderNamespace = pEntry->pChain ?
						pEntry->pChain->cchNamespace : pEntry->cchNamespace;
				}
			}
			if (SUCCEEDED(hr))
			{
				if ((pEntry->dwFlags & SOAPFLAG_RETVAL)==0)
				{
					hr = pStream->WriteStream(pEntry->szField, pEntry->cchField, NULL);
				}
				else
				{
					// return values are conventionally named "return"
					hr = pStream->WriteStream("return", sizeof("return")-1, NULL);
				}
				if (SUCCEEDED(hr))
				{
					if (szHeaderNamespace != NULL)
					{
						ATLASSERT( cchHeaderNamespace != 0 );
						hr = pStream->WriteStream(" xmlns:snp=\"", sizeof(" xmlns:snp=\"")-1, NULL);
						if (SUCCEEDED(hr))
						{
							hr = pStream->WriteStream(szHeaderNamespace, cchHeaderNamespace, NULL);
							if (SUCCEEDED(hr))
							{
								hr = pStream->WriteStream("\"", sizeof("\"")-1, NULL);
							}
						}
					}
				}
			}
		}
		if (SUCCEEDED(hr))
		{
			if (pEntry->dwFlags & SOAPFLAG_MUSTUNDERSTAND)
			{
				// output mustUnderstand
				hr = pStream->WriteStream(" soap:mustUnderstand=\"1\"", sizeof(" soap:mustUnderstand=\"1\"")-1, NULL);
			}
		}
		return hr;
	}
	// Writes the closing tag for a map entry (mirrors StartEntry's
	// namespace-prefix rule).
	HRESULT EndEntry(IWriteStream *pStream, const _soapmap *pMap, const _soapmapentry *pEntry)
	{
		ATLENSURE_RETURN( pStream != NULL );
		ATLENSURE_RETURN( pEntry != NULL );
		HRESULT hr = pStream->WriteStream("</", 2, NULL);
		if (SUCCEEDED(hr))
		{
			// same namespace-prefix rule as StartEntry (see note there)
			if (((pMap != NULL) &&
				 (pMap->mapType == SOAPMAP_HEADER) &&
				 (pEntry->pChain != NULL) &&
				 (pEntry->pChain->szNamespace != NULL)) ||
				(pEntry->szNamespace != NULL))
			{
				hr = pStream->WriteStream("snp:", sizeof("snp:")-1, NULL);
			}
			// previously a failed "snp:" write was silently overwritten by
			// the next WriteStream result; check it before continuing
			if (SUCCEEDED(hr))
			{
				if ((pEntry->dwFlags & SOAPFLAG_RETVAL)==0)
				{
					hr = pStream->WriteStream(pEntry->szField, pEntry->cchField, NULL);
				}
				else
				{
					hr = pStream->WriteStream("return", sizeof("return")-1, NULL);
				}
				if (SUCCEEDED(hr))
				{
					hr = pStream->WriteStream(">", 1, NULL);
				}
			}
		}
		return hr;
	}
}; // class CResponseGenerator
// Response generator for document/literal messages: maps are emitted in the
// map's own (default) namespace; server-side function responses get the
// conventional "Response" suffix.
class CDocLiteralGenerator : public CResponseGenerator
{
public:
	// Emits the opening tag for a map (type or function wrapper).
	HRESULT StartMap(IWriteStream *pStream, const _soapmap *pMap, bool bClient)
	{
		ATLENSURE_RETURN( pStream != NULL );
		ATLENSURE_RETURN( pMap != NULL );
		// output type name
		HRESULT hrWrite = pStream->WriteStream("<", 1, NULL);
		if (FAILED(hrWrite))
		{
			return hrWrite;
		}
		hrWrite = pStream->WriteStream(pMap->szName, pMap->cchName, NULL);
		if (FAILED(hrWrite))
		{
			return hrWrite;
		}
		if ((pMap->mapType == SOAPMAP_FUNC) &&
			(bClient == false) &&
			(pMap->dwCallFlags & SOAPFLAG_PID))
		{
			// server-side responses carry the "Response" suffix
			hrWrite = pStream->WriteStream("Response", sizeof("Response")-1, NULL);
			if (FAILED(hrWrite))
			{
				return hrWrite;
			}
		}
		if (pMap->mapType != SOAPMAP_FUNC)
		{
			return pStream->WriteStream(">", 1, NULL);
		}
		// function wrappers declare the map's namespace as the default one
		hrWrite = pStream->WriteStream(" xmlns=\"", sizeof(" xmlns=\"")-1, NULL);
		if (FAILED(hrWrite))
		{
			return hrWrite;
		}
		hrWrite = pStream->WriteStream(pMap->szNamespace, pMap->cchNamespace, NULL);
		if (FAILED(hrWrite))
		{
			return hrWrite;
		}
		return pStream->WriteStream("\">", sizeof("\">")-1, NULL);
	}
	// Emits the matching closing tag for a map.
	HRESULT EndMap(IWriteStream *pStream, const _soapmap *pMap, bool bClient)
	{
		ATLENSURE_RETURN( pStream != NULL );
		ATLENSURE_RETURN( pMap != NULL );
		HRESULT hrWrite = pStream->WriteStream("</", sizeof("</")-1, NULL);
		if (FAILED(hrWrite))
		{
			return hrWrite;
		}
		hrWrite = pStream->WriteStream(pMap->szName, pMap->cchName, NULL);
		if (FAILED(hrWrite))
		{
			return hrWrite;
		}
		if ((pMap->mapType == SOAPMAP_FUNC) &&
			(bClient == false) &&
			(pMap->dwCallFlags & SOAPFLAG_PID))
		{
			// mirror the "Response" suffix written by StartMap
			hrWrite = pStream->WriteStream("Response", sizeof("Response")-1, NULL);
			if (FAILED(hrWrite))
			{
				return hrWrite;
			}
		}
		return pStream->WriteStream(">", 1, NULL);
	}
}; // class CDocLiteralGenerator
// PID (parameters-in-document) generator: the document/literal formatting
// inherited from CDocLiteralGenerator is used unchanged.
class CPIDGenerator : public CDocLiteralGenerator
{
};
// PAD (parameters-as-document) generator: like document/literal, but each
// entry of a PAD map additionally carries the map's namespace as an xmlns
// attribute on the entry element itself.
class CPADGenerator : public CDocLiteralGenerator
{
public:
	// Writes the entry opening via the base class, then appends
	// xmlns="<map namespace>" when the map is flagged SOAPFLAG_PAD.
	virtual HRESULT StartEntry(IWriteStream *pStream, const _soapmap *pMap, const _soapmapentry *pEntry)
	{
		ATLENSURE_RETURN( pStream != NULL );
		ATLENSURE_RETURN( pEntry != NULL );
		HRESULT hr = __super::StartEntry(pStream, pMap, pEntry);
		if (SUCCEEDED(hr) && (pMap->dwCallFlags & SOAPFLAG_PAD))
		{
			hr = pStream->WriteStream(" xmlns=\"", sizeof(" xmlns=\"")-1, NULL);
			if (SUCCEEDED(hr))
			{
				hr = pStream->WriteStream(pMap->szNamespace, pMap->cchNamespace, NULL);
				if (SUCCEEDED(hr))
				{
					hr = pStream->WriteStream("\"", sizeof("\"")-1, NULL);
				}
			}
		}
		return hr;
	}
}; // class CPADGenerator
// Response generator for rpc/encoded messages: the body declares the SOAP
// encoding style and every map element is written in the "snp" namespace.
class CRpcEncodedGenerator : public CResponseGenerator
{
public:
	HRESULT StartBody(IWriteStream *pStream)
	{
		ATLENSURE_RETURN( pStream != NULL );
		// rpc/encoded bodies declare the encoding style up front
		return pStream->WriteStream(
			"<soap:Body soap:encodingStyle=\"" SOAPENC_NAMESPACEA "\">",
			sizeof("<soap:Body soap:encodingStyle=\"" SOAPENC_NAMESPACEA "\">")-1, NULL);
	}
	HRESULT StartMap(IWriteStream *pStream, const _soapmap *pMap, bool bClient)
	{
		ATLENSURE_RETURN( pStream != NULL );
		ATLENSURE_RETURN( pMap != NULL );
		(bClient); // unused for rpc/encoded
		HRESULT hrWrite = pStream->WriteStream("<snp:", sizeof("<snp:")-1, NULL);
		if (FAILED(hrWrite))
		{
			return hrWrite;
		}
		hrWrite = pStream->WriteStream(pMap->szName, pMap->cchName, NULL);
		if (FAILED(hrWrite))
		{
			return hrWrite;
		}
		if (pMap->mapType != SOAPMAP_FUNC)
		{
			return pStream->WriteStream(">", 1, NULL);
		}
		// function maps declare the snp namespace on the element itself
		hrWrite = pStream->WriteStream(" xmlns:snp=\"", sizeof(" xmlns:snp=\"")-1, NULL);
		if (FAILED(hrWrite))
		{
			return hrWrite;
		}
		ATLASSERT( pMap->szNamespace != NULL );
		hrWrite = pStream->WriteStream(pMap->szNamespace, pMap->cchNamespace, NULL);
		if (FAILED(hrWrite))
		{
			return hrWrite;
		}
		return pStream->WriteStream("\">", sizeof("\">")-1, NULL);
	}
	HRESULT EndMap(IWriteStream *pStream, const _soapmap *pMap, bool bClient)
	{
		ATLENSURE_RETURN( pStream != NULL );
		ATLENSURE_RETURN( pMap != NULL );
		(bClient); // unused for rpc/encoded
		HRESULT hrWrite = pStream->WriteStream("</snp:", sizeof("</snp:")-1, NULL);
		if (SUCCEEDED(hrWrite))
		{
			hrWrite = pStream->WriteStream(pMap->szName, pMap->cchName, NULL);
		}
		if (SUCCEEDED(hrWrite))
		{
			hrWrite = pStream->WriteStream(">", 1, NULL);
		}
		return hrWrite;
	}
}; // class CRpcEncodedGenerator
//
// members
//
CAtlArray<ParseState, CParseStateElementTraits> m_stateStack; // parse-state stack; m_nState indexes the top
size_t m_nState;                     // index of the current (top) ParseState
DWORD m_dwState;                     // current SOAP_* parsing phase (constants above)
CComPtr<ISAXXMLReader> m_spReader;   // SAX reader driving the parse
CSAXStringBuilder m_stringBuilder;   // accumulates character data (see ProcessString)
CSkipHandler m_skipHandler;          // consumes ignorable sub-trees (see CheckMustUnderstandHeader)
IAtlMemMgr * m_pMemMgr;              // allocator used for array storage (see AllocateArray)
static CCRTHeap m_crtHeap;           // default CRT-heap backing for m_pMemMgr
bool m_bClient;                      // selects SOAPFLAG_OUT vs SOAPFLAG_IN entries (see GetElementEntry)
void *m_pvParam;                     // root parameter block being populated
bool m_bNullCheck;                   // next element expected to be null/empty (see GetSection5Info)
bool m_bChildCheck;                  // next element must contain child elements
bool m_bCharacters;                  // character data expected for the current element
size_t m_nDepth;                     // current XML nesting depth (recorded into each ParseState)
typedef CFixedStringT<CStringW, 16> REFSTRING;
// used for rpc/encoded messages with href's
typedef CAtlMap<REFSTRING, ParseState, CStringRefElementTraits<REFSTRING> > REFMAP;
REFMAP m_refMap;                     // outstanding id -> deferred ParseState references
//
// Implementation helpers
//
// Pushes a new ParseState onto the parse stack, makes it current, and
// records the current XML depth in it. Returns S_OK or E_OUTOFMEMORY.
HRESULT PushState(void *pvElement = NULL, const _soapmap *pMap = NULL,
	const _soapmapentry *pEntry = NULL, DWORD dwFlags = 0, size_t nAllocSize = 0,
	size_t nExpectedElements = 0, size_t nElement = 0)
{
	if (m_stateStack.IsEmpty())
	{
		// pre-size the stack; 16 covers the common case without regrowth
		if (!m_stateStack.SetCount(0, 16))
		{
			ATLTRACE( _T("ATLSOAP: CSoapRootHandler::PushState -- out of memory.\r\n" ) );
			return E_OUTOFMEMORY;
		}
	}
	const size_t nPrevCount = m_stateStack.GetCount();
	m_nState = m_stateStack.Add();
	// Add() failed if the count did not grow
	if (m_stateStack.GetCount() <= nPrevCount)
	{
		ATLTRACE( _T("ATLSOAP: CSoapRootHandler::PushState -- out of memory.\r\n" ) );
		return E_OUTOFMEMORY;
	}
	ParseState &newState = m_stateStack[m_nState];
	newState.pvElement = pvElement;
	newState.dwFlags = dwFlags;
	newState.nAllocSize = nAllocSize;
	newState.nExpectedElements = nExpectedElements;
	newState.nElement = nElement;
	newState.pMap = pMap;
	newState.pEntry = pEntry;
	newState.nDepth = m_nDepth;
	return S_OK;
}
// Returns the current (top-of-stack) parse state.
ParseState& GetState()
{
	return m_stateStack[m_nState];
}
// Removes the current parse state and makes its parent current. With
// bForce the state at index 0 can also be removed; note that --m_nState
// then wraps to (size_t)-1 -- presumably callers push a new state (or
// finish parsing) before the index is used again. TODO confirm the
// bForce/index-0 contract at the call sites.
void PopState(bool bForce = false)
{
	if ((m_nState != 0) || (bForce != false))
	{
		m_stateStack.RemoveAt(m_nState);
		--m_nState;
	}
}
// Compares a (local name, namespace URI) pair against the expected pair.
// Lengths are compared first (cheap), then the string contents.
BOOL IsEqualElement(int cchLocalNameCheck, const wchar_t *wszLocalNameCheck,
	int cchNamespaceUriCheck, const wchar_t *wszNamespaceUriCheck,
	int cchLocalName, const wchar_t *wszLocalName,
	int cchNamespaceUri, const wchar_t *wszNamespaceUri)
{
	ATLENSURE(wszLocalName);
	ATLENSURE(wszLocalNameCheck);
	ATLENSURE(wszNamespaceUri);
	ATLENSURE(wszNamespaceUriCheck);
	if ((cchLocalName != cchLocalNameCheck) ||
		(cchNamespaceUri != cchNamespaceUriCheck))
	{
		return FALSE;
	}
	if (wcsncmp(wszLocalName, wszLocalNameCheck, cchLocalName) != 0)
	{
		return FALSE;
	}
	return (wcsncmp(wszNamespaceUri, wszNamespaceUriCheck, cchNamespaceUri) == 0) ? TRUE : FALSE;
}
// Length-prefixed wide-string equality (the strings need not be
// NUL-terminated within the compared range).
ATL_FORCEINLINE BOOL IsEqualString(const wchar_t *wszStr1, int cchStr1, const wchar_t *wszStr2, int cchStr2)
{
	ATLENSURE( wszStr1 != NULL );
	ATLENSURE( wszStr2 != NULL );
	ATLENSURE( cchStr1 >= 0 );
	ATLENSURE( cchStr2 >= 0 );
	if (cchStr1 != cchStr2)
	{
		return FALSE;
	}
	return (wcsncmp(wszStr1, wszStr2, cchStr2) == 0) ? TRUE : FALSE;
}
// Hash-accelerated string equality: the precomputed hashes are compared
// first, and only on a hash match is the full string comparison performed.
ATL_FORCEINLINE BOOL IsEqualStringHash(const wchar_t *wszStr1, int cchStr1, ULONG nHash1,
	const wchar_t *wszStr2, int cchStr2, ULONG nHash2)
{
	ATLENSURE( wszStr1 != NULL );
	ATLENSURE( wszStr2 != NULL );
	ATLENSURE( cchStr1 >= 0 );
	ATLENSURE( cchStr2 >= 0 );
	return (nHash1 == nHash2)
		? IsEqualString(wszStr1, cchStr1, wszStr2, cchStr2)
		: FALSE;
}
// Local-name-only overload: equal when both length and contents match.
BOOL IsEqualElement(int cchLocalNameCheck, const wchar_t *wszLocalNameCheck,
	int cchLocalName, const wchar_t *wszLocalName)
{
	const bool bMatch =
		(cchLocalName == cchLocalNameCheck) &&
		(wcsncmp(wszLocalName, wszLocalNameCheck, cchLocalName) == 0);
	return bMatch ? TRUE : FALSE;
}
// Stores pointer pSrc into the pointer-sized field located nOffset bytes
// past pBase (used to patch array pointers into parameter structs).
void SetOffsetValue(void *pBase, void *pSrc, size_t nOffset)
{
	unsigned char *pFieldAddr = ((unsigned char *)pBase) + nOffset;
	*reinterpret_cast<void **>(pFieldAddr) = pSrc;
}
bool IsRpcEncoded()
{
if ((m_stateStack[0].pMap->dwCallFlags & (SOAPFLAG_RPC | SOAPFLAG_ENCODED)) ==
(SOAPFLAG_RPC | SOAPFLAG_ENCODED))
{
return true;
}
return false;
}
// Validates receipt of the next array element for the array described by
// `state`. For arrays of known size, exceeding nExpectedElements is a
// protocol error (SOAP sec. 5.4.2). For open-ended arrays
// (SOAPFLAG_UNKSIZE, doc/literal only) the backing buffer is doubled --
// at least ATLSOAP_GROWARRAY elements -- once full.
HRESULT ValidateArrayEntry(
	ParseState& state,
	const wchar_t *wszLocalName,
	int cchLocalName)
{
	// referenced only to silence unused-parameter warnings
	(cchLocalName);
	(wszLocalName);
	ATLASSERT( state.pEntry != NULL );
	// SOAP Section 5.4.2
	// check number of elements
	if (state.nElement == state.nExpectedElements)
	{
		// too many elements
		if ((state.dwFlags & SOAPFLAG_UNKSIZE)==0)
		{
			ATLTRACE( _T("ATLSOAP: CSoapRootHandler::ValidateArrayEntry -- too many elements.\r\n" ) );
			return E_FAIL;
		}
		ATLASSERT( IsRpcEncoded() == false );
		// see if we need to allocate more
		if (state.nElement == state.nAllocSize)
		{
			unsigned char **ppArr = (unsigned char **)state.pvElement;
			size_t nNewElement=0;
			HRESULT hr=E_FAIL;
			// overflow-checked doubling of the element count
			if(FAILED(hr=::ATL::AtlMultiply(&nNewElement, state.nElement, static_cast<size_t>(2))))
			{
				return hr;
			}
			hr = AllocateArray(state.pEntry, (void **)ppArr, __max(nNewElement, ATLSOAP_GROWARRAY), state.nElement);
			if (SUCCEEDED(hr))
			{
				// record the new capacity (same value as nNewElement)
				state.nAllocSize = __max((state.nElement)*2, ATLSOAP_GROWARRAY);
			}
			return hr;
		}
	}
	return S_OK;
}
// Handles an element carrying an id="..." attribute during rpc/encoded
// parsing: if the id resolves an outstanding href recorded in m_refMap,
// the deferred ParseState is resumed (array via GetSection5Info, struct
// via PushState) and the map entry is removed. Returns S_FALSE when no
// pending reference matches, S_OK on successful resumption, else an error.
HRESULT CheckID(
	const wchar_t *wszNamespaceUri,
	const wchar_t *wszLocalName,
	int cchLocalName,
	ISAXAttributes *pAttributes)
{
	// referenced only to silence unused-parameter warnings
	(cchLocalName);
	(wszLocalName);
	(wszNamespaceUri);
	ATLASSERT( pAttributes != NULL );
	const wchar_t *wsz = NULL;
	int cch = 0;
	HRESULT hr = GetAttribute(pAttributes, L"id", sizeof("id")-1, &wsz, &cch);
	if ((hr == S_OK) && (wsz != NULL))
	{
		const REFMAP::CPair *p = NULL;
		_ATLTRY
		{
			REFSTRING strRef(wsz, cch);
			p = m_refMap.Lookup(strRef);
			if (p == NULL)
			{
				// no href is waiting on this id
				return S_FALSE;
			}
		}
		_ATLCATCHALL()
		{
			ATLTRACE( _T("ATLSOAP: CSoapRootHandler::CheckID -- out of memory.\r\n" ) );
			return E_OUTOFMEMORY;
		}
		ATLASSERT( IsRpcEncoded() == true );
		const ParseState& state = p->m_value;
		// disallow href-chaining
		hr = CheckHref(state.pEntry, state.pvElement, pAttributes);
		if (hr != S_FALSE)
		{
			return E_FAIL;
		}
		hr = S_OK;
		// do array stuff
		if (state.dwFlags & (SOAPFLAG_FIXEDARR | SOAPFLAG_DYNARR))
		{
			hr = GetSection5Info(state, state.pEntry, pAttributes);
		}
		else
		{
			// only structs and arrays are allowed for hrefs
			ATLASSERT( state.pEntry->pChain != NULL );
			ATLASSERT( state.pEntry->pChain->mapType == SOAPMAP_STRUCT );
			// structs must have child entries
			m_bChildCheck = state.pEntry->pChain->nElements != 0;
			if (S_OK != PushState(state.pvElement, state.pEntry->pChain, state.pEntry,
				state.dwFlags, 0, state.pEntry->pChain->nElements))
			{
				ATLTRACE( _T("ATLSOAP: CSoapRootHandler::CheckID -- out of memory.\n" ) );
				hr = E_OUTOFMEMORY;
			}
		}
		// the reference is now resolved; remove it without rehashing
		m_refMap.DisableAutoRehash();
		m_refMap.RemoveAtPos(const_cast<REFMAP::CPair*>(p));
		m_refMap.EnableAutoRehash();
		return hr;
	}
	return S_FALSE;
}
// Resolves the incoming element to a map entry of state.pMap: first gives
// CheckID a chance to consume it as an rpc/encoded id reference, then
// scans the entries by hash + name, honoring the in/out direction flags,
// and marks the entry as received in the state's bit vector. Returns S_OK
// with *ppEntry set, S_FALSE when CheckID consumed the element, or E_FAIL
// for unknown/duplicate elements.
HRESULT GetElementEntry(
	ParseState& state,
	const wchar_t *wszNamespaceUri,
	const wchar_t *wszLocalName,
	int cchLocalName,
	ISAXAttributes *pAttributes,
	const _soapmapentry **ppEntry)
{
	ATLENSURE_RETURN( state.pMap != NULL );
	ATLENSURE_RETURN( ppEntry != NULL );
	*ppEntry = NULL;
	const _soapmapentry *pEntries = state.pMap->pEntries;
	DWORD dwIncludeFlags;
	DWORD dwExcludeFlags;
	HRESULT hr = CheckID(wszNamespaceUri, wszLocalName, cchLocalName, pAttributes);
	if (hr != S_FALSE)
	{
		// S_OK from CheckID means the element was consumed as an id
		// reference -- report S_FALSE so the caller skips entry handling
		if (hr == S_OK)
		{
			hr = S_FALSE;
		}
		return hr;
	}
	// clients consume [out] parameters; servers consume [in] parameters
	if (m_bClient != false)
	{
		dwIncludeFlags = SOAPFLAG_OUT;
		dwExcludeFlags = SOAPFLAG_IN;
	}
	else
	{
		dwIncludeFlags = SOAPFLAG_IN;
		dwExcludeFlags = SOAPFLAG_OUT;
	}
	ULONG nHash = AtlSoapHashStr(wszLocalName, cchLocalName);
	for (size_t i=0; pEntries[i].nHash != 0; i++)
	{
		if (nHash == pEntries[i].nHash &&
			((pEntries[i].dwFlags & dwIncludeFlags) ||
			((pEntries[i].dwFlags & dwExcludeFlags) == 0)) &&
			IsEqualElement(pEntries[i].cchField, pEntries[i].wszField,
				cchLocalName, wszLocalName)/* &&
			!wcscmp(wszNamespaceUri, wszNamespace)*/)
		{
			// check bit vector
			if (state.vec.GetBit(i) == false)
			{
				if (state.vec.SetBit(i) == false)
				{
					return E_OUTOFMEMORY;
				}
			}
			else
			{
				// already received this element
				ATLTRACE( _T("ATLSOAP: CSoapRootHandler::GetElementEntry -- duplicate element was sent.\r\n" ) );
				return E_FAIL;
			}
			state.nElement++;
			*ppEntry = &pEntries[i];
			return S_OK;
		}
	}
	ATLTRACE( _T("ATLSOAP: CSoapRootHandler::GetElementEntry -- element not found: %.*ws.\r\n" ), cchLocalName, wszLocalName );
	return E_FAIL;
}
// Handles an unrecognized SOAP header: if the header is not marked
// soap:mustUnderstand (or the attribute is absent/falsey) the sub-tree is
// routed to the skip handler; otherwise a mustUnderstand SOAP fault is
// raised. Always returns E_FAIL when the header must be understood.
HRESULT CheckMustUnderstandHeader(ISAXAttributes *pAttributes)
{
	ATLASSERT( pAttributes != NULL );
	const wchar_t* wszMustUnderstand;
	int cchMustUnderstand;
	bool bMustUnderstand= false;
	if (SUCCEEDED(GetAttribute(pAttributes, L"mustUnderstand", sizeof("mustUnderstand")-1,
			&wszMustUnderstand, &cchMustUnderstand,
			SOAPENV_NAMESPACEW, sizeof(SOAPENV_NAMESPACEA)-1)) &&
		(wszMustUnderstand != NULL))
	{
		// an unparseable attribute value is treated as "must understand"
		if (FAILED(AtlGetSAXValue(&bMustUnderstand, wszMustUnderstand, cchMustUnderstand)))
		{
			bMustUnderstand = true;
		}
	}
	if (bMustUnderstand == false)
	{
		// safe to ignore: hand the sub-tree to the skip handler
		ATLASSERT( GetReader() != NULL );
		m_skipHandler.SetReader(GetReader());
		m_skipHandler.SetParent(this);
		return GetReader()->putContentHandler( &m_skipHandler );
	}
	else
	{
		SoapFault(SOAP_E_MUST_UNDERSTAND, NULL, 0);
	}
	ATLTRACE( _T("ATLSOAP: CSoapRootHandler::CheckMustUnderstandHeader -- unknown \"mustUnderstand\" SOAP Header was received.\r\n" ) );
	return E_FAIL;
}
// Allocates (or grows by nElements) the array *ppArr whose element size is
// determined by pEntry -- a built-in SOAP type, or a UDT described by
// pEntry->pChain. On growth, *ppArr must already hold nCurrElements
// elements; the newly appended tail is always zero-initialized.
// Returns S_OK, E_OUTOFMEMORY, an arithmetic-overflow failure, or E_FAIL
// for an unknown element size (internal error).
HRESULT AllocateArray(
	const _soapmapentry *pEntry,
	void **ppArr, size_t nElements,
	size_t nCurrElements = 0)
{
	ATLENSURE_RETURN( ppArr != NULL );
	ATLENSURE_RETURN( pEntry != NULL );
	size_t nElementSize;
	if (pEntry->nVal != SOAPTYPE_UNK)
	{
		nElementSize = AtlSoapGetElementSize((SOAPTYPES) pEntry->nVal);
	}
	else // UDT
	{
		ATLENSURE_RETURN( pEntry->pChain != NULL );
		nElementSize = pEntry->pChain->nElementSize;
	}
	if (nElementSize == 0)
	{
		// internal error
		ATLASSERT( FALSE );
		return E_FAIL;
	}
	if (*ppArr == NULL)
	{
		// initial allocation
		ATLASSERT( nCurrElements == 0 );
		size_t nBytes=0;
		HRESULT hr=S_OK;
		// overflow-checked: nBytes = nElementSize * nElements
		if( FAILED(hr=::ATL::AtlMultiply(&nBytes, nElementSize, nElements)))
		{
			return hr;
		}
		*ppArr = m_pMemMgr->Allocate(nBytes);
		if (*ppArr == NULL)
		{
			return E_OUTOFMEMORY;
		}
	}
	else // *ppArr != NULL
	{
		// growth: extend the existing buffer by nElements elements
		ATLASSERT( nCurrElements != 0 );
		size_t nBytes=0;
		HRESULT hr=S_OK;
		// overflow-checked: nBytes = nElementSize * (nElements + nCurrElements)
		if( FAILED(hr=::ATL::AtlAdd(&nBytes, nElements, nCurrElements)) ||
			FAILED(hr=::ATL::AtlMultiply(&nBytes, nElementSize, nBytes)))
		{
			return hr;
		}
		// use a temporary so a failed Reallocate does not orphan the
		// existing buffer -- the previous code overwrote *ppArr with NULL,
		// leaking the original allocation
		void *pNewArr = m_pMemMgr->Reallocate(*ppArr, nBytes);
		if (pNewArr == NULL)
		{
			return E_OUTOFMEMORY;
		}
		*ppArr = pNewArr;
	}
	// zero-initialize only the newly appended elements
	memset(((unsigned char *)(*ppArr))+(nCurrElements*nElementSize), 0x00, nElements*nElementSize);
	return S_OK;
}
// Processes a SOAP Section-5 array element: resolves hrefs (rpc/encoded
// only), reads soapenc:arrayType for the element count, allocates dynamic
// arrays (or validates fixed-array dimensions), and pushes a ParseState
// for the array's contents.
HRESULT GetSection5Info(
	const ParseState& state,
	const _soapmapentry *pEntry,
	ISAXAttributes *pAttributes)
{
	ATLENSURE_RETURN( pEntry != NULL );
	ATLENSURE_RETURN( pAttributes != NULL );
	HRESULT hr;
	if (IsRpcEncoded() != false)
	{
		// check for href
		// we ONLY do this for rpc/encoded (required for interop)
		// NOTE: ATL Server does not support object graphs, so
		// only single-reference elements are allowed
		hr = CheckHref(pEntry, state.pvElement, pAttributes,
			pEntry->dwFlags, SOAPFLAG_READYSTATE);
		if (hr != S_FALSE)
		{
			return hr;
		}
	}
	size_t nElements;
	DWORD dwFlags = 0;
	// S_OK means soapenc:arrayType supplied a size; S_FALSE means absent
	hr = AtlSoapGetArraySize(pAttributes, &nElements);
	if (FAILED(hr))
	{
		return hr;
	}
	size_t nAllocSize = 0;
	size_t nElementsPush = 0;
	if (pEntry->dwFlags & SOAPFLAG_DYNARR)
	{
		// set size_is value
		ATLENSURE_RETURN( state.pMap != NULL );
		int *pnSizeIs = (int *)(((unsigned char *)state.pvElement)+
			(state.pMap->pEntries[pEntry->nSizeIs].nOffset));
		if (hr != S_OK)
		{
			if (IsRpcEncoded())
			{
				// rpc/encoded requires soapenc:arrayType attribute
				return E_FAIL;
			}
			// no declared size: start with a growable buffer
			nElements = ATLSOAP_GROWARRAY;
			nAllocSize = ATLSOAP_GROWARRAY;
			dwFlags |= SOAPFLAG_UNKSIZE;
			*pnSizeIs = 0;
		}
		else
		{
			*pnSizeIs = (int)nElements;
			if (nElements == 0)
			{
				// soapenc:arrayType="type[0]"
				// treat as null array
				m_bNullCheck = true;
				// push an empty state
				return PushState();
			}
			nElementsPush = nElements;
		}
		void *p = NULL;
		hr = AllocateArray(pEntry, &p, nElements);
		if (hr != S_OK)
		{
			return hr;
		}
		// store the new array pointer into the owning struct
		SetOffsetValue(state.pvElement, p, pEntry->nOffset);
	}
	else
	{
		// for fixed-size arrays, we know the number of elements
		ATLASSERT( pEntry->dwFlags & SOAPFLAG_FIXEDARR );
		if (hr == S_OK)
		{
			// a declared size must match the map's dimensions exactly
			if (nElements != AtlSoapGetArrayDims(pEntry->pDims))
			{
				return E_FAIL;
			}
		}
		else
		{
			hr = S_OK;
			nElements = AtlSoapGetArrayDims(pEntry->pDims);
		}
		nElementsPush = nElements;
	}
	dwFlags |= pEntry->dwFlags;
	// push element with array flag
	if (S_OK != PushState(((unsigned char *)state.pvElement)+pEntry->nOffset,
		state.pMap, pEntry, dwFlags & ~SOAPFLAG_READYSTATE, nAllocSize, nElementsPush))
	{
		return E_OUTOFMEMORY;
	}
	m_bChildCheck = true;
	return S_OK;
}
// Returns a pointer to the slot for the next array element described by
// `state` and advances the element counter. For open-ended dynamic arrays
// the size_is field (in the parent state's object) is kept in sync.
void * UpdateArray(ParseState& state, const _soapmapentry *pEntry)
{
	ATLENSURE(pEntry);
	size_t nSize;
	void *pVal = NULL;
	if (pEntry->nVal != SOAPTYPE_UNK)
	{
		nSize = AtlSoapGetElementSize((SOAPTYPES) pEntry->nVal);
	}
	else
	{
		ATLENSURE( pEntry->pChain != NULL );
		nSize = pEntry->pChain->nElementSize;
	}
	if (state.dwFlags & SOAPFLAG_FIXEDARR)
	{
		// fixed array: the elements are stored inline at pvElement
		unsigned char *ppArr = (unsigned char *)state.pvElement;
		pVal = ppArr+(state.nElement*nSize);
	}
	else
	{
		ATLASSERT( state.dwFlags & SOAPFLAG_DYNARR );
		// dynamic array: pvElement points at the array pointer
		unsigned char **ppArr = (unsigned char **)state.pvElement;
		pVal = (*ppArr)+(state.nElement*nSize);
		if (state.dwFlags & SOAPFLAG_UNKSIZE)
		{
			ATLASSERT( IsRpcEncoded() == false );
			// need to use the previous state's pvElement to update the size_is value
			ATLASSUME( m_nState > 0 );
			int *pnSizeIs = (int *)(((unsigned char *)m_stateStack[m_nState-1].pvElement)+
				(state.pMap->pEntries[pEntry->nSizeIs].nOffset));
			// update size_is parameter
			*pnSizeIs = (int)(state.nElement+1);
			state.nExpectedElements++;
		}
	}
	state.nElement++;
	return pVal;
}
// Prepares to receive a string-typed (or base64) value: routes subsequent
// SAX content events to the internal string-builder helper and pushes a
// ready state whose pvElement will receive the accumulated text.
// Returns E_OUTOFMEMORY if the state cannot be pushed.
HRESULT ProcessString(const _soapmapentry *pEntry, void *pVal)
{
    ATLENSURE_RETURN( pEntry != NULL );
    // hand all character data off to the string-builder content handler
    ATLASSERT( GetReader() != NULL );
    m_stringBuilder.SetReader(GetReader());
    m_stringBuilder.SetParent(this);
    m_stringBuilder.Clear();
    GetReader()->putContentHandler( &m_stringBuilder );
    const DWORD dwStateFlags = SOAPFLAG_READYSTATE | pEntry->dwFlags;
    if (PushState(pVal, NULL, pEntry, dwStateFlags) != S_OK)
    {
        return E_OUTOFMEMORY;
    }
    return S_OK;
}
// Handles a SOAP section-5 "href" attribute on the current element
// (rpc/encoded only).  If an href is present, records the referenced
// target in m_refMap so the referenced element can be filled in later,
// and pushes an empty state (the href element itself must have no
// children).  Returns S_FALSE when no href attribute is present,
// S_OK when the href was recorded, or a failure code.
// Note: only single-reference hrefs on structs and arrays are supported.
HRESULT CheckHref(
const _soapmapentry *pEntry,
void *pVal,
ISAXAttributes *pAttributes,
DWORD dwIncludeFlags = 0,
DWORD dwExcludeFlags = 0)
{
ATLASSERT( pEntry != NULL );
ATLASSERT( pVal != NULL );
ATLASSERT( pAttributes != NULL );
const wchar_t *wsz = NULL;
int cch = 0;
HRESULT hr = GetAttribute(pAttributes, L"href", sizeof("href")-1, &wsz, &cch);
if ((hr == S_OK) && (wsz != NULL))
{
// only allow hrefs on structs and arrays
if (((pEntry->dwFlags & (SOAPFLAG_FIXEDARR | SOAPFLAG_DYNARR))==0) &&
(pEntry->pChain == NULL || pEntry->pChain->mapType != SOAPMAP_STRUCT))
{
ATLTRACE( _T("ATL Server only allows href's on arrays and structs.\r\n") );
return E_FAIL;
}
ATLASSERT( IsRpcEncoded() == true );
_ATLTRY
{
// strip the leading '#' fragment marker, if present
if (*wsz == L'#')
{
wsz++;
cch--;
}
REFSTRING strRef(wsz, cch);
if (m_refMap.Lookup(strRef) != NULL)
{
// ATL Server does not support multi-reference objects
ATLASSERT( FALSE );
return E_FAIL;
}
ParseState& currState = GetState();
if ((currState.pEntry != NULL) && (currState.pEntry->dwFlags & (SOAPFLAG_FIXEDARR | SOAPFLAG_DYNARR)))
{
// it is an array item
ATLASSERT( currState.nElement != 0 );
// exclude array flags for href'd array elements
dwExcludeFlags |= SOAPFLAG_FIXEDARR | SOAPFLAG_DYNARR;
}
// snapshot the state needed to deserialize the target later, when
// the element carrying the matching id is encountered
ParseState state;
state.pvElement = pVal;
state.dwFlags = (pEntry->dwFlags | dwIncludeFlags) & ~dwExcludeFlags;
state.nExpectedElements = 0;
state.nElement = 0;
state.pMap = GetState().pMap;
state.pEntry = pEntry;
if (!m_refMap.SetAt(strRef, state))
{
ATLTRACE( _T("ATLSOAP: CSoapRootHandler::CheckHref -- out of memory.\r\n" ) );
return E_OUTOFMEMORY;
}
// make sure there are no child elements
m_bNullCheck = true;
// push an empty state
return PushState();
}
_ATLCATCHALL()
{
// REFSTRING construction can throw on allocation failure
ATLTRACE( _T("ATLSOAP: CSoapRootHandler::CheckHref -- out of memory.\r\n" ) );
return E_OUTOFMEMORY;
}
}
return S_FALSE;
}
// Begins parsing a user-defined type (struct or enum) field: pushes a new
// parse state for the UDT described by pEntry->pChain.  Structs expect
// child elements; enums expect character data only.
HRESULT ProcessUDT(
const _soapmapentry *pEntry,
void *pVal)
{
ATLENSURE_RETURN( pEntry != NULL );
ATLENSURE_RETURN( pVal != NULL );
ATLENSURE_RETURN( pEntry->nVal != SOAPTYPE_ERR );
ATLENSURE_RETURN( pEntry->nVal != SOAPTYPE_USERBASE );
// if it is a complex type, get the chain entry
// and push the new state on the stack
DWORD dwFlags = pEntry->dwFlags;
if (pEntry->pChain->mapType != SOAPMAP_ENUM)
{
// struct
dwFlags &= ~(SOAPFLAG_FIXEDARR | SOAPFLAG_DYNARR);
// a non-empty struct must have at least one child element
m_bChildCheck = pEntry->pChain->nElements != 0;
}
else
{
// enum
dwFlags |= SOAPFLAG_READYSTATE;
// enums must not have child elements
m_bNullCheck = true;
// enums must be specified
m_bCharacters = true;
}
if (S_OK != PushState(pVal, pEntry->pChain, pEntry, dwFlags, 0, pEntry->pChain->nElements))
{
return E_OUTOFMEMORY;
}
return S_OK;
}
// Special-case handling for client-side PAD (parameters-as-document)
// maps marked SOAPFLAG_CHAIN: pushes the single chained entry's state and
// immediately re-dispatches the current element through ProcessParams.
// Returns S_FALSE when the map is not a chained PAD map (normal path).
HRESULT ChainEntry(
const ParseState& state,
const wchar_t *wszNamespaceUri,
int cchNamespaceUri,
const wchar_t *wszLocalName,
int cchLocalName,
ISAXAttributes *pAttributes)
{
ATLENSURE_RETURN( state.pMap != NULL );
// PAD is only supported on the client
const _soapmap *pMap = state.pMap;
if ((pMap->dwCallFlags & SOAPFLAG_CHAIN)==0)
{
return S_FALSE;
}
ATLENSURE_RETURN( pMap->dwCallFlags & SOAPFLAG_PAD );
ATLASSUME( m_bClient == true );
ATLENSURE_RETURN( pMap->nElements == 1 );
const _soapmapentry *pEntries = pMap->pEntries;
ATLENSURE_RETURN( pEntries != NULL );
// on the client we are parsing the response, so pick the [out] entry
int nIndex;
if (pEntries[0].dwFlags & SOAPFLAG_OUT)
{
nIndex = 0;
}
else
{
nIndex = 1;
}
const _soapmapentry *pEntry = &pEntries[nIndex];
ATLENSURE_RETURN( pEntry->nHash != 0 );
ATLENSURE_RETURN( pEntry->pChain != NULL );
if (S_OK != PushState(state.pvElement, pEntry->pChain, pEntry, pEntry->dwFlags, 0, pEntry->pChain->nElements))
{
return E_OUTOFMEMORY;
}
return ProcessParams(wszNamespaceUri, cchNamespaceUri, wszLocalName, cchLocalName, pAttributes);
}
// Checks the element for an xsi:nil attribute.  For a nil value on a
// nullable entry, pushes an empty state (no children expected) and
// returns the result of that push; a nil value on a non-nullable entry
// is E_FAIL.  Returns S_FALSE when the element is not nil.
HRESULT IsNullEntry(const _soapmapentry *pEntry, ISAXAttributes *pAttributes)
{
ATLASSERT( pEntry != NULL );
ATLASSERT( pAttributes != NULL );
HRESULT hr = E_FAIL;
bool bNull = false;
const wchar_t *wszNull = NULL;
int cchNull = 0;
hr = GetAttribute(pAttributes, L"nil", sizeof("nil")-1, &wszNull, &cchNull,
XSI_NAMESPACEW, sizeof(XSI_NAMESPACEA)-1);
if ((hr == S_OK) && (wszNull != NULL))
{
hr = AtlGetSAXValue(&bNull, wszNull, cchNull);
if (hr == S_OK)
{
if (bNull != false)
{
if (pEntry->dwFlags & SOAPFLAG_NULLABLE)
{
m_bNullCheck = true;
// push an empty state
return PushState();
}
// non-nullable element
return E_FAIL;
}
}
}
return S_FALSE;
}
// Core dispatch for a start-element while in SOAP_PARAMS or SOAP_HEADERS
// state.  Resolves the element against the current parse state's map
// (array item, chained PAD entry, or named field), handles xsi:nil and
// rpc/encoded href indirection, then pushes the appropriate child state:
// section-5 array info for arrays, a ready state for simple types, the
// string builder for strings, or a UDT state for structs/enums.
HRESULT ProcessParams(
const wchar_t *wszNamespaceUri,
int cchNamespaceUri,
const wchar_t *wszLocalName,
int cchLocalName,
ISAXAttributes *pAttributes)
{
// reference-only: suppress unused-parameter warnings in some paths
(wszNamespaceUri);
(cchNamespaceUri);
if (m_stateStack.IsEmpty())
{
// unknown elements inside Header are tolerated unless mustUnderstand
if (m_dwState == SOAP_HEADERS)
{
return CheckMustUnderstandHeader(pAttributes);
}
return E_FAIL;
}
ParseState &state = GetState();
ATLASSERT( state.pvElement != NULL );
HRESULT hr = E_FAIL;
const _soapmapentry *pEntry = NULL;
// if array element
if (state.dwFlags & (SOAPFLAG_FIXEDARR | SOAPFLAG_DYNARR))
{
hr = ValidateArrayEntry(state, wszLocalName, cchLocalName);
if (SUCCEEDED(hr))
{
pEntry = state.pEntry;
}
else
{
return hr;
}
}
else // not an array element
{
// special-case for PAD with type=
hr = ChainEntry(state, wszNamespaceUri, cchNamespaceUri,
wszLocalName, cchLocalName, pAttributes);
if (hr == S_FALSE)
{
hr = GetElementEntry(state, wszNamespaceUri, wszLocalName, cchLocalName, pAttributes, &pEntry);
if (hr != S_OK)
{
if (hr == S_FALSE)
{
// unknown element: ignore (S_OK), unless it is a header that
// must be understood
hr = S_OK;
}
else if (m_dwState == SOAP_HEADERS)
{
hr = CheckMustUnderstandHeader(pAttributes);
}
return hr;
}
ATLASSERT( pEntry != NULL );
}
else
{
// ChainEntry fully handled (or failed on) the element
return hr;
}
}
// xsi:nil handling -- S_FALSE means "not nil, keep going"
hr = IsNullEntry(pEntry, pAttributes);
if (hr != S_FALSE)
{
return hr;
}
hr = S_OK;
ATLENSURE_RETURN(pEntry);
// if is array
if (((pEntry->pDims != NULL) || (pEntry->dwFlags & (SOAPFLAG_FIXEDARR | SOAPFLAG_DYNARR))) &&
((state.dwFlags & (SOAPFLAG_FIXEDARR | SOAPFLAG_DYNARR)) == 0))
{
// get SOAP section-5 info (if it is there)
return GetSection5Info(state, pEntry, pAttributes);
}
else
{
// if it is a simple type, push a new (ready) state on the stack
void *pVal;
if (state.dwFlags & (SOAPFLAG_FIXEDARR | SOAPFLAG_DYNARR))
{
pVal = UpdateArray(state, pEntry);
ATLASSERT( pVal != NULL );
}
else
{
pVal = (((unsigned char *)state.pvElement)+pEntry->nOffset);
}
if (IsRpcEncoded() != false)
{
// check for href
// we ONLY do this for rpc/encoded (required for interop)
// NOTE: ATL Server does not support object graphs, so
// only single-reference elements are allowed
hr = CheckHref(pEntry, pVal, pAttributes);
if (hr != S_FALSE)
{
return hr;
}
hr = S_OK;
}
if (pEntry->nVal != SOAPTYPE_UNK)
{
// simple types should not have child elements
m_bNullCheck = true;
// if it is a string
if ((pEntry->nVal == SOAPTYPE_STRING) || (pEntry->nVal == SOAPTYPE_BASE64BINARY))
{
hr = ProcessString(pEntry, pVal);
}
else
{
// expect characters for all non-string simple types
m_bCharacters = true;
// basic simple type
if (S_OK != PushState(pVal, NULL, pEntry, SOAPFLAG_READYSTATE | pEntry->dwFlags))
{
hr = E_OUTOFMEMORY;
}
}
}
else
{
hr = ProcessUDT(pEntry, pVal);
if (pEntry->dwFlags & (SOAPFLAG_DYNARRWRAPPER))
{
// We're moving to the **first** entry in the dynamic array wrapper.
// We know it is the first entry because the dynamic array wrapper is created
// by sproxy and it guarantees this layouts.
++m_nDepth;
ProcessParams (wszNamespaceUri, cchNamespaceUri, pEntry->pChain->pEntries[0].wszField,
pEntry->pChain->pEntries[0].cchField, pAttributes);
}
}
}
return hr;
}
// Reads the size_is companion field of a dynamic-array entry from the
// parameter struct and returns it as a size_t.  Signed values below zero,
// unsigned values above _I64_MAX, and unsupported field types all
// collapse to 0.
size_t GetSizeIsValue(void *pvParam, const _soapmap *pMap, const _soapmapentry *pEntry)
{
    ATLENSURE( pvParam != NULL );
    ATLENSURE( pMap != NULL );
    ATLENSURE( pEntry != NULL );
    const int nSizeIsIndex = pEntry->nSizeIs;
    const size_t nFieldOffset = pMap->pEntries[nSizeIsIndex].nOffset;
    void *pField = ((unsigned char *)pvParam)+nFieldOffset;
    // widen whatever integral type the field uses into a signed 64-bit value
    __int64 nResult = 0;
    switch (pMap->pEntries[nSizeIsIndex].nVal)
    {
        case SOAPTYPE_INTEGER:
        case SOAPTYPE_NONPOSITIVEINTEGER:
        case SOAPTYPE_NEGATIVEINTEGER:
        case SOAPTYPE_LONG:
            nResult = *((__int64 *)pField);
            break;
        case SOAPTYPE_INT:
            nResult = *((int *)pField);
            break;
        case SOAPTYPE_SHORT:
            nResult = *((short *)pField);
            break;
        case SOAPTYPE_BYTE:
            nResult = *((char *)pField);
            break;
        case SOAPTYPE_POSITIVEINTEGER:
        case SOAPTYPE_NONNEGATIVEINTEGER:
        case SOAPTYPE_UNSIGNEDLONG:
            unsigned __int64 nUnsigned;
            nUnsigned = *((unsigned __int64 *)pField);
            // values that do not fit in a signed 64-bit int are rejected
            if (nUnsigned > _I64_MAX)
            {
                nResult = 0;
            }
            else
            {
                nResult = (__int64)nUnsigned;
            }
            break;
        case SOAPTYPE_UNSIGNEDINT:
            nResult = *((unsigned int *)pField);
            break;
        case SOAPTYPE_UNSIGNEDSHORT:
            nResult = *((unsigned short *)pField);
            break;
        case SOAPTYPE_UNSIGNEDBYTE:
            nResult = *((unsigned char *)pField);
            break;
        default:
            nResult = 0;
    }
    // negative sizes are meaningless; clamp to zero
    if (nResult < 0)
    {
        nResult = 0;
    }
    return (size_t) nResult;
}
// Writes the SOAP section-5 soapenc:arrayType attribute for an array
// entry, e.g. soapenc:arrayType="xsd:int[4]" for intrinsic types or
// soapenc:arrayType="q1:MyType[2, 3]" (with an xmlns:q1 declaration) for
// UDTs.  pDims[0] is the number of dimensions, pDims[1..] the extents.
HRESULT GenerateArrayInfo(const _soapmapentry *pEntry, const int *pDims, IWriteStream *pStream)
{
ATLENSURE_RETURN( pEntry != NULL );
ATLENSURE_RETURN( pStream != NULL );
HRESULT hr = S_OK;
if (pEntry->nVal != SOAPTYPE_UNK)
{
// xsd type
hr = pStream->WriteStream(" soapenc:arrayType=\"xsd:",
sizeof(" soapenc:arrayType=\"xsd:")-1, NULL);
}
else
{
// UDT: declare its namespace under the q1 prefix first
ATLENSURE_RETURN( pEntry->pChain != NULL );
hr = pStream->WriteStream(" xmlns:q1=\"", sizeof(" xmlns:q1=\"")-1, NULL);
if (SUCCEEDED(hr))
{
if (pEntry->pChain->szNamespace != NULL)
{
hr = pStream->WriteStream(pEntry->pChain->szNamespace, pEntry->pChain->cchNamespace, NULL);
}
else
{
// fall back to the service namespace
hr = pStream->WriteStream(GetNamespaceUriA(), -1, NULL);
}
if (SUCCEEDED(hr))
{
hr = pStream->WriteStream("\"", 1, NULL);
if (SUCCEEDED(hr))
{
hr = pStream->WriteStream(" soapenc:arrayType=\"q1:",
sizeof(" soapenc:arrayType=\"q1:")-1, NULL);
}
}
}
}
if (FAILED(hr))
{
return hr;
}
// the element type name
if (pEntry->nVal != SOAPTYPE_UNK)
{
hr = pStream->WriteStream(s_xsdNames[pEntry->nVal].szName ,
s_xsdNames[pEntry->nVal].cchName, NULL);
}
else
{
ATLASSERT( pEntry->pChain != NULL );
hr = pStream->WriteStream(pEntry->pChain->szName, pEntry->pChain->cchName, NULL);
}
if (FAILED(hr))
{
return hr;
}
hr = pStream->WriteStream("[", 1, NULL);
if (FAILED(hr))
{
return hr;
}
// comma-separated dimension extents
CWriteStreamHelper s( pStream );
for (int i=1; i<=pDims[0]; i++)
{
if (!s.Write(pDims[i]) ||
((i < pDims[0]) && (S_OK != pStream->WriteStream(", ", 2, NULL))))
{
return E_FAIL;
}
}
hr = pStream->WriteStream("]\"", 2, NULL);
if (FAILED(hr))
{
return hr;
}
return S_OK;
}
// Writes an opening or closing wrapper tag named after the xsd type of an
// intrinsic array element (e.g. "<int>" / "</int>"), adding xsi:nil="1"
// to the opening tag when bNull is set.
HRESULT GenerateXSDWrapper(bool bStart, int nVal, bool bNull, IWriteStream *pStream)
{
    ATLENSURE_RETURN( pStream != NULL );
    // "<" opens the tag, "</" closes it
    HRESULT hr;
    if (bStart)
    {
        hr = pStream->WriteStream("<", 1, NULL);
    }
    else
    {
        hr = pStream->WriteStream("</", 2, NULL);
    }
    if (FAILED(hr))
    {
        return hr;
    }
    hr = pStream->WriteStream(s_xsdNames[nVal].szName,
        s_xsdNames[nVal].cchName, NULL);
    if (bNull && SUCCEEDED(hr))
    {
        hr = pStream->WriteStream(" xsi:nil=\"1\"", sizeof(" xsi:nil=\"1\"")-1, NULL);
    }
    if (SUCCEEDED(hr))
    {
        hr = pStream->WriteStream(">", 1, NULL);
    }
    return hr;
}
// Writes an opening or closing wrapper tag named after the map's element
// name (used for UDT array elements).
HRESULT GenerateGenericWrapper(bool bStart, const _soapmap *pMap, IWriteStream *pStream)
{
    ATLENSURE_RETURN( pStream != NULL );
    ATLENSURE_RETURN( pMap != NULL );
    const char *szPrefix = bStart ? "<" : "</";
    const int cchPrefix = bStart ? 1 : 2;
    HRESULT hr = pStream->WriteStream(szPrefix, cchPrefix, NULL);
    if (FAILED(hr))
    {
        return hr;
    }
    hr = pStream->WriteStream(pMap->szName, pMap->cchName, NULL);
    if (SUCCEEDED(hr))
    {
        hr = pStream->WriteStream(">", 1, NULL);
    }
    return hr;
}
// Emits the soapenc:arrayType attribute for an array entry and returns
// (via out-params) the total element count and per-element size.
// For dynamic arrays the count comes from the size_is field; a zero
// size_is on a non-null array is treated as a caller error.
HRESULT GetArrayInformation(
IWriteStream *pStream,
const _soapmap *pMap,
const _soapmapentry *pEntry,
void *pvParam,
size_t &nCnt,
size_t &nElementSize)
{
ATLENSURE_RETURN( pStream != NULL );
ATLENSURE_RETURN( pMap != NULL );
ATLENSURE_RETURN( pEntry != NULL );
ATLENSURE_RETURN( pvParam != NULL );
const int *pDims = NULL;
int arrDims[2] = { 0 };
if (pEntry->dwFlags & SOAPFLAG_FIXEDARR)
{
// fixed array: dimensions are compiled into the map entry
pDims = pEntry->pDims;
}
else
{
ATLASSERT( pEntry->dwFlags & SOAPFLAG_DYNARR );
// dynamic array: build a one-dimensional dims array from size_is
nCnt = GetSizeIsValue(pvParam, pMap, pEntry);
if (nCnt == 0)
{
// array size should only be zero if array is NULL
// did you forget to set the array size?
ATLASSERT( FALSE );
return E_FAIL;
}
arrDims[0] = 1;
arrDims[1] = (int) nCnt;
pDims = arrDims;
}
// output array information
HRESULT hr = GenerateArrayInfo(pEntry, pDims, pStream);
if (FAILED(hr))
{
return hr;
}
if (SUCCEEDED(hr))
{
nCnt = AtlSoapGetArrayDims(pDims);
// did you forget to set the size_is value?
ATLASSERT( nCnt != 0 );
if (pEntry->nVal != SOAPTYPE_UNK)
{
nElementSize = AtlSoapGetElementSize((SOAPTYPES) pEntry->nVal);
}
else
{
ATLENSURE_RETURN( pEntry->pChain != NULL );
nElementSize = pEntry->pChain->nElementSize;
}
}
return hr;
}
// Writes the symbolic field name corresponding to an enum's integer
// value; for array items also closes the generic wrapper element.
// Returns E_FAIL when the value has no mapping in the enum's entry table.
HRESULT GenerateEnum(IWriteStream *pStream, void *pVal, const _soapmapentry *pEntry, bool bArray)
{
    ATLENSURE_RETURN( pStream != NULL );
    ATLENSURE_RETURN( pVal != NULL );
    ATLENSURE_RETURN( pEntry != NULL );
    const int nValue = *((int *)pVal);
    const _soapmapentry *pEnumEntries = pEntry->pChain->pEntries;
    ATLENSURE_RETURN( pEnumEntries != NULL );
    // stays E_FAIL unless a matching enum entry is found
    HRESULT hr = E_FAIL;
    for (size_t nIndex = 0; pEnumEntries[nIndex].nHash != 0; nIndex++)
    {
        if (pEnumEntries[nIndex].nVal != nValue)
        {
            continue;
        }
        hr = pStream->WriteStream(pEnumEntries[nIndex].szField,
            pEnumEntries[nIndex].cchField, NULL);
        if (bArray && SUCCEEDED(hr))
        {
            hr = GenerateGenericWrapper(false, pEntry->pChain, pStream);
        }
        break;
    }
    return hr;
}
// Marshals the SOAP Header block.  The client sends [in] headers, the
// server sends [out] headers; when no header in the map matches that
// direction, the Header element is omitted entirely.
HRESULT GenerateHeaders(CResponseGenerator *pGenerator, const _soapmap *pMap, IWriteStream *pStream)
{
    ATLENSURE_RETURN( pStream != NULL );
    ATLENSURE_RETURN( pMap != NULL );
    ATLENSURE_RETURN( pGenerator != NULL );
    const DWORD dwIncludeFlags = (m_bClient != false) ? SOAPFLAG_IN : SOAPFLAG_OUT;
    // count headers going in our direction
    size_t nHeaderCount = 0;
    for (size_t i = 0; pMap->pEntries[i].nHash != 0; i++)
    {
        if (pMap->pEntries[i].dwFlags & dwIncludeFlags)
        {
            nHeaderCount++;
        }
    }
    // no headers to be sent
    if (nHeaderCount == 0)
    {
        return S_OK;
    }
    HRESULT hr = pGenerator->StartHeaders(pStream, pMap);
    if (FAILED(hr))
    {
        return hr;
    }
    hr = GenerateResponseHelper(pGenerator, pMap, GetHeaderValue(), pStream);
    if (SUCCEEDED(hr))
    {
        hr = pGenerator->EndHeaders(pStream);
    }
    return hr;
}
bool IsNullElement(const _soapmapentry *pEntry, void *pVal, DWORD dwExcludeFlags=0)
{
ATLENSURE( pEntry != NULL );
ATLENSURE( pVal != NULL );
bool bNull = false;
DWORD dwFlags = pEntry->dwFlags & ~dwExcludeFlags;
if (dwFlags & SOAPFLAG_DYNARR)
{
unsigned char **ppArr = (unsigned char **)pVal;
if (*ppArr == NULL)
{
bNull = true;
}
}
else if (pEntry->nVal == SOAPTYPE_STRING)
{
BSTR *pBSTR = (BSTR *)pVal;
if (*pBSTR == NULL)
{
bNull = true;
}
}
else if ((pEntry->nVal == SOAPTYPE_BASE64BINARY) || (pEntry->nVal == SOAPTYPE_HEXBINARY))
{
if (((ATLSOAP_BLOB *)pVal)->data == NULL)
{
bNull = true;
}
}
return bNull;
}
// Emits the attribute-plus-self-close sequence for a null element:
// the element was already opened by the caller, so this writes
// ' xsi:nil="1"/>' to finish it.
HRESULT GenerateNull(IWriteStream *pStream)
{
    ATLENSURE_RETURN( pStream != NULL );
    static const char c_szNil[] = " xsi:nil=\"1\"/>";
    return pStream->WriteStream(c_szNil, sizeof(c_szNil)-1, NULL);
}
// Marshals one _soapmap (struct, header block, or parameter block) to
// pStream, recursing for nested structs.  The client writes [in] fields,
// the server writes [out] fields; SOAPFLAG_NOMARSHAL fields are skipped.
// Null fields are emitted as xsi:nil.  bArrayElement indicates pvParam is
// one element of an array of structs, which needs the extra type-name
// wrapper element (except for PAD maps).
HRESULT GenerateResponseHelper(CResponseGenerator *pGenerator, const _soapmap *pMap, void *pvParam, IWriteStream *pStream,
    bool bArrayElement = false)
{
    ATLENSURE_RETURN( pGenerator != NULL );
    ATLENSURE_RETURN( pMap != NULL );
    ATLENSURE_RETURN( pStream != NULL );
    HRESULT hr = S_OK;
    if ((bArrayElement != false) &&
        ((pMap->dwCallFlags & SOAPFLAG_PAD)==0))
    {
        hr = pGenerator->StartMap(pStream, pMap, m_bClient);
        if (FAILED(hr))
        {
            return hr;
        }
    }
    ATLENSURE_RETURN( pMap->pEntries != NULL );
    const _soapmapentry *pEntries = pMap->pEntries;
    size_t i;
    // marshaling direction depends on which side we are
    DWORD dwIncludeFlags;
    DWORD dwExcludeFlags;
    if (m_bClient != false)
    {
        dwIncludeFlags = SOAPFLAG_IN;
        dwExcludeFlags = SOAPFLAG_OUT;
    }
    else
    {
        dwIncludeFlags = SOAPFLAG_OUT;
        dwExcludeFlags = SOAPFLAG_IN;
    }
    for (i=0; pEntries[i].nHash != 0; i++)
    {
        if (((pEntries[i].dwFlags & dwIncludeFlags) ||
            ((pEntries[i].dwFlags & dwExcludeFlags)==0)) &&
            ((pEntries[i].dwFlags & SOAPFLAG_NOMARSHAL)==0))
        {
            // open the field's element
            hr = pGenerator->StartEntry(pStream, pMap, &pEntries[i]);
            if (FAILED(hr))
            {
                return hr;
            }
            size_t nElementSize = 0;
            size_t nCnt = 1;
            ATLASSERT( pvParam != NULL );
            void *pvCurrent = ((unsigned char *)pvParam)+pEntries[i].nOffset;
            if (IsNullElement(&pEntries[i], pvCurrent))
            {
                hr = GenerateNull(pStream);
                if (SUCCEEDED(hr))
                {
                    continue;
                }
                return hr;
            }
            bool bArray = (pEntries[i].dwFlags & (SOAPFLAG_FIXEDARR | SOAPFLAG_DYNARR)) != 0;
            if (bArray != false)
            {
                hr = GetArrayInformation(pStream, pMap, &pEntries[i], pvParam, nCnt, nElementSize);
                // BUGFIX: this result was previously ignored (immediately
                // overwritten by the WriteStream below), so a failure here
                // would continue marshaling with nElementSize == 0 and emit
                // a malformed message.  Bail out instead.
                if (FAILED(hr))
                {
                    return hr;
                }
            }
            // close the start tag
            hr = pStream->WriteStream(">", 1, NULL);
            if (FAILED(hr))
            {
                return hr;
            }
            for (size_t nElement=0; nElement<nCnt; nElement++)
            {
                void *pVal;
                // get updated value
                if (bArray != false)
                {
                    if (pEntries[i].dwFlags & SOAPFLAG_FIXEDARR)
                    {
                        // fixed array: elements stored inline
                        unsigned char *ppArr = (unsigned char *)pvCurrent;
                        pVal = ppArr+(nElement*nElementSize);
                    }
                    else
                    {
                        ATLASSERT( pEntries[i].dwFlags & SOAPFLAG_DYNARR );
                        // dynamic array: dereference the array pointer
                        unsigned char **ppArr = (unsigned char **)pvCurrent;
                        pVal = (*ppArr)+(nElement*nElementSize);
                    }
                }
                else
                {
                    pVal = pvCurrent;
                }
                if (pEntries[i].nVal != SOAPTYPE_UNK)
                {
                    // intrinsic type; array items get an xsd-named wrapper
                    bool bNull = false;
                    if (bArray != false)
                    {
                        bNull = IsNullElement(&pEntries[i], pVal, SOAPFLAG_DYNARR | SOAPFLAG_FIXEDARR);
                        hr = GenerateXSDWrapper(true, pEntries[i].nVal, bNull, pStream);
                        if (FAILED(hr))
                        {
                            return hr;
                        }
                    }
                    if (bNull == false)
                    {
                        hr = AtlSoapGenElementValue(pVal, pStream, (SOAPTYPES) pEntries[i].nVal, GetMemMgr());
                    }
                    if ((SUCCEEDED(hr)) && (bArray != false))
                    {
                        hr = GenerateXSDWrapper(false, pEntries[i].nVal, false, pStream);
                    }
                    if (FAILED(hr))
                    {
                        return hr;
                    }
                }
                else
                {
                    ATLASSERT( pEntries[i].pChain != NULL );
                    if (pEntries[i].pChain->mapType != SOAPMAP_ENUM)
                    {
                        // struct: recurse
                        hr = GenerateResponseHelper(pGenerator, pEntries[i].pChain, pVal, pStream, bArray);
                    }
                    else
                    {
                        // enum; array items get a type-named wrapper
                        if (bArray != false)
                        {
                            hr = GenerateGenericWrapper(true, pEntries[i].pChain, pStream);
                            if (FAILED(hr))
                            {
                                return hr;
                            }
                        }
                        hr = GenerateEnum(pStream, pVal, &pEntries[i], bArray);
                    }
                }
            }
            // output element close
            if (SUCCEEDED(hr))
            {
                hr = pGenerator->EndEntry(pStream, pMap, &pEntries[i]);
            }
        }
        if (FAILED(hr))
        {
            return hr;
        }
    }
    if ((bArrayElement != false) &&
        ((pMap->dwCallFlags & SOAPFLAG_PAD)==0))
    {
        // output type name
        hr = pGenerator->EndMap(pStream, pMap, m_bClient);
    }
    return hr;
}
// Frees all heap-allocated data referenced by the parameter struct
// described by pMap: per-element cleanup (strings, blobs, nested structs,
// recursing for struct members) followed by freeing dynamic-array storage
// itself.  On the client, in-only entries are skipped.
void CleanupHelper(const _soapmap *pMap, void *pvParam)
{
ATLENSURE( pMap != NULL );
ATLENSURE( pMap->pEntries != NULL );
if (pvParam == NULL)
{
return;
}
const _soapmapentry *pEntries = pMap->pEntries;
size_t i;
for (i=0; pEntries[i].nHash != 0; i++)
{
if ((m_bClient != false) && ((pEntries[i].dwFlags & SOAPFLAG_OUT)==0))
{
// skip in-only headers on the client
continue;
}
void *pvCheck = ((unsigned char *)pvParam)+pEntries[i].nOffset;
if (IsNullElement(&pEntries[i], pvCheck))
{
// nothing allocated for this field
continue;
}
size_t nElementSize = 0;
size_t nCnt = 1;
const int *pDims = NULL;
int arrDims[2] = { 0 };
bool bArray = (pEntries[i].dwFlags & (SOAPFLAG_FIXEDARR | SOAPFLAG_DYNARR)) != 0;
if (bArray != false)
{
// determine element count and size for the per-element cleanup loop
if (pEntries[i].dwFlags & SOAPFLAG_FIXEDARR)
{
pDims = pEntries[i].pDims;
}
else
{
ATLASSERT( pEntries[i].dwFlags & SOAPFLAG_DYNARR );
nCnt = GetSizeIsValue(pvParam, pMap, &pEntries[i]);
arrDims[0] = 1;
arrDims[1] = (int) nCnt;
pDims = arrDims;
}
nCnt = AtlSoapGetArrayDims(pDims);
if (pEntries[i].nVal != SOAPTYPE_UNK)
{
nElementSize = AtlSoapGetElementSize((SOAPTYPES) pEntries[i].nVal);
}
else
{
ATLENSURE( pEntries[i].pChain != NULL );
nElementSize = pEntries[i].pChain->nElementSize;
}
}
void *pvCurrent = ((unsigned char *)pvParam)+pEntries[i].nOffset;
for (size_t nElement=0; nElement<nCnt; nElement++)
{
void *pVal;
// get updated value
if (bArray != false)
{
if (pEntries[i].dwFlags & SOAPFLAG_FIXEDARR)
{
unsigned char *ppArr = (unsigned char *)pvCurrent;
pVal = ppArr+(nElement*nElementSize);
}
else
{
ATLASSERT( pEntries[i].dwFlags & SOAPFLAG_DYNARR );
unsigned char **ppArr = (unsigned char **)pvCurrent;
if (*ppArr == NULL)
{
// array storage already gone; nothing to clean
break;
}
pVal = (*ppArr)+(nElement*nElementSize);
}
}
else
{
pVal = pvCurrent;
}
if (pEntries[i].nVal != SOAPTYPE_UNK)
{
AtlSoapCleanupElement(pVal, (SOAPTYPES) pEntries[i].nVal, GetMemMgr());
}
else
{
ATLENSURE( pEntries[i].pChain != NULL );
if (pEntries[i].pChain->mapType != SOAPMAP_ENUM)
{
// nested struct: recurse
CleanupHelper(pEntries[i].pChain, pVal);
}
}
}
if (pEntries[i].dwFlags & SOAPFLAG_DYNARR)
{
// free it
unsigned char **ppArr = (unsigned char **)pvCurrent;
ATLENSURE( ppArr != NULL );
if (*ppArr != NULL)
{
m_pMemMgr->Free(*ppArr);
*ppArr = NULL;
}
}
}
}
// Looks up a _soapmap by element name (and optionally namespace URI) in
// either the function map or the header map.  Matching is done by string
// hash plus full comparison.  Returns the matching map, or NULL (the
// table's terminating entry) when no match is found; *pnVal receives the
// index of the matched (or terminating) entry.
const _soapmap * GetSoapMapFromName(
const wchar_t * wszName,
int cchName = -1,
const wchar_t * wszNamespaceUri = NULL,
int cchNamespaceUri = -1,
int *pnVal = NULL,
bool bHeader = false)
{
// reference-only: suppress unused-parameter warning
(cchNamespaceUri);
const _soapmap ** pEntry = NULL;
if (bHeader == false)
{
pEntry = GetFunctionMap();
}
else
{
pEntry = GetHeaderMap();
}
if (pEntry == NULL)
{
return NULL;
}
// compute missing lengths so the hashes cover the full strings
if (cchName < 0)
{
cchName = (int)wcslen(wszName);
}
if ((cchNamespaceUri < 0) && (wszNamespaceUri != NULL))
{
cchNamespaceUri = (int)wcslen(wszNamespaceUri);
}
ULONG nFunctionHash = AtlSoapHashStr(wszName, cchName);
ULONG nNamespaceHash = wszNamespaceUri ? AtlSoapHashStr(wszNamespaceUri, cchNamespaceUri) : 0;
int i;
for (i=0; pEntry[i] != NULL; i++)
{
// name must match; namespace must match only when one was supplied
if ((IsEqualStringHash(wszName, cchName, nFunctionHash,
pEntry[i]->wszName, pEntry[i]->cchWName, pEntry[i]->nHash) != FALSE) &&
(!wszNamespaceUri ||
IsEqualStringHash(wszNamespaceUri, cchNamespaceUri, nNamespaceHash,
pEntry[i]->wszNamespace, pEntry[i]->cchNamespace, pEntry[i]->nNamespaceHash) != FALSE))
{
break;
}
}
if (pnVal != NULL)
{
*pnVal = i;
}
return pEntry[i];
}
// Validates element counts when a state is about to be popped: the exact
// expected count must be reached, except for dynamic arrays of unknown
// size.  For maps, any omitted field must be nullable, nomarshal, or a
// UDT (UDT omission is tolerated here).
HRESULT CheckEndElement(const ParseState& state)
{
// check for all elements
if (state.nElement == state.nExpectedElements)
{
return S_OK;
}
// error check for fixed arrays
if (state.dwFlags & SOAPFLAG_FIXEDARR)
{
return E_FAIL;
}
// check for dynamic arrays
if (state.dwFlags & SOAPFLAG_DYNARR)
{
// check for dynamic arrays with known size
// (from soap:arrayType attribute)
if ((state.dwFlags & SOAPFLAG_UNKSIZE)==0)
{
return E_FAIL;
}
}
// direction of fields we expected to receive:
// the client receives [out] fields, the server receives [in] fields
DWORD dwIncludeFlags;
DWORD dwExcludeFlags;
if (m_bClient != false)
{
dwIncludeFlags = SOAPFLAG_OUT;
dwExcludeFlags = SOAPFLAG_IN;
}
else
{
dwIncludeFlags = SOAPFLAG_IN;
dwExcludeFlags = SOAPFLAG_OUT;
}
if (state.pMap != NULL)
{
// ensure all omitted elements were nullable elements or nomarshal elements
const _soapmapentry *pEntries = state.pMap->pEntries;
for (size_t i=0; pEntries[i].nHash != 0; i++)
{
if ((pEntries[i].dwFlags & dwIncludeFlags) ||
((pEntries[i].dwFlags & dwExcludeFlags)==0))
{
// state.vec tracks which entries were actually seen
if (state.vec.GetBit(i) == false)
{
if (((pEntries[i].dwFlags & (SOAPFLAG_NULLABLE | SOAPFLAG_NOMARSHAL))==0) && (pEntries[i].nVal != SOAPTYPE_UNK))
{
ATLTRACE( _T("ATLSOAP: CSoapRootHandler::CheckEndElement -- invalid number of elements for parameter/field\r\n") );
return E_FAIL;
}
}
}
}
}
return S_OK;
}
// Verifies at end-of-Headers that every header expected in our direction
// (client receives [out] headers, server receives [in] headers) was seen.
// Omitted headers are tolerated only when marked SOAPFLAG_NULLABLE or
// SOAPFLAG_NOMARSHAL.
// NOTE(review): the original test here was
//   (bNoOmit) || ((bNoOmit) && (flags & SOAPFLAG_MUSTUNDERSTAND))
// whose second clause is dead code (A || (A && B) == A); it has been
// simplified to the behaviorally identical form below.  If the intent
// was to fail only on must-understand headers, the condition would need
// a different shape -- confirm against the protocol requirements before
// changing behavior.
HRESULT CheckSoapHeaders(const ParseState &state)
{
    DWORD dwIncludeFlags;
    DWORD dwExcludeFlags;
    if (m_bClient != false)
    {
        dwIncludeFlags = SOAPFLAG_OUT;
        dwExcludeFlags = SOAPFLAG_IN;
    }
    else
    {
        dwIncludeFlags = SOAPFLAG_IN;
        dwExcludeFlags = SOAPFLAG_OUT;
    }
    if (state.pMap != NULL)
    {
        ATLASSERT( state.pMap->mapType == SOAPMAP_HEADER );
        // ensure all omitted elements were nullable elements, nomarshal elements, or non-required elements
        const _soapmapentry *pEntries = state.pMap->pEntries;
        for (size_t i=0; pEntries[i].nHash != 0; i++)
        {
            if ((pEntries[i].dwFlags & dwIncludeFlags) ||
                ((pEntries[i].dwFlags & dwExcludeFlags)==0))
            {
                // state.vec tracks which headers were actually seen
                if (state.vec.GetBit(i) == false)
                {
                    bool bNoOmit = (pEntries[i].dwFlags & (SOAPFLAG_NULLABLE | SOAPFLAG_NOMARSHAL))==0;
                    if (bNoOmit != false)
                    {
                        ATLTRACE( _T("ATLSOAP: CSoapRootHandler::CheckSoapHeaders -- missing header\r\n") );
                        return E_FAIL;
                    }
                }
            }
        }
    }
    return S_OK;
}
// Handles an end tag seen while still in SOAP_HEADERS state: only the
// SOAP-ENV Header close element is legal here.  Transitions the state
// machine to SOAP_HEADERS_DONE on match, otherwise fails.
HRESULT CheckEndHeaders(
    const wchar_t * wszNamespaceUri,
    int cchNamespaceUri,
    const wchar_t * wszLocalName,
    int cchLocalName)
{
    if (!IsEqualElement(sizeof(SOAP_HEADERA)-1, SOAP_HEADERW,
            sizeof(SOAPENV_NAMESPACEA)-1, SOAPENV_NAMESPACEW,
            cchLocalName, wszLocalName,
            cchNamespaceUri, wszNamespaceUri))
    {
        // some sort of error
        ATLTRACE( _T("ATLSOAP: CSoapRootHandler::endElement -- invalid SOAP message format while processing headers.\r\n" ) );
        return E_FAIL;
    }
    m_dwState = SOAP_HEADERS_DONE;
    return S_OK;
}
protected:
// Installs a new SAX reader and returns the previously installed one
// (raw pointer, no extra reference taken).
ISAXXMLReader * SetReader(ISAXXMLReader *pReader)
{
    ISAXXMLReader *pPrevious = m_spReader;
    m_spReader = pReader;
    return pPrevious;
}
// Returns the SAX reader currently in use (raw pointer, no reference added).
ISAXXMLReader * GetReader()
{
return m_spReader;
}
// Resolves the _soapmap for the named element (function or header block),
// allocates/zeroes the parameter struct on the server, and pushes the
// initial parse state.  On the client the parameter struct was supplied
// earlier (m_pvParam / SetClientStruct), and a non-empty state stack
// means the function map index was already recorded in nAllocSize.
HRESULT SetSoapMapFromName(
const wchar_t * wszName,
int cchName = -1,
const wchar_t * wszNamespaceUri = NULL,
int cchNamespaceUri = -1,
bool bHeader = false)
{
ATLENSURE_RETURN( wszName != NULL );
int nVal;
const _soapmap *pMap = NULL;
if (m_stateStack.GetCount() != 0)
{
// function index was stashed in nAllocSize of the initial state
ATLASSUME( m_stateStack[0].pMap != NULL );
nVal = (int) m_stateStack[0].nAllocSize;
ATLASSERT( GetFunctionMap() != NULL );
pMap = GetFunctionMap()[nVal];
}
else
{
pMap = GetSoapMapFromName(wszName, cchName,
wszNamespaceUri, cchNamespaceUri, &nVal, bHeader);
}
if (pMap == NULL)
{
ATLTRACE( _T("ATLSOAP: CSoapRootHandler::SetSoapMapFromName -- _soapmap not found for: %.*ws, with namespace %.*ws\r\n"),
(int)wcslen(wszName), wszName, wszNamespaceUri ? (int)wcslen(wszNamespaceUri) : 0, wszNamespaceUri ? wszNamespaceUri : L"");
return E_FAIL;
}
HRESULT hr = E_OUTOFMEMORY;
// allocate the parameter struct
void *pvParam = NULL;
if (bHeader != false)
{
pvParam = GetHeaderValue();
}
else
{
// server allocates; client reuses the struct from SetClientStruct
if (m_bClient == false)
{
m_pvParam = m_pMemMgr->Allocate(pMap->nElementSize);
}
pvParam = m_pvParam;
}
if (pvParam != NULL)
{
if (bHeader == false)
{
memset(pvParam, 0x00, pMap->nElementSize);
}
// push initial state
if (m_stateStack.GetCount() != 0)
{
m_stateStack.RemoveAll();
}
hr = PushState(pvParam, pMap, NULL, 0, nVal, pMap->nElements);
if (FAILED(hr))
{
// don't leak the server-side allocation on push failure
if ((m_bClient == false) && (bHeader == false))
{
m_pMemMgr->Free(pvParam);
}
}
}
#ifdef _DEBUG
if (hr == E_OUTOFMEMORY)
{
ATLTRACE( _T("ATLSOAP: CSoapRootHandler::SetSoapMapFromName -- out of memory.\r\n" ) );
}
#endif // _DEBUG
return hr;
}
// implementation -- overridden by the sproxy/sserver-generated class
virtual const _soapmap ** GetFunctionMap() = 0;   // NULL-terminated table of function maps
virtual const _soapmap ** GetHeaderMap() = 0;     // NULL-terminated table of header maps
virtual const wchar_t * GetNamespaceUri() = 0;    // service namespace (wide)
virtual const char * GetServiceName() = 0;
virtual const char * GetNamespaceUriA() = 0;      // service namespace (narrow)
// invoke the actual implementation method for the parsed call
virtual HRESULT CallFunction(
void *pvParam,
const wchar_t *wszLocalName, int cchLocalName,
size_t nItem) = 0;
// storage for the header struct of the current call
virtual void * GetHeaderValue() = 0;
public:
// Constructs the handler, defaulting the memory manager to the CRT heap
// and resetting the SAX parsing flags.
CSoapRootHandler(ISAXXMLReader *pReader = NULL)
: m_pMemMgr(&m_crtHeap), m_spReader(pReader), m_bClient(false),
m_nState(0), m_pvParam(NULL), m_nDepth(0)
{
InitHandlerState();
}
// Detach the skip-handler so it does not call back into this object
// after destruction.
virtual ~CSoapRootHandler()
{
m_skipHandler.DetachParent();
}
// Swaps in a new memory manager and returns the one previously in use.
IAtlMemMgr * SetMemMgr(IAtlMemMgr *pMemMgr)
{
    IAtlMemMgr *pPrevious = m_pMemMgr;
    m_pMemMgr = pMemMgr;
    return pPrevious;
}
// Returns the memory manager used for parameter/array allocations.
IAtlMemMgr * GetMemMgr()
{
return m_pMemMgr;
}
// override this function to do SOAP Fault handling
virtual HRESULT SoapFault(
SOAP_ERROR_CODE /*errCode*/,
const wchar_t * /*wszDetail*/,
int /*cchDetail*/)
{
// the default client implementation ignores faults
if (m_bClient != false)
{
return S_OK;
}
// SOAP servers must implement this function
ATLASSERT( FALSE );
return E_FAIL;
}
//
// implementation
//
// Resets the per-document SAX validation flags and the protocol state
// machine to the start of a new message.
void InitHandlerState()
{
m_bNullCheck = false;
m_bCharacters = false;
m_bChildCheck = false;
m_dwState = SOAP_START;
}
// ISAXContentHandler: reset handler state at the start of each document.
HRESULT __stdcall startDocument()
{
InitHandlerState();
return S_OK;
}
// ISAXContentHandler: drives the SOAP envelope state machine
// (Envelope -> Header -> Body -> method call -> parameters), validating
// element nesting and dispatching parameter/header elements to
// ProcessParams and method elements to DispatchSoapCall.
HRESULT __stdcall startElement(
const wchar_t *wszNamespaceUri,
int cchNamespaceUri,
const wchar_t *wszLocalName,
int cchLocalName,
const wchar_t * wszQName,
int cchQName,
ISAXAttributes *pAttributes)
{
if (m_bNullCheck || m_bCharacters)
{
// make sure elements that aren't supposed to have child elements
// do not have child elements, and where we were expecting
// characters, we got them
return E_FAIL;
}
m_bChildCheck = false;
++m_nDepth;
HRESULT hr = S_OK;
switch (m_dwState)
{
case SOAP_PARAMS: case SOAP_HEADERS:
{
hr = ProcessParams(wszNamespaceUri, cchNamespaceUri, wszLocalName,
cchLocalName, pAttributes);
break;
}
case SOAP_START: case SOAP_ENVELOPE: case SOAP_HEADERS_DONE:
{
// expect one of the SOAP-ENV elements: Envelope, Header, or Body
ULONG nNamespaceHash = AtlSoapHashStr(wszNamespaceUri,
cchNamespaceUri);
if (nNamespaceHash != SOAP_ENV)
{
ATLTRACE( _T("ATLSOAP: CSoapRootHandler::startElement -- incorrect SOAP-ENV namespace.\r\n" ) );
return E_FAIL;
}
ULONG nElementHash = AtlSoapHashStr(wszLocalName, cchLocalName);
if (nElementHash == ENVELOPE &&
IsEqualElement(
sizeof(SOAP_ENVELOPEA)-1, SOAP_ENVELOPEW,
sizeof(SOAPENV_NAMESPACEA)-1, SOAPENV_NAMESPACEW,
cchLocalName, wszLocalName,
cchNamespaceUri, wszNamespaceUri))
{
// Envelope must be first element in package
if (m_dwState != SOAP_START)
{
ATLTRACE( _T("ATLSOAP: CSoapRootHandler::startElement -- invalid SOAP message format: \"Envelope\" in unexpected location.\r\n" ) );
hr = E_FAIL;
}
m_dwState = SOAP_ENVELOPE;
}
else if (nElementHash == HEADER &&
IsEqualElement(sizeof(SOAP_HEADERA)-1, SOAP_HEADERW,
sizeof(SOAPENV_NAMESPACEA)-1, SOAPENV_NAMESPACEW,
cchLocalName, wszLocalName,
cchNamespaceUri, wszNamespaceUri))
{
// Header may only follow Envelope
if (m_dwState != SOAP_ENVELOPE)
{
ATLTRACE( _T("ATLSOAP: CSoapRootHandler::startElement -- invalid SOAP message format: \"Headers\" in unexpected location.\r\n" ) );
hr = E_FAIL;
}
m_dwState = SOAP_HEADERS;
}
else if (nElementHash == BODY &&
IsEqualElement(sizeof(SOAP_BODYA)-1, SOAP_BODYW,
sizeof(SOAPENV_NAMESPACEA)-1, SOAPENV_NAMESPACEW,
cchLocalName, wszLocalName,
cchNamespaceUri, wszNamespaceUri))
{
// Body must appear inside Envelope (after optional Header)
if (m_dwState == SOAP_START)
{
ATLTRACE( _T("ATLSOAP: CSoapRootHandler::startElement -- invalid SOAP message format: \"Body\" in unexpected location.\r\n" ) );
hr = E_FAIL;
}
m_dwState = SOAP_BODY;
}
break;
}
case SOAP_BODY:
{
// first element inside Body names the method being called
hr = DispatchSoapCall(wszNamespaceUri, cchNamespaceUri,
wszLocalName, cchLocalName);
m_dwState = SOAP_PARAMS;
if (SUCCEEDED(hr))
{
// PAD maps treat the method element itself as the parameter
if (GetState().pMap->dwCallFlags & SOAPFLAG_PAD)
{
hr = startElement(wszNamespaceUri, cchNamespaceUri,
wszLocalName, cchLocalName, wszQName, cchQName,
pAttributes);
}
}
break;
}
#ifdef _DEBUG
default:
{
// should never get here -- internal error
ATLASSERT( FALSE );
}
#endif // _DEBUG
}
return hr;
}
// ISAXContentHandler: converts character data into the current ready
// state's value.  Intrinsic types go through AtlSoapGetElementValue;
// enum states are matched against the enum's entry table by hash, length,
// and string comparison.  Characters outside a ready state are ignored.
HRESULT __stdcall characters(
const wchar_t *wszChars,
int cchChars)
{
m_bCharacters = false;
// if it is a ready state, get the value
if (m_stateStack.IsEmpty() == false)
{
ParseState& state = GetState();
if ((state.dwFlags & SOAPFLAG_READYSTATE) &&
((state.dwFlags & SOAPFLAG_SIZEIS)==0)) // don't marshal struct size_is elements -- should be filled in by array marshaling code
{
if ((state.pMap == NULL) || (state.pMap->mapType != SOAPMAP_ENUM))
{
return AtlSoapGetElementValue(wszChars, cchChars,
state.pvElement, (SOAPTYPES)state.pEntry->nVal, GetMemMgr());
}
else
{
// enum
ATLASSERT( state.pMap != NULL );
ATLASSERT( state.pMap->pEntries != NULL );
ULONG nHash = AtlSoapHashStr(wszChars, cchChars);
const _soapmapentry *pEntries = state.pMap->pEntries;
size_t i;
for (i=0; pEntries[i].nHash != 0; i++)
{
if ((nHash == pEntries[i].nHash) &&
(cchChars == pEntries[i].cchField) &&
(!wcsncmp(wszChars, pEntries[i].wszField, cchChars)))
{
break;
}
}
if (pEntries[i].nHash != 0)
{
// store the enum's integer value and count the element
*((int *)state.pvElement) = pEntries[i].nVal;
state.nElement++;
return S_OK;
}
// no matching enum entry found
ATLTRACE( _T("ATLSOAP: CSoapRootHandler::characters -- no matching enum entry found for: %.*ws.\r\n" ), cchChars, wszChars );
return E_FAIL;
}
}
}
// otherwise, ignore
return S_OK;
}
// ISAXContentHandler: pops parse states as elements close, validating
// element counts via CheckEndElement.  Also unwinds the synthetic extra
// state created for sproxy dynamic-array wrappers, and routes Header
// closure to CheckEndHeaders.
HRESULT __stdcall endElement(
const wchar_t * wszNamespaceUri,
int cchNamespaceUri,
const wchar_t * wszLocalName,
int cchLocalName,
const wchar_t * /*wszQName*/,
int /*cchQName*/)
{
// NOTE(review): function-local static -- shared across all handler
// instances and threads, so re-entrancy guarding here is not
// thread-safe; confirm the single-parse-at-a-time assumption.
static bool bDynArrWrapper = false;
if (m_bCharacters)
{
// a ready state expected character data that never arrived
return E_FAIL;
}
m_bNullCheck = false;
if (m_stateStack.IsEmpty() != false)
{
return S_OK;
}
if (!bDynArrWrapper && (m_nState > 1))
{
// mirror the implicit startElement performed for DYNARRWRAPPER
// entries by issuing a matching implicit endElement
ParseState prevState = m_stateStack.GetAt(m_nState - 1);
ParseState curState = m_stateStack.GetAt(m_nState);
if (prevState.dwFlags & SOAPFLAG_DYNARRWRAPPER)
{
bDynArrWrapper = true;
endElement (wszNamespaceUri, cchNamespaceUri, curState.pEntry->wszField,
curState.pEntry->cchField, NULL, 0);
}
}
else
{
bDynArrWrapper = false;
}
--m_nDepth;
const ParseState& state = GetState();
if ((m_dwState == SOAP_HEADERS) && (m_stateStack.GetCount() == 1))
{
return CheckEndHeaders(wszNamespaceUri, cchNamespaceUri, wszLocalName, cchLocalName);
}
if (state.dwFlags & (SOAPFLAG_FIXEDARR | SOAPFLAG_DYNARR))
{
// array item close: pop the per-item ready state first, then the
// array state itself once depth returns to the array's start level
if (state.dwFlags & SOAPFLAG_READYSTATE)
{
PopState();
}
const ParseState& currstate = GetState();
ATLENSURE_RETURN( currstate.pEntry != NULL );
if (m_nDepth == (currstate.nDepth-1))
{
if (S_OK != CheckEndElement(currstate))
{
// invalid number of elements
ATLTRACE( _T("ATLSOAP: CSoapRootHandler::endElement -- invalid number of array elements for array parameter %.*ws.\r\n"),
currstate.pEntry->cchField, currstate.pEntry->wszField );
return E_FAIL;
}
PopState();
}
}
else
{
if (S_OK != CheckEndElement(state))
{
return E_FAIL;
}
PopState();
}
return S_OK;
}
HRESULT SetClientStruct(void *pvParam, int nMapIndex)
{
ATLENSURE_RETURN( pvParam != NULL );
ATLENSURE_RETURN( nMapIndex >= 0 );
// this is the params struct
// store for later use
m_pvParam = pvParam;
const _soapmap ** pEntries = GetHeaderMap();
ATLENSURE_RETURN( pEntries != NULL );
// push header value
return PushState(GetHeaderValue(), pEntries[nMapIndex], NULL, 0, nMapIndex, pEntries[nMapIndex]->nElements);
}
void ResetClientState(bool bFull = false)
{
m_stateStack.RemoveAll();
m_nState = 0;
if (bFull != false)
{
m_dwState = SOAP_START;
m_pvParam = NULL;
}
}
HRESULT CreateReader()
{
return m_spReader.CoCreateInstance(ATLS_SAXXMLREADER_CLSID, NULL, CLSCTX_INPROC_SERVER);
}
HRESULT InitializeSOAP(IServiceProvider *pProvider)
{
HRESULT hr = S_OK;
if (m_spReader.p == NULL)
{
hr = E_FAIL;
if (pProvider != NULL)
{
IAtlMemMgr *pMemMgr = NULL;
hr = pProvider->QueryService(__uuidof(IAtlMemMgr),
__uuidof(IAtlMemMgr), (void **)&pMemMgr);
if ((SUCCEEDED(hr)) && (pMemMgr != NULL))
{
SetMemMgr(pMemMgr);
}
hr = pProvider->QueryService(__uuidof(ISAXXMLReader),
__uuidof(ISAXXMLReader), (void **)&m_spReader);
}
if (FAILED(hr))
{
hr = CreateReader();
}
}
if (SUCCEEDED(hr))
{
hr = m_spReader->putContentHandler(this);
}
#ifdef _DEBUG
else
{
ATLTRACE( _T("ATLSOAP: CSoapRootHandler::InitializeSOAP -- failed to get SAXXMLReader.\r\n" ) );
}
#endif // _DEBUG
return hr;
}
void UninitializeSOAP()
{
if (m_spReader.p != NULL)
{
m_spReader->putContentHandler(NULL);
m_spReader.Release();
}
}
virtual HRESULT DispatchSoapCall(const wchar_t *wszNamespaceUri,
int cchNamespaceUri, const wchar_t *wszLocalName,
int cchLocalName)
{
HRESULT hr = S_OK;
if (m_stateStack.IsEmpty() == false)
{
ATLASSUME( m_stateStack[0].pMap != NULL );
// check to see if all required and non-nullable SOAP headers were sent
if (m_stateStack[0].pMap->mapType == SOAPMAP_HEADER)
{
hr = CheckSoapHeaders(m_stateStack[0]);
}
if (SUCCEEDED(hr))
{
hr = SetSoapMapFromName(wszLocalName, cchLocalName,
wszNamespaceUri, cchNamespaceUri);
}
}
else
{
// get the appropriate function map
hr = SetSoapMapFromName(wszLocalName, cchLocalName,
wszNamespaceUri, cchNamespaceUri);
if (SUCCEEDED(hr))
{
// set the SOAP Header map for the function
ATLASSUME( m_stateStack.IsEmpty() == false );
const _soapmap **ppHeaderMap = GetHeaderMap();
ATLENSURE_RETURN( ppHeaderMap != NULL );
// create a temporary parse state for checking headers
ParseState state;
state.pMap = ppHeaderMap[m_stateStack[0].nAllocSize];
ATLENSURE_RETURN( state.pMap != NULL );
// check to see if all required and non-nullable SOAP headers were sent
hr = CheckSoapHeaders(state);
}
}
return hr;
}
virtual HRESULT BeginParse(IStream *pStream)
{
ATLASSERT( pStream != NULL );
CComVariant varStream;
varStream = static_cast<IUnknown*>(pStream);
HRESULT hr = m_spReader->parse(varStream);
if (SUCCEEDED(hr))
{
if (m_refMap.GetCount() != 0)
{
hr = E_FAIL;
}
}
return hr;
}
HRESULT CallFunctionInternal()
{
HRESULT hr = E_FAIL;
const ParseState& state = m_stateStack[0];
hr = CallFunction(
state.pvElement,
state.pMap->wszName,
state.pMap->cchWName,
state.nAllocSize);
return hr;
}
virtual HRESULT GenerateResponse(IWriteStream *pStream)
{
ATLASSUME( m_stateStack.IsEmpty() == false );
ATLASSUME( m_stateStack[0].pMap != NULL );
ATLASSUME( m_stateStack[0].pvElement != NULL );
const ParseState& state = m_stateStack[0];
const _soapmap *pHeaderMap = NULL;
if (m_bClient == false)
{
const _soapmap **ppHeaderMap = GetHeaderMap();
if (ppHeaderMap != NULL)
{
pHeaderMap = ppHeaderMap[state.nAllocSize];
}
}
else
{
pHeaderMap = state.pMap;
}
const _soapmap *pFuncMap = NULL;
if (m_bClient == false)
{
pFuncMap = state.pMap;
}
else
{
const _soapmap **ppFuncMap = GetFunctionMap();
ATLENSURE_RETURN( ppFuncMap != NULL );
pFuncMap = ppFuncMap[state.nAllocSize];
}
ATLENSURE_RETURN( pFuncMap != NULL );
CRpcEncodedGenerator rpcGen;
CPADGenerator padGen;
CPIDGenerator pidGen;
CResponseGenerator *pGenerator = NULL;
if ((pFuncMap->dwCallFlags & (SOAPFLAG_RPC | SOAPFLAG_ENCODED)) == (SOAPFLAG_RPC | SOAPFLAG_ENCODED))
{
pGenerator = &rpcGen;
}
else if (pFuncMap->dwCallFlags & SOAPFLAG_PID)
{
ATLASSERT( (pFuncMap->dwCallFlags & (SOAPFLAG_DOCUMENT | SOAPFLAG_LITERAL)) == (SOAPFLAG_DOCUMENT | SOAPFLAG_LITERAL) );
pGenerator = &pidGen;
}
else
{
ATLASSERT( (pFuncMap->dwCallFlags & (SOAPFLAG_DOCUMENT | SOAPFLAG_LITERAL)) == (SOAPFLAG_DOCUMENT | SOAPFLAG_LITERAL) );
ATLASSERT( pFuncMap->dwCallFlags & SOAPFLAG_PAD );
pGenerator = &padGen;
}
HRESULT hr = pGenerator->StartEnvelope(pStream);
if (SUCCEEDED(hr))
{
// generate headers if necessary
hr = GenerateHeaders(pGenerator, pHeaderMap, pStream);
if (SUCCEEDED(hr))
{
hr = pGenerator->StartBody(pStream);
if (SUCCEEDED(hr))
{
hr = GenerateResponseHelper(pGenerator, pFuncMap, m_pvParam, pStream, true);
if (SUCCEEDED(hr))
{
hr = pGenerator->EndBody(pStream);
if (SUCCEEDED(hr))
{
hr = pGenerator->EndEnvelope(pStream);
}
}
}
}
}
return hr;
}
virtual void Cleanup()
{
// cleanup headers
CleanupHeaders();
if ((m_stateStack.IsEmpty() == false) && (m_pvParam != NULL))
{
const _soapmap **ppFuncMap = GetFunctionMap();
ATLENSURE( ppFuncMap != NULL );
const _soapmap *pFuncMap = ppFuncMap[m_stateStack[0].nAllocSize];
ATLENSURE( pFuncMap != NULL );
CleanupHelper(pFuncMap, m_pvParam);
if (m_bClient == false)
{
m_pMemMgr->Free(m_pvParam);
m_stateStack.RemoveAll();
}
}
}
virtual void CleanupHeaders()
{
if (m_stateStack.IsEmpty() == false)
{
const _soapmap **ppHeaderMap = GetHeaderMap();
ATLENSURE( ppHeaderMap != NULL );
const _soapmap *pHeaderMap = ppHeaderMap[m_stateStack[0].nAllocSize];
ATLENSURE( pHeaderMap != NULL );
CleanupHelper(pHeaderMap, GetHeaderValue());
}
}
void SetClient(bool bClient)
{
m_bClient = bClient;
}
}; // class CSoapRootHandler
#define DECLARE_XSD_ENTRY( __name ) \
{ L ## __name, __name, sizeof(__name)-1 },
__declspec(selectany) const CSoapRootHandler::XSDEntry CSoapRootHandler::s_xsdNames[] =
{
DECLARE_XSD_ENTRY("string")
DECLARE_XSD_ENTRY("boolean")
DECLARE_XSD_ENTRY("float")
DECLARE_XSD_ENTRY("double")
DECLARE_XSD_ENTRY("decimal")
DECLARE_XSD_ENTRY("duration")
DECLARE_XSD_ENTRY("hexBinary")
DECLARE_XSD_ENTRY("base64Binary")
DECLARE_XSD_ENTRY("anyURI")
DECLARE_XSD_ENTRY("ID")
DECLARE_XSD_ENTRY("IDREF")
DECLARE_XSD_ENTRY("ENTITY")
DECLARE_XSD_ENTRY("NOTATION")
DECLARE_XSD_ENTRY("QName")
DECLARE_XSD_ENTRY("normalizedString")
DECLARE_XSD_ENTRY("token")
DECLARE_XSD_ENTRY("language")
DECLARE_XSD_ENTRY("IDREFS")
DECLARE_XSD_ENTRY("ENTITIES")
DECLARE_XSD_ENTRY("NMTOKEN")
DECLARE_XSD_ENTRY("NMTOKENS")
DECLARE_XSD_ENTRY("Name")
DECLARE_XSD_ENTRY("NCName")
DECLARE_XSD_ENTRY("integer")
DECLARE_XSD_ENTRY("nonPositiveInteger")
DECLARE_XSD_ENTRY("negativeInteger")
DECLARE_XSD_ENTRY("long")
DECLARE_XSD_ENTRY("int")
DECLARE_XSD_ENTRY("short")
DECLARE_XSD_ENTRY("byte")
DECLARE_XSD_ENTRY("nonNegativeInteger")
DECLARE_XSD_ENTRY("unsignedLong")
DECLARE_XSD_ENTRY("unsignedInt")
DECLARE_XSD_ENTRY("unsignedShort")
DECLARE_XSD_ENTRY("unsignedByte")
DECLARE_XSD_ENTRY("positiveInteger")
DECLARE_XSD_ENTRY("dateTime")
DECLARE_XSD_ENTRY("time")
DECLARE_XSD_ENTRY("date")
DECLARE_XSD_ENTRY("gMonth")
DECLARE_XSD_ENTRY("gYearMonth")
DECLARE_XSD_ENTRY("gYear")
DECLARE_XSD_ENTRY("gMonthDay")
DECLARE_XSD_ENTRY("gDay")
};
__declspec(selectany) CCRTHeap CSoapRootHandler::m_crtHeap;
template <typename THandler>
class CSoapHandler :
public CSoapRootHandler,
public CComObjectRootEx<CComMultiThreadModel>,
public IRequestHandlerImpl<THandler>
{
protected:
HTTP_CODE m_hcErr;
CHttpResponse *m_pHttpResponse;
// heap for SOAP requests
CWin32Heap m_heap;
// default heap is COM heap (SOAP Servers can double as COM objects)
CComHeap m_comHeap;
public:
BEGIN_COM_MAP(CSoapHandler<THandler>)
COM_INTERFACE_ENTRY(ISAXContentHandler)
COM_INTERFACE_ENTRY(IRequestHandler)
END_COM_MAP()
CSoapHandler()
:m_pHttpResponse(NULL), m_hcErr(HTTP_SUCCESS)
{
SetMemMgr(&m_comHeap);
}
void SetHttpError(HTTP_CODE hcErr)
{
m_hcErr = hcErr;
}
HRESULT SoapFault(
SOAP_ERROR_CODE errCode,
const wchar_t *wszDetail,
int cchDetail)
{
ATLASSUME( m_pHttpResponse != NULL );
SetHttpError(AtlsHttpError(500, SUBERR_NO_PROCESS));
m_pHttpResponse->ClearHeaders();
m_pHttpResponse->ClearContent();
m_pHttpResponse->SetContentType("text/xml");
m_pHttpResponse->SetStatusCode(500);
CSoapFault fault;
if (wszDetail != NULL)
{
if (cchDetail < 0)
{
cchDetail = (int) wcslen(wszDetail);
}
_ATLTRY
{
fault.m_strDetail.SetString(wszDetail, cchDetail);
}
_ATLCATCHALL()
{
ATLTRACE( _T("CSoapHandler::SoapFault -- out of memory.\r\n" ) );
return E_OUTOFMEMORY;
}
}
fault.m_soapErrCode = errCode;
fault.GenerateFault(m_pHttpResponse);
return S_OK;
}
HTTP_CODE InitializeHandler(AtlServerRequest *pRequestInfo, IServiceProvider *pProvider)
{
m_hcErr = IRequestHandlerImpl<THandler>::InitializeHandler(pRequestInfo, pProvider);
if (m_hcErr == HTTP_SUCCESS)
{
HRESULT hr = InitializeSOAP(m_spServiceProvider);
if (SUCCEEDED(hr))
{
// try to use the per-thread heap
CIsapiWorker *pWorker = pRequestInfo->pExtension->GetThreadWorker();
if (pWorker != NULL)
{
m_heap.Attach(pWorker->m_hHeap, false);
SetMemMgr(&m_heap);
}
return m_hcErr;
}
}
// some initialization failure
CHttpResponse HttpResponse(pRequestInfo->pServerContext);
m_pHttpResponse = &HttpResponse;
SoapFault(SOAP_E_SERVER, NULL, 0);
m_pHttpResponse = NULL;
return m_hcErr;
}
HTTP_CODE HandleRequest(AtlServerRequest *pRequestInfo, IServiceProvider * /*pProvider*/)
{
// SOAPACTION header is required per the SOAP 1.1
// mainly so firewalls can filter on it.
char szBuf[ATL_URL_MAX_URL_LENGTH+1];
szBuf[0] = '\0';
DWORD dwLen = ATL_URL_MAX_URL_LENGTH;
if ( m_spServerContext->GetServerVariable("HTTP_SOAPACTION", szBuf, &dwLen) != FALSE )
{
if ( dwLen >= 2 )
{
// drop the last "
szBuf[dwLen-2] = '\0';
char *szMethod = strrchr(szBuf, '#');
if (szMethod != NULL)
{
_ATLTRY
{
// ignore return code here
SetSoapMapFromName(CA2W( szMethod+1 ), -1, GetNamespaceUri(), -1, true);
}
_ATLCATCHALL()
{
return AtlsHttpError(500, ISE_SUBERR_OUTOFMEM);
}
}
}
}
else
{
// SOAP requestion that use the HTTP transport
// must have a SOAPACTION header.
return HTTP_ERROR(500, ISE_SUBERR_SOAPNOSOAPACTION);
}
// set the header map
CHttpResponse HttpResponse(pRequestInfo->pServerContext);
m_pHttpResponse = &HttpResponse;
CStreamOnServerContext s(pRequestInfo->pServerContext);
#ifdef _DEBUG
CSAXSoapErrorHandler err;
GetReader()->putErrorHandler(&err);
#endif // _DEBUG
HRESULT hr = BeginParse(&s);
#ifdef _DEBUG
// release the error handler
GetReader()->putErrorHandler(NULL);
#endif // _DEBUG
if (FAILED(hr))
{
Cleanup();
if (m_hcErr == HTTP_SUCCESS)
{
SoapFault(SOAP_E_CLIENT, NULL, NULL);
}
return m_hcErr;
}
_ATLTRY
{
hr = CallFunctionInternal();
}
_ATLCATCHALL()
{
// cleanup before propagating user exception
Cleanup();
HttpResponse.Detach();
_ATLRETHROW;
}
if (FAILED(hr))
{
Cleanup();
HttpResponse.ClearHeaders();
HttpResponse.ClearContent();
if (m_hcErr != HTTP_SUCCESS)
{
HttpResponse.SetStatusCode(HTTP_ERROR_CODE(m_hcErr));
return HTTP_SUCCESS_NO_PROCESS;
}
HttpResponse.SetStatusCode(500);
GenerateAppError(&HttpResponse, hr);
return AtlsHttpError(500, SUBERR_NO_PROCESS);
}
HttpResponse.SetContentType("text/xml");
hr = GenerateResponse(&HttpResponse);
Cleanup();
if (FAILED(hr))
{
SoapFault(SOAP_E_SERVER, NULL, 0);
return m_hcErr;
}
return HTTP_SUCCESS;
}
virtual ATL_NOINLINE HRESULT GenerateAppError(IWriteStream *pStream, HRESULT hr)
{
if (pStream == NULL)
{
return E_INVALIDARG;
}
LPWSTR pwszMessage = NULL;
DWORD dwLen = ::FormatMessageW(FORMAT_MESSAGE_ALLOCATE_BUFFER|FORMAT_MESSAGE_FROM_SYSTEM,
NULL, hr, 0, (LPWSTR) &pwszMessage, 0, NULL);
if (dwLen == 0)
{
pwszMessage = L"Application Error";
}
hr = SoapFault(SOAP_E_SERVER, pwszMessage, dwLen ? dwLen : -1);
if (dwLen != 0)
{
::LocalFree(pwszMessage);
}
return hr;
}
void UninitializeHandler()
{
UninitializeSOAP();
}
};
// client error states
enum SOAPCLIENT_ERROR
{
SOAPCLIENT_SUCCESS=0, // everything succeeded
SOAPCLIENT_INITIALIZE_ERROR, // initialization failed -- most likely an MSXML installation problem
SOAPCLIENT_OUTOFMEMORY, // out of memory
SOAPCLIENT_GENERATE_ERROR, // failed in generating the response
SOAPCLIENT_CONNECT_ERROR, // failed connecting to server
SOAPCLIENT_SEND_ERROR, // failed in sending message
SOAPCLIENT_SERVER_ERROR, // server error
SOAPCLIENT_SOAPFAULT, // a SOAP Fault was returned by the server
SOAPCLIENT_PARSEFAULT_ERROR, // failed in parsing SOAP fault
SOAPCLIENT_READ_ERROR, // failed in reading response
SOAPCLIENT_PARSE_ERROR // failed in parsing response
};
template <typename TSocketClass = ZEvtSyncSocket>
class CSoapSocketClientT
{
private:
CUrl m_url;
CWriteStreamOnCString m_writeStream;
CReadStreamOnSocket<TSocketClass> m_readStream;
DWORD m_dwTimeout;
SOAPCLIENT_ERROR m_errorState;
protected:
virtual HRESULT GetClientReader(ISAXXMLReader **pReader)
{
if (pReader == NULL)
{
return E_POINTER;
}
*pReader = NULL;
CComPtr<ISAXXMLReader> spReader;
HRESULT hr = spReader.CoCreateInstance(ATLS_SAXXMLREADER_CLSID, NULL, CLSCTX_INPROC_SERVER);
if (SUCCEEDED(hr))
{
*pReader = spReader.Detach();
}
return hr;
}
public:
// note : not shared across stock client implementations
CAtlHttpClientT<TSocketClass> m_socket;
CSoapFault m_fault;
// constructor
CSoapSocketClientT(LPCTSTR szUrl)
: m_dwTimeout(0), m_errorState(SOAPCLIENT_SUCCESS)
{
TCHAR szTmp[ATL_URL_MAX_URL_LENGTH];
if(AtlEscapeUrl(szUrl,szTmp,0,ATL_URL_MAX_URL_LENGTH-1,ATL_URL_BROWSER_MODE))
m_url.CrackUrl(szTmp);
}
CSoapSocketClientT(LPCTSTR szServer, LPCTSTR szUri, ATL_URL_PORT nPort=80)
: m_dwTimeout(0), m_errorState(SOAPCLIENT_SUCCESS)
{
ATLASSERT( szServer != NULL );
ATLASSERT( szUri != NULL );
m_url.SetUrlPath(szUri);
m_url.SetHostName(szServer);
m_url.SetPortNumber(nPort);
}
~CSoapSocketClientT()
{
CleanupClient();
}
SOAPCLIENT_ERROR GetClientError()
{
return m_errorState;
}
void SetClientError(SOAPCLIENT_ERROR errorState)
{
m_errorState = errorState;
}
IWriteStream * GetWriteStream()
{
return &m_writeStream;
}
HRESULT GetReadStream(IStream **ppStream)
{
if (ppStream == NULL)
{
return E_POINTER;
}
*ppStream = &m_readStream;
return S_OK;
}
void CleanupClient()
{
m_writeStream.Cleanup();
m_fault.Clear();
SetClientError(SOAPCLIENT_SUCCESS);
}
HRESULT SendRequest(LPCTSTR szAction)
{
HRESULT hr = E_FAIL;
_ATLTRY
{
// create extra headers to send with request
CFixedStringT<CString, 256> strExtraHeaders(szAction);
strExtraHeaders.Append(_T("Accept: text/xml\r\n"), sizeof("Accept: text/xml\r\n")-1);
CAtlNavigateData navData;
navData.SetMethod(ATL_HTTP_METHOD_POST);
navData.SetPort(m_url.GetPortNumber());
navData.SetExtraHeaders(strExtraHeaders);
navData.SetPostData((LPBYTE)(LPCSTR) m_writeStream.m_str, m_writeStream.m_str.GetLength(), _T("text/xml; charset=utf-8"));
ATLSOAP_TRACE( (LPBYTE)(LPCSTR)m_writeStream.m_str, m_writeStream.m_str.GetLength() );
if (m_dwTimeout != 0)
{
navData.SetSocketTimeout(m_dwTimeout);
}
if (m_socket.Navigate(&m_url, &navData) != false)
{
if (GetStatusCode() == 200)
{
hr = (m_readStream.Init(&m_socket) != FALSE ? S_OK : E_FAIL);
if (hr != S_OK)
{
SetClientError(SOAPCLIENT_READ_ERROR);
}
}
else if (GetStatusCode() == 202)
{
// for one-way methods
hr = S_OK;
}
else
{
SetClientError(SOAPCLIENT_SERVER_ERROR);
}
}
else if (GetStatusCode() == 500)
{
SetClientError(SOAPCLIENT_SOAPFAULT);
// if returned 500, get the SOAP fault
if (m_readStream.Init(&m_socket) != FALSE)
{
CComPtr<ISAXXMLReader> spReader;
if (SUCCEEDED(GetClientReader(&spReader)))
{
CComPtr<IStream> spReadStream;
if (SUCCEEDED(GetReadStream(&spReadStream)))
{
if (FAILED(m_fault.ParseFault(spReadStream, spReader)))
{
SetClientError(SOAPCLIENT_PARSEFAULT_ERROR);
}
}
}
}
}
else
{
SetClientError(SOAPCLIENT_SEND_ERROR);
}
}
_ATLCATCHALL()
{
hr = E_FAIL;
}
return hr;
}
HRESULT SetUrl(LPCTSTR szUrl)
{
TCHAR szTmp[ATL_URL_MAX_URL_LENGTH];
if(!AtlEscapeUrl(szUrl,szTmp,0,ATL_URL_MAX_URL_LENGTH-1,ATL_URL_BROWSER_MODE))
{
return E_FAIL;
}
return (m_url.CrackUrl(szTmp) != FALSE) ? S_OK : E_FAIL;
}
HRESULT GetUrl(__out_ecount_part_z(*pdwLen, *pdwLen) LPTSTR szUrl, __inout LPDWORD pdwLen)
{
if ((szUrl == NULL) || (pdwLen == NULL))
{
return E_INVALIDARG;
}
return (m_url.CreateUrl(szUrl, pdwLen) != FALSE) ? S_OK : E_FAIL;
}
HRESULT SetProxy(LPCTSTR szProxy = NULL, short nProxyPort = 80)
{
BOOL bRet = m_socket.SetProxy(szProxy, nProxyPort);
return (bRet != FALSE) ? S_OK : E_FAIL;
}
void SetTimeout(DWORD dwTimeout)
{
m_dwTimeout = dwTimeout;
}
int GetStatusCode()
{
return m_socket.GetStatus();
}
}; // CSoapSocketClientT
#ifndef ATLSOAP_NOWININET
class CReadStreamOnInet : public IStreamImpl
{
public:
HRESULT __stdcall QueryInterface(REFIID riid, void **ppv)
{
if (ppv == NULL)
{
return E_POINTER;
}
*ppv = NULL;
if (InlineIsEqualGUID(riid, IID_IUnknown) ||
InlineIsEqualGUID(riid, IID_IStream) ||
InlineIsEqualGUID(riid, IID_ISequentialStream))
{
*ppv = static_cast<IStream *>(this);
return S_OK;
}
return E_NOINTERFACE;
}
ULONG __stdcall AddRef()
{
return 1;
}
ULONG __stdcall Release()
{
return 1;
}
private:
HINTERNET m_hFile;
public:
CReadStreamOnInet()
:m_hFile(NULL)
{
}
void Init(HINTERNET hFile)
{
m_hFile = hFile;
}
HRESULT STDMETHODCALLTYPE Read(void *pDest, ULONG dwMaxLen, ULONG *pdwRead)
{
BOOL bRet = InternetReadFile(m_hFile, pDest, dwMaxLen, pdwRead);
return (bRet != FALSE) ? S_OK : E_FAIL;
}
}; // CStreamOnInet
class CSoapWininetClient
{
private:
CUrl m_url;
CWriteStreamOnCString m_writeStream;
CReadStreamOnInet m_readStream;
CString m_strProxy;
DWORD m_dwTimeout;
CFixedStringT<CString, ATL_URL_MAX_URL_LENGTH+1> m_strUrl;
SOAPCLIENT_ERROR m_errorState;
void CloseAll()
{
if (m_hRequest != NULL)
{
InternetCloseHandle(m_hRequest);
m_hRequest = NULL;
}
if (m_hConnection != NULL)
{
InternetCloseHandle(m_hConnection);
m_hConnection = NULL;
}
if (m_hInternet != NULL)
{
InternetCloseHandle(m_hInternet);
m_hInternet = NULL;
}
}
HRESULT ConnectToServer()
{
if (m_hConnection != NULL)
{
return S_OK;
}
m_hInternet = InternetOpen(
ATLSOAPINET_CLIENT,
m_strProxy.GetLength() ? (INTERNET_OPEN_TYPE_PRECONFIG | INTERNET_OPEN_TYPE_PROXY) : INTERNET_OPEN_TYPE_PRECONFIG,
m_strProxy.GetLength() ? (LPCTSTR) m_strProxy : NULL,
NULL, 0);
if (m_hInternet != NULL)
{
if (m_dwTimeout != 0)
{
InternetSetOption(m_hInternet, INTERNET_OPTION_CONNECT_TIMEOUT,
&m_dwTimeout, sizeof(m_dwTimeout));
InternetSetOption(m_hInternet, INTERNET_OPTION_RECEIVE_TIMEOUT,
&m_dwTimeout, sizeof(m_dwTimeout));
InternetSetOption(m_hInternet, INTERNET_OPTION_SEND_TIMEOUT,
&m_dwTimeout, sizeof(m_dwTimeout));
}
m_hConnection = InternetConnect(m_hInternet, m_url.GetHostName(),
(INTERNET_PORT) m_url.GetPortNumber(), NULL, NULL,
INTERNET_SERVICE_HTTP, 0, NULL);
if (m_hConnection != NULL)
{
return S_OK;
}
}
CloseAll();
return E_FAIL;
}
protected:
virtual HRESULT GetClientReader(ISAXXMLReader **pReader)
{
if (pReader == NULL)
{
return E_POINTER;
}
*pReader = NULL;
CComPtr<ISAXXMLReader> spReader;
HRESULT hr = spReader.CoCreateInstance(ATLS_SAXXMLREADER_CLSID, NULL, CLSCTX_INPROC_SERVER);
if (SUCCEEDED(hr))
{
*pReader = spReader.Detach();
}
return hr;
}
public:
// note : not shared across stock client implementations
HINTERNET m_hInternet;
HINTERNET m_hConnection;
HINTERNET m_hRequest;
CSoapFault m_fault;
CSoapWininetClient(LPCTSTR szUrl)
:m_hInternet(NULL), m_hConnection(NULL), m_hRequest(NULL), m_dwTimeout(0), m_errorState(SOAPCLIENT_SUCCESS)
{
TCHAR szTmp[ATL_URL_MAX_URL_LENGTH];
if(AtlEscapeUrl(szUrl,szTmp,0,ATL_URL_MAX_URL_LENGTH-1,ATL_URL_BROWSER_MODE))
{
if (m_url.CrackUrl(szTmp) != FALSE)
{
SetProxy();
_ATLTRY
{
m_strUrl.SetString(m_url.GetUrlPath(), m_url.GetUrlPathLength());
m_strUrl.Append(m_url.GetExtraInfo(), m_url.GetExtraInfoLength());
}
_ATLCATCHALL()
{
}
}
}
}
CSoapWininetClient(LPCTSTR szServer, LPCTSTR szUri, short nPort=80)
:m_hInternet(NULL), m_hConnection(NULL), m_hRequest(NULL), m_dwTimeout(0), m_errorState(SOAPCLIENT_SUCCESS)
{
if (m_url.SetHostName(szServer) != FALSE)
{
if (m_url.SetUrlPath(szUri) != FALSE)
{
if (m_url.SetPortNumber((ATL_URL_PORT) nPort) != FALSE)
{
_ATLTRY
{
m_strUrl.SetString(m_url.GetUrlPath(), m_url.GetUrlPathLength());
m_strUrl.Append(m_url.GetExtraInfo(), m_url.GetExtraInfoLength());
}
_ATLCATCHALL()
{
}
}
}
}
}
virtual ~CSoapWininetClient()
{
CleanupClient();
CloseAll();
}
SOAPCLIENT_ERROR GetClientError()
{
return m_errorState;
}
void SetClientError(SOAPCLIENT_ERROR errorState)
{
m_errorState = errorState;
}
IWriteStream * GetWriteStream()
{
return &m_writeStream;
}
HRESULT GetReadStream(IStream **ppStream)
{
if (ppStream == NULL)
{
return E_POINTER;
}
*ppStream = &m_readStream;
return S_OK;
}
void CleanupClient()
{
m_writeStream.Cleanup();
if (m_hRequest != NULL)
{
InternetCloseHandle(m_hRequest);
m_hRequest = NULL;
}
m_fault.Clear();
SetClientError(SOAPCLIENT_SUCCESS);
}
HRESULT SendRequest(LPCTSTR szAction)
{
if (ConnectToServer() != S_OK)
{
SetClientError(SOAPCLIENT_CONNECT_ERROR);
return E_FAIL;
}
CString strHeaders;
_ATLTRY
{
strHeaders.Append(szAction);
strHeaders.Append(_T("Content-Type: text/xml; charset=utf-8\r\n"));
}
_ATLCATCHALL()
{
return E_OUTOFMEMORY;
}
static LPCTSTR s_szAcceptTypes[] = { _T("text/*"), NULL };
m_hRequest = HttpOpenRequest(m_hConnection, _T("POST"),
m_strUrl, _T("HTTP/1.0"), NULL,
s_szAcceptTypes,
INTERNET_FLAG_NO_UI | INTERNET_FLAG_KEEP_CONNECTION | ((m_url.GetScheme() == ATL_URL_SCHEME_HTTPS) ? INTERNET_FLAG_SECURE : 0)
, NULL);
if (m_hRequest != NULL)
{
if (FALSE != HttpSendRequest(m_hRequest, strHeaders, (DWORD) strHeaders.GetLength(),
(void *)(LPCSTR)m_writeStream.m_str, m_writeStream.m_str.GetLength()))
{
m_readStream.Init(m_hRequest);
if (GetStatusCode() != HTTP_STATUS_SERVER_ERROR)
{
return S_OK;
}
else
{
SetClientError(SOAPCLIENT_SOAPFAULT);
CComPtr<ISAXXMLReader> spReader;
if (SUCCEEDED(GetClientReader(&spReader)))
{
CComPtr<IStream> spReadStream;
if (SUCCEEDED(GetReadStream(&spReadStream)))
{
if (FAILED(m_fault.ParseFault(spReadStream, spReader)))
{
SetClientError(SOAPCLIENT_PARSEFAULT_ERROR);
}
}
}
}
}
}
else
{
SetClientError(SOAPCLIENT_SEND_ERROR);
}
return E_FAIL;
}
HRESULT SetUrl(LPCTSTR szUrl)
{
CloseAll();
TCHAR szTmp[ATL_URL_MAX_URL_LENGTH];
if(!AtlEscapeUrl(szUrl,szTmp,0,ATL_URL_MAX_URL_LENGTH-1,ATL_URL_BROWSER_MODE))
{
return E_FAIL;
}
if (m_url.CrackUrl(szTmp) != FALSE)
{
_ATLTRY
{
m_strUrl.SetString(m_url.GetUrlPath(), m_url.GetUrlPathLength());
m_strUrl.Append(m_url.GetExtraInfo(), m_url.GetExtraInfoLength());
}
_ATLCATCHALL()
{
return E_OUTOFMEMORY;
}
return S_OK;
}
return E_FAIL;
}
HRESULT GetUrl(LPTSTR szUrl, LPDWORD pdwLen)
{
if ((szUrl == NULL) || (pdwLen == NULL))
{
return E_INVALIDARG;
}
return (m_url.CreateUrl(szUrl, pdwLen) != FALSE) ? S_OK : E_FAIL;
}
HRESULT SetProxy(LPCTSTR szProxy = NULL, short nProxyPort = 80)
{
_ATLTRY
{
if (szProxy && szProxy[0])
{
m_strProxy.Format(_T("http=http://%s:%d https=http://%s:%d"), szProxy, nProxyPort, szProxy, nProxyPort);
}
else
{
m_strProxy.Empty();
}
}
_ATLCATCHALL()
{
return E_OUTOFMEMORY;
}
return S_OK;
}
void SetTimeout(DWORD dwTimeout)
{
m_dwTimeout = dwTimeout;
}
int GetStatusCode()
{
DWORD dwLen = 255;
TCHAR szBuf[256];
if (HttpQueryInfo(m_hRequest, HTTP_QUERY_STATUS_CODE, szBuf, &dwLen, NULL))
{
szBuf[dwLen] = '\0';
return _ttoi(szBuf);
}
return 0;
}
}; // CSoapWininetClient
#endif
#ifndef ATLSOAP_NOMSXML_INET
class CSoapMSXMLInetClient
{
private:
CUrl m_url;
CWriteStreamOnCString m_writeStream;
DWORD m_dwTimeout;
SOAPCLIENT_ERROR m_errorState;
HRESULT ConnectToServer()
{
TCHAR szURL[ATL_URL_MAX_URL_LENGTH];
DWORD dwLen = ATL_URL_MAX_URL_LENGTH;
HRESULT hr = E_FAIL;
if (m_spHttpRequest)
return S_OK;
if (!m_url.CreateUrl(szURL, &dwLen))
return E_FAIL;
hr = m_spHttpRequest.CoCreateInstance(__uuidof(ServerXMLHTTP30));
if (hr != S_OK)
return hr;
CComVariant vEmpty;
hr = m_spHttpRequest->open( CComBSTR(L"POST"),
CComBSTR(szURL),
CComVariant(VARIANT_BOOL(VARIANT_FALSE)),
vEmpty,
vEmpty );
if (hr != S_OK)
{
m_spHttpRequest.Release();
return hr;
}
return S_OK;
}
protected:
virtual HRESULT GetClientReader(ISAXXMLReader **pReader)
{
if (pReader == NULL)
{
return E_POINTER;
}
*pReader = NULL;
CComPtr<ISAXXMLReader> spReader;
HRESULT hr = spReader.CoCreateInstance(ATLS_SAXXMLREADER_CLSID, NULL, CLSCTX_INPROC_SERVER);
if (SUCCEEDED(hr))
{
*pReader = spReader.Detach();
}
return hr;
}
public:
// note : not shared across stock client implementations
CComPtr<IServerXMLHTTPRequest> m_spHttpRequest;
CSoapFault m_fault;
CSoapMSXMLInetClient(LPCTSTR szUrl)
:m_dwTimeout(0), m_errorState(SOAPCLIENT_SUCCESS)
{
m_url.CrackUrl(szUrl);
}
CSoapMSXMLInetClient(LPCTSTR szServer, LPCTSTR szUri, short nPort=80)
: m_dwTimeout(0), m_errorState(SOAPCLIENT_SUCCESS)
{
m_url.SetHostName(szServer);
m_url.SetUrlPath(szUri);
m_url.SetPortNumber((ATL_URL_PORT) nPort);
}
virtual ~CSoapMSXMLInetClient()
{
CleanupClient();
}
SOAPCLIENT_ERROR GetClientError()
{
return m_errorState;
}
void SetClientError(SOAPCLIENT_ERROR errorState)
{
m_errorState = errorState;
}
IWriteStream * GetWriteStream()
{
return &m_writeStream;
}
HRESULT GetReadStream(IStream **ppStream)
{
if (ppStream == NULL)
{
return E_POINTER;
}
*ppStream = NULL;
HRESULT hr = E_FAIL;
if (m_spHttpRequest)
{
VARIANT vResponseStream;
VariantInit(&vResponseStream);
hr = m_spHttpRequest->get_responseStream(&vResponseStream);
if (S_OK == hr)
{
hr = E_FAIL;
if ((vResponseStream.vt == VT_UNKNOWN) && (vResponseStream.punkVal != NULL))
{
// we return the refcount with the pointer!
hr = vResponseStream.punkVal->QueryInterface(__uuidof(IStream), (void **)ppStream);
}
else
{
SetClientError(SOAPCLIENT_READ_ERROR);
}
}
VariantClear(&vResponseStream);
}
return hr;
}
void CleanupClient()
{
m_writeStream.Cleanup();
m_spHttpRequest.Release();
m_fault.Clear();
SetClientError(SOAPCLIENT_SUCCESS);
}
HRESULT SendRequest(LPCTSTR szAction)
{
if (ConnectToServer() != S_OK)
{
SetClientError(SOAPCLIENT_CONNECT_ERROR);
return E_FAIL;
}
// set the action header
LPCTSTR szColon = _tcschr(szAction, _T(':'));
if (szColon != NULL)
{
do
{
szColon++;
} while (_istspace(static_cast<unsigned char>(*szColon)));
if (FAILED(m_spHttpRequest->setRequestHeader(
CComBSTR( L"SOAPAction" ), CComBSTR( szColon ))))
{
SetClientError(SOAPCLIENT_SEND_ERROR);
return E_FAIL;
}
} // if SOAPAction header not properly formed, attempt to send anyway
if (FAILED(m_spHttpRequest->setRequestHeader(CComBSTR( L"Content-Type" ), CComBSTR(L"text/xml; charset=utf-8"))))
{
SetClientError(SOAPCLIENT_SEND_ERROR);
return E_FAIL;
}
// set timeout
if (m_dwTimeout != 0)
{
long nTimeout = (long) m_dwTimeout;
m_spHttpRequest->setTimeouts(nTimeout, nTimeout, nTimeout, nTimeout);
// reset timeout
m_dwTimeout = 0;
}
CComVariant vBody(m_writeStream.m_str);
HRESULT hr = m_spHttpRequest->send(vBody);
if ((SUCCEEDED(hr)) && (GetStatusCode() == 500))
{
hr = E_FAIL;
CComPtr<ISAXXMLReader> spReader;
if (SUCCEEDED(GetClientReader(&spReader)))
{
SetClientError(SOAPCLIENT_SOAPFAULT);
CComPtr<IStream> spReadStream;
if (SUCCEEDED(GetReadStream(&spReadStream)))
{
if (FAILED(m_fault.ParseFault(spReadStream, spReader)))
{
SetClientError(SOAPCLIENT_PARSEFAULT_ERROR);
}
}
}
}
else if (FAILED(hr))
{
SetClientError(SOAPCLIENT_SEND_ERROR);
}
return hr;
}
HRESULT SetUrl(LPCTSTR szUrl)
{
CleanupClient();
return (m_url.CrackUrl(szUrl) != FALSE ? S_OK : E_FAIL);
}
HRESULT GetUrl(LPTSTR szUrl, LPDWORD pdwLen)
{
if ((szUrl == NULL) || (pdwLen == NULL))
{
return E_INVALIDARG;
}
return (m_url.CreateUrl(szUrl, pdwLen) != FALSE) ? S_OK : E_FAIL;
}
void SetTimeout(DWORD dwTimeout)
{
m_dwTimeout = dwTimeout;
}
int GetStatusCode()
{
long lStatus;
if (m_spHttpRequest->get_status(&lStatus) == S_OK)
{
return (int) lStatus;
}
return 0;
}
HRESULT SetProxy(LPCTSTR szProxy = NULL, short nProxyPort = 80)
{
(szProxy);
(nProxyPort);
ATLTRACE( _T("CSoapMSXMLInetClient does not support SetProxy") );
return S_OK;
}
}; // CSoapMSXMLInetClient
#endif
class _CSDLGenerator : public ITagReplacerImpl<_CSDLGenerator>
{
private:
typedef CAtlMap<CStringA, const _soapmap *, CStringElementTraits<CStringA> > WSDLMAP;
typedef CAtlMap<CStringA, const _soapmapentry *, CStringElementTraits<CStringA> > HEADERMAP;
HRESULT GenerateWSDLHelper(const _soapmap *pMap, WSDLMAP& structMap, WSDLMAP& enumMap)
{
ATLENSURE_RETURN( pMap != NULL );
const _soapmapentry *pEntries = pMap->pEntries;
ATLENSURE_RETURN( pEntries != NULL );
HRESULT hr = S_OK;
for (int i=0; pEntries[i].nHash != 0; i++)
{
if (pEntries[i].nVal == SOAPTYPE_UNK)
{
ATLENSURE_RETURN( pEntries[i].pChain != NULL );
_ATLTRY
{
POSITION pos = NULL;
CStringA strName(pEntries[i].pChain->szName, pEntries[i].pChain->cchName);
if (pEntries[i].pChain->mapType == SOAPMAP_STRUCT)
{
pos = structMap.SetAt(strName, pEntries[i].pChain);
}
else if (pEntries[i].pChain->mapType == SOAPMAP_ENUM)
{
pos = enumMap.SetAt(strName, pEntries[i].pChain);
}
if (pos == NULL)
{
hr = E_OUTOFMEMORY;
break;
}
}
_ATLCATCHALL()
{
hr = E_OUTOFMEMORY;
break;
}
hr = GenerateWSDLHelper(pEntries[i].pChain, structMap, enumMap);
if (FAILED(hr))
{
break;
}
}
}
return hr;
}
HTTP_CODE IsUDT(const _soapmapentry *pEntry)
{
ATLENSURE( pEntry != NULL );
return (pEntry->nVal != SOAPTYPE_UNK) ? HTTP_S_FALSE : HTTP_SUCCESS;
}
HTTP_CODE GetSoapDims(const _soapmapentry *pEntry)
{
ATLENSURE( pEntry != NULL );
if (pEntry->pDims[0] != 0)
{
if (SUCCEEDED(m_pWriteStream->WriteStream("[", 1, NULL)))
{
for (int i=1; i<=pEntry->pDims[0]; i++)
{
if (m_writeHelper.Write(pEntry->pDims[i]) != FALSE)
{
if (i < pEntry->pDims[0])
{
if (FAILED(m_pWriteStream->WriteStream(", ", 2, NULL)))
{
return HTTP_FAIL;
}
}
}
}
if (SUCCEEDED(m_pWriteStream->WriteStream("]", 1, NULL)))
{
return HTTP_SUCCESS;
}
}
}
return HTTP_FAIL;
}
const _soapmap **m_pFuncs;
const _soapmap **m_pHeaders;
int m_nFunc;
int m_nParam;
int m_nHeader;
WSDLMAP m_structMap;
WSDLMAP m_enumMap;
POSITION m_currUDTPos;
int m_nCurrUDTField;
HEADERMAP m_headerMap;
POSITION m_currHeaderPos;
CWriteStreamHelper m_writeHelper;
CStringA m_strServiceName;
CStringA m_strNamespaceUri;
IWriteStream *m_pWriteStream;
CComPtr<IHttpServerContext> m_spHttpServerContext;
DWORD m_dwCallFlags;
protected:
void SetWriteStream(IWriteStream *pStream)
{
m_pWriteStream = pStream;
m_writeHelper.Attach(m_pWriteStream);
}
void SetHttpServerContext(IHttpServerContext *pServerContext)
{
m_spHttpServerContext = pServerContext;
}
static HTTP_CODE GetSoapType(int nVal, IWriteStream *pStream)
{
return (pStream->WriteStream(CSoapRootHandler::s_xsdNames[nVal].szName,
CSoapRootHandler::s_xsdNames[nVal].cchName, NULL) == S_OK) ? HTTP_SUCCESS : HTTP_FAIL;
}
// Harvests everything needed to render the WSDL from the SOAP handler:
// function maps, header maps, the referenced struct/enum types, the
// per-field header lookup table, and the service name / namespace URI.
// Returns S_OK on success, E_FAIL if the handler has no function map,
// or a failure HRESULT (typically E_OUTOFMEMORY) on allocation failure.
HRESULT InitializeSDL(CSoapRootHandler *pHdlr)
{
m_pFuncs = pHdlr->GetFunctionMap();
if (m_pFuncs == NULL)
{
return E_FAIL;
}
ATLASSUME( m_pFuncs[0] != NULL );
// All functions of a service share the same call flags; take them from
// the first map.
m_dwCallFlags = m_pFuncs[0]->dwCallFlags;
size_t i;
// Collect UDT/enum types referenced by every function's parameters.
for (i=0; m_pFuncs[i] != NULL; i++)
{
const _soapmap *pMap = m_pFuncs[i];
HRESULT hr = GenerateWSDLHelper(pMap, m_structMap, m_enumMap);
if (FAILED(hr))
{
return hr;
}
}
m_pHeaders = pHdlr->GetHeaderMap();
if (m_pHeaders != NULL)
{
// First pass: collect UDT/enum types referenced by headers.
for (i=0; m_pHeaders[i] != NULL; i++)
{
const _soapmap *pMap = m_pHeaders[i];
HRESULT hr = GenerateWSDLHelper(pMap, m_structMap, m_enumMap);
if (FAILED(hr))
{
return hr;
}
}
// Second pass: index every header entry by its field name so the
// header-oriented tag handlers can iterate/look them up.
for (i=0; m_pHeaders[i] != NULL; i++)
{
const _soapmap *pMap = m_pHeaders[i];
for (size_t j=0; pMap->pEntries[j].nHash != 0; j++)
{
HRESULT hr = S_OK;
// CAtlMap::SetAt may throw on allocation failure; normalize both
// the NULL return and the exception to E_OUTOFMEMORY.
_ATLTRY
{
if (m_headerMap.SetAt(pMap->pEntries[j].szField, &pMap->pEntries[j]) == NULL)
{
hr = E_OUTOFMEMORY;
}
}
_ATLCATCHALL()
{
hr = E_OUTOFMEMORY;
}
if (FAILED(hr))
{
return hr;
}
}
}
}
// CStringA assignment can throw on allocation failure.
_ATLTRY
{
m_strServiceName = pHdlr->GetServiceName();
m_strNamespaceUri = pHdlr->GetNamespaceUriA();
}
_ATLCATCHALL()
{
return E_OUTOFMEMORY;
}
return S_OK;
}
// Name under which the derived SDL generator is registered (used when
// building the "?Handler=" URL); supplied by the derived class.
virtual const char * GetHandlerName() = 0;
public:
// Initializes all scalar members to their "not yet initialized" values.
// BUGFIX: m_pHeaders and m_dwCallFlags were previously left uninitialized
// until InitializeSDL() ran; the header tag handlers would have read
// garbage if invoked before then. The init list is also reordered to match
// the member declaration order (avoids -Wreorder / C5038 warnings).
_CSDLGenerator()
    :m_pFuncs(NULL), m_pHeaders(NULL), m_nFunc(-1), m_nParam(-1),
    m_nHeader(-1), m_currUDTPos(NULL), m_nCurrUDTField(-1),
    m_currHeaderPos(NULL), m_pWriteStream(NULL), m_dwCallFlags(0)
{
}
virtual ~_CSDLGenerator()
{
}
// Tag handler: writes the absolute URL of this handler (scheme, server,
// port, path, plus "?Handler=<name>") into the WSDL, XML-escaped via
// AtlGenXMLValue. Returns HTTP_FAIL if any server variable is unavailable
// or writing fails.
HTTP_CODE OnGetURL()
{
char szURL[ATL_URL_MAX_URL_LENGTH];
DWORD dwUrlSize = sizeof(szURL);
char szServer[ATL_URL_MAX_HOST_NAME_LENGTH];
DWORD dwServerSize = sizeof(szServer);
char szHttps[16];
DWORD dwHttpsLen = sizeof(szHttps);
char szPort[ATL_URL_MAX_PORT_NUMBER_LENGTH+1];
DWORD dwPortLen = sizeof(szPort);
if (m_spHttpServerContext->GetServerVariable("URL", szURL, &dwUrlSize) != FALSE)
{
if (m_spHttpServerContext->GetServerVariable("SERVER_NAME", szServer, &dwServerSize) != FALSE)
{
// The HTTPS server variable is "ON" (case-insensitive) for SSL requests;
// absence of the variable is treated as plain HTTP.
bool bHttps = false;
if ((m_spHttpServerContext->GetServerVariable("HTTPS", szHttps, &dwHttpsLen) != FALSE) &&
(!_stricmp(szHttps, "ON")))
{
bHttps = true;
}
if (m_spHttpServerContext->GetServerVariable("SERVER_PORT", szPort, &dwPortLen) != FALSE)
{
// CStringA::Format and CA2W can throw on allocation failure.
_ATLTRY
{
CStringA strUrl;
strUrl.Format("http%s://%s:%s%s?Handler=%s", bHttps ? "s" : "", szServer, szPort, szURL, GetHandlerName());
CA2W wszUrl(strUrl);
wchar_t *pwszUrl = wszUrl;
// AtlGenXMLValue XML-escapes the URL while writing it out.
HRESULT hr = AtlGenXMLValue(m_pWriteStream, &pwszUrl);
return SUCCEEDED(hr) ? HTTP_SUCCESS : HTTP_FAIL;
}
_ATLCATCHALL()
{
return HTTP_FAIL;
}
}
}
}
return HTTP_FAIL;
}
// Tag handler: writes the service's target namespace URI.
HTTP_CODE OnGetNamespace()
{
return (m_pWriteStream->WriteStream(m_strNamespaceUri,
m_strNamespaceUri.GetLength(), NULL) == S_OK) ? HTTP_SUCCESS : HTTP_FAIL;
}
// Tag handler: advances the function cursor; returns HTTP_S_FALSE (and
// resets the cursor) once the NULL terminator of m_pFuncs is reached.
HTTP_CODE OnGetNextFunction()
{
m_nFunc++;
if (m_pFuncs[m_nFunc] == NULL)
{
m_nFunc = -1;
return HTTP_S_FALSE;
}
return HTTP_SUCCESS;
}
// Tag handler: writes the current function's name.
// NOTE(review): returns HTTP_S_FALSE on a write failure where similar
// handlers return HTTP_FAIL -- looks inconsistent; confirm intended.
HTTP_CODE OnGetFunctionName()
{
return (m_pWriteStream->WriteStream(m_pFuncs[m_nFunc]->szName,
m_pFuncs[m_nFunc]->cchName, NULL) == S_OK) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: advances the parameter cursor for the current function,
// transparently skipping entries flagged SOAPFLAG_NOMARSHAL. Returns
// HTTP_S_FALSE (and resets the cursor to -1) at the terminator entry
// (nHash == 0), HTTP_SUCCESS otherwise.
HTTP_CODE OnGetNextParameter()
{
    for (;;)
    {
        ++m_nParam;
        const _soapmapentry& entry = m_pFuncs[m_nFunc]->pEntries[m_nParam];
        if (entry.nHash == 0)
        {
            // End of the entry list: rewind for the next iteration pass.
            m_nParam = -1;
            return HTTP_S_FALSE;
        }
        if ((entry.dwFlags & SOAPFLAG_NOMARSHAL) == 0)
        {
            return HTTP_SUCCESS;
        }
        // SOAPFLAG_NOMARSHAL entries are not exposed in the WSDL; keep going.
    }
}
// Tag handler: HTTP_SUCCESS if the current parameter is an [in] parameter.
HTTP_CODE OnIsInParameter()
{
return (m_pFuncs[m_nFunc]->pEntries[m_nParam].dwFlags & SOAPFLAG_IN) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: writes the current parameter's name ("return" for retvals).
HTTP_CODE OnGetParameterName()
{
HRESULT hr = S_OK;
if (m_pFuncs[m_nFunc]->pEntries[m_nParam].dwFlags & SOAPFLAG_RETVAL)
{
hr = m_pWriteStream->WriteStream("return", sizeof("return")-1, NULL);
}
else
{
hr = m_pWriteStream->WriteStream(m_pFuncs[m_nFunc]->pEntries[m_nParam].szField,
m_pFuncs[m_nFunc]->pEntries[m_nParam].cchField, NULL);
}
return (hr == S_OK) ? HTTP_SUCCESS : HTTP_FAIL;
}
// Tag handler: HTTP_SUCCESS if the current parameter is NOT an array.
HTTP_CODE OnNotIsArrayParameter()
{
return (m_pFuncs[m_nFunc]->pEntries[m_nParam].dwFlags & (SOAPFLAG_FIXEDARR | SOAPFLAG_DYNARR))
? HTTP_S_FALSE: HTTP_SUCCESS;
}
// Tag handler: HTTP_SUCCESS if the current parameter is a user-defined type.
HTTP_CODE OnIsParameterUDT()
{
return IsUDT(&m_pFuncs[m_nFunc]->pEntries[m_nParam]);
}
// Tag handler: writes the XSD type of the current parameter -- either a
// built-in SOAPTYPE_* name or, for UDTs, the name from the type's map chain.
HTTP_CODE OnGetParameterSoapType()
{
if (m_pFuncs[m_nFunc]->pEntries[m_nParam].nVal != SOAPTYPE_UNK)
{
return GetSoapType(m_pFuncs[m_nFunc]->pEntries[m_nParam].nVal, m_pWriteStream);
}
ATLASSUME( m_pFuncs[m_nFunc]->pEntries[m_nParam].pChain != NULL );
return (m_pWriteStream->WriteStream(m_pFuncs[m_nFunc]->pEntries[m_nParam].pChain->szName,
m_pFuncs[m_nFunc]->pEntries[m_nParam].pChain->cchName, NULL) == S_OK) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: HTTP_SUCCESS if the current parameter is a dynamic array.
HTTP_CODE OnIsParameterDynamicArray()
{
return (m_pFuncs[m_nFunc]->pEntries[m_nParam].dwFlags & SOAPFLAG_DYNARR) ? HTTP_SUCCESS: HTTP_S_FALSE;
}
// Tag handler: logical negation of OnNotIsArrayParameter.
HTTP_CODE OnIsArrayParameter()
{
return (OnNotIsArrayParameter() != HTTP_SUCCESS) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: HTTP_SUCCESS if the current array parameter has exactly
// one dimension (pDims[0] is the dimension count).
HTTP_CODE OnIsParameterOneDimensional()
{
return (m_pFuncs[m_nFunc]->pEntries[m_nParam].pDims[0] == 1) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: writes the extent of the (one-dimensional) array parameter.
HTTP_CODE OnGetParameterArraySize()
{
return (m_writeHelper.Write(m_pFuncs[m_nFunc]->pEntries[m_nParam].pDims[1]) != FALSE)
? HTTP_SUCCESS : HTTP_FAIL;
}
// Tag handler: writes the full "[d1, d2, ...]" dimension list.
HTTP_CODE OnGetParameterArraySoapDims()
{
return GetSoapDims(&m_pFuncs[m_nFunc]->pEntries[m_nParam]);
}
// Tag handler: HTTP_SUCCESS if the current parameter is an [out] parameter.
HTTP_CODE OnIsOutParameter()
{
return (m_pFuncs[m_nFunc]->pEntries[m_nParam].dwFlags & SOAPFLAG_OUT) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: advances m_currUDTPos over m_enumMap; starts at the head
// on the first call, returns HTTP_S_FALSE when iteration is exhausted.
HTTP_CODE OnGetNextEnum()
{
if (m_currUDTPos == NULL)
{
m_currUDTPos = m_enumMap.GetStartPosition();
}
else
{
m_enumMap.GetNext(m_currUDTPos);
}
return (m_currUDTPos != NULL) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: writes the name of the enum currently addressed by m_currUDTPos.
HTTP_CODE OnGetEnumName()
{
const _soapmap *pMap = m_enumMap.GetValueAt(m_currUDTPos);
return (m_pWriteStream->WriteStream(pMap->szName, pMap->cchName, NULL) == S_OK) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: advances the field cursor within the current enum;
// HTTP_S_FALSE (and cursor reset) at the terminator entry.
HTTP_CODE OnGetNextEnumElement()
{
const _soapmap *pMap = m_enumMap.GetValueAt(m_currUDTPos);
++m_nCurrUDTField;
if (pMap->pEntries[m_nCurrUDTField].nHash != 0)
{
return HTTP_SUCCESS;
}
m_nCurrUDTField = -1;
return HTTP_S_FALSE;
}
// Tag handler: writes the name of the current enum element.
HTTP_CODE OnGetEnumElementName()
{
const _soapmap *pMap = m_enumMap.GetValueAt(m_currUDTPos);
return (m_pWriteStream->WriteStream(pMap->pEntries[m_nCurrUDTField].szField,
pMap->pEntries[m_nCurrUDTField].cchField, NULL) == S_OK) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: advances m_currUDTPos over m_structMap (same protocol as
// OnGetNextEnum). The two iterations share m_currUDTPos, so enum and
// struct tag sequences must not interleave in the stencil.
HTTP_CODE OnGetNextStruct()
{
if (m_currUDTPos == NULL)
{
m_currUDTPos = m_structMap.GetStartPosition();
}
else
{
m_structMap.GetNext(m_currUDTPos);
}
return (m_currUDTPos != NULL) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: writes the name of the struct currently addressed by
// m_currUDTPos.
// BUGFIX: the value was fetched via m_enumMap even though m_currUDTPos is
// a position into m_structMap (set by OnGetNextStruct). That only worked
// by accident of CAtlMap::GetValueAt dereferencing the POSITION without
// consulting the map instance. Read from m_structMap, as every other
// struct handler does.
HTTP_CODE OnGetStructName()
{
    const _soapmap *pMap = m_structMap.GetValueAt(m_currUDTPos);
    return (m_pWriteStream->WriteStream(pMap->szName, pMap->cchName, NULL) == S_OK) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: advances the field cursor within the current struct;
// HTTP_S_FALSE (and cursor reset) at the terminator entry.
HTTP_CODE OnGetNextStructField()
{
const _soapmap *pMap = m_structMap.GetValueAt(m_currUDTPos);
++m_nCurrUDTField;
if (pMap->pEntries[m_nCurrUDTField].nHash != 0)
{
return HTTP_SUCCESS;
}
m_nCurrUDTField = -1;
return HTTP_S_FALSE;
}
// Tag handler: writes the name of the current struct field.
HTTP_CODE OnGetStructFieldName()
{
const _soapmap *pMap = m_structMap.GetValueAt(m_currUDTPos);
return (m_pWriteStream->WriteStream(pMap->pEntries[m_nCurrUDTField].szField,
pMap->pEntries[m_nCurrUDTField].cchField, NULL) == S_OK) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: HTTP_SUCCESS if the current struct field is NOT an array.
HTTP_CODE OnNotIsArrayField()
{
const _soapmap *pMap = m_structMap.GetValueAt(m_currUDTPos);
return (pMap->pEntries[m_nCurrUDTField].dwFlags & (SOAPFLAG_FIXEDARR | SOAPFLAG_DYNARR)) ? HTTP_S_FALSE : HTTP_SUCCESS;
}
// Tag handler: HTTP_SUCCESS if the current struct field is a user-defined type.
HTTP_CODE OnIsFieldUDT()
{
const _soapmap *pMap = m_structMap.GetValueAt(m_currUDTPos);
return IsUDT(&pMap->pEntries[m_nCurrUDTField]);
}
// Tag handler: writes the XSD type of the current struct field -- either a
// built-in SOAPTYPE_* name or the UDT name from the field's map chain.
HTTP_CODE OnGetStructFieldSoapType()
{
const _soapmap *pMap = m_structMap.GetValueAt(m_currUDTPos);
if (pMap->pEntries[m_nCurrUDTField].nVal != SOAPTYPE_UNK)
{
return GetSoapType(pMap->pEntries[m_nCurrUDTField].nVal, m_pWriteStream);
}
ATLASSERT( pMap->pEntries[m_nCurrUDTField].pChain != NULL );
return (m_pWriteStream->WriteStream(pMap->pEntries[m_nCurrUDTField].pChain->szName,
pMap->pEntries[m_nCurrUDTField].pChain->cchName, NULL) == S_OK) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: logical negation of OnNotIsArrayField.
HTTP_CODE OnIsArrayField()
{
return (OnNotIsArrayField() != HTTP_SUCCESS) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: HTTP_SUCCESS if the current struct field is a dynamic array.
HTTP_CODE OnIsFieldDynamicArray()
{
const _soapmap *pMap = m_structMap.GetValueAt(m_currUDTPos);
return (pMap->pEntries[m_nCurrUDTField].dwFlags & SOAPFLAG_DYNARR) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: writes the name of the sibling field that carries the
// size_is() element count for the current dynamic-array struct field
// (nSizeIs indexes into the same entry array).
// Consistency fix: this method wrote through m_pStream (the ITagReplacerImpl
// base member set via SetStream) while every sibling handler uses
// m_pWriteStream. Both are attached to the same response stream in
// CSDLGenerator::InitializeHandler, but m_pWriteStream is the member this
// class owns and NULL-initializes, so use it here as well.
HTTP_CODE OnGetFieldSizeIsName()
{
    const _soapmap *pMap = m_structMap.GetValueAt(m_currUDTPos);
    int nIndex = pMap->pEntries[m_nCurrUDTField].nSizeIs;
    ATLASSERT( nIndex >= 0 );
    return (m_pWriteStream->WriteStream(pMap->pEntries[nIndex].szField,
        pMap->pEntries[nIndex].cchField, NULL) == S_OK) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: HTTP_SUCCESS if the current array field has exactly one
// dimension (pDims[0] is the dimension count).
HTTP_CODE OnIsFieldOneDimensional()
{
const _soapmap *pMap = m_structMap.GetValueAt(m_currUDTPos);
return (pMap->pEntries[m_nCurrUDTField].pDims[0] == 1) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: writes the extent of the (one-dimensional) array field.
HTTP_CODE OnGetFieldArraySize()
{
const _soapmap *pMap = m_structMap.GetValueAt(m_currUDTPos);
return (m_writeHelper.Write(pMap->pEntries[m_nCurrUDTField].pDims[1]) != FALSE) ?
HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: writes the full "[d1, d2, ...]" dimension list of the field.
HTTP_CODE OnGetFieldArraySoapDims()
{
const _soapmap *pMap = m_structMap.GetValueAt(m_currUDTPos);
return GetSoapDims(&pMap->pEntries[m_nCurrUDTField]);
}
// Tag handler: writes the service name captured by InitializeSDL.
HTTP_CODE OnGetServiceName()
{
return (m_pWriteStream->WriteStream(m_strServiceName,
m_strServiceName.GetLength(), NULL) == S_OK) ? HTTP_SUCCESS : HTTP_FAIL;
}
// Tag handler: advances m_currHeaderPos over m_headerMap. The first call
// positions the cursor at the head; subsequent calls step forward.
// Returns HTTP_S_FALSE once the map is exhausted (cursor back to NULL).
HTTP_CODE OnGetNextHeader()
{
    if (m_currHeaderPos != NULL)
    {
        // Already iterating: step to the following entry (NULL at the end).
        m_headerMap.GetNext(m_currHeaderPos);
    }
    else
    {
        // Fresh iteration: start at the first entry.
        m_currHeaderPos = m_headerMap.GetStartPosition();
    }
    return (m_currHeaderPos == NULL) ? HTTP_S_FALSE : HTTP_SUCCESS;
}
// Tag handler: HTTP_SUCCESS if the current function header is an [in] header.
// NOTE(review): indexes m_pHeaders by m_nFunc -- assumes one header map per
// function, parallel to m_pFuncs; confirm against GetHeaderMap's contract.
HTTP_CODE OnIsInHeader()
{
return (m_pHeaders[m_nFunc]->pEntries[m_nHeader].dwFlags & SOAPFLAG_IN)
? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: HTTP_SUCCESS if the current function header is an [out] header.
HTTP_CODE OnIsOutHeader()
{
return (m_pHeaders[m_nFunc]->pEntries[m_nHeader].dwFlags & SOAPFLAG_OUT)
? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: HTTP_SUCCESS if the header is marked soap:mustUnderstand.
HTTP_CODE OnIsRequiredHeader()
{
return (m_pHeaders[m_nFunc]->pEntries[m_nHeader].dwFlags & SOAPFLAG_MUSTUNDERSTAND)
? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: writes the name of the header addressed by m_currHeaderPos.
HTTP_CODE OnGetHeaderName()
{
const _soapmapentry *pEntry = m_headerMap.GetValueAt(m_currHeaderPos);
return (m_pWriteStream->WriteStream(pEntry->szField,
pEntry->cchField, NULL) == S_OK) ? HTTP_SUCCESS : HTTP_FAIL;
}
// Tag handler: HTTP_SUCCESS if the current header is NOT a fixed array.
// NOTE(review): unlike the parameter/field variants this only tests
// SOAPFLAG_FIXEDARR, not SOAPFLAG_DYNARR -- confirm intended.
HTTP_CODE OnNotIsArrayHeader()
{
const _soapmapentry *pEntry = m_headerMap.GetValueAt(m_currHeaderPos);
return (pEntry->dwFlags & SOAPFLAG_FIXEDARR) ? HTTP_S_FALSE : HTTP_SUCCESS;
}
// Tag handler: HTTP_SUCCESS if the current header is a user-defined type.
HTTP_CODE OnIsHeaderUDT()
{
return IsUDT(m_headerMap.GetValueAt(m_currHeaderPos));
}
// Tag handler: writes the XSD type of the current header -- either a
// built-in SOAPTYPE_* name or the UDT name from the entry's map chain.
HTTP_CODE OnGetHeaderSoapType()
{
const _soapmapentry *pEntry = m_headerMap.GetValueAt(m_currHeaderPos);
if (pEntry->nVal != SOAPTYPE_UNK)
{
return GetSoapType(pEntry->nVal, m_pWriteStream);
}
ATLENSURE( pEntry->pChain != NULL );
return (m_pWriteStream->WriteStream(pEntry->pChain->szName,
pEntry->pChain->cchName, NULL) == S_OK) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: HTTP_SUCCESS if the current array header has one dimension.
HTTP_CODE OnIsHeaderOneDimensional()
{
const _soapmapentry *pEntry = m_headerMap.GetValueAt(m_currHeaderPos);
return (pEntry->pDims[0] == 1) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: writes the extent of the (one-dimensional) array header.
HTTP_CODE OnGetHeaderArraySize()
{
const _soapmapentry *pEntry = m_headerMap.GetValueAt(m_currHeaderPos);
return (m_writeHelper.Write(pEntry->pDims[1]) != FALSE) ?
HTTP_SUCCESS : HTTP_S_FALSE;
}
// Tag handler: writes the full "[d1, d2, ...]" dimension list of the header.
HTTP_CODE OnGetHeaderArraySoapDims()
{
return GetSoapDims(m_headerMap.GetValueAt(m_currHeaderPos));
}
// Tag handler: advances m_nHeader over the current function's header
// entries; HTTP_S_FALSE (and cursor reset) at the terminator.
// NOTE(review): a SOAPFLAG_NOMARSHAL entry delegates to OnGetNextHeader()
// (the m_headerMap cursor) instead of recursing on this method -- looks
// suspicious next to OnGetNextParameter's self-recursion; confirm intended.
HTTP_CODE OnGetNextFunctionHeader()
{
++m_nHeader;
if (m_pHeaders[m_nFunc]->pEntries[m_nHeader].nHash != 0)
{
if (m_pHeaders[m_nFunc]->pEntries[m_nHeader].dwFlags & SOAPFLAG_NOMARSHAL)
{
return OnGetNextHeader();
}
return HTTP_SUCCESS;
}
m_nHeader = -1;
return HTTP_S_FALSE;
}
// Tag handler: writes the name of the current function header entry.
HTTP_CODE OnGetFunctionHeaderName()
{
return (m_pWriteStream->WriteStream(
m_pHeaders[m_nFunc]->pEntries[m_nHeader].szField,
m_pHeaders[m_nFunc]->pEntries[m_nHeader].cchField,
NULL) == S_OK) ? HTTP_SUCCESS : HTTP_FAIL;
}
// Tag handler: logical negation of OnNotIsArrayHeader.
HTTP_CODE OnIsArrayHeader()
{
return (OnNotIsArrayHeader() == HTTP_SUCCESS) ? HTTP_S_FALSE : HTTP_SUCCESS;
}
// Tag handler: HTTP_SUCCESS if the service uses document/literal binding.
HTTP_CODE OnIsDocumentLiteral()
{
if ((m_dwCallFlags & (SOAPFLAG_DOCUMENT | SOAPFLAG_LITERAL)) ==
(SOAPFLAG_DOCUMENT | SOAPFLAG_LITERAL))
{
return HTTP_SUCCESS;
}
return HTTP_S_FALSE;
}
// Tag handler: HTTP_SUCCESS if the service uses rpc/encoded binding.
HTTP_CODE OnIsRpcEncoded()
{
if ((m_dwCallFlags & (SOAPFLAG_RPC | SOAPFLAG_ENCODED)) ==
(SOAPFLAG_RPC | SOAPFLAG_ENCODED))
{
return HTTP_SUCCESS;
}
return HTTP_S_FALSE;
}
#pragma warning (push)
#pragma warning (disable : 4640) // construction of local static object is not thread-safe
// Maps stencil tag names (as they appear in the WSDL .srf template) to the
// On* handler methods above; consumed by the ITagReplacer machinery.
BEGIN_REPLACEMENT_METHOD_MAP(_CSDLGenerator)
REPLACEMENT_METHOD_ENTRY("GetNamespace", OnGetNamespace)
REPLACEMENT_METHOD_ENTRY("GetNextFunction", OnGetNextFunction)
REPLACEMENT_METHOD_ENTRY("GetFunctionName", OnGetFunctionName)
REPLACEMENT_METHOD_ENTRY("GetNextParameter", OnGetNextParameter)
REPLACEMENT_METHOD_ENTRY("IsInParameter", OnIsInParameter)
REPLACEMENT_METHOD_ENTRY("GetParameterName", OnGetParameterName)
REPLACEMENT_METHOD_ENTRY("NotIsArrayParameter", OnNotIsArrayParameter)
REPLACEMENT_METHOD_ENTRY("IsParameterUDT", OnIsParameterUDT)
REPLACEMENT_METHOD_ENTRY("GetParameterSoapType", OnGetParameterSoapType)
REPLACEMENT_METHOD_ENTRY("IsParameterDynamicArray", OnIsParameterDynamicArray)
REPLACEMENT_METHOD_ENTRY("IsArrayParameter", OnIsArrayParameter)
REPLACEMENT_METHOD_ENTRY("IsParameterOneDimensional", OnIsParameterOneDimensional)
REPLACEMENT_METHOD_ENTRY("GetParameterArraySize", OnGetParameterArraySize)
REPLACEMENT_METHOD_ENTRY("GetParameterArraySoapDims", OnGetParameterArraySoapDims)
REPLACEMENT_METHOD_ENTRY("IsOutParameter", OnIsOutParameter)
REPLACEMENT_METHOD_ENTRY("GetNextEnum", OnGetNextEnum)
REPLACEMENT_METHOD_ENTRY("GetEnumName", OnGetEnumName)
REPLACEMENT_METHOD_ENTRY("GetNextEnumElement", OnGetNextEnumElement)
REPLACEMENT_METHOD_ENTRY("GetEnumElementName", OnGetEnumElementName)
REPLACEMENT_METHOD_ENTRY("GetNextStruct", OnGetNextStruct)
REPLACEMENT_METHOD_ENTRY("GetStructName", OnGetStructName)
REPLACEMENT_METHOD_ENTRY("GetNextStructField", OnGetNextStructField)
REPLACEMENT_METHOD_ENTRY("GetStructFieldName", OnGetStructFieldName)
REPLACEMENT_METHOD_ENTRY("NotIsArrayField", OnNotIsArrayField)
REPLACEMENT_METHOD_ENTRY("IsFieldUDT", OnIsFieldUDT)
REPLACEMENT_METHOD_ENTRY("GetStructFieldSoapType", OnGetStructFieldSoapType)
REPLACEMENT_METHOD_ENTRY("IsArrayField", OnIsArrayField)
REPLACEMENT_METHOD_ENTRY("IsFieldOneDimensional", OnIsFieldOneDimensional)
REPLACEMENT_METHOD_ENTRY("GetFieldArraySize", OnGetFieldArraySize)
REPLACEMENT_METHOD_ENTRY("GetFieldArraySoapDims", OnGetFieldArraySoapDims)
REPLACEMENT_METHOD_ENTRY("GetServiceName", OnGetServiceName)
REPLACEMENT_METHOD_ENTRY("GetURL", OnGetURL)
REPLACEMENT_METHOD_ENTRY("GetNextHeader", OnGetNextHeader)
REPLACEMENT_METHOD_ENTRY("GetHeaderName", OnGetHeaderName)
REPLACEMENT_METHOD_ENTRY("NotIsArrayHeader", OnNotIsArrayHeader)
REPLACEMENT_METHOD_ENTRY("IsArrayHeader", OnIsArrayHeader)
REPLACEMENT_METHOD_ENTRY("IsHeaderUDT", OnIsHeaderUDT)
REPLACEMENT_METHOD_ENTRY("GetHeaderSoapType", OnGetHeaderSoapType)
REPLACEMENT_METHOD_ENTRY("IsHeaderOneDimensional", OnIsHeaderOneDimensional)
REPLACEMENT_METHOD_ENTRY("GetHeaderArraySize", OnGetHeaderArraySize)
REPLACEMENT_METHOD_ENTRY("GetHeaderArraySoapDims", OnGetHeaderArraySoapDims)
REPLACEMENT_METHOD_ENTRY("GetNextFunctionHeader", OnGetNextFunctionHeader)
REPLACEMENT_METHOD_ENTRY("GetFunctionHeaderName", OnGetFunctionHeaderName)
REPLACEMENT_METHOD_ENTRY("IsInHeader", OnIsInHeader)
REPLACEMENT_METHOD_ENTRY("IsOutHeader", OnIsOutHeader)
REPLACEMENT_METHOD_ENTRY("IsRequiredHeader", OnIsRequiredHeader)
REPLACEMENT_METHOD_ENTRY("IsDocumentLiteral", OnIsDocumentLiteral)
REPLACEMENT_METHOD_ENTRY("IsRpcEncoded", OnIsRpcEncoded)
REPLACEMENT_METHOD_ENTRY("IsFieldDynamicArray", OnIsFieldDynamicArray)
REPLACEMENT_METHOD_ENTRY("GetFieldSizeIsName", OnGetFieldSizeIsName)
END_REPLACEMENT_METHOD_MAP()
#pragma warning (pop)
}; // class _CSDLGenerator
#if defined(_WIN32_WCE) && !defined(_CE_DCOM) && !defined(_CE_ALLOW_SINGLE_THREADED_OBJECTS_IN_MTA)
#error atlsoap.h requires Windows CE platform to have DCOM support or _CE_ALLOW_SINGLE_THREADED_OBJECTS_IN_MTA needs to be defined.
#endif
// Request handler that renders the WSDL for a SOAP handler class THandler.
// It instantiates a stack copy of the handler to harvest its maps, then
// renders the built-in WSDL stencil (s_szAtlsWSDLSrf) with the tag
// replacements implemented by _CSDLGenerator.
template <class THandler, const char *szHandlerName>
class CSDLGenerator :
public _CSDLGenerator,
public IRequestHandlerImpl< CSDLGenerator<THandler,szHandlerName> >,
public CComObjectRootEx<CComSingleThreadModel>
{
private:
public:
typedef CSDLGenerator<THandler, szHandlerName> _sdlGenerator;
BEGIN_COM_MAP(_sdlGenerator)
COM_INTERFACE_ENTRY(IRequestHandler)
COM_INTERFACE_ENTRY(ITagReplacer)
END_COM_MAP()
// Harvests the handler's SOAP maps, parses the WSDL stencil, and renders
// it straight into the HTTP response with Content-Type text/xml.
HTTP_CODE InitializeHandler(AtlServerRequest *pRequestInfo, IServiceProvider *pServiceProvider)
{
// NOTE(review): the return value of the base-class InitializeHandler is
// ignored here -- confirm a failure there cannot leave m_spServerContext
// unset before it is used below.
IRequestHandlerImpl<CSDLGenerator>::InitializeHandler(pRequestInfo, pServiceProvider);
// Stack instance of the handler, used only to read its static SOAP maps.
CComObjectStack<THandler> handler;
if (FAILED(InitializeSDL(&handler)))
{
return HTTP_FAIL;
}
CStencil s;
HTTP_CODE hcErr = s.LoadFromString(s_szAtlsWSDLSrf, (DWORD) strlen(s_szAtlsWSDLSrf));
if (hcErr == HTTP_SUCCESS)
{
hcErr = HTTP_FAIL;
CHttpResponse HttpResponse(pRequestInfo->pServerContext);
HttpResponse.SetContentType("text/xml");
if (s.ParseReplacements(this) != false)
{
s.FinishParseReplacements();
// Both the ITagReplacerImpl stream (SetStream) and this class's
// m_pWriteStream are pointed at the same response object.
SetStream(&HttpResponse);
SetWriteStream(&HttpResponse);
SetHttpServerContext(m_spServerContext);
ATLASSERT( s.ParseSuccessful() != false );
hcErr = s.Render(this, &HttpResponse);
}
}
return hcErr;
}
// Returns the template's non-type handler-name parameter (see OnGetURL).
const char * GetHandlerName()
{
return szHandlerName;
}
}; // class CSDLGenerator
} // namespace ATL
#pragma pack(pop)
#pragma warning(pop)
#endif // __ATLSOAP_H__
<|start_filename|>source/SProxy/WSDLServiceParser.cpp<|end_filename|>
//
// WSDLServiceParser.cpp
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "Util.h"
#include "WSDLServiceParser.h"
#include "WSDLServicePortParser.h"
#include "WSDLService.h"
#include "Attribute.h"
#include "Content.h"
#include "Element.h"
#include "ComplexType.h"
// <documentation> under <service>: content is not needed, skip the subtree.
TAG_METHOD_IMPL(CWSDLServiceParser, OnDocumentation)
{
TRACE_PARSE_ENTRY();
return SkipElement();
}
// <port> under <service>: create a CWSDLPort on the current service and hand
// parsing of the subtree to a new CWSDLServicePortParser pushed onto the
// global parser list. Any allocation failure is reported as E_OUTOFMEMORY.
TAG_METHOD_IMPL(CWSDLServiceParser, OnPort)
{
TRACE_PARSE_ENTRY();
CWSDLService *pCurr = GetService();
if (pCurr != NULL)
{
CWSDLPort *pElem = pCurr->AddPort();
if (pElem != NULL)
{
SetXMLElementInfo(pElem, pCurr, GetLocator());
CAutoPtr<CWSDLServicePortParser> p( new CWSDLServicePortParser(GetReader(), this, GetLevel(), pElem) );
if (p != NULL)
{
if (g_ParserList.AddHead(p) != NULL)
{
// The list now owns the parser; let it consume the attributes.
return p.Detach()->GetAttributes(pAttributes);
}
}
}
}
EmitErrorHr(E_OUTOFMEMORY);
return E_FAIL;
}
// name= attribute of <service>: store it on the current service object.
// NOTE(review): a NULL GetService() is reported as E_OUTOFMEMORY, which is
// misleading (mirrors OnPort's combined failure path) -- confirm intended.
ATTR_METHOD_IMPL(CWSDLServiceParser, OnName)
{
TRACE_PARSE_ENTRY();
CWSDLService * pCurr = GetService();
if (pCurr != NULL)
{
return pCurr->SetName(wszValue, cchValue);
}
EmitErrorHr(E_OUTOFMEMORY);
return E_FAIL;
}
// SAX callback: records an xmlns prefix/URI mapping declared on (or above)
// the <service> element onto the current service object.
// Returns E_FAIL when no service is available.
HRESULT __stdcall CWSDLServiceParser::startPrefixMapping(
    const wchar_t *wszPrefix,
    int cchPrefix,
    const wchar_t *wszUri,
    int cchUri)
{
    CWSDLService *pService = GetService();
    return (pService != NULL)
        ? pService->SetNamespaceUri(wszPrefix, cchPrefix, wszUri, cchUri)
        : E_FAIL;
}
// Fallback for tags with no entry in the tag map (WSDL extensibility
// elements): emit a "skipping extensibility element" warning with the
// source location, then skip the subtree.
HRESULT CWSDLServiceParser::OnUnrecognizedTag(
const wchar_t *wszNamespaceUri, int cchNamespaceUri,
const wchar_t *wszLocalName, int cchLocalName,
const wchar_t * /*wszQName*/, int /*cchQName*/,
ISAXAttributes * /*pAttributes*/) throw()
{
CWSDLService * pCurr = GetService();
if (pCurr != NULL)
{
int nLine;
int nCol;
GetLocator()->getLineNumber(&nLine);
GetLocator()->getColumnNumber(&nCol);
EmitFileWarning(IDS_SDL_SKIP_EXTENSIBILITY,
pCurr->GetParentDocument()->GetDocumentUri(),
nLine,
nCol,
0,
wszNamespaceUri,
wszLocalName);
}
return SkipElement();
}
<|start_filename|>source/SProxy/WSDLService.h<|end_filename|>
//
// WSDLService.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "XMLElement.h"
#include "WSDLPort.h"
class CWSDLService : public CXMLElement
{
private:
CStringW m_strDocumentation;
CStringW m_strName;
CAtlPtrList<CWSDLPort *> m_ports;
public:
inline CWSDLPort * AddPort(CWSDLPort * p = NULL)
{
CAutoPtr<CWSDLPort> spOut;
if (p == NULL)
{
spOut.Attach( new CWSDLPort );
p = spOut;
}
if (p != NULL)
{
if (m_ports.AddTail(p) != NULL)
{
spOut.Detach();
return p;
}
}
return NULL;
}
POSITION GetFirstPort()
{
return m_ports.GetHeadPosition();
}
CWSDLPort * GetNextPort(POSITION &pos)
{
return m_ports.GetNext(pos);
}
inline HRESULT SetName(const wchar_t *wszName, int cchName)
{
if (!wszName)
{
return E_FAIL;
}
m_strName.SetString(wszName, cchName);
return S_OK;
}
inline HRESULT SetName(const CStringW& strName)
{
m_strName = strName;
return S_OK;
}
inline const CStringW& GetName()
{
return m_strName;
}
};
<|start_filename|>source/SProxy/DiscoMapDocument.h<|end_filename|>
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "xmldocument.h"
// XML document describing a .discomap file: the WSDL file it points at plus
// a URL -> local-filename map for the schemas it references.
// (Method bodies live in DiscoMapDocument.cpp.)
class CDiscoMapDocument :
public CXMLDocument
{
public:
CDiscoMapDocument(void);
~CDiscoMapDocument(void);
private:
typedef CAtlMap<CStringW, CStringW> SCHEMAMAP;
public:
// Records the WSDL file referenced by the discomap.
void SetWSDLFile(const CStringW & wsdlFile);
private:
CStringW m_wsdlFile;     // WSDL filename from the discomap
SCHEMAMAP m_schemaMap;   // schema URL -> local filename
public:
// Adds a schema URL -> local filename mapping.
void AddSchema(const CStringW & url, const CStringW & filename);
CStringW & GetWSDLFile(void);
// Resolves a URL through the schema map.
CStringW & GetValue(const CStringW & value);
private:
CStringW m_strPath;      // cached base path of the document
protected:
CStringW & GetPath(void);
};
<|start_filename|>source/SProxy/AttributeParser.h<|end_filename|>
//
// AttributeParser.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "Parser.h"
#include "Emit.h"
#include "resource.h"
class CAttribute;
// SAX parser for an XSD <attribute> element: routes child tags and
// attributes (see the schema fragment below) onto a CAttribute object.
class CAttributeParser : public CParserBase
{
private:
CAttribute *m_pAttribute;   // target object being populated (not owned)
public:
inline CAttributeParser(ISAXXMLReader *pReader, CParserBase *pParent, DWORD dwLevel, CAttribute *pAttribute = NULL)
:CParserBase(pReader, pParent, dwLevel), m_pAttribute(pAttribute)
{
}
inline CAttribute * GetAttribute()
{
return m_pAttribute;
}
inline void SetAttribute(CAttribute *pAttribute)
{
m_pAttribute = pAttribute;
}
// Validates the populated attribute once the element is complete.
HRESULT ValidateElement();
/*
<attribute
form = (qualified | unqualified)
id = ID
name = NCName
ref = QName
type = QName
use = (prohibited | optional | required | default | fixed) : optional
value = string
{any attributes with non-schema namespace . . .}>
Content: (annotation? , (simpleType?))
</attribute>
*/
BEGIN_XMLTAG_MAP()
XMLTAG_ENTRY_EX("annotation", XSD_NAMESPACEA, OnAnnotation)
XMLTAG_ENTRY_EX("simpleType", XSD_NAMESPACEA, OnSimpleType)
END_XMLTAG_MAP()
BEGIN_XMLATTR_MAP()
XMLATTR_ENTRY("form", OnForm)
XMLATTR_ENTRY("ref", OnRef)
XMLATTR_ENTRY("name", OnName)
XMLATTR_ENTRY_EX("arrayType", WSDL_NAMESPACEA, OnArrayType)
XMLATTR_ENTRY("type", OnType)
XMLATTR_ENTRY("use", OnUse)
XMLATTR_ENTRY("value", OnValue)
XMLATTR_ENTRY("id", OnID)
END_XMLATTR_MAP()
TAG_METHOD_DECL(OnAnnotation);
TAG_METHOD_DECL(OnSimpleType);
ATTR_METHOD_DECL(OnForm);
ATTR_METHOD_DECL(OnRef);
ATTR_METHOD_DECL(OnArrayType);
ATTR_METHOD_DECL(OnName);
ATTR_METHOD_DECL(OnType);
ATTR_METHOD_DECL(OnUse);
ATTR_METHOD_DECL(OnValue);
ATTR_METHOD_DECL(OnID);
};
<|start_filename|>source/SProxy/ComplexTypeParser.h<|end_filename|>
//
// ComplexTypeParser.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "Parser.h"
#include "Emit.h"
#include "resource.h"
class CComplexType;
// SAX parser for an XSD <complexType> element: routes the (large) set of
// possible child tags and attributes onto a CComplexType object.
class CComplexTypeParser : public CParserBase
{
private:
CComplexType * m_pComplexType;   // target object being populated (not owned)
public:
inline CComplexTypeParser(ISAXXMLReader *pReader, CParserBase *pParent, DWORD dwLevel, CComplexType * pComplexType = NULL)
:CParserBase(pReader, pParent, dwLevel), m_pComplexType(pComplexType)
{
}
inline CComplexType * GetComplexType()
{
return m_pComplexType;
}
inline void SetComplexType(CComplexType * pComplexType)
{
m_pComplexType = pComplexType;
}
// Debug-only trace for tags this tool recognizes but does not process.
inline void MarkUnsupported(const wchar_t *wszQName, int cchQName)
{
#ifdef _DEBUG
int nLine;
int nCol;
GetLocator()->getLineNumber(&nLine);
GetLocator()->getColumnNumber(&nCol);
ATLTRACE( _T("%sUnsupported tag@(%d, %d) : %.*ws -- skipping element\n"), GetTabs(GetLevel()),
nLine, nCol,
cchQName, wszQName );
#endif
}
/*
annotation, length, enumeration, pattern, scale, period, duration,
maxLength, precision, minInclusive, minExclusive, maxInclusive,
maxExclusive, minLength, encoding, element, group, all, choice,
sequence, attribute, attributeGroup, anyAttribute
*/
BEGIN_XMLTAG_MAP()
XMLTAG_ENTRY_EX("element", XSD_NAMESPACEA, OnElement)
XMLTAG_ENTRY_EX("all", XSD_NAMESPACEA, OnAll)
XMLTAG_ENTRY_EX("choice", XSD_NAMESPACEA, OnChoice)
XMLTAG_ENTRY_EX("annotation", XSD_NAMESPACEA, OnAnnotation)
XMLTAG_ENTRY_EX("length", XSD_NAMESPACEA, OnLength)
XMLTAG_ENTRY_EX("enumeration", XSD_NAMESPACEA, OnEnumeration)
XMLTAG_ENTRY_EX("pattern", XSD_NAMESPACEA, OnPattern)
XMLTAG_ENTRY_EX("scale", XSD_NAMESPACEA, OnScale)
XMLTAG_ENTRY_EX("period", XSD_NAMESPACEA, OnPeriod)
XMLTAG_ENTRY_EX("duration", XSD_NAMESPACEA, OnDuration)
XMLTAG_ENTRY_EX("maxLength", XSD_NAMESPACEA, OnMaxLength)
XMLTAG_ENTRY_EX("precision", XSD_NAMESPACEA, OnPrecision)
XMLTAG_ENTRY_EX("minInclusive", XSD_NAMESPACEA, OnMinInclusive)
XMLTAG_ENTRY_EX("minExclusive", XSD_NAMESPACEA, OnMinExclusive)
XMLTAG_ENTRY_EX("maxInclusive", XSD_NAMESPACEA, OnMaxInclusive)
XMLTAG_ENTRY_EX("maxExclusive", XSD_NAMESPACEA, OnMaxExclusive)
XMLTAG_ENTRY_EX("minLength", XSD_NAMESPACEA, OnMinLength)
XMLTAG_ENTRY_EX("encoding", XSD_NAMESPACEA, OnEncoding)
XMLTAG_ENTRY_EX("group", XSD_NAMESPACEA, OnGroup)
XMLTAG_ENTRY_EX("sequence", XSD_NAMESPACEA, OnSequence)
XMLTAG_ENTRY_EX("attribute", XSD_NAMESPACEA, OnAttribute)
XMLTAG_ENTRY_EX("attributeGroup", XSD_NAMESPACEA, OnAttributeGroup)
XMLTAG_ENTRY_EX("anyAttribute", XSD_NAMESPACEA, OnAnyAttribute)
// REVIEW: new ones
XMLTAG_ENTRY_EX("complexContent", XSD_NAMESPACEA, OnComplexContent)
XMLTAG_ENTRY_EX("simpleContent", XSD_NAMESPACEA, OnSimpleContent)
XMLTAG_ENTRY_EX("any", XSD_NAMESPACEA, OnAny)
END_XMLTAG_MAP()
/*
<complexType
abstract = "boolean"
base = "QName"
block = "#all | subset of {extension, restriction}"
content = "elementOnly | textOnly | mixed | empty"
derivedBy = "extension | restriction"
final = "#all | subset of {extension, restriction}"
id = "ID"
name ="NCName"
>
*/
BEGIN_XMLATTR_MAP()
XMLATTR_ENTRY("name", OnName)
XMLATTR_ENTRY("id", OnID)
XMLATTR_ENTRY("abstract", OnAbstract)
XMLATTR_ENTRY("base", OnBase)
XMLATTR_ENTRY("block", OnBlock)
XMLATTR_ENTRY("content", OnContent)
XMLATTR_ENTRY("derivedBy", OnDerivedBy)
XMLATTR_ENTRY("final", OnFinal)
END_XMLATTR_MAP()
TAG_METHOD_DECL(OnElement);
TAG_METHOD_DECL(OnAll);
TAG_METHOD_DECL(OnChoice);
TAG_METHOD_DECL(OnAnnotation);
TAG_METHOD_DECL(OnLength);
TAG_METHOD_DECL(OnPattern);
TAG_METHOD_DECL(OnEnumeration);
TAG_METHOD_DECL(OnScale);
TAG_METHOD_DECL(OnPeriod);
TAG_METHOD_DECL(OnDuration);
TAG_METHOD_DECL(OnMaxLength);
TAG_METHOD_DECL(OnPrecision);
TAG_METHOD_DECL(OnMinInclusive);
TAG_METHOD_DECL(OnMinExclusive);
TAG_METHOD_DECL(OnMaxInclusive);
TAG_METHOD_DECL(OnMaxExclusive);
TAG_METHOD_DECL(OnMinLength);
TAG_METHOD_DECL(OnEncoding);
TAG_METHOD_DECL(OnGroup);
TAG_METHOD_DECL(OnSequence);
TAG_METHOD_DECL(OnAttribute);
TAG_METHOD_DECL(OnAttributeGroup);
TAG_METHOD_DECL(OnAnyAttribute);
// new ones
TAG_METHOD_DECL(OnComplexContent);
TAG_METHOD_DECL(OnSimpleContent);
TAG_METHOD_DECL(OnAny);
ATTR_METHOD_DECL(OnName);
ATTR_METHOD_DECL(OnID);
ATTR_METHOD_DECL(OnAbstract);
ATTR_METHOD_DECL(OnBase);
ATTR_METHOD_DECL(OnBlock);
ATTR_METHOD_DECL(OnContent);
ATTR_METHOD_DECL(OnDerivedBy);
ATTR_METHOD_DECL(OnFinal);
// SAX callback: records xmlns prefix/URI mappings in scope for this type.
HRESULT __stdcall startPrefixMapping(
const wchar_t *wszPrefix,
int cchPrefix,
const wchar_t *wszUri,
int cchUri);
};
<|start_filename|>source/SProxy/Schema.cpp<|end_filename|>
//
// Schema.cpp
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "Attribute.h"
#include "Schema.h"
#include "Element.h"
#include "SimpleType.h"
#include "ComplexType.h"
#include "WSDLDocument.h"
// Registers a named complex type in this schema, keyed by its name.
// Returns p on success; NULL for a NULL/unnamed type or on map failure.
CComplexType * CSchema::AddComplexType(CComplexType * p)
{
    if (p == NULL || p->GetName().GetLength() == 0)
    {
        //
        // TODO: error
        //
        return NULL;
    }
    return (m_complexTypes.SetAt(p->GetName(), p) != NULL) ? p : NULL;
}
// Registers a named simple type in this schema, keyed by its name.
// Returns p on success; NULL for a NULL/unnamed type or on map failure.
CSimpleType * CSchema::AddSimpleType(CSimpleType * p)
{
    if (p == NULL || p->GetName().GetLength() == 0)
    {
        return NULL;
    }
    return (m_simpleTypes.SetAt(p->GetName(), p) != NULL) ? p : NULL;
}
// Registers a named top-level element in this schema, keyed by its name.
// Returns p on success; NULL for a NULL/unnamed element or on map failure.
CElement * CSchema::AddElement(CElement * p)
{
    if (p == NULL || p->GetName().GetLength() == 0)
    {
        return NULL;
    }
    return (m_elements.SetAt(p->GetName(), p) != NULL) ? p : NULL;
}
// Looks up a named XSD item in the enclosing WSDL document, trying the
// type kinds in order: complexType, then simpleType, then element.
// Returns NULL when there is no parent document, the parent is not a WSDL
// document, or no item with the given name/URI exists.
CXSDElement * CSchema::GetNamedItemFromParent(const CStringW& strUri, const CStringW& strName)
{
    CXMLDocument *pParentDoc = GetParentDocument();
    if (pParentDoc == NULL || pParentDoc->GetDocumentType() != WSDLDOC)
    {
        return NULL;
    }
    CWSDLDocument *pWSDLDocument = static_cast<CWSDLDocument *>(pParentDoc);
    CXSDElement *pItem = pWSDLDocument->GetComplexType(strName, strUri);
    if (pItem == NULL)
    {
        pItem = pWSDLDocument->GetSimpleType(strName, strUri);
    }
    if (pItem == NULL)
    {
        pItem = pWSDLDocument->GetElement(strName, strUri);
    }
    return pItem;
}
<|start_filename|>source/SProxy/Emit.cpp<|end_filename|>
//
// Emit.cpp
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "Emit.h"
#include "errordefs.h"
#include "XMLDocument.h"
// Variadic convenience wrappers around the global g_Emit diagnostics
// object. Each forwards its va_list to the corresponding CEmit method.
void EmitError(UINT uID, ...)
{
va_list arglist;
va_start(arglist, uID);
g_Emit.EmitError(uID, arglist);
va_end(arglist);
}
// Reports an error derived from an HRESULT.
void EmitErrorHr(HRESULT hr)
{
g_Emit.EmitErrorHr(hr);
}
void EmitWarning(UINT uID, ...)
{
va_list arglist;
va_start(arglist, uID);
g_Emit.EmitWarning(uID, arglist);
va_end(arglist);
}
// Emits an informational (non-error) message.
void Emit(UINT uID, ...)
{
va_list arglist;
va_start(arglist, uID);
g_Emit.Emit(uID, arglist);
va_end(arglist);
}
// Enables/disables warning output; returns the previous setting.
bool SetEmitWarnings(bool bWarn)
{
return g_Emit.SetEmitWarnings(bWarn);
}
void EmitCmdLineError(UINT uID, ...)
{
va_list arglist;
va_start(arglist, uID);
g_Emit.EmitCmdLineError(uID, arglist);
va_end(arglist);
}
void EmitCmdLineWarning(UINT uID, ...)
{
va_list arglist;
va_start(arglist, uID);
g_Emit.EmitCmdLineWarning(uID, arglist);
va_end(arglist);
}
// File-located diagnostics: uIDExtra is an optional extra-info string id;
// note va_start anchors on uIDExtra (the last named parameter).
void EmitFileWarning(UINT uID, LPCWSTR wszFile, int nLine, int nCol, UINT uIDExtra, ...)
{
va_list arglist;
va_start(arglist, uIDExtra);
g_Emit.EmitFileWarning(uID, wszFile, nLine, nCol, uIDExtra, arglist);
va_end(arglist);
}
void EmitFileError(UINT uID, LPCWSTR wszFile, int nLine, int nCol, UINT uIDExtra, ...)
{
va_list arglist;
va_start(arglist, uIDExtra);
g_Emit.EmitFileError(uID, wszFile, nLine, nCol, uIDExtra, arglist);
va_end(arglist);
}
// Overloads that take the location (URI, line, column) from a CXMLElement.
void EmitFileWarning(UINT uID, CXMLElement *pElem, UINT uIDExtra, ...)
{
va_list arglist;
va_start(arglist, uIDExtra);
g_Emit.EmitFileWarning(uID, pElem->GetParentDocument()->GetDocumentUri(), pElem->GetLineNumber(), pElem->GetColumnNumber(), uIDExtra, arglist);
va_end(arglist);
}
void EmitFileError(UINT uID, CXMLElement *pElem, UINT uIDExtra, ...)
{
va_list arglist;
va_start(arglist, uIDExtra);
g_Emit.EmitFileError(uID, pElem->GetParentDocument()->GetDocumentUri(), pElem->GetLineNumber(), pElem->GetColumnNumber(), uIDExtra, arglist);
va_end(arglist);
}
<|start_filename|>source/SProxy/ElementParser.h<|end_filename|>
//
// ElementParser.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "Parser.h"
#include "Emit.h"
#include "resource.h"
class CElement;
// SAX parser for an XSD <element> declaration: routes child tags and
// attributes (see the schema fragment below) onto a CElement object.
class CElementParser : public CParserBase
{
private:
CElement * m_pElem;   // target object being populated (not owned)
public:
inline CElementParser(ISAXXMLReader *pReader, CParserBase *pParent, DWORD dwLevel, CElement * pElem)
:CParserBase(pReader, pParent, dwLevel), m_pElem(pElem)
{
}
inline CElement * GetElement()
{
return m_pElem;
}
inline void SetElement(CElement * pElem)
{
m_pElem = pElem;
}
// Debug-only trace for tags this tool recognizes but does not process.
inline void MarkUnsupported(const wchar_t *wszQName, int cchQName)
{
#ifdef _DEBUG
int nLine;
int nCol;
GetLocator()->getLineNumber(&nLine);
GetLocator()->getColumnNumber(&nCol);
ATLTRACE( _T("%sUnsupported tag@(%d, %d) : %.*ws -- skipping element\n"), GetTabs(GetLevel()),
nLine, nCol,
cchQName, wszQName );
#endif
}
/*
simpleType, complexType, key, keyref, unique
*/
BEGIN_XMLTAG_MAP()
XMLTAG_ENTRY_EX("simpleType", XSD_NAMESPACEA, OnSimpleType)
XMLTAG_ENTRY_EX("complexType", XSD_NAMESPACEA, OnComplexType)
XMLTAG_ENTRY_EX("key", XSD_NAMESPACEA, OnKey)
XMLTAG_ENTRY_EX("keyRef", XSD_NAMESPACEA, OnKeyRef)
XMLTAG_ENTRY_EX("unique", XSD_NAMESPACEA, OnUnique)
END_XMLTAG_MAP()
/*
<element
abstract = "boolean"
block = "#all or (possibly empty) subset of {equivClass, extension, restriction}"
default = "string"
equivClass = "QName"
final = "#all or (possibly empty) subset of {extension, restriction}"
fixed = "string"
form = "qualified | unqualified"
id = "ID"
maxOccurs = "nonNegativeInteger | unbounded"
minOccurs = "nonNegativeInteger"
name = "NCName"
nullable = "boolean"
ref = "QName"
type = "QName"
{any attributes with non-schema namespace}
>
*/
BEGIN_XMLATTR_MAP()
XMLATTR_ENTRY("name", OnName)
XMLATTR_ENTRY("type", OnType)
XMLATTR_ENTRY("minOccurs", OnMinOccurs)
XMLATTR_ENTRY("maxOccurs", OnMaxOccurs)
XMLATTR_ENTRY("nillable", OnNillable)
XMLATTR_ENTRY("ref", OnRef)
XMLATTR_ENTRY("id", OnID)
XMLATTR_ENTRY("abstract", OnAbstract)
XMLATTR_ENTRY("block", OnBlock)
XMLATTR_ENTRY("default", OnDefault)
XMLATTR_ENTRY("equivClass", OnEquivClass)
XMLATTR_ENTRY("final", OnFinal)
XMLATTR_ENTRY("fixed", OnFixed)
XMLATTR_ENTRY("form", OnForm)
XMLATTR_ENTRY_EX("arrayType", SOAP_NAMESPACEA, OnArrayType)
XMLATTR_ENTRY_EX("SizeIs", ATLS_NAMESPACEA, OnSizeIs)
END_XMLATTR_MAP()
TAG_METHOD_DECL(OnSimpleType);
TAG_METHOD_DECL(OnComplexType);
TAG_METHOD_DECL(OnKey);
TAG_METHOD_DECL(OnKeyRef);
TAG_METHOD_DECL(OnUnique);
ATTR_METHOD_DECL(OnName);
ATTR_METHOD_DECL(OnType);
ATTR_METHOD_DECL(OnMinOccurs);
ATTR_METHOD_DECL(OnMaxOccurs);
ATTR_METHOD_DECL(OnNillable);
ATTR_METHOD_DECL(OnRef);
ATTR_METHOD_DECL(OnID);
ATTR_METHOD_DECL(OnAbstract);
ATTR_METHOD_DECL(OnBlock);
ATTR_METHOD_DECL(OnDefault);
ATTR_METHOD_DECL(OnEquivClass);
ATTR_METHOD_DECL(OnFinal);
ATTR_METHOD_DECL(OnFixed);
ATTR_METHOD_DECL(OnForm);
ATTR_METHOD_DECL(OnArrayType);
ATTR_METHOD_DECL(OnSizeIs);
// SAX callback: records xmlns prefix/URI mappings in scope for this element.
HRESULT __stdcall startPrefixMapping(
const wchar_t *wszPrefix,
int cchPrefix,
const wchar_t *wszUri,
int cchUri);
};
<|start_filename|>source/SProxy/CodeTypeBuilder.h<|end_filename|>
//
// CodeTypeBuilder.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "CodeTypes.h"
#include "WSDLDocument.h"
//
// ElementTraits class for CElement*, CComplexType*, CSimpleType*
//
// Hash/compare traits for pointers to XSD model elements (CElement*,
// CComplexType*, CSimpleType*), used as the key traits of the CAtlMap
// collections below. An element's identity is the triple
// (owning document URI, schema target namespace, element name).
template <typename T>
class CXSDElementPtrTraits : public CElementTraitsBase<T>
{
public:
    typedef T* INARGTYPE;
    typedef T*& OUTARGTYPE;

    // Hashes the identifying triple by chaining XSDHash over each string.
    // Document/schema components are skipped when the corresponding parent
    // pointer is NULL; the element name always contributes.
    static ULONG Hash( INARGTYPE element )
    {
        ATLASSERT( element != NULL );
        ULONG nVal = 0;
        CXMLDocument *pParentDoc = element->GetParentDocument();
        if (pParentDoc != NULL)
        {
            nVal = XSDHash((LPCWSTR) pParentDoc->GetDocumentUri(), nVal);
        }
        CSchema *pParentSchema = element->GetParentSchema();
        if (pParentSchema != NULL)
        {
            nVal = XSDHash((LPCWSTR) pParentSchema->GetTargetNamespace(), nVal);
        }
        return XSDHash((LPCWSTR) element->GetName(), nVal);
    }

    // Equality on the identifying triple. Elements missing a parent
    // document or parent schema always compare unequal.
    static bool CompareElements( INARGTYPE element1, INARGTYPE element2 )
    {
        ATLASSERT( element1 != NULL );
        ATLASSERT( element2 != NULL );
        CXMLDocument *pLhsDoc = element1->GetParentDocument();
        CXMLDocument *pRhsDoc = element2->GetParentDocument();
        if (pLhsDoc == NULL || pRhsDoc == NULL)
        {
            return false;
        }
        CSchema *pLhsSchema = element1->GetParentSchema();
        CSchema *pRhsSchema = element2->GetParentSchema();
        if (pLhsSchema == NULL || pRhsSchema == NULL)
        {
            return false;
        }
        return pLhsDoc->GetDocumentUri() == pRhsDoc->GetDocumentUri() &&
            pLhsSchema->GetTargetNamespace() == pRhsSchema->GetTargetNamespace() &&
            element1->GetName() == element2->GetName();
    }

    // Lexicographic ordering: document URI first, then target namespace,
    // then element name. When a parent document is missing the result is 1;
    // when the documents compare equal but a parent schema is missing the
    // result is 0 (both cases match the original control flow).
    static int CompareElementsOrdered( INARGTYPE element1, INARGTYPE element2 )
    {
        ATLASSERT( element1 != NULL );
        ATLASSERT( element2 != NULL );
        CXMLDocument *pLhsDoc = element1->GetParentDocument();
        CXMLDocument *pRhsDoc = element2->GetParentDocument();
        if (pLhsDoc == NULL || pRhsDoc == NULL)
        {
            return 1;
        }
        int nCmp = pLhsDoc->GetDocumentUri().Compare( pRhsDoc->GetDocumentUri() );
        if (nCmp != 0)
        {
            return nCmp;
        }
        CSchema *pLhsSchema = element1->GetParentSchema();
        CSchema *pRhsSchema = element2->GetParentSchema();
        if (pLhsSchema == NULL || pRhsSchema == NULL)
        {
            return 0;
        }
        nCmp = pLhsSchema->GetTargetNamespace().Compare( pRhsSchema->GetTargetNamespace() );
        if (nCmp != 0)
        {
            return nCmp;
        }
        return element1->GetName().Compare( element2->GetName() );
    }
};
// Translates a parsed WSDL document (CWSDLDocument) into the code model
// (CCodeProxy) that the generator emits: one CCodeFunction per port-type
// operation, CCodeStruct/CCodeEnum per referenced schema type, plus
// bookkeeping for SOAP headers and C++-safe identifier names.
class CCodeTypeBuilder
{
private:
    CWSDLDocument * m_pDoc;     // input WSDL document; not owned
    CCodeProxy * m_pProxy;      // output code model; not owned
    //
    // collections for the various code elements
    //
    typedef CAtlPtrList<CCodeFunction*> CODEFUNCTIONLIST;
    typedef CAtlPtrMap<CSimpleType*, CCodeEnum*, CXSDElementPtrTraits<CSimpleType> > CODEENUMMAP;
    typedef CAtlPtrMap<CComplexType*, CCodeStruct*, CXSDElementPtrTraits<CComplexType> > CODESTRUCTMAP;
    typedef CAtlMap<CComplexType*, CCodeTypedElement, CXSDElementPtrTraits<CComplexType> > CODETYPEMAP;
    typedef CAtlMap<CSimpleType*, CCodeTypedElement, CXSDElementPtrTraits<CSimpleType> > CODESIMPLETYPEMAP;
    // REVIEW: headers mapped by message (unique, I would think)
    typedef CAtlMap<CWSDLMessagePart*, CCodeTypedElement *> CODEHEADERPARTMAP;
    typedef CAtlMap<CXSDElement*, CCodeTypedElement *> CODEHEADERTYPEMAP;
    typedef CAtlMap<CComplexType*, int, CXSDElementPtrTraits<CComplexType> > PARSEMAP;
    // map for C++ struct, enum, function, parameter, enum entry, struct field names
    typedef CAtlMap<CStringA, int, CStringRefElementTraits<CStringA> > NAMEMAP;
    CODEFUNCTIONLIST m_functions;       // generated functions (operations)
    CODESTRUCTMAP m_structs;            // complexType -> generated struct
    CODEENUMMAP m_enums;                // simpleType -> generated enum
    CODETYPEMAP m_codeTypes;            // complexType -> typed-element info
    CODESIMPLETYPEMAP m_codeEnums;      // simpleType -> typed-element info
    CODEHEADERPARTMAP m_headersByPart;  // SOAP headers keyed by message part
    CODEHEADERTYPEMAP m_headersByType;  // SOAP headers keyed by schema element
    // map of what we are currently parsing
    PARSEMAP m_currParse;
    // map of named elements we've encountered at the global scope
    NAMEMAP m_globalNameMap;
    // global counter for duplicate names
    int m_nNameCounter;
public:
    // Both pointers may be supplied later via Initialize()/Build().
    CCodeTypeBuilder(CWSDLDocument *pDoc = NULL, CCodeProxy * pProxy = NULL)
        :m_pDoc(pDoc), m_pProxy(pProxy), m_nNameCounter(0)
    {
    }
    // Sets the input document (required) and optionally the output proxy.
    inline HRESULT Initialize(CWSDLDocument *pDoc, CCodeProxy * pProxy = NULL)
    {
        if (!pDoc)
        {
            return E_INVALIDARG;
        }
        if (pProxy)
        {
            m_pProxy = pProxy;
        }
        m_pDoc = pDoc;
        return S_OK;
    }
    // Entry point: walks the WSDL document and fills the code model.
    HRESULT Build(CCodeProxy * pCodeProxy = NULL, CWSDLDocument *pDoc = NULL);
private:
    // WSDL traversal: service -> port -> binding -> portType -> message.
    HRESULT ProcessService(CWSDLService *pSvc);
    HRESULT ProcessPort(CWSDLPort *pPort);
    HRESULT ProcessBinding(CWSDLBinding *pBinding);
    HRESULT ProcessPortType(CWSDLPortType *pPortType, CWSDLBinding *pBinding);
    HRESULT ProcessMessage(
        CWSDLPortTypeIO *pIO,
        CWSDLPortTypeOperation *pBindingOp,
        CWSDLBinding *pBinding,
        CWSDLMessage *pMessage,
        CCodeFunction *pCodeFunc,
        DWORD dwFlags);
    // Per-binding-style message processing: PID (parameters-in-document),
    // PAD (parameters-as-document), and RPC-encoded.
    // TODO (jasjitg): must respect parts= with all these
    HRESULT ProcessMessage_PID(
        CWSDLMessage *pMsg,
        CCodeFunction *pCodeFunc,
        DWORD dwFlags,
        DWORD dwCallFlags);
    HRESULT ProcessMessagePart_PID(
        CWSDLMessagePart *pPart,
        CCodeFunction *pCodeFunc,
        DWORD dwFlags,
        DWORD dwCallFlags);
    HRESULT ProcessMessage_PAD(
        CWSDLMessage *pMsg,
        CCodeFunction *pCodeFunc,
        DWORD dwFlags,
        DWORD dwCallFlags);
    HRESULT ProcessMessagePart_PAD(
        CWSDLMessagePart *pPart,
        CCodeFunction *pCodeFunc,
        DWORD dwFlags,
        DWORD dwCallFlags);
    HRESULT ProcessMessage_RPC_Encoded(
        CWSDLMessage *pMsg,
        CCodeFunction *pCodeFunc,
        DWORD dwFlags,
        DWORD dwCallFlags);
    HRESULT ProcessMessagePart_RPC_Encoded(
        CWSDLMessagePart *pPart,
        CCodeFunction *pCodeFunc,
        DWORD dwFlags,
        DWORD dwCallFlags);
    // REVIEW (jasjitg): not going to support these in this version
    // HRESULT ProcessMessage_RPC_Literal(
    //      CWSDLMessage *pMsg,
    //      CCodeFunction *pCodeFunc,
    //      DWORD dwFlags,
    //      DWORD dwCallFlags);
    //
    // HRESULT ProcessMessagePart_RPC_Literal(
    //      CWSDLMessagePart *pPart,
    //      CCodeFunction *pCodeFunc,
    //      DWORD dwFlags,
    //      DWORD dwCallFlags);
    // Schema traversal: elements, complex types, simple types.
    HRESULT ProcessElement(
        CElement *pElem,
        CCodeElementContainer *pContainer,
        DWORD dwFlags,
        CODETYPE parentCodeType,
        BOOL fTopLevel = FALSE,
        CWSDLMessagePart *pMsgPart = NULL);
    HRESULT ProcessComplexType(
        CComplexType *pType,
        CCodeElementContainer *pContainer,
        DWORD dwFlags);
    HRESULT ProcessSimpleType(
        CSimpleType *pType,
        XSDTYPE *pXSDType,
        LPDWORD pdwFlags);
    // Orders generated structs so dependencies are emitted first.
    HRESULT SortStructs();
    HRESULT SortStructHelper(POSITION pos);
    // C++ identifier sanitization and duplicate-name resolution.
    HRESULT CreateSafeNames(CCodeElementContainer *pElem);
    HRESULT CheckGlobalNameMap(CStringA& strName, bool bAddToMap = false);
    HRESULT CheckNameMap(NAMEMAP &map, CStringA& strName, bool bAddToMap = false);
    HRESULT GetTypeFromElement(
        CElement *pElem,
        CCodeTypedElement *pCodeElem,
        CCodeElementContainer *pContainer,
        DWORD dwFlags);
    HRESULT ProcessSchemaElement(
        CXSDElement *pElem,
        CCodeTypedElement *pCodeElem,
        CCodeElementContainer *pContainer,
        DWORD dwFlags);
    HRESULT ProcessXSDElement(
        CXMLElement *pElem,
        CQName& typeName,
        CCodeTypedElement *pCodeElem);
    HRESULT ProcessMessagePart_Type(
        CWSDLMessagePart *pPart,
        CXSDElement *pXSDElement,
        XSDTYPE xsdType,
        CODETYPE codeType,
        const CStringW& strName,
        CCodeFunction *pCodeFunc,
        DWORD dwFlags,
        DWORD dwCallFlags);
    // Is it a PAD, PID, RPC, etc.
    HRESULT GetCallFlags(
        LPCWSTR wszParts,
        CWSDLMessage *pMessage,
        CWSDLPortTypeIO *pIO,
        CWSDLPortTypeOperation *pBindingOp,
        CWSDLBinding *pBinding,
        LPDWORD pdwFlags);
    HRESULT GetCallFlags(
        CWSDLMessage *pMessage,
        CWSDLMessagePart *pPart,
        CWSDLPortTypeIO *pIO,
        CWSDLPortTypeOperation *pBindingOp,
        CWSDLBinding *pBinding,
        LPDWORD pdwFlags);
    HRESULT CheckDocLiteralNamespace(
        CCodeFunction *pCodeFunc,
        CXSDElement *pXSDElement,
        DWORD dwFlags,
        DWORD dwCallFlags);
    // Array detection/handling for SOAP arrays and ATLS variable arrays.
    CODEFLAGS IsArrayDefinition(CComplexType *pType);
    BOOL IsVarArrayDefinition(CComplexType *pType, DWORD dwFlags, CElement **ppElement);
    HRESULT ProcessArrayDefintion(
        CElement *pElem,
        CCodeElementContainer *pContainer,
        CCodeTypedElement *pCodeElem,
        DWORD dwFlags);
    HRESULT ProcessArray(
        CComplexType *pType,
        CCodeElementContainer *pContainer,
        CCodeTypedElement *pCodeElem,
        DWORD dwFlags);
    HRESULT ProcessVarArray(
        CElement *pElement,
        CCodeElementContainer *pContainer,
        CCodeTypedElement *pCodeElem,
        DWORD dwFlags);
    HRESULT GetTypeFromQName(
        CQName& type,
        CXSDElement *pXSDElement,
        CXSDElement **ppXSDElement,
        XSDTYPE *pXSD);
    HRESULT GetArrayDimensions(
        CAttribute *pAttribute,
        CCodeTypedElement *pCodeElem);
    // SOAP header collection and duplicate detection.
    HRESULT ProcessSoapHeaders(
        CCodeFunction *pElem,
        CWSDLPortTypeIO *pIO,
        CWSDLPortTypeOperation *pBindingOp,
        CWSDLBinding *pBinding,
        DWORD dwFlags);
    HRESULT GetElementInfo(CXMLElement *pElem, CQName& name, CStringW& strUri);
    HRESULT CheckDuplicateHeaders(CCodeFunction *pCodeFunc, CCodeTypedElement *pElem, DWORD dwFlags);
    HRESULT AddHeaderToFunction(CCodeFunction *pCodeFunc, CAutoPtr<CCodeTypedElement>& spElem, CWSDLMessagePart *pPart);
    CCodeTypedElement * GetParameterByName(CCodeFunction *pCodeFunc, const CStringW& strName);
    HRESULT GetNameFromSchemaElement(CXSDElement *pXSDElement, CStringW& strName);
    CCodeTypedElement * GetHeaderByName(CCodeFunction *pCodeFunc, const CStringW& strName);
    HRESULT CheckAndAddHeader(
        CCodeFunction *pCodeFunc,
        CAutoPtr<CCodeTypedElement>& spElem,
        DWORD dwFlags,
        CWSDLMessagePart *pPart);
};
// DJB-style string hash over a NUL-terminated wide string:
// h = h * 33 + c  (identical to the (h << 5) + h + c form for ULONG
// arithmetic). The caller-supplied seed lets several strings be chained
// into a single hash value, as CXSDElementPtrTraits::Hash does.
inline ULONG XSDHash(LPCWSTR wsz, ULONG nHash)
{
    for (; *wsz != 0; ++wsz)
    {
        nHash = nHash * 33 + (*wsz);
    }
    return nHash;
}
<|start_filename|>source/SProxy/ComplexTypeParser.cpp<|end_filename|>
//
// ComplexTypeParser.cpp
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "Attribute.h"
#include "Content.h"
#include "Element.h"
#include "ComplexType.h"
#include "ContentParser.h"
#include "AttributeParser.h"
#include "ComplexTypeParser.h"
#include "ElementParser.h"
#include "Emit.h"
#include "resource.h"
// <element> child of a complexType: adds a CElement to the current type
// and pushes a CElementParser onto the global parser stack to consume the
// element's attributes and children.
TAG_METHOD_IMPL(CComplexTypeParser, OnElement)
{
    TRACE_PARSE_ENTRY();
    CComplexType * pCurr = GetComplexType();
    if (pCurr != NULL)
    {
        CElement * pElem = pCurr->AddElement();
        if (pElem != NULL)
        {
            SetXSDElementInfo(pElem, pCurr, GetLocator());
            // ownership of the parser transfers to g_ParserList on success
            CAutoPtr<CElementParser> p( new CElementParser(GetReader(), this, GetLevel(), pElem) );
            if (p != NULL)
            {
                if (g_ParserList.AddHead(p) != NULL)
                {
                    return p.Detach()->GetAttributes(pAttributes);
                }
            }
        }
    }
    // reached on allocation failure (or when no complex type is current)
    EmitErrorHr(E_OUTOFMEMORY);
    return E_FAIL;
}
// <all> model group: treated like a plain content container; keep parsing
// children at this level (DisableReset keeps this parser active).
TAG_METHOD_IMPL(CComplexTypeParser, OnAll)
{
    TRACE_PARSE_ENTRY();
    DisableReset();
    return S_OK;
}

// <choice> model group: not supported. The whole complex type is marked
// unsupported and the subtree is skipped.
TAG_METHOD_IMPL(CComplexTypeParser, OnChoice)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitString(wszNamespaceUri, wszLocalName);
    CComplexType *pCurr = GetComplexType();
    if (pCurr != NULL)
    {
        if (pCurr->GetElementType() == XSD_COMPLEXTYPE)
        {
            pCurr->SetElementType(XSD_UNSUPPORTED);
            return SkipElement();
        }
        return E_FAIL;
    }
    EmitError(IDS_SDL_INTERNAL);
    return E_FAIL;
}

// <annotation>: documentation-only content; skipped.
TAG_METHOD_IMPL(CComplexTypeParser, OnAnnotation)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}
// <length> facet: not supported by sproxy; skip the subtree after emitting
// a skip warning.
// Consistency fix: every other unsupported-facet handler in this file
// (OnEnumeration, OnPattern, ...) also calls MarkUnsupported so the
// debug-build trace records the skipped tag; OnLength was the only one
// that omitted it. MarkUnsupported is a no-op in release builds, so
// release behavior is unchanged.
TAG_METHOD_IMPL(CComplexTypeParser, OnLength)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}
// The following facet/structure tags are not supported by sproxy. Each
// handler notes the tag in debug builds (MarkUnsupported), emits a skip
// warning, and skips the whole subtree. The parse continues normally.
TAG_METHOD_IMPL(CComplexTypeParser, OnEnumeration)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}

TAG_METHOD_IMPL(CComplexTypeParser, OnPattern)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}

TAG_METHOD_IMPL(CComplexTypeParser, OnScale)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}

TAG_METHOD_IMPL(CComplexTypeParser, OnPeriod)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}

TAG_METHOD_IMPL(CComplexTypeParser, OnDuration)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}

TAG_METHOD_IMPL(CComplexTypeParser, OnMaxLength)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}

TAG_METHOD_IMPL(CComplexTypeParser, OnPrecision)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}

TAG_METHOD_IMPL(CComplexTypeParser, OnMinInclusive)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}

TAG_METHOD_IMPL(CComplexTypeParser, OnMinExclusive)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}

TAG_METHOD_IMPL(CComplexTypeParser, OnMaxInclusive)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}

TAG_METHOD_IMPL(CComplexTypeParser, OnMaxExclusive)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}

TAG_METHOD_IMPL(CComplexTypeParser, OnMinLength)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}

TAG_METHOD_IMPL(CComplexTypeParser, OnEncoding)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}

TAG_METHOD_IMPL(CComplexTypeParser, OnGroup)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}
// <sequence> model group: treated like a plain content container; keep
// parsing children at this level.
TAG_METHOD_IMPL(CComplexTypeParser, OnSequence)
{
    TRACE_PARSE_ENTRY();
    DisableReset();
    return S_OK;
}

// <attribute> child: adds a CAttribute to the current complex type and
// pushes a CAttributeParser for its attributes/children.
TAG_METHOD_IMPL(CComplexTypeParser, OnAttribute)
{
    TRACE_PARSE_ENTRY();
    CComplexType *pCurr = GetComplexType();
    if (pCurr != NULL)
    {
        CAttribute *pElem = pCurr->AddAttribute();
        if (pElem != NULL)
        {
            SetXSDElementInfo(pElem, pCurr, GetLocator());
            // ownership of the parser transfers to g_ParserList on success
            CAutoPtr<CAttributeParser> p( new CAttributeParser(GetReader(), this, GetLevel(), pElem) );
            if (p != NULL)
            {
                if (g_ParserList.AddHead(p) != NULL)
                {
                    return p.Detach()->GetAttributes(pAttributes);
                }
            }
        }
    }
    EmitErrorHr(E_OUTOFMEMORY);
    return E_FAIL;
}
// <attributeGroup>: not supported; skipped with a warning.
TAG_METHOD_IMPL(CComplexTypeParser, OnAttributeGroup)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}

// <anyAttribute>: not supported; skipped with a warning.
TAG_METHOD_IMPL(CComplexTypeParser, OnAnyAttribute)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}
// <simpleContent>: currently unsupported and skipped. The commented-out
// code below is the disabled implementation that would have parsed it via
// CContentParser.
TAG_METHOD_IMPL(CComplexTypeParser, OnSimpleContent)
{
    TRACE_PARSE_ENTRY();
    /*
    CComplexType *pCurr = GetComplexType();
    if (pCurr != NULL)
    {
        if (pCurr->GetElementType() == XSD_COMPLEXTYPE)
        {
            CContent *pElem = pCurr->AddContent();
            pElem->SetParentDocument(pCurr->GetParentDocument());
            pElem->SetParentElement(pCurr);
            pElem->SetElementType(XSD_SIMPLECONTENT);
            CContentParser * p = new CContentParser(GetReader(), this, GetLevel(), pElem);
            if (p != NULL)
            {
                if (g_ParserList.AddHead(p) != NULL)
                {
                    return p->GetAttributes(pAttributes);
                }
            }
        }
        else
        {
            return OnUnrecognizedTag(wszNamespaceUri,
                cchNamespaceUri, wszLocalName, cchLocalName,
                wszQName, cchQName, pAttributes);
        }
    }
    EmitErrorHr(E_OUTOFMEMORY);
    return E_FAIL;
    */
    MarkUnsupported(wszQName, cchQName);
    EmitSkip(wszNamespaceUri, wszLocalName);
    return SkipElement();
}
// <any> wildcard: not supported. Marks the whole complex type unsupported
// and skips the subtree.
TAG_METHOD_IMPL(CComplexTypeParser, OnAny)
{
    TRACE_PARSE_ENTRY();
    MarkUnsupported(wszQName, cchQName);
    EmitString(wszNamespaceUri, wszLocalName);
    CComplexType *pCurr = GetComplexType();
    if (pCurr != NULL)
    {
        if (pCurr->GetElementType() == XSD_COMPLEXTYPE)
        {
            pCurr->SetElementType(XSD_UNSUPPORTED);
            return SkipElement();
        }
        return E_FAIL;
    }
    EmitError(IDS_SDL_INTERNAL);
    return E_FAIL;
}
// <complexContent>: adds a CContent node to the current complex type and
// pushes a CContentParser for its children. Only valid on a genuine
// complexType; otherwise falls back to the unrecognized-tag path.
TAG_METHOD_IMPL(CComplexTypeParser, OnComplexContent)
{
    TRACE_PARSE_ENTRY();
    CComplexType *pCurr = GetComplexType();
    if (pCurr != NULL)
    {
        if (pCurr->GetElementType() == XSD_COMPLEXTYPE)
        {
            CContent *pElem = pCurr->AddContent();
            if (pElem != NULL)
            {
                SetXSDElementInfo(pElem, pCurr, GetLocator());
                pElem->SetElementType(XSD_COMPLEXCONTENT);
                // ownership of the parser transfers to g_ParserList on success
                CAutoPtr<CContentParser> p( new CContentParser(GetReader(), this, GetLevel(), pElem) );
                if (p != NULL)
                {
                    if (g_ParserList.AddHead(p) != NULL)
                    {
                        return p.Detach()->GetAttributes(pAttributes);
                    }
                }
            }
        }
        else
        {
            return OnUnrecognizedTag(wszNamespaceUri,
                cchNamespaceUri, wszLocalName, cchLocalName,
                wszQName, cchQName, pAttributes);
        }
    }
    EmitErrorHr(E_OUTOFMEMORY);
    return E_FAIL;
}
// name= attribute: recorded only while the model object is still a genuine
// complexType (silently accepted otherwise, e.g. after the type was marked
// unsupported).
ATTR_METHOD_IMPL(CComplexTypeParser, OnName)
{
    TRACE_PARSE_ENTRY();
    CComplexType * pType = GetComplexType();
    if (pType == NULL)
    {
        return E_FAIL;
    }
    if (pType->GetElementType() != XSD_COMPLEXTYPE)
    {
        return S_OK;
    }
    return pType->SetName(wszValue, cchValue);
}
// id= attribute: stored on the current complex type; E_FAIL when no type
// is current.
ATTR_METHOD_IMPL(CComplexTypeParser, OnID)
{
    TRACE_PARSE_ENTRY();
    CComplexType * pType = GetComplexType();
    return (pType != NULL) ? pType->SetID(wszValue, cchValue) : E_FAIL;
}
// abstract= attribute: not supported; noted in debug builds and otherwise
// ignored (the parse continues successfully).
ATTR_METHOD_IMPL(CComplexTypeParser, OnAbstract)
{
    TRACE_PARSE_ENTRY();
    CComplexType * pCurr = GetComplexType();
    if (pCurr != NULL && pCurr->GetElementType() == XSD_COMPLEXTYPE)
    {
        MarkUnsupported(wszQName, cchQName);
    }
    return S_OK;
}
// base= attribute (restriction/extension): records the base type QName on
// the current complex type; E_FAIL when no type is current.
ATTR_METHOD_IMPL(CComplexTypeParser, OnBase)
{
    TRACE_PARSE_ENTRY();
    CComplexType * pType = GetComplexType();
    return (pType != NULL) ? pType->SetBase(wszValue, cchValue) : E_FAIL;
}
// block= attribute: accepted and ignored.
ATTR_METHOD_IMPL(CComplexTypeParser, OnBlock)
{
    TRACE_PARSE_ENTRY();
    return S_OK;
}
// content= attribute: recorded only on a genuine complexType; otherwise
// ignored with success.
ATTR_METHOD_IMPL(CComplexTypeParser, OnContent)
{
    TRACE_PARSE_ENTRY();
    CComplexType * pCurr = GetComplexType();
    if (pCurr != NULL)
    {
        HRESULT hr = S_OK;
        if (pCurr->GetElementType() == XSD_COMPLEXTYPE)
        {
            hr = pCurr->SetContentType(wszValue, cchValue);
        }
        return hr;
    }
    return E_FAIL;
}
// derivedBy= attribute: parsed on a genuine complexType; an unrecognized
// value is reported as an invalid-value error.
ATTR_METHOD_IMPL(CComplexTypeParser, OnDerivedBy)
{
    TRACE_PARSE_ENTRY();
    CComplexType * pCurr = GetComplexType();
    if (pCurr != NULL)
    {
        HRESULT hr = S_OK;
        if (pCurr->GetElementType() == XSD_COMPLEXTYPE)
        {
            if (FAILED(pCurr->SetDerivedBy(wszValue, cchValue)))
            {
                EmitInvalidValue("derivedBy", wszValue);
                hr = E_FAIL;
            }
        }
        return hr;
    }
    return E_FAIL;
}
// final= attribute: not supported; noted in debug builds and otherwise
// ignored (the parse continues successfully).
ATTR_METHOD_IMPL(CComplexTypeParser, OnFinal)
{
    TRACE_PARSE_ENTRY();
    CComplexType * pCurr = GetComplexType();
    if (pCurr != NULL && pCurr->GetElementType() == XSD_COMPLEXTYPE)
    {
        MarkUnsupported(wszQName, cchQName);
    }
    return S_OK;
}
// SAX callback: forwards an xmlns prefix-to-URI mapping to the current
// complex type so later QName references can be resolved.
HRESULT __stdcall CComplexTypeParser::startPrefixMapping(
    const wchar_t *wszPrefix,
    int cchPrefix,
    const wchar_t *wszUri,
    int cchUri)
{
    CComplexType * pType = GetComplexType();
    if (pType == NULL)
    {
        return E_FAIL;
    }
    return pType->SetNamespaceUri(wszPrefix, cchPrefix, wszUri, cchUri);
}
<|start_filename|>source/SProxy/ErrorHandler.h<|end_filename|>
//
// ErrorHandler.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
// ISAXErrorHandler implementation: receives SAX parse errors, fatal
// errors, and warnings and reports them tagged with the location string
// set via SetLocation() (implementations are in the .cpp).
class CErrorHandler : public ISAXErrorHandler
{
private:
    const wchar_t * m_wszLocation;   // document URI/path used in messages; not owned
public:
    CErrorHandler()
        :m_wszLocation(NULL)
    {
    }
    // Sets the location string reported with subsequent errors. The caller
    // must keep the string alive while this handler is in use.
    void SetLocation(const wchar_t * wszLocation)
    {
        m_wszLocation = wszLocation;
    }
    virtual ~CErrorHandler() {}
    // IUnknown -- implemented in the .cpp.
    HRESULT __stdcall QueryInterface(REFIID riid, void **ppv);
    ULONG __stdcall AddRef();
    ULONG __stdcall Release();
    // ISAXErrorHandler callbacks -- implemented in the .cpp.
    HRESULT __stdcall error(
        ISAXLocator *pLocator,
        const wchar_t *wszErrorMessage,
        HRESULT hrErrorCode);
    HRESULT __stdcall fatalError(
        ISAXLocator *pLocator,
        const wchar_t *wszErrorMessage,
        HRESULT hrErrorCode);
    HRESULT __stdcall ignorableWarning(
        ISAXLocator *pLocator,
        const wchar_t *wszErrorMessage,
        HRESULT hrErrorCode);
};
<|start_filename|>source/SProxy/WSDLServicePortParser.cpp<|end_filename|>
//
// WSDLServicePortParser.cpp
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "Util.h"
#include "WSDLServicePortParser.h"
#include "WSDLPort.h"
#include "Attribute.h"
#include "Content.h"
#include "Element.h"
#include "ComplexType.h"
// soap:address extension under <port>: records the SOAP endpoint URL from
// the required location= attribute, then skips the rest of the element.
TAG_METHOD_IMPL(CWSDLServicePortParser, OnSoapAddress)
{
    TRACE_PARSE_ENTRY();
    CWSDLPort * pCurr = GetPort();
    if (pCurr != NULL)
    {
        CStringW strAddress;
        // sizeof("location")-1 == character count of L"location"
        if (S_OK == GetAttribute(pAttributes, L"location", sizeof("location")-1, strAddress))
        {
            if (SUCCEEDED(pCurr->SetSoapAddress(strAddress)))
            {
                return SkipElement();
            }
        }
        // NOTE(review): also reached when the attribute was present but
        // SetSoapAddress failed, so the missing-attribute message can be
        // misleading in that case.
        OnMissingAttribute(TRUE, L"location", sizeof("location")-1, L"", 0);
    }
    EmitError(IDS_SDL_INTERNAL);
    return E_FAIL;
}
// http:address extension under <port>: records the HTTP endpoint URL from
// the required location= attribute, then skips the rest of the element.
TAG_METHOD_IMPL(CWSDLServicePortParser, OnHttpAddress)
{
    TRACE_PARSE_ENTRY();
    CWSDLPort * pCurr = GetPort();
    if (pCurr != NULL)
    {
        CStringW strAddress;
        if (S_OK == GetAttribute(pAttributes, L"location", sizeof("location")-1, strAddress))
        {
            if (SUCCEEDED(pCurr->SetHttpAddress(strAddress)))
            {
                return SkipElement();
            }
        }
        // NOTE(review): also reached when the attribute was present but
        // SetHttpAddress failed -- see OnSoapAddress.
        OnMissingAttribute(TRUE, L"location", sizeof("location")-1, L"", 0);
    }
    EmitError(IDS_SDL_INTERNAL);
    return E_FAIL;
}
// name= attribute of <port>: stored on the port model. An internal error
// is reported when no port model is current.
ATTR_METHOD_IMPL(CWSDLServicePortParser, OnName)
{
    TRACE_PARSE_ENTRY();
    CWSDLPort * pPort = GetPort();
    if (pPort == NULL)
    {
        EmitError(IDS_SDL_INTERNAL);
        return E_FAIL;
    }
    return pPort->SetName(wszValue, cchValue);
}
// binding= attribute of <port>: records the QName of the binding this
// port implements. An internal error is reported when no port model is
// current.
ATTR_METHOD_IMPL(CWSDLServicePortParser, OnBinding)
{
    TRACE_PARSE_ENTRY();
    CWSDLPort * pPort = GetPort();
    if (pPort == NULL)
    {
        EmitError(IDS_SDL_INTERNAL);
        return E_FAIL;
    }
    return pPort->SetBinding(wszValue, cchValue);
}
// SAX callback: forwards an xmlns prefix-to-URI mapping to the current
// port model.
HRESULT __stdcall CWSDLServicePortParser::startPrefixMapping(
    const wchar_t *wszPrefix,
    int cchPrefix,
    const wchar_t *wszUri,
    int cchUri)
{
    CWSDLPort * pPort = GetPort();
    if (pPort == NULL)
    {
        return E_FAIL;
    }
    return pPort->SetNamespaceUri(wszPrefix, cchPrefix, wszUri, cchUri);
}
// Unknown extensibility elements under <port> are not fatal: emit a
// skip-extensibility warning with the source position and skip the
// subtree.
HRESULT CWSDLServicePortParser::OnUnrecognizedTag(
    const wchar_t *wszNamespaceUri, int cchNamespaceUri,
    const wchar_t *wszLocalName, int cchLocalName,
    const wchar_t * /*wszQName*/, int /*cchQName*/,
    ISAXAttributes * /*pAttributes*/) throw()
{
    CWSDLPort * pCurr = GetPort();
    if (pCurr != NULL)
    {
        int nLine;
        int nCol;
        GetLocator()->getLineNumber(&nLine);
        GetLocator()->getColumnNumber(&nCol);
        EmitFileWarning(IDS_SDL_SKIP_EXTENSIBILITY,
            pCurr->GetParentDocument()->GetDocumentUri(),
            nLine,
            nCol,
            0,
            wszNamespaceUri,
            wszLocalName);
    }
    return SkipElement();
}
<|start_filename|>source/SProxy/WSDLMessage.h<|end_filename|>
//
// WSDLMessage.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "XMLElement.h"
#include "WSDLMessagePart.h"
#include "Emit.h"
// Model of a WSDL <message>: a named message plus its ordered list of
// <part> children. Parts added through the allocating AddPart() are owned
// by this object's list.
class CWSDLMessage : public CXMLElement
{
private:
    CStringW m_strDocumentation;
    CStringW m_strName;
    CAtlPtrList<CWSDLMessagePart *> m_parts;
public:
    // Position-based iteration over the parts (CAtlPtrList protocol).
    inline POSITION GetFirstPart()
    {
        return m_parts.GetHeadPosition();
    }
    inline CWSDLMessagePart * GetNextPart(POSITION& pos)
    {
        return m_parts.GetNext(pos);
    }
    // Linear search for a part by name; NULL when no part matches.
    inline CWSDLMessagePart * GetPartByName(CStringW& strName)
    {
        POSITION pos = m_parts.GetHeadPosition();
        while (pos != NULL)
        {
            CWSDLMessagePart *pPart = m_parts.GetNext(pos);
            if (pPart->GetName() == strName)
            {
                return pPart;
            }
        }
        return NULL;
    }
    // Allocates a new part and appends it; the list takes ownership.
    // Returns NULL (after emitting E_OUTOFMEMORY) on failure.
    inline CWSDLMessagePart * AddPart()
    {
        CAutoPtr<CWSDLMessagePart> spPart( new CWSDLMessagePart );
        if (spPart != NULL && m_parts.AddTail(spPart) != NULL)
        {
            return spPart.Detach();
        }
        EmitErrorHr(E_OUTOFMEMORY);
        return NULL;
    }
    // Appends an already-allocated part; returns it, or NULL (after
    // emitting E_OUTOFMEMORY) if the list append fails.
    inline CWSDLMessagePart * AddPart(CWSDLMessagePart *part)
    {
        if (m_parts.AddTail(part) == NULL)
        {
            EmitErrorHr(E_OUTOFMEMORY);
            return NULL;
        }
        return part;
    }
    inline size_t GetNumParts()
    {
        return m_parts.GetCount();
    }
    inline HRESULT SetName(const CStringW& strName)
    {
        m_strName = strName;
        return S_OK;
    }
    // Sets the name from a (pointer, length) pair; E_FAIL on NULL input.
    inline HRESULT SetName(const wchar_t *wszName, int cchName)
    {
        if (wszName == NULL)
        {
            return E_FAIL;
        }
        m_strName.SetString(wszName, cchName);
        return S_OK;
    }
    inline const CStringW& GetName()
    {
        return m_strName;
    }
};
<|start_filename|>include/atlsrvres.h<|end_filename|>
// This is a part of the Active Template Library.
// Copyright (C) Microsoft Corporation
// All rights reserved.
//
// This source code is only intended as a supplement to the
// Active Template Library Reference and related
// electronic documentation provided with the library.
// See these sources for detailed information regarding the
// Active Template Library product.
// Used by atlsrv.rc
//
#ifndef ATLSRV_RESID_BASE
#define ATLSRV_RESID_BASE 0x6000
#endif
#ifndef PERFMON_RESID_BASE
#define PERFMON_RESID_BASE 0x6100
#endif
#ifndef STENCIL_RESID_BASE
#define STENCIL_RESID_BASE 0x6200
#endif
#define IDS_ATLSRV_BAD_REQUEST (ATLSRV_RESID_BASE+1)
#define IDS_ATLSRV_AUTH_REQUIRED (ATLSRV_RESID_BASE+2)
#define IDS_ATLSRV_FORBIDDEN (ATLSRV_RESID_BASE+3)
#define IDS_ATLSRV_NOT_FOUND (ATLSRV_RESID_BASE+4)
#define IDS_ATLSRV_SERVER_ERROR (ATLSRV_RESID_BASE+5)
#define IDS_ATLSRV_NOT_IMPLEMENTED (ATLSRV_RESID_BASE+6)
#define IDS_ATLSRV_BAD_GATEWAY (ATLSRV_RESID_BASE+7)
#define IDS_ATLSRV_SERVICE_NOT_AVAILABLE (ATLSRV_RESID_BASE+8)
#define IDS_ATLSRV_SERVER_ERROR_BADSRF (ATLSRV_RESID_BASE+9)
#define IDS_ATLSRV_SERVER_ERROR_HNDLFAIL (ATLSRV_RESID_BASE+10)
#define IDS_ATLSRV_SERVER_ERROR_SYSOBJFAIL (ATLSRV_RESID_BASE+11)
#define IDS_ATLSRV_SERVER_ERROR_READFILEFAIL (ATLSRV_RESID_BASE+12)
#define IDS_ATLSRV_SERVER_ERROR_LOADLIB (ATLSRV_RESID_BASE+13)
#define IDS_ATLSRV_SERVER_ERROR_HANDLERIF (ATLSRV_RESID_BASE+14)
#define IDS_ATLSRV_SERVER_ERROR_OUTOFMEM (ATLSRV_RESID_BASE+15)
#define IDS_ATLSRV_SERVER_ERROR_UNEXPECTED (ATLSRV_RESID_BASE+16)
#define IDS_ATLSRV_SERVER_ERROR_STENCILPARSEFAIL (ATLSRV_RESID_BASE+17)
#define IDS_ATLSRV_SERVER_ERROR_STENCILLOADFAIL (ATLSRV_RESID_BASE+18)
#define IDS_ATLSRV_SERVER_ERROR_HANDLERNOTFOUND (ATLSRV_RESID_BASE+19)
#define IDS_ATLSRV_SERVER_ERROR_BADHANDLERTAG (ATLSRV_RESID_BASE+20)
#define IDS_ATLSRV_SERVER_ERROR_NOHANDLERTAG (ATLSRV_RESID_BASE+21)
#define IDS_ATLSRV_SERVER_ERROR_LONGMETHODNAME (ATLSRV_RESID_BASE+22)
#define IDS_ATLSRV_SERVER_ERROR_LONGHANDLERNAME (ATLSRV_RESID_BASE+23)
#define IDS_ATLSRV_SERVER_ERROR_IMPERSONATIONFAILED (ATLSRV_RESID_BASE+24)
#define IDS_ATLSRV_SERVER_ERROR_ISAPISTARTUPFAILED (ATLSRV_RESID_BASE+25)
#define IDS_ATLSRV_SERVER_ERROR_LOADFILEFAIL (ATLSRV_RESID_BASE+26)
#define IDS_ATLSRV_CRITICAL_LOGMESSAGE (ATLSRV_RESID_BASE+27)
#define IDS_ATLSRV_CRITICAL_HEAPCREATEFAILED (ATLSRV_RESID_BASE+28)
#define IDS_ATLSRV_CRITICAL_WORKERINITFAILED (ATLSRV_RESID_BASE+29)
#define IDS_ATLSRV_CRITICAL_CRITSECINITFAILED (ATLSRV_RESID_BASE+30)
#define IDS_ATLSRV_CRITICAL_THREADPOOLFAILED (ATLSRV_RESID_BASE+31)
#define IDS_ATLSRV_CRITICAL_DLLCACHEFAILED (ATLSRV_RESID_BASE+32)
#define IDS_ATLSRV_CRITICAL_PAGECACHEFAILED (ATLSRV_RESID_BASE+33)
#define IDS_ATLSRV_CRITICAL_STENCILCACHEFAILED (ATLSRV_RESID_BASE+34)
#define IDS_ATLSRV_CRITICAL_SESSIONSTATEFAILED (ATLSRV_RESID_BASE+35)
#define IDS_ATLSRV_CRITICAL_BLOBCACHEFAILED (ATLSRV_RESID_BASE+36)
#define IDS_ATLSRV_CRITICAL_FILECACHEFAILED (ATLSRV_RESID_BASE+37)
#define IDS_ATLSRV_SERVER_ERROR_SOAPNOSOAPACTION (ATLSRV_RESID_BASE+38)
#define IDS_PERFMON_CACHE (PERFMON_RESID_BASE+1)
#define IDS_PERFMON_CACHE_HELP (PERFMON_RESID_BASE+2)
#define IDS_PERFMON_HITCOUNT (PERFMON_RESID_BASE+3)
#define IDS_PERFMON_HITCOUNT_HELP (PERFMON_RESID_BASE+4)
#define IDS_PERFMON_MISSCOUNT (PERFMON_RESID_BASE+5)
#define IDS_PERFMON_MISSCOUNT_HELP (PERFMON_RESID_BASE+6)
#define IDS_PERFMON_CURRENTALLOCATIONS (PERFMON_RESID_BASE+7)
#define IDS_PERFMON_CURRENTALLOCATIONS_HELP (PERFMON_RESID_BASE+8)
#define IDS_PERFMON_MAXALLOCATIONS (PERFMON_RESID_BASE+9)
#define IDS_PERFMON_MAXALLOCATIONS_HELP (PERFMON_RESID_BASE+10)
#define IDS_PERFMON_CURRENTENTRIES (PERFMON_RESID_BASE+11)
#define IDS_PERFMON_CURRENTENTRIES_HELP (PERFMON_RESID_BASE+12)
#define IDS_PERFMON_MAXENTRIES (PERFMON_RESID_BASE+13)
#define IDS_PERFMON_MAXENTRIES_HELP (PERFMON_RESID_BASE+14)
#define IDS_PERFMON_HITCOUNTRATE (PERFMON_RESID_BASE+15)
#define IDS_PERFMON_HITCOUNTRATE_HELP (PERFMON_RESID_BASE+16)
#define IDS_PERFMON_REQUEST (PERFMON_RESID_BASE+17)
#define IDS_PERFMON_REQUEST_HELP (PERFMON_RESID_BASE+18)
#define IDS_PERFMON_REQUEST_TOTAL (PERFMON_RESID_BASE+19)
#define IDS_PERFMON_REQUEST_TOTAL_HELP (PERFMON_RESID_BASE+20)
#define IDS_PERFMON_REQUEST_FAILED (PERFMON_RESID_BASE+21)
#define IDS_PERFMON_REQUEST_FAILED_HELP (PERFMON_RESID_BASE+22)
#define IDS_PERFMON_REQUEST_RATE (PERFMON_RESID_BASE+23)
#define IDS_PERFMON_REQUEST_RATE_HELP (PERFMON_RESID_BASE+24)
#define IDS_PERFMON_REQUEST_AVG_RESPONSE_TIME (PERFMON_RESID_BASE+25)
#define IDS_PERFMON_REQUEST_AVG_RESPONSE_TIME_HELP (PERFMON_RESID_BASE+26)
#define IDS_PERFMON_REQUEST_CURR_WAITING (PERFMON_RESID_BASE+27)
#define IDS_PERFMON_REQUEST_CURR_WAITING_HELP (PERFMON_RESID_BASE+28)
#define IDS_PERFMON_REQUEST_MAX_WAITING (PERFMON_RESID_BASE+29)
#define IDS_PERFMON_REQUEST_MAX_WAITING_HELP (PERFMON_RESID_BASE+30)
#define IDS_PERFMON_REQUEST_ACTIVE_THREADS (PERFMON_RESID_BASE+31)
#define IDS_PERFMON_REQUEST_ACTIVE_THREADS_HELP (PERFMON_RESID_BASE+32)
//
// Stencil parse error support
//
// the error stencil
#define IDS_STENCIL_ERROR_STENCIL (STENCIL_RESID_BASE+1)
// parse errors
#define IDS_STENCIL_UNCLOSEDBLOCK_IF (STENCIL_RESID_BASE+2)
#define IDS_STENCIL_UNCLOSEDBLOCK_ELSE (STENCIL_RESID_BASE+3)
#define IDS_STENCIL_UNCLOSEDBLOCK_WHILE (STENCIL_RESID_BASE+4)
#define IDS_STENCIL_UNOPENEDBLOCK_ENDWHILE (STENCIL_RESID_BASE+5)
#define IDS_STENCIL_UNOPENEDBLOCK_ELSE (STENCIL_RESID_BASE+6)
#define IDS_STENCIL_UNOPENEDBLOCK_ENDIF (STENCIL_RESID_BASE+7)
#define IDS_STENCIL_INVALID_HANDLER (STENCIL_RESID_BASE+8)
#define IDS_STENCIL_NULLPARAM (STENCIL_RESID_BASE+9)
#define IDS_STENCIL_INVALIDSTRING (STENCIL_RESID_BASE+10)
#define IDS_STENCIL_EMBEDDED_NULL (STENCIL_RESID_BASE+11)
#define IDS_STENCIL_UNMATCHED_TAG_START (STENCIL_RESID_BASE+12)
#define IDS_STENCIL_MISMATCHED_TAG_START (STENCIL_RESID_BASE+13)
#define IDS_STENCIL_BAD_PARAMETER (STENCIL_RESID_BASE+14)
#define IDS_STENCIL_METHODNAME_TOO_LONG (STENCIL_RESID_BASE+15)
#define IDS_STENCIL_HANDLERNAME_TOO_LONG (STENCIL_RESID_BASE+16)
#define IDS_STENCIL_INCLUDE_ERROR (STENCIL_RESID_BASE+17)
#define IDS_STENCIL_INCLUDE_INVALID (STENCIL_RESID_BASE+18)
#define IDS_STENCIL_INVALID_SUBHANDLER (STENCIL_RESID_BASE+19)
#define IDS_STENCIL_UNRESOLVED_REPLACEMENT (STENCIL_RESID_BASE+20)
// mlang errors
#define IDS_STENCIL_MLANG_COCREATE (STENCIL_RESID_BASE+21)
#define IDS_STENCIL_MLANG_LCID (STENCIL_RESID_BASE+22)
#define IDS_STENCIL_MLANG_GETLOCALE (STENCIL_RESID_BASE+23)
#define IDS_STENCIL_MLANG_GETCHARSET (STENCIL_RESID_BASE+24)
// miscellaneous
#define IDS_STENCIL_OUTOFMEMORY (STENCIL_RESID_BASE+25)
#define IDS_STENCIL_UNEXPECTED (STENCIL_RESID_BASE+26)
// Next default values for new objects
//
#ifdef APSTUDIO_INVOKED
#ifndef APSTUDIO_READONLY_SYMBOLS
#define _APS_NO_MFC 1
#define _APS_NEXT_RESOURCE_VALUE 101
#define _APS_NEXT_COMMAND_VALUE 40001
#define _APS_NEXT_CONTROL_VALUE 1000
#define _APS_NEXT_SYMED_VALUE 101
#endif
#endif
<|start_filename|>source/SProxy/SchemaDocument.h<|end_filename|>
//
// SchemaDocument.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "Schema.h"
// XML document wrapper that embeds the root schema model object; the
// schema lives exactly as long as the document.
class CSchemaDocument : public CXMLDocument
{
private:
    CSchema m_schema;
public:
    // Returns the embedded schema (never NULL).
    // Fix: the original read "return m_&schema;", which is a syntax error
    // (member name split by '&'); the intended expression is the address
    // of m_schema.
    inline CSchema * GetSchema()
    {
        return &m_schema;
    }
};
<|start_filename|>source/SProxy/DiscoMapParser.cpp<|end_filename|>
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "util.h"
#include "DiscoMapparser.h"
#include "DiscoMapDocument.h"
// Default constructor: starts with no owned disco-map document; one is
// created lazily by CreateDiscoMapDocument().
CDiscoMapParser::CDiscoMapParser(void)
:m_pDocument(NULL)
{
}
CDiscoMapParser::~CDiscoMapParser(void)
{
}
// Chained constructor used when this parser is installed as the SAX content
// handler beneath a parent parser at nesting depth dwLevel.
CDiscoMapParser::CDiscoMapParser(ISAXXMLReader * pReader, CParserBase * pParent, DWORD dwLevel)
:CParserBase(pReader, pParent, dwLevel), m_pDocument(NULL)
{
}
// Allocates a fresh CDiscoMapDocument and transfers ownership of it to
// m_pDocument (a smart pointer; Attach assumes ownership without AddRef).
// Returns the new document, or NULL if the allocation failed.
CDiscoMapDocument * CDiscoMapParser::CreateDiscoMapDocument(void)
{
m_pDocument.Attach( new CDiscoMapDocument );
return m_pDocument;
}
// Root <DiscoveryClientResultsFile> element: nothing to record here;
// the interesting data is carried by child elements.
TAG_METHOD_IMPL(CDiscoMapParser, OnDiscoveryClientResultsFile)
{
TRACE_PARSE_ENTRY();
return S_OK;
}
// <Results> container element: handled purely through its children.
TAG_METHOD_IMPL(CDiscoMapParser, OnResults)
{
TRACE_PARSE_ENTRY();
return S_OK;
}
// Handles a <DiscoveryClientResult> element from a .discomap file.
// Reads the "referenceType" attribute and, depending on its value, records
// either a schema reference (url -> local filename) or the WSDL contract
// file name on the owning CDiscoMapDocument. Unrecognized reference types
// are deliberately ignored (S_OK).
TAG_METHOD_IMPL(CDiscoMapParser, OnDiscoveryClientResult)
{
    TRACE_PARSE_ENTRY();

    CStringW strRT;
    HRESULT hr = GetAttribute(pAttributes, L"referenceType", sizeof("referenceType")-1, strRT);
    if (FAILED(hr))
    {
        return hr;
    }

    CDiscoMapDocument * pDoc = GetDiscoMapDocument();
    // ROBUSTNESS: GetDiscoMapDocument() can return NULL when the lazy
    // allocation fails; the original dereferenced it unconditionally.
    if (pDoc == NULL)
    {
        EmitErrorHr(E_OUTOFMEMORY);
        return E_FAIL;
    }

    if (strRT == "System.Web.Services.Discovery.SchemaReference")
    {
        // A referenced schema: remember the url -> local file mapping.
        CStringW strURL;
        hr = GetAttribute(pAttributes, L"url", sizeof("url")-1, strURL);
        if (FAILED(hr))
        {
            return hr;
        }

        CStringW strFileName;
        hr = GetAttribute(pAttributes, L"filename", sizeof("filename")-1, strFileName);
        if (FAILED(hr))
        {
            return hr;
        }

        pDoc->AddSchema(strURL, strFileName);
        return S_OK;
    }

    if (strRT == "System.Web.Services.Discovery.ContractReference")
    {
        // The WSDL contract itself: remember its local file name.
        CStringW strWSDLFile;
        hr = GetAttribute(pAttributes, L"filename", sizeof("filename")-1, strWSDLFile);
        if (FAILED(hr))
        {
            return hr;
        }

        pDoc->SetWSDLFile(strWSDLFile);
    }

    return S_OK;
}
<|start_filename|>source/SProxy/Parser.cpp<|end_filename|>
//
// Parser.cpp
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "Parser.h"
#include "SkipParser.h"
#include "resource.h"
////////////////////////////////////////////////////////////////////////
//
// CParserBase interface
//
////////////////////////////////////////////////////////////////////////
// Walks the given XMLTAG map looking for an entry that matches the current
// element (by local name and, optionally, namespace). On a match, invokes
// the entry's member-function handler; if no entry matches, falls back to
// OnUnrecognizedTag (which derived parsers may override to ignore content).
// A NULL map is treated as "accept everything silently".
HRESULT CParserBase::DispatchElement(
const XMLTAG *pEntry,
const wchar_t *wszNamespaceUri, int cchNamespaceUri,
const wchar_t *wszLocalName, int cchLocalName,
const wchar_t *wszQName, int cchQName,
ISAXAttributes *pAttributes) throw()
{
if (!pEntry)
{
return S_OK;
}
// The map is terminated by an entry with a NULL element name.
while (pEntry->szElemName)
{
if (CheckTagElement(pEntry,
wszNamespaceUri, cchNamespaceUri,
wszLocalName, cchLocalName,
wszQName, cchQName))
{
// Dispatch through the pointer-to-member handler for this tag.
return (this->*pEntry->pfnTag)(wszNamespaceUri,
cchNamespaceUri, wszLocalName, cchLocalName,
wszQName, cchQName, pAttributes);
}
pEntry++;
}
//
// unrecognized tag
//
return OnUnrecognizedTag(wszNamespaceUri,
cchNamespaceUri, wszLocalName, cchLocalName,
wszQName, cchQName, pAttributes);
}
// For each entry in the XMLATTR map, scans the SAX attribute collection for
// a matching attribute and invokes the entry's member-function handler with
// its value. Entries not found in the collection are reported through
// OnMissingAttribute (which fails the parse only for required attributes).
// Idempotent per element: guarded by the GetGotAttributes() flag so the two
// DispatchElement overloads cannot process attributes twice.
HRESULT CParserBase::GetAttributes(const XMLATTR *pEntry, ISAXAttributes *pAttributes)
{
if (!pAttributes)
{
EmitError(IDS_SDL_INTERNAL);
return E_FAIL;
}
if (GetGotAttributes() != FALSE)
{
return S_OK;
}
if (!pEntry)
{
return S_OK;
}
int nAttrs = 0;
HRESULT hr = pAttributes->getLength(&nAttrs);
if (FAILED(hr))
{
EmitError(IDS_SDL_MSXML);
return E_FAIL;
}
// Outer loop: one pass per map entry (map is NULL-name terminated).
while (pEntry->wszElemName)
{
int i;
for (i=0; i<nAttrs; i++)
{
const wchar_t *wszNamespaceUri = NULL;
const wchar_t *wszLocalName = NULL;
const wchar_t *wszQName = NULL;
int cchUri = 0;
int cchLocalName = 0;
int cchQName = 0;
hr = pAttributes->getName(i, &wszNamespaceUri, &cchUri, &wszLocalName, &cchLocalName, &wszQName, &cchQName);
if (FAILED(hr))
{
EmitError(IDS_SDL_MSXML);
return E_FAIL;
}
if (CheckTagElement(pEntry,
wszNamespaceUri, cchUri,
wszLocalName, cchLocalName,
wszQName, cchQName))
{
const wchar_t *wszValue = NULL;
int cchValue = 0;
hr = pAttributes->getValue(i, &wszValue, &cchValue);
if (SUCCEEDED(hr))
{
// Dispatch the attribute value to this entry's handler.
hr = (this->*pEntry->pfnAttr)(wszNamespaceUri,
cchUri, wszLocalName, cchLocalName,
wszQName, cchQName, wszValue, cchValue,
pAttributes);
}
else
{
EmitError(IDS_SDL_MSXML);
}
break;
}
}
// Fell off the end of the attribute list: the entry was not present.
if (i >= nAttrs)
{
hr = OnMissingAttribute(pEntry->bRequired, pEntry->wszElemName, pEntry->cchElem,
pEntry->wszElemNamespace, pEntry->cchNamespaceUri);
}
if (FAILED(hr))
{
break;
}
pEntry++;
}
// Mark attributes as processed even on failure so we don't re-enter.
SetGotAttributes(TRUE);
return hr;
}
// Dispatches the current element through this parser's registered tag map,
// after first making sure the element's attributes have been processed.
HRESULT CParserBase::DispatchElement(
    const wchar_t *wszNamespaceUri, int cchNamespaceUri,
    const wchar_t *wszLocalName, int cchLocalName,
    const wchar_t *wszQName, int cchQName,
    ISAXAttributes *pAttributes) throw()
{
    const HRESULT hrAttrs = GetAttributes(pAttributes);
    if (FAILED(hrAttrs))
    {
        return E_FAIL;
    }

    // Route through the derived parser's XMLTAG map.
    return DispatchElement(GetXMLTAGMap(),
        wszNamespaceUri, cchNamespaceUri,
        wszLocalName, cchLocalName,
        wszQName, cchQName,
        pAttributes);
}
// Processes this element's attributes using the derived parser's XMLATTR
// map (a no-op after the first successful call, see the map overload).
HRESULT CParserBase::GetAttributes(ISAXAttributes *pAttributes)
{
    return GetAttributes(GetXMLATTRMap(), pAttributes);
}
// Returns TRUE when the map entry pTag matches the element identified by
// (namespace, local name). Matching rules:
//   - local names must match exactly (length + content);
//   - a NULL entry namespace means "don't care";
//   - an empty entry namespace matches only an empty element namespace;
//   - otherwise namespaces must match exactly.
BOOL CParserBase::CheckTagElement(
const XMLTAG *pTag,
const wchar_t *wszNamespaceUri, int cchNamespaceUri,
const wchar_t *wszLocalName, int cchLocalName,
const wchar_t *wszQName, int cchQName)
{
if (!pTag->wszElemName)
{
EmitError(IDS_SDL_INTERNAL);
return FALSE;
}
if (pTag->cchElem == cchLocalName &&
!wcsncmp(pTag->wszElemName, wszLocalName, pTag->cchElem))
{
//
// Don't need to check default namespace for tags since SAX gives it in wszElemNamespace
//
if ((!pTag->wszElemNamespace) ||
(!pTag->cchNamespaceUri && !cchNamespaceUri) ||
(pTag->cchNamespaceUri == cchNamespaceUri &&
!wcsncmp(pTag->wszElemNamespace, wszNamespaceUri, pTag->cchNamespaceUri)))
{
//
// namespace and tag match
// it is okay if there is no namespace ("") or if the user doesn't care (NULL)
//
return TRUE;
}
}
return FALSE;
}
// Splits a qualified name ("prefix:local") into its namespace-prefix and
// local-name parts. If no colon lies within the first cchQName characters,
// the whole input is treated as the local name and the prefix out-params
// are set to NULL/0.
//
// NOTE: the output pointers alias the input buffer; no copies are made.
// wszQName must be NUL-terminated (wcschr is used to locate the colon).
void CParserBase::CrackQName(const wchar_t *wszQName, int cchQName,
    wchar_t **pwszNs, int *pcchNs,
    wchar_t **pwszName, int *pcchName)
{
    wchar_t * wszQNameTmp = (wchar_t *) wszQName;
    wchar_t * wszColon = wcschr(wszQNameTmp, L':');
    // BUGFIX: the original compared with <=, which accepted a colon at
    // index cchQName (one past the counted length) and then produced a
    // negative *pcchName. The colon only splits the name when it lies
    // strictly inside the first cchQName characters.
    if (wszColon && (wszColon - wszQNameTmp) < cchQName)
    {
        *pwszNs = wszQNameTmp;
        *pcchNs = (int)(wszColon - wszQNameTmp);
        *pwszName = wszColon + 1;
        *pcchName = cchQName - (*pcchNs) - 1;
    }
    else
    {
        *pwszNs = NULL;
        *pcchNs = 0;
        *pwszName = wszQNameTmp;
        *pcchName = cchQName;
    }
}
// Default handler for tags that matched no entry in the parser's tag map:
// reports the offending tag's file location as an error and fails the
// parse. Derived parsers (e.g. CSkipParser) override this to silently
// ignore unknown content instead.
HRESULT CParserBase::OnUnrecognizedTag(
    const wchar_t *wszNamespaceUri, int cchNamespaceUri,
    const wchar_t *wszLocalName, int cchLocalName,
    const wchar_t *wszQName, int cchQName,
    ISAXAttributes *pAttributes) throw()
{
    // Reference otherwise-unused parameters to quiet level-4 warnings.
    wszNamespaceUri;
    cchNamespaceUri;
    wszLocalName;
    cchLocalName;
    wszQName;
    cchQName;
    pAttributes;

    // BUGFIX: corrected the misspelled trace message ("Unrecoginzed").
    ATLTRACE( _T("%sUnrecognized Tag: %.*ws\n"), GetTabs(m_dwLevel), cchQName, wszQName );

    int nLine;
    int nCol;
    GetLocator()->getLineNumber(&nLine);
    GetLocator()->getColumnNumber(&nCol);
    EmitFileError(IDS_SDL_UNRECOGNIZED_TAG, GetWSDLFile(), nLine, nCol, 0, wszNamespaceUri, wszLocalName);
    return E_FAIL;
}
// Called by GetAttributes for each map entry that was absent from the
// element. Optional attributes are silently accepted (S_OK); a missing
// required attribute is reported with file/line/column info and fails
// the parse (E_FAIL).
HRESULT CParserBase::OnMissingAttribute(BOOL bRequired,
const wchar_t *wszName, int cchName,
const wchar_t *wszNamespace, int cchNamespace)
{
if (bRequired != FALSE)
{
ATLTRACE( _T("%sMissing Required Attribute: name %.*ws, uri %.*ws\n"),
GetTabs(m_dwLevel), cchName, wszName, cchNamespace, wszNamespace );
int nLine;
int nCol;
GetLocator()->getLineNumber(&nLine);
GetLocator()->getColumnNumber(&nCol);
EmitFileError(IDS_SDL_MISSING_ATTRIBUTE, GetWSDLFile(), nLine, nCol, 0, wszNamespace, wszName);
return E_FAIL;
}
return S_OK;
}
// Reports an attribute/element whose value failed validation, annotated
// with the current parse position from the SAX locator.
void CParserBase::EmitInvalidValue(const char *szName, const wchar_t *wszValue)
{
int nLine;
int nCol;
GetLocator()->getLineNumber(&nLine);
GetLocator()->getColumnNumber(&nCol);
EmitFileError(IDS_SDL_INVALID_VALUE, GetWSDLFile(), nLine, nCol, 0, szName, wszValue);
}
// Emits a warning (not an error) for an unsupported construct, annotated
// with the current parse position. nID selects the message string.
void CParserBase::EmitUnsupported(UINT nID, const wchar_t *wszUri, const wchar_t *wszName)
{
int nLine;
int nCol;
GetLocator()->getLineNumber(&nLine);
GetLocator()->getColumnNumber(&nCol);
EmitFileWarning(nID, GetWSDLFile(), nLine, nCol, 0, wszUri, wszName);
}
// Convenience wrapper: warn that an unsupported tag is being skipped.
void CParserBase::EmitSkip(const wchar_t *wszUri, const wchar_t *wszName)
{
EmitUnsupported(IDS_SDL_UNSUPPORTED_TAG, wszUri, wszName);
}
// Convenience wrapper: warn about an unsupported string construct.
void CParserBase::EmitString(const wchar_t *wszUri, const wchar_t *wszName)
{
EmitUnsupported(IDS_SDL_UNSUPPORTED_STRING, wszUri, wszName);
}
// Skips the current element (and everything inside it) by installing a
// throwaway CSkipParser as the active content handler. Ownership of the
// skip parser is handed to the global parser list once AddHead succeeds
// (p.Detach() releases the CAutoPtr); the parser frees itself when its
// element ends. Fails with E_FAIL/E_OUTOFMEMORY if allocation or list
// insertion fails.
HRESULT CParserBase::SkipElement()
{
CAutoPtr<CSkipParser> p( new CSkipParser(m_spReader, this, GetLevel()) );
if (p != NULL)
{
if (g_ParserList.AddHead(p) != NULL)
{
p.Detach();
return S_OK;
}
}
EmitErrorHr(E_OUTOFMEMORY);
return E_FAIL;
}
////////////////////////////////////////////////////////////////////////
//
// IUnknown
//
////////////////////////////////////////////////////////////////////////
// Minimal IUnknown: parsers support only ISAXContentHandler and IUnknown.
// The IUnknown* cast works for both requested interfaces because the
// class has a single COM interface branch.
HRESULT __stdcall CParserBase::QueryInterface(REFIID riid, void **ppv)
{
if (!ppv)
{
return E_POINTER;
}
if (InlineIsEqualGUID(riid, __uuidof(ISAXContentHandler)) ||
InlineIsEqualGUID(riid, __uuidof(IUnknown)))
{
*ppv = static_cast<IUnknown*>(this);
return S_OK;
}
return E_NOINTERFACE;
}
// Parsers are not reference counted; their lifetime is managed explicitly
// (see endElement's delete-on-completion logic), so AddRef/Release are
// constant no-ops.
ULONG __stdcall CParserBase::AddRef()
{
return 1;
}
ULONG __stdcall CParserBase::Release()
{
return 1;
}
////////////////////////////////////////////////////////////////////////
//
// ISAXContentHandler
//
////////////////////////////////////////////////////////////////////////
// Caches the SAX locator so error reporting can include line/column info.
HRESULT __stdcall CParserBase::putDocumentLocator(ISAXLocator *pLocator)
{
m_spLocator = pLocator;
return S_OK;
}
// Document begin/end: nothing to do at this level.
HRESULT __stdcall CParserBase::startDocument()
{
return S_OK;
}
HRESULT __stdcall CParserBase::endDocument()
{
return S_OK;
}
// Prefix mappings are ignored here; derived parsers that need namespace
// bindings override this (e.g. CWSDLParser, CWSDLMessageParser).
HRESULT __stdcall CParserBase::startPrefixMapping(
const wchar_t *wszPrefix,
int cchPrefix,
const wchar_t *wszUri,
int cchUri)
{
return S_OK;
}
HRESULT __stdcall CParserBase::endPrefixMapping(
const wchar_t *wszPrefix,
int cchPrefix)
{
wszPrefix;
cchPrefix;
// REVIEW: remove from map?
return S_OK;
}
// ISAXContentHandler: each element-open event is forwarded to the tag-map
// dispatcher, which routes it to the matching handler method (or to
// OnUnrecognizedTag if nothing matches).
HRESULT __stdcall CParserBase::startElement(
    const wchar_t *wszNamespaceUri,
    int cchNamespaceUri,
    const wchar_t *wszLocalName,
    int cchLocalName,
    const wchar_t *wszQName,
    int cchQName,
    ISAXAttributes *pAttributes)
{
    return DispatchElement(
        wszNamespaceUri, cchNamespaceUri,
        wszLocalName, cchLocalName,
        wszQName, cchQName, pAttributes);
}
// ISAXContentHandler: end of this parser's element. Validates the element,
// restores the parent parser as the active content handler, and — if this
// parser was heap-allocated for the element — removes it from the global
// list and deletes itself. No member access is allowed after delete this.
HRESULT __stdcall CParserBase::endElement(
const wchar_t *wszNamespaceUri,
int cchNamespaceUri,
const wchar_t *wszLocalName,
int cchLocalName,
const wchar_t *wszQName,
int cchQName)
{
HRESULT hr = ValidateElement();
// Reset counter > 0 means a nested element re-used this handler; decrement
// (via EnableReset) and stay installed instead of tearing down.
if (GetReset() > 0)
{
EnableReset();
return hr;
}
//
// Restore the parent handler
//
CComPtr<CParserBase> spParentHandler = GetParentHandler();
if (spParentHandler.p != NULL)
{
m_spReader->putContentHandler(spParentHandler);
}
// Self-destruct if this parser was dynamically allocated (e.g. by
// SkipElement); it owns no further references after this point.
if (GetDynamicAlloc() != FALSE)
{
POSITION pos = g_ParserList.Find(this);
if (pos != NULL)
{
g_ParserList.RemoveAt(pos);
}
delete this;
}
return hr;
}
// Character data is ignored by the base parser; derived parsers that care
// about element text override this.
HRESULT __stdcall CParserBase::characters(
const wchar_t *wszChars,
int cchChars)
{
//
// REVIEW: what to do here?
//
// ATLTRACE( _T("CParserBase::characters: %*.ws\n"), cchChars, wszChars );
return S_OK;
}
// Ignorable whitespace: always accepted and discarded.
HRESULT __stdcall CParserBase::ignorableWhitespace(
const wchar_t *wszChars,
int cchChars)
{
// ATLTRACE( _T("CParserBase::ignorableWhitespace: %.*ws\n"), cchChars, wszChars );
return S_OK;
}
// Processing instructions are traced in debug builds and otherwise ignored.
HRESULT __stdcall CParserBase::processingInstruction(
const wchar_t *wszTarget,
int cchTarget,
const wchar_t *wszData,
int cchData)
{
wszTarget;
cchTarget;
wszData;
cchData;
ATLTRACE( _T("CParserBase::processingInstruction: target: %.*ws, data: %.*ws\n"), cchTarget, wszTarget, cchData, wszData );
return S_OK;
}
// Skipped entities are traced in debug builds and otherwise ignored.
HRESULT __stdcall CParserBase::skippedEntity(
    const wchar_t *wszName,
    int cchName)
{
    // Reference the parameters to quiet level-4 warnings in retail builds.
    wszName;
    cchName;
    // BUGFIX: "%.*ws" consumes the precision (int) BEFORE the string
    // pointer; the original passed (wszName, cchName), handing a pointer
    // where an int was expected and vice versa.
    ATLTRACE( _T("CParserBase::skippedEntity: %.*ws\n"), cchName, wszName );
    return S_OK;
}
<|start_filename|>source/SProxy/WSDLPortTypeIO.cpp<|end_filename|>
//
// WSDLPortTypeIO.cpp
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "WSDLPortTypeIO.h"
#include "WSDLMessage.h"
#include "WSDLDocument.h"
#include "Attribute.h"
#include "Content.h"
#include "Element.h"
#include "ComplexType.h"
// Lazily resolves this input/output's "message" QName reference against the
// parent WSDL document and caches the result in m_pMessage. Resolution only
// happens when the reference's namespace equals the document's target
// namespace and the parent is a WSDL document. Emits a file error (and
// returns NULL) when the prefix or the message name cannot be resolved.
CWSDLMessage * CWSDLPortTypeIO::GetMessage()
{
if (m_pMessage != NULL)
{
return m_pMessage;
}
CXMLDocument *pDoc = GetParentDocument();
if (pDoc != NULL)
{
CStringW strUri;
if (SUCCEEDED(GetNamespaceUri(m_message.GetPrefix(), strUri)))
{
if (strUri == pDoc->GetTargetNamespace())
{
if (pDoc->GetDocumentType() == WSDLDOC)
{
CWSDLDocument *pWSDLDoc = static_cast<CWSDLDocument *>(pDoc);
m_pMessage = pWSDLDoc->GetMessage(m_message.GetName());
if (m_pMessage == NULL)
{
EmitFileError(IDS_SDL_UNRESOLVED_ELEM2, const_cast<CWSDLPortTypeIO *>(this), 0,
"message", strUri, m_message.GetName());
}
}
}
}
else
{
// The QName prefix has no in-scope namespace binding.
EmitFileError(IDS_SDL_UNRESOLVED_NAMESPACE, const_cast<CWSDLPortTypeIO *>(this), 0, m_message.GetPrefix());
}
}
return m_pMessage;
}
<|start_filename|>source/SProxy/WSDLParser.h<|end_filename|>
//
// WSDLParser.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "Parser.h"
typedef NAMESPACEMAP IMPORTMAP;
class CWSDLDocument;
// SAX parser for the top level of a WSDL document (<definitions> and its
// direct children). Owns the CWSDLDocument it builds and tracks imported
// namespaces in m_importMap.
class CWSDLParser : public CParserBase
{
private:
IMPORTMAP m_importMap;
CAutoPtr<CWSDLDocument> m_pDocument;
public:
CWSDLParser();
CWSDLParser(ISAXXMLReader *pReader, CParserBase *pParent, DWORD dwLevel);
//
// Parsing maps
//
// Elements recognized at this nesting level; entries with a namespace
// argument only match tags in the WSDL namespace. <import> matches in
// any namespace (no _EX entry).
BEGIN_XMLTAG_MAP()
XMLTAG_ENTRY_EX( "definitions", WSDL_NAMESPACEA, OnDefinitions )
XMLTAG_ENTRY( "import", OnImport )
XMLTAG_ENTRY_EX( "documentation", WSDL_NAMESPACEA, OnDocumentation )
XMLTAG_ENTRY_EX( "types", WSDL_NAMESPACEA, OnTypes )
XMLTAG_ENTRY_EX( "message", WSDL_NAMESPACEA, OnMessage )
XMLTAG_ENTRY_EX( "portType", WSDL_NAMESPACEA, OnPortType )
XMLTAG_ENTRY_EX( "binding", WSDL_NAMESPACEA, OnBinding )
XMLTAG_ENTRY_EX( "service", WSDL_NAMESPACEA, OnService )
END_XMLTAG_MAP()
// Attributes recognized on the dispatched elements.
BEGIN_XMLATTR_MAP()
XMLATTR_ENTRY( "name", OnName )
XMLATTR_ENTRY( "targetNamespace", OnTargetNamespace )
END_XMLATTR_MAP()
//
// Parse functions
//
TAG_METHOD_DECL( OnDefinitions );
TAG_METHOD_DECL( OnImport );
TAG_METHOD_DECL( OnDocumentation );
TAG_METHOD_DECL( OnTypes );
TAG_METHOD_DECL( OnMessage );
TAG_METHOD_DECL( OnPortType );
TAG_METHOD_DECL( OnBinding );
TAG_METHOD_DECL( OnService );
ATTR_METHOD_DECL( OnName );
ATTR_METHOD_DECL( OnTargetNamespace );
// Replaces the owned document (frees any previous one, takes ownership).
inline void SetWSDLDocument(CWSDLDocument *pDoc)
{
m_pDocument.Free();
m_pDocument.Attach(pDoc);
}
// Returns the document, creating it lazily on first use. When
// bReleaseOwnership is TRUE the caller assumes ownership (Detach).
inline CWSDLDocument * GetWSDLDocument(BOOL bReleaseOwnership = FALSE)
{
if (m_pDocument == NULL)
{
CreateWSDLDocument();
}
if (bReleaseOwnership != FALSE)
{
return m_pDocument.Detach();
}
return m_pDocument;
}
CWSDLDocument * CreateWSDLDocument();
// Records namespace prefix bindings on the document being built.
HRESULT __stdcall startPrefixMapping(
const wchar_t *wszPrefix,
int cchPrefix,
const wchar_t *wszUri,
int cchUri);
};
<|start_filename|>source/SProxy/XMLDocParser.cpp<|end_filename|>
//
// XMLDocParser.cpp
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "XMLDocParser.h"
#include "resource.h"
// Default constructor: document type not yet determined.
CXMLDocParser::CXMLDocParser()
:m_docType(UNKDOC)
{
}
// Chained constructor: installs beneath pParent at depth dwLevel.
CXMLDocParser::CXMLDocParser(ISAXXMLReader *pReader, CParserBase *pParent, DWORD dwLevel)
:CParserBase(pReader, pParent, dwLevel), m_docType(UNKDOC)
{
}
// Root <schema> element: traces a mismatch if the expected document type
// (set by the caller) is neither unknown nor schema; otherwise accepted.
TAG_METHOD_IMPL( CXMLDocParser, OnSchema )
{
if (m_docType != UNKDOC && m_docType != SCHEMADOC)
{
ATLTRACE( _T("Not a schema document. Unknown root document tag: %.*ws\n"), cchLocalName, wszLocalName );
}
return S_OK;
}
// Root <definitions> element: analogous check for WSDL documents.
TAG_METHOD_IMPL( CXMLDocParser, OnDefinitions )
{
if (m_docType != UNKDOC && m_docType != WSDLDOC)
{
ATLTRACE( _T("Not a WSDL document. Unknown root document tag: %.*ws\n"), cchLocalName, wszLocalName );
}
return S_OK;
}
// An unrecognized ROOT tag means the whole document is of an unsupported
// kind: report it with the document's URL, then delegate to the base-class
// handler (which records the error and fails the parse).
HRESULT CXMLDocParser::OnUnrecognizedTag(
const wchar_t *wszNamespaceUri, int cchNamespaceUri,
const wchar_t *wszLocalName, int cchLocalName,
const wchar_t *wszQName, int cchQName,
ISAXAttributes *pAttributes) throw()
{
const wchar_t *wszUrl;
if (SUCCEEDED(GetReader()->getBaseURL(&wszUrl)))
{
EmitError(IDS_SDL_UNRECOGNIZED_DOC, wszUrl, wszNamespaceUri, wszLocalName);
}
return CParserBase::OnUnrecognizedTag(wszNamespaceUri, cchNamespaceUri,
wszLocalName, cchLocalName,
wszQName, cchQName,
pAttributes);
}
<|start_filename|>source/SProxy/WSDLMessageParser.cpp<|end_filename|>
//
// WSDLMessageParser.cpp
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "Util.h"
#include "WSDLMessageParser.h"
#include "WSDLMessage.h"
#include "Attribute.h"
#include "Content.h"
#include "Element.h"
#include "ComplexType.h"
// <documentation> inside a <message> carries no data we need; skip it
// (and everything nested inside) via a throwaway skip parser.
TAG_METHOD_IMPL(CWSDLMessageParser, OnDocumentation)
{
TRACE_PARSE_ENTRY();
return SkipElement();
}
// Handles a <part> element inside a <message>: adds a new part to the
// current message, records its required "name" attribute and its optional
// "element"/"type" attributes, then skips any nested content.
TAG_METHOD_IMPL(CWSDLMessageParser, OnPart)
{
    TRACE_PARSE_ENTRY();

    CWSDLMessage *pCurr = GetMessage();
    if (pCurr == NULL)
    {
        EmitErrorHr(E_OUTOFMEMORY);
        return E_FAIL;
    }

    CWSDLMessagePart *pPart = pCurr->AddPart();
    if (pPart == NULL)
    {
        EmitErrorHr(E_OUTOFMEMORY);
        return E_FAIL;
    }

    SetXMLElementInfo(pPart, pCurr, GetLocator());

    CStringW strName;
    if (S_OK != GetAttribute(pAttributes, L"name", sizeof("name")-1, strName))
    {
        // BUGFIX: the original fell through to EmitErrorHr(E_OUTOFMEMORY)
        // after reporting the missing attribute, mislabeling a schema
        // problem as an out-of-memory condition.
        OnMissingAttribute(TRUE, L"name", sizeof("name")-1, L"", 0);
        return E_FAIL;
    }

    pPart->SetName(strName);

    // "element" and "type" are both optional; a part may carry either.
    CStringW strElement;
    if (S_OK == GetAttribute(pAttributes, L"element", sizeof("element")-1, strElement))
    {
        pPart->SetElement(strElement);
    }

    CStringW strType;
    if (S_OK == GetAttribute(pAttributes, L"type", sizeof("type")-1, strType))
    {
        pPart->SetType(strType);
    }

    // <part> has no interesting children; skip to its end tag.
    return SkipElement();
}
// "name" attribute of <message>: stored on the message being built.
// A NULL message here indicates a broken parser setup (internal error).
ATTR_METHOD_IMPL(CWSDLMessageParser, OnName)
{
TRACE_PARSE_ENTRY();
CWSDLMessage *pCurr = GetMessage();
if (pCurr != NULL)
{
return pCurr->SetName(wszValue, cchValue);
}
EmitError(IDS_SDL_INTERNAL);
return E_FAIL;
}
// Records a namespace prefix binding on the current message so that QName
// attributes (e.g. part "type"/"element") can be resolved later.
HRESULT __stdcall CWSDLMessageParser::startPrefixMapping(
const wchar_t *wszPrefix,
int cchPrefix,
const wchar_t *wszUri,
int cchUri)
{
CWSDLMessage *pCurr = GetMessage();
if (pCurr != NULL)
{
return pCurr->SetNamespaceUri(wszPrefix, cchPrefix, wszUri, cchUri);
}
return E_FAIL;
}
<|start_filename|>source/SProxy/SkipParser.h<|end_filename|>
//
// SkipParser.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "Parser.h"
// A parser that swallows an entire element subtree: its tag map is empty,
// so every child tag falls through to OnUnrecognizedTag, which accepts it
// silently. DisableReset keeps this handler installed until the matching
// end tag of the element being skipped is reached.
class CSkipParser : public CParserBase
{
public:
inline CSkipParser(ISAXXMLReader *pReader, CParserBase *pParent, DWORD dwLevel)
:CParserBase(pReader, pParent, dwLevel)
{
}
virtual HRESULT OnUnrecognizedTag(
const wchar_t *wszNamespaceUri, int cchNamespaceUri,
const wchar_t *wszLocalName, int cchLocalName,
const wchar_t *wszQName, int cchQName,
ISAXAttributes *pAttributes) throw()
{
DisableReset();
return S_OK;
}
// Empty map: nothing inside the skipped element is recognized.
BEGIN_XMLTAG_MAP()
END_XMLTAG_MAP()
};
<|start_filename|>source/SProxy/WSDLBindingParser.h<|end_filename|>
//
// WSDLBindingParser.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "Parser.h"
class CWSDLBinding;
// SAX parser for a WSDL <binding> element. Does not own the CWSDLBinding
// it populates; the binding is supplied by (and owned by) the parent
// parser / document.
class CWSDLBindingParser : public CParserBase
{
private:
CWSDLBinding *m_pBinding;
public:
inline CWSDLBindingParser(ISAXXMLReader *pReader, CParserBase *pParent, DWORD dwLevel, CWSDLBinding *pBinding = NULL)
:CParserBase(pReader, pParent, dwLevel), m_pBinding(pBinding)
{
}
// Note: "binding" appears twice, distinguished by namespace (SOAP vs HTTP).
BEGIN_XMLTAG_MAP()
XMLTAG_ENTRY_EX("binding", SOAP_NAMESPACEA, OnSoapBinding)
XMLTAG_ENTRY_EX("operation", WSDL_NAMESPACEA, OnOperation)
XMLTAG_ENTRY_EX("binding", HTTP_NAMESPACEA, OnHttpBinding)
XMLTAG_ENTRY_EX("documentation", WSDL_NAMESPACEA, OnDocumentation)
// extensibility elements
// XMLTAG_ENTRY_EX("class", SUDS_NAMESPACEA, OnSudsClass)
// XMLTAG_ENTRY_EX("binding", STK_PREFERREDENCODING_NAMESPACEA, OnStkPreferredBinding)
END_XMLTAG_MAP()
BEGIN_XMLATTR_MAP()
XMLATTR_ENTRY("name", OnName)
XMLATTR_ENTRY("type", OnType)
END_XMLATTR_MAP()
TAG_METHOD_DECL(OnDocumentation);
TAG_METHOD_DECL(OnOperation);
TAG_METHOD_DECL(OnSoapBinding);
TAG_METHOD_DECL(OnHttpBinding);
// TAG_METHOD_DECL(OnSudsClass);
// TAG_METHOD_DECL(OnStkPreferredBinding);
ATTR_METHOD_DECL(OnName);
ATTR_METHOD_DECL(OnType);
// Target binding accessors (non-owning).
inline CWSDLBinding * GetBinding()
{
return m_pBinding;
}
inline void SetBinding(CWSDLBinding * pBinding)
{
ATLASSERT( pBinding != NULL );
m_pBinding = pBinding;
}
// Records prefix bindings needed to resolve the "type" QName attribute.
HRESULT __stdcall startPrefixMapping(
const wchar_t *wszPrefix,
int cchPrefix,
const wchar_t *wszUri,
int cchUri);
HRESULT OnUnrecognizedTag(
const wchar_t *wszNamespaceUri, int cchNamespaceUri,
const wchar_t *wszLocalName, int cchLocalName,
const wchar_t *wszQName, int cchQName,
ISAXAttributes *pAttributes) throw();
};
<|start_filename|>include/atlextmgmt.h<|end_filename|>
// This is a part of the Active Template Library.
// Copyright (C) Microsoft Corporation
// All rights reserved.
//
// This source code is only intended as a supplement to the
// Active Template Library Reference and related
// electronic documentation provided with the library.
// See these sources for detailed information regarding the
// Active Template Library product.
#ifndef __ATLEXTMGMT_H__
#define __ATLEXTMGMT_H__
#ifdef _WIN32_WCE
#error atlextmgmt.h is not supported on Windows CE (_WIN32_WCE is defined)
#endif //_WIN32_WCE
#pragma once
#pragma warning(push)
#pragma warning(disable: 4702)
#include <atlsoap.h>
#include <atlutil.h>
#include <atlsrvres.h>
#include <atlsecurity.h>
//
// You can change the local group that is used for authorizing
// site administrators by #define'ing ATL_DEFAULT_AUTH group
// to something else before including this header file. For
// example:
// #define ATL_DEFAULT_AUTHGRP CSid(_T("My Heros"))
// Verify that the logged on user is a member of
// the local group 'My Heros' before allowing them to
// administrate this site.
//
// #define ATL_DEFAULT_AUTHGRP Sids::World
// Allow everyone access
//
// #define ATL_DEFAULT_AUTHGRP Sids::Null
// Allow no one access
//
#ifndef ATL_DEFAULT_AUTHGRP
#define ATL_DEFAULT_AUTHGRP Sids::Admins()
#endif
// If you #define ATL_NO_DEFAULT_AUTHORITY then there will be no authorization
// check before allowing access to management functions. You can also #define
// ATL_NO_DEFAULT_AUTHORITY and then declare you own instance of _Authority
// before #include-ing atlextmgmt.h to use a different authorization scheme.
#ifndef ATL_NO_DEFAULT_AUTHORITY
__declspec(selectany) CDefaultAuth _Authority;
#endif
// You can choose which of the management handlers actually get used by
// #defining the following constants before including this header
// _ATL_THREADPOOL_MANAGEMENT (The thread pool manager web service and web based UI)
// _ATL_STENCILCACHE_MANAGEMENT (The stencil cache manager web service and web based UI)
// _ATL_DLLCACHE_MANAGEMENT (The DLL cache manager service and web based UI)
// You can use the following constants to remove the web based UI if you don't
// want to use it.
// _ATL_THREADPOOL_NOUI (removes the thread pool mgr's stencil handler)
// _ATL_STENCILCACHE_NOUI (removes the stencil cache mgr's stencil handler)
// _ATL_DLLCACHE_NOUI (removes the dll cache mgr's stencil handler)
// You can use the following constants to remove the web service management
// components individually
// _ATL_THREADPOOL_NOWEBSERVICE (removes the thread pool mgr's stencil handler)
// _ATL_STENCILCACHE_NOWEBSERVICE (removes the stencil cache mgr's stencil handler)
// _ATL_DLLCACHE_NOWEBSERVICE (removes the dll cache mgr's stencil handler)
// The following constants declare resource names of stencils included
// as resources in the module that uses this header. These stencils are
// used for the web based UI for the management objects. You can provide
// stencils of your own by including them as resources and redefining these
// constants before including this header.
#ifndef IDR_THREADMGR_SRF
#define IDR_THREADMGR_SRF "THREADMGR.SRF"
#endif
#ifndef IDR_STENCILMGR_SRF
#define IDR_STENCILMGR_SRF "STENCILMGR.SRF"
#endif
#ifndef IDR_DLLMGR_SRF
#define IDR_DLLMGR_SRF "DLLMGR.SRF"
#endif
// A warning so users using the web based UI to manage their extension
// will remember to include the stencil resources in their projects
#if (defined(_ATL_THREADPOOL_MANAGEMENT) && !defined(_ATL_THREADPOOL_NOUI)) || (defined(_ATL_STENCILCACHE_MANAGEMENT) && !defined(_ATL_STENCILCACHE_NOUI)) || (defined(_ATL_DLLCACHE_MANAGEMENT) && !defined(_ATL_DLLCACHE_NOUI))
#ifndef NO_ATL_MGMT_STENCIL_WARNING
#pragma message("*************** Please Note ***************")
#pragma message("Your usage of atlextmgmt.h requires you to include management")
#pragma message("stencil resources in your module's resource file.")
#pragma message("Please make sure you include atlsrv.rc in your resource file.\r\n")
#endif
#endif
// These constants define the names used for the handler objects for the
// various services. You can change the names by redefining these constants
// before including this header
#ifndef ID_THREADMGR_WEBSERVICE_NAME
#define ID_THREADMGR_WEBSERVICE_NAME "ThreadPoolManager"
#endif
#ifndef ID_THREADMGR_WEBSERVICE_URL
#define ID_THREADMGR_WEBSERVICE_URL "http://www.microsoft.com/vc/atlserver/soap/ThreadPoolManager"
#endif
#ifndef ID_THREADMGR_WEBSERVICE_WSDL
#define ID_THREADMGR_WEBSERVICE_WSDL "GenThreadPoolManagerWSDL"
#endif
#ifndef ID_THREADMGR_SRFHANDLER_NAME
#define ID_THREADMGR_SRFHANDLER_NAME "ThreadMgrSrf"
#endif
#ifndef ID_STENCILCACHEMGR_WEBSERVICE_NAME
#define ID_STENCILCACHEMGR_WEBSERVICE_NAME "StencilCacheManager"
#endif
#ifndef ID_STENCILCACHEMGR_WEBSERVICE_URL
#define ID_STENCILCACHEMGR_WEBSERVICE_URL "http://www.microsoft.com/vc/atlserver/soap/StencilCacheManager"
#endif
#ifndef ID_STENCILCACHEMGR_WEBSERVICE_WSDL
#define ID_STENCILCACHEMGR_WEBSERVICE_WSDL "GenStencilCacheManagerWSDL"
#endif
#ifndef ID_STENCILCACHEMGR_SRFHANDLER_NAME
#define ID_STENCILCACHEMGR_SRFHANDLER_NAME "StencilMgrSrf"
#endif
#ifndef ID_DLLCACHEMGR_WEBSERVICE_NAME
#define ID_DLLCACHEMGR_WEBSERVICE_NAME "DllCacheManager"
#endif
#ifndef ID_DLLCACHEMGR_WEBSERVICE_URL
#define ID_DLLCACHEMGR_WEBSERVICE_URL "http://www.microsoft.com/vc/atlserver/soap/DllCacheManager"
#endif
#ifndef ID_DLLCACHEMGR_WEBSERVICE_WSDL
#define ID_DLLCACHEMGR_WEBSERVICE_WSDL "GenDllCacheManagerWSDL"
#endif
#ifndef ID_DLLCACHEMGR_SRFHANDLER_NAME
#define ID_DLLCACHEMGR_SRFHANDLER_NAME "DllMgrSrf"
#endif
#pragma pack(push,_ATL_PACKING)
namespace ATL {
[emitidl(restricted)];
#define ATL_COLOR_TR1 RGB(0xd2, 0xff, 0xff)
#define ATL_COLOR_TR2 RGB(0xd2, 0xff, 0xd2)
#define ATL_COLOR_BODYBG RGB(0xec, 0xf9, 0xec)
// _AtlRedirectToPage builds up a redirect URL from the
// current request plus a Handler= specification and
// redirects the user's browser to that page.
// Builds "http://<SERVER_NAME><current-url><szHandler>" and redirects the
// client's browser there. Fails (HTTP_FAIL) if either server variable
// lookup fails or the assembled URL exceeds ATL_URL_MAX_URL_LENGTH.
// Returns HTTP_SUCCESS_NO_PROCESS on success so no further processing of
// the current request occurs.
inline HTTP_CODE _AtlRedirectToPage(
IHttpServerContext *pContext,
CHttpRequest& request,
CHttpResponse& response,
const char *szHandler)
{
ATLENSURE(pContext);
CStringA strRedirect("http://");
char buff[ATL_URL_MAX_URL_LENGTH];
DWORD dwLen = static_cast<DWORD>(_countof(buff));
if (!pContext->GetServerVariable("SERVER_NAME", buff, &dwLen))
{
return HTTP_FAIL;
}
// Defensive NUL-termination in case the API filled the whole buffer.
buff[_countof(buff)-1]='\0';
strRedirect+=buff;
dwLen = static_cast<DWORD>(_countof(buff));
if (!request.GetUrl(buff, &dwLen))
{
return HTTP_FAIL;
}
buff[_countof(buff)-1]='\0';
strRedirect+=buff;
strRedirect+=szHandler;
// Reject over-long URLs rather than sending a truncated redirect.
if (strRedirect.GetLength() >= ATL_URL_MAX_URL_LENGTH)
{
return HTTP_FAIL;
}
BOOL bOK=response.Redirect(strRedirect.GetString());
return bOK ? HTTP_SUCCESS_NO_PROCESS : HTTP_FAIL;
}
#ifdef _ATL_THREADPOOL_MANAGEMENT
///////////////////////////////////////////////////////////////////////
// Thread pool management
[ uuid("44e9962a-5207-4d2a-a466-5f08a76e0e5d"), object ]
__interface IThreadPoolMgr
{
[id(0)] STDMETHOD(SetSize)([in] int nNumThreads);
[id(1)] STDMETHOD(GetSize)([out,retval] int *pnNumThreads);
};
// Thin wrapper over the server's IThreadPoolConfig service: resolves the
// service once (Initialize) and exposes get/set of the pool size. Shared
// by both the SOAP web-service handler and the stencil UI handler.
class CThreadPoolMgrObject
{
public:
CThreadPoolMgrObject() throw()
{
}
// Sets the pool's thread count. Temporarily reverts any impersonation
// token so the change runs with the process identity, restoring it
// afterwards; a failed restore is reported in preference to hr.
HRESULT SetSize(int nNumThreads) throw()
{
if (!m_spThreadPoolConfig)
return E_UNEXPECTED;
CRevertThreadToken revert;
if (!revert.Initialize())
return E_FAIL;
HRESULT hr = m_spThreadPoolConfig->SetSize(nNumThreads);
DWORD dwErr = revert.Restore();
if (dwErr)
return AtlHresultFromWin32(dwErr);
return hr;
}
// Retrieves the pool's current thread count.
HRESULT GetSize(int *pnNumThreads) throw()
{
if (!m_spThreadPoolConfig)
return E_UNEXPECTED;
return m_spThreadPoolConfig->GetSize(pnNumThreads);
}
// Resolves IThreadPoolConfig from the host's service provider.
// Safe to call repeatedly; only the first successful call does work.
HTTP_CODE Initialize(IServiceProvider *pProvider) throw()
{
ATLASSERT(pProvider); // should never be NULL
if (!pProvider)
return HTTP_ERROR(500, ISE_SUBERR_UNEXPECTED);
if (m_spThreadPoolConfig)
return HTTP_SUCCESS; // already initialized
pProvider->QueryService(__uuidof(IThreadPoolConfig), &m_spThreadPoolConfig);
return m_spThreadPoolConfig ? HTTP_SUCCESS : HTTP_ERROR(500, ISE_SUBERR_UNEXPECTED);
}
private:
CComPtr<IThreadPoolConfig> m_spThreadPoolConfig;
};
#ifndef _ATL_THREADPOOL_NOWEBSERVICE
#pragma warning(push)
#pragma warning(disable:4199)
// SOAP web service exposing the thread pool size (IThreadPoolMgr).
// The attribute block wires it up as both a SOAP endpoint and a request
// handler that can serve its own WSDL.
[
soap_handler(
name= ID_THREADMGR_WEBSERVICE_NAME,
namespace= ID_THREADMGR_WEBSERVICE_URL,
protocol= "soap"
),
request_handler(
name= ID_THREADMGR_WEBSERVICE_NAME,
sdl= ID_THREADMGR_WEBSERVICE_WSDL
)
]
class CThreadPoolManager :
public IThreadPoolMgr
{
#pragma warning(pop)
public:
[soap_method]
STDMETHOD(SetSize)(int nNumThreads)
{
return m_PoolMgr.SetSize(nNumThreads);
}
[soap_method]
STDMETHOD(GetSize)(int *pnNumThreads)
{
return m_PoolMgr.GetSize(pnNumThreads);
}
// override HandleRequest to Initialize our m_spServiceProvider
// and to handle authorizing the client.
HTTP_CODE HandleRequest(AtlServerRequest *pRequestInfo, IServiceProvider *pProvider)
{
HTTP_CODE hcErr = m_PoolMgr.Initialize(pProvider);
if (hcErr != HTTP_SUCCESS)
return hcErr;
// Make sure caller is authorized on this system
__if_exists(_Authority)
{
hcErr = HTTP_FAIL;
ATLTRY(hcErr = _Authority.IsAuthorized(pRequestInfo, ATL_DEFAULT_AUTHGRP))
}
if (hcErr == HTTP_SUCCESS)
{
hcErr = __super::HandleRequest(pRequestInfo, pProvider);
}
return hcErr;
}
private:
CThreadPoolMgrObject m_PoolMgr;
};
#endif //_ATL_THREADPOOL_NOWEBSERVICE
#ifndef _ATL_THREADPOOL_NOUI
#define INVALID_COMMAND_ID -1
#define MAX_COMMAND_ID 64
// Web (stencil/SRF) UI for the thread pool manager: renders the current
// pool size and processes posted "ExecuteCommand" forms that change it.
[request_handler(name=ID_THREADMGR_SRFHANDLER_NAME)]
class CThreadMgrStencil
{
public:
CThreadMgrStencil() :
m_nColor(ATL_COLOR_TR1)
{
}
// {{GetSize}} replacement tag: emits the pool's thread count.
[tag_name("GetSize")]
HTTP_CODE GetSize()
{
int nSize = 0;
HRESULT hr = m_PoolMgr.GetSize(&nSize);
if (SUCCEEDED(hr))
{
m_HttpResponse << nSize;
}
else
m_HttpResponse << "size not found";
return HTTP_SUCCESS;
}
// {{GetTRColor}} replacement tag: alternates between two row colors
// so successive table rows render banded.
[tag_name("GetTRColor")]
HTTP_CODE GetTRColor()
{
m_nColor = (m_nColor == ATL_COLOR_TR1) ? ATL_COLOR_TR2 : ATL_COLOR_TR1;
TCHAR cr[8];
if (RGBToHtml(m_nColor, cr, sizeof(cr)))
m_HttpResponse << cr;
return HTTP_SUCCESS;
}
// {{GetBodyColor}} replacement tag: emits the page background color.
[tag_name("GetBodyColor")]
HTTP_CODE GetBodyColor()
{
TCHAR cr[8];
if (RGBToHtml(ATL_COLOR_BODYBG, cr, sizeof(cr)))
m_HttpResponse << cr;
return HTTP_SUCCESS;
}
// Request entry point: initializes the pool manager, authorizes the
// caller, executes a posted command if one is present, and otherwise
// renders the management stencil.
HTTP_CODE ValidateAndExchange() throw()
{
_ATLTRY
{
// Initialize the thread pool manager instance. Internally
// the initialize function only initializes its data structures
// once.
HTTP_CODE hcErr = m_PoolMgr.Initialize(m_spServiceProvider);
if (hcErr != HTTP_SUCCESS)
return hcErr;
__if_exists(_Authority)
{
// Make sure caller is authorized on this system
hcErr = HTTP_FAIL;
ATLTRY(hcErr = _Authority.IsAuthorized(m_pRequestInfo, ATL_DEFAULT_AUTHGRP))
if (hcErr != HTTP_SUCCESS)
return hcErr;
}
m_HttpResponse.SetContentType("text/html");
CString strHandler, strOptParam;
int nCmdToExec = INVALID_COMMAND_ID;
if (m_HttpRequest.GetMethod() == CHttpRequest::HTTP_METHOD_POST)
{
// check to see if we have a "Method" form variable and can execute a command
DWORD dwErr = m_HttpRequest.FormVars.Exchange("Method", &strHandler);
if (dwErr == VALIDATION_S_OK)
{
if (strHandler == _T("ExecuteCommand"))
{
// get the value of the command parameter so we can execute it
dwErr = m_HttpRequest.FormVars.Validate("command", &nCmdToExec, 0, MAX_COMMAND_ID);
if (dwErr == VALIDATION_S_OK)
{
// get the optional parameter if it's there.
m_HttpRequest.FormVars.Validate("DynValue", &strOptParam, 0, MAX_COMMAND_ID);
hcErr = ExecCommand(nCmdToExec, strOptParam);
return hcErr;
}
}
}
}
// If we had a proper command to execute, we would have done it by now.
// Just handle like it's a normal request to view the thread count.
hcErr = LoadStencilResource(m_hInstHandler, IDR_THREADMGR_SRF);
return hcErr;
}
_ATLCATCHALL()
{
return HTTP_FAIL;
}
}
// Executes a validated form command (currently only command 0 =
// set pool size), then redirects back to this handler's page so a
// browser refresh does not repost the form.
HTTP_CODE ExecCommand(int nCmdToExec, CString& strOptParam)
{
switch (nCmdToExec)
{
case 0:
TCHAR *pStop = NULL;
int nValue = _tcstoul(strOptParam, &pStop, 10);
m_PoolMgr.SetSize(nValue);
break;
};
return _AtlRedirectToPage(
m_spServerContext,
m_HttpRequest,
m_HttpResponse,
"?Handler=" ID_THREADMGR_SRFHANDLER_NAME
);
}
private:
CThreadPoolMgrObject m_PoolMgr;
long m_nColor;
CString m_strUrl;
};
#endif // _ATL_THREADPOOL_NOUI
#endif // _ATL_THREADPOOL_MANAGEMENT
#ifdef _ATL_STENCILCACHE_MANAGEMENT
//////////////////////////////////////////////////////////////////////
// Stencil cache management
// Thin wrapper over the stencil cache's IMemoryCacheStats and
// IStencilCacheControl interfaces. Stat getters widen the cache's DWORD
// counters to __int64 for the SOAP/UI layer. Initialize() must succeed
// before any other method is called; the getters enforce this with
// ATLENSURE (which throws/fails on a NULL interface pointer).
class CStencilCacheMgrObject
{
public:
CStencilCacheMgrObject()
{
}
// Number of stencils currently cached. E_INVALIDARG if pdwSize is NULL.
HRESULT GetCurrentEntryCount(__int64 *pdwSize)
{
// Was ATLASSUME, which is a no-op in release builds and would allow a
// NULL m_spMemCacheStats to be dereferenced; use ATLENSURE for
// consistency with every other getter in this class.
ATLENSURE(m_spMemCacheStats);
if (!pdwSize)
return E_INVALIDARG;
DWORD dwValue;
HRESULT hr = m_spMemCacheStats->GetCurrentEntryCount(&dwValue);
if (hr == S_OK)
{
*pdwSize = dwValue;
}
return hr;
}
// Resets the cache's hit/miss statistics.
HRESULT ClearStats()
{
ATLENSURE(m_spMemCacheStats);
return m_spMemCacheStats->ClearStats();
}
// Number of cache hits since the stats were last cleared.
HRESULT GetHitCount(__int64 *pdwSize)
{
ATLENSURE(m_spMemCacheStats);
if (!pdwSize)
return E_INVALIDARG;
DWORD dwValue;
HRESULT hr = m_spMemCacheStats->GetHitCount(&dwValue);
if (hr == S_OK)
{
*pdwSize = dwValue;
}
return hr;
}
// Number of cache misses since the stats were last cleared.
HRESULT GetMissCount(__int64 *pdwSize)
{
ATLENSURE(m_spMemCacheStats);
if (!pdwSize)
return E_INVALIDARG;
DWORD dwValue;
HRESULT hr = m_spMemCacheStats->GetMissCount(&dwValue);
if (hr == S_OK)
{
*pdwSize = dwValue;
}
return hr;
}
// Bytes currently allocated by the cache.
HRESULT GetCurrentAllocSize(__int64 *pdwSize)
{
ATLENSURE(m_spMemCacheStats);
if (!pdwSize)
return E_INVALIDARG;
DWORD dwValue;
HRESULT hr = m_spMemCacheStats->GetCurrentAllocSize(&dwValue);
if (hr == S_OK)
{
*pdwSize = dwValue;
}
return hr;
}
// High-water mark of the cache's allocation size, in bytes.
HRESULT GetMaxAllocSize(__int64 *pdwSize)
{
ATLENSURE(m_spMemCacheStats);
if (!pdwSize)
return E_INVALIDARG;
DWORD dwValue;
HRESULT hr = m_spMemCacheStats->GetMaxAllocSize(&dwValue);
if (hr == S_OK)
{
*pdwSize = dwValue;
}
return hr;
}
// High-water mark of the cache's entry count.
HRESULT GetMaxEntryCount(__int64 *pdwSize)
{
ATLENSURE(m_spMemCacheStats);
if (!pdwSize)
return E_INVALIDARG;
DWORD dwValue;
HRESULT hr = m_spMemCacheStats->GetMaxEntryCount(&dwValue);
if (hr == S_OK)
{
*pdwSize = dwValue;
}
return hr;
}
// Removes a single stencil identified by its cache handle (passed over
// the wire as __int64 and cast back to HCACHEITEM).
HRESULT RemoveStencil(__int64 hStencil)
{
ATLENSURE(m_spStencilCacheControl);
return m_spStencilCacheControl->RemoveStencil((const HCACHEITEM)hStencil);
}
// Removes a single stencil identified by name (converted to ANSI).
HRESULT RemoveStencilByName(BSTR szStencil) throw()
{
ATLENSURE_RETURN(m_spStencilCacheControl);
return m_spStencilCacheControl->RemoveStencilByName(CW2A(szStencil));
}
// Flushes every stencil from the cache.
HRESULT RemoveAllStencils()
{
ATLENSURE(m_spStencilCacheControl);
return m_spStencilCacheControl->RemoveAllStencils();
}
// we show lifespan in milliseconds in the UI so we have to
// do the conversion to 100ns intervals here.
HRESULT SetDefaultLifespan(unsigned __int64 dwdwLifespan)
{
ATLENSURE(m_spStencilCacheControl);
// convert to 100ns intervals
return m_spStencilCacheControl->SetDefaultLifespan(dwdwLifespan * CFileTime::Millisecond);
}
// Returns the default stencil lifespan converted from the cache's
// 100ns units back to milliseconds.
HRESULT GetDefaultLifespan(unsigned __int64 *pdwdwLifespan)
{
ATLENSURE(m_spStencilCacheControl);
ATLENSURE(pdwdwLifespan!=NULL);
*pdwdwLifespan = 0;
unsigned __int64 dwls = 0;
HRESULT hr = m_spStencilCacheControl->GetDefaultLifespan(&dwls);
// convert to milliseconds
if (SUCCEEDED(hr))
{
dwls /= CFileTime::Millisecond;
*pdwdwLifespan = dwls;
}
return hr;
}
// Queries the stencil-cache service for its stats and control interfaces.
// Idempotent: returns HTTP_SUCCESS immediately once both interfaces are
// held; HTTP 500 if either cannot be obtained.
HTTP_CODE Initialize(IServiceProvider *pProvider) throw()
{
ATLASSERT(pProvider); // should never be NULL
if (!pProvider)
return HTTP_ERROR(500, ISE_SUBERR_UNEXPECTED);
if (m_spMemCacheStats && m_spStencilCacheControl)
return HTTP_SUCCESS; // already initialized
CComPtr<IStencilCache> spStencilCache;
pProvider->QueryService(__uuidof(IStencilCache), &spStencilCache);
if (spStencilCache)
{
if (!m_spMemCacheStats)
{
spStencilCache->QueryInterface(__uuidof(IMemoryCacheStats),
(void**)&m_spMemCacheStats);
}
if (!m_spStencilCacheControl)
{
spStencilCache->QueryInterface(__uuidof(IStencilCacheControl),
(void**)&m_spStencilCacheControl);
}
}
return (m_spMemCacheStats && m_spStencilCacheControl)
? HTTP_SUCCESS : HTTP_ERROR(500, ISE_SUBERR_UNEXPECTED);
}
private:
CComPtr<IMemoryCacheStats> m_spMemCacheStats;
CComPtr<IStencilCacheControl> m_spStencilCacheControl;
};
#ifndef _ATL_STENCILCACHE_NOWEBSERVICE
// SOAP-exposed contract for stencil cache management. Mirrors the public
// surface of CStencilCacheMgrObject; counts are widened to __int64 and
// lifespans are expressed in milliseconds (see CStencilCacheMgrObject).
[ uuid("3813895C-4C4C-41df-95F4-12220140B164"), object ]
__interface IStencilCacheMgr
{
// data access
[id(0)] STDMETHOD(GetCurrentEntryCount)([out,retval] __int64 *pdwSize);
[id(1)] STDMETHOD(GetHitCount)([out,retval] __int64 *pdwSize);
[id(2)] STDMETHOD(GetMissCount)([out,retval] __int64 *pdwSize);
[id(3)] STDMETHOD(GetCurrentAllocSize)([out,retval] __int64 *pdwSize);
[id(4)] STDMETHOD(GetMaxAllocSize)([out,retval] __int64 *pdwSize);
[id(5)] STDMETHOD(GetMaxEntryCount)([out,retval] __int64 *pdwSize);
[id(6)] STDMETHOD(GetDefaultLifespan)([out,retval] unsigned __int64 *pdwdwLifespan);
// commands
[id(7)] STDMETHOD(ClearStats)();
[id(8)] STDMETHOD(RemoveStencil)([in] __int64 hStencil);
[id(9)] STDMETHOD(RemoveStencilByName)([in] BSTR szStencil);
[id(10)] STDMETHOD(RemoveAllStencils)();
[id(11)] STDMETHOD(SetDefaultLifespan)([in] unsigned __int64 dwdwLifespan);
};
#pragma warning(push)
// C4199: attributed-code provider warnings are expected here.
#pragma warning(disable:4199)
[
soap_handler( name= ID_STENCILCACHEMGR_WEBSERVICE_NAME,
namespace= ID_STENCILCACHEMGR_WEBSERVICE_URL,
protocol= "soap"
),
request_handler(
name= ID_STENCILCACHEMGR_WEBSERVICE_NAME,
sdl= ID_STENCILCACHEMGR_WEBSERVICE_WSDL )
]
// SOAP web-service endpoint for stencil cache management. Every soap_method
// is a one-line forwarder to the shared CStencilCacheMgrObject, which holds
// the actual cache interface pointers.
class CStencilCacheManager :
public IStencilCacheMgr
{
#pragma warning(pop)
public:
[ soap_method ]
STDMETHOD(GetCurrentEntryCount)(__int64 *pdwSize)
{
return m_MgrObj.GetCurrentEntryCount(pdwSize);
}
[ soap_method ]
STDMETHOD(ClearStats)()
{
return m_MgrObj.ClearStats();
}
[ soap_method ]
STDMETHOD(GetHitCount)(__int64 *pdwSize)
{
return m_MgrObj.GetHitCount(pdwSize);
}
[ soap_method ]
STDMETHOD(GetMissCount)(__int64 *pdwSize)
{
return m_MgrObj.GetMissCount(pdwSize);
}
[ soap_method ]
STDMETHOD(GetCurrentAllocSize)(__int64 *pdwSize)
{
return m_MgrObj.GetCurrentAllocSize(pdwSize);
}
[ soap_method ]
STDMETHOD(GetMaxAllocSize)(__int64 *pdwSize)
{
return m_MgrObj.GetMaxAllocSize(pdwSize);
}
[ soap_method ]
STDMETHOD(GetMaxEntryCount)(__int64 *pdwSize)
{
return m_MgrObj.GetMaxEntryCount(pdwSize);
}
[ soap_method ]
STDMETHOD(RemoveStencil)(__int64 hStencil)
{
return m_MgrObj.RemoveStencil(hStencil);
}
[ soap_method ]
STDMETHOD(RemoveStencilByName)(BSTR bstrStencil)
{
return m_MgrObj.RemoveStencilByName(bstrStencil);
}
[ soap_method ]
STDMETHOD(RemoveAllStencils)()
{
return m_MgrObj.RemoveAllStencils();
}
// we show lifespan in milliseconds in the UI.
// m_MgrObj handles the conversion to 100ns intervals.
[ soap_method ]
STDMETHOD(SetDefaultLifespan)(unsigned __int64 dwdwLifespan)
{
return m_MgrObj.SetDefaultLifespan(dwdwLifespan);
}
[ soap_method ]
STDMETHOD(GetDefaultLifespan)(unsigned __int64 *pdwdwLifespan)
{
return m_MgrObj.GetDefaultLifespan(pdwdwLifespan);
}
// Initializes the manager object, performs the optional _Authority
// authorization check, then delegates SOAP dispatch to the base
// request handler generated by the attributes above.
HTTP_CODE HandleRequest(AtlServerRequest *pRequestInfo, IServiceProvider *pProvider)
{
HTTP_CODE hcErr = m_MgrObj.Initialize(pProvider);
if (hcErr != HTTP_SUCCESS)
return hcErr;
__if_exists(_Authority)
{
// Make sure caller is authorized on this system
hcErr = HTTP_FAIL;
ATLTRY(hcErr = _Authority.IsAuthorized(pRequestInfo, ATL_DEFAULT_AUTHGRP))
}
if (hcErr == HTTP_SUCCESS)
{
hcErr = __super::HandleRequest(pRequestInfo, pProvider);
}
return hcErr;
}
private:
CStencilCacheMgrObject m_MgrObj;
};
#endif //_ATL_STENCILCACHE_NOWEBSERVICE
#ifndef _ATL_STENCILCACHE_NOUI
// Pointer-to-member type for the __int64 stat getters on
// CStencilCacheMgrObject; used to drive the stats table in CStencilMgr.
typedef HRESULT (CStencilCacheMgrObject::*PFNGETDATA)(__int64 *pdwSize);
// One row of the stencil-cache stats table: the getter plus its UI label.
struct CCache_data
{
PFNGETDATA m_pfn;
char m_sz[128];
};
// Sentinel marking "no current row" in the stats iteration.
#define INVALID_DATA_PTR ((DWORD_PTR) -1)
#define INVALID_COMMAND_ID -1
// Upper bound passed to FormVars.Validate for the "command" variable.
#define MAX_COMMAND_ID 64
// Command ids posted by the stencil-cache management UI.
#define ATL_STENCILCACHECMD_CLEARALLSTATS 0
#define ATL_STENCILCACHECMD_REMOVESTENCIL 1
#define ATL_STENCILCACHECMD_REMOVEALLSTENCILS 2
#define ATL_STENCILCACHECMD_SETDEFLIFESPAN 3
[request_handler(name=ID_STENCILCACHEMGR_SRFHANDLER_NAME)]
// HTML (stencil) UI for stencil cache management: renders the stats table
// via the tag_name handlers below and executes commands posted from the form.
class CStencilMgr
{
public:
CStencilMgr()
{
m_pData = (CCache_data*)INVALID_DATA_PTR;
m_nColor = ATL_COLOR_TR1;
}
// Entry point per request: initializes the manager, authorizes the
// caller (when an _Authority exists), executes a posted command if one
// is present, otherwise renders the stats view.
HTTP_CODE ValidateAndExchange() throw()
{
_ATLTRY
{
HTTP_CODE hcErr = m_MgrObj.Initialize(m_spServiceProvider);
if (hcErr != HTTP_SUCCESS)
return hcErr;
__if_exists(_Authority)
{
// Make sure caller is authorized on this system
hcErr = HTTP_FAIL;
ATLTRY(hcErr = _Authority.IsAuthorized(m_pRequestInfo, ATL_DEFAULT_AUTHGRP))
if (hcErr != HTTP_SUCCESS)
return hcErr;
}
m_HttpResponse.SetContentType("text/html");
// check to see if we have a "Handler" form variable
CString strHandler, strOptParam;
// Initialize like the thread-manager handler does; previously left
// uninitialized (only written when validation succeeded).
int nCmdToExec = INVALID_COMMAND_ID;
if (m_HttpRequest.GetMethod() == CHttpRequest::HTTP_METHOD_POST)
{
DWORD dwErr = m_HttpRequest.FormVars.Exchange("Method", &strHandler);
if (dwErr == VALIDATION_S_OK)
{
if (strHandler == _T("ExecuteCommand"))
{
// get the value of the command parameter so we can execute it
dwErr = m_HttpRequest.FormVars.Validate("command", &nCmdToExec, 0, MAX_COMMAND_ID);
if (dwErr == VALIDATION_S_OK)
{
// get the optional parameter if it's there.
// NOTE(review): MAX_COMMAND_ID is used here as the max
// length bound for DynValue — presumably intentional,
// verify against FormVars.Validate semantics.
m_HttpRequest.FormVars.Validate("DynValue", &strOptParam, 0, MAX_COMMAND_ID);
hcErr = ExecCommand(nCmdToExec, strOptParam);
return hcErr;
}
}
}
}
hcErr = LoadStencilResource(m_hInstHandler, IDR_STENCILMGR_SRF);
return hcErr;
}
_ATLCATCHALL()
{
return HTTP_FAIL;
}
}
// Executes one of the ATL_STENCILCACHECMD_* commands and redirects back
// to the view page so a refresh does not re-post the command.
HTTP_CODE ExecCommand(int nCmdToExec, CString& strOptParam)
{
switch (nCmdToExec)
{
case ATL_STENCILCACHECMD_CLEARALLSTATS:
m_MgrObj.ClearStats();
break;
case ATL_STENCILCACHECMD_REMOVESTENCIL:
{
// AllocSysString() transfers BSTR ownership to the caller and
// RemoveStencilByName does not free its argument, so free it
// here (this previously leaked on every remove command).
BSTR bstrStencil = strOptParam.AllocSysString();
m_MgrObj.RemoveStencilByName(bstrStencil);
::SysFreeString(bstrStencil);
}
break;
case ATL_STENCILCACHECMD_REMOVEALLSTENCILS:
m_MgrObj.RemoveAllStencils();
break;
case ATL_STENCILCACHECMD_SETDEFLIFESPAN:
TCHAR *pStop = NULL;
// lifespan arrives in milliseconds; the manager object converts
// to 100ns units.
m_MgrObj.SetDefaultLifespan(_tcstoul(strOptParam, &pStop, 10));
break;
};
return _AtlRedirectToPage(
m_spServerContext,
m_HttpRequest,
m_HttpResponse,
"?Handler=" ID_STENCILCACHEMGR_SRFHANDLER_NAME
);
}
// Stencil iteration tag: advances m_pData over the static stats table;
// returns HTTP_S_FALSE (and resets) after the NULL-terminator row.
[tag_name("GetNextStencilCacheStats")]
HTTP_CODE GetNextStencilCacheStats()
{
if (m_pData == (CCache_data*)INVALID_DATA_PTR)
{
m_pData = GetCacheData();
return HTTP_SUCCESS;
}
m_pData++;
if (m_pData->m_pfn != NULL)
return HTTP_SUCCESS;
m_pData = (CCache_data*)INVALID_DATA_PTR;
return HTTP_S_FALSE;
}
// Emits the current row's label. Valid only during iteration.
[tag_name("GetCacheValue")]
HTTP_CODE GetCacheValue()
{
ATLENSURE(m_pData);
ATLENSURE(m_pData != (CCache_data*)INVALID_DATA_PTR);
m_HttpResponse << m_pData->m_sz;
return HTTP_SUCCESS;
}
// Emits the current row's value by invoking its getter on m_MgrObj.
[tag_name("GetCacheQuantity")]
HTTP_CODE GetCacheQuantity()
{
ATLENSURE(m_pData);
ATLENSURE(m_pData != (CCache_data*)INVALID_DATA_PTR);
__int64 dwValue = 0;
PFNGETDATA pfn = m_pData->m_pfn;
ATLENSURE(pfn);
CStencilCacheMgrObject *pMgr = &m_MgrObj;
(pMgr->*pfn)(&dwValue);
m_HttpResponse << dwValue;
return HTTP_SUCCESS;
}
// Alternates the table-row background color between the two theme colors.
[tag_name("GetTRColor")]
HTTP_CODE GetTRColor()
{
m_nColor = (m_nColor == ATL_COLOR_TR1) ? ATL_COLOR_TR2 : ATL_COLOR_TR1;
TCHAR cr[8];
if (RGBToHtml(m_nColor, cr, sizeof(cr)))
m_HttpResponse << cr;
return HTTP_SUCCESS;
}
// Emits the page body background color as an HTML color string.
[tag_name("GetBodyColor")]
HTTP_CODE GetBodyColor()
{
TCHAR cr[8];
if (RGBToHtml(ATL_COLOR_BODYBG, cr, sizeof(cr)))
m_HttpResponse << cr;
return HTTP_SUCCESS;
}
private:
// Static table of stat getters and their UI labels, terminated by a
// NULL row (relied upon by GetNextStencilCacheStats).
static CCache_data* GetCacheData()
{
static CCache_data cache_data[] =
{
{(PFNGETDATA)&CStencilCacheMgrObject::GetCurrentEntryCount, "Current Cache Entry Count(stencils)"},
{(PFNGETDATA)&CStencilCacheMgrObject::GetHitCount, "Cache Hit Count(stencils)"},
{(PFNGETDATA)&CStencilCacheMgrObject::GetMissCount, "Cache Miss Count(stencils)"},
{(PFNGETDATA)&CStencilCacheMgrObject::GetCurrentAllocSize, "Cache memory allocation(bytes)"},
{(PFNGETDATA)&CStencilCacheMgrObject::GetMaxAllocSize, "Cache maximum allocation size(bytes)"},
{(PFNGETDATA)&CStencilCacheMgrObject::GetMaxEntryCount, "Cache maximum entry count(stencils)"},
// NOTE(review): GetDefaultLifespan takes unsigned __int64* and is
// cast to PFNGETDATA (__int64*); layout-compatible in practice on
// MSVC, but strictly a mismatched member-pointer cast.
{(PFNGETDATA)&CStencilCacheMgrObject::GetDefaultLifespan, "Default stencil lifespan(ms)"},
{NULL, NULL}
};
return cache_data;
}
CStencilCacheMgrObject m_MgrObj;
CCache_data *m_pData;
long m_nColor;
};
//__declspec(selectany) CComObjectGlobal<CStencilCacheManager> CStencilMgr::m_cachemgr;
#endif // _ATL_STENCILCACHE_NOUI
#endif // _ATL_STENCILCACHE_MANAGEMENT
//////////////////////////////////////////////////////////////////////
// DLL cache management
#ifdef _ATL_DLLCACHE_MANAGEMENT
#ifndef _ATL_DLLCACHE_NOWEBSERVICE
[export]
#endif
// SOAP-friendly mirror of DLL_CACHE_ENTRY (atlcache.h): the module handle
// is carried as a DWORD and the path as a BSTR (see comment above the
// IDllCacheMgr interface). szDllName is allocated with SysAllocString and
// must be freed by the consumer (see CDllCacheMgr::EnumEntries).
struct _DLL_CACHE_ENTRY
{
DWORD hInstDll;
DWORD dwRefs;
BSTR szDllName;
};
// Wrapper over the IDllCache service: copies the cache's entry table into
// SOAP-friendly _DLL_CACHE_ENTRY records. Initialize() must succeed first.
class CDllMgrObject
{
public:
// Copies up to dwCount entries into pEntries, converting each name to a
// caller-owned BSTR. With dwCount == 0 it only reports the entry count
// through pdwCopied. Returns E_UNEXPECTED when not initialized or when
// dwCount != 0 with pEntries NULL; E_POINTER when pdwCopied is NULL.
HRESULT GetEntries(DWORD dwCount, _DLL_CACHE_ENTRY *pEntries, DWORD *pdwCopied)
{
ATLASSUME(m_spDllCache);
HRESULT hr = E_FAIL;
DLL_CACHE_ENTRY *pe = NULL;
if (!m_spDllCache)
return E_UNEXPECTED;
if (dwCount != 0 && pEntries == NULL)
return E_UNEXPECTED; // asking for entries but no place to put them
if (!pdwCopied)
return E_POINTER;
*pdwCopied = 0;
if (dwCount)
{
// temporary native-format buffer filled by the cache
pe = new DLL_CACHE_ENTRY[dwCount];
if (!pe)
return E_OUTOFMEMORY;
}
hr = m_spDllCache->GetEntries(dwCount, pe, pdwCopied);
if (hr == S_OK && dwCount != 0 && pEntries != NULL)
{
// SysAllocString our path strings
for (DWORD i = 0; i<*pdwCopied; i++)
{
// NOTE(review): the wire struct stores the HINSTANCE as a DWORD,
// which truncates the handle on Win64 — appears intentional for
// display purposes; confirm before widening.
pEntries[i].hInstDll = (DWORD)(DWORD_PTR)pe[i].hInstDll;
pEntries[i].dwRefs = pe[i].dwRefs;
pEntries[i].szDllName = ::SysAllocString(CA2W(pe[i].szDllName));
}
}
delete [] pe;
return hr;
}
// Reports the number of cached DLL entries (count-only GetEntries call).
HRESULT GetEntryCount(DWORD *pdwCount)
{
ATLASSUME(m_spDllCache);
if (!m_spDllCache)
return E_UNEXPECTED;
return m_spDllCache->GetEntries(0, NULL, pdwCount);
}
// Acquires the IDllCache service. Idempotent; HTTP 500 on failure.
HTTP_CODE Initialize(IServiceProvider *pProvider)
{
ATLASSERT(pProvider); // should never be NULL
if (!pProvider)
return HTTP_ERROR(500, ISE_SUBERR_UNEXPECTED);
if (m_spDllCache)
return HTTP_SUCCESS; // already initialized
pProvider->QueryService(__uuidof(IDllCache), &m_spDllCache);
return m_spDllCache ? HTTP_SUCCESS : HTTP_ERROR(500, ISE_SUBERR_UNEXPECTED);
}
private:
CComPtr<IDllCache> m_spDllCache;
}; // CDllMgrObject
#ifndef _ATL_DLLCACHE_NOWEBSERVICE
// _DLL_CACHE_ENTRY is our own version of DLL_CACHE_ENTRY(atlcache.h) that
// uses a BSTR instead of a fixed length string for the szDllName for compatibility
// with our SOAP implementation.
// SOAP contract for DLL cache inspection (see _DLL_CACHE_ENTRY above).
[ uuid("A0C00AF8-CEA5-46b9-97ED-FDEE55B583EF"), object ]
__interface IDllCacheMgr
{
[id(0)] STDMETHOD(GetEntries)([in] DWORD dwCount, [out] _DLL_CACHE_ENTRY *pEntries, [out, retval] DWORD *pdwCopied);
[id(1)] STDMETHOD(GetEntryCount)([out, retval] DWORD *pdwCount);
};
#pragma warning(push)
// C4199: attributed-code provider warnings are expected here.
#pragma warning(disable:4199)
[
soap_handler(
name= ID_DLLCACHEMGR_WEBSERVICE_NAME,
namespace= ID_DLLCACHEMGR_WEBSERVICE_URL,
protocol= "soap"
),
request_handler(
name= ID_DLLCACHEMGR_WEBSERVICE_NAME,
sdl= ID_DLLCACHEMGR_WEBSERVICE_WSDL
)
]
// SOAP web-service endpoint for DLL cache inspection; forwards to the
// shared CDllMgrObject.
class CDllCacheManager :
public IDllCacheMgr
{
#pragma warning(pop)
public:
[soap_method]
HRESULT GetEntries(DWORD dwCount, _DLL_CACHE_ENTRY *pEntries, DWORD *pdwCopied)
{
return m_MgrObj.GetEntries(dwCount, pEntries, pdwCopied);
}
// Count-only query; implemented as a zero-count GetEntries call so the
// argument checking in CDllMgrObject::GetEntries applies.
[soap_method]
STDMETHOD(GetEntryCount)(DWORD *pdwCount)
{
return m_MgrObj.GetEntries(0, NULL, pdwCount);
}
// Initializes the manager object, performs the optional _Authority
// authorization check, then delegates SOAP dispatch to the generated
// base request handler.
HTTP_CODE HandleRequest(AtlServerRequest *pRequestInfo, IServiceProvider *pProvider)
{
HTTP_CODE hcErr = m_MgrObj.Initialize(pProvider);
if (hcErr != HTTP_SUCCESS)
return hcErr;
__if_exists(_Authority)
{
// Make sure caller is authorized on this system
hcErr = HTTP_FAIL;
ATLTRY(hcErr = _Authority.IsAuthorized(pRequestInfo, ATL_DEFAULT_AUTHGRP))
}
if (hcErr == HTTP_SUCCESS)
{
hcErr = __super::HandleRequest(pRequestInfo, pProvider);
}
return hcErr;
}
protected:
CDllMgrObject m_MgrObj;
};
// Was "#endif _ATL_DLLCACHE_NOWEBSERVICE": tokens after #endif are
// non-conforming (MSVC warning C4067); the macro name must be a comment.
#endif // _ATL_DLLCACHE_NOWEBSERVICE
#ifndef _ATL_DLLCACHE_NOUI
#define INVALID_INDEX -1
[
request_handler(name=ID_DLLCACHEMGR_SRFHANDLER_NAME)
]
// HTML (stencil) UI for DLL cache inspection. EnumEntries drives the row
// iteration and owns m_pEntries (plus the BSTRs inside it) for the
// duration of one enumeration pass.
class CDllCacheMgr
{
public:
CDllCacheMgr() : m_nColor(ATL_COLOR_TR1),
m_nEnumCount(INVALID_INDEX),
m_nEnumIndex(INVALID_INDEX),
m_pEntries(NULL)
{
}
// Alternates the table-row background color between the two theme colors.
[tag_name("GetTRColor")]
HTTP_CODE GetTRColor()
{
m_nColor = (m_nColor == ATL_COLOR_TR1) ? ATL_COLOR_TR2 : ATL_COLOR_TR1;
TCHAR cr[8];
if (RGBToHtml(m_nColor, cr, sizeof(cr)))
m_HttpResponse << cr;
return HTTP_SUCCESS;
}
// Emits the page body background color as an HTML color string.
[tag_name("GetBodyColor")]
HTTP_CODE GetBodyColor()
{
TCHAR cr[8];
if (RGBToHtml(ATL_COLOR_BODYBG, cr, sizeof(cr)))
m_HttpResponse << cr;
return HTTP_SUCCESS;
}
// Emits the number of DLLs currently in the cache.
[tag_name("GetNumEntries")]
HTTP_CODE GetNumEntries()
{
DWORD dwEntries = 0;
m_MgrObj.GetEntryCount(&dwEntries);
m_HttpResponse << dwEntries;
return HTTP_SUCCESS;
}
// Stencil iteration tag: on first call snapshots the cache entries into
// m_pEntries, then advances m_nEnumIndex per call; frees everything and
// returns HTTP_S_FALSE when the pass completes.
[tag_name("EnumEntries")]
HTTP_CODE EnumEntries()
{
// we lock the cache while we enum entries so no entries
// will be removed during the enumeration request.
if (m_nEnumIndex == INVALID_INDEX)
{
// set up for the iteration
m_MgrObj.GetEntryCount((DWORD*)&m_nEnumCount);
if (!m_nEnumCount)
return HTTP_S_FALSE; // nothing to enum
m_pEntries = new _DLL_CACHE_ENTRY[m_nEnumCount];
if (!m_pEntries)
return HTTP_ERROR(500, ISE_SUBERR_OUTOFMEM);
DWORD dwFetched = 0;
if (S_OK != m_MgrObj.GetEntries(m_nEnumCount, m_pEntries, &dwFetched))
{
// release the just-allocated array and reset the iteration
// state (previously leaked m_pEntries on this failure path).
delete [] m_pEntries;
m_pEntries = NULL;
m_nEnumCount = INVALID_INDEX;
return HTTP_ERROR(500, ISE_SUBERR_UNEXPECTED);
}
m_nEnumIndex = 0;
return HTTP_SUCCESS;
}
m_nEnumIndex++;
if (m_nEnumIndex < m_nEnumCount)
return HTTP_SUCCESS; // continue iterating
else
{
// done, clean up: the BSTRs were allocated for us by
// CDllMgrObject::GetEntries and are owned here.
for (int i = 0; i< m_nEnumCount; i++)
{
::SysFreeString(m_pEntries[i].szDllName);
}
delete [] m_pEntries;
m_pEntries = NULL;
m_nEnumCount = INVALID_INDEX;
m_nEnumIndex = INVALID_INDEX;
return HTTP_S_FALSE; // terminate iterations.
}
}
// Emits the current entry's DLL path. Only valid while EnumEntries is
// mid-iteration (m_pEntries non-NULL, m_nEnumIndex in range).
[tag_name("GetDllName")]
HTTP_CODE GetDllName()
{
m_HttpResponse << m_pEntries[m_nEnumIndex].szDllName;
return HTTP_SUCCESS;
}
// Emits the current entry's reference count; same validity rules.
[tag_name("GetDllReferences")]
HTTP_CODE GetDllReferences()
{
m_HttpResponse << m_pEntries[m_nEnumIndex].dwRefs;
return HTTP_SUCCESS;
}
// Per-request entry point: initialize, authorize (when an _Authority
// exists), then render the DLL manager stencil.
HTTP_CODE ValidateAndExchange()
{
HTTP_CODE hcErr = m_MgrObj.Initialize(m_spServiceProvider);
if (hcErr != HTTP_SUCCESS)
return hcErr;
__if_exists(_Authority)
{
// Make sure caller is authorized on this system
hcErr = HTTP_FAIL;
ATLTRY(hcErr = _Authority.IsAuthorized(m_pRequestInfo, ATL_DEFAULT_AUTHGRP))
if (hcErr != HTTP_SUCCESS)
return hcErr;
}
hcErr = LoadStencilResource(m_hInstHandler, IDR_DLLMGR_SRF);
m_HttpResponse.SetContentType("text/html");
return hcErr;
}
CDllMgrObject m_MgrObj;
long m_nColor;
int m_nEnumCount;
int m_nEnumIndex;
_DLL_CACHE_ENTRY *m_pEntries;
};
#endif // _ATL_DLLCACHE_NOUI
#endif // _ATL_DLLCACHE_MANAGEMENT
}; // ATL
#pragma pack(pop)
#pragma warning(pop)
#endif // __ATLEXTMGMT_H__
<|start_filename|>source/CLStencil/resource.h<|end_filename|>
//{{NO_DEPENDENCIES}}
// Microsoft Developer Studio generated include file.
// Used by clstencil.rc
//
// String-table resource ids for CLStencil's messages.
#define IDS_HEADER 101
#define IDS_NOTSUPPORTED 102
#define IDS_USAGE 103
#define IDS_ERROR 104
#define IDS_UNKNOWN_PARAM 105
#define IDS_INPUT_FILE 106
#define IDS_INVALID_ARGS 107
#define IDS_INIT_FAILED 108
#define IDS_SERVER_VARIABLE_NOT_FOUND 109
#define IDS_QS_TOO_LONG 110
#define IDS_CONTENT_TYPE_TOO_LONG 111
// Alias of IDS_CONTENT_TYPE_TOO_LONG (same id 111), kept for existing code.
#define IDS_CT_TOO_LONG 111
#define IDS_VERB_TOO_LONG 112
// Next default values for new objects
//
#ifdef APSTUDIO_INVOKED
#ifndef APSTUDIO_READONLY_SYMBOLS
#define _APS_NEXT_RESOURCE_VALUE 109
#define _APS_NEXT_COMMAND_VALUE 40001
#define _APS_NEXT_CONTROL_VALUE 1000
#define _APS_NEXT_SYMED_VALUE 101
#endif
#endif
<|start_filename|>source/SProxy/CppCodeGenerator.cpp<|end_filename|>
//
// CppCodeGenerator.cpp
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "CppCodeGenerator.h"
#include "Emit.h"
#include "resource.h"
#include "Element.h"
// Renders the sproxy SRF template into the generated C++ proxy header.
// wszFile: output path. pProxy: code model to render. bPragma: emit
// #pragma once-style output (consumed by tag handlers). bNoClobber: fail
// rather than overwrite an existing file (CREATE_NEW vs CREATE_ALWAYS).
// bEmitNamespace/szNamespace: wrap output in a C++ namespace; an empty
// (non-NULL) szNamespace suppresses the namespace entirely.
// Returns S_OK, or E_FAIL/stream error after emitting a diagnostic.
HRESULT CCppCodeGenerator::Generate(LPCWSTR wszFile, CCodeProxy *pProxy, bool bPragma, bool bNoClobber, bool bEmitNamespace, bool bGenProxy, const char *szNamespace)
{
ATLASSERT( (wszFile != NULL) && (*wszFile != L'\0') );
CWriteStreamOnFileA fileStream;
HRESULT hr = fileStream.Init(wszFile, (bNoClobber != false) ? CREATE_NEW : CREATE_ALWAYS);
if (FAILED(hr))
{
EmitErrorHr(hr);
return hr;
}
// Load and parse the stencil template from this module's resources.
CStencil s;
HMODULE hModule = _AtlBaseModule.GetResourceInstance();
if (hModule == NULL)
{
EmitError(IDS_SDL_INTERNAL);
return E_FAIL;
}
HTTP_CODE hcErr = s.LoadFromResource(hModule, IDR_SPROXYSRF, "SRF");
if (hcErr != HTTP_SUCCESS)
{
EmitError(IDS_SDL_INTERNAL);
return E_FAIL;
}
if (s.ParseReplacements(this) == false)
{
EmitError(IDS_SDL_INTERNAL);
return E_FAIL;
}
s.FinishParseReplacements();
if (s.ParseSuccessful() == false)
{
// In debug builds dump the stencil parse errors to stdout.
#ifdef _DEBUG
CWriteStreamOnStdout errStream;
s.RenderErrors(&errStream);
#endif
EmitError(IDS_SDL_INTERNAL);
return E_FAIL;
}
// Stash render state consumed by the On* tag handlers below.
m_pProxy = pProxy;
m_writeHelper.Attach(&fileStream);
m_bPragma = bPragma;
m_bGenProxy = bGenProxy;
m_szNamespace = szNamespace;
if (m_szNamespace && !*m_szNamespace)
{
m_bEmitNamespace = false;
}
else
{
m_bEmitNamespace = bEmitNamespace;
}
hcErr = s.Render(this, &fileStream);
if (hcErr != HTTP_SUCCESS)
{
EmitError(IDS_SDL_INTERNAL);
return E_FAIL;
}
return S_OK;
}
// ---- Stencil tag handlers for enum code generation ----
// Iteration tags return HTTP_SUCCESS while positioned on an item and
// HTTP_S_FALSE when exhausted (the stencil engine's loop convention).

// Advances the enum cursor; resets the element counter on the first call.
HTTP_CODE CCppCodeGenerator::OnGetNextEnum()
{
if (m_currEnumPos == NULL)
{
m_nCntr = 0;
m_currEnumPos = m_pProxy->GetFirstEnum();
}
else
{
m_pProxy->GetNextEnum(m_currEnumPos);
}
if (m_currEnumPos != NULL)
{
return HTTP_SUCCESS;
}
return HTTP_S_FALSE;
}
// Emits the current enum's name (same value as OnGetEnumName/QName).
HTTP_CODE CCppCodeGenerator::OnGetEnumSafeQName()
{
return WriteCString(GetCurrentEnum()->GetName());
}
// Advances the cursor over the current enum's elements.
HTTP_CODE CCppCodeGenerator::OnGetNextEnumElement()
{
CCodeEnum *p = GetCurrentEnum();
if (m_currEnumFieldPos == NULL)
{
m_currEnumFieldPos = p->GetFirstElement();
}
else
{
p->GetNextElement(m_currEnumFieldPos);
}
if (m_currEnumFieldPos != NULL)
{
return HTTP_SUCCESS;
}
return HTTP_S_FALSE;
}
// Emits the hash of the current enum element's code-type name.
HTTP_CODE CCppCodeGenerator::OnGetEnumElementHashW()
{
return GetHashW(GetCurrentEnumElement()->GetCodeTypeName());
}
// Emits the current enum element's code-type name.
HTTP_CODE CCppCodeGenerator::OnGetEnumElementName()
{
return WriteCString(GetCurrentEnumElement()->GetCodeTypeName());
}
// Emits a sequential numeric value for the element (post-incrementing
// the shared counter; see OnResetCounter).
HTTP_CODE CCppCodeGenerator::OnGetEnumElementValue()
{
m_writeHelper << m_nCntr++;
return HTTP_SUCCESS;
}
// Resets the shared counter used by value/index emitting tags.
HTTP_CODE CCppCodeGenerator::OnResetCounter()
{
m_nCntr = 0;
return HTTP_SUCCESS;
}
// Emits the hash of the current enum's name.
HTTP_CODE CCppCodeGenerator::OnGetEnumNameHashW()
{
return GetHashW(GetCurrentEnum()->GetName());
}
// Emits the current enum's name.
HTTP_CODE CCppCodeGenerator::OnGetEnumName()
{
return WriteCString(GetCurrentEnum()->GetName());
}
// Emits the current enum's qualified name (identical to GetName here).
HTTP_CODE CCppCodeGenerator::OnGetEnumQName()
{
return WriteCString(GetCurrentEnum()->GetName());
}
// ---- Stencil tag handlers for struct code generation ----

// Advances the struct cursor over the proxy's structs.
HTTP_CODE CCppCodeGenerator::OnGetNextStruct()
{
if (m_currStructPos == NULL)
{
m_currStructPos = m_pProxy->GetFirstStruct();
}
else
{
m_pProxy->GetNextStruct(m_currStructPos);
}
if (m_currStructPos != NULL)
{
return HTTP_SUCCESS;
}
return HTTP_S_FALSE;
}
// Emits the current struct's name (same value as OnGetStructName/QName).
HTTP_CODE CCppCodeGenerator::OnGetStructSafeQName()
{
return WriteCString(GetCurrentStruct()->GetName());
}
// Advances the cursor over the current struct's fields.
HTTP_CODE CCppCodeGenerator::OnGetNextStructField()
{
CCodeStruct *p = GetCurrentStruct();
if (m_currStructFieldPos == NULL)
{
m_currStructFieldPos = p->GetFirstElement();
}
else
{
p->GetNextElement(m_currStructFieldPos);
}
if (m_currStructFieldPos != NULL)
{
return HTTP_SUCCESS;
}
return HTTP_S_FALSE;
}
// Emits the C++ type of the current field.
HTTP_CODE CCppCodeGenerator::OnGetStructFieldType()
{
return GetCppType(GetCurrentStructField());
}
// Emits the current field's type suffix (e.g. array/pointer decoration).
HTTP_CODE CCppCodeGenerator::OnGetStructFieldSuffix()
{
return GetTypeSuffix(GetCurrentStructField());
}
// SUCCESS when the field may be null on the wire: explicitly nullable,
// a string/binary XSD type, or a dynamic array.
HTTP_CODE CCppCodeGenerator::OnIsFieldNullable()
{
if ((GetCurrentStructField()->GetFlags() & CODEFLAG_NULLABLE) ||
(GetCurrentStructField()->GetXSDType() == XSDTYPE_STRING) ||
(GetCurrentStructField()->GetXSDType() == XSDTYPE_BASE64BINARY) ||
(GetCurrentStructField()->GetXSDType() == XSDTYPE_HEXBINARY) ||
(OnIsFieldDynamicArray() == HTTP_SUCCESS))
{
return HTTP_SUCCESS;
}
return HTTP_S_FALSE;
}
// SUCCESS when the field is a dynamically sized array.
HTTP_CODE CCppCodeGenerator::OnIsFieldDynamicArray()
{
if (GetCurrentStructField()->GetFlags() & CODEFLAG_DYNARRAY)
{
return HTTP_SUCCESS;
}
return HTTP_S_FALSE;
}
// SUCCESS when the field is a generated wrapper around a dynamic array.
HTTP_CODE CCppCodeGenerator::OnIsFieldDynamicArrayWrapper()
{
if (GetCurrentStructField()->GetFlags() & CODEFLAG_DYNARRAYWRAPPER)
{
return HTTP_SUCCESS;
}
return HTTP_S_FALSE;
}
// SUCCESS when the field carries a size_is() attribute.
HTTP_CODE CCppCodeGenerator::OnFieldHasSizeIs()
{
if (GetCurrentStructField()->GetSizeIs().GetLength() != 0)
{
return HTTP_SUCCESS;
}
return HTTP_S_FALSE;
}
// Emits the name referenced by the current field's size_is().
HTTP_CODE CCppCodeGenerator::OnGetFieldSizeIsName()
{
return WriteCString(GetCurrentStructField()->GetSizeIs());
}
// Emits the zero-based index of the field named by the current field's
// size_is(); HTTP_FAIL when no sibling field matches.
HTTP_CODE CCppCodeGenerator::OnGetFieldSizeIsIndex()
{
CCodeStruct *p = GetCurrentStruct();
int nCntr = -1;
POSITION pos = p->GetFirstElement();
while (pos != NULL)
{
nCntr++;
CCodeTypedElement *pElem = p->GetNextElement(pos);
ATLASSERT( pElem != NULL );
if (pElem->GetName() == ((LPCSTR)GetCurrentStructField()->GetSizeIs()))
{
m_writeHelper << nCntr;
return HTTP_SUCCESS;
}
}
return HTTP_FAIL;
}
// SUCCESS when some sibling field's size_is() names the current field
// (i.e. this field is a length/size member).
HTTP_CODE CCppCodeGenerator::OnIsFieldSizeIs()
{
CCodeStruct *p = GetCurrentStruct();
POSITION pos = p->GetFirstElement();
while (pos != NULL)
{
CCodeTypedElement *pElem = p->GetNextElement(pos);
ATLASSERT( pElem != NULL );
if (GetCurrentStructField()->GetName() == ((LPCSTR)pElem->GetSizeIs()))
{
return HTTP_SUCCESS;
}
}
return HTTP_S_FALSE;
}
// Emits the current field's entry index, counting dynamic-array fields
// without an explicit size_is() as two entries (data + implicit size).
HTTP_CODE CCppCodeGenerator::OnGetCurrentFieldIndex()
{
CCodeStruct *p = GetCurrentStruct();
int nCntr = 0;
POSITION pos = p->GetFirstElement();
while ((pos != NULL) && (pos != m_currStructFieldPos))
{
nCntr++;
CCodeTypedElement *pElem = p->GetNextElement(pos);
if ((pElem->GetFlags() & CODEFLAG_DYNARRAY) && (pElem->GetSizeIs().GetLength()==0))
{
nCntr++;
}
}
m_writeHelper << nCntr;
return HTTP_SUCCESS;
}
// SUCCESS when the current field is a fixed-size array.
HTTP_CODE CCppCodeGenerator::OnIsFieldFixedArray()
{
return IsFixedArray(GetCurrentStructField());
}
// Emits the current field's name.
HTTP_CODE CCppCodeGenerator::OnGetStructFieldName()
{
return WriteCString(GetCurrentStructField()->GetName());
}
// Emits the current field's array dimension declaration (e.g. "[4][2]").
HTTP_CODE CCppCodeGenerator::OnGetStructFieldDimsDecl()
{
return GetDimsDecl(GetCurrentStructField());
}
// Emits the hash of the current field's name.
HTTP_CODE CCppCodeGenerator::OnGetStructFieldHashW()
{
return GetHashW(GetCurrentStructField()->GetName());
}
// Emits the ATL SOAP type constant for the current field.
HTTP_CODE CCppCodeGenerator::OnGetStructFieldAtlSoapType()
{
return GetAtlSoapType(GetCurrentStructField());
}
// Emits the current struct's qualified name (identical to GetName here).
HTTP_CODE CCppCodeGenerator::OnGetStructQName()
{
return WriteCString(GetCurrentStruct()->GetName());
}
// SUCCESS when the current field is a user-defined type.
HTTP_CODE CCppCodeGenerator::OnIsFieldUDT()
{
return IsUDT(GetCurrentStructField());
}
// Emits the current field's code-type name.
HTTP_CODE CCppCodeGenerator::OnGetStructFieldTypeSafeQName()
{
return WriteCString(GetCurrentStructField()->GetCodeTypeName());
}
// Emits the hash of the current struct's name.
HTTP_CODE CCppCodeGenerator::OnGetStructNameHashW()
{
return GetHashW(GetCurrentStruct()->GetName());
}
// Emits the current struct's name.
HTTP_CODE CCppCodeGenerator::OnGetStructName()
{
return WriteCString(GetCurrentStruct()->GetName());
}
// Emits the current struct's field count.
HTTP_CODE CCppCodeGenerator::OnGetStructFieldCount()
{
return (m_writeHelper.Write(GetCurrentStruct()->GetElementCount()) != FALSE) ? HTTP_SUCCESS : HTTP_FAIL;
}
// Advances the function cursor over the proxy's functions, tracking the
// function index; both reset once the iteration is exhausted.
HTTP_CODE CCppCodeGenerator::OnGetNextFunction()
{
if (m_currFunctionPos == NULL)
{
m_currFunctionPos = m_pProxy->GetFirstFunction();
m_nFuncIndex = 0;
}
else
{
m_pProxy->GetNextFunction(m_currFunctionPos);
m_nFuncIndex++;
}
if (m_currFunctionPos != NULL)
{
return HTTP_SUCCESS;
}
m_nFuncIndex = 0;
return HTTP_S_FALSE;
}
// Advances the parameter cursor within the current function, tracking the
// parameter index in the shared counter; both reset when exhausted.
HTTP_CODE CCppCodeGenerator::OnGetNextParameter()
{
CCodeFunction *p = GetCurrentFunction();
if (m_currParameterPos == NULL)
{
m_nCntr = 0;
m_currParameterPos = p->GetFirstElement();
}
else
{
m_nCntr++;
p->GetNextElement(m_currParameterPos);
}
if (m_currParameterPos != NULL)
{
return HTTP_SUCCESS;
}
m_nCntr = 0;
return HTTP_S_FALSE;
}
// ---- Stencil tag handlers for function-parameter code generation ----

// SUCCESS when the current parameter is a fixed-size array.
HTTP_CODE CCppCodeGenerator::OnIsParameterFixedArray()
{
return IsFixedArray(GetCurrentParameter());
}
// Emits the proxy class's name (same value as OnGetClassName).
HTTP_CODE CCppCodeGenerator::OnGetClassSafeQName()
{
return WriteCString(m_pProxy->GetClassName());
}
// Emits the current function's name.
HTTP_CODE CCppCodeGenerator::OnGetFunctionName()
{
return WriteCString(GetCurrentFunction()->GetName());
}
// Emits the current parameter's name, mapping the reserved name "return"
// to "__retval" so the generated C++ compiles.
HTTP_CODE CCppCodeGenerator::OnGetParameterName()
{
CCodeTypedElement *p = GetCurrentParameter();
if (p->GetName() == "return")
{
return (m_pStream->WriteStream("__retval", sizeof("__retval")-1, NULL) == S_OK) ?
HTTP_SUCCESS : HTTP_FAIL;
}
return WriteCString(GetCurrentParameter()->GetName());
}
// Emits the current parameter's name without the "return" mapping.
HTTP_CODE CCppCodeGenerator::OnGetParameterNameRaw()
{
return WriteCString(GetCurrentParameter()->GetName());
}
// Emits the current parameter's array dimension declaration.
HTTP_CODE CCppCodeGenerator::OnGetParameterDimsDecl()
{
return GetDimsDecl(GetCurrentParameter());
}
// Emits the C++ type of the current parameter.
HTTP_CODE CCppCodeGenerator::OnGetParameterType()
{
return GetCppType(GetCurrentParameter());
}
// SUCCESS when the current parameter is a dynamically sized array.
HTTP_CODE CCppCodeGenerator::OnIsParameterDynamicArray()
{
if (GetCurrentParameter()->GetFlags() & CODEFLAG_DYNARRAY)
{
return HTTP_SUCCESS;
}
return HTTP_S_FALSE;
}
// Emits the current parameter's type suffix.
HTTP_CODE CCppCodeGenerator::OnGetParameterSuffix()
{
return GetTypeSuffix(GetCurrentParameter());
}
// SUCCESS when the current parameter is an [in] parameter.
HTTP_CODE CCppCodeGenerator::OnIsInParameter()
{
if (GetCurrentParameter()->GetFlags() & CODEFLAG_IN)
{
return HTTP_SUCCESS;
}
return HTTP_S_FALSE;
}
HTTP_CODE CCppCodeGenerator::OnHasRetval()
{
// meaningless for client side stuff
return HTTP_S_FALSE;
}
HTTP_CODE CCppCodeGenerator::OnGetRetval()
{
// should never get called
ATLASSERT( FALSE );
return HTTP_FAIL;
}
// Emits the hash of the current parameter's name.
HTTP_CODE CCppCodeGenerator::OnGetParameterHashW()
{
return GetHashW(GetCurrentParameter()->GetName());
}
// Emits the ATL SOAP type constant for the current parameter.
HTTP_CODE CCppCodeGenerator::OnGetParameterAtlSoapType()
{
return GetAtlSoapType(GetCurrentParameter());
}
// SUCCESS when the current parameter may be null on the wire (same
// criteria as OnIsFieldNullable).
HTTP_CODE CCppCodeGenerator::OnIsParameterNullable()
{
if ((GetCurrentParameter()->GetFlags() & CODEFLAG_NULLABLE) ||
(GetCurrentParameter()->GetXSDType() == XSDTYPE_STRING) ||
(GetCurrentParameter()->GetXSDType() == XSDTYPE_BASE64BINARY) ||
(GetCurrentParameter()->GetXSDType() == XSDTYPE_HEXBINARY) ||
(GetCurrentParameter()->GetFlags() & CODEFLAG_DYNARRAY))
{
return HTTP_SUCCESS;
}
return HTTP_S_FALSE;
}
// SUCCESS when the current parameter is an [out] parameter.
HTTP_CODE CCppCodeGenerator::OnIsOutParameter()
{
if (GetCurrentParameter()->GetFlags() & CODEFLAG_OUT)
{
return HTTP_SUCCESS;
}
return HTTP_S_FALSE;
}
// SUCCESS when the current parameter is a user-defined type.
HTTP_CODE CCppCodeGenerator::OnIsParameterUDT()
{
return IsUDT(GetCurrentParameter());
}
/*
HTTP_CODE CCppCodeGenerator::OnNotIsParameterUDT()
{
return (OnIsParameterUDT() == HTTP_SUCCESS) ? HTTP_S_FALSE : HTTP_SUCCESS;
}
HTTP_CODE CCppCodeGenerator::OnIsRpcLiteralWithElement()
{
if ((GetCurrentParameter()->GetFlags() & (CODEFLAG_LITERAL | CODEFLAG_RPC | CODEFLAG_ELEMENT)) ==
(CODEFLAG_LITERAL | CODEFLAG_RPC | CODEFLAG_ELEMENT))
{
return HTTP_SUCCESS;
}
return HTTP_S_FALSE;
}
HTTP_CODE CCppCodeGenerator::OnNotIsParameterUDT_AND_IsRpcLiteralWithElement()
{
return ((OnNotIsParameterUDT() == HTTP_SUCCESS) &&
(OnIsRpcLiteralWithElement() == HTTP_SUCCESS))
? HTTP_SUCCESS : HTTP_S_FALSE;
}
HTTP_CODE CCppCodeGenerator::OnGetParameterElementHashW()
{
CXSDElement *pXSDElement = GetCurrentParameter()->GetElement();
if (pXSDElement->GetElementType() == XSD_ELEMENT)
{
CElement *pElem = static_cast<CElement *>(pXSDElement);
return GetHashW(pElem->GetName());
}
EmitError(IDS_SDL_INTERNAL);
return HTTP_FAIL;
}
HTTP_CODE CCppCodeGenerator::OnGetParameterElementName()
{
CXSDElement *pXSDElement = GetCurrentParameter()->GetElement();
if (pXSDElement->GetElementType() == XSD_ELEMENT)
{
CElement *pElem = static_cast<CElement *>(pXSDElement);
return WriteCString(pElem->GetName());
}
EmitError(IDS_SDL_INTERNAL);
return HTTP_FAIL;
}
*/
// Emits the current parameter's code-type name.
HTTP_CODE CCppCodeGenerator::OnGetParameterTypeQName()
{
return WriteCString(GetCurrentParameter()->GetCodeTypeName());
}
HTTP_CODE CCppCodeGenerator::OnGetSizeIsIndex()
{
// should never get called
ATLASSERT( FALSE );
return HTTP_FAIL;
}
// Emits and post-increments the shared parameter index counter.
HTTP_CODE CCppCodeGenerator::OnGetCurrentParameterIndex()
{
if (m_writeHelper.Write(m_nCntr++) != FALSE)
{
return HTTP_SUCCESS;
}
return HTTP_FAIL;
}
HTTP_CODE CCppCodeGenerator::OnResetParameterIndex()
{
// nothing to do here on the client side
return HTTP_SUCCESS;
}
HTTP_CODE CCppCodeGenerator::OnNotIsRetval()
{
// always true on the client side
return HTTP_SUCCESS;
}
// Emits the hash of the current function's name.
HTTP_CODE CCppCodeGenerator::OnGetFunctionNameHashW()
{
return GetHashW(GetCurrentFunction()->GetName());
}
// Emits the hash of the current function's response element name.
HTTP_CODE CCppCodeGenerator::OnGetFunctionResultNameHashW()
{
return GetHashW(GetCurrentFunction()->GetResponseName());
}
// Emits the current function's response element name.
HTTP_CODE CCppCodeGenerator::OnGetFunctionResultName()
{
return WriteCString(GetCurrentFunction()->GetResponseName());
}
// Emits the current function's request (send) element name.
HTTP_CODE CCppCodeGenerator::OnGetFunctionSendName()
{
return WriteCString(GetCurrentFunction()->GetSendName());
}
// Emits the number of [out] parameters of the current function. Runs the
// parameter iteration to completion, which leaves m_currParameterPos
// reset (NULL) as a side effect.
HTTP_CODE CCppCodeGenerator::OnGetExpectedParameterCount()
{
int nCnt = 0;
while (OnGetNextParameter() == HTTP_SUCCESS)
{
if (OnIsOutParameter() == HTTP_SUCCESS)
{
nCnt++;
}
}
if (m_writeHelper.Write(nCnt) != FALSE)
{
return HTTP_SUCCESS;
}
return HTTP_FAIL;
}
// ---- Call-flag predicates: map one CODEFLAG_* bit of the current
// function's call flags to SUCCESS/S_FALSE for stencil conditionals ----
HTTP_CODE CCppCodeGenerator::OnIsPAD()
{
return (GetCurrentFunction()->GetCallFlags() & CODEFLAG_PAD) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
HTTP_CODE CCppCodeGenerator::OnIsChain()
{
return (GetCurrentFunction()->GetCallFlags() & CODEFLAG_CHAIN) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
HTTP_CODE CCppCodeGenerator::OnIsPID()
{
return (GetCurrentFunction()->GetCallFlags() & CODEFLAG_PID) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
HTTP_CODE CCppCodeGenerator::OnIsDocument()
{
return (GetCurrentFunction()->GetCallFlags() & CODEFLAG_DOCUMENT) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
HTTP_CODE CCppCodeGenerator::OnIsRpc()
{
return (GetCurrentFunction()->GetCallFlags() & CODEFLAG_RPC) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
HTTP_CODE CCppCodeGenerator::OnIsOneWay()
{
return (GetCurrentFunction()->GetCallFlags() & CODEFLAG_ONEWAY) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
HTTP_CODE CCppCodeGenerator::OnIsLiteral()
{
return (GetCurrentFunction()->GetCallFlags() & CODEFLAG_LITERAL) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
HTTP_CODE CCppCodeGenerator::OnIsEncoded()
{
return (GetCurrentFunction()->GetCallFlags() & CODEFLAG_ENCODED) ? HTTP_SUCCESS : HTTP_S_FALSE;
}
// Emits the proxy class's name.
HTTP_CODE CCppCodeGenerator::OnGetClassName()
{
return WriteCString(m_pProxy->GetClassName());
}
// Logical negation of OnIsParameterFixedArray.
HTTP_CODE CCppCodeGenerator::OnNotIsParameterFixedArray()
{
return (IsFixedArray(GetCurrentParameter()) == HTTP_SUCCESS) ? HTTP_S_FALSE : HTTP_SUCCESS;
}
// SUCCESS while the current parameter is not the function's last one
// (peeks ahead on a copy of the cursor; does not advance it).
HTTP_CODE CCppCodeGenerator::OnNotIsLastParameter()
{
CCodeFunction *p = GetCurrentFunction();
POSITION pos = m_currParameterPos;
p->GetNextElement(pos);
if (pos == NULL)
{
return HTTP_S_FALSE;
}
return HTTP_SUCCESS;
}
// Emits the total element count of the current fixed-array parameter by
// multiplying its dimension extents together. HTTP_FAIL when the
// parameter has no dimensions or the write fails.
// NOTE(review): the loop runs j = 1..GetDimension(0), which implies index
// 0 stores the number of dimensions and indices 1..n the extents — TODO
// confirm against CCodeTypedElement.
HTTP_CODE CCppCodeGenerator::OnGetParameterFixedArraySize()
{
CCodeTypedElement *p = GetCurrentParameter();
if (p->GetDims() != 0)
{
int i = 1;
for (int j=1; j<=p->GetDimension(0); j++)
{
i*= p->GetDimension(j);
}
if (m_writeHelper.Write(i) != FALSE)
{
return HTTP_SUCCESS;
}
}
return HTTP_FAIL;
}
// Writes the current local date/time ("MM/DD/YYYY@HH:MM:SS") to the
// output stream, e.g. for a "generated on" banner in the emitted code.
// Returns HTTP_FAIL when formatting or the stream write fails.
HTTP_CODE CCppCodeGenerator::OnGetDateTime()
{
	SYSTEMTIME systime;
	SYSTEMTIME loctime;
	TIME_ZONE_INFORMATION tz;
	memset(&systime, 0x00, sizeof(systime));
	memset(&loctime, 0x00, sizeof(loctime));
	memset(&tz, 0x00, sizeof(tz));
	GetSystemTime(&systime);
	GetTimeZoneInformation(&tz);
	SystemTimeToTzSpecificLocalTime(&tz, &systime, &loctime);
	char szDate[256];
	// BUG FIX: seconds were formatted with "%d" while the other fields were
	// zero-padded, producing e.g. "12:05:7"; pad seconds to two digits too.
	int n = sprintf(szDate, "%.02d/%.02d/%d@%.02d:%.02d:%.02d",
		loctime.wMonth, loctime.wDay, loctime.wYear, loctime.wHour,
		loctime.wMinute, loctime.wSecond);
	if (n < 0)
	{
		return HTTP_FAIL;
	}
	return (m_pStream->WriteStream(szDate, n, NULL) == S_OK) ? HTTP_SUCCESS : HTTP_FAIL;
}
// Writes the service address URI from the WSDL to the output stream.
HTTP_CODE CCppCodeGenerator::OnGetURL()
{
	return WriteCString(m_pProxy->GetAddressUri());
}
// Writes the SOAPAction string of the current function to the output.
HTTP_CODE CCppCodeGenerator::OnGetSoapAction()
{
	// local copy of the action string; previously carried a misleading
	// "m_" member prefix despite being function-local
	CStringA strSoapAction = GetCurrentFunction()->GetSoapAction();
	return WriteCString(strSoapAction);
}
// Writes the WSDL target namespace to the output stream.
HTTP_CODE CCppCodeGenerator::OnGetNamespace()
{
	return WriteCString(m_pProxy->GetTargetNamespace());
}
// HTTP_SUCCESS when C++ namespace emission was requested (m_bEmitNamespace).
HTTP_CODE CCppCodeGenerator::OnEmitNamespace()
{
	if (m_bEmitNamespace != false)
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
// Writes the C++ namespace name: the explicit user-supplied name
// (m_szNamespace) when present and non-empty, otherwise the service name.
HTTP_CODE CCppCodeGenerator::OnGetCppNamespace()
{
	ATLASSERT( m_bEmitNamespace != false );
	if ((m_szNamespace != NULL) && (*m_szNamespace))
	{
		return WriteCString(CStringA(m_szNamespace));
		//return (m_writeHelper.Write(m_szNamespace) == TRUE) ? HTTP_SUCCESS : HTTP_FAIL;
	}
	return WriteCString(m_pProxy->GetServiceName());
}
// HTTP_SUCCESS when the proxy class has at least one SOAP header.
HTTP_CODE CCppCodeGenerator::OnClassHasHeaders()
{
	POSITION pos = m_pProxy->GetFirstHeader();
	if (pos != NULL)
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
// Advances the member cursor (m_currMemberPos) over the proxy's header
// list; HTTP_SUCCESS while a current member exists, HTTP_S_FALSE at end.
HTTP_CODE CCppCodeGenerator::OnGetNextMember()
{
	if (m_currMemberPos == NULL)
	{
		m_currMemberPos = m_pProxy->GetFirstHeader();
	}
	else
	{
		m_pProxy->GetNextHeader(m_currMemberPos);
	}
	if (m_currMemberPos != NULL)
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
// Emits the C++ type of the current member.
HTTP_CODE CCppCodeGenerator::OnGetMemberType()
{
	return GetCppType(GetCurrentMember());
}
// Emits the current member's name.
HTTP_CODE CCppCodeGenerator::OnGetMemberName()
{
	return WriteCString(GetCurrentMember()->GetName());
}
// Emits the declaration suffix (e.g. array/pointer decoration) for the member.
HTTP_CODE CCppCodeGenerator::OnGetMemberSuffix()
{
	return GetTypeSuffix(GetCurrentMember());
}
// HTTP_SUCCESS when the current member is a fixed-size array.
HTTP_CODE CCppCodeGenerator::OnIsMemberFixedArray()
{
	if (GetCurrentMember()->GetDims() != 0)
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
// HTTP_SUCCESS when the current member is a struct (UDT).
HTTP_CODE CCppCodeGenerator::OnIsMemberUDT()
{
	if (GetCurrentMember()->GetCodeType() == CODETYPE_STRUCT)
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
// HTTP_SUCCESS when the current member is an enum.
HTTP_CODE CCppCodeGenerator::OnIsMemberEnum()
{
	if (GetCurrentMember()->GetCodeType() == CODETYPE_ENUM)
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
// Advances the header cursor (m_currHeaderPos) over the current
// function's header list; HTTP_SUCCESS while a header is current.
HTTP_CODE CCppCodeGenerator::OnGetNextHeader()
{
	CCodeFunction *p = GetCurrentFunction();
	if (m_currHeaderPos == NULL)
	{
		m_currHeaderPos = p->GetFirstHeader();
	}
	else
	{
		p->GetNextHeader(m_currHeaderPos);
	}
	if (m_currHeaderPos != NULL)
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
// HTTP_SUCCESS when the current header is a fixed-size array.
HTTP_CODE CCppCodeGenerator::OnIsHeaderFixedArray()
{
	return IsFixedArray(GetCurrentHeader());
}
// Emits the current header's name.
HTTP_CODE CCppCodeGenerator::OnGetHeaderValue()
{
	return WriteCString(GetCurrentHeader()->GetName());
}
// Emits the array-dimensions declaration for the current header.
HTTP_CODE CCppCodeGenerator::OnGetHeaderDimsDecl()
{
	return GetDimsDecl(GetCurrentHeader());
}
// Emits the wide-string hash of the current header's name.
HTTP_CODE CCppCodeGenerator::OnGetHeaderHashW()
{
	return GetHashW(GetCurrentHeader()->GetName());
}
// Emits the ATL SOAP type constant for the current header.
HTTP_CODE CCppCodeGenerator::OnGetHeaderAtlSoapType()
{
	return GetAtlSoapType(GetCurrentHeader());
}
// Direction / requirement predicates over the current header's flags.
HTTP_CODE CCppCodeGenerator::OnIsInHeader()
{
	if (GetCurrentHeader()->GetFlags() & CODEFLAG_IN)
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
HTTP_CODE CCppCodeGenerator::OnIsOutHeader()
{
	if (GetCurrentHeader()->GetFlags() & CODEFLAG_OUT)
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
HTTP_CODE CCppCodeGenerator::OnIsRequiredHeader()
{
	if (GetCurrentHeader()->GetFlags() & CODEFLAG_MUSTUNDERSTAND)
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
// A header is treated as nullable when its XSD type is string.
HTTP_CODE CCppCodeGenerator::OnIsHeaderNullable()
{
	if (GetCurrentHeader()->GetXSDType() == XSDTYPE_STRING)
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
// HTTP_SUCCESS when the current header is a user-defined type.
HTTP_CODE CCppCodeGenerator::OnIsHeaderUDT()
{
	return IsUDT(GetCurrentHeader());
}
// Emits the qualified type name of the current header.
HTTP_CODE CCppCodeGenerator::OnGetHeaderTypeQName()
{
	return WriteCString(GetCurrentHeader()->GetCodeTypeName());
}
// Writes the number of mustUnderstand ("expected") headers of the current
// function to the output stream.  Iterates on a local cursor, so the
// shared header cursor is untouched.
// NOTE(review): unlike OnGetExpectedParameterCount, the write result is
// not checked here -- confirm this is intentional.
HTTP_CODE CCppCodeGenerator::OnGetExpectedHeaderCount()
{
	CCodeFunction *p = GetCurrentFunction();
	POSITION pos = p->GetFirstHeader();
	int nCnt = 0;
	while (pos != NULL)
	{
		CCodeTypedElement *pElem = p->GetNextHeader(pos);
		if (pElem->GetFlags() & CODEFLAG_MUSTUNDERSTAND)
		{
			nCnt++;
		}
	}
	m_writeHelper << nCnt;
	return HTTP_SUCCESS;
}
// HTTP_SUCCESS when the current header carries its own XML namespace.
HTTP_CODE CCppCodeGenerator::OnHeaderHasNamespace()
{
	if (GetCurrentHeader()->GetNamespace().GetLength() != 0)
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
// Emits the current header's namespace, and its wide-string hash.
HTTP_CODE CCppCodeGenerator::OnGetHeaderNamespace()
{
	return WriteCString(GetCurrentHeader()->GetNamespace());
}
HTTP_CODE CCppCodeGenerator::OnGetHeaderNamespaceHashW()
{
	return GetHashW(GetCurrentHeader()->GetNamespace());
}
// Writes the zero-based index of the current function.
HTTP_CODE CCppCodeGenerator::OnGetFunctionIndex()
{
	m_writeHelper << m_nFuncIndex;
	return HTTP_SUCCESS;
}
// HTTP_SUCCESS when #pragma emission was requested (m_bPragma).
HTTP_CODE CCppCodeGenerator::OnEmitPragma()
{
	if (m_bPragma != false)
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
// Enum namespace accessors: hash, uniqueness check against the target
// namespace, and the namespace text itself.
HTTP_CODE CCppCodeGenerator::OnGetEnumNamespaceHashW()
{
	return GetHashW(GetCurrentEnum()->GetNamespace());
}
HTTP_CODE CCppCodeGenerator::OnEnumHasUniqueNamespace()
{
	if (GetCurrentEnum()->GetNamespace() != m_pProxy->GetTargetNamespace())
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
HTTP_CODE CCppCodeGenerator::OnGetEnumNamespace()
{
	return WriteCString(GetCurrentEnum()->GetNamespace());
}
// Struct namespace accessors, parallel to the enum versions above.
HTTP_CODE CCppCodeGenerator::OnGetStructNamespaceHashW()
{
	return GetHashW(GetCurrentStruct()->GetNamespace());
}
HTTP_CODE CCppCodeGenerator::OnStructHasUniqueNamespace()
{
	if (GetCurrentStruct()->GetNamespace() != m_pProxy->GetTargetNamespace())
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
HTTP_CODE CCppCodeGenerator::OnGetStructNamespace()
{
	return WriteCString(GetCurrentStruct()->GetNamespace());
}
// Function namespace accessors.
HTTP_CODE CCppCodeGenerator::OnGetFunctionNamespaceHashW()
{
	return GetHashW(GetCurrentFunction()->GetNamespace());
}
HTTP_CODE CCppCodeGenerator::OnGetFunctionNamespace()
{
	return WriteCString(GetCurrentFunction()->GetNamespace());
}
// Safe-naming handlers: emit C++-safe identifiers (GetSafeName /
// GetSafeCodeTypeName / GetSafeCppType) instead of the raw WSDL names
// used by the handlers above.
HTTP_CODE CCppCodeGenerator::OnGetHeaderTypeCppQName()
{
	return WriteCString(GetCurrentHeader()->GetSafeCodeTypeName());
}
HTTP_CODE CCppCodeGenerator::OnGetMemberCppType()
{
	return GetSafeCppType(GetCurrentMember());
}
HTTP_CODE CCppCodeGenerator::OnGetMemberCppName()
{
	return WriteCString(GetCurrentMember()->GetSafeName());
}
HTTP_CODE CCppCodeGenerator::OnGetParameterTypeCppQName()
{
	return WriteCString(GetCurrentParameter()->GetSafeCodeTypeName());
}
HTTP_CODE CCppCodeGenerator::OnGetFunctionCppName()
{
	return WriteCString(GetCurrentFunction()->GetSafeName());
}
// Emits the C++ name of the current parameter.  A parameter literally
// named "return" (a C++ keyword) is mapped to "__retval".
HTTP_CODE CCppCodeGenerator::OnGetParameterCppName()
{
	CCodeTypedElement *p = GetCurrentParameter();
	if (p->GetName() == "return")
	{
		return (m_pStream->WriteStream("__retval", sizeof("__retval")-1, NULL) == S_OK) ?
			HTTP_SUCCESS : HTTP_FAIL;
	}
	return WriteCString(GetCurrentParameter()->GetSafeName());
}
HTTP_CODE CCppCodeGenerator::OnGetParameterCppType()
{
	return GetSafeCppType(GetCurrentParameter());
}
HTTP_CODE CCppCodeGenerator::OnGetStructFieldCppName()
{
	return WriteCString(GetCurrentStructField()->GetSafeName());
}
HTTP_CODE CCppCodeGenerator::OnGetStructCppQName()
{
	return WriteCString(GetCurrentStruct()->GetSafeName());
}
HTTP_CODE CCppCodeGenerator::OnGetStructCppName()
{
	return WriteCString(GetCurrentStruct()->GetSafeName());
}
HTTP_CODE CCppCodeGenerator::OnGetStructSafeCppQName()
{
	return WriteCString(GetCurrentStruct()->GetSafeName());
}
HTTP_CODE CCppCodeGenerator::OnGetStructFieldCppType()
{
	return GetSafeCppType(GetCurrentStructField());
}
HTTP_CODE CCppCodeGenerator::OnGetEnumCppName()
{
	return WriteCString(GetCurrentEnum()->GetSafeName());
}
HTTP_CODE CCppCodeGenerator::OnGetEnumSafeCppQName()
{
	return WriteCString(GetCurrentEnum()->GetSafeName());
}
HTTP_CODE CCppCodeGenerator::OnGetEnumElementCppName()
{
	return WriteCString(GetCurrentEnumElement()->GetSafeCodeTypeName());
}
HTTP_CODE CCppCodeGenerator::OnGetStructFieldTypeSafeCppQName()
{
	return WriteCString(GetCurrentStructField()->GetSafeCodeTypeName());
}
HTTP_CODE CCppCodeGenerator::OnGetHeaderCppValue()
{
	return WriteCString(GetCurrentHeader()->GetSafeName());
}
HTTP_CODE CCppCodeGenerator::OnGetEnumCppQName()
{
	return WriteCString(GetCurrentEnum()->GetSafeName());
}
// HTTP_SUCCESS when proxy generation was requested (m_bGenProxy).
HTTP_CODE CCppCodeGenerator::OnGenProxy()
{
	if (m_bGenProxy != false)
	{
		return HTTP_SUCCESS;
	}
	return HTTP_S_FALSE;
}
<|start_filename|>include/atlserr.h<|end_filename|>
// This is a part of the Active Template Library.
// Copyright (C) Microsoft Corporation
// All rights reserved.
//
// This source code is only intended as a supplement to the
// Active Template Library Reference and related
// electronic documentation provided with the library.
// See these sources for detailed information regarding the
// Active Template Library product.
#ifndef __ATLSERR_H__
#define __ATLSERR_H__
#pragma once
#pragma pack(push,_ATL_PACKING)
namespace ATL{
#define VALIDATION_S_OK 0x00000000
#define VALIDATION_S_EMPTY 0x00000001
#define VALIDATION_E_PARAMNOTFOUND 0x00000002
#define VALIDATION_E_LENGTHMIN 0x80000083
#define VALIDATION_E_LENGTHMAX 0x80000084
#define VALIDATION_E_INVALIDLENGTH 0x80000080
#define VALIDATION_E_INVALIDPARAM 0x80000005
#define VALIDATION_E_FAIL 0x80000006
#define VALIDATION_SUCCEEDED(x) (((x == VALIDATION_S_OK) || (x == VALIDATION_S_EMPTY )))
typedef DWORD HTTP_CODE;
#define HTTP_ERROR(err, sub) ((HTTP_CODE)(DWORD_PTR)MAKELONG((WORD)err, (WORD)sub))
#define HTTP_ERROR_CODE(err) ((DWORD)LOWORD(err))
#define HTTP_SUBERROR_CODE(err) ((DWORD)HIWORD(err))
#define HTTP_SUCCESS HTTP_ERROR(0, 0)
#define SUBERR_NONE 0
#define ISE_SUBERR_BADSRF 1
#define ISE_SUBERR_HNDLFAIL 2
#define ISE_SUBERR_SYSOBJFAIL 3
#define ISE_SUBERR_READFILEFAIL 4
#define ISE_SUBERR_LOADFILEFAIL 6
#define ISE_SUBERR_LOADLIB 7
#define ISE_SUBERR_HANDLERIF 8
#define ISE_SUBERR_OUTOFMEM 9
#define ISE_SUBERR_UNEXPECTED 10
#define ISE_SUBERR_STENCIL_INVALIDFUNCOFFSET 11
#define ISE_SUBERR_STENCIL_MISMATCHWHILE 12
#define ISE_SUBERR_STENCIL_MISMATCHIF 13
#define ISE_SUBERR_STENCIL_UNEXPECTEDTYPE 14
#define ISE_SUBERR_STENCIL_INVALIDINDEX 15
#define ISE_SUBERR_STENCIL_INDEXOUTOFRANGE 16
#define ISE_SUBERR_STENCIL_PARSE_FAIL 17
#define ISE_SUBERR_STENCIL_LOAD_FAIL 18
#define ISE_SUBERR_HANDLER_NOT_FOUND 19
#define ISE_SUBERR_BAD_HANDLER_TAG 20
#define ISE_SUBERR_NO_HANDLER_TAG 21
#define ISE_SUBERR_LONGMETHODNAME 22
#define ISE_SUBERR_LONGHANDLERNAME 23
#define ISE_SUBERR_IMPERSONATIONFAILED 24
#define ISE_SUBERR_ISAPISTARTUPFAILED 25
#define ISE_SUBERR_SOAPNOSOAPACTION 26
#define SUBERR_NO_PROCESS 27
#define SUBERR_S_FALSE 28
#define SUBERR_ASYNC 29
#define SUBERR_ASYNC_DONE 30
#define SUBERR_ASYNC_NOFLUSH 31
#define SUBERR_ASYNC_NOFLUSH_DONE 32
#define SUBERR_NO_CACHE 33
#define DBG_SUBERR_ALREADY_DEBUGGING 34
#define DBG_SUBERR_NOT_DEBUGGING 35
#define DBG_SUBERR_INVALID_SESSION 36
#define DBG_SUBERR_BAD_ID 37
#define DBG_SUBERR_COCREATE 38
#define DBG_SUBERR_ATTACH 39
#define HTTP_FAIL HTTP_ERROR(500, SUBERR_NONE)
#define HTTP_SUCCESS_NO_PROCESS HTTP_ERROR(200, SUBERR_NO_PROCESS)
#define HTTP_S_FALSE HTTP_ERROR(HTTP_ERROR_CODE(HTTP_SUCCESS), SUBERR_S_FALSE)
#define HTTP_SUCCESS_ASYNC HTTP_ERROR(200, SUBERR_ASYNC)
#define HTTP_SUCCESS_ASYNC_DONE HTTP_ERROR(200, SUBERR_ASYNC_DONE)
#define HTTP_SUCCESS_ASYNC_NOFLUSH HTTP_ERROR(200, SUBERR_ASYNC_NOFLUSH)
#define HTTP_SUCCESS_ASYNC_NOFLUSH_DONE HTTP_ERROR(200, SUBERR_ASYNC_NOFLUSH_DONE)
#define HTTP_SUCCESS_NO_CACHE HTTP_ERROR(200, SUBERR_NO_CACHE)
#define HTTP_OK HTTP_ERROR(200, SUBERR_NONE)
#define HTTP_CONTINUE HTTP_ERROR(100, SUBERR_NONE)
#define HTTP_CREATED HTTP_ERROR(201, SUBERR_NONE)
#define HTTP_ACCEPTED HTTP_ERROR(202, SUBERR_NONE)
#define HTTP_NON_AUTHORITATIVE HTTP_ERROR(203, SUBERR_NONE)
#define HTTP_NO_CONTENT HTTP_ERROR(204, SUBERR_NONE)
#define HTTP_RESET_CONTENT HTTP_ERROR(205, SUBERR_NONE)
#define HTTP_PARTIAL_CONTENT HTTP_ERROR(206, SUBERR_NONE)
#define HTTP_MULTIPLE_CHOICES HTTP_ERROR(300, SUBERR_NONE)
#define HTTP_MOVED_PERMANENTLY HTTP_ERROR(301, SUBERR_NONE)
#define HTTP_FOUND HTTP_ERROR(302, SUBERR_NONE)
#define HTTP_SEE_OTHER HTTP_ERROR(303, SUBERR_NONE)
#define HTTP_NOT_MODIFIED HTTP_ERROR(304, SUBERR_NONE)
#define HTTP_USE_PROXY HTTP_ERROR(305, SUBERR_NONE)
#define HTTP_TEMPORARY_REDIRECT HTTP_ERROR(307, SUBERR_NONE)
#define HTTP_BAD_REQUEST HTTP_ERROR(400, SUBERR_NONE)
#define HTTP_UNAUTHORIZED HTTP_ERROR(401, SUBERR_NONE)
#define HTTP_PAYMENT_REQUIRED HTTP_ERROR(402, SUBERR_NONE)
#define HTTP_FORBIDDEN HTTP_ERROR(403, SUBERR_NONE)
#define HTTP_NOT_FOUND HTTP_ERROR(404, SUBERR_NONE)
#define HTTP_METHOD_NOT_ALLOWED HTTP_ERROR(405, SUBERR_NONE)
#define HTTP_NOT_ACCEPTABLE HTTP_ERROR(406, SUBERR_NONE)
#define HTTP_PROXY_AUTHENTICATION_REQUIRED HTTP_ERROR(407, SUBERR_NONE)
#define HTTP_REQUEST_TIMEOUT HTTP_ERROR(408, SUBERR_NONE)
#define HTTP_CONFLICT HTTP_ERROR(409, SUBERR_NONE)
#define HTTP_GONE HTTP_ERROR(410, SUBERR_NONE)
#define HTTP_LENGTH_REQUIRED HTTP_ERROR(411, SUBERR_NONE)
#define HTTP_PRECONDITION_FAILED HTTP_ERROR(412, SUBERR_NONE)
#define HTTP_REQUEST_ENTITY_TOO_LONG HTTP_ERROR(413, SUBERR_NONE)
#define HTTP_REQUEST_URI_TOO_LONG HTTP_ERROR(414, SUBERR_NONE)
#define HTTP_UNSUPPORTED_MEDIA_TYPE HTTP_ERROR(415, SUBERR_NONE)
#define HTTP_RANGE_NOT_SATISFIABLE HTTP_ERROR(416, SUBERR_NONE)
#define HTTP_EXPECTATION_FAILED HTTP_ERROR(417, SUBERR_NONE)
#define HTTP_INTERNAL_SERVER_ERROR HTTP_ERROR(500, SUBERR_NONE)
#define HTTP_NOT_IMPLEMENTED HTTP_ERROR(501, SUBERR_NONE)
#define HTTP_BAD_GATEWAY HTTP_ERROR(502, SUBERR_NONE)
#define HTTP_SERVICE_UNAVAILABLE HTTP_ERROR(503, SUBERR_NONE)
#define HTTP_GATEWAY_TIMEOUT HTTP_ERROR(504, SUBERR_NONE)
#define HTTP_VERSION_NOT_SUPPORTED HTTP_ERROR(505, SUBERR_NONE)
// Classification helpers for the composite asynchronous HTTP_CODE values.
// The four async success codes partition two independent ways:
//   continue vs. done   -- ASYNC / ASYNC_NOFLUSH  vs.  *_DONE
//   flush vs. no-flush  -- ASYNC / ASYNC_DONE     vs.  *_NOFLUSH*
inline bool IsAsyncContinueStatus(HTTP_CODE hcStatus)
{
	return
		hcStatus == HTTP_SUCCESS_ASYNC ||
		hcStatus == HTTP_SUCCESS_ASYNC_NOFLUSH;
}
inline bool IsAsyncDoneStatus(HTTP_CODE hcStatus)
{
	return
		hcStatus == HTTP_SUCCESS_ASYNC_DONE ||
		hcStatus == HTTP_SUCCESS_ASYNC_NOFLUSH_DONE;
}
inline bool IsAsyncFlushStatus(HTTP_CODE hcStatus)
{
	return
		hcStatus == HTTP_SUCCESS_ASYNC ||
		hcStatus == HTTP_SUCCESS_ASYNC_DONE;
}
inline bool IsAsyncNoFlushStatus(HTTP_CODE hcStatus)
{
	return
		hcStatus == HTTP_SUCCESS_ASYNC_NOFLUSH ||
		hcStatus == HTTP_SUCCESS_ASYNC_NOFLUSH_DONE;
}
// True for any of the four asynchronous success codes (union of the
// continue and done sets).
inline bool IsAsyncStatus(HTTP_CODE hcStatus)
{
	return IsAsyncContinueStatus(hcStatus) || IsAsyncDoneStatus(hcStatus);
}
// Builds a composite HTTP_CODE from an HTTP status and a sub-error code.
ATL_NOINLINE inline HTTP_CODE AtlsHttpError(WORD wStatus, WORD wSubErr) throw()
{
	return HTTP_ERROR(wStatus, wSubErr);
}
}; // namespace ATL
#pragma pack(pop)
#endif // __ATLSERR_H__
<|start_filename|>source/SProxy/utils/mkutils.cmd<|end_filename|>
REM Build the SProxy helper tools (static_hash, cppkw) as size-optimized
REM release binaries, linking only msvcrt.lib/kernel32.lib with default
REM libraries disabled and unreferenced/identical COMDATs folded.
cl /GF /O1 /Os /Gy /Oi /Ob2 /Og /Oy /MD static_hash.c /link /NODEFAULTLIB /OPT:REF /OPT:ICF,32 msvcrt.lib kernel32.lib
cl /GF /O1 /Os /Gy /Oi /Ob2 /Og /Oy /MD cppkw.c /link /NODEFAULTLIB /OPT:REF /OPT:ICF,32 msvcrt.lib kernel32.lib
<|start_filename|>source/SProxy/Attribute.h<|end_filename|>
//
// Attribute.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "XSDElement.h"
#include "QName.h"
// XML Schema attribute "form" values ("qualified" / "unqualified").
enum ATTRIBUTEFORM
{
	ATTRFORM_UNK = 0,
	ATTRFORM_QUALIFIED,
	ATTRFORM_UNQUALIFIED
};
// Maps the text of an XSD "form" attribute (wsz, exactly cch characters,
// not necessarily NUL-terminated) to an ATTRIBUTEFORM.
// Returns ATTRFORM_UNK when the text matches no known form.
inline ATTRIBUTEFORM GetAttributeForm(const wchar_t *wsz, int cch)
{
	struct _attrform
	{
		// FIX: const -- binding a wide string literal to non-const
		// wchar_t* is ill-formed in C++11 and later
		const wchar_t *wsz;
		int cch;
		ATTRIBUTEFORM attrform;
	};
	ATTRIBUTEFORM retForm = ATTRFORM_UNK;
	// data driven is kind of overkill for two options, but makes it
	// easy to extend later
	static const _attrform s_forms[] =
	{
		{ L"qualified", sizeof("qualified")-1, ATTRFORM_QUALIFIED },
		{ L"unqualified", sizeof("unqualified")-1, ATTRFORM_UNQUALIFIED }
	};
	// compare length first so partial matches are rejected cheaply
	for (size_t i=0; i<(sizeof(s_forms)/sizeof(s_forms[0])); i++)
	{
		if (cch == s_forms[i].cch && !wcsncmp(wsz, s_forms[i].wsz, cch))
		{
			retForm = s_forms[i].attrform;
			break;
		}
	}
	return retForm;
}
// XML Schema attribute "use" values, plus default/fixed value handling.
enum ATTRIBUTEUSE
{
	ATTRUSE_UNK = 0,
	ATTRUSE_PROHIBITED,
	ATTRUSE_OPTIONAL,
	ATTRUSE_REQUIRED,
	ATTRUSE_DEFAULT,
	ATTRUSE_FIXED
};
// Maps the text of an XSD "use" attribute (wsz, exactly cch characters,
// not necessarily NUL-terminated) to an ATTRIBUTEUSE.
// Returns ATTRUSE_UNK when the text matches no known use.
inline ATTRIBUTEUSE GetAttributeUse(const wchar_t *wsz, int cch)
{
	struct _attruse
	{
		// FIX: const -- binding a wide string literal to non-const
		// wchar_t* is ill-formed in C++11 and later
		const wchar_t *wsz;
		int cch;
		ATTRIBUTEUSE attruse;
	};
	ATTRIBUTEUSE retUse = ATTRUSE_UNK;
	// data driven is kind of overkill for two options, but makes it
	// easy to extend later
	static const _attruse s_uses[] =
	{
		{ L"prohibited", sizeof("prohibited")-1, ATTRUSE_PROHIBITED },
		{ L"optional", sizeof("optional")-1, ATTRUSE_OPTIONAL },
		{ L"required", sizeof("required")-1, ATTRUSE_REQUIRED },
		{ L"default", sizeof("default")-1, ATTRUSE_DEFAULT },
		{ L"fixed", sizeof("fixed")-1, ATTRUSE_FIXED },
	};
	// compare length first so partial matches are rejected cheaply
	for (size_t i=0; i<(sizeof(s_uses)/sizeof(s_uses[0])); i++)
	{
		if (cch == s_uses[i].cch && !wcsncmp(wsz, s_uses[i].wsz, cch))
		{
			retUse = s_uses[i].attruse;
			break;
		}
	}
	return retUse;
}
// Model of an XSD <attribute> declaration: name, type/ref QNames, value,
// id, form/use, and the WSDL:arrayType extension attribute.  Pure data
// holder; all accessors are trivial.
class CAttribute : public CXSDElement
{
private:
	ATTRIBUTEFORM m_attrForm;   // qualified/unqualified; ATTRFORM_UNK until set
	ATTRIBUTEUSE m_attrUse;     // optional/required/...; ATTRUSE_UNK until set
	CStringW m_strName;
	CQName m_ref;               // "ref" QName when the attribute references another
	CQName m_type;              // "type" QName
	CStringW m_strValue;
	CStringW m_strID;
	// WSDL:arrayType attribute
	CStringW m_strArrayType;
protected:
public:
	// REVIEW: set to defaults?
	CAttribute()
	:m_attrForm(ATTRFORM_UNK), m_attrUse(ATTRUSE_UNK)
	{
	}
	// Name accessors.  The pointer overloads reject NULL with E_FAIL.
	inline HRESULT SetName(const wchar_t *wszName, int cchName)
	{
		if (!wszName)
		{
			return E_FAIL;
		}
		m_strName.SetString(wszName, cchName);
		return S_OK;
	}
	inline HRESULT SetName(const CStringW& strName)
	{
		m_strName = strName;
		return S_OK;
	}
	inline const CStringW& GetName()
	{
		return m_strName;
	}
	// WSDL:arrayType accessors.
	inline HRESULT SetArrayType(const wchar_t *wszArrayType, int cchArrayType)
	{
		if (!wszArrayType)
		{
			return E_FAIL;
		}
		m_strArrayType.SetString(wszArrayType, cchArrayType);
		return S_OK;
	}
	inline HRESULT SetArrayType(const CStringW& strArrayType)
	{
		m_strArrayType = strArrayType;
		return S_OK;
	}
	inline const CStringW& GetArrayType()
	{
		return m_strArrayType;
	}
	// Value accessors.
	inline HRESULT SetValue(const wchar_t *wszValue, int cchValue)
	{
		if (!wszValue)
		{
			return E_FAIL;
		}
		m_strValue.SetString(wszValue, cchValue);
		return S_OK;
	}
	inline HRESULT SetValue(const CStringW& strValue)
	{
		m_strValue = strValue;
		return S_OK;
	}
	inline const CStringW& GetValue()
	{
		return m_strValue;
	}
	// ID accessors.
	inline HRESULT SetID(const wchar_t *wszID, int cchID)
	{
		if (!wszID)
		{
			return E_FAIL;
		}
		m_strID.SetString(wszID, cchID);
		return S_OK;
	}
	inline HRESULT SetID(const CStringW& strID)
	{
		m_strID = strID;
		return S_OK;
	}
	inline const CStringW& GetID()
	{
		return m_strID;
	}
	// Type QName accessors.
	inline HRESULT SetType(const CStringW& strQName)
	{
		m_type.SetQName(strQName);
		return S_OK;
	}
	inline HRESULT SetType(const CStringW& strPrefix, const CStringW& strName)
	{
		m_type.SetQName(strPrefix, strName);
		return S_OK;
	}
	inline HRESULT SetType(const wchar_t *wszQName, int cchQName)
	{
		m_type.SetQName(wszQName, cchQName);
		return S_OK;
	}
	inline CQName& GetTypeName()
	{
		return m_type;
	}
	// Ref QName accessors.
	inline HRESULT SetRef(const CStringW& strQName)
	{
		m_ref.SetQName(strQName);
		return S_OK;
	}
	inline HRESULT SetRef(const CStringW& strPrefix, const CStringW& strName)
	{
		m_ref.SetQName(strPrefix, strName);
		return S_OK;
	}
	inline HRESULT SetRef(const wchar_t *wszQName, int cchQName)
	{
		m_ref.SetQName(wszQName, cchQName);
		return S_OK;
	}
	inline CQName& GetRefName()
	{
		return m_ref;
	}
	// Form accessors: parse via ::GetAttributeForm; E_FAIL on unknown text.
	inline HRESULT SetAttributeForm(const wchar_t *wsz, int cch)
	{
		m_attrForm = ::GetAttributeForm(wsz, cch);
		if (m_attrForm != ATTRFORM_UNK)
		{
			return S_OK;
		}
		return E_FAIL;
	}
	inline HRESULT SetAttributeForm(const CStringW& str)
	{
		return SetAttributeForm(str, str.GetLength());
	}
	inline ATTRIBUTEFORM GetAttributeForm()
	{
		return m_attrForm;
	}
	// Use accessors: parse via ::GetAttributeUse; E_FAIL on unknown text.
	inline HRESULT SetAttributeUse(const wchar_t *wsz, int cch)
	{
		m_attrUse = ::GetAttributeUse(wsz, cch);
		if (m_attrUse != ATTRUSE_UNK)
		{
			return S_OK;
		}
		return E_FAIL;
	}
	inline HRESULT SetAttributeUse(const CStringW& str)
	{
		return SetAttributeUse(str, str.GetLength());
	}
	inline ATTRIBUTEUSE GetAttributeUse()
	{
		return m_attrUse;
	}
}; // class CAttribute
<|start_filename|>source/SProxy/WSDLPortType.h<|end_filename|>
//
// WSDLPortType.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "XMLElement.h"
#include "WSDLPortTypeOperation.h"
// Model of a WSDL <portType>: a named collection of operations keyed by
// operation name.  Operation pointers are stored, not owned copies.
class CWSDLPortType : public CXMLElement
{
private:
	CStringW m_strDocumentation;
	CStringW m_strName;
	typedef CAtlPtrMap<CStringW, CWSDLPortTypeOperation *, CStringRefElementTraits<CStringW> > PORTYPEOPERATIONMAP;
	PORTYPEOPERATIONMAP m_operations;
public:
	// Inserts an operation keyed by its name; returns p on success.
	// NOTE(review): the failure path reports E_OUTOFMEMORY even when p is
	// NULL or unnamed, not just on allocation failure -- confirm intended.
	inline CWSDLPortTypeOperation * AddOperation(CWSDLPortTypeOperation *p)
	{
		if (p != NULL)
		{
			if (p->GetName().GetLength() != 0)
			{
				if (m_operations.SetAt(p->GetName(), p) != NULL)
				{
					return p;
				}
			}
		}
		EmitErrorHr(E_OUTOFMEMORY);
		return NULL;
	}
	// Looks up an operation by name; NULL when absent.
	inline CWSDLPortTypeOperation * GetOperation(const CStringW& strName)
	{
		const PORTYPEOPERATIONMAP::CPair *p = m_operations.Lookup(strName);
		if (p != NULL)
		{
			return p->m_value;
		}
		return NULL;
	}
	// Map-style iteration over the operations.
	inline POSITION GetFirstOperation()
	{
		return m_operations.GetStartPosition();
	}
	inline CWSDLPortTypeOperation * GetNextOperation(POSITION &pos)
	{
		return m_operations.GetNextValue(pos);
	}
	// Name accessors; the pointer overload rejects NULL with E_FAIL.
	inline HRESULT SetName(const CStringW& strName)
	{
		m_strName = strName;
		return S_OK;
	}
	inline HRESULT SetName(const wchar_t *wszName, int cchName)
	{
		if (!wszName)
		{
			return E_FAIL;
		}
		m_strName.SetString(wszName, cchName);
		return S_OK;
	}
	inline const CStringW& GetName()
	{
		return m_strName;
	}
};
<|start_filename|>include/atlspriv.inl<|end_filename|>
// This is a part of the Active Template Library.
// Copyright (C) Microsoft Corporation
// All rights reserved.
//
// This source code is only intended as a supplement to the
// Active Template Library Reference and related
// electronic documentation provided with the library.
// See these sources for detailed information regarding the
// Active Template Library product.
/////////////////////////////////////////////////////////////////////////////////
//
// ZEvtSyncSocket
// ************ This is an implementation only class ************
// Class ZEvtSyncSocket is a non-supported, implementation only
// class used by the ATL HTTP client class CAtlHttpClient. Do not
// use this class in your code. Use of this class is not supported by Microsoft.
//
/////////////////////////////////////////////////////////////////////////////////
#ifndef __ATLSPRIV_INL__
#define __ATLSPRIV_INL__
#pragma once
#pragma warning(push)
#pragma warning(disable:4312)
// Constructs an unconnected, overlapped-mode socket wrapper and triggers
// Winsock initialization through the global g_HttpInit helper.
inline ZEvtSyncSocket::ZEvtSyncSocket()
{
	m_dwCreateFlags = WSA_FLAG_OVERLAPPED;
	m_hEventRead = m_hEventWrite = m_hEventConnect = NULL;
	m_socket = INVALID_SOCKET;
	m_bConnected = false;
	m_dwLastError = 0;
	m_dwSocketTimeout = ATL_SOCK_TIMEOUT;
	g_HttpInit.Init();
}
inline ZEvtSyncSocket::~ZEvtSyncSocket()
{
	Close();
}
// Implicit conversion to the underlying SOCKET handle (non-owning).
inline ZEvtSyncSocket::operator SOCKET()
{
	return m_socket;
}
// Closes the socket if open and releases the event handles via Term().
inline void ZEvtSyncSocket::Close()
{
	if (m_socket != INVALID_SOCKET)
	{
		m_bConnected = false;
		closesocket(m_socket);
		m_socket = INVALID_SOCKET;
		Term();
	}
}
// Frees the three WSA event handles and marks the socket invalid.
// Does not close the socket handle itself -- Close() does that first.
inline void ZEvtSyncSocket::Term()
{
	if (m_hEventRead)
	{
		WSACloseEvent(m_hEventRead);
		m_hEventRead = NULL;
	}
	if (m_hEventWrite)
	{
		WSACloseEvent(m_hEventWrite);
		m_hEventWrite = NULL;
	}
	if (m_hEventConnect)
	{
		WSACloseEvent(m_hEventConnect);
		m_hEventConnect = NULL;
	}
	m_socket = INVALID_SOCKET;
}
// Creates the socket from resolved address info (family/type/protocol).
inline bool ZEvtSyncSocket::Create(const ADDRINFOT* pAI, WORD wFlags)
{
	return Create(pAI->ai_family, pAI->ai_socktype, pAI->ai_protocol, wFlags);
}
// Creates the underlying WSASocket (always OR-ing in m_dwCreateFlags,
// i.e. overlapped mode) and wires up the event objects via Init().
// Fails with WSAEALREADY if a socket is already open.
inline bool ZEvtSyncSocket::Create(int af, int st, int proto, WORD wFlags)
{
	bool bRet = true;
	if (m_socket != INVALID_SOCKET)
	{
		m_dwLastError = WSAEALREADY;
		return false; // Must close this socket first
	}
	m_socket = WSASocket(af, st, proto, NULL, 0,
		wFlags | m_dwCreateFlags);
	if (m_socket == INVALID_SOCKET)
	{
		m_dwLastError = ::WSAGetLastError();
		bRet = false;
	}
	else
		bRet = Init(m_socket, NULL);
	return bRet;
}
// Resolves szAddr:nPort and connects; no-op success when already connected.
inline bool ZEvtSyncSocket::Connect(LPCTSTR szAddr, unsigned short nPort) throw()
{
	if (m_bConnected)
		return true;
	bool bRet = true;
	CSocketAddr address;
	// Find address information
	if ((m_dwLastError = address.FindAddr(szAddr, nPort, 0, PF_UNSPEC, SOCK_STREAM, 0)) != ERROR_SUCCESS)
	{
		bRet = false;
	}
	else
	{
		bRet = Connect(address.GetAddrInfo());
	}
	return bRet;
}
// Connects using address info, creating the socket first if needed.
inline bool ZEvtSyncSocket::Connect(const ADDRINFOT *pAI)
{
	if (m_socket == INVALID_SOCKET && !Create(pAI))
		return false;
	return Connect((SOCKADDR*)pAI->ai_addr, (int)pAI->ai_addrlen);
}
// Issues a (possibly non-blocking) WSAConnect and, on WSAEWOULDBLOCK,
// waits up to m_dwSocketTimeout on the connect event, then checks the
// FD_CONNECT error code.  Records failure reasons in m_dwLastError and
// the final state in m_bConnected.
inline bool ZEvtSyncSocket::Connect(const SOCKADDR* psa, int len)
{
	if (m_bConnected)
		return true; // already connected
	DWORD dwLastError;
	bool bRet = true;
	// if you try to connect the socket without
	// creating it first it's reasonable to automatically
	// try the create for you.
	if (m_socket == INVALID_SOCKET)
		return false;
	if (WSAConnect(m_socket,
		psa, len,
		NULL, NULL, NULL, NULL))
	{
		dwLastError = WSAGetLastError();
		if (dwLastError != WSAEWOULDBLOCK)
		{
			m_dwLastError = dwLastError;
			bRet = false;
		}
		else
		{
			dwLastError = WaitForSingleObject((HANDLE)m_hEventConnect, m_dwSocketTimeout);
			if (dwLastError == WAIT_OBJECT_0)
			{
				// make sure there were no connection errors.
				WSANETWORKEVENTS wse;
				ZeroMemory(&wse, sizeof(wse));
				WSAEnumNetworkEvents(m_socket, NULL, &wse);
				if (wse.iErrorCode[FD_CONNECT_BIT]!=0)
				{
					m_dwLastError = (DWORD)(wse.iErrorCode[FD_CONNECT_BIT]);
					bRet = false;
				}
			}
			else
				bRet = false;
		}
	}
	m_bConnected = bRet;
	return bRet;
}
// Sends the WSABUF array as a single overlapped WSASend and blocks (up to
// m_dwSocketTimeout) until completion.  On success *pdwSize receives the
// number of bytes sent; on failure m_dwLastError records the cause.
inline bool ZEvtSyncSocket::Write(WSABUF *pBuffers, int nCount, DWORD *pdwSize)
{
	// if we aren't already connected we'll wait to see if the connect
	// event happens
	if (WAIT_OBJECT_0 != WaitForSingleObject((HANDLE)m_hEventConnect , m_dwSocketTimeout))
	{
		m_dwLastError = WSAENOTCONN;
		return false; // not connected
	}
	// make sure we aren't already writing
	if (WAIT_TIMEOUT == WaitForSingleObject((HANDLE)m_hEventWrite, 0))
	{
		m_dwLastError = WSAEINPROGRESS;
		return false; // another write on is blocking this socket
	}
	bool bRet = true;
	*pdwSize = 0;
	WSAOVERLAPPED o;
	// BUG FIX: zero the OVERLAPPED before handing it to WSASend (Read()
	// already does this); previously only hEvent was initialized and the
	// Internal/Offset fields were passed in uninitialized.
	ZeroMemory(&o, sizeof(o));
	// protect against re-entrancy
	m_csWrite.Lock();
	o.hEvent = m_hEventWrite;
	WSAResetEvent(o.hEvent);
	if (WSASend(m_socket, pBuffers, nCount, pdwSize, 0, &o, 0))
	{
		DWORD dwLastError = WSAGetLastError();
		if (dwLastError != WSA_IO_PENDING)
		{
			m_dwLastError = dwLastError;
			bRet = false;
		}
	}
	// wait for write to complete
	if (bRet)
	{
		if (WaitForSingleObject((HANDLE)m_hEventWrite, m_dwSocketTimeout) == WAIT_OBJECT_0)
		{
			DWORD dwFlags = 0;
			if (WSAGetOverlappedResult(m_socket, &o, pdwSize, FALSE, &dwFlags))
				bRet = true;
			else
			{
				m_dwLastError = ::GetLastError();
				bRet = false;
			}
		}
		else
			bRet = false;
	}
	m_csWrite.Unlock();
	return bRet;
}
// Convenience overload: wraps a raw byte buffer (*pdwSize bytes) in a
// single WSABUF and forwards to the vectored Write above.
inline bool ZEvtSyncSocket::Write(const unsigned char *pBuffIn, DWORD *pdwSize)
{
	WSABUF buff;
	buff.buf = (char*)pBuffIn;
	buff.len = *pdwSize;
	return Write(&buff, 1, pdwSize);
}
// Receives into pBuff (capacity *pdwSize bytes) via a single overlapped
// WSARecv, blocking up to m_dwSocketTimeout.  On success *pdwSize
// receives the number of bytes read.
inline bool ZEvtSyncSocket::Read(const unsigned char *pBuff, DWORD *pdwSize)
{
	// if we aren't already connected we'll wait to see if the connect
	// event happens
	if (WAIT_OBJECT_0 != WaitForSingleObject((HANDLE)m_hEventConnect , m_dwSocketTimeout))
	{
		m_dwLastError = WSAENOTCONN;
		return false; // not connected
	}
	// NOTE(review): Write() uses WAIT_TIMEOUT here to detect an operation
	// already in progress; WAIT_ABANDONED only applies to mutexes, so this
	// guard may never trigger -- confirm which check is intended.
	if (WAIT_ABANDONED == WaitForSingleObject((HANDLE)m_hEventRead, 0))
	{
		m_dwLastError = WSAEINPROGRESS;
		return false; // another write on is blocking this socket
	}
	bool bRet = true;
	WSABUF buff;
	buff.buf = (char*)pBuff;
	buff.len = *pdwSize;
	*pdwSize = 0;
	DWORD dwFlags = 0;
	WSAOVERLAPPED o;
	ZeroMemory(&o, sizeof(o));
	// protect against re-entrency
	m_csRead.Lock();
	o.hEvent = m_hEventRead;
	WSAResetEvent(o.hEvent);
	if (WSARecv(m_socket, &buff, 1, pdwSize, &dwFlags, &o, 0))
	{
		DWORD dwLastError = WSAGetLastError();
		if (dwLastError != WSA_IO_PENDING)
		{
			m_dwLastError = dwLastError;
			bRet = false;
		}
	}
	// wait for the read to complete
	if (bRet)
	{
		if (WAIT_OBJECT_0 == WaitForSingleObject((HANDLE)o.hEvent, m_dwSocketTimeout))
		{
			dwFlags = 0;
			if (WSAGetOverlappedResult(m_socket, &o, pdwSize, FALSE, &dwFlags))
				bRet = true;
			else
			{
				m_dwLastError = ::GetLastError();
				bRet = false;
			}
		}
		else
			bRet = false;
	}
	m_csRead.Unlock();
	return bRet;
}
// Adopts an existing socket handle: creates the read/write/connect WSA
// events, pre-signals read/write (no operation in progress), and selects
// FD_READ / FD_WRITE / FD_CONNECT notification onto the events.  Any
// failure falls through to the final return false with m_dwLastError set;
// partially created events are released later by the destructor.
inline bool ZEvtSyncSocket::Init(SOCKET hSocket, void * /*pData=NULL*/)
{
	ATLASSERT(hSocket != INVALID_SOCKET);
	if (hSocket == INVALID_SOCKET)
	{
		m_dwLastError = WSAENOTSOCK;
		return false;
	}
	m_socket = hSocket;
	// Allocate Events. On error, any open event handles will be closed
	// in the destructor
	if (NULL != (m_hEventRead = WSACreateEvent()))
	if (NULL != (m_hEventWrite = WSACreateEvent()))
	if (NULL != (m_hEventConnect = WSACreateEvent()))
	{
		if (!WSASetEvent(m_hEventWrite) || !WSASetEvent(m_hEventRead))
		{
			m_dwLastError = ::GetLastError();
			return false;
		}
		if (SOCKET_ERROR != WSAEventSelect(m_socket, m_hEventRead, FD_READ))
		if (SOCKET_ERROR != WSAEventSelect(m_socket, m_hEventWrite, FD_WRITE))
		if (SOCKET_ERROR != WSAEventSelect(m_socket, m_hEventConnect, FD_CONNECT))
			return true;
	}
	m_dwLastError = ::GetLastError();
	return false;
}
// Returns the current timeout used for the WaitForSingleObject calls.
inline DWORD ZEvtSyncSocket::GetSocketTimeout() throw()
{
	return m_dwSocketTimeout;
}
// Installs a new timeout and returns the previous one.
inline DWORD ZEvtSyncSocket::SetSocketTimeout(DWORD dwNewTimeout) throw()
{
	DWORD dwOldTimeout = m_dwSocketTimeout;
	m_dwSocketTimeout = dwNewTimeout;
	return dwOldTimeout;
}
inline bool ZEvtSyncSocket::SupportsScheme(ATL_URL_SCHEME scheme) throw()
{
	// default only supports HTTP
	return scheme == ATL_URL_SCHEME_HTTP ? true : false;
}
#pragma warning(pop)
#endif // __ATLSPRIV_INL__
<|start_filename|>source/SProxy/XMLElement.h<|end_filename|>
//
// XMLElement.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
class CXMLDocument;
// TODO: set locator information for each element
// Source position (line/column) recorded by the SAX locator; zeros mean
// "not set".
struct LOCATORINFO
{
	int nLine;
	int nCol;
	LOCATORINFO()
	:nLine(0), nCol(0)
	{
	}
};
// Abstract base for all parsed XML elements: tracks the owning document,
// the parent element, per-element namespace prefix mappings, and the
// source location.  Instantiable only through derived classes (pure
// virtual destructor with an inline definition).
class CXMLElement
{
private:
	CXMLDocument * m_pParentDocument;
	CXMLElement * m_pParentElement;
	NAMESPACEMAP m_namespaceMap;
	LOCATORINFO m_locInfo;
public:
	virtual ~CXMLElement() = 0 {};
	inline CXMLElement(CXMLDocument *pDoc = NULL, CXMLElement * pParentElement = NULL)
	:m_pParentDocument(pDoc), m_pParentElement(pParentElement)
	{
	}
	inline CXMLElement * GetParentElement()
	{
		return m_pParentElement;
	}
	inline void SetParentElement(CXMLElement * pParentElement)
	{
		m_pParentElement = pParentElement;
	}
	inline CXMLDocument * GetParentDocument()
	{
		return m_pParentDocument;
	}
	void SetParentDocument(CXMLDocument *pDoc)
	{
		m_pParentDocument = pDoc;
	}
	// Locator accessors (set during SAX parsing).
	void SetLineNumber(int nLine)
	{
		m_locInfo.nLine = nLine;
	}
	void SetColumnNumber(int nCol)
	{
		m_locInfo.nCol = nCol;
	}
	int GetLineNumber()
	{
		return m_locInfo.nLine;
	}
	int GetColumnNumber()
	{
		return m_locInfo.nCol;
	}
	// Namespace prefix <-> URI resolution; implemented out of line.
	HRESULT GetNamespaceUri(const CStringW &strPrefix, CStringW &strUri);
	LPCWSTR GetNamespaceUri(LPCWSTR wszPrefix, int cchPrefix = -1);
	HRESULT SetNamespaceUri(const CStringW& strPrefix, CStringW &strUri);
	HRESULT SetNamespaceUri(LPCWSTR wszPrefix, int cchPrefix, LPCWSTR wszUri, int cchUri);
};
<|start_filename|>source/SProxy/ContentParser.h<|end_filename|>
//
// ContentParser.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "Parser.h"
#include "Emit.h"
#include "resource.h"
// CContentParser parses the simpleContent tag and the complexContent tag
class CContent;
class CContentParser : public CParserBase
{
private:
CContent *m_pContent;
public:
inline CContentParser(ISAXXMLReader *pReader, CParserBase *pParent, DWORD dwLevel, CContent *pContent = NULL)
:CParserBase(pReader, pParent, dwLevel), m_pContent(pContent)
{
}
inline CContent * GetContent()
{
return m_pContent;
}
inline CContent * SetContent(CContent *pContent)
{
m_pContent = pContent;
}
/*
<complexContent
id = ID
mixed = boolean
{any attributes with non-schema namespace . . .}>
Content: (annotation? , (restriction | extension))
</complexContent>
<simpleContent
id = ID
{any attributes with non-schema namespace . . .}>
Content: (annotation? , (restriction | extension))
</simpleContent>
*/
BEGIN_XMLTAG_MAP()
XMLTAG_ENTRY_EX("annotation", XSD_NAMESPACEA, OnAnnotation)
XMLTAG_ENTRY_EX("restriction", XSD_NAMESPACEA, OnRestriction)
XMLTAG_ENTRY_EX("extension", XSD_NAMESPACEA, OnExtension)
END_XMLTAG_MAP()
BEGIN_XMLATTR_MAP()
XMLATTR_ENTRY("mixed", OnMixed)
XMLATTR_ENTRY("id", OnID)
END_XMLATTR_MAP()
TAG_METHOD_DECL(OnAnnotation);
TAG_METHOD_DECL(OnRestriction);
TAG_METHOD_DECL(OnExtension);
ATTR_METHOD_DECL(OnMixed);
ATTR_METHOD_DECL(OnID);
};
typedef CContentParser CComplexContentParser;
typedef CContentParser CSimpleContentParser;
<|start_filename|>source/SProxy/WSDLBinding.cpp<|end_filename|>
//
// WSDLBinding.cpp
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "WSDLBinding.h"
#include "WSDLPortType.h"
#include "WSDLDocument.h"
#include "Emit.h"
#include "resource.h"
#include "Attribute.h"
#include "Content.h"
#include "Element.h"
#include "ComplexType.h"
// Resolves (and caches) the portType referenced by this binding's "type"
// attribute. Emits a file error when the prefix has no namespace binding or
// when the named portType cannot be found in the target-namespace document.
// Returns NULL if resolution fails.
CWSDLPortType * CWSDLBinding::GetPortType()
{
    // Fast path: already resolved on a previous call.
    if (m_pPortType != NULL)
    {
        return m_pPortType;
    }

    CXMLDocument *pDocument = GetParentDocument();
    if (pDocument == NULL)
    {
        return m_pPortType;
    }

    CStringW strNamespace;
    if (FAILED(GetNamespaceUri(m_type.GetPrefix(), strNamespace)))
    {
        // The QName prefix is not bound to any namespace in scope.
        EmitFileError(IDS_SDL_UNRESOLVED_NAMESPACE, this, 0, m_type.GetPrefix());
        return m_pPortType;
    }

    if (strNamespace == pDocument->GetTargetNamespace())
    {
        if (pDocument->GetDocumentType() == WSDLDOC)
        {
            CWSDLDocument *pWSDLDocument = static_cast<CWSDLDocument *>(pDocument);
            m_pPortType = pWSDLDocument->GetPortType(m_type.GetName());
        }
        if (m_pPortType == NULL)
        {
            EmitFileError(IDS_SDL_UNRESOLVED_ELEM2, this, 0,
                "portType", strNamespace, m_type.GetName());
        }
    }
    return m_pPortType;
}
<|start_filename|>source/SProxy/resource.h<|end_filename|>
//{{NO_DEPENDENCIES}}
// Microsoft Developer Studio generated include file.
// Used by sproxy.rc
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// Resource identifiers for sproxy.rc: SRF templates (IDR_*) and the
// diagnostic/message string table (IDS_SDL_*).
#define IDR_SPROXYSRF 101
#define IDR_SRF1 102
#define IDS_SDL_HEADER 103
#define IDS_SDL_SUCCESS 104
#define IDS_SDL_USAGE 105
#define IDS_SDL_PROCESS_FAILURE 106
#define IDS_SDL_GENERATE_FAILURE 107
#define IDS_SDL_INTERNAL 108
#define IDS_SDL_PARSE_WARNING 109
#define IDS_SDL_PARSE_ERROR 110
#define IDS_SDL_UNRECOGNIZED_TAG 111
#define IDS_SDL_MISSING_ATTRIBUTE 112
#define IDS_SDL_MSXML 113
#define IDS_SDL_UNRECOGNIZED_DOC 114
#define IDS_SDL_UNRESOLVED_ELEM 115
#define IDS_SDL_UNRESOLVED_NAMESPACE 116
#define IDS_SDL_LITERAL_ONLY 117
#define IDS_SDL_ONE_PORT 118
#define IDS_SDL_UNSUPPORTED_TAG 119
#define IDS_SDL_CMDLINE_FAILURE 120
// NOTE(review): IDs 121 and 122 are unused (gap left by removed strings).
#define IDS_SDL_UNRESOLVED_ELEM2 123
#define IDS_SDL_INVALID_VALUE 124
#define IDS_SDL_SOAP_PORT_ONLY 125
#define IDS_SDL_SOAP_BINDING_ONLY 126
#define IDS_SDL_INVALID_ARRAY_DESC 127
#define IDS_SDL_UNSUPPORTED_STRING 128
#define IDS_SDL_RECURSIVE_TYPE 129
#define IDS_SDL_IGNORE_CMDITEM 130
#define IDS_SDL_MISSING_OPTION 131
#define IDS_SDL_FAILED_WSDL_OPEN 132
#define IDS_SDL_UNRESOLVED_MSGPART 133
#define IDS_SDL_SOAPHEADER_DUPNAME 134
#define IDS_SDL_USAGE_EX 135
#define IDS_SDL_CMDLINE 136
#define IDS_SDL_PARSE 137
#define IDS_SDL_SCHEMALEVEL_NAME 138
#define IDS_SDL_PAD_TYPE 139
#define IDS_SDL_PAD_INVALID_SOAP 140
#define IDS_SDL_RPC_ENCODED_TYPE 141
#define IDS_SDL_DOC_ENCODED 142
#define IDS_SDL_ENCODINGSTYLE 143
#define IDS_SDL_SKIP_EXTENSIBILITY 144
#define IDS_SDL_IO_DIFF_NAMESPACES 145
#define IDS_SDL_RPC_LITERAL 146
#define IDS_SDL_HEADER_DIFF_NAMESPACES 147
#define IDS_SDL_INVALID_ARRAY_DESC_ERR 148
#define IDS_SDL_NO_GENERATE 149
#define IDS_SDL_CUSTOM_TYPE 150
#define IDS_SDL_BASE_EXTENSION 151
#define IDS_SDL_NO_ATTRIBUTES 152
#define IDS_SDL_FAILED_DM_OPEN 153
#define IDS_SDL_PROCESS_DM_FAILURE 154
#define IDS_SDL_DEFAULT_TYPE 155
// Next default values for new objects
//
#ifdef APSTUDIO_INVOKED
#ifndef APSTUDIO_READONLY_SYMBOLS
#define _APS_NEXT_RESOURCE_VALUE 156
#define _APS_NEXT_COMMAND_VALUE 40001
#define _APS_NEXT_CONTROL_VALUE 1001
#define _APS_NEXT_SYMED_VALUE 102
#endif
#endif
<|start_filename|>source/SProxy/utils/cppkw.c<|end_filename|>
/*
*
* cppkw.c
*
* generate the cppkeywords.in file
* this will include all reserved macros in
* addition to all reserved keywords
*
*/
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include <string.h>
#include <stdio.h>
#include <malloc.h>
// usage : cppkw <location of keywdat.h> <location of p0io.c> <outputfile>
/*
 * Generates the cppkeywords.in input for the static hash-table generator:
 * extracts every reserved keyword from keywdat.h (DAT("...") entries) and
 * every reserved identifier from p0io.c (GetIdForString((pIdString_t) "...")
 * calls), emitting one "<name>,0" record per match after a fixed preamble.
 *
 * usage : cppkw <location of keywdat.h> <location of p0io.c> <outputfile>
 * Returns 0 on success, 1 on any failure.
 */
int main(int argc, char *argv[])
{
    char *szRoot = NULL;    /* base of the current file buffer (owner) */
    char *szData = NULL;    /* cursor just past the opening quote */
    char *szTemp = NULL;    /* position of the current pattern match */
    char *szEnd = NULL;     /* one past the last valid byte in the buffer */
    FILE *fpIn = NULL;
    FILE *fpOut = NULL;
    int nRet = 0;
    long nFileLength = 0;
    size_t cbRead = 0;      /* bytes actually read (may be < nFileLength) */

    if (argc < 4)
    {
        printf("usage : cppkw <location of keywdat.h> <location of p0io.c> <outputfile>\n");
        return 1;
    }
    fpIn = fopen(argv[1], "r");
    if (!fpIn)
    {
        printf("error : could not open keywdat file \"%s\"\n", argv[1]);
        return 1;
    }
    fpOut = fopen(argv[3], "w");
    if (!fpOut)
    {
        printf("error : could not open output file \"%s\" for writing\n", argv[3]);
        goto error;
    }
    /* fixed preamble consumed by the static_hash generator */
    fprintf(fpOut,
        "%%class=CCppKeywordLookup\r\n"
        "%%preamble\r\n"
        "//\r\n"
        "// generated with VC7Libs\\Nonship\\Src\\Sproxy2\\static_hash\\static_hash /i cppkeywords.in /o CppKeywordTable.h /w\r\n"
        "//\r\n"
        "\r\n"
        "%%type=char\r\n"
        "%%data\r\n");
    nFileLength = _filelength(_fileno(fpIn));
    szData = (char *)malloc(nFileLength+1);
    if (!szData)
    {
        printf("out of memory\n");
        goto error;
    }
    /* FIX: terminate at the byte count actually read. The stream is opened
       in text mode, so CRLF translation yields fewer bytes than _filelength;
       the original terminated at nFileLength and then scanned the
       uninitialized gap with strstr. */
    cbRead = fread(szData, 1, nFileLength, fpIn);
    szData[cbRead] = '\0';
    szTemp = szRoot = szData;
    szEnd = szData + cbRead;
    /* pass 1: keywords declared as DAT("name", ...) in keywdat.h */
    szTemp = strstr(szTemp, "\nDAT(\"");
    while (szTemp != NULL)
    {
        char *szTemp2 = NULL;
        szData = szTemp + (sizeof("\nDAT(\"") - 1);
        if (szData >= szEnd)
        {
            printf("error in (expected) format of keywdat file.\n");
            goto error;
        }
        szTemp2 = strchr(szData, '"');
        if (!szTemp2)
        {
            printf("error in (expected) format of keywdat file.\n");
            goto error;
        }
        /* FIX: the %.*s precision argument must be an int; a pointer
           difference is ptrdiff_t (64-bit on Win64), so cast explicitly. */
        fprintf(fpOut, "%.*s,0\r\n", (int)(szTemp2 - szData), szData);
        szTemp = strstr(szTemp + 1, "\nDAT(\"");
    }
    fclose(fpIn);
    fpIn = fopen(argv[2], "r");
    if (!fpIn)
    {
        printf("error : could not open p0io file \"%s\" for reading.\n", argv[2]);
        goto error;
    }
    nFileLength = _filelength(_fileno(fpIn));
    if ((szEnd - szRoot) < nFileLength)
    {
        /* grow the buffer when the second input file is larger */
        free(szRoot);
        szRoot = (char *)malloc(nFileLength+1);
        if (!szRoot)
        {
            printf("out of memory.\n");
            goto error;
        }
    }
    cbRead = fread(szRoot, 1, nFileLength, fpIn);
    szRoot[cbRead] = '\0';
    szTemp = szData = szRoot;
    szEnd = szData + cbRead;
    /* pass 2: identifiers registered via GetIdForString(...) in p0io.c */
    szTemp = strstr(szTemp, "GetIdForString((pIdString_t) \"");
    while (szTemp != NULL)
    {
        char *szTemp2 = NULL;
        szData = szTemp + (sizeof("GetIdForString((pIdString_t) \"") - 1);
        if (szData >= szEnd)
        {
            printf("error in (expected) format of p0io file.\n");
            goto error;
        }
        szTemp2 = strchr(szData, '"');
        if (!szTemp2)
        {
            printf("error in (expected) format of p0io file.\n");
            goto error;
        }
        fprintf(fpOut, "%.*s,0\r\n", (int)(szTemp2 - szData), szData);
        szTemp = strstr(szTemp + 1, "GetIdForString((pIdString_t) \"");
    }
    goto end;
error:
    nRet = 1;
end:
    if (fpIn != NULL)
    {
        fclose(fpIn);
    }
    if (fpOut != NULL)
    {
        fclose(fpOut);
    }
    if (szRoot != NULL)
    {
        free(szRoot);
    }
    return nRet;
}
<|start_filename|>source/SProxy/WSDLTypesParser.h<|end_filename|>
//
// WSDLTypesParser.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "Parser.h"
class CWSDLType;
class CWSDLTypesParser : public CParserBase
{
private:
CWSDLType * m_pType;
public:
inline CWSDLTypesParser()
{
}
inline CWSDLTypesParser(ISAXXMLReader *pReader, CParserBase *pParent, DWORD dwLevel, CWSDLType * pType = NULL)
:CParserBase(pReader, pParent, dwLevel), m_pType(pType)
{
}
inline CWSDLType * GetType()
{
return m_pType;
}
inline void SetType(CWSDLType * pType)
{
m_pType = pType;
}
BEGIN_XMLTAG_MAP()
XMLTAG_ENTRY_EX("documentation", WSDL_NAMESPACEA, OnDocumentation)
XMLTAG_ENTRY_EX("schema", XSD_NAMESPACEA, OnSchema)
END_XMLTAG_MAP()
EMPTY_XMLATTR_MAP()
TAG_METHOD_DECL(OnDocumentation);
TAG_METHOD_DECL(OnSchema);
HRESULT __stdcall startPrefixMapping(
const wchar_t *wszPrefix,
int cchPrefix,
const wchar_t *wszUri,
int cchUri);
HRESULT OnUnrecognizedTag(
const wchar_t *wszNamespaceUri, int cchNamespaceUri,
const wchar_t *wszLocalName, int cchLocalName,
const wchar_t *wszQName, int cchQName,
ISAXAttributes *pAttributes) throw();
};
<|start_filename|>source/VCDeploy/resource.h<|end_filename|>
//{{NO_DEPENDENCIES}}
// Microsoft Visual C++ generated include file.
// Used by atlsdply.rc
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// String-table and version resource identifiers for atlsdply.rc (VCDeploy).
#define IDS_USAGE 101
#define IDS_BANNER 102
#define IDS_COMINIT_FAILED 103
#define IDS_UPDATEIRL 104
#define IDS_UNEXPECTED 106
#define IDS_ERR_FAILEDTOCREATEDOM 107
#define IDS_ERR_FAILEDTOLOAD_SETTINGS_XML 108
#define IDS_ERR_BADROOTNODE 109
#define IDS_ERR_WEBHOSTNAME 110
#define IDS_ERR_NOVIRTDIR 111
#define IDS_ERR_BADVIRTDIRNODE 112
#define IDS_ERR_BADREGISTERISAPI 113
#define IDS_ERR_INVALIDREGISTERISAPI 114
#define IDS_ERR_BADUNLOADBEFORECOPY 115
#define IDS_ERR_INVALIDUNLOADBEFORECOPY 116
#define IDS_ERR_BADAPPISOLATION 117
#define IDS_ERR_INVALIDAPPISOLATION 118
#define IDS_ERR_NOVIRTDIRFSPATH 119
#define IDS_ERR_BADVIRTDIRSFPATHNODE 120
#define IDS_ERR_CONNECTADSFAILED 121
#define IDS_ERR_CREATEVROOTFAILED 122
#define IDS_ERR_SETADSPROPERTY 123
#define IDS_FILEEXTATTR_NOTFOUND 124
#define IDS_FILEEXTATTR_INVALID 125
#define IDS_ERR_TOOMANYVERBS 126
#define ATLS_ERR_TOOMANYVERBBLOCKS 127
#define IDS_ERR_NOSTOPW3SVC 129
#define IDS_ERR_W3SVCFAILEDTOSTART 130
#define IDS_FAILEDOPENSCM 131
#define IDS_FAILEDOPENSVC 132
#define IDS_FAILEDSTOPCOMMAND 133
#define IDS_FAILEDQUERYSTATUS 134
#define IDS_FAILEDSTARTSVC 135
#define IDS_ERR_CREATING_DIRECTORY 136
#define IDS_ERR_CREATING_DIRECTORY_RELATIVE 137
#define IDS_WARN_COPYING_FILE 138
#define IDS_ERR_REGISTERING_EXTENSION 139
#define IDS_ERR_REGISTERING_NOVDIRPATH 140
#define IDS_ERR_REGISTERING_NOEXTFILE 141
#define IDS_ERR_RUNTIME 142
#define IDS_ERR_OUTOFMEM 143
#define IDS_WARN_SOURCE_NOT_EXIST 144
#define IDS_WARN_SOURCE_ACCESS_ERROR 145
#define IDS_COPYFILE_MESSAGE 146
#define IDS_FILES_IDENTICAL 147
#define IDS_WARNING_FAILED_METABSESAVE 148
#define IDS_WARNING_EMPTY_HOST_TAG 149
#define IDS_MUSTBEADMIN 150
#define IDS_CANTGETSECURITY 151
#define IDS_ERR_BADDONOTCREATEVIRTDIR 152
#define IDS_ERR_INVALIDDONOTCREATEVIRTDIR 153
// Version-resource language/codepage constants (US English, Unicode).
#define VER_LANGID_HEX 0409
#define VER_LANGID 0x0409
#define VER_CODEPAGE 0x04b0
// Next default values for new objects
//
#ifdef APSTUDIO_INVOKED
#ifndef APSTUDIO_READONLY_SYMBOLS
// NOTE(review): _APS_NEXT_RESOURCE_VALUE (148) is below the highest string
// ID already in use (153); the resource editor could assign a colliding ID.
// Confirm and bump to 154 in the resource editor.
#define _APS_NEXT_RESOURCE_VALUE 148
#define _APS_NEXT_COMMAND_VALUE 40001
#define _APS_NEXT_CONTROL_VALUE 1001
#define _APS_NEXT_SYMED_VALUE 101
#endif
#endif
<|start_filename|>source/SProxy/WSDLOperationIOParser.cpp<|end_filename|>
//
// WSDLOperationIOParser.cpp
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#include "stdafx.h"
#include "Util.h"
#include "WSDLOperationIOParser.h"
#include "WSDLPortTypeIO.h"
#include "Attribute.h"
#include "Content.h"
#include "Element.h"
#include "ComplexType.h"
#include "Emit.h"
#include "resource.h"
// Handles a <soap:body> extensibility element in a WSDL operation
// input/output: creates a CSoapBody on the current CWSDLPortTypeIO and
// captures the use/parts/encodingStyle/namespace attributes.
TAG_METHOD_IMPL(CWSDLOperationIOParser, OnSoapBody)
{
    TRACE_PARSE_ENTRY();
    CWSDLPortTypeIO * pCurr = GetIO();
    if (pCurr != NULL)
    {
        CSoapBody *pBody = pCurr->AddSoapBody();
        if (pBody != NULL)
        {
            SetXMLElementInfo(pBody, pCurr, GetLocator());
            const wchar_t *wszValue;
            int cchValue;
            if (S_OK == GetAttribute(pAttributes, L"use", sizeof("use")-1, &wszValue, &cchValue))
            {
                if (S_OK != pBody->SetUse(wszValue, cchValue))
                {
                    EmitInvalidValue("use", wszValue);
                }
            }
            if (S_OK == GetAttribute(pAttributes, L"parts", sizeof("parts")-1, &wszValue, &cchValue))
            {
                pBody->SetParts(wszValue, cchValue);
            }
            if (S_OK == GetAttribute(pAttributes, L"encodingStyle", sizeof("encodingStyle")-1, &wszValue, &cchValue))
            {
                pBody->SetEncodingStyle(wszValue, cchValue);
            }
            if (S_OK == GetAttribute(pAttributes, L"namespace", sizeof("namespace")-1, &wszValue, &cchValue))
            {
                pBody->SetNamespace(wszValue, cchValue);
            }
            // FIX: only report success when the body was actually allocated.
            // The original returned SkipElement() even when AddSoapBody()
            // returned NULL, silently swallowing the allocation failure that
            // the sibling handlers (OnSoapHeader/OnSoapFault) report.
            return SkipElement();
        }
    }
    EmitErrorHr(E_OUTOFMEMORY);
    return E_FAIL;
}
// Handles a <soap:header> extensibility element in a WSDL operation
// input/output. Creates a CSoapHeader on the current CWSDLPortTypeIO,
// requires the message/part/use attributes, and accepts the optional
// encodingStyle/namespace and wsdl:required attributes.
TAG_METHOD_IMPL(CWSDLOperationIOParser, OnSoapHeader)
{
TRACE_PARSE_ENTRY();
CWSDLPortTypeIO * pCurr = GetIO();
if (pCurr != NULL)
{
CSoapHeader *pElem = pCurr->AddSoapHeader();
if (pElem != NULL)
{
// record source location for later diagnostics
SetXMLElementInfo(pElem, pCurr, GetLocator());
const wchar_t *wszValue = NULL;
int cchValue = 0;
// message, part, and use are all required attributes
if (S_OK == GetAttribute(pAttributes, L"message", sizeof("message")-1, &wszValue, &cchValue))
{
pElem->SetMessage(wszValue, cchValue);
if (S_OK == GetAttribute(pAttributes, L"part", sizeof("part")-1, &wszValue, &cchValue))
{
pElem->SetParts(wszValue, cchValue);
if (S_OK == GetAttribute(pAttributes, L"use", sizeof("use")-1, &wszValue, &cchValue))
{
if (S_OK == pElem->SetUse(wszValue, cchValue))
{
// encodingStyle, namespace, and required are optional attributes
if (S_OK == GetAttribute(pAttributes, L"encodingStyle", sizeof("encodingStyle")-1, &wszValue, &cchValue))
{
pElem->SetEncodingStyle(wszValue, cchValue);
}
if (S_OK == GetAttribute(pAttributes, L"namespace", sizeof("namespace")-1, &wszValue, &cchValue))
{
pElem->SetNamespace(wszValue, cchValue);
}
// "required" lives in the WSDL namespace, unlike the others
if (S_OK == GetAttribute(pAttributes, L"required",
sizeof("required")-1, &wszValue, &cchValue, WSDL_NAMESPACEW, sizeof(WSDL_NAMESPACEA)-1))
{
bool bVal;
if (S_OK == GetBooleanValue(&bVal, wszValue, cchValue))
{
pElem->SetRequired(bVal);
}
else
{
EmitInvalidValue("required", wszValue);
}
}
// success: attributes captured, skip any child content
return SkipElement();
}
EmitInvalidValue("use", wszValue);
}
else
{
OnMissingAttribute(TRUE, L"use", sizeof("use")-1, L"", 0);
}
}
else
{
OnMissingAttribute(TRUE, L"part", sizeof("part")-1, L"", 0);
}
}
else
{
OnMissingAttribute(TRUE, L"message", sizeof("message")-1, L"", 0);
}
}
}
// NOTE(review): this fall-through is also reached after a missing-attribute
// or invalid-"use" diagnostic above, so E_OUTOFMEMORY is then a misleading
// secondary error code — confirm before relying on it.
EmitErrorHr(E_OUTOFMEMORY);
return E_FAIL;
}
// <soap:headerfault> is intentionally ignored: the element (and its
// subtree) is skipped without creating any model object.
TAG_METHOD_IMPL(CWSDLOperationIOParser, OnSoapHeaderFault)
{
TRACE_PARSE_ENTRY();
// REVIEW: output warning here (probably not necessary)?
return SkipElement();
}
// Handles a <soap:fault> extensibility element in a WSDL operation.
// Creates a CSoapFault on the current CWSDLPortTypeIO; "name" and "use" are
// required attributes, "encodingStyle" and "namespace" are optional.
TAG_METHOD_IMPL(CWSDLOperationIOParser, OnSoapFault)
{
    TRACE_PARSE_ENTRY();
    CWSDLPortTypeIO * pCurr = GetIO();
    if (pCurr != NULL)
    {
        CSoapFault *pElem = pCurr->AddSoapFault();
        if (pElem != NULL)
        {
            SetXMLElementInfo(pElem, pCurr, GetLocator());
            const wchar_t *wszValue = NULL;
            int cchValue = 0;
            // name and use are required attributes
            if (S_OK == GetAttribute(pAttributes, L"name", sizeof("name")-1, &wszValue, &cchValue))
            {
                pElem->SetName(wszValue, cchValue);
                if (S_OK == GetAttribute(pAttributes, L"use", sizeof("use")-1, &wszValue, &cchValue))
                {
                    if (S_OK == pElem->SetUse(wszValue, cchValue))
                    {
                        // encodingStyle and namespace are optional attributes
                        if (S_OK == GetAttribute(pAttributes, L"encodingStyle", sizeof("encodingStyle")-1, &wszValue, &cchValue))
                        {
                            pElem->SetEncodingStyle(wszValue, cchValue);
                        }
                        if (S_OK == GetAttribute(pAttributes, L"namespace", sizeof("namespace")-1, &wszValue, &cchValue))
                        {
                            pElem->SetNamespace(wszValue, cchValue);
                        }
                        return SkipElement();
                    }
                    EmitInvalidValue("use", wszValue);
                }
                else
                {
                    // FIX: report the missing required attribute the same way
                    // OnSoapHeader does instead of falling through silently.
                    OnMissingAttribute(TRUE, L"use", sizeof("use")-1, L"", 0);
                }
            }
            else
            {
                // FIX: likewise for the required "name" attribute.
                OnMissingAttribute(TRUE, L"name", sizeof("name")-1, L"", 0);
            }
        }
    }
    EmitErrorHr(E_OUTOFMEMORY);
    return E_FAIL;
}
// The handlers below cover extensibility elements sproxy does not support
// (MIME bindings, HTTP url-encoding) plus <documentation>. Each emits a
// skip notice and fast-forwards past the element and its subtree.
TAG_METHOD_IMPL(CWSDLOperationIOParser, OnMimeContent)
{
TRACE_PARSE_ENTRY();
EmitSkip(wszNamespaceUri, wszLocalName);
return SkipElement();
}
TAG_METHOD_IMPL(CWSDLOperationIOParser, OnMimeXML)
{
TRACE_PARSE_ENTRY();
EmitSkip(wszNamespaceUri, wszLocalName);
return SkipElement();
}
TAG_METHOD_IMPL(CWSDLOperationIOParser, OnMimeMultipartRelated)
{
TRACE_PARSE_ENTRY();
EmitSkip(wszNamespaceUri, wszLocalName);
return SkipElement();
}
TAG_METHOD_IMPL(CWSDLOperationIOParser, OnHttpUrlEncoded)
{
TRACE_PARSE_ENTRY();
EmitSkip(wszNamespaceUri, wszLocalName);
return SkipElement();
}
TAG_METHOD_IMPL(CWSDLOperationIOParser, OnDocumentation)
{
TRACE_PARSE_ENTRY();
EmitSkip(wszNamespaceUri, wszLocalName);
return SkipElement();
}
// Attribute handler for "name": stores the value on the active
// port-type input/output element.
ATTR_METHOD_IMPL(CWSDLOperationIOParser, OnName)
{
    TRACE_PARSE_ENTRY();
    CWSDLPortTypeIO *pIO = GetIO();
    if (pIO == NULL)
    {
        return E_FAIL;
    }
    return pIO->SetName(wszValue, cchValue);
}
// Attribute handler for "message": stores the referenced message QName on
// the active port-type input/output element.
ATTR_METHOD_IMPL(CWSDLOperationIOParser, OnMessage)
{
    TRACE_PARSE_ENTRY();
    CWSDLPortTypeIO *pIO = GetIO();
    if (pIO == NULL)
    {
        return E_FAIL;
    }
    return pIO->SetMessage(wszValue, cchValue);
}
// SAX callback: records an XML namespace prefix -> URI binding on the
// active port-type input/output element.
HRESULT __stdcall CWSDLOperationIOParser::startPrefixMapping(
    const wchar_t *wszPrefix,
    int cchPrefix,
    const wchar_t *wszUri,
    int cchUri)
{
    CWSDLPortTypeIO *pIO = GetIO();
    if (pIO == NULL)
    {
        return E_FAIL;
    }
    return pIO->SetNamespaceUri(wszPrefix, cchPrefix, wszUri, cchUri);
}
// Called for any tag not in the handler map: warns that the extensibility
// element is being skipped (with its source position) and skips it.
HRESULT CWSDLOperationIOParser::OnUnrecognizedTag(
    const wchar_t *wszNamespaceUri, int cchNamespaceUri,
    const wchar_t *wszLocalName, int cchLocalName,
    const wchar_t * /*wszQName*/, int /*cchQName*/,
    ISAXAttributes * /*pAttributes*/) throw()
{
    CWSDLPortTypeIO *pIO = GetIO();
    if (pIO != NULL)
    {
        int nLineNumber = 0;
        int nColNumber = 0;
        GetLocator()->getLineNumber(&nLineNumber);
        GetLocator()->getColumnNumber(&nColNumber);
        EmitFileWarning(IDS_SDL_SKIP_EXTENSIBILITY,
            pIO->GetParentDocument()->GetDocumentUri(),
            nLineNumber,
            nColNumber,
            0,
            wszNamespaceUri,
            wszLocalName);
    }
    return SkipElement();
}
<|start_filename|>source/SProxy/StdAfx.h<|end_filename|>
//
// stdafx.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#pragma warning(1: 4927)
#pragma warning(1: 4928)
#define _WIN32_WINNT 0x0502
#ifdef _DEBUG
#define ATL_DEBUG_STENCILS
#endif
#define _CRT_SECURE_CPP_OVERLOAD_STANDARD_NAMES 1
#include <atlbase.h>
#include <atlpath.h>
#include <msxml2.h>
#include <atlstencil.h>
#include "SproxyColl.h"
#include "resource.h"
#ifdef _DEBUG
#include <crtdbg.h>
#endif
#define _MAKEWIDESTR( str ) L ## str
#define MAKEWIDESTR( str ) _MAKEWIDESTR( str )
#include "Namespaces.h"
typedef CAtlMap<CStringW, CStringW, CStringRefElementTraits<CStringW>, CStringRefElementTraits<CStringW> > NAMESPACEMAP;
const wchar_t * GetWSDLFile();
#include "DiscoMapDocument.h"
CDiscoMapDocument * GetDiscoMapDocument();
#ifdef _DEBUG
// Debug-only helper: returns a string of dwLevel tab characters used to
// indent trace output. The result lives in a single static buffer, so it is
// only valid until the next call (and is not safe for concurrent callers).
inline const TCHAR * GetTabs(DWORD dwLevel)
{
static TCHAR s_szTabs[2048];
// clamp so the terminator written at s_szTabs[dwLevel] stays in bounds
dwLevel = min(dwLevel, 2047);
for (DWORD i=0; i<dwLevel; i++)
{
s_szTabs[i] = _T('\t');
}
s_szTabs[dwLevel] = _T('\0');
return s_szTabs;
}
#else // _DEBUG
// Release builds compile GetTabs(...) away entirely.
#define GetTabs __noop
#endif // _DEBUG
HRESULT CreateSafeCppName(char **ppszName, const wchar_t *wszName);
HRESULT CreateSafeCppName(char **ppszName, const char *wszName);
HRESULT CreateSafeCppName(CStringA& strSafeName, const wchar_t *wszName);
HRESULT CreateSafeCppName(CStringA& strSafeName, const char *wszName);
#pragma warning(disable:4100)
<|start_filename|>source/SProxy/CodeTypes.h<|end_filename|>
//
// CodeTypes.h
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#include "stdafx.h"
#include "XSDElement.h"
// Kind of code construct a CCodeElement represents in the generated proxy.
enum CODETYPE
{
CODETYPE_ERR = 0, // unset/invalid
CODETYPE_UNK, // not yet classified
CODETYPE_ENUM,
CODETYPE_PARAMETER,
CODETYPE_STRUCT,
CODETYPE_UNION,
CODETYPE_FIELD,
CODETYPE_FUNCTION,
CODETYPE_HEADER // SOAP header element
};
// Bit flags qualifying a CCodeElement (combined in the DWORD returned by
// CCodeElement::GetFlags()).
enum CODEFLAGS
{
CODEFLAG_ERR = 0x00000000,
CODEFLAG_UNK = 0x00000001,
// parameter direction
CODEFLAG_IN = 0x00000002,
CODEFLAG_OUT = 0x00000004,
CODEFLAG_RETVAL = 0x00000008,
CODEFLAG_MUSTUNDERSTAND = 0x00000010,
// binary encodings
CODEFLAG_HEX = 0x00000020,
CODEFLAG_BASE64 = 0x00000040,
// array shape
CODEFLAG_FIXEDARRAY = 0x00000080,
CODEFLAG_DYNARRAY = 0x00000100,
// aggregate kind
CODEFLAG_STRUCT = 0x00000200,
CODEFLAG_UNION = 0x00000400,
CODEFLAG_ENUM = 0x00000800,
CODEFLAG_HEADER = 0x00001000,
// WSDL message styles
CODEFLAG_DOCUMENT = 0x00002000,
CODEFLAG_RPC = 0x00004000,
// WSDL message uses
CODEFLAG_LITERAL = 0x00008000,
CODEFLAG_ENCODED = 0x00010000,
// specific wire format for document/literal with
// <message ...><part ... element="..."/></message>
CODEFLAG_PID = 0x00020000, // ParametersInDocuments
CODEFLAG_PAD = 0x00040000, // ParametersAsDocuments
// special processing required for document/literal with type=
CODEFLAG_CHAIN = 0x00080000,
CODEFLAG_TYPE = 0x00100000,
CODEFLAG_ELEMENT = 0x00200000,
// one-way method
CODEFLAG_ONEWAY = 0x00400000,
// MinOccurs=0/MaxOccurs=1 Wrapper
CODEFLAG_DYNARRAYWRAPPER= 0x00800000,
// nullable/nillable element
CODEFLAG_NULLABLE = 0x01000000
};
// Base class for every named item emitted into the generated proxy code:
// holds the WSDL name, a C++-safe name, the CODETYPE classification,
// CODEFLAGS bits, and a link to the enclosing element.
class CCodeElement
{
private:
CStringW m_strName; // original (WSDL/XSD) name
DWORD m_dwFlags; // CODEFLAGS bits
CODETYPE m_codeType; // what kind of construct this is
CCodeElement * m_pParentElement; // enclosing element (not owned)
// safe naming
CStringA m_strSafeName; // name sanitized for use as a C++ identifier
public:
CCodeElement();
virtual ~CCodeElement() {}
HRESULT SetName(const wchar_t *wszName, int cchName);
HRESULT SetName(const CStringW& strName);
const CStringW& GetName();
CODETYPE GetCodeType();
void SetCodeType(CODETYPE codeType);
DWORD SetFlags(DWORD dwFlags);
DWORD GetFlags();
DWORD AddFlags(DWORD dwFlag);
// clears the given bits (clears nothing when called with the default 0)
DWORD ClearFlags(DWORD dwFlags = 0);
void SetParentElement(CCodeElement * pParentElement);
CCodeElement * GetParentElement();
const CCodeElement& operator=(const CCodeElement& that);
CStringA& GetSafeName();
HRESULT SetSafeName(const CStringA& strName);
};
// The resolved type of a code element: the underlying XSD type plus the
// C++ type name (raw and identifier-safe) used in generated code.
class CCodeType
{
private:
XSDTYPE m_xsdType; // schema type classification
CStringA m_strCodeType; // C++ type name as derived from the schema
CStringA m_strSafeCodeType; // identifier-safe variant of the type name
public:
CCodeType();
void SetXSDType(XSDTYPE xsdType);
XSDTYPE GetXSDType();
HRESULT SetCodeTypeName(const wchar_t *wszCodeType, int cchCodeType);
HRESULT SetCodeTypeName(const CStringW& strCodeType);
const CStringA& GetCodeTypeName();
const CCodeType& operator=(const CCodeType& that);
CStringA& GetSafeCodeTypeName();
HRESULT SetSafeCodeTypeName(const CStringA& strSafeCodeType);
};
class CCodeTypedElement : public CCodeElement, public CCodeType
{
private:
CXSDElement * m_pElement;
CAtlArray<int> m_arrDims;
CStringA m_strSizeIs;
CStringW m_strNamespace;
public:
CCodeTypedElement()
:m_pElement(NULL)
{
}
inline HRESULT SetNamespace(const CStringW& strNamespace)
{
m_strNamespace = strNamespace;
return S_OK;
}
inline CStringW& GetNamespace()
{
return m_strNamespace;
}
inline size_t AddDimension(int nDim)
{
return m_arrDims.Add(nDim);
}
inline int GetDimension(int i)
{
return m_arrDims[i];
}
inline void SetDimension(int i, int nDim)
{
m_arrDims[i] = nDim;
}
inline int GetDims()
{
return (int)m_arrDims.GetCount();
}
inline void ClearDims()
{
m_arrDims.RemoveAll();
}
inline CStringA& GetSizeIs()
{
return m_strSizeIs;
}
inline void SetSizeIs(const CStringA& str)
{
m_strSizeIs = str;
}
inline void SetSizeIs(const CStringW& str)
{
m_strSizeIs = CW2A(const_cast<LPWSTR>((LPCWSTR) str));
}
inline const CCodeTypedElement& operator=(const CCodeTypedElement& that)
{
if (this != &that)
{
CCodeElement::operator=(that);
CCodeType::operator=(that);
m_pElement = that.m_pElement;
// REVIEW: (jasjitg) -- ugly!
if (that.m_arrDims.GetCount() != 0)
{
m_arrDims.SetCount(that.m_arrDims.GetCount());
for (size_t i=0; i<m_arrDims.GetCount(); i++)
{
m_arrDims[i] = that.m_arrDims[i];
}
}
}
return (*this);
}
inline CCodeTypedElement(const CCodeTypedElement& that)
{
*this = that;
}
inline void SetElement(CXSDElement *pElem)
{
m_pElement = pElem;
}
inline CXSDElement * GetElement()
{
return m_pElement;
}
};
// Container for a generated struct/enum/function: owns a list of member
// elements plus (for functions) SOAP headers, call flags, and the
// response/send names and SOAPAction used when emitting the proxy method.
class CCodeElementContainer
{
private:
CStringA m_strName;
// safe name
CStringA m_strSafeName; // identifier-safe variant of m_strName
CStringA m_strResponseName; // for CCodeFunction only
CStringA m_strSendName; // for CCodeFunction only
CStringA m_strSoapAction; // for CCodeFunction only
CAtlPtrList<CCodeTypedElement *> m_headers; // for CCodeFunction only
DWORD m_dwCallFlags; // for CCodeFunction only
// namespace of the type/function
CStringA m_strNamespace;
CAtlPtrList<CCodeTypedElement *> m_elements;
public:
CCodeElementContainer()
:m_dwCallFlags(0)
{
}
DWORD GetCallFlags();
void SetCallFlags(DWORD dwCallFlags);
CStringA& GetSafeName();
const CStringA& GetName();
HRESULT SetName(const CStringA& str);
HRESULT SetName(const CStringW& str);
const CStringA& GetResponseName();
HRESULT SetResponseName(const CStringA& str);
HRESULT SetResponseName(const CStringW& str);
const CStringA& GetSendName();
HRESULT SetSendName(const CStringA& str);
HRESULT SetSendName(const CStringW& str);
CStringA& GetNamespace();
HRESULT SetNamespace(const CStringA& str);
HRESULT SetNamespace(const CStringW& str);
const CStringA& GetSoapAction();
HRESULT SetSoapAction(const CStringA& str);
HRESULT SetSoapAction(const CStringW& str);
// element list: iterate with GetFirstElement/GetNextElement
POSITION GetFirstElement();
CCodeTypedElement * GetNextElement(POSITION &pos);
CCodeTypedElement * AddElement(CCodeTypedElement * p = NULL);
int GetElementCount();
// header list (functions only)
POSITION GetFirstHeader();
CCodeTypedElement * GetNextHeader(POSITION &pos);
CCodeTypedElement * AddHeader(CCodeTypedElement *p = NULL);
int GetHeaderCount();
};
// The same container shape serves all three generated constructs.
typedef CCodeElementContainer CCodeFunction;
typedef CCodeElementContainer CCodeStruct;
typedef CCodeElementContainer CCodeEnum;
// Root of the generated proxy model: identifies the proxy class/service and
// owns the lists of structs, enums, functions, and global SOAP headers that
// the code emitter walks.
class CCodeProxy
{
private:
CStringA m_strClassName; // generated C++ class name
CStringA m_strAddressUri; // service endpoint address
CStringA m_strTargetNamespace;
CStringA m_strServiceName;
CAtlPtrList<CCodeStruct *> m_structs;
CAtlPtrList<CCodeEnum *> m_enums;
CAtlPtrList<CCodeFunction *> m_functions;
CAtlPtrList<CCodeTypedElement *> m_headers;
public:
const CStringA& GetClassName();
HRESULT SetClassName(const CStringW &strName);
HRESULT SetClassName(const char * szName);
const CStringA& GetServiceName();
HRESULT SetServiceName(const CStringW &strName);
HRESULT SetServiceName(const char * szName);
const CStringA& GetAddressUri();
HRESULT SetAddressUri(const CStringW &strName);
HRESULT SetAddressUri(const char * szName);
const CStringA& GetTargetNamespace();
HRESULT SetTargetNamespace(const CStringW &strName);
HRESULT SetTargetNamespace(const char * szName);
// Add* with a NULL argument allocates a new node; the returned pointer is
// owned by the corresponding list.
POSITION GetFirstStruct();
CCodeStruct * GetNextStruct(POSITION &pos);
CCodeStruct * AddStruct(CCodeStruct * p = NULL);
POSITION GetFirstEnum();
CCodeEnum * GetNextEnum(POSITION &pos);
CCodeEnum * AddEnum(CCodeEnum * p = NULL);
POSITION GetFirstFunction();
CCodeFunction * GetNextFunction(POSITION &pos);
CCodeFunction * AddFunction(CCodeFunction * p = NULL);
POSITION GetFirstHeader();
CCodeTypedElement * GetNextHeader(POSITION &pos);
CCodeTypedElement * AddHeader(CCodeTypedElement *p = NULL);
};
//////////////////////////////////////////////////////////////////
//
// CCodeElement
//
//////////////////////////////////////////////////////////////////
// Default-construct with every scalar member in a defined state.
// FIX: the original only initialized m_dwFlags, leaving m_codeType and
// m_pParentElement indeterminate; operator= then copied those indeterminate
// values into other objects.
inline CCodeElement::CCodeElement()
    :m_dwFlags(CODEFLAG_ERR),
    m_codeType(CODETYPE_ERR),
    m_pParentElement(NULL)
{
}
// Trivial accessors for the name/safe-name pair.
inline CStringA& CCodeElement::GetSafeName()
{
return m_strSafeName;
}
inline HRESULT CCodeElement::SetSafeName(const CStringA& strName)
{
m_strSafeName = strName;
return S_OK;
}
// Sets the name from a (not necessarily NUL-terminated) wide buffer.
inline HRESULT CCodeElement::SetName(const wchar_t *wszName, int cchName)
{
if (!wszName)
{
return E_FAIL;
}
m_strName.SetString(wszName, cchName);
return S_OK;
}
inline HRESULT CCodeElement::SetName(const CStringW& strName)
{
m_strName = strName;
return S_OK;
}
inline const CStringW& CCodeElement::GetName()
{
return m_strName;
}
inline CODETYPE CCodeElement::GetCodeType()
{
return m_codeType;
}
inline void CCodeElement::SetCodeType(CODETYPE codeType)
{
m_codeType = codeType;
}
// Flag manipulation: Set replaces, Add ORs in, Clear masks out.
inline DWORD CCodeElement::SetFlags(DWORD dwFlags)
{
return m_dwFlags = dwFlags;
}
inline DWORD CCodeElement::GetFlags()
{
return m_dwFlags;
}
inline DWORD CCodeElement::AddFlags(DWORD dwFlag)
{
m_dwFlags |= dwFlag;
return m_dwFlags;
}
inline DWORD CCodeElement::ClearFlags(DWORD dwFlags)
{
m_dwFlags &= ~dwFlags;
return m_dwFlags;
}
inline void CCodeElement::SetParentElement(CCodeElement * pParentElement)
{
m_pParentElement = pParentElement;
}
inline CCodeElement * CCodeElement::GetParentElement()
{
return m_pParentElement;
}
// Member-wise assignment of all five members.
inline const CCodeElement& CCodeElement::operator=(const CCodeElement& that)
{
if (this != &that)
{
m_strName = that.m_strName;
m_strSafeName = that.m_strSafeName;
m_dwFlags = that.m_dwFlags;
m_codeType = that.m_codeType;
m_pParentElement = that.m_pParentElement;
}
return (*this);
}
//////////////////////////////////////////////////////////////////
//
// CCodeType
//
//////////////////////////////////////////////////////////////////
// Start with the error sentinel until a real XSD type is resolved.
inline CCodeType::CCodeType()
:m_xsdType(XSDTYPE_ERR)
{
}
inline void CCodeType::SetXSDType(XSDTYPE xsdType)
{
m_xsdType = xsdType;
}
inline XSDTYPE CCodeType::GetXSDType()
{
return m_xsdType;
}
// Sets the type name from a wide buffer, converting to ANSI.
inline HRESULT CCodeType::SetCodeTypeName(const wchar_t *wszCodeType, int cchCodeType)
{
if (!wszCodeType)
{
return E_FAIL;
}
CStringW strW;
strW.SetString(wszCodeType, cchCodeType);
m_strCodeType = CW2A(const_cast<LPWSTR>((LPCWSTR) strW));
return S_OK;
}
inline HRESULT CCodeType::SetCodeTypeName(const CStringW& strCodeType)
{
m_strCodeType = strCodeType;
return S_OK;
}
inline const CStringA& CCodeType::GetCodeTypeName()
{
return m_strCodeType;
}
inline CStringA& CCodeType::GetSafeCodeTypeName()
{
return m_strSafeCodeType;
}
inline HRESULT CCodeType::SetSafeCodeTypeName(const CStringA& strName)
{
m_strSafeCodeType = strName;
return S_OK;
}
// Member-wise assignment of all three members.
inline const CCodeType& CCodeType::operator=(const CCodeType& that)
{
if (this != &that)
{
m_strCodeType = that.m_strCodeType;
m_strSafeCodeType = that.m_strSafeCodeType;
m_xsdType = that.m_xsdType;
}
return (*this);
}
//////////////////////////////////////////////////////////////////
//
// CCodeElementContainer
//
//////////////////////////////////////////////////////////////////
// Trivial accessors; the CStringW overloads convert to ANSI via CW2A.
inline DWORD CCodeElementContainer::GetCallFlags()
{
return m_dwCallFlags;
}
inline CStringA& CCodeElementContainer::GetSafeName()
{
return m_strSafeName;
}
inline void CCodeElementContainer::SetCallFlags(DWORD dwCallFlags)
{
m_dwCallFlags = dwCallFlags;
}
inline const CStringA& CCodeElementContainer::GetName()
{
return m_strName;
}
inline HRESULT CCodeElementContainer::SetName(const CStringA& str)
{
m_strName = str;
return S_OK;
}
inline HRESULT CCodeElementContainer::SetName(const CStringW& str)
{
m_strName = CW2A( const_cast<LPWSTR>((LPCWSTR) str) );
return S_OK;
}
inline const CStringA& CCodeElementContainer::GetResponseName()
{
return m_strResponseName;
}
inline HRESULT CCodeElementContainer::SetResponseName(const CStringA& str)
{
m_strResponseName = str;
return S_OK;
}
inline HRESULT CCodeElementContainer::SetResponseName(const CStringW& str)
{
m_strResponseName = CW2A( const_cast<LPWSTR>((LPCWSTR) str) );
return S_OK;
}
inline const CStringA& CCodeElementContainer::GetSendName()
{
return m_strSendName;
}
inline HRESULT CCodeElementContainer::SetSendName(const CStringA& str)
{
m_strSendName = str;
return S_OK;
}
inline HRESULT CCodeElementContainer::SetSendName(const CStringW& str)
{
m_strSendName = CW2A( const_cast<LPWSTR>((LPCWSTR) str) );
return S_OK;
}
inline CStringA& CCodeElementContainer::GetNamespace()
{
return m_strNamespace;
}
inline HRESULT CCodeElementContainer::SetNamespace(const CStringA& str)
{
m_strNamespace = str;
return S_OK;
}
inline HRESULT CCodeElementContainer::SetNamespace(const CStringW& str)
{
m_strNamespace = CW2A( const_cast<LPWSTR>((LPCWSTR) str) );
return S_OK;
}
inline const CStringA& CCodeElementContainer::GetSoapAction()
{
return m_strSoapAction;
}
inline HRESULT CCodeElementContainer::SetSoapAction(const CStringA& str)
{
m_strSoapAction = str;
return S_OK;
}
inline HRESULT CCodeElementContainer::SetSoapAction(const CStringW& str)
{
m_strSoapAction = CW2A( const_cast<LPWSTR>((LPCWSTR) str) );
return S_OK;
}
inline POSITION CCodeElementContainer::GetFirstElement()
{
return m_elements.GetHeadPosition();
}
inline CCodeTypedElement * CCodeElementContainer::GetNextElement(POSITION &pos)
{
return m_elements.GetNext(pos);
}
// Appends an element; with p == NULL a new node is allocated. CAutoPtr
// guards the allocation so it is freed if AddTail fails; on success the
// pointer is detached and ownership passes to the list. Returns NULL on
// allocation/insert failure.
inline CCodeTypedElement * CCodeElementContainer::AddElement(CCodeTypedElement * p)
{
CAutoPtr<CCodeTypedElement> spOut;
if (p == NULL)
{
spOut.Attach( new CCodeTypedElement );
p = spOut;
}
if (p != NULL)
{
if (m_elements.AddTail(p) != NULL)
{
spOut.Detach();
return p;
}
}
return NULL;
}
inline int CCodeElementContainer::GetElementCount()
{
return (int)m_elements.GetCount();
}
inline POSITION CCodeElementContainer::GetFirstHeader()
{
return m_headers.GetHeadPosition();
}
inline CCodeTypedElement * CCodeElementContainer::GetNextHeader(POSITION &pos)
{
return m_headers.GetNext(pos);
}
// Same ownership protocol as AddElement, targeting the header list.
inline CCodeTypedElement * CCodeElementContainer::AddHeader(CCodeTypedElement * p)
{
CAutoPtr<CCodeTypedElement> spOut;
if (p == NULL)
{
spOut.Attach( new CCodeTypedElement );
p = spOut;
}
if (p != NULL)
{
if (m_headers.AddTail(p) != NULL)
{
spOut.Detach();
return p;
}
}
return NULL;
}
inline int CCodeElementContainer::GetHeaderCount()
{
return (int)m_headers.GetCount();
}
//////////////////////////////////////////////////////////////////
//
// CCodeProxy
//
//////////////////////////////////////////////////////////////////
// Begins iteration over the proxy's struct definitions; NULL when empty.
inline POSITION CCodeProxy::GetFirstStruct()
{
	return m_structs.GetHeadPosition();
}
// Returns the struct at pos and advances pos (NULL past the end).
inline CCodeStruct * CCodeProxy::GetNextStruct(POSITION &pos)
{
	return m_structs.GetNext(pos);
}
// Appends a struct descriptor to m_structs. When p is NULL a new
// CCodeStruct is allocated and owned by spNew until the list accepts
// it. Returns the appended struct, or NULL on failure.
inline CCodeStruct * CCodeProxy::AddStruct(CCodeStruct * p)
{
	CAutoPtr<CCodeStruct> spNew;
	if (p == NULL)
	{
		spNew.Attach( new CCodeStruct );
		p = spNew;
	}
	if (p == NULL)
	{
		return NULL;
	}
	if (m_structs.AddTail(p) == NULL)
	{
		// Not appended: spNew (when owning) frees the allocation.
		return NULL;
	}
	spNew.Detach(); // list owns the struct now
	return p;
}
// Begins iteration over the proxy's enum definitions; NULL when empty.
inline POSITION CCodeProxy::GetFirstEnum()
{
	return m_enums.GetHeadPosition();
}
// Returns the enum at pos and advances pos (NULL past the end).
inline CCodeEnum * CCodeProxy::GetNextEnum(POSITION &pos)
{
	return m_enums.GetNext(pos);
}
// Appends an enum descriptor to m_enums. When p is NULL a new CCodeEnum
// is allocated and owned by spNew until the list accepts it. Returns
// the appended enum, or NULL on failure.
inline CCodeEnum * CCodeProxy::AddEnum(CCodeEnum * p)
{
	CAutoPtr<CCodeEnum> spNew;
	if (p == NULL)
	{
		spNew.Attach( new CCodeEnum );
		p = spNew;
	}
	if (p == NULL)
	{
		return NULL;
	}
	if (m_enums.AddTail(p) == NULL)
	{
		// Not appended: spNew (when owning) frees the allocation.
		return NULL;
	}
	spNew.Detach(); // list owns the enum now
	return p;
}
// Begins iteration over the proxy's function definitions; NULL when empty.
inline POSITION CCodeProxy::GetFirstFunction()
{
	return m_functions.GetHeadPosition();
}
// Returns the function at pos and advances pos (NULL past the end).
inline CCodeFunction * CCodeProxy::GetNextFunction(POSITION &pos)
{
	return m_functions.GetNext(pos);
}
// Appends a function descriptor to m_functions. When p is NULL a new
// CCodeFunction is allocated and owned by spNew until the list accepts
// it. Returns the appended function, or NULL on failure.
inline CCodeFunction * CCodeProxy::AddFunction(CCodeFunction * p)
{
	CAutoPtr<CCodeFunction> spNew;
	if (p == NULL)
	{
		spNew.Attach( new CCodeFunction );
		p = spNew;
	}
	if (p == NULL)
	{
		return NULL;
	}
	if (m_functions.AddTail(p) == NULL)
	{
		// Not appended: spNew (when owning) frees the allocation.
		return NULL;
	}
	spNew.Detach(); // list owns the function now
	return p;
}
// Begins iteration over the proxy-level SOAP headers; NULL when empty.
inline POSITION CCodeProxy::GetFirstHeader()
{
	return m_headers.GetHeadPosition();
}
// Returns the header at pos and advances pos (NULL past the end).
inline CCodeTypedElement * CCodeProxy::GetNextHeader(POSITION &pos)
{
	return m_headers.GetNext(pos);
}
// Appends a proxy-level header to m_headers. When p is NULL a new
// CCodeTypedElement is allocated and owned by spNew until the list
// accepts it. Returns the appended header, or NULL on failure.
inline CCodeTypedElement * CCodeProxy::AddHeader(CCodeTypedElement * p)
{
	CAutoPtr<CCodeTypedElement> spNew;
	if (p == NULL)
	{
		spNew.Attach( new CCodeTypedElement );
		p = spNew;
	}
	if (p == NULL)
	{
		return NULL;
	}
	if (m_headers.AddTail(p) == NULL)
	{
		// Not appended: spNew (when owning) frees the allocation.
		return NULL;
	}
	spNew.Detach(); // list owns the header now
	return p;
}
// Read-only accessor for the generated proxy class name.
// NOTE(review): GetClassName is also a Win32 API macro (expands to
// GetClassNameA/W when windows.h is in scope); presumably the include
// order here avoids the clash -- confirm.
inline const CStringA& CCodeProxy::GetClassName()
{
	return m_strClassName;
}
// Sets the class name from a wide string; CreateSafeCppName presumably
// sanitizes it into a valid C++ identifier. Propagates its HRESULT.
inline HRESULT CCodeProxy::SetClassName(const CStringW &strName)
{
	return CreateSafeCppName(m_strClassName, strName);
}
// Narrow-string overload of SetClassName; same sanitization path.
inline HRESULT CCodeProxy::SetClassName(const char * szName)
{
	return CreateSafeCppName(m_strClassName, szName);
}
// Read-only accessor for the WSDL service name.
inline const CStringA& CCodeProxy::GetServiceName()
{
	return m_strServiceName;
}
// Sets the service name from a wide string, sanitized through
// CreateSafeCppName (same path as SetClassName). Propagates its HRESULT.
inline HRESULT CCodeProxy::SetServiceName(const CStringW &strName)
{
	return CreateSafeCppName(m_strServiceName, strName);
}
// Narrow-string overload of SetServiceName; same sanitization path.
inline HRESULT CCodeProxy::SetServiceName(const char * szName)
{
	return CreateSafeCppName(m_strServiceName, szName);
}
// Read-only accessor for the service endpoint address URI.
inline const CStringA& CCodeProxy::GetAddressUri()
{
	return m_strAddressUri;
}
// Stores the endpoint address URI from a wide (UTF-16) string,
// narrowing it to ANSI via CW2A. Always returns S_OK.
// CW2A accepts LPCWSTR directly, so no const_cast is required.
inline HRESULT CCodeProxy::SetAddressUri(const CStringW &strName)
{
	m_strAddressUri = CW2A( static_cast<LPCWSTR>( strName ) );
	return S_OK;
}
// Stores the endpoint address URI from an ANSI C string. Always S_OK.
inline HRESULT CCodeProxy::SetAddressUri(const char * szName)
{
	m_strAddressUri = szName;
	return S_OK;
}
// Read-only accessor for the WSDL target namespace.
inline const CStringA& CCodeProxy::GetTargetNamespace()
{
	return m_strTargetNamespace;
}
// Stores the target namespace from a wide (UTF-16) string, narrowing
// it to ANSI via CW2A. Always returns S_OK.
// CW2A accepts LPCWSTR directly, so no const_cast is required.
inline HRESULT CCodeProxy::SetTargetNamespace(const CStringW &strName)
{
	m_strTargetNamespace = CW2A( static_cast<LPCWSTR>( strName ) );
	return S_OK;
}
// Stores the target namespace from an ANSI C string. Always S_OK.
inline HRESULT CCodeProxy::SetTargetNamespace(const char * szName)
{
	m_strTargetNamespace = szName;
	return S_OK;
}
<|start_filename|>test/solidus.js<|end_filename|>
const DEFAULT_ENCODING = 'UTF8';
const FILESYSTEM_DELAY = 1100;
var path = require('path');
var assert = require('assert');
var async = require('async');
var fs = require('fs');
var moment = require('moment');
var request = require('supertest');
var nock = require('nock');
var zlib = require('zlib');
var timekeeper = require('timekeeper');
var solidus = require('../solidus.js');
var SolidusServer = require('../lib/server.js');
var Page = require('../lib/page.js');
var original_path = __dirname;
var site1_path = path.join( original_path, 'fixtures', 'site 1' );
var site2_path = path.join( original_path, 'fixtures', 'site2' );
var normalizePath = function( file_path ){
return file_path.replace( /\//g, path.sep );
};
describe( 'Solidus', function(){
describe( 'production', function(){
var solidus_server;
var original_redirects = [];
beforeEach( function( done ){
process.chdir( site1_path );
// Generate time-based redirects
// These are used to ensure that temporary redirects are properly checked
original_redirects = fs.readFileSync( 'redirects.js', DEFAULT_ENCODING );
delete require.cache[require.resolve(site1_path + '/redirects.js')];
var original_redirects_arr = require(site1_path + '/redirects.js');
var redirect_date_format = 'YYYY-MM-DD HH:mm:ss';
var temporal_redirects = [{
start: moment().add( 's', 5 ).format( redirect_date_format ),
from: '/future-redirect',
to: '/'
}, {
start: moment().subtract( 's', 5 ).format( redirect_date_format ),
end: moment().add( 's', 5 ).format( redirect_date_format ),
from: '/current-redirect',
to: '/'
}, {
start: moment().subtract( 's', 10 ).format( redirect_date_format ),
end: moment().subtract( 's', 5 ).format( redirect_date_format ),
from: '/past-redirect',
to: '/'
}];
var overlapping_redirects = [{
start: moment().add( 's', 5 ).format( redirect_date_format ),
from: '/overlapping-redirect',
to: '/overlapping-redirect-future'
}, {
start: moment().subtract( 's', 5 ).format( redirect_date_format ),
end: moment().add( 's', 5 ).format( redirect_date_format ),
from: '/overlapping-redirect',
to: '/overlapping-redirect-current'
}, {
start: moment().subtract( 's', 10 ).format( redirect_date_format ),
end: moment().subtract( 's', 5 ).format( redirect_date_format ),
from: '/overlapping-redirect',
to: '/overlapping-redirect-past'
}];
var combined_redirects = JSON.stringify( temporal_redirects.concat( overlapping_redirects ) );
fs.appendFileSync( 'redirects.js', ';module.exports = module.exports.concat(' + combined_redirects + ');', DEFAULT_ENCODING );
// mock http endpoints for resources
nock('https://solid.us').get('/basic/1').reply( 200, { test: true } );
nock('https://solid.us').get('/basic/2').reply( 200, { test: true } );
nock('https://solid.us').get('/dynamic/segment/3').reply( 200, { test: true } );
nock('https://solid.us').get('/resource/options/url').reply( 200, { test: true } );
nock('https://solid.us').get('/resource/options/query?test=true').reply( 200, { test: true } );
nock('https://solid.us').get('/resource/options/dynamic/query?test=3').reply( 200, { test: true } );
nock('https://solid.us').get('/resource/options/double/dynamic/query?a=%2C&b=%2C&test2=4&c=%2C&d=%252C&test=3').reply( 200, { test: true } );
nock('https://solid.us').get('/centralized/auth/query').reply( 200, { test: true } );
nock('https://solid.us').get('/resource/options/headers').matchHeader( 'key', '12345' ).reply( 200, { test: true } );
nock('https://a.solid.us').get('/centralized/auth').matchHeader( 'key', '12345' ).reply( 200, { test: true } );
nock('https://b.solid.us').get('/centralized/auth/query?key=12345').reply( 200, { test: true } );
// empty dynamic segments
nock('https://solid.us').get('/dynamic/segment/').reply( 200, { test: false } );
nock('https://solid.us').get('/resource/options/dynamic/query?test=').reply( 200, { test: false } );
nock('https://solid.us').get('/resource/options/double/dynamic/query?a=%2C&b=%2C&test2=&c=%2C&d=%252C&test=').reply( 200, { test: false } );
async.parallel([
// compressed resources
function( callback ){
zlib.gzip( '{"test":true}', function( _, result ){
nock('https://solid.us').get('/compressed/gzip').reply( 200, result, { 'Content-Encoding': 'gzip' } );
callback();
});
},
function( callback ){
zlib.deflate( '{"test":true}', function( _, result ){
nock('https://solid.us').get('/compressed/deflate').reply( 200, result, { 'Content-Encoding': 'deflate' } );
callback();
});
}
],
function(){
solidus_server = solidus.start({
log_level: -1,
port: 9009
});
solidus_server.on( 'ready', done );
});
});
afterEach( function(){
solidus_server.stop();
fs.writeFileSync( 'redirects.js', original_redirects, DEFAULT_ENCODING );
process.chdir( original_path );
});
it( 'Starts a new http server', function( done ){
request( solidus_server.router )
.get('/')
.end( function( err, res ){
if( err ) throw err;
done();
});
});
it( 'Creates routes based on the contents of /views', function( done ){
var s_request = request( solidus_server.router );
async.parallel([
function( callback ){
s_request.get('/').expect( 200, callback );
},
function( callback ){
s_request.get('/layout').expect( 200, callback );
}
], function( err, results ){
if( err ) throw err;
done();
});
});
it( 'Creates routes with dynamic segments', function( done ){
var s_request = request( solidus_server.router );
async.parallel([
function( callback ){
s_request.get('/dynamic/1').expect( 200, callback );
},
function( callback ){
s_request.get('/dynamic/2').expect( 200, callback );
}
], function( err, results ){
if( err ) throw err;
done();
});
});
it( 'Creates routes for page contexts', function( done ){
var s_request = request( solidus_server.router );
async.parallel([
function( callback ){
s_request.get('/.json')
.expect( 'Content-Type', /json/ )
.expect( 200 )
.end( function( err, res ){
assert( res.body.page.title === 'test' );
assert( res.body.parameters );
assert( res.body.query );
callback( err );
});
},
function( callback ){
s_request.get('/layout.json?test=true')
.expect( 'Content-Type', /json/ )
.expect( 200 )
.end( function( err, res ){
assert( res.body.page );
assert( res.body.parameters );
assert( res.body.query.test );
callback( err );
});
},
function( callback ){
s_request.get('/dynamic/1.json')
.expect( 'Content-Type', /json/ )
.expect( 200 )
.end( function( err, res ){
assert( res.body.page );
assert( res.body.parameters.segment == '1' );
assert( res.body.query );
callback( err );
});
},
function( callback ){
s_request.get('/dynamic/2.json')
.expect( 'Content-Type', /json/ )
.expect( 200 )
.end( function( err, res ){
assert( res.body.page );
assert( res.body.parameters.segment == '2' );
assert( res.body.query );
callback( err );
});
}
], function( err, results ){
if( err ) throw err;
done();
});
});
it( 'Returns 404s for unmatched routes', function( done ){
var s_request = request( solidus_server.router );
s_request.get('/nonexistent-url')
.expect( 404 )
.end( function( err, res ){
assert(res.text.indexOf('Not here!') > -1);
if( err ) throw err;
done();
});
});
it( 'Makes URL information available in page context', function( done ){
var s_request = request( solidus_server.router );
s_request.get('/.json')
.expect( 'Content-Type', /json/ )
.expect( 200 )
.end( function( err, res ){
assert( res.body.url );
assert( res.body.url.path === '/.json' );
if( err ) throw err;
done();
});
});
it( 'Sets noindex header for preview mode', function( done ){
var s_request = request( solidus_server.router );
s_request.get('/?is_preview=true')
.expect( 'x-robots-tag', /noindex/ )
.end( function( err, res ){
if (err) throw err;
done();
});
});
it( 'Finds the list of partials used by each page', function(done) {
var partials = ['partial1', 'partial2', 'partial3', 'partial/4', 'partial9', "partial'10", 'partial11', 'partial"12'];
assert.deepEqual(solidus_server.views[solidus_server.pathFromPartialName('multiple_partials')].partials, partials);
done();
});
it( 'Fetches resources and adds them to the page context', function( done ){
var s_request = request( solidus_server.router );
async.parallel([
function( callback ){
s_request.get('/.json?resource_test=3&resource_test2=4')
.expect( 'Content-Type', /json/ )
.expect( 200 )
.end( function( err, res ){
assert( res.body.resources.basic.test );
assert( res.body.resources.basic2.test );
assert( res.body.resources['dynamic-segment'].test );
assert( res.body.resources['resource-options-url'].test );
assert( res.body.resources['resource-options-query'].test );
assert( res.body.resources['resource-options-headers'].test );
assert( res.body.resources['resource-options-double-dynamic-query'].test );
assert( res.body.resources['resource-options-dynamic-query'].test );
assert( res.body.resources['centralized-auth'].test );
assert( res.body.resources['centralized-auth-query'].test );
assert( res.body.resources['compressed-gzip'].test );
assert( res.body.resources['compressed-deflate'].test );
callback( err );
});
}
], function( err, results ){
if( err ) throw err;
done();
});
});
it( 'Sets request url as resource referer', function(done) {
nock('https://solid.us').get('/cache?a=1').matchHeader('Referer', 'http://some.domain.com:123/caching?a=1').reply( 200, { test: 1 } );
nock('https://solid.us').get('/cache?a=2').matchHeader('Referer', 'http://some.domain.com:123/caching?a=1').reply( 200, { test: 2 } );
var s_request = request(solidus_server.router);
s_request.get('/caching?a=1')
.set('Host', 'some.domain.com:123')
.expect(200)
.end(function(err, res) {
if (err) throw err;
done();
});
});
it('Renders an error with error.hbs when a mandatory resource has an error', function(done) {
var s_request = request(solidus_server.router);
async.parallel([
function(callback) {
// Bad fetched data
nock('https://solid.us').get('/error/mandatory?test=1').reply(200, 'Not JSON');
nock('https://solid.us').get('/error/optional?test=1').reply(200, {test: 1});
s_request.get('/with_resource_error?test=1')
.expect(500)
.end(function(err, res) {
assert(res.text.indexOf('Oh no (500)!') > -1);
callback(err);
});
},
function(callback) {
// 200 status and error in response message
nock('https://solid.us').get('/error/mandatory?test=2').reply(200, {status: 'error', error: 'Could not find such a thing'});
nock('https://solid.us').get('/error/optional?test=2').reply(200, {test: 2});
s_request.get('/with_resource_error?test=2')
.expect(404)
.end(function(err, res) {
assert(res.text.indexOf('Not here!') > -1);
callback(err);
});
},
function(callback) {
// Bad status
nock('https://solid.us').get('/error/mandatory?test=3').reply(401, {error: 'Nice try'});
nock('https://solid.us').get('/error/optional?test=3').reply(200, {test: 3});
s_request.get('/with_resource_error?test=3')
.expect(401)
.end(function(err, res) {
assert(res.text.indexOf('Oh no (401)!') > -1);
callback(err);
});
},
function(callback) {
// No error with mandatory resource
nock('https://solid.us').get('/error/mandatory?test=4').reply(200, {test: 4});
nock('https://solid.us').get('/error/optional?test=4').reply(401, {error: 'Nice try'});
s_request.get('/with_resource_error.json?test=4')
.expect(200)
.end(function(err, res) {
assert.equal(res.body.resources.mandatory.test, 4);
assert.equal(res.body.resources.optional, undefined);
callback(err);
});
},
function(callback) {
// Missing static resource
nock('https://solid.us').get('/cache/1').reply(404, {error: 'Could not find such a thing'});
nock('https://solid.us').get('/cache/2').reply(404, {error: 'Could not find such a thing'});
s_request.get('/with_all_features')
.expect(500)
.end(function(err, res) {
assert(res.text.indexOf('Oh no (500)!') > -1);
callback(err);
});
},
function(callback) {
// No errors with preview parameter
nock('https://solid.us').get('/error/mandatory?test=5').reply(401, {error: 'Nice try'});
nock('https://solid.us').get('/error/optional?test=5').reply(401, {error: 'Nice try'});
s_request.get('/with_resource_error.json?test=5&is_preview=true')
.expect(200)
.end(function(err, res) {
assert.equal(res.body.resources.mandatory, undefined);
assert.equal(res.body.resources.optional, undefined);
callback(err);
});
},
], function(err, results) {
if (err) throw err;
done();
});
});
it( 'Preprocesses the context of pages', function( done ){
var s_request = request( solidus_server.router );
async.parallel([
function( callback ){
s_request.get('/.json')
.expect( 'Content-Type', /json/ )
.expect( 200 )
.end( function( err, res ){
if( err ) throw err;
assert( res.body.test === true );
callback( err );
});
},
], function( err, results ){
if( err ) throw err;
done();
});
});
it('Renders an error when the preprocessor has an error', function(done) {
var s_request = request(solidus_server.router);
async.parallel([
function(callback) {
s_request.get('/with_preprocessor_error?error=exception')
.expect(500)
.end(function(err, res) {
assert(res.text.indexOf('Oh no (500)!') > -1);
callback();
});
},
function(callback) {
s_request.get('/with_preprocessor_error?error=status_code')
.expect(401)
.end(function(err, res) {
assert(res.text.indexOf('Oh no (401)!') > -1);
callback();
});
},
function(callback) {
s_request.get('/with_preprocessor_error?error=redirect')
.expect(302)
.expect('location', '/redirected', callback);
},
function(callback) {
s_request.get('/with_preprocessor_error?error=redirect_permanent')
.expect(301)
.expect('location', '/redirected', callback);
},
function(callback) {
s_request.get('/with_preprocessor_error?error=invalid_redirect_array')
.expect(200)
.end(function(err, res) {
if( err ) throw err;
assert(res.text.indexOf('No error happened!') > -1);
callback();
});
},
function(callback) {
s_request.get('/with_preprocessor_error?error=no_context')
.expect(500)
.end(function(err, res) {
assert(res.text.indexOf('Oh no (500)!') > -1);
callback();
});
}
], function(err) {
if (err) throw err;
done();
});
});
it('Ignores preprocessor errors in preview mode', function(done) {
var s_request = request(solidus_server.router);
async.parallel([
function(callback) {
s_request.get('/with_preprocessor_error?error=exception&is_preview=true')
.expect(200)
.end(function(err, res) {
if( err ) throw err;
assert(res.text.indexOf('No error happened!') > -1);
callback();
});
},
function(callback) {
s_request.get('/with_preprocessor_error?error=status_code&is_preview=true')
.expect(200)
.end(function(err, res) {
if( err ) throw err;
assert(res.text.indexOf('No error happened!') > -1);
callback();
});
},
function(callback) {
s_request.get('/with_preprocessor_error?error=redirect&is_preview=true')
.expect(200)
.end(function(err, res) {
if( err ) throw err;
assert(res.text.indexOf('No error happened!') > -1);
callback();
});
},
function(callback) {
s_request.get('/with_preprocessor_error?error=redirect_permanent&is_preview=true')
.expect(200)
.end(function(err, res) {
if( err ) throw err;
assert(res.text.indexOf('No error happened!') > -1);
callback();
});
},
function(callback) {
s_request.get('/with_preprocessor_error?error=invalid_redirect_array&is_preview=true')
.expect(200)
.end(function(err, res) {
if( err ) throw err;
assert(res.text.indexOf('No error happened!') > -1);
callback();
});
},
function(callback) {
s_request.get('/with_preprocessor_error?error=no_context&is_preview=true')
.expect(200)
.end(function(err, res) {
if( err ) throw err;
assert(res.text.indexOf('No error happened!') > -1);
callback();
});
}
], function(err) {
if (err) throw err;
done();
});
});
it('Renders error.hbs when an error occurs', function(done) {
var s_request = request(solidus_server.router);
async.parallel([
// Error while rendering page
function(callback) {
s_request.get('/with_preprocessor_error?error=exception')
.expect(500)
.end(function(err, res) {
if( err ) throw err;
assert(res.text.indexOf('<title>Original title</title>') > -1); // Original page title
assert(res.text.indexOf('Oh no (500)!') > -1); // Error page content
assert(res.text.indexOf('Le undefined sacrebleu !') > -1); // Helper used in error page
callback();
});
},
// Missing page
function(callback) {
s_request.get('/doesnotexist')
.expect(404)
.end(function(err, res) {
if( err ) throw err;
assert(res.text.indexOf('<title>Error title</title>') > -1); // Error page title
assert(res.text.indexOf('Not here!') > -1); // Error page content
assert(res.text.indexOf('Le undefined sacrebleu !') > -1); // Helper used in error page
callback();
});
},
], function(err) {
if (err) throw err;
done();
});
});
it( 'Serves assets in /assets', function( done ){
var s_request = request( solidus_server.router );
async.parallel([
function( callback ){
s_request.get('/scripts/test.js')
.expect( 200, callback )
.expect( 'cache-control', 'public, max-age=31536000' );
},
function( callback ){
s_request.get('/styles/test.css').expect( 200, callback );
}
], function( err, results ){
if( err ) throw err;
done();
});
});
it( 'Creates redirects based on the contents of redirects.js', function( done ){
var s_request = request( solidus_server.router );
async.parallel([
function( callback ){
s_request.get('/redirect1').expect( 302, callback );
},
function( callback ){
s_request.get('/redirect2').expect( 302, callback );
},
function( callback ){
s_request.get('/redirect3').expect( 404, callback );
},
function( callback ){
s_request.get('/redirect4').expect( 404, callback );
},
function( callback ){
s_request.get('/redirect5').expect( 301, callback );
},
function( callback ){
s_request.get('/redirect6/old/path').expect( 'location', '/new/path/old', callback );
},
function( callback ){
s_request.get('/redirect7/12-34-56-78').expect( 'location', '/new/56/12/78', callback );
},
function( callback ){
s_request.get('/redirect8/old/path').expect( 'location', '/new/path/OLD', callback );
},
function( callback ){
s_request.get('/redirect9/12-34-56-78').expect( 'location', '/new/56/12/1078', callback );
},
function( callback ){
s_request.get('/match-http-root?with=params').set('Host', 'solidusjs.com').expect( 'location', '/new/match-http-root', callback );
},
function( callback ){
// Bad protocol
s_request.get('/match-https-root?with=params').set('Host', 'solidusjs.com').expect( 404, callback );
},
function( callback ){
// Bad host
s_request.get('/match-http-root?with=params').set('Host', 'www.solidusjs.com').expect( 404, callback );
},
function( callback ){
s_request.get('/some/path?with=params').set('Host', 'no-path.com').expect( 'location', 'http://www.no-path.com/some/path?with=params', callback );
},
function( callback ){
s_request.get('/to-https-www?with=params').set('Host', 'solidusjs.com').expect( 'location', 'https://www.solidusjs.com/to-https-www?with=params', callback );
},
function( callback ){
s_request.get('/to-https-www-url/old-path?with=params').set('Host', 'solidusjs.com').expect( 'location', 'https://www.solidusjs.com/new/url/old-path', callback );
},
function( callback ){
s_request.get('/past-redirect').expect( 404, callback );
},
function( callback ){
s_request.get('/current-redirect').expect( 302, callback );
},
function( callback ){
s_request.get('/future-redirect').expect( 404, callback );
},
function( callback ){
s_request.get('/overlapping-redirect').expect( 'location', '/overlapping-redirect-current', callback );
}
], function( err, results ){
if( err ) throw err;
done();
});
});
it( 'Sets the default layout', function(){
assert( solidus_server.handlebars.defaultLayout === 'layout' );
});
it( 'Uses the layout closest to a page view', function( done ){
var s_request = request( solidus_server.router );
async.parallel([
function( callback ){
s_request
.get('/deeply/nested/page/using/a_layout.json')
.expect( 200 )
.end( function( err, res ){
assert( res.body.layout === normalizePath('deeply/nested/layout.hbs') );
callback( err );
});
},
function( callback ){
s_request
.get('/deeply/nested/page.json')
.expect( 200 )
.end( function( err, res ){
assert( res.body.layout === normalizePath('deeply/nested/layout.hbs') );
callback( err );
});
}
], function( err, results ){
if( err ) throw err;
done();
});
});
it( 'Makes partials available even if they have the same name in different directories', function( done ){
var s_request = request( solidus_server.router );
async.parallel([
function( callback ){
s_request
.get('/partial_holder/')
.expect( 200 )
.end( function( err, res ){
assert( res.text == 'partial.hbs' );
callback( err );
});
},
function( callback ){
s_request
.get('/partial_holder2/')
.expect( 200 )
.end( function( err, res ){
assert( res.text == 'deeply/partial.hbs' );
callback( err );
});
}
], function( err, results ){
if( err ) throw err;
done();
});
});
it( 'Makes partials available from node_modules', function( done ){
request(solidus_server.router)
.get('/partial_holder3/')
.expect(200)
.end(function(err, res) {
assert.ifError(err);
assert.equal(res.text, 'partial.hbs\nPartial from external module.\nPartial with same path as external partial (site 1/node_modules/extra/conflict).');
done();
});
});
describe('Sends appropriate cache headers with pages', function() {
var now;
beforeEach(function() {
now = new Date().getTime()
timekeeper.freeze(now);
});
afterEach(function() {
timekeeper.reset();
});
it( '5 minutes for pages without resources', function( done ){
var s_request = request( solidus_server.router );
s_request
.get('/partial')
.expect( 'cache-control', 'public, max-age=300, stale-while-revalidate=86400, stale-if-error=86400' )
.expect( 'expires', new Date(now + 300 * 1000).toUTCString() )
.end( function( err, res ){
if( err ) throw err;
done();
});
});
it( 'nearest resource expiration for pages with resources', function( done ){
nock('https://solid.us').get('/cache?a=1').reply(200, {test: 1}, {'Cache-Control': 'max-age=111'});
nock('https://solid.us').get('/cache?a=2').reply(200, {test: 2}, {'Cache-Control': 'max-age=222'});
var s_request = request( solidus_server.router );
s_request
.get('/caching')
.expect( 'cache-control', 'public, max-age=111, stale-while-revalidate=86400, stale-if-error=86400' )
.expect( 'expires', new Date(now + 111 * 1000).toUTCString() )
.end( function( err, res ){
if( err ) throw err;
done();
});
});
it( 'ignores resources with expiration higher than 5 minutes', function( done ){
nock('https://solid.us').get('/cache?a=1').reply(200, {test: 1}, {'Cache-Control': 'max-age=1111'});
nock('https://solid.us').get('/cache?a=2').reply(200, {test: 2}, {'Cache-Control': 'max-age=222'});
var s_request = request( solidus_server.router );
s_request
.get('/caching')
.expect( 'cache-control', 'public, max-age=222, stale-while-revalidate=86400, stale-if-error=86400' )
.expect( 'expires', new Date(now + 222 * 1000).toUTCString() )
.end( function( err, res ){
if( err ) throw err;
done();
});
});
it( 'ignores resources without expiration', function( done ){
nock('https://solid.us').get('/cache?a=1').reply(200, {test: 1});
nock('https://solid.us').get('/cache?a=2').reply(200, {test: 2}, {'Cache-Control': 'max-age=222'});
var s_request = request( solidus_server.router );
s_request
.get('/caching')
.expect( 'cache-control', 'public, max-age=222, stale-while-revalidate=86400, stale-if-error=86400' )
.expect( 'expires', new Date(now + 222 * 1000).toUTCString() )
.end( function( err, res ){
if( err ) throw err;
done();
});
});
it( 'ignores bad resources with expiration', function( done ){
nock('https://solid.us').get('/cache?a=1').reply(400, {test: 1}, {'Cache-Control': 'max-age=111'});
nock('https://solid.us').get('/cache?a=2').reply(200, {test: 2}, {'Cache-Control': 'max-age=222'});
var s_request = request( solidus_server.router );
s_request
.get('/caching')
.expect( 'cache-control', 'public, max-age=222, stale-while-revalidate=86400, stale-if-error=86400' )
.expect( 'expires', new Date(now + 222 * 1000).toUTCString() )
.end( function( err, res ){
if( err ) throw err;
done();
});
});
})
it( 'Runs helpers after preprocessors', function( done ){
var s_request = request( solidus_server.router );
s_request
.get('/helpers')
.end( function( err, res ){
assert( res.text.indexOf('SOLIDUS') > -1 ); // Handlebars-helper
assert( res.text.indexOf('Le Solidus sacrebleu !') > -1 ); // Site helper
done();
});
});
it('Sets the X-Powered-By header for HTML requests', function(done) {
var s_request = request(solidus_server.router);
s_request
.get('/')
.expect('X-Powered-By', 'Solidus/' + require('../package.json').version)
.end(function(err, res) {
if (err) throw err;
done();
});
});
it('Sets the X-Powered-By header for JSON requests', function(done) {
var s_request = request(solidus_server.router);
s_request
.get('/.json')
.expect('X-Powered-By', 'Solidus/' + require('../package.json').version)
.end(function(err, res) {
if (err) throw err;
done();
});
});
it('Sets the X-Powered-By header for 404s', function(done) {
var s_request = request(solidus_server.router);
s_request
.get('/nonexistent-url')
.expect('X-Powered-By', 'Solidus/' + require('../package.json').version)
.end(function(err, res) {
if (err) throw err;
done();
});
});
describe('/api/resource.json', function() {
beforeEach(function() {
var now = new Date(1397524638000); // Test date rounded to the second, to simplify comparisons
timekeeper.freeze(now);
});
afterEach(function() {
timekeeper.reset();
});
it('fetches and renders the url in the query string', function(done) {
nock('https://solid.us').get('/api-resource').reply(200, {test: 2});
var s_request = request(solidus_server.router);
s_request.get('/api/resource.json?url=https://solid.us/api-resource')
.expect(200)
.expect('Content-Type', /json/)
.end(function(err, res) {
if (err) throw err;
assert.deepEqual(res.body, {test: 2});
done();
});
});
it('renders an error when missing url', function(done) {
var s_request = request(solidus_server.router);
s_request.get('/api/resource.json')
.expect(400)
.expect('Content-Type', /json/)
.end(function(err, res) {
if (err) throw err;
assert.deepEqual(res.body, {error: "Missing 'url' parameter"});
done();
});
});
it('renders an error when bad url', function(done) {
var s_request = request(solidus_server.router);
s_request.get('/api/resource.json?url=not-a-url')
.expect(400)
.expect('Content-Type', /json/)
.end(function(err, res) {
if (err) throw err;
assert.deepEqual(res.body, {error: "Invalid 'url' parameter"});
done();
});
});
it('fetches and renders an error when resource is invalid', function(done) {
nock('https://solid.us').get('/api-resource').reply(200, 'this is not json');
var s_request = request(solidus_server.router);
s_request.get('/api/resource.json?url=https://solid.us/api-resource')
.expect(400)
.expect('Content-Type', /json/)
.end(function(err, res) {
if (err) throw err;
assert.deepEqual(res.body, {status: 400, error: 'Invalid JSON', message: {error: 'Unexpected token h'}});
done();
});
});
it('returns the resource\'s freshness when the resource is valid and has caching headers', function(done) {
nock('https://solid.us').get('/api-resource').reply(200, {test: 2}, {'Cache-Control': 'max-age=123'});
var s_request = request(solidus_server.router);
s_request.get('/api/resource.json?url=https://solid.us/api-resource')
.expect('Cache-Control', 'public, max-age=123, stale-while-revalidate=86400, stale-if-error=86400')
.expect('Expires', new Date(new Date().getTime() + 123 * 1000).toUTCString())
.end(function(err, res) {
if (err) throw err;
done();
});
});
// Valid upstream response without caching headers: the proxy applies its
// default freshness (5 minutes) to the response it serves.
it('returns the default freshness when the resource is valid and has no caching headers', function(done) {
  nock('https://solid.us').get('/api-resource').reply(200, {test: 2});
  var s_request = request(solidus_server.router);
  s_request.get('/api/resource.json?url=https://solid.us/api-resource')
    .expect('Cache-Control', 'public, max-age=300, stale-while-revalidate=86400, stale-if-error=86400')
    .expect('Expires', new Date(new Date().getTime() + 5 * 60 * 1000).toUTCString())
    .end(function(err, res) {
      if (err) throw err;
      done();
    });
});
// Invalid (4xx) upstream response WITH caching headers: the upstream's
// max-age is still mirrored onto the proxied response.
it('returns the resource\'s freshness when the resource is invalid and has caching headers', function(done) {
  nock('https://solid.us').get('/api-resource').reply(400, {test: 2}, {'Cache-Control': 'max-age=123'});
  var s_request = request(solidus_server.router);
  s_request.get('/api/resource.json?url=https://solid.us/api-resource')
    .expect('Cache-Control', 'public, max-age=123, stale-while-revalidate=86400, stale-if-error=86400')
    .expect('Expires', new Date(new Date().getTime() + 123 * 1000).toUTCString())
    .end(function(err, res) {
      if (err) throw err;
      done();
    });
});
// Invalid upstream response without caching headers: serve with zero
// freshness (max-age=0, Expires=now) so clients revalidate immediately.
it('returns no freshness when the resource is invalid and has no caching headers', function(done) {
  nock('https://solid.us').get('/api-resource').reply(400, {test: 2});
  var s_request = request(solidus_server.router);
  s_request.get('/api/resource.json?url=https://solid.us/api-resource')
    .expect('Cache-Control', 'public, max-age=0, stale-while-revalidate=86400, stale-if-error=86400')
    .expect('Expires', new Date().toUTCString())
    .end(function(err, res) {
      if (err) throw err;
      done();
    });
});
// The proxy must attach the configured auth credentials (here a 'key'
// header) when the target host matches an auth rule.
it('fetches the url using the appropriate auth', function(done) {
  nock('https://a.solid.us').get('/api-resource').matchHeader('key', '12345').reply(200, {test: 2});
  var s_request = request(solidus_server.router);
  s_request.get('/api/resource.json?url=https://a.solid.us/api-resource')
    .expect(200)
    .expect('Content-Type', /json/)
    .end(function(err, res) {
      if (err) throw err;
      assert.deepEqual(res.body, {test: 2});
      done();
    });
});
});
// Requests carrying no-cache headers must propagate them to the upstream
// resource fetches, both for page resources and for the /api proxy.
describe('no-cache request header', function() {
  it('adds no-cache headers to resources', function(done) {
    // nock's matchHeader asserts the outgoing fetch carries both headers
    nock('https://solid.us').get('/cache?a=1').matchHeader('Cache-Control', 'no-cache').matchHeader('Pragma', 'no-cache').reply(200, {test: 1});
    nock('https://solid.us').get('/cache?a=2').matchHeader('Cache-Control', 'no-cache').matchHeader('Pragma', 'no-cache').reply(200, {test: 2});
    var s_request = request(solidus_server.router);
    s_request.get('/caching')
      .set('cache-control', 'no-cache')
      .expect(200)
      .end( function( err, res ){
        if( err ) throw err;
        done();
      });
  });
  it('adds no-cache headers to api resources', function(done) {
    nock('https://solid.us').get('/api-resource').matchHeader('Cache-Control', 'no-cache').matchHeader('Pragma', 'no-cache').reply(200, {test: 2});
    var s_request = request(solidus_server.router);
    s_request.get('/api/resource.json?url=https://solid.us/api-resource')
      .set('PRAGMA', 'no-cache') // header names are case-insensitive; exercises that
      .expect(200)
      .end(function(err, res) {
        if (err) throw err;
        done();
      });
  });
});
});
// Development-mode behavior: the server watches the site directory and
// hot-reloads views, redirects, preprocessors and helpers. Each test writes
// or deletes a file, then waits FILESYSTEM_DELAY for the watcher to react.
describe( 'development', function(){
  var solidus_server;
  beforeEach( function( done ){
    process.chdir( site2_path );
    solidus_server = solidus.start({
      log_level: -1,
      port: 9009,
      dev: true,
      livereload_port: 12345
    });
    // hack that will work until .start callback is complete
    solidus_server.on( 'ready', function(){
      setTimeout( done, FILESYSTEM_DELAY );
    });
  });
  afterEach( function(){
    solidus_server.stop();
    process.chdir( original_path );
  });
  it( 'Adds a route when a new view is added', function( done ){
    fs.writeFileSync( 'views/watch_test.hbs', 'test', DEFAULT_ENCODING );
    var s_request = request( solidus_server.router );
    setTimeout( function(){
      s_request.get('/watch_test').expect( 200, function( err ){
        if( err ) throw err;
        done();
      });
    }, FILESYSTEM_DELAY );
  });
  // NOTE: relies on the view created by the previous test; these specs are
  // order-dependent by design.
  it( 'Removes a route when a view is removed', function( done ){
    fs.unlinkSync('views/watch_test.hbs');
    var s_request = request( solidus_server.router );
    setTimeout( function(){
      s_request.get('/watch_test').expect( 404, function( err ){
        if( err ) throw err;
        done();
      });
    }, FILESYSTEM_DELAY );
  });
  it( 'Adds redirects when redirects.js is added', function( done ){
    var s_request = request( solidus_server.router );
    var redirects_json = JSON.stringify([{"from": "/redirect1", "to": "/"}]);
    fs.writeFileSync( 'redirects.js', 'module.exports = ' + redirects_json, DEFAULT_ENCODING );
    setTimeout(function() {
      async.parallel([
        function(callback) {
          s_request.get('/redirect1').expect(302, callback);
        },
        function(callback) {
          s_request.get('/redirect2').expect(404, callback);
        },
      ], function(err) {
        if (err) throw err;
        done();
      });
    }, FILESYSTEM_DELAY);
  });
  it( 'Updates redirects when redirects.js changes', function( done ){
    var s_request = request( solidus_server.router );
    var redirects_json = JSON.stringify([{"from": "/redirect2", "to": "/"}]);
    fs.writeFileSync( 'redirects.js', 'module.exports = ' + redirects_json, DEFAULT_ENCODING );
    setTimeout(function() {
      async.parallel([
        function(callback) {
          s_request.get('/redirect1').expect(404, callback);
        },
        function(callback) {
          s_request.get('/redirect2').expect(302, callback);
        },
      ], function(err) {
        if (err) throw err;
        done();
      });
    }, FILESYSTEM_DELAY);
  });
  it( 'Removes redirects when redirects.js is deleted', function( done ){
    var s_request = request( solidus_server.router );
    fs.unlinkSync('redirects.js');
    setTimeout(function() {
      async.parallel([
        function(callback) {
          s_request.get('/redirect1').expect(404, callback);
        },
        function(callback) {
          s_request.get('/redirect2').expect(404, callback);
        },
      ], function(err) {
        if (err) throw err;
        done();
      });
    }, FILESYSTEM_DELAY);
  });
  var test_preprocessor_contents = 'module.exports=function(context){context.test = true;return context;};';
  it( 'Adds preprocessors when a preprocessor js file is added', function( done ){
    var s_request = request( solidus_server.router );
    fs.writeFileSync( 'preprocessors/test.js', test_preprocessor_contents, DEFAULT_ENCODING );
    setTimeout( function(){
      s_request.get('/test.json')
        .expect( 200 )
        .end( function( err, res ){
          if( err ) throw err;
          assert( res.body.test );
          done();
        });
    }, FILESYSTEM_DELAY );
  });
  var parent_preprocessor_contents = 'var child = require("./child");module.exports=function(context){context.test = child();return context;};';
  var child_preprocessor_contents = 'module.exports=function(){return "ok";};';
  var child_preprocessor_contents_2 = 'module.exports=function(){return "okok";};';
  // Editing a required sub-module must bust the whole require cache, so the
  // parent preprocessor picks up the child's new return value.
  it( 'Invalidates all cached preprocessor modules on a change to any of them', function( done ){
    this.timeout(FILESYSTEM_DELAY * 2.5);
    var s_request = request( solidus_server.router );
    fs.writeFileSync( 'preprocessors/test.js', parent_preprocessor_contents, DEFAULT_ENCODING );
    fs.writeFileSync( 'preprocessors/child.js', child_preprocessor_contents, DEFAULT_ENCODING );
    setTimeout( function(){
      s_request.get('/test.json')
        .expect( 200 )
        .end( function( err, res ){
          if( err ) throw err;
          assert( res.body.test === 'ok' );
          fs.writeFileSync( 'preprocessors/child.js', child_preprocessor_contents_2, DEFAULT_ENCODING );
          setTimeout( function(){
            s_request.get('/test.json')
              .expect( 200 )
              .end( function( err, res ){
                if( err ) throw err;
                assert( res.body.test === 'okok' );
                fs.unlinkSync('preprocessors/child.js');
                done();
              });
          }, FILESYSTEM_DELAY );
        });
    }, FILESYSTEM_DELAY );
  });
  it( 'Removes preprocessors when their file is removed', function( done ){
    fs.unlinkSync('preprocessors/test.js');
    var s_request = request( solidus_server.router );
    setTimeout( function(){
      s_request.get('/test.json')
        .expect( 200 )
        .end( function( err, res ){
          if( err ) throw err;
          assert( !res.body.test );
          done();
        });
    }, FILESYSTEM_DELAY );
  });
  var helpers_js = "module.exports={uppercase:function(string){return string+' is uppercase';}};";
  var helpers2_js = "module.exports={uppercase:function(string){return string+' is uppercase 2';}};";
  it( 'Adds helpers when helpers.js is added', function( done ){
    var s_request = request( solidus_server.router );
    fs.writeFileSync( 'helpers.js', helpers_js, DEFAULT_ENCODING );
    setTimeout( function(){
      s_request
        .get('/helpers')
        .end( function( err, res ){
          if( err ) throw err;
          assert( res.text.indexOf('Site helpers loaded is uppercase') > -1 );
          done();
        });
    }, FILESYSTEM_DELAY );
  });
  it( 'Updates helpers when helpers.js changes', function( done ){
    var s_request = request( solidus_server.router );
    fs.writeFileSync( 'helpers.js', helpers2_js, DEFAULT_ENCODING );
    setTimeout( function(){
      s_request
        .get('/helpers')
        .end( function( err, res ){
          if( err ) throw err;
          assert( res.text.indexOf('Site helpers loaded is uppercase 2') > -1 );
          done();
        });
    }, FILESYSTEM_DELAY );
  });
  it( 'Removes helpers when helpers.js is deleted', function( done ){
    var s_request = request( solidus_server.router );
    fs.unlinkSync('helpers.js');
    setTimeout( function(){
      s_request
        .get('/helpers')
        .end( function( err, res ){
          if( err ) throw err;
          assert( res.text.indexOf('Site helpers loaded is uppercase') == -1 );
          done();
        });
    }, FILESYSTEM_DELAY );
  });
  it( 'Passes dev variables to view context', function( done ){
    var s_request = request( solidus_server.router );
    s_request.get('/dev.json')
      .expect( 200 )
      .end( function( err, res ){
        if( err ) throw err;
        assert( res.body.dev );
        assert( res.body.development );
        assert.equal( 12345, res.body.livereload_port );
        done();
      });
  });
  it( 'Does not send cache headers in development', function( done ){
    var s_request = request( solidus_server.router );
    s_request.get('/dev')
      .expect('cache-control', 'no-cache, no-store, max-age=0, must-revalidate')
      .expect('x-robots-tag', 'noindex, nofollow')
      .end( function( err, res ){
        if( err ) throw err;
        done();
      });
  });
  it( 'Does not cache assets in development', function( done ){
    var s_request = request( solidus_server.router );
    s_request.get('/scripts/test.js')
      .expect( 'cache-control', 'public, max-age=0' )
      .end( function( err, res ){
        if( err ) throw err;
        done();
      });
  });
  // Site 2 has no error.hbs, so errors fall back to the plain-text
  // "<status> <reason>" body.
  it('Renders the default error response when error.hbs is missing', function(done) {
    var s_request = request(solidus_server.router);
    async.parallel([
      function(callback) {
        // 200 status and error in response message
        nock('https://solid.us').get('/error/mandatory?test=2').reply(200, {status: 'error', error: 'Could not find such a thing'});
        nock('https://solid.us').get('/error/optional?test=2').reply(200, {test: 2});
        s_request.get('/with_resource_error?test=2')
          .expect(404)
          .end(function(err, res) {
            assert.equal(res.text, '404 Not Found');
            callback(err);
          });
      },
      function(callback) {
        // Bad status
        nock('https://solid.us').get('/error/mandatory?test=3').reply(401, {error: 'Nice try'});
        nock('https://solid.us').get('/error/optional?test=3').reply(200, {test: 3});
        s_request.get('/with_resource_error?test=3')
          .expect(401)
          .end(function(err, res) {
            assert.equal(res.text, '401 Unauthorized');
            callback(err);
          });
      }
    ], function(err, results) {
      if (err) throw err;
      done();
    });
  });
});
// The optional log server mirrors Solidus log lines over socket.io; this
// spec connects a client, triggers a request, and waits for the matching
// "served in Nms" message before shutting down.
describe('log server', function() {
  var solidus_server;
  beforeEach(function(done) {
    process.chdir(site1_path);
    solidus_server = solidus.start({
      dev: true,
      log_level: 0,
      port: 9009,
      log_server_port: 12345,
      log_server_level: 3
    });
    solidus_server.on('ready', done);
  });
  afterEach(function() {
    process.chdir(original_path);
  });
  it('Sends the logs to the web socket', function(done) {
    var socket = require('socket.io-client')('http://localhost:12345');
    socket.on('connect', function() {
      // Our web socket client is connected, make a Solidus request
      request(solidus_server.router).get('/helpers.json').end(function(err, res) {
        if (err) throw err;
        assert.equal(12345, res.body.log_server_port);
      });
    });
    socket.on('log', function(data) {
      // Solidus emitted a log message, if it's the last message stop the server
      if (data.level == 3 && /\/helpers \[200\] served in \d+ms/.test(data.message)) {
        solidus_server.stop();
      }
    });
    socket.on('disconnect', function() {
      // The log server was closed, we're done
      done();
    });
  });
});
// Page.toObjectString serializes a view into a require()-based object
// literal for client-side bundling; paths are relative to the parent file.
describe('Page.toObjectString', function() {
  var solidus_server;
  before(function(done) {
    process.chdir(site1_path);
    // start_server:false builds the router/views without binding a port
    solidus_server = new SolidusServer({start_server: false});
    solidus_server.on('ready', done);
  });
  after(function() {
    process.chdir(original_path);
  });
  it('returns a JS string version of the parsed view', function(done) {
    var parent_file_path = path.join(solidus_server.paths.assets, 'scripts', 'index.js');
    var expected = '{resources:{"cache1":"https://solid.us/cache/1","cache2":"https://solid.us/cache/2"},preprocessor:require("../../preprocessors/index.js"),template:require("../../views/with_all_features.hbs"),template_options:{helpers:require("../../helpers.js"),partials:{"partial":require("../../views/partial.hbs"),"partial_holder":require("../../views/partial_holder.hbs"),"partial_holder2":require("../../views/partial_holder2.hbs"),"deeply/partial":require("../../views/deeply/partial.hbs")}}}';
    assert.equal(solidus_server.views[solidus_server.pathFromPartialName('with_all_features')].toObjectString(parent_file_path), expected);
    done();
  });
  it('with missing features', function(done) {
    var parent_file_path = path.join(solidus_server.paths.assets, 'scripts', 'index.js');
    // No resources/preprocessor/partials: only template and helpers appear
    var expected = '{template:require("../../views/partial.hbs"),template_options:{helpers:require("../../helpers.js")}}';
    assert.equal(solidus_server.views[solidus_server.pathFromPartialName('partial')].toObjectString(parent_file_path), expected);
    done();
  });
});
});
<|start_filename|>test/fixtures/site 1/redirects.js<|end_filename|>
module.exports = [
{
from: "/redirect1",
to: "/"
}, {
from: "/redirect2",
to: "/",
"start": "2000-1-1 00:00:00"
}, {
from: "/redirect3",
to: "/",
"start": "3000-1-1 00:00:00"
}, {
from: "/redirect4",
to: "/",
"end": "2000-1-1 00:00:00"
}, {
from: "/redirect5",
to: "/",
"permanent": true
}, {
from: "/redirect6/{dynamic}/{route}",
to: "/new/{route}/{dynamic}"
}, {
from: /\/redirect7\/(\d+)-\d+-(\d+)-(\d+)/,
to: "/new/{1}/{0}/{2}"
}, {
from: "/redirect8/{dynamic}/{route}",
to: function(params) {
return "/new/{route}/" + params.dynamic.toUpperCase();
}
}, {
from: /\/redirect9\/(\d+)-\d+-(\d+)-(\d+)/,
to: function(params) {
return "/new/{1}/{0}/" + (1000 + parseInt(params['2']));
}
}, {
from: {
protocol: 'http',
host: 'solidusjs.com',
path: '/match-http-root'
},
to: '/new/match-http-root'
}, {
from: {
protocol: 'https',
host: 'solidusjs.com',
path: '/match-https-root'
},
to: '/new/match-https-root'
}, {
from: {
host: 'no-path.com'
},
to: {
host: 'www.no-path.com'
}
}, {
from: '/to-https-www',
to: {
protocol: 'https',
host: 'www.solidusjs.com'
}
}, {
from: '/to-https-www-url/{dynamic}',
to: {
protocol: 'https',
host: 'www.solidusjs.<EMAIL>',
url: '/new/url/{dynamic}'
}
}];
<|start_filename|>lib/preprocessor.js<|end_filename|>
var path = require('path');
var Preprocessor = function(preprocessor_path, options) {
this.path = preprocessor_path;
this.relative_path = path.relative(options.server.paths.preprocessors, preprocessor_path);
this.process = function(context, callback) {
try {
context = require(this.path)(context);
} catch (err) {
return callback(err, context);
}
callback(null, context);
};
};
module.exports = Preprocessor;
<|start_filename|>lib/logger.js<|end_filename|>
var _ = require('underscore');
var colors = require('colors');

// Terminal colors used for the timestamp in dev mode, keyed by log level.
const level_colors = {
  '0': 'red',     // errors
  '1': 'yellow',  // warnings
  '2': 'cyan',    // status
  '3': 'magenta'  // debug
};

// Logger writes timestamped, request-scoped lines to the console and,
// optionally, forwards them to a socket.io log server.
// options:
//   level            - max level printed to the console (default 2)
//   log_server_level - max level forwarded to the log server (default 2)
//   log_server       - optional socket.io server emitting 'log' events
//   session          - request-local store providing 'request_id'
//   dev              - when true, colorize timestamps by level
var Logger = function( options ){
  options = _.defaults( options, {
    level: 2,
    log_server_level: 2
  });

  this.level = options.level;
  this.log_server = options.log_server;
  this.log_server_level = options.log_server_level;
  this.session = options.session;
  this.dev = options.dev;

  this.log = function(message, level) {
    // BUGFIX: normalize the level BEFORE using it to pick a color. The old
    // code colorized first, so log(message) with no level threw a TypeError
    // in dev mode (time[level_colors[undefined]] is undefined, .bold throws).
    level = _.isNumber(level) ? level : 2;
    var time = new Date().toISOString();
    if (this.dev) time = time[level_colors[level]].bold;
    var line = ['[' + time + ']', this.session.get('request_id') || '-', message].join(' ');
    if (level <= this.level) {
      console.log(line);
    }
    if (level <= this.log_server_level && this.log_server) {
      this.log_server.emit('log', {level: level, message: line});
    }
  };
};

module.exports = Logger;
<|start_filename|>test/fixtures/site 1/preprocessors/error.js<|end_filename|>
// Test preprocessor: drives each of the server's preprocessor error paths,
// selected by the request's `error` parameter.
module.exports = function(context) {
  switch (context.parameters.error) {
    case 'exception':
      // Deliberate TypeError (property of undefined). Note: no `break` —
      // intentional, the throw happens before fall-through could matter.
      context.does_not_exist.uhoh = true;
    case 'status_code':
      // Returning a number means "respond with this HTTP status".
      return 401;
    case 'redirect':
      // Returning a string means "302 redirect to this url".
      return '/redirected';
    case 'redirect_permanent':
      // Returning [status, url] means "redirect with this status".
      return [301, '/redirected'];
    case 'invalid_redirect_array':
      // Malformed array: not a [number, string] pair; treated as invalid.
      return ['/redirected'];
    case 'no_context':
      // Returning undefined: "invalid context returned" error path.
      return;
  }
  return context;
};
<|start_filename|>test/fixtures/site 1/preprocessors/index.js<|end_filename|>
module.exports = function( context ){
context.test = true;
return context;
};
<|start_filename|>test/fixtures/site 1/helpers.js<|end_filename|>
module.exports = {
frenchify: function(string) {
return 'Le ' + string + ' sacrebleu !';
}
};
<|start_filename|>lib/redirect.js<|end_filename|>
const PERMANENT_STATUS = 301;
const TEMPORARY_STATUS = 302;

var path = require('path');
var _ = require('underscore');
var moment = require('moment');

var utils = require('./utils.js');

// Redirect registers an express GET route that forwards matching requests.
// redirect_data:
//   from      - string path, RegExp, or {protocol, host, path} matcher
//   to        - string url, function(params), or {protocol, host, url} target
//   start/end - optional validity window (any moment-parseable value)
//   permanent - when true use 301 instead of 302
var Redirect = function(redirect_data, options) {
  options = options || {};

  var redirect = this;
  var router = options.server.router;
  var status = redirect_data.permanent ? PERMANENT_STATUS : TEMPORARY_STATUS;
  var start = redirect_data.start;
  var end = redirect_data.end;
  // Normalize `from`/`to` to object form; bare strings, regexes and
  // functions become {path: ...} / {url: ...}
  var from = _.isObject(redirect_data.from) && !_.isRegExp(redirect_data.from) ? redirect_data.from : {path: redirect_data.from};
  var to = _.isObject(redirect_data.to) && !_.isFunction(redirect_data.to) ? redirect_data.to : {url: redirect_data.to};
  var expired = moment() > moment(end);

  // Don't create redirect route at all if expired
  // Don't check prematurity yet since it could become valid later
  if (!expired) {
    // A from-matcher without a path (host/protocol only) matches every path
    redirect.route = from.path || '*';
    if (_.isString(redirect.route)) {
      redirect.route = path.normalize(redirect.route).replace(/\\/g, '/');
      redirect.route = utils.formatRouteForExpress(redirect.route);
    }

    router.get(redirect.route, function(req, res, next) {
      // If redirect is expired or premature skip it (re-checked per request
      // because the window can open/close while the server runs)
      var expired = moment() > moment(end);
      var premature = moment() < moment(start);
      if (expired || premature) return next();

      // If the protocol or host don't match, skip redirect
      if (from.protocol && from.protocol !== req.protocol) return next();
      if (from.host && from.host !== req.host) return next();

      // Compute to
      var newTo = to;
      if (_.isFunction(newTo.url)) {
        newTo = newTo.url(req.params);
        // The function may return a plain url string; merge it back into `to`
        if (!_.isObject(newTo)) newTo = Object.assign({}, to, {url: newTo});
      }

      // Compute location: absolute when protocol/host change, else same-origin
      var location = newTo.protocol || newTo.host ? ((newTo.protocol || req.protocol) + '://' + (newTo.host || req.host)) : '';
      location += newTo.url ? utils.expandVariables(newTo.url, req.params) : req.url;

      res.redirect(status, location);
    });

    // Removes the redirect route
    this.destroy = function() {
      router.routes.get = _(router.routes.get).reject(function(current_route) {
        return redirect.route === current_route.path;
      });
    };
  }
};

module.exports = Redirect;
<|start_filename|>lib/page.js<|end_filename|>
const DEFAULT_ENCODING = 'UTF8';
const MAX_EXPIRY_TIME = 1000 * 60 * 5; // 5 minutes
var url = require('url');
var fs = require('fs');
var path = require('path');
var util = require('util');
var http = require('http');
var EventEmitter = require('events').EventEmitter;
var _ = require('underscore');
var async = require('async');
var Resource = require('./resource.js');
var utils = require('./utils.js');
// Rounds `datetime` (ms) DOWN to the previous multiple of `round_by` (ms)
// and returns it as a Date. (Callers pass the expiry window as round_by.)
var getRoundedTime = function( datetime, round_by ){
  return new Date( datetime - ( datetime % round_by ) );
};
// Page wraps a single Handlebars view file: it registers express routes for
// the view, fetches its resources, runs its preprocessor and renders it.
// Emits 'ready' once the view's embedded config has been parsed.
var Page = function( page_path, options ){
  // properly inherit from EventEmitter part 1
  EventEmitter.call( this );

  var page = this; // closure alias used by nested callbacks below
  options = options || {};
  this.options = options;
  var server = this.options.server;
  var router = server.router;
  this.path = page_path;
  // Path relative to the views directory; drives the route and render name
  this.relative_path = path.relative( server.paths.views, page_path );
// adds a route based on the page's path; also registers a parallel
// "<route>.json" route that serves the raw render context
this.createRoute = function(){
  page.is_index = /index\.hbs$/i.test( this.relative_path );
  var route = this.relative_path.replace( /\.[a-z0-9]+$/i, '' ).replace( /\\/g, '/' );
  var route = '/'+ route;
  route = route.replace( '/index', '' ); // replace indexes with base routes
  route = utils.formatRouteForExpress(route);
  if( route === '' ) route = '/';
  page.route = route;

  // only overwrite existing routes if we're an index page
  var existing_route = _( router.routes.get ).find( function( route_data ){
    return route_data.path === route;
  });
  if( existing_route ){
    server.logger.log( 'Warning. You have a conflicting route at "'+ existing_route.path +'"', 0 );
    if( !page.is_index ) return route; // return out if this isn't an index
    router.routes.get = _( router.routes.get ).without( existing_route ); // ensure the old route is removed if this is an index
  }

  // JSON variant first so "<route>.json" wins over a greedy "<route>" match
  router.get( route +'.json', function( req, res ){
    page.render( req, res, {
      json: true
    });
  });
  router.get( route, function( req, res ){
    page.render( req, res );
  });

  return route;
};
// reads the json configuration inside the view; the config lives in a
// leading Handlebars comment of the form {{! { ...json... } }}
this.parseConfig = function( callback ){
  fs.readFile( this.path, DEFAULT_ENCODING, function( err, data ){
    var params = {};
    var params_exec = /^\s*{{!\s*({[\S\s]+?})\s*}}/.exec( data );
    try {
      params = ( params_exec )? JSON.parse( params_exec[1] ): {};
    }
    catch( err ){
      // Malformed config JSON: log it and fall back to an empty config
      server.logger.log( 'Error preprocessing "'+ page.path +'" '+ err, 0 );
    }
    params.resources = params.resources || {};
    page.partials = page.findPartials(data);
    page.params = params;
    // Promote the common config fields onto the page itself
    _( page ).extend({
      title: params.title,
      description: params.description,
      name: params.name,
      layout: params.layout
    });
    if( callback ) callback( params );
  });
};
// finds the names of the partials used by the template
// Returns the unique partial names referenced via {{> name}} tags, ignoring
// tags inside {{!-- --}} / {{! }} comments.
this.findPartials = function(template) {
  // Strip comments first so commented-out partials are not collected
  var template_without_comments = template.replace(/{{!--[\s\S]*?--}}/g, '').replace(/{{![\s\S]*?}}/g, '');
  var partials = [];
  var partial_regex = /{{>\s*([^\s}]+)[\s\S]*?}}/g;
  var match;
  while (match = partial_regex.exec(template_without_comments)) {
    // Remove surrounding quotes and unescape embedded quotes.
    // BUGFIX: the unescape now uses /g — previously only the FIRST escaped
    // quote in a name was unescaped.
    var name = match[1].replace(/(^['"])|(["']$)/g, '').replace(/\\(['"])/g, '$1');
    partials.push(name);
  }
  return _.uniq(partials);
}
// Recursively collects this view's partials and the partials they use in
// turn, as a map of partial name -> resolved view file path.
this.allPartials = function() {
  var partials = {};
  _.each(page.partials, function(name) {
    var file_path = server.pathFromPartialName(name);
    partials[name] = file_path;
    var partial = server.views[file_path];
    // Partial may not be loaded (missing file); only recurse when known
    if (partial) {
      partials = _.extend(partials, partial.allPartials());
    }
  });
  return partials;
};
// Serializes the view into a JS object-literal string of require() calls
// (resources, preprocessor, template, helpers, partials) for client-side
// bundling. All paths are made relative to parent_file_path's directory.
this.toObjectString = function(parent_file_path) {
  var root = path.dirname(parent_file_path);
  var preprocessor = server.preprocessors[page.params.preprocessor];
  var partials = page.allPartials();
  var properties = [];
  var template_options = [];

  // Each feature is emitted only when present, keeping the literal minimal
  if (!_.isEmpty(page.params.resources)) {
    properties.push('resources:' + JSON.stringify(page.params.resources));
  }
  if (preprocessor) {
    properties.push('preprocessor:require(' + JSON.stringify(path.relative(root, preprocessor.path)) + ')');
  }
  properties.push('template:require(' + JSON.stringify(path.relative(root, page.path)) + ')');
  if (!_.isEmpty(server.site_helpers)) {
    template_options.push('helpers:require(' + JSON.stringify(path.relative(root, server.paths.helpers)) + ')');
  }
  if (!_.isEmpty(partials)) {
    var requires = _.map(partials, function(file_path, name) {
      return JSON.stringify(name) + ':require(' + JSON.stringify(path.relative(root, file_path)) + ')';
    })
    template_options.push('partials:{' + requires.join(',') + '}');
  }
  if (!_.isEmpty(template_options)) {
    properties.push('template_options:{' + template_options.join(',') + '}');
  }
  return '{' + properties.join(',') + '}';
};
// fetches remote resources
// iterator(resource, response) is called per successful fetch; callback(err)
// fires once all fetches settle. Optional resources (and preview requests)
// never fail the page — their errors are logged and swallowed.
this.fetchResources = function( req, context, iterator, callback ){
  var page = this;
  if( page.params.resources ){
    // convert resources object into array
    var resources_array = _( page.params.resources ).map( function( options, name ){
      var resource = new Resource(server.resourceOptions(req, options), server.auth, context.parameters, server.logger);
      resource.name = name;
      return resource;
    });
    async.each( resources_array, function( resource, cb ){
      resource.get(function(err, resource_response) {
        if (err) {
          var status = err.status || 500;
          var error = 'Resource error: ' + err.message;
          var is_optional = context.is_preview || resource.options.optional;
          // A page with a missing static resource is broken, not missing
          if (status == 404 && !resource.resource.dynamic) status = 500;
          if (status == 404) {
            // Missing dynamic resource: expected in normal operation, debug level
            server.logger.log(page.route + ' [' + resource.name + '] ' + error, 3);
          } else {
            server.logger.log(page.route + ' [' + resource.name + '] ' + error, is_optional ? 1 : 0);
            if (!is_optional) {
              page.logToSentry(error, {
                error: err,
                resource: {name: resource.name, url: resource.resource.url},
                response: resource_response ? {status: resource_response.status, body: resource_response.data} : null,
                context: _.omit(context, 'resources')
              });
            }
          }
          err = {status: status, error: error, message: err, resource: resource.name};
          cb(is_optional ? null : err);
        } else {
          iterator(resource, resource_response);
          cb();
        }
      });
    }, callback);
  }
  else {
    callback();
  }
};
// preprocesses the page's context
// The preprocessor's return value selects the outcome: an object is the new
// context; a number is an HTTP status; a string is a 302 redirect url;
// [status, url] is a redirect with that status; anything else is an error.
this.preprocess = function( context, callback ){
  var preprocessor = server.preprocessors[page.params.preprocessor];

  if (preprocessor) {
    preprocessor.process(context, function(err, processed_context) {
      if (err) {
        // Preprocessor threw: respond 500 and report to Sentry
        var error = 'Preprocessor error: ' + (_.isString(err) ? err : err.message);
        var stack = err.stack ? ('\n' + err.stack) : '';
        server.logger.log(page.route + ' [' + preprocessor.relative_path + '] ' + error + stack, 0);
        page.logToSentry(error, {
          error: err,
          preprocessor: preprocessor.relative_path,
          context: context
        });
        callback({status: 500, error: error, message: stack.split('\n'), preprocessor: preprocessor.relative_path}, processed_context);
      } else if (_.isNumber(processed_context)) {
        // Numeric return: serve that status code (404s logged at debug only)
        var error = 'Preprocessor error: status code ' + processed_context;
        var message = http.STATUS_CODES[processed_context];
        if (processed_context == 404) {
          server.logger.log(page.route + ' [' + preprocessor.relative_path + '] ' + error, 3);
        } else {
          server.logger.log(page.route + ' [' + preprocessor.relative_path + '] ' + error, 0);
          // NOTE(review): `err` is falsy in this branch, so the Sentry
          // extra's `error` field is always empty here — confirm intent
          page.logToSentry(error, {
            error: err,
            preprocessor: preprocessor.relative_path,
            context: context
          });
        }
        callback({status: processed_context, error: error, message: message, preprocessor: preprocessor.relative_path}, context);
      } else if (_.isString(processed_context)) {
        // String return: temporary redirect to that url
        callback({status: 302, redirect_url: processed_context}, context);
      } else if (_.isArray(processed_context) && processed_context.length === 2 && _.isNumber(processed_context[0]) && _.isString(processed_context[1])) {
        // [status, url] pair: redirect with the given status
        callback({status: processed_context[0], redirect_url: processed_context[1]}, context);
      } else if (!_.isObject(processed_context)) {
        // Any other non-object (undefined, bad array, ...) is a bug
        var error = 'Preprocessor error: invalid context returned';
        server.logger.log(page.route + ' [' + preprocessor.relative_path + '] ' + error, 0);
        page.logToSentry(error, {
          error: err,
          preprocessor: preprocessor.relative_path,
          context: context
        });
        callback({status: 500, error: error, preprocessor: preprocessor.relative_path}, context);
      } else {
        callback(null, processed_context);
      }
    });
  } else {
    // No preprocessor configured: pass the context through untouched
    callback(null, context);
  }
};
// generates the page's markup
// Pipeline: build base context -> fetch resources -> run preprocessor ->
// render HTML (or, for options.json, serve the raw context as JSON).
this.render = function( req, res, options ){
  var start_serve = new Date;
  options = options || {};

  // generate url data to be served in context
  var href = req.protocol +'://'+ req.get('host') + req.url;
  var url_data = url.parse( href, true );
  url_data = _.pick( url_data, 'host', 'port', 'hostname', 'hash', 'search', 'query', 'pathname', 'path', 'href', 'protocol' );
  url_data.origin = url_data.protocol +'//'+ url_data.host;

  var context = {
    url: url_data,
    page: {
      path: this.path,
      title: this.title,
      description: this.description,
      name: this.name
    },
    parameters: {},
    query: req.query,
    resources: {},
    assets: {
      scripts: '<script src="/compiled/scripts.js"></script>',
      styles: '<link rel="stylesheet" href="/compiled/styles.css" />'
    },
    layout: this.getLayout(),
    is_preview: !!req.query.is_preview,
    no_cache: req.no_cache
  };
  context = _( context ).defaults( router.locals );

  // req.params is actually an array with crap stuck to it
  // so we have to parse that stuff out into a real object
  var parameters = {};
  for( var key in req.params ) parameters[key] = req.params[key];
  context.parameters = _( parameters ).extend( req.query );

  // Page freshness defaults to MAX_EXPIRY_TIME and is tightened below to
  // match the soonest-expiring resource.
  var max_age = MAX_EXPIRY_TIME / 1000;
  var expires = new Date().getTime() + MAX_EXPIRY_TIME;

  // actually render the page (shared by success and error paths)
  var renderPage = function(err, context) {
    err = err || options.err;
    var status = err ? err.status : 200;
    // 404s are expected traffic: log at debug; other errors at error level
    server.logger.log(page.route + ' [' + status + '] served in ' + (new Date - start_serve) +'ms', !err || status == 404 ? 3 : 0);
    res.status(status);
    res.set(server.responseCacheHeaders(req, {
      max_age: max_age,
      expires: new Date(expires)
    }));
    if (err) {
      err.redirect_url ? res.redirect(status, err.redirect_url) : renderErrorPage(err, context);
    } else {
      renderSuccessPage(context);
    }
  };

  var renderErrorPage = function(err, context) {
    context = context || {};
    context.helpers = server.site_helpers;
    context.error = err;
    if (options.json) {
      res.json(context);
    } else if (server.views[server.paths.error]) {
      // Site provides an error view: render it with the context exposed
      res.expose(context, 'solidus.context', 'context');
      res.render(server.paths.error, context);
    } else {
      // Fall back to a plain "<status> <reason>" body
      res.send(err.status + ' ' + http.STATUS_CODES[err.status]);
    }
  }

  var renderSuccessPage = function(context) {
    context = context || {};
    context.helpers = server.site_helpers;
    if (options.json) {
      res.json(context);
    } else {
      res.expose(context, 'solidus.context', 'context');
      res.render(page.relative_path, context);
    }
  };

  var start_resources = new Date;
  this.fetchResources( req, context,
    function(resource, resource_response) {
      context.resources[resource.name] = resource_response.data;
      // Track the soonest expiry across resources for the cache headers
      if (resource_response.expires_at < expires) {
        max_age = resource_response.maxAge();
        expires = resource_response.expires_at;
      }
    },
    function(err) {
      server.logger.log( page.route +' resources fetched in '+ ( new Date - start_resources ) +'ms', 3 );
      if (err) return renderPage(err, context);
      var start_preprocess = new Date;
      page.preprocess(context, function(err, context) {
        server.logger.log( page.route +' preprocessed in '+ ( new Date - start_preprocess ) +'ms', 3 );
        // Previews render even when preprocessing failed
        if (err && !context.is_preview) return renderPage(err, context);
        renderPage(null, context);
      });
    }
  );
};
// get the view's layout
// An explicit layout in the view config wins (false disables layouts);
// otherwise pick the deepest layout file whose directory contains this view.
this.getLayout = function(){
  if( this.layout || this.layout === false ) return this.layout;
  // Longest path first, so the most specific (deepest) layout matches first
  var layouts = _( server.layouts ).sortBy( function( layout_path ){
    return -layout_path.length;
  });
  var local_layout = _( layouts ).find( function( layout_path ){
    var layout_dir = layout_path.replace( /layout\..+$/i, '' );
    return page.path.indexOf( layout_dir ) > -1;
  });
  if( !local_layout ) return null;
  local_layout = path.relative( server.paths.views, local_layout );
  return local_layout;
};
// removes the page's route
// Drops this page's GET route from the express router (dev-mode hot reload).
this.destroy = function(){
  router.routes.get = _( router.routes.get ).reject( function( current_route ){
    return current_route.path === page.route;
  });
};
// Reports an error to Sentry (no-op when no raven client is configured).
this.logToSentry = function(err, extra) {
  if (!server.raven_client) return;
  server.raven_client.captureError(err, {extra: extra});
};
// Register routes immediately; emit 'ready' once the view's embedded
// config comment has been parsed from disk.
this.createRoute();
this.parseConfig( function(){
  page.emit( 'ready' );
});
};
// properly inherit from EventEmitter part 2
util.inherits( Page, EventEmitter );

Page.layouts = {};

module.exports = Page;
<|start_filename|>test/resource_response.js<|end_filename|>
var assert = require('assert');
var timekeeper = require('timekeeper');
var ResourceResponse = require('../lib/resource_response.js');
describe( 'ResourceResponse', function(){
var request_time; // When the request was sent
var response_time; // When the response was received
var response;

beforeEach(function() {
  // Freeze the clock so all expiry arithmetic is deterministic
  var now = new Date(1397524638000); // Test date rounded to the second, to simplify comparisons
  timekeeper.freeze(now);
  request_time = now.getTime();
  response_time = now.getTime();
  response = {headers: {date: now.toUTCString()}};
});

afterEach(function() {
  timekeeper.reset();
});
// Verifies how ResourceResponse derives has_expiration / expires_at from the
// response's caching headers (precedence: s-maxage > max-age > Expires),
// including Age/transit-delay deductions.
// FIX: has_expiration/expires_at were assigned without `var`, creating
// implicit globals that leak between tests (and throw in strict mode).
describe( '.constructor()', function(){
  it('sets has_expiration to false when no caching headers', function() {
    var has_expiration = new ResourceResponse({response: response, data: null, request_time: request_time, response_time: response_time}).has_expiration;
    assert(!has_expiration);
  });
  it('sets has_expiration to true when caching headers', function() {
    response.headers['cache-control'] = 's-maxage=100';
    var has_expiration = new ResourceResponse({response: response, data: null, request_time: request_time, response_time: response_time}).has_expiration;
    assert(has_expiration);
  });
  it('sets expires_at to now when no caching headers', function() {
    var expires_at = new ResourceResponse({response: response, data: null, request_time: request_time, response_time: response_time}).expires_at;
    assert.equal(new Date().getTime(), expires_at);
  });
  it('sets expires_at to s-maxage when present', function() {
    response.headers['cache-control'] = 's-maxage=100';
    var expires_at = new ResourceResponse({response: response, data: null, request_time: request_time, response_time: response_time}).expires_at;
    assert.equal(100 * 1000, expires_at - new Date().getTime());
  });
  it('sets expires_at to max-age when present', function() {
    response.headers['cache-control'] = 'max-age=100';
    var expires_at = new ResourceResponse({response: response, data: null, request_time: request_time, response_time: response_time}).expires_at;
    assert.equal(100 * 1000, expires_at - new Date().getTime());
  });
  it('sets expires_at to expires when present', function() {
    response.headers['expires'] = new Date(request_time + 100 * 1000).toUTCString();
    var expires_at = new ResourceResponse({response: response, data: null, request_time: request_time, response_time: response_time}).expires_at;
    assert.equal(100 * 1000, expires_at - new Date().getTime());
  });
  it('sets expires_at to s-maxage when s-maxage, max-age and expires present', function() {
    response.headers['cache-control'] = 'max-age=100,s-maxage=200';
    response.headers['expires'] = new Date(request_time + 300 * 1000).toUTCString();
    var expires_at = new ResourceResponse({response: response, data: null, request_time: request_time, response_time: response_time}).expires_at;
    assert.equal(200 * 1000, expires_at - new Date().getTime());
  });
  it('sets expires_at to max-age when max-age and expires present', function() {
    response.headers['cache-control'] = 'max-age=200';
    response.headers['expires'] = new Date(request_time + 300 * 1000).toUTCString();
    var expires_at = new ResourceResponse({response: response, data: null, request_time: request_time, response_time: response_time}).expires_at;
    assert.equal(200 * 1000, expires_at - new Date().getTime());
  });
  it('deducts time since Date from expires_at when Age not present', function() {
    var now = new Date().getTime();
    request_time = now;
    // +3 second delay until request is processed
    timekeeper.freeze(now += 3 * 1000);
    response = {headers: {date: new Date(now).toUTCString(), 'cache-control': 'max-age=100'}};
    // +5 second delay until response is received
    timekeeper.freeze(now += 5 * 1000);
    response_time = now;
    // +7 second delay until response is processed
    timekeeper.freeze(now += 7 * 1000);
    var expires_at = new ResourceResponse({response: response, data: null, request_time: request_time, response_time: response_time}).expires_at;
    assert.equal((100 - (5 + 7)) * 1000, expires_at - new Date().getTime());
  });
  it('deducts Age and response delay from expires_at when Age present', function() {
    var now = new Date().getTime();
    request_time = now;
    // +3 second delay until request is processed
    // Response is already 30 seconds old
    timekeeper.freeze(now += 3 * 1000);
    response = {headers: {date: new Date(now - 30 * 1000).toUTCString(), 'cache-control': 'max-age=100', age: '30'}};
    // +5 second delay until response is received
    timekeeper.freeze(now += 5 * 1000);
    response_time = now;
    // +7 second delay until response is processed
    timekeeper.freeze(now += 7 * 1000);
    var expires_at = new ResourceResponse({response: response, data: null, request_time: request_time, response_time: response_time}).expires_at;
    assert.equal((100 - (30 + 3 + 5 + 7)) * 1000, expires_at - new Date().getTime());
  });
});
describe( '.maxAge()', function(){
  // maxAge() reports remaining freshness in whole seconds; an already-expired
  // (or never-set) expires_at yields 0, never a negative number.
  it('converts invalid expires_at to seconds from now', function() {
    timekeeper.freeze(new Date().getTime() + 2 * 1000);
    max_age = new ResourceResponse({response: response, data: null, request_time: request_time, response_time: response_time}).maxAge();
    assert.equal(0, max_age);
  });
  it('converts valid expires_at to seconds from now', function() {
    timekeeper.freeze(new Date().getTime() + 2 * 1000);
    response.headers['cache-control'] = 'max-age=100';
    max_age = new ResourceResponse({response: response, data: null, request_time: request_time, response_time: response_time}).maxAge();
    // 2 of the 100 seconds have already elapsed under the frozen clock
    assert.equal(98, max_age);
  });
});
describe( '.expired()', function(){
  it('returns whether expires_at is in the past', function() {
    // max-age=0 makes the response expire at the moment it is created
    response.headers['cache-control'] = 'max-age=0';
    var resource_response = new ResourceResponse({response: response, data: null, request_time: request_time, response_time: response_time});
    assert(resource_response.expired());
  });
});
describe( '.lock()', function(){
  // lock() behaves like a mutex: a second lock() fails until unlock() is
  // called or an expiry elapses (looks like ~30s, given the +40s freeze below
  // — confirm against ResourceResponse's implementation).
  it('prevents locking before .unlock()', function() {
    var resource_response = new ResourceResponse({response: response, data: null, request_time: request_time, response_time: response_time});
    assert(resource_response.lock());
    assert(!resource_response.lock());
    resource_response.unlock();
    assert(resource_response.lock());
  });
  it('prevents locking before 30 seconds', function() {
    var resource_response = new ResourceResponse({response: response, data: null, request_time: request_time, response_time: response_time});
    assert(resource_response.lock());
    assert(!resource_response.lock());
    timekeeper.freeze(new Date().getTime() + 40 * 1000);
    assert(resource_response.lock());
  });
});
});
<|start_filename|>solidify.js<|end_filename|>
var path = require('path');
var SolidusServer = require('./lib/server');
var transformTools = require('browserify-transform-tools');
var solidus_server;
// Browserify transform that inlines requires to Solidus JS views.
// For example, this code:
// var view = require('solidus/views/some/view'); // The path can end with .js too
// Becomes something like:
// var view = {template:require("../../views/some/view.hbs"),template_options:{helpers:require("../../helpers.js")}};
module.exports = transformTools.makeRequireTransform(
  'solidus/solidify',
  // Never transform requires of template files themselves.
  {excludeExtensions: ['.' + SolidusServer.extensions.template]},
  function(args, opts, callback) {
    // Only rewrite requires of the form "solidus/views/<name>" (optional ".js").
    var view_name = args[0].match(/^solidus\/views\/(.*?)(\.js)?$/);
    if (!view_name) return callback();
    view_name = view_name[1];
    // The shared SolidusServer loads its views asynchronously; poll with
    // setImmediate until it reports ready, then resolve the view.
    var waitForSolidus = function() {
      if (!solidus_server) {
        // Lazily boot a single server instance without an HTTP listener.
        solidus_server = new SolidusServer({start_server: false});
        solidus_server.on('ready', function() {solidus_server.ready = true});
      }
      if (solidus_server.ready) {
        var view = solidus_server.views[solidus_server.pathFromPartialName(view_name)];
        // Replace the require with the view's inline object string; a null
        // result leaves the original require untouched.
        callback(null, view ? view.toObjectString(opts.file) : null);
      } else {
        setImmediate(waitForSolidus);
      }
    };
    waitForSolidus();
  }
);
<|start_filename|>test/solidify.js<|end_filename|>
var assert = require('assert');
var path = require('path');
var solidify = require('../solidify.js');
var transformTools = require('browserify-transform-tools');
var original_path = __dirname;
var site1_path = path.join(original_path, 'fixtures', 'site 1');
var dummyJsFile = path.join(site1_path, 'assets', 'scripts', 'index.js');
describe('Solidify transform', function() {
  // Fixtures live in "site 1"; the transform resolves views relative to cwd.
  before(function(done) {
    process.chdir(site1_path);
    done();
  });
  after(function() {
    process.chdir(original_path);
  });
  it('replaces required views with their JS version', function(done) {
    var content = 'var a = require("solidus/views/dynamic/{segment}");';
    var expected = 'var a = {template:require("../../views/dynamic/{segment}.hbs"),template_options:{helpers:require("../../helpers.js")}};'
    transformTools.runTransform(solidify, dummyJsFile, {content: content}, function(err, transformed) {
      assert.ifError(err);
      assert.equal(transformed, expected);
      done();
    });
  });
  it('with multiple requires', function(done) {
    var content = 'var a = require("solidus/views/dynamic/{segment}");var b = require("solidus/views/dynamic/{segment}.js");var c = require("solidus/views/partial");';
    var expected = 'var a = {template:require("../../views/dynamic/{segment}.hbs"),template_options:{helpers:require("../../helpers.js")}};var b = {template:require("../../views/dynamic/{segment}.hbs"),template_options:{helpers:require("../../helpers.js")}};var c = {template:require("../../views/partial.hbs"),template_options:{helpers:require("../../helpers.js")}};'
    transformTools.runTransform(solidify, dummyJsFile, {content: content}, function(err, transformed) {
      assert.ifError(err);
      assert.equal(transformed, expected);
      done();
    });
  });
  it('with view using partial from extra package', function(done) {
    var content = 'var a = require("solidus/views/partial_holder3");';
    var expected = 'var a = {template:require("../../views/partial_holder3.hbs"),template_options:{helpers:require("../../helpers.js"),partials:{"partial":require("../../views/partial.hbs"),"extra/partial":require("../../node_modules/extra/partial.hbs"),"extra/conflict":require("../../views/extra/conflict.hbs")}}};'
    transformTools.runTransform(solidify, dummyJsFile, {content: content}, function(err, transformed) {
      assert.ifError(err);
      assert.equal(transformed, expected);
      done();
    });
  });
  // Requires with an extension other than .js are left untouched.
  it('with bad extension', function(done) {
    var content = 'var a = require("solidus/views/partial.hbs");var b = require("solidus/views/partial.html");';
    transformTools.runTransform(solidify, dummyJsFile, {content: content}, function(err, transformed) {
      assert.ifError(err);
      assert.equal(transformed, content);
      done();
    });
  });
  // Unknown view names are also passed through unchanged.
  it('with bad name', function(done) {
    var content = 'var a = require("solidus/views/wrong");';
    transformTools.runTransform(solidify, dummyJsFile, {content: content}, function(err, transformed) {
      assert.ifError(err);
      assert.equal(transformed, content);
      done();
    });
  });
});
<|start_filename|>solidus.js<|end_filename|>
// Public entry point for the solidus package.
var solidus = {};
solidus.Server = require('./lib/server.js');

// Boot a server instance; invoke `callback` once it has finished loading.
solidus.start = function(options, callback) {
  var server = new solidus.Server(options);
  if (callback) server.once('ready', callback);
  return server;
};

module.exports = solidus;
<|start_filename|>lib/utils.js<|end_filename|>
// Format a Solidus route so it can be used by the router
module.exports.formatRouteForExpress = function(route) {
return route.replace(/{([a-z_-]*)}/ig, ':$1'); // replace dynamic bits
};
// Expand variables like {this} in the string with the params values
module.exports.expandVariables = function(string, params) {
return string.replace(/\{([^\}]*)\}/ig, function(match, capture) {
return params[capture] || '';
});
};
// Checks if the headers contain a "no-cache" directive
module.exports.hasNoCacheHeader = function(headers) {
var header = headers && (headers['cache-control'] || headers['Cache-Control'] || headers['CACHE-CONTROL'] || headers['pragma'] || headers['Pragma'] || headers['PRAGMA']);
return /no-cache($|[^=])/.test(header);
};
<|start_filename|>test/fixtures/site 1/preprocessors/helpers.js<|end_filename|>
module.exports = function(context) {
context.test_string = 'Solidus';
return context;
};
<|start_filename|>lib/resource.js<|end_filename|>
const DEFAULT_RESOURCE_FRESHNESS = 1000 * 60 * 5; // 5 minutes
var BaseResource = require('solidus-client/lib/resource');
var ResourceResponse = require('./resource_response.js');
var utils = require('./utils.js');
// Wraps a solidus-client Resource, exposing its effective options and whether
// the configured request headers opt out of caching.
var Resource = function(options, auth, params, logger) {
  this.resource = new BaseResource(options, auth, params);
  this.options = this.resource.options;
  this.logger = logger;
  // Honor a "no-cache" directive in the request headers we will send.
  this.no_cache = utils.hasNoCacheHeader(this.options.headers);
};
// Fetch the resource and call back with (err, ResourceResponse).
// A resource without a URL resolves immediately to an empty object.
Resource.prototype.get = function(callback) {
  var self = this;
  if (!self.resource.url) return callback(null, {});
  self.resource.get(function(err, res) {
    // Only bail out early when there is no HTTP response to inspect;
    // an error WITH a response is still wrapped and passed along below.
    if (err && !res.response) return callback(err);
    self.logger.log('Requested resource: [' + res.response.status + '] ' + self.resource.url, 3);
    var resource_response = new ResourceResponse(res);
    if (!err && !resource_response.has_expiration) {
      // The response has no cache headers, cache for a default duration
      resource_response.expires_at += DEFAULT_RESOURCE_FRESHNESS;
    }
    callback(err, resource_response);
  });
};
module.exports = Resource;
| solidusjs/solidus |
<|start_filename|>index.js<|end_filename|>
const got = require('got')
const crypto = require('crypto')
const querystring = require('querystring')
//
// API docs: https://podcastindex-org.github.io/docs-api/#get-/search/byterm
//
// Root of the PodcastIndex REST API; every path below is relative to it.
const BASE_API_URL = 'https://api.podcastindex.org/api/1.0/'
// Search endpoints
const PATH_SEARCH_BY_TERM = 'search/byterm'
const PATH_SEARCH_EPISODE_BY_PERSON = 'search/byperson'
// Feed registration endpoints
const PATH_ADD_BY_FEED_URL = 'add/byfeedurl'
const PATH_ADD_BY_ITUNES_ID = 'add/byitunesid'
// Episode endpoints
const PATH_EPISODES_BY_FEED_ID = 'episodes/byfeedid'
const PATH_EPISODES_BY_FEED_URL = 'episodes/byfeedurl'
const PATH_EPISODES_BY_ITUNES_ID = 'episodes/byitunesid'
const PATH_EPISODES_BY_ID = 'episodes/byid'
const PATH_EPISODES_RANDOM = 'episodes/random'
// Podcast endpoints
const PATH_PODCASTS_BY_FEED_URL = 'podcasts/byfeedurl'
const PATH_PODCASTS_BY_FEED_ID = 'podcasts/byfeedid'
const PATH_PODCASTS_BY_ITUNES_ID = 'podcasts/byitunesid'
const PATH_PODCASTS_BY_TAG = 'podcasts/bytag'
const PATH_PODCASTS_TRENDING = 'podcasts/trending'
const PATH_PODCASTS_DEAD = 'podcasts/dead'
// Recency endpoints
const PATH_RECENT_FEEDS = 'recent/feeds'
const PATH_RECENT_EPISODES = 'recent/episodes'
const PATH_RECENT_NEWFEEDS = 'recent/newfeeds'
const PATH_RECENT_SOUNDBITES = 'recent/soundbites'
// Value-for-value endpoints
const PATH_VALUE_BY_FEED_ID = 'value/byfeedid'
const PATH_VALUE_BY_FEED_URL = 'value/byfeedurl'
// Misc endpoints
const PATH_STATS_CURRENT = 'stats/current'
const PATH_CATEGORIES_LIST = 'categories/list'
const PATH_HUB_PUBNOTIFIY = 'hub/pubnotify'
const qs = (o) => '?' + querystring.stringify(o)
// Normalize got's response: return the parsed body on success, or throw an
// Error carrying the server's description (`message`) and the HTTP status
// code (`code`). Previously this threw bare object literals, which have no
// stack trace and are not `instanceof Error`; real Error instances keep the
// same `message`/`code` contract for existing `catch (e) { e.code }` callers.
const withResponse = (response) => {
  const body = response.body
  const failed =
    response.statusCode === 500 ||
    (body.hasOwnProperty('status') && body.status === 'false')
  if (failed) {
    // Prefer the server-provided error description when present.
    const message = body.hasOwnProperty('description')
      ? body.description
      : 'Request failed.'
    const error = new Error(message)
    error.code = response.statusCode
    throw error
  }
  // Success (2xx with status !== 'false')
  return body
}
// Build a PodcastIndex API client. `key`/`secret` come from
// https://api.podcastindex.org/; `userAgent` optionally overrides the default.
module.exports = (key, secret, userAgent) => {
  if (!key || !secret) {
    throw new Error(
      'API Key and Secret are required from https://api.podcastindex.org/'
    )
  }
  // got instance preconfigured with the API's auth scheme: X-Auth-Date,
  // X-Auth-Key and an Authorization header of sha1(key + secret + date).
  const api = got.extend({
    responseType: 'json',
    prefixUrl: BASE_API_URL,
    throwHttpErrors: false,
    hooks: {
      beforeRequest: [
        (options) => {
          // Epoch time in seconds (fractional); the same value must be used
          // in both the header and the signature.
          let dt = new Date().getTime() / 1000
          options.headers['User-Agent'] =
            userAgent ||
            'PodcastIndexBot/@<EMAIL>'
          options.headers['X-Auth-Date'] = dt
          options.headers['X-Auth-Key'] = key
          options.headers['Authorization'] = crypto
            .createHash('sha1')
            .update(key + secret + dt)
            .digest('hex')
        },
      ],
    },
  })
  // GET an arbitrary API path with the given query object.
  const custom = async (path, queries) => {
    const response = await api(path + qs(queries))
    return withResponse(response)
  }
  return {
    api,
    custom,
    // NOTE: empty-string query values (clean/fullText/update) are
    // presence-only flags for this API.
    searchByTerm: async (q, val = '', clean = false, fullText = false) => {
      let queries = {
        q: q,
      }
      if (val !== '') queries['val'] = val
      if (clean) queries['clean'] = ''
      if (fullText) queries['fullText'] = ''
      return custom(PATH_SEARCH_BY_TERM, queries)
    },
    searchEpisodesByPerson: async (q, fullText = false) => {
      let queries = {
        q: q,
      }
      if (fullText) queries['fullText'] = ''
      return custom(PATH_SEARCH_EPISODE_BY_PERSON, queries)
    },
    podcastsByFeedUrl: async (feedUrl) => {
      return custom(PATH_PODCASTS_BY_FEED_URL, { url: feedUrl })
    },
    podcastsByFeedId: async (feedId) => {
      return custom(PATH_PODCASTS_BY_FEED_ID, { id: feedId })
    },
    podcastsByFeedItunesId: async (itunesId) => {
      return custom(PATH_PODCASTS_BY_ITUNES_ID, { id: itunesId })
    },
    podcastsByTag: async () => {
      return custom(PATH_PODCASTS_BY_TAG, { 'podcast-value': '' })
    },
    podcastsTrending: async (
      max = 10,
      since = null,
      lang = null,
      cat = null,
      notcat = null
    ) => {
      return custom(PATH_PODCASTS_TRENDING, {
        max: max,
        since: since,
        lang: lang,
        cat: cat,
        notcat: notcat,
      })
    },
    podcastsDead: async () => {
      return custom(PATH_PODCASTS_DEAD)
    },
    addByFeedUrl: async (feedUrl, chash = null, itunesId = null) => {
      return custom(PATH_ADD_BY_FEED_URL, {
        url: feedUrl,
        chash: chash,
        itunesid: itunesId,
      })
    },
    addByItunesId: async (itunesId) => {
      return custom(PATH_ADD_BY_ITUNES_ID, { id: itunesId })
    },
    episodesByFeedId: async (
      feedId,
      since = null,
      max = 10,
      fullText = false
    ) => {
      let queries = {
        id: feedId,
        since: since,
        max: max,
      }
      if (fullText) queries['fullText'] = ''
      return custom(PATH_EPISODES_BY_FEED_ID, queries)
    },
    episodesByFeedUrl: async (
      feedUrl,
      since = null,
      max = 10,
      fullText = false
    ) => {
      let queries = {
        url: feedUrl,
        since: since,
        max: max,
      }
      if (fullText) queries['fullText'] = ''
      return custom(PATH_EPISODES_BY_FEED_URL, queries)
    },
    episodesByItunesId: async (
      itunesId,
      since = null,
      max = 10,
      fullText = false
    ) => {
      let queries = {
        id: itunesId,
        since: since,
        max: max,
      }
      if (fullText) queries['fullText'] = ''
      return custom(PATH_EPISODES_BY_ITUNES_ID, queries)
    },
    episodesById: async (id, fullText = false) => {
      let queries = {
        id: id,
      }
      if (fullText) queries['fullText'] = ''
      return custom(PATH_EPISODES_BY_ID, queries)
    },
    episodesRandom: async (
      max = 1,
      lang = null,
      cat = null,
      notcat = null,
      fullText = false
    ) => {
      let queries = {
        max: max,
        lang: lang,
        cat: cat,
        notcat: notcat,
      }
      if (fullText) queries['fullText'] = ''
      return custom(PATH_EPISODES_RANDOM, queries)
    },
    recentFeeds: async (
      max = 40,
      since = null,
      cat = null,
      lang = null,
      notcat = null
    ) => {
      return custom(PATH_RECENT_FEEDS, {
        max: max,
        since: since,
        lang: lang,
        cat: cat,
        notcat: notcat,
      })
    },
    recentEpisodes: async (
      max = 10,
      excludeString = null,
      before = null,
      fullText = false
    ) => {
      let queries = {
        max: max,
        excludeString: excludeString ? excludeString : null,
        before: before,
      }
      if (fullText) queries['fullText'] = ''
      return custom(PATH_RECENT_EPISODES, queries)
    },
    recentNewFeeds: async (max = 20, since = null) => {
      return custom(PATH_RECENT_NEWFEEDS, {
        max: max,
        since: since,
      })
    },
    recentSoundbites: async (max = 20) => {
      return custom(PATH_RECENT_SOUNDBITES, {
        max: max,
      })
    },
    valueByFeedId: async (feedId) => {
      return custom(PATH_VALUE_BY_FEED_ID, {
        id: feedId,
      })
    },
    valueByFeedUrl: async (feedUrl) => {
      return custom(PATH_VALUE_BY_FEED_URL, {
        url: feedUrl,
      })
    },
    statsCurrent: async () => {
      return custom(PATH_STATS_CURRENT)
    },
    categoriesList: async () => {
      return custom(PATH_CATEGORIES_LIST)
    },
    hubPubNotify: async (feedId, update = true) => {
      let queries = {
        id: feedId,
      }
      if (update) queries['update'] = ''
      return custom(PATH_HUB_PUBNOTIFIY, queries)
    },
  }
}
<|start_filename|>test/api.js<|end_filename|>
// Live integration tests against the PodcastIndex API — network calls can be
// slow, so give jest a generous timeout.
jest.setTimeout(10000)
const lib = require('../index.js')
// Client with real credentials taken from the environment.
const api = lib(
  process.env.PODCASTINDEX_API_KEY,
  process.env.PODCASTINDEX_API_SECRET
)
// Client with bogus credentials (available for auth-failure tests).
const apiBadCreds = lib('ABC', '123')
// Well-known fixtures referenced by the assertions below.
const SEARCH_TERM = '<NAME> Experience'
const SEARCH_PERSON = '<NAME>'
const FEED_ID = 550168
const FEED_ID_VALUE = 920666
const FEED_ITUNES_ID = 360084272
const FEED_TITLE = 'The Joe Rogan Experience'
const FEED_URL = 'http://joeroganexp.joerogan.libsynpro.com/rss'
const FEED_URL_NOT_FOUND = 'http://www.google.com/'
const FEED_URL_VALUE = 'https://mp3s.nashownotes.com/pc20rss.xml'
const EPISODE_ID = 16795090
const RECENT_FEEDS_COUNT = 3
const RECENT_EPISODES_COUNT = 3
const RECENT_EPISODES_EXCLUDE = 'news'
// Constructor must reject missing credentials up front.
it('Requires API credentials', () => {
  expect(() => {
    const apiNoCreds = lib()
  }).toThrow()
})
// `custom` hits an arbitrary path with a raw query object.
it('Custom', async () => {
  expect.assertions(4)
  const results = await api.custom('search/byterm', {
    q: SEARCH_TERM,
  })
  expect(results.status).toEqual('true')
  expect(results.feeds.length).toBeGreaterThan(0)
  expect(results).toHaveProperty('query', SEARCH_TERM)
  expect(results).toHaveProperty('feeds')
})
it('Search by term (async)', async () => {
  expect.assertions(4)
  const results = await api.searchByTerm(SEARCH_TERM)
  expect(results.status).toEqual('true')
  expect(results.feeds.length).toBeGreaterThan(0)
  expect(results).toHaveProperty('query', SEARCH_TERM)
  expect(results).toHaveProperty('feeds')
  // expect(results.feeds[0].id).toEqual(FEED_ID)
  // expect(results.feeds[0].title).toEqual(FEED_TITLE)
})
it('Search by term (promise)', async () => {
  expect.assertions(4)
  return api.searchByTerm(SEARCH_TERM).then((results) => {
    expect(results.status).toEqual('true')
    expect(results.feeds.length).toBeGreaterThan(0)
    expect(results).toHaveProperty('query', SEARCH_TERM)
    expect(results).toHaveProperty('feeds')
    // expect(results.feeds[0].id).toEqual(FEED_ID)
    // expect(results.feeds[0].title).toEqual(FEED_TITLE)
  })
})
it('Search by term (value)', async () => {
  expect.assertions(4)
  const searchTerm = 'no agenda'
  const results = await api.searchByTerm(searchTerm, 'lightning')
  expect(results.status).toEqual('true')
  expect(results.feeds.length).toBeGreaterThan(0)
  expect(results).toHaveProperty('query', searchTerm)
  expect(results).toHaveProperty('feeds')
  // expect(results.feeds[0].id).toEqual(FEED_ID)
  // expect(results.feeds[0].title).toEqual(FEED_TITLE)
})
it('Search episodes by person', async () => {
  expect.assertions(4)
  const results = await api.searchEpisodesByPerson(SEARCH_PERSON)
  expect(results.status).toEqual('true')
  expect(results.items.length).toBeGreaterThan(0)
  expect(results).toHaveProperty('query', SEARCH_PERSON)
  expect(results).toHaveProperty('items')
})
// The add/* tests are disabled: they mutate the live index.
// it('Add feed by URL', async () => {
//   const results = await api.addByFeedUrl(FEED_URL)
//   expect.assertions(1)
//   expect(results.status).toEqual('true')
// })
// it('Add feed by iTunes ID', async () => {
//   const results = await api.addByItunesId(FEED_ITUNES_ID)
//   console.log(results)
//   expect.assertions(1)
//   expect(results.status).toEqual('true')
// })
// Episode lookups by the various feed identifiers.
it('Episodes By Feed Id', async () => {
  expect.assertions(2)
  const results = await api.episodesByFeedId(FEED_ID)
  // console.log(results)
  expect(results.items.length).toBeGreaterThan(0)
  expect(results).toHaveProperty('query', FEED_ID.toString())
  // expect(results.items[0].feedId).toEqual(FEED_ID.toString()) // TODO is it feedid or feedId?
})
it('Episodes By Feed URL', async () => {
  expect.assertions(3)
  const results = await api.episodesByFeedUrl(FEED_URL)
  expect(results.items.length).toBeGreaterThan(0)
  expect(results).toHaveProperty('query', FEED_URL)
  expect(results.items[0].feedId).toEqual(FEED_ID)
})
it('Episodes By Feed iTunes ID', async () => {
  expect.assertions(3)
  const results = await api.episodesByItunesId(FEED_ITUNES_ID)
  expect(results.items.length).toBeGreaterThan(0)
  expect(results).toHaveProperty('query', FEED_ITUNES_ID.toString())
  expect(results.items[0].feedId).toEqual(FEED_ID)
})
it('Episodes By ID', async () => {
  expect.assertions(1)
  const results = await api.episodesById(EPISODE_ID)
  // expect(results).toHaveProperty('query', EPISODE_ID.toString())
  expect(results.episode.id).toEqual(EPISODE_ID)
})
it('Episodes random', async () => {
  expect.assertions(2)
  const results = await api.episodesRandom(2)
  // expect(results).toHaveProperty('query', EPISODE_ID.toString())
  expect(results.count).toEqual(2)
  expect(results.episodes.length).toEqual(2)
})
// Podcast (feed-level) lookups.
it('Podcasts By Feed URL', async () => {
  expect.assertions(3)
  const results = await api.podcastsByFeedUrl(FEED_URL)
  expect(results).toHaveProperty('query.url', FEED_URL)
  expect(results.feed.id).toEqual(FEED_ID)
  expect(results.feed.itunesId).toEqual(FEED_ITUNES_ID)
})
// Unknown feeds surface as a thrown error carrying the HTTP status code.
it('Podcasts By Feed URL not found', async () => {
  expect.assertions(1)
  try {
    const results = await api.podcastsByFeedUrl(FEED_URL_NOT_FOUND)
  } catch (e) {
    expect(e.code).toEqual(400)
  }
})
it('Podcasts By Feed ID', async () => {
  expect.assertions(3)
  const results = await api.podcastsByFeedId(FEED_ID)
  expect(results).toHaveProperty('query.id', FEED_ID.toString())
  expect(results.feed.id).toEqual(FEED_ID)
  expect(results.feed.itunesId).toEqual(FEED_ITUNES_ID)
})
it('Podcasts By Feed iTunes ID', async () => {
  expect.assertions(3)
  const results = await api.podcastsByFeedItunesId(FEED_ITUNES_ID)
  expect(results).toHaveProperty('query.id', FEED_ITUNES_ID.toString())
  expect(results.feed.id).toEqual(FEED_ID)
  expect(results.feed.itunesId).toEqual(FEED_ITUNES_ID)
})
it('Podcasts By tag', async () => {
  expect.assertions(3)
  const results = await api.podcastsByTag()
  expect(results.status).toEqual('true')
  expect(results.feeds.length).toBeGreaterThan(1)
  expect(results.count).toBeGreaterThan(1)
})
it('Podcasts trending', async () => {
  expect.assertions(3)
  const results = await api.podcastsTrending()
  expect(results.status).toEqual('true')
  expect(results.feeds.length).toEqual(10)
  expect(results.count).toEqual(10)
})
it('Podcasts dead', async () => {
  expect.assertions(3)
  const results = await api.podcastsDead()
  expect(results.status).toEqual('true')
  expect(results.feeds.length).toBeGreaterThan(1)
  expect(results.count).toBeGreaterThan(1)
})
// Recency, value-for-value, stats and category endpoints.
it('Recent Feeds', async () => {
  expect.assertions(1)
  const results = await api.recentFeeds(RECENT_FEEDS_COUNT, null, 'news')
  // console.log(results)
  // expect(results).toHaveProperty('count', RECENT_FEEDS_COUNT)
  // expect(results).toHaveProperty('max', RECENT_FEEDS_COUNT.toString())
  expect(results.feeds.length).toEqual(RECENT_FEEDS_COUNT)
})
it('Recent Feeds in language', async () => {
  expect.assertions(2)
  const results = await api.recentFeeds(RECENT_FEEDS_COUNT, null, null, 'ja')
  // console.log(results)
  // expect(results).toHaveProperty('count', RECENT_FEEDS_COUNT)
  // expect(results).toHaveProperty('max', RECENT_FEEDS_COUNT.toString())
  expect(results.feeds.length).toEqual(RECENT_FEEDS_COUNT)
  expect(results.feeds[0].language).toEqual('ja')
})
it('Recent Episodes', async () => {
  expect.assertions(3)
  const results = await api.recentEpisodes(RECENT_FEEDS_COUNT)
  expect(results).toHaveProperty('count', RECENT_FEEDS_COUNT)
  expect(results).toHaveProperty('max', RECENT_FEEDS_COUNT.toString())
  expect(results.items.length).toEqual(RECENT_FEEDS_COUNT)
})
// Same endpoint, exercising the excludeString filter on titles.
it('Recent Episodes', async () => {
  expect.assertions(6)
  const results = await api.recentEpisodes(
    RECENT_EPISODES_COUNT,
    RECENT_EPISODES_EXCLUDE
  )
  expect(results).toHaveProperty('count', RECENT_EPISODES_COUNT)
  expect(results).toHaveProperty('max', RECENT_EPISODES_COUNT.toString())
  expect(results.items.length).toEqual(RECENT_EPISODES_COUNT)
  expect(results.items[0].title).toEqual(
    expect.not.stringContaining(RECENT_EPISODES_EXCLUDE)
  )
  expect(results.items[1].title).toEqual(
    expect.not.stringContaining(RECENT_EPISODES_EXCLUDE)
  )
  expect(results.items[2].title).toEqual(
    expect.not.stringContaining(RECENT_EPISODES_EXCLUDE)
  )
})
it('Recent New Feeds', async () => {
  expect.assertions(1)
  const results = await api.recentNewFeeds()
  expect(results).toHaveProperty('status', 'true')
})
it('Recent soundbites', async () => {
  expect.assertions(3)
  const results = await api.recentSoundbites(20)
  expect(results).toHaveProperty('status', 'true')
  expect(results.items.length).toBeGreaterThan(1)
  expect(results.count).toBeGreaterThan(1)
})
it('Value By Feed URL', async () => {
  expect.assertions(2)
  const results = await api.valueByFeedUrl(FEED_URL_VALUE)
  expect(results).toHaveProperty('query.url', FEED_URL_VALUE)
  expect(results).toHaveProperty('value')
})
it('Value By Feed ID', async () => {
  expect.assertions(2)
  const results = await api.valueByFeedId(FEED_ID_VALUE)
  expect(results).toHaveProperty('query.id', FEED_ID_VALUE.toString())
  expect(results).toHaveProperty('value')
})
it('Stats Current', async () => {
  expect.assertions(2)
  const results = await api.statsCurrent()
  expect(results).toHaveProperty('status', 'true')
  expect(results.stats).toHaveProperty('feedCountTotal')
})
it('Categories list', async () => {
  expect.assertions(5)
  const results = await api.categoriesList()
  expect(results).toHaveProperty('status', 'true')
  expect(results.feeds.length).toBeGreaterThan(10)
  expect(results.count).toBeGreaterThan(10)
  expect(results.feeds[0]).toHaveProperty('id', 1)
  expect(results.feeds[0]).toHaveProperty('name', 'Arts')
})
// Disabled: notifies the hub about a live feed.
// it('Hub pun notify', async () => {
//   expect.assertions(2)
//   const results = await api.hubPubNotify(75075)
//   expect(results).toHaveProperty('status', 'true')
//   expect(results).toHaveProperty('description', 'Feed marked for immediate update.')
// })
| comster/podcast-index-api |
<|start_filename|>gson/src/test/java/com/google/gson/internal/bind/JsonTreeReaderTest.java<|end_filename|>
/*
* Copyright (C) 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gson.internal.bind;
import com.google.gson.JsonArray;
import com.google.gson.JsonNull;
import com.google.gson.JsonObject;
import com.google.gson.stream.JsonToken;
import java.io.IOException;
import junit.framework.TestCase;
@SuppressWarnings("resource")
public class JsonTreeReaderTest extends TestCase {
  /** Skipping the value of an empty object must consume the whole document. */
  public void testSkipValue_emptyJsonObject() throws IOException {
    JsonTreeReader reader = new JsonTreeReader(new JsonObject());
    reader.skipValue();
    assertEquals(JsonToken.END_DOCUMENT, reader.peek());
  }

  /**
   * Skipping a populated object (arrays, booleans, numbers, nulls, nested
   * objects and strings) must also consume the whole document.
   */
  public void testSkipValue_filledJsonObject() throws IOException {
    JsonArray array = new JsonArray();
    array.add('c');
    array.add("text");
    JsonObject nested = new JsonObject();
    nested.addProperty("n", 2L);
    JsonObject root = new JsonObject();
    root.add("a", array);
    root.addProperty("b", true);
    root.addProperty("i", 1);
    root.add("n", JsonNull.INSTANCE);
    root.add("o", nested);
    root.addProperty("s", "text");
    JsonTreeReader reader = new JsonTreeReader(root);
    reader.skipValue();
    assertEquals(JsonToken.END_DOCUMENT, reader.peek());
  }
}
<|start_filename|>gson/src/test/java/com/google/gson/internal/UnsafeAllocatorInstantiationTest.java<|end_filename|>
/*
* Copyright (C) 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gson.internal;
import junit.framework.TestCase;
/**
* Test unsafe allocator instantiation
* @author <NAME>
*/
public final class UnsafeAllocatorInstantiationTest extends TestCase {

  public interface Interface {
  }

  public static abstract class AbstractClass {
  }

  public static class ConcreteClass {
  }

  /**
   * Ensure that the {@link java.lang.UnsupportedOperationException} is thrown when trying
   * to instantiate an interface
   */
  public void testInterfaceInstantiation() {
    UnsafeAllocator unsafeAllocator = UnsafeAllocator.create();
    try {
      unsafeAllocator.newInstance(Interface.class);
      fail();
    } catch (Exception e) {
      // assertEquals takes (expected, actual); the previous reversed order
      // produced a misleading failure message on a wrong exception type.
      assertEquals(UnsupportedOperationException.class, e.getClass());
    }
  }

  /**
   * Ensure that the {@link java.lang.UnsupportedOperationException} is thrown when trying
   * to instantiate an abstract class
   */
  public void testAbstractClassInstantiation() {
    UnsafeAllocator unsafeAllocator = UnsafeAllocator.create();
    try {
      unsafeAllocator.newInstance(AbstractClass.class);
      fail();
    } catch (Exception e) {
      assertEquals(UnsupportedOperationException.class, e.getClass());
    }
  }

  /**
   * Ensure that no exception is thrown when trying to instantiate a concrete class
   */
  public void testConcreteClassInstantiation() {
    UnsafeAllocator unsafeAllocator = UnsafeAllocator.create();
    try {
      unsafeAllocator.newInstance(ConcreteClass.class);
    } catch (Exception e) {
      fail();
    }
  }
}
| cadaver123/gson |
<|start_filename|>windex.css<|end_filename|>
/* Windex directory listing
------------------------- */
/* page: centered column on a light grey background */
body {
  font-family: system-ui, -apple-system, 'Segoe UI', sans-serif;
  color: #333;
  background: #EEE;
  padding-bottom: 120px;
}
.container {
  max-width: 600px;
  margin: 0 auto;
}
/* links */
a {
  text-decoration: none;
  color: #15E;
}
a:visited { color: #818;}
a:hover { color: #038;}
/* table */
table {
  width: 100%;
  border-collapse: collapse;
}
/* hide header row */
tr:first-child { display: none; }
/* icon */
td:nth-child(1) {
  width: 24px;
  vertical-align: middle;
}
td:nth-child(1) a { display: block; }
/* file name */
td:nth-child(2) {
  width: 100%;
}
td:nth-child(2) a {
  display: block;
  padding: 8px 10px;
  border-radius: 4px;
  position: relative;
}
/* pointer for folders: right-pointing chevron drawn with two borders */
td:nth-child(2) a[href$="/"]:after {
  content: '';
  display: block;
  width: 9px;
  height: 9px;
  position: absolute;
  right: 15px;
  top: 11px;
  transform: rotate(45deg);
  border-style: solid;
  border-width: 3px 3px 0 0;
  border-color: #08F;
}
/* Parent directory */
/* select href="/folder/" but not "folder/" */
/* flips the chevron to point left and greys it out */
td:nth-child(2) a[href^="/"][href$="/"]:after {
  border-width: 0 0 3px 3px;
  border-color: #AAA;
  right: 11px
}
td:nth-child(2) a:hover {
  background: #CDF;
}
/* size */
td:nth-child(3) {
  color: #888;
  font-size: 0.9rem;
}
| desandro/windex |
<|start_filename|>chicago-dice-game-ex1/src/components/GameProgress.js<|end_filename|>
import React from 'react';
import { H2 } from '../styled';
import { useGameStateContext } from '../useGameContext';
// Shows the current target round and the active player's roll total.
const PlayerProgress = ({ round, playerTurn, dieOne, dieTwo }) => {
  return (
    <>
      Target: {round} <br />
      Player {playerTurn} rolled <span>{dieOne + dieTwo}</span>
    </>
  );
};
// Announces the winner (or a tie) based on the two final scores.
const Winner = ({ playerAScore, playerBScore }) => {
  return playerAScore > playerBScore ? (
    <>
      Winner is <span>Player A</span>!{" "}
    </>
  ) : playerBScore > playerAScore ? (
    <>
      Winner is <span>Player B</span>!
    </>
  ) : (
    <>
      It's a tie. You are all losers.{" "}
      <span role="img" aria-label="ROFL">
        🤣
      </span>
    </>
  );
};
// Headline area: greeting on round 1, then per-round progress, plus the final
// result once the game has ended.
const GameProgress = () => {
  const {
    round,
    playerTurn,
    dieOne,
    dieTwo,
    gameEnded,
    playerAScore,
    playerBScore
  } = useGameStateContext();
  if (round === 1) {
    return <H2>Welcome to Chicago Dice</H2>;
  }
  if (round > 1) {
    return (
      <H2>
        {gameEnded ? (
          <div>
            <Winner {...{ playerAScore, playerBScore }} />
          </div>
        ) : null}
        <PlayerProgress {...{ round, playerTurn, dieOne, dieTwo }} />
      </H2>
    );
  }
  // NOTE(review): round values below 1 fall through and render nothing
  // (undefined) — presumably round always starts at 1; confirm in provider.
};
export default GameProgress;
<|start_filename|>chicago-dice-game-ex3/src/index.js<|end_filename|>
import './styles.css';
import React from 'react';
import ReactDOM from 'react-dom';
import DiceBoard from './components/DiceBoard';
import GameProgress from './components/GameProgress';
import PlayerScores from './components/PlayerScores';
import RollDiceButton from './components/RollDiceButton';
import { GameProvider } from './provider';
// if (process.env.NODE_ENV === 'development') {
// const whyDidYouRender = require('@welldone-software/why-did-you-render');
// whyDidYouRender(React, {
// onlyLogs: true,
// //include: [/^DiceBoard/]
// include: [/^[/A-za-z/]/]
// });
// }
function App() {
return (
<div className="App">
<GameProvider>
<GameProgress />
<DiceBoard />
<RollDiceButton />
<PlayerScores />
</GameProvider>
</div>
);
}
const rootElement = document.getElementById("root");
ReactDOM.render(<App />, rootElement);
<|start_filename|>chicago-dice-game-ex2/src/components/DiceBoard.js<|end_filename|>
import { random } from 'lodash';
import React, { useEffect } from 'react';
import { DiceContainer } from '../styled';
import { useGameContext } from '../useGameContext';
const DiceBoard = () => {
const { dieOne, dieTwo, rollingDice, updateDice } = useGameContext();
useEffect(() => {
function rollDice() {
return {
dieOne: random(1, 6),
dieTwo: random(1, 6)
};
}
if (rollingDice) {
const payload = rollDice();
updateDice(payload);
}
}, [rollingDice, updateDice]);
return (
<>
<DiceContainer>
<span>{dieOne}</span>
<span>{dieTwo}</span>
</DiceContainer>
</>
);
};
export default DiceBoard;
<|start_filename|>chicago-dice-game-ex3/src/styled.js<|end_filename|>
import styled from 'styled-components';
// Container for the two dice faces.
export const DiceContainer = styled.div`
  display: flex;
  justify-content: space-around;
  background: blueviolet;
  color: white;
  padding: 0.5em;
  font-size: 2em;
  span {
    border: 1px solid white;
    padding: 0.5em 0.75em;
    border-radius: 0.3em;
  }
`;
// Row holding both players' score panels.
export const ScoreContainer = styled.div`
  display: flex;
  justify-content: space-around;
  align-items: center;
  margin-top: 1em;
  border-top: 1px dotted darkgray;
`;
// The "Roll the Dice" button.
export const Button = styled.button`
  margin-top: 1em;
  font-size: 1em;
  padding: 0.5em;
  border-radius: 0.25em;
  cursor: pointer;
`;
// Game-progress heading; min-height keeps the layout stable between rounds.
export const H2 = styled.h2`
  color: black;
  min-height: 2em;
  span {
    color: blueviolet;
  }
  div {
    font-size: 2rem;
  }
`;
// Single player's score panel.
export const PlayerContainer = styled.div`
  padding: 1em;
  margin-top: 1em;
`;
<|start_filename|>chicago-dice-game-ex2/src/useGameContext.js<|end_filename|>
import { useContext } from 'react';
import { GameContext } from './provider';
// Alternates turns: "A" hands over to "B"; anything else (including the
// initial null turn) hands over to "A".
const decideTurn = (current) => {
  if (current === "A") {
    return "B";
  }
  return "A";
};
// Combines game state with the two dispatch helpers used by the UI.
const useGameContext = () => {
  const [state, dispatch] = useContext(GameContext);
  if (dispatch === undefined) {
    throw new Error("Must have dispatch defined");
  }

  // Start a roll: flip the turn, advance the round when play returns to A,
  // and flag the end of the game when B plays round 12.
  const initRoll = (currentPlayer) => {
    const nextTurn = decideTurn(currentPlayer);
    dispatch((draft) => {
      draft.rollingDice = true;
      draft.playerTurn = nextTurn;
      if (nextTurn === "A") {
        draft.round += 1;
      }
      if (draft.round === 12 && nextTurn === "B") {
        draft.gameEnded = true;
      }
    });
  };

  // Record the rolled dice and score a point when their sum hits the round.
  const updateDice = ({ dieOne, dieTwo }) => {
    dispatch((draft) => {
      draft.rollingDice = false;
      draft.dieOne = dieOne;
      draft.dieTwo = dieTwo;
      const hit = dieOne + dieTwo === draft.round;
      if (draft.playerTurn === "A" && hit) {
        draft.playerAScore += 1;
      }
      if (draft.playerTurn === "B" && hit) {
        draft.playerBScore += 1;
      }
    });
  };

  return { ...state, initRoll, updateDice };
};
export { useGameContext };
<|start_filename|>chicago-dice-game-ex3/src/components/RollDiceButton.js<|end_filename|>
import React from 'react';
import { Button } from '../styled';
import { useGameContext } from '../useGameContext';
const RollDiceButton = () => {
const [state, dispatch] = useGameContext();
const { playerTurn, gameEnded } = state;
const { initRoll } = dispatch;
function handleClick() {
initRoll(playerTurn);
}
return (
<Button onClick={handleClick} disabled={gameEnded}>
Roll the Dice
</Button>
);
};
export default RollDiceButton;
<|start_filename|>chicago-dice-game-ex3/src/provider.js<|end_filename|>
import React from 'react';
import { useImmer } from 'use-immer';
// Initial game state: no dice rolled, round 1, nobody's turn yet.
const defaultState = {
  dieOne: 0,
  dieTwo: 0,
  round: 1,
  playerAScore: 0,
  playerBScore: 0,
  playerTurn: null,
  rollingDice: false,
  gameEnded: false
};
// State and dispatch live in separate contexts so consumers can subscribe
// to only the part they need.
const StateContext = React.createContext();
const DispatchContext = React.createContext();
const GameProvider = ({ children }) => {
const [state, dispatch] = useImmer({ ...defaultState });
// alternatively without Immer: const [state, dispatch] = useState({});
return (
<StateContext.Provider value={state}>
<DispatchContext.Provider value={dispatch}>
{children}
</DispatchContext.Provider>
</StateContext.Provider>
);
};
export { GameProvider, StateContext, DispatchContext };
<|start_filename|>chicago-dice-game-ex1/src/useGameContext.js<|end_filename|>
import { useContext } from 'react';
import { DispatchContext, StateContext } from './provider';
// Read-only access to the game state; throws outside a GameProvider.
const useGameStateContext = () => {
  const state = useContext(StateContext);
  if (state === undefined) throw new Error("Ut oh, where is my state?");
  return state;
};
const useGameDispatchContext = () => {
const dispatch = useContext(DispatchContext);
if (dispatch === undefined) {
throw new Error("Ut oh, where is my dispatch?");
}
return dispatch;
};
export { useGameDispatchContext, useGameStateContext };
<|start_filename|>chicago-dice-game-ex1/src/provider.js<|end_filename|>
import produce from 'immer';
import React, { useReducer } from 'react';
// Initial game state: no dice rolled, round 1, nobody's turn yet.
const defaultState = {
  dieOne: 0,
  dieTwo: 0,
  round: 1,
  playerAScore: 0,
  playerBScore: 0,
  playerTurn: null,
  rollingDice: false,
  gameEnded: false
};
// Alternates turns: "A" hands over to "B"; anything else (including the
// initial null turn) hands over to "A".
const decideTurn = (current) => {
  if (current === "A") {
    return "B";
  }
  return "A";
};
// Separate contexts for dispatch and state so consumers subscribe to only
// the part they need.
const DispatchContext = React.createContext();
const StateContext = React.createContext();
// Action type constants shared between the reducer and its callers.
const ACTION_TYPES = {
  initRoll: "INIT_ROLL",
  updateDice: "UPDATE_DICE_AND_SCORE"
};
// Game reducer, written with immer's produce: handlers mutate `draft`
// while `state` stays the untouched previous snapshot.
const reducer = (state, action) =>
  produce(state, (draft) => {
    switch (action.type) {
      case ACTION_TYPES.initRoll: {
        // Flip the turn, advance the round when play returns to A, and end
        // the game once B has played round 12.
        const next = decideTurn(action.payload);
        draft.rollingDice = true;
        draft.playerTurn = next;
        if (next === "A") {
          draft.round += 1;
        }
        if (state.round === 12 && next === "B") {
          draft.gameEnded = true;
        }
        return draft;
      }
      case ACTION_TYPES.updateDice: {
        // Record the rolled dice and score a point when their sum matches
        // the current round target.
        const { dieOne, dieTwo } = action.payload;
        draft.rollingDice = false;
        draft.dieOne = dieOne;
        draft.dieTwo = dieTwo;
        const hit = dieOne + dieTwo === draft.round;
        if (state.playerTurn === "A" && hit) {
          draft.playerAScore += 1;
        }
        if (state.playerTurn === "B" && hit) {
          draft.playerBScore += 1;
        }
        return draft;
      }
      default: {
        throw new Error(`Unhandled action type: ${action.type}`);
      }
    }
  });
const GameProvider = ({ children }) => {
const initState = {
...defaultState
};
const [state, dispatch] = useReducer(reducer, initState);
return (
<StateContext.Provider value={state}>
<DispatchContext.Provider value={dispatch}>
{children}
</DispatchContext.Provider>
</StateContext.Provider>
);
};
export { GameProvider, DispatchContext, StateContext, ACTION_TYPES };
<|start_filename|>chicago-dice-game-ex3/src/useGameContext.js<|end_filename|>
import { useContext } from 'react';
import { DispatchContext, StateContext } from './provider';
// Alternates turns: "A" hands over to "B"; anything else (including the
// initial null turn) hands over to "A".
const decideTurn = (current) => {
  if (current === "A") {
    return "B";
  }
  return "A";
};
// Read-only access to the game state; throws outside a GameProvider.
const useGameStateContext = () => {
  const state = useContext(StateContext);
  if (state === undefined) throw new Error("Ut oh, where is my state?");
  return state;
};
// Wraps the immer dispatch in the two domain-level operations the UI uses.
const useGameDispatchContext = () => {
  const dispatch = useContext(DispatchContext);
  if (dispatch === undefined) {
    throw new Error("Ut oh, where is my dispatch?");
  }

  // Start a roll: flip the turn, advance the round when play returns to A,
  // and flag the end of the game when B plays round 12.
  const initRoll = (currentPlayer) => {
    const next = decideTurn(currentPlayer);
    dispatch((draft) => {
      draft.rollingDice = true;
      draft.playerTurn = next;
      if (next === "A") {
        draft.round += 1;
      }
      if (draft.round === 12 && next === "B") {
        draft.gameEnded = true;
      }
    });
  };

  // Record the rolled dice and score a point when their sum hits the round.
  const updateDice = ({ dieOne, dieTwo }) => {
    dispatch((draft) => {
      draft.rollingDice = false;
      draft.dieOne = dieOne;
      draft.dieTwo = dieTwo;
      const hit = dieOne + dieTwo === draft.round;
      if (draft.playerTurn === "A" && hit) {
        draft.playerAScore += 1;
      }
      if (draft.playerTurn === "B" && hit) {
        draft.playerBScore += 1;
      }
    });
  };

  return { initRoll, updateDice };
};
// Tuple-style accessor mirroring useReducer: [state, dispatchers].
const useGameContext = () => [useGameStateContext(), useGameDispatchContext()];

export { useGameContext };
<|start_filename|>chicago-dice-game-ex1/src/components/DiceBoard.js<|end_filename|>
import { random } from 'lodash';
import React, { useEffect } from 'react';
import { ACTION_TYPES } from '../provider';
import { DiceContainer } from '../styled';
import { useGameDispatchContext, useGameStateContext } from '../useGameContext';
const DiceBoard = () => {
const { dieOne, dieTwo, rollingDice } = useGameStateContext();
const dispatch = useGameDispatchContext();
useEffect(() => {
function rollDice() {
return {
dieOne: random(1, 6),
dieTwo: random(1, 6)
};
}
if (rollingDice) {
dispatch({
type: ACTION_TYPES.updateDice,
payload: rollDice()
});
}
}, [rollingDice, dispatch]);
return (
<>
<DiceContainer>
<span>{dieOne}</span>
<span>{dieTwo}</span>
</DiceContainer>
</>
);
};
export default DiceBoard;
<|start_filename|>chicago-dice-game-ex2/src/components/PlayerScores.js<|end_filename|>
import React from 'react';
import { PlayerContainer, ScoreContainer } from '../styled';
import { useGameContext } from '../useGameContext';
const PlayerScores = () => {
const { playerAScore, playerBScore } = useGameContext();
return (
<ScoreContainer>
<PlayerContainer>
<h4>Player A</h4>
<div>Score: {playerAScore}</div>
</PlayerContainer>
<PlayerContainer>
<h4>Player B</h4>
<div>Score: {playerBScore}</div>
</PlayerContainer>
</ScoreContainer>
);
};
export default PlayerScores;
<|start_filename|>chicago-dice-game-ex1/src/components/RollDiceButton.js<|end_filename|>
import React from 'react';
import { ACTION_TYPES } from '../provider';
import { Button } from '../styled';
import { useGameDispatchContext, useGameStateContext } from '../useGameContext';
const RollDiceButton = () => {
const { playerTurn, gameEnded } = useGameStateContext();
const dispatch = useGameDispatchContext();
function handleClick() {
dispatch({
type: ACTION_TYPES.initRoll,
payload: playerTurn
});
}
return (
<Button onClick={handleClick} disabled={gameEnded}>
Roll the Dice
</Button>
);
}
export default RollDiceButton; | cherihung/react-providers-pattern |
<|start_filename|>test/controller.error.test.js<|end_filename|>
const request = require('superagent');
// Integration tests against the running example server (see test/init.js):
// errors thrown in controllers/before hooks must surface with the status
// and body carried by MkbugError.
describe('Controller Error', () => {
  it('Error from before String', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/errortest?type=StringError');
    } catch (err) {
      // superagent rejects on non-2xx; the error object carries the response
      ret = err;
    } finally {
      expect(ret.status).toBe(400);
      expect(ret.response.text).toBe('Error Message');
    }
  });
  it('Error from before Json', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/errortest?type=JsonError');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(400);
      expect(ret.response.body).toEqual({ msg: 'Error Message' });
    }
  });
  it('Error from func', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/errortest');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(405);
      expect(ret.response.text).toBe('Request not allowed!');
    }
  });
  it('Error from return', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/errortest/case');
    } catch (err) {
      ret = err;
    } finally {
      // A returned (not thrown) MkbugError is serialized as a normal 200 body.
      expect(ret.status).toBe(200);
      expect(ret.body).toEqual({
        status: 405,
        body: 'Request not allowed!',
        name: 'MkbugError'
      });
    }
  });
});
<|start_filename|>example/controller/ResponseTest.js<|end_filename|>
const fs = require('fs');
const path = require('path');
const { BaseController } = require('../../index');
// Example controller exercising every response body type the framework
// supports: string, number, JSON, Buffer and Stream.
module.exports = class ResponseTest extends BaseController {
  getStringAction() {
    return 'ok';
  }
  getNumberAction() {
    return 10086;
  }
  getJSONAction() {
    return { msg: 'ok' };
  }
  // Buffers are written directly via res.end().
  getBufferAction() {
    return fs.readFileSync(path.resolve(process.cwd(), 'example', 'data', 'test'));
  }
  // Streams are piped into the response.
  getStreamAction() {
    return fs.createReadStream(path.resolve(process.cwd(), 'example', 'data', 'test'));
  }
};
<|start_filename|>example/plugin/TestPlugin3.js<|end_filename|>
const { BasePlugin, MkbugError } = require('./../../index');
// Example plugin: rejects requests carrying ?type=plugin3 with a
// JSON-bodied 400 MkbugError.
module.exports = class TestPlugin3 extends BasePlugin {
  exec(req) {
    if (req.query.type === 'plugin3') {
      throw new MkbugError(400, { msg: 'test json' });
    }
  }
};
<|start_filename|>test/config.test.js<|end_filename|>
const Config = require('../bin/base.config');
const request = require('superagent');
// Tests for base.config: path resolution and NODE_ENV-specific overrides.
// (it() titles are displayed strings and are intentionally left as-is.)
describe('Config', () => {
  it('初始化 默认路径', () => {
    // Without a base path the config file is not found -> empty object.
    const case1 = new Config('case1');
    expect(case1).toEqual({});
  });
  it('初始化 指定路径', () => {
    const case1 = new Config('case1', './example');
    expect(case1).toEqual({ msg: 'test1' });
  });
  it('初始化 JEST 覆盖 1', () => {
    // NODE_ENV-specific config files override the base values.
    process.env.NODE_ENV = 'JEST';
    const case2 = new Config('case2', './example');
    expect(case2).toEqual({ msg: 'test2' });
  });
  it('初始化 JEST 覆盖 2', () => {
    process.env.NODE_ENV = 'JEST';
    const case3 = new Config('case3', './example');
    expect(case3).toEqual({
      msg: 'test2',
      msg1: 'test2',
      msg2: 'test2'
    });
  });
  it('初始化 JEST 覆盖 3', () => {
    process.env.NODE_ENV = 'JEST';
    const case3 = new Config('case3', './example');
    expect(case3.msg2).toBe('test2');
  });
  it('初始化 JEST From Mkbug', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/configtest/defaultconfig');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(200);
      expect(ret.body).toEqual({
        msg: 'test1'
      });
    }
  });
});
<|start_filename|>example/controller/ConfigTest.js<|end_filename|>
const { BaseController, Config } = require('../../index');
// Example controller returning a Config object loaded from the app's
// configured base path (see Mkbug constructor).
module.exports = class ConfigTest extends BaseController {
  getDefaultConfigAction() {
    return new Config('case1');
  }
};
<|start_filename|>test/mkbug.test.js<|end_filename|>
const request = require('superagent');
// Smoke tests for the server wiring in test/init.js: plain express
// middleware, 404 fallback, error path and cookie middleware pass-through.
// (it() titles are displayed strings and are intentionally left as-is.)
describe('Mkbug', () => {
  it('初始化 200', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/heath');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(200);
    }
  });
  it('初始化 404', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(404);
    }
  });
  it('初始化 500', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/error');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(500);
    }
  });
  it('Express中间件', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/cookie');
    } catch (err) {
      ret = err;
    } finally {
      const cookie = ret.headers['set-cookie'].join(',');
      expect(cookie).toMatch('cookie_test=mkbug-cookie');
    }
  });
});
<|start_filename|>bin/utils.js<|end_filename|>
const chalk = require('chalk');
function str2path(path) {
const ret = [];
const keys = path.split('.');
keys.forEach(function transKey(key) {
const start = key.indexOf('[');
const end = key.indexOf(']');
if (start > -1 && end > -1) {
ret.push(key.substring(0, start));
ret.push(key.substring(start + 1, end));
} else {
ret.push(key);
}
});
return ret;
}
const isTest = process.env.NODE_ENV !== 'JEST';
module.exports = {
isPromise(obj) {
return !!obj && (typeof obj === 'object' || typeof obj === 'function') && typeof obj.then === 'function';
},
getMethod(method) {
const re = new RegExp(/^(get|head|post|put|delete|connect|options|patch|trace)(.*)(Action$)/);
return re[Symbol.match](method);
},
_get(obj, path, def) {
const basePath = str2path(path);
return (
basePath.reduce((ret, next) => {
return ret === undefined ? undefined : ret[next];
}, obj) || def
);
},
createContext(source, req, res) {
const ctx = {};
ctx.__proto__ = source;
ctx.req = req;
ctx.res = res;
ctx.query = req.query;
ctx.body = req.body;
ctx.params = req.params;
ctx.status = 200;
ctx.type = null;
ctx.get = req.get.bind(req);
ctx.set = res.set.bind(res);
ctx.ip = req.ip;
ctx.ips = req.ips;
return ctx;
},
LOG(msg, ...other) {
isTest && console.log(chalk.bgBlack('Mkbug.js [ LOG ]:'), msg, ...other);
},
INFO(msg, ...other) {
isTest && console.info(chalk.yellow('Mkbug.js [ INFO]:'), chalk.yellow(msg), ...other);
},
WARN(msg, ...other) {
isTest && console.warn(chalk.magenta('Mkbug.js [ WARN]:'), chalk.magenta(msg), ...other);
},
ERROR(msg, ...other) {
isTest && console.error(chalk.red('Mkbug.js [ERROR]:'), chalk.red(msg), ...other);
}
};
<|start_filename|>example/plugin/TestPlugin2.js<|end_filename|>
const { BasePlugin, MkbugError } = require('./../../index');
// Example plugin: rejects requests carrying ?type=plugin2 with a
// string-bodied 400 MkbugError.
module.exports = class TestPlugin2 extends BasePlugin {
  exec(req) {
    if (req.query.type === 'plugin2') {
      throw new MkbugError(400, 'test string');
    }
  }
};
<|start_filename|>index.js<|end_filename|>
// Default NODE_ENV so config loading and logging behave predictably.
if (!process.env.NODE_ENV) {
  process.env.NODE_ENV = 'DEBUG';
}
// Patches express' Router prototype with the `attch` controller-mapping
// helper; must run before any router is created.
require('./bin/express.init');
// Public API surface.
// NOTE(review): example/util/TestUtil extends a `BaseUtil` imported from
// this module, but no BaseUtil is exported here — verify against bin/.
exports.Mkbug = require('./bin/mkbug');
exports.BaseController = require('./bin/base.controller');
exports.Config = require('./bin/base.config');
exports.BasePlugin = require('./bin/base.plugin');
exports.MkbugError = require('./bin/base.mkbugerror');
<|start_filename|>bin/mkbug.js<|end_filename|>
const path = require('path');
const Stream = require('stream');
const { createModule } = require('./helper');
const { LOG, INFO, ERROR } = require('./utils');
const BaseConfig = require('./base.config');
const MkbugError = require('./base.mkbugerror');
/**
 * Application wrapper around an express app: wires up routing from the
 * source tree, a 404 fallback and a customizable error handler.
 */
class Mkbug {
  /**
   * @param {object} app - an express application instance
   * @param {object} [opts] - opts.path: project source root (default ./src)
   */
  constructor(app, opts = {}) {
    LOG(`Welcome to Mkbug.js (NODE_ENV = ${process.env.NODE_ENV || ''})\n`);
    LOG(`  ╭∩╮(︶︿︶)╭∩╮\n`);
    this.app = app;
    this.basePath = (opts.path && path.resolve(process.cwd(), opts.path)) || path.resolve(process.cwd(), 'src');
    BaseConfig.prototype.baseUrl = this.basePath;
    Object.freeze(BaseConfig.prototype);
    this.prefix = '';
    this.__server = null;
    // Default error callback: pass the error through unchanged.
    this.eCb = function (error) {
      return error;
    };
  }

  /**
   * Mounts the controllers/plugins found under basePath at `prefix`.
   */
  create(prefix = '') {
    let prePath = prefix;
    if (prefix === '/') {
      prePath = '';
    }
    this.app.use(prePath, createModule(this.basePath, prePath));
    return this;
  }

  /** Registers plain express middleware; chainable. */
  use(...plugin) {
    this.app.use(...plugin);
    return this;
  }

  /** Replaces the error callback invoked by the final error handler. */
  error(cb) {
    this.eCb = cb;
  }

  /**
   * Installs the 404/error handlers and starts listening on `port`.
   * @returns the underlying express app
   */
  start(port, cb) {
    const _this = this;
    this.app.use(function notFound(req, res, next) {
      next(new MkbugError(404, 'Request not found!'));
    });
    // Fix: express recognizes error-handling middleware ONLY by a
    // four-argument signature. The previous (error, req, res) version was
    // registered as ordinary middleware, so thrown errors never reached it.
    // eslint-disable-next-line no-unused-vars
    this.app.use(function exception(error, req, res, next) {
      const ret = _this.eCb(error);
      let result = null;
      let status = 500;
      if (!res.finished) {
        if (ret instanceof MkbugError) {
          status = ret.status;
          result = ret.body;
        } else if (ret instanceof Error) {
          result = {
            name: 'Mkbug Error',
            msg: `Reject by ${ret.name}!`
          };
        } else {
          result = 'Mkbug Error';
        }
        res.status(status);
        if (Buffer.isBuffer(result) || typeof result === 'string') {
          res.end(result);
        } else if (result instanceof Stream) {
          result.pipe(res);
        } else {
          res.json(result);
        }
      }
    });
    this.__server = this.app.listen(
      port,
      cb ||
        function callback(err) {
          if (err) {
            ERROR(`Failed with [PORT=${port}]`, err);
          } else {
            INFO(`Start with [PORT=${port}]`);
          }
        }
    );
    return this.app;
  }

  /** Raw http.Server handle (e.g. for graceful shutdown in tests). */
  getServerInstance() {
    return this.__server;
  }
}
Object.freeze(Mkbug.prototype);
module.exports = Mkbug;
<|start_filename|>.eslintrc.js<|end_filename|>
module.exports = {
  root: true,
  env: {
    browser: true,
    node: true,
    es6: true, // so require() does not error
    commonjs: true
  },
  globals: {
    describe: true,
    it: true,
    expect: true
  },
  parser: 'babel-eslint',
  // parser configuration
  parserOptions: {
    sourceType: 'module',
    ecmaVersion: 2017
  },
  extends: ['eslint:recommended', 'plugin:prettier/recommended'],
  // Rule reference: http://eslint.cn/docs/rules/
  // 0 disables the rule
  // 1 reports a warning
  // 2 reports an error
  rules: {
    // Possible Errors
    'getter-return': 2,
    'no-empty': ['warn', { allowEmptyCatch: true }],
    'use-isnan': 2, // use isNaN() to test for NaN
    'no-cond-assign': 2, // disallow assignment operators in conditional expressions
    'no-dupe-keys': 2, // disallow duplicate keys in object literals
    'no-dupe-args': 2, // disallow duplicate parameter names in function definitions
    'no-extra-boolean-cast': 2, // disallow unnecessary boolean casts
    'no-extra-semi': 0, // disallow unnecessary semicolons
    'no-regex-spaces': 2, // disallow multiple spaces in regex literals
    'no-irregular-whitespace': ['error', { skipComments: true }],
    // Best Practices
    'array-callback-return': 2, // require returns in array callbacks — mind the difference between map and forEach
    'no-caller': 2, // disallow arguments.callee
    'no-alert': 2, // disallow alert, confirm and prompt
    'no-useless-catch': 1,
    // 'no-unused-expressions': ["error", { "allowTaggedTemplates": true }], // disallow unused expressions
    // Variables
    'no-delete-var': 2, // disallow delete on variables
    'no-shadow-restricted-names': 2, // disallow shadowing of restricted names
    'no-use-before-define': [2, { functions: false }], // disallow using variables before they are defined
    'no-undef': 2, // disallow undeclared variables
    'no-unused-vars': [2, { vars: 'all', args: 'after-used', ignoreRestSiblings: false }],
    // Node.js and CommonJS
    'no-process-exit': 'off',
    'no-useless-escape': 'off',
    // Stylistic Issues
    'no-trailing-spaces': 2, // disallow trailing whitespace
    // 'max-len': ['error', { code: 180 }], // enforce a maximum line length
    'max-statements-per-line': ['error', { max: 1 }], // enforce a maximum number of statements per line
    'max-lines': ['error', 1000], // maximum lines per file
    'max-lines-per-function': ['error', 200], // maximum lines per function
    // ECMAScript 6
    'no-const-assign': 2, // disallow reassigning const variables
    'no-new-symbol': 2,
    'prefer-const': [
      'warn',
      {
        destructuring: 'all'
      }
    ],
    'no-duplicate-imports': 2, // disallow duplicate module imports
    'no-dupe-class-members': 2, // disallow duplicate class member names
    'require-yield': 2, // require yield in generator functions
    'constructor-super': 2 // verify super() calls in constructors
  },
  overrides: [
    {
      files: ['**/__tests__/*.{j,t}s?(x)', '**/tests/unit/**/*.spec.{j,t}s?(x)'],
      env: {
        jest: true
      }
    }
  ]
};
<|start_filename|>test/plugin.test.js<|end_filename|>
const request = require('superagent');
// Integration tests for the plugin pipeline: plugins run before controllers
// and may short-circuit a request by throwing MkbugError.
// (it() titles are displayed strings and are intentionally left as-is.)
describe('Plugin', () => {
  it('Plugin生效判断 通过', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/plugintest');
    } catch (err) {
      ret = err;
    } finally {
      const cookie = ret.headers['set-cookie'].join(',');
      expect(cookie).toMatch('plugin=true');
    }
  });
  it('Plugin生效判断 阻断String类型', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/plugintest?type=plugin2');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(400);
      expect(ret.response.text).toBe('test string');
    }
  });
  it('Plugin生效判断 阻断Json类型', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/plugintest?type=plugin3');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(400);
      expect(ret.response.body).toEqual({ msg: 'test json' });
    }
  });
});
<|start_filename|>test/controller.response.test.js<|end_filename|>
const request = require('superagent');
// Integration tests for every supported controller return type
// (string, number, JSON, Buffer, Stream) against the example server.
describe('Controller Response测试', () => {
  it('Response String', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/responsetest/string');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(200);
      expect(ret.text).toBe('ok');
    }
  });
  it('Response Number', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/responsetest/number');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(200);
      expect(ret.body).toBe(10086);
    }
  });
  it('Response JSON', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/responsetest/json');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(200);
      expect(ret.body).toEqual({ msg: 'ok' });
    }
  });
  it('Response Buffer', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/responsetest/buffer');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(200);
      expect(ret.text).toEqual('10086');
    }
  });
  it('Response Stream', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/responsetest/stream');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(200);
      expect(ret.text).toEqual('10086');
    }
  });
});
<|start_filename|>bin/base.controller.js<|end_filename|>
const { INFO } = require('./utils');
const Base = require('./base');
// Base class for all controllers. `before`/`after` are lifecycle hooks
// invoked around each action by express.init's attch().
class BaseController extends Base {
  constructor() {
    super();
  }
  // Runs before the action; delegates to Base.before when the parent
  // defines one (guarded — Base may not declare it; verify in bin/base.js).
  before(request, response) {
    super.before && super.before(request, response);
    return true;
  }
  // Runs on response 'finish'; logs timing/status after delegating.
  after({ duration, status, originalUrl, request, response }) {
    super.after && super.after({ duration, status, originalUrl, request, response });
    INFO(`${duration}ms [${status}][${request.method}]${originalUrl}`);
  }
}
// Lists the subclass's own "xxxAction" handler methods so attch() can map
// them to routes. Uses Object.getPrototypeOf instead of the deprecated
// (Annex B) __proto__ accessor.
BaseController.prototype.__$$getMethods = function () {
  const props = Object.getOwnPropertyNames(Object.getPrototypeOf(this));
  return props.filter((prop) => {
    return prop !== 'constructor' && typeof this[prop] === 'function' && prop.endsWith('Action');
  });
};
module.exports = BaseController;
<|start_filename|>example/controller/base/ControllerBase.js<|end_filename|>
const ControllerBaseBase = require('./ControllerBaseBase');
// Example of a controller inheritance chain: demonstrates that before/after
// hooks propagate through intermediate base classes.
module.exports = class ControllerBase extends ControllerBaseBase {
  before(request, response) {
    super.before(request, response);
    console.log('ControllerBase before');
  }
  after({ duration, status, originalUrl, request, response }) {
    console.log('ControllerBase after');
    super.after({ duration, status, originalUrl, request, response });
  }
};
<|start_filename|>bin/const.js<|end_filename|>
// HTTP verbs accepted as controller action-name prefixes
// (see utils.getMethod and express.init's attch()).
module.exports = {
  METHODS: ['get', 'head', 'post', 'put', 'delete', 'connect', 'options', 'patch', 'trace']
};
<|start_filename|>example/controller/params/case2/_id/ParamCase2.js<|end_filename|>
const { BaseController } = require('../../../../../index');
// Example controller: the enclosing `_id` directory becomes a `:id` route
// parameter, exposed on the request context as this.params.id.
module.exports = class ParamCase2 extends BaseController {
  getAction() {
    return this.params.id;
  }
};
<|start_filename|>test/controller.params.test.js<|end_filename|>
const request = require('superagent');
// Integration tests for URL parameters declared via `_name` file and
// directory naming in the controller tree.
describe('Controller URL传参 测试', () => {
  it('Controller文件名定义param取值', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/params/case1/param1');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(200);
      expect(ret.text).toBe('param1');
    }
  });
  it('Controller文件夹名定义param取值', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/params/case2/param2/paramcase2');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(200);
      expect(ret.text).toBe('param2');
    }
  });
  it('Controller文件夹名定义param取值', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/params/case2/param3/param/param4');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(200);
      expect(ret.body).toEqual({ id: 'param3', subId: 'param4' });
    }
  });
});
<|start_filename|>test/controller.methods.test.js<|end_filename|>
const request = require('superagent');
// One action mapped per HTTP verb; every verb should answer 200.
describe('Controller Methods 测试', () => {
  const url = 'http://localhost:3000/api/index/test';
  // superagent rejects on transport errors; normalize to a single value so
  // each test can assert on `status` uniformly.
  async function settle(req) {
    try {
      return await req;
    } catch (err) {
      return err;
    }
  }

  it('HTTP Methods测试 GET', async () => {
    const ret = await settle(request.get(url));
    expect(ret.status).toBe(200);
  });
  it('HTTP Methods测试 HEAD', async () => {
    const ret = await settle(request.head(url));
    expect(ret.status).toBe(200);
  });
  it('HTTP Methods测试 POST', async () => {
    const ret = await settle(request.post(url));
    expect(ret.status).toBe(200);
  });
  it('HTTP Methods测试 PUT', async () => {
    const ret = await settle(request.put(url));
    expect(ret.status).toBe(200);
  });
  it('HTTP Methods测试 DELETE', async () => {
    const ret = await settle(request.delete(url));
    expect(ret.status).toBe(200);
  });
  it('HTTP Methods测试 CONNECT', async () => {
    const ret = await settle(request.connect(url));
    console.log(ret);
    // CONNECT cannot be exercised through superagent; keep a no-op check.
    expect(200).toBe(200);
  });
  it('HTTP Methods测试 OPTIONS', async () => {
    const ret = await settle(request.options(url));
    expect(ret.status).toBe(200);
  });
  it('HTTP Methods测试 PATCH', async () => {
    const ret = await settle(request.patch(url));
    expect(ret.status).toBe(200);
  });
  it('HTTP Methods测试 TRACE', async () => {
    const ret = await settle(request.trace(url));
    expect(ret.status).toBe(200);
  });
});
<|start_filename|>test/init.js<|end_filename|>
const express = require('express');
const cookieParser = require('cookie-parser');
const { Mkbug } = require('./../index');
// Bootstraps the example server used by all integration tests.
new Mkbug(express(), {
  path: './example'
})
  // Middleware that always errors -> exercises the 500 path.
  .use('/error', (req, res, next) => {
    next('test error');
  })
  // Simple liveness endpoint.
  .use('/heath', (req, res) => {
    res.status(200).end();
  })
  .use(cookieParser())
  .use('/cookie', (req, res) => {
    res.cookie('cookie_test', 'mkbug-cookie');
    res.end();
  })
  // Remote shutdown hook for CI.
  .use('/close', (req, res) => {
    res.status(200).end('server down!\n');
    process.exit(0);
  })
  // Mount controllers from ./example under /api and listen.
  .create('/api')
  .start(3000);
<|start_filename|>example/controller/Index.js<|end_filename|>
const { BaseController } = require('../../index');
// Example controller defining one "test" action per supported HTTP verb;
// each simply echoes its own name.
module.exports = class Index extends BaseController {
  getTestAction() {
    return 'getTestAction';
  }
  headTestAction() {
    return 'headTestAction';
  }
  postTestAction() {
    return 'postTestAction';
  }
  putTestAction() {
    return 'putTestAction';
  }
  deleteTestAction() {
    return 'deleteTestAction';
  }
  connectTestAction() {
    return 'connectTestAction';
  }
  optionsTestAction() {
    return 'optionsTestAction';
  }
  patchTestAction() {
    return 'patchTestAction';
  }
  traceTestAction() {
    return 'traceTestAction';
  }
};
<|start_filename|>example/util/TestUtil/TestUtil.js<|end_filename|>
const { BaseUtil } = require('../../../index');
// Example util class.
// NOTE(review): BaseUtil is imported from the package index, which does not
// visibly export it — confirm against index.js/bin before relying on this.
module.exports = class TestUtil extends BaseUtil {
  sayHello(name) {
    return `${name} said Hello from TestUtil.TestUtil!`;
  }
};
<|start_filename|>bin/express.init.js<|end_filename|>
const express = require('express');
const Stream = require('stream');
const { METHODS } = require('./const');
const { isPromise, getMethod, createContext, INFO, WARN } = require('./utils');
const router = express.Router();
// Monkey-patches the Router prototype: `attch` maps a controller instance's
// xxxAction methods onto express routes.
//   pre        - URL prefix accumulated while walking the controller tree
//   controller - BaseController instance
//   needParams - true when the path carries a :param segment
//   prefix     - mount prefix, used only for logging
router.__proto__.attch = function (pre, controller, needParams, prefix) {
  const name = controller.__$$getName();
  const methods = controller.__$$getMethods();
  const _this = this;
  function createApi(method) {
    // actions = [full, httpVerb, Name, 'Action'] or null (see utils.getMethod)
    const actions = getMethod(method);
    if (actions !== null) {
      const methodName = `${actions[2] === '' ? '' : actions[2].toLowerCase()}`;
      if (METHODS.indexOf(actions[1]) > -1) {
        let uri = '';
        if (needParams) {
          // Param routes: drop the trailing '/' when the action has no name.
          uri = methodName.length > 0 ? `${pre}${methodName}` : `${pre.substring(0, pre.length - 1)}`;
        } else {
          uri = methodName.length > 0 ? `${pre}${name.toLowerCase()}/${methodName}` : `${pre}${name.toLowerCase()}`;
        }
        INFO(`api = [${actions[1]}] ${prefix}${uri}`);
        _this[actions[1]](`${uri}`, async function (req, res, next) {
          // Fresh per-request context inheriting from the controller.
          const ctx = createContext(controller, req, res);
          const start = new Date().getTime();
          // 'finish' fires once the response is flushed: run the after hook.
          res.on('finish', () => {
            controller.after.call(ctx, {
              duration: new Date().getTime() - start,
              status: ctx.status,
              originalUrl: ctx.req.originalUrl,
              request: ctx.req,
              response: ctx.res
            });
          });
          // Copy the controller's own (enumerable) fields onto the context.
          for (const key in controller) {
            ctx[key] = controller[key];
          }
          let data = null;
          let result = null;
          try {
            controller.before.call(ctx, req, res);
            data = controller[method].call(ctx, req, res);
            if (isPromise(data)) {
              result = await data;
            } else {
              result = data;
            }
            // Serialize by return type unless the action already responded.
            if (!res.finished) {
              ctx.type && res.type(ctx.type);
              res.status(ctx.status);
              if (Buffer.isBuffer(result) || typeof result === 'string') {
                res.end(result);
              } else if (result instanceof Stream) {
                result.pipe(res);
              } else {
                res.json(result);
              }
            }
          } catch (e) {
            // Hand errors to the app-level exception middleware.
            next(e);
          }
        });
      }
    } else {
      WARN(`${method} in Controller ${name} is not right HTTP Method.\n`);
    }
  }
  methods.forEach(createApi);
};
<|start_filename|>bin/base.plugin.js<|end_filename|>
const Base = require('./base');
const { isPromise } = require('./utils');
// Base class for plugins: subclasses override exec(req, res), which may be
// sync or async, and throw (e.g. MkbugError) to short-circuit a request.
class BasePlugin extends Base {
  constructor() {
    super();
  }
  // Default: no-op.
  exec() {}
}
BasePlugin.prototype.run = async function (req, res, next) {
try {
const result = this.exec(req, res);
if (isPromise(result)) {
await result;
}
next();
} catch (e) {
next(e);
}
};
module.exports = BasePlugin;
<|start_filename|>bin/helper.js<|end_filename|>
const express = require('express');
const path = require('path');
const fs = require('fs');
const BaseController = require('./base.controller');
const BasePlugin = require('./base.plugin');
const { createContext, INFO, WARN, ERROR } = require('./utils');
let baseDir = '';
// Builds the root router: injects all plugins found under <baseDir>/plugin as
// middleware, then mounts every controller under the configured controller
// directory. Finally freezes the framework base prototypes so user code
// cannot monkey-patch them after startup.
function doParse(modules, prefix) {
  const { Controller = 'controller' } = modules;
  const router = express.Router();
  INFO('==========Mkbug plugins inject start===========');
  const plugins = parsePlugin(path.resolve(baseDir, 'plugin'));
  // FIX: the middleware parameters were previously named (res, req) -- swapped.
  // They are positional, so the first argument Express passes is always the
  // request; renaming to the conventional (req, res) changes no behavior but
  // removes a serious readability trap.
  const createplugin = (plugin) => {
    return (req, res, next) => {
      const ctx = createContext(plugin, req, res);
      plugin.run.call(ctx, req, res, next);
    };
  };
  plugins.forEach((plugin) => {
    router.use(createplugin(plugin));
  });
  INFO('==========Mkbug plugins inject end=============\n');
  INFO('==========Mkbug controller mapping start==========');
  parseController(router, path.resolve(baseDir, Controller), { prefix });
  INFO('==========Mkbug controller mapping end============\n');
  // freeze Prototype start
  Object.freeze(BasePlugin.prototype);
  Object.freeze(BaseController.prototype);
  // end
  return router;
}
// Recursively walks `dir` and mounts every BaseController subclass found
// there onto `router`. Directory names become URL path segments; a leading
// "_" in a file/folder name becomes an Express ":param" segment.
function parseController(router, dir, { pre = '/', prefix }) {
  if (!fs.existsSync(dir)) {
    return;
  }
  const files = fs.readdirSync(dir);
  files.forEach(function createController(file) {
    const stat = fs.lstatSync(`${dir}/${file}`);
    let subPath = `${pre}`;
    let needParams = false;
    if (file.startsWith('_')) {
      // "_id" -> ":id" (replace() with a string only swaps the first "_",
      // which is exactly the leading underscore here).
      subPath += `${file}/`;
      subPath = subPath.replace('_', ':');
      needParams = true;
    }
    if (stat.isFile()) {
      subPath = subPath.replace('.js', '');
      const Controller = require(`${dir}/${file}`);
      if (typeof Controller === 'function' && Controller.constructor) {
        const control = new Controller();
        const className = control.__$$getName();
        const fileName = file.replace('.js', '');
        // Param files ("_x.js") are exempt from the file-name == class-name rule.
        if (!needParams && className !== fileName) {
          ERROR(`The name of file ${file} must be the same as Class name ${className}!`);
          throw new Error('The name of file must be the same as Class name!');
        }
        if (control instanceof BaseController) {
          // NOTE(review): "attch" is presumably the router extension method this
          // framework defines elsewhere (misspelled project-wide) -- do not
          // "fix" the spelling here alone; confirm against the router module.
          router.attch(subPath, control, needParams, prefix);
        } else {
          WARN(`Controller ${file} must extends from BaseController or will be ignored!`);
        }
      } else {
        WARN(`${file} will be ignored!`);
      }
    } else if (stat.isDirectory()) {
      // "_" directories already contributed their ":param" segment above.
      if (!file.startsWith('_')) {
        subPath += `${file}/`;
      }
      parseController(router, path.resolve(dir, file), {
        pre: subPath,
        prefix
      });
    }
  });
}
// Recursively scans `dir` for plugin classes and returns instantiated plugins.
// `parent` carries the dotted namespace of ancestor folders for log output.
function parsePlugin(dir, parent = '') {
  const plugins = [];
  if (!fs.existsSync(dir)) {
    return plugins;
  }
  const entries = fs.readdirSync(dir);
  for (const file of entries) {
    const stat = fs.lstatSync(`${dir}/${file}`);
    if (stat.isDirectory()) {
      // Recurse into sub-directories, extending the dotted namespace.
      const nested = parsePlugin(path.resolve(dir, file), `${parent !== '' ? parent + '.' + file : file}`) || {};
      plugins.push(...nested);
    } else if (stat.isFile()) {
      const Plugin = require(`${dir}/${file}`);
      if (typeof Plugin !== 'function' || !Plugin.constructor) {
        WARN(`${file} will be ignored!`);
        continue;
      }
      const plugin = new Plugin();
      const className = plugin.__$$getName();
      const fileName = file.replace('.js', '');
      if (className !== fileName) {
        ERROR(`The name of file ${file} must be the same as Class name ${className}!`);
        throw new Error('The name of file must be the same as Class name!');
      }
      if (plugin instanceof BasePlugin) {
        INFO(`Inject plugin = ${parent !== '' ? parent + '.' : parent}${plugin.__$$getName()}`);
        plugins.push(plugin);
      } else {
        WARN(`Plugin ${file} must extends from BasePlugin and will be ignored!`);
      }
    }
  }
  return plugins;
}
exports.createModule = function (path, prefix) {
baseDir = path;
const router = express.Router();
try {
const files = fs.readdirSync(path);
const modules = {};
files.forEach(function modulesParse(dir) {
const stat = fs.lstatSync(`${path}/${dir}`);
if (stat.isDirectory()) {
const firstC = dir.substring(0, 1).toUpperCase();
const secondC = dir.substring(1).toLowerCase();
modules[`${firstC}${secondC}`] = dir;
}
});
router.use(doParse(modules, prefix));
} catch (e) {
throw e;
}
return router;
};
<|start_filename|>example/controller/params/case2/_id/param/_subId.js<|end_filename|>
const { BaseController } = require('../../../../../../index');
module.exports = class Case2 extends BaseController {
  // GET handler: echoes this.params (presumably the route params :id/:subId
  // derived from the "_id/param/_subId" directory layout -- confirm in
  // createContext).
  getAction() {
    return this.params;
  }
};
<|start_filename|>example/controller/base/ControllerBaseBase.js<|end_filename|>
const { BaseController } = require('../../../index');
// Root of the example controller-inheritance chain; logs hook execution so
// tests can verify before/after ordering across the hierarchy.
module.exports = class ControllerBaseBase extends BaseController {
  // Runs before the action.
  before() {
    console.log('ControllerBaseBase before');
  }
  // Runs after the response finishes.
  after() {
    console.log('ControllerBaseBase after');
  }
};
<|start_filename|>bin/test.js<|end_filename|>
// NOTE(review): this assigns to `exports.module`, NOT `module.exports`, so
// requiring this file yields `{ module: { test: '1212' } }` -- i.e. not a
// class. Presumably a fixture exercising the loader's "will be ignored"
// path for non-class files; confirm before "fixing" to module.exports.
exports.module = {
  test: '1212'
};
<|start_filename|>example/controller/ErrorTest.js<|end_filename|>
const { BaseController, MkbugError } = require('../../index');
// Example controller that exercises the framework's MkbugError handling.
module.exports = class ErrorTest extends BaseController {
  // Pre-hook: lets tests trigger specific error shapes via ?type=...
  before(req) {
    if (req.query.type === 'StringError') {
      // string response body
      throw new MkbugError(400, 'Error Message');
    } else if (req.query.type === 'JsonError') {
      // JSON response body
      throw new MkbugError(400, { msg: 'Error Message' });
    }
  }
  // Throws a default MkbugError (defaults defined in base.mkbugerror.js).
  getAction() {
    throw new MkbugError();
  }
  // Returns (rather than throws) an error instance -- the non-throw path.
  getCaseAction() {
    return new MkbugError();
  }
};
<|start_filename|>example/controller/HeaderApiTest.js<|end_filename|>
const { BaseController } = require('../../index');
// Example controller covering header get/set and client IP accessors.
module.exports = class HeaderApiTest extends BaseController {
  // Echoes the incoming "from-header" request header back in the body.
  getAction() {
    return this.get('from-header');
  }
  // Sets a "from-header" response header; empty body.
  getHeaderAction() {
    this.set('from-header', 'from-header');
    return '';
  }
  // Returns the client IP exposed on the framework context.
  getIpAction() {
    return this.ip;
  }
  // Returns the proxy IP chain (empty array without trusted proxies).
  getIpsAction() {
    return this.ips;
  }
};
<|start_filename|>bin/base.mkbugerror.js<|end_filename|>
// Error type carrying an HTTP status and a response body, used by the
// framework to turn thrown errors into HTTP responses.
class MkbugError extends Error {
  // Both arguments are optional; any falsy value (undefined, 0, '') falls
  // back to the defaults (405 / 'Request not allowed!'), deliberately using
  // `||` rather than default parameters to keep that contract.
  constructor(status, responseBody) {
    super();
    this.name = 'MkbugError';
    this.status = status || 405;
    this.body = responseBody || 'Request not allowed!';
  }
}
Object.freeze(MkbugError.prototype);
module.exports = MkbugError;
<|start_filename|>test/controller.header.test.js<|end_filename|>
const request = require('superagent');
// Integration tests for the HeaderApiTest controller. Assertions live in
// `finally` so a superagent rejection (non-2xx) is still inspected via the
// error object, which carries status/text like a response.
describe('Controller Header', () => {
  it('Header from client', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/headerapitest').set('from-header', 'from-header');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(200);
      expect(ret.text).toBe('from-header');
    }
  });
  it('Header from server', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/headerapitest/header');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(200);
      expect(ret.header['from-header']).toBe('from-header');
    }
  });
  it('Ip test', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/headerapitest/ip');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(200);
      expect(ret.text).toBe('::ffff:127.0.0.1');
    }
  });
  // FIX: was a duplicate "Ip test" name; renamed so failures of the two
  // cases are distinguishable in reports. This one covers the /ips endpoint.
  it('Ips test', async () => {
    let ret = null;
    try {
      ret = await request.get('http://localhost:3000/api/headerapitest/ips');
    } catch (err) {
      ret = err;
    } finally {
      expect(ret.status).toBe(200);
      expect(ret.text).toBe('[]');
    }
  });
});
<|start_filename|>bin/base.config.js<|end_filename|>
const path = require('path');
const fs = require('fs');
const { ERROR } = require('./utils');
// Loads "<name>.conf" plus a mode-specific "<name>.<NODE_ENV>.conf" overlay.
// NOTE: the constructor returns the parsed plain key/value object (returning
// an object from a constructor replaces `this`), so `new BaseConfig(...)`
// yields the values map, not a BaseConfig instance.
class BaseConfig {
  constructor(
    name = 'config',
    path = '',
    opts = {
      encoding: 'utf8'
    }
  ) {
    this.name = name;
    // NODE_ENV selects the overlay file, e.g. "config.production.conf".
    this.mode = process.env.NODE_ENV || '';
    this.values = {};
    if (path) {
      this.baseUrl = path;
    }
    return this.__$$parseConfig(opts);
  }
}
// Reads "<name>.conf" then overlays "<name>.<mode>.conf" so mode-specific
// entries win. Read errors are logged (ERROR), not thrown.
// Returns the accumulated key/value map.
BaseConfig.prototype.__$$parseConfig = function (opts) {
  const base = path.resolve(this.baseUrl || 'src', 'config');
  try {
    const baseConfig = `${base}/${this.name}.conf`;
    const config = `${base}/${this.name}.${this.mode}.conf`;
    if (fs.existsSync(baseConfig)) {
      this.__$$parseFile(fs.readFileSync(baseConfig, opts));
    }
    if (fs.existsSync(config)) {
      this.__$$parseFile(fs.readFileSync(config, opts));
    }
  } catch (e) {
    ERROR(e);
  }
  return this.values;
};
// Parses "key=value" lines from a config file into this.values.
// FIX 1: String.replace with a string pattern replaces only the FIRST match,
// so multi-line Windows (CRLF) files were previously mis-split; use a global
// regex instead.
// FIX 2: values containing '=' (base64, URLs) were truncated at the second
// '='; split on the first '=' only and keep the remainder intact.
// Lines without '=' or with an empty value are skipped, as before.
BaseConfig.prototype.__$$parseFile = function (str = '') {
  const lineArr = str.replace(/\r\n/g, '\n').split('\n');
  const _this = this;
  function transFunc(line) {
    const sep = line.indexOf('=');
    if (sep === -1) {
      return;
    }
    const value = line.substring(sep + 1);
    if (value !== '') {
      _this.values[line.substring(0, sep)] = value;
    }
  }
  lineArr.forEach(transFunc);
};
module.exports = BaseConfig;
<|start_filename|>example/controller/ExtendsTest.js<|end_filename|>
const ControllerBase = require('./base/ControllerBase');
// Example controller verifying that lifecycle hooks chain through the
// inheritance hierarchy via super calls.
module.exports = class ExtendsTest extends ControllerBase {
  // Runs before the action; delegates to the parent hook first.
  before(request, response) {
    super.before(request, response);
    console.log('Request start');
  }
  getAction() {
    return 'hello world';
  }
  // Runs after the response; the parent's after() is given an empty stats
  // object here.
  after() {
    console.log('Request end');
    super.after({});
  }
};
<|start_filename|>bin/base.js<|end_filename|>
module.exports = class Base {
constructor() {
this.__$$name = this.constructor.name;
}
__$$getName() {
return this.__$$name;
}
};
<|start_filename|>example/plugin/TestPlugin1.js<|end_filename|>
const { BasePlugin } = require('./../../index');
module.exports = class TestPlugin1 extends BasePlugin {
  // Plugin hook: sets a "plugin" cookie on every response so tests can
  // verify the plugin pipeline ran.
  exec(req, res) {
    res.cookie('plugin', 'true');
  }
};
<|start_filename|>example/controller/PluginTest.js<|end_filename|>
const { BaseController } = require('../../index');
// Minimal controller used as the target endpoint for plugin tests.
module.exports = class PluginTest extends BaseController {
  getAction() {
    return 'ok';
  }
};
| mkbug-com/mkbug.js |
<|start_filename|>src/Evolve/Dialect/Schema.cs<|end_filename|>
using EvolveDb.Connection;
using EvolveDb.Utilities;
namespace EvolveDb.Dialect
{
/// <summary>
///     Base class for dialect-specific schema handling: existence/emptiness
///     checks plus create, erase and drop operations.
/// </summary>
internal abstract class Schema
{
    /// <summary> Connection used by the concrete schema implementation. </summary>
    protected readonly WrappedConnection _wrappedConnection;

    /// <summary> Initializes the schema wrapper. </summary>
    /// <param name="schemaName"> Schema name; must be non-null and non-empty. </param>
    /// <param name="wrappedConnection"> Connection wrapper; must be non-null. </param>
    public Schema(string schemaName, WrappedConnection wrappedConnection)
    {
        Name = Check.NotNullOrEmpty(schemaName, nameof(schemaName));
        _wrappedConnection = Check.NotNull(wrappedConnection, nameof(wrappedConnection));
    }

    /// <summary> Gets the schema name. </summary>
    public string Name { get; }

    /// <summary> Returns true when the schema exists in the database. </summary>
    public abstract bool IsExists();

    /// <summary> Returns true when the schema contains no objects. </summary>
    public abstract bool IsEmpty();

    /// <summary> Creates the schema. </summary>
    public abstract bool Create();

    /// <summary> Erases the schema content (presumably without dropping the schema itself -- confirm in implementations). </summary>
    public abstract bool Erase();

    /// <summary> Drops the schema. </summary>
    public abstract bool Drop();
}
}
| JayDZimmerman/Evolve |
<|start_filename|>cmd/chgogen/model.go<|end_filename|>
package main
import (
"log"
"strconv"
"strings"
"github.com/dave/jennifer/jen"
)
// generateModel emits "<structname>_model.go" containing the Go struct for
// the given ClickHouse columns, a New<Struct> constructor (pre-sizing
// FixedString fields with space-filled buffers), and optional per-field
// getters.
func generateModel(packageName, structName string, getter bool, columns []chColumns) {
	f := jen.NewFile(packageName)
	var fields []jen.Code
	for _, c := range columns {
		field := getFieldByType(c.Type)
		fields = append(fields, jen.Id(getStandardName(c.Name)).Add(field))
	}
	st := f.Type().Id(structName)
	st.Struct(fields...)
	st.Line()
	var defaultValues []jen.Code
	for _, c := range columns {
		// Only FixedString-like columns get a constructor default: a buffer
		// of N spaces matching the declared fixed width.
		if !strings.HasPrefix(c.Type, "FixedString(") && !strings.HasPrefix(c.Type, "LowCardinality(FixedString") {
			continue
		}
		var lenStr string
		if strings.HasPrefix(c.Type, "FixedString(") {
			lenStr = c.Type[len("FixedString(") : len(c.Type)-1]
		} else if strings.HasPrefix(c.Type, "LowCardinality(FixedString") {
			// Strip both the LowCardinality( and FixedString( wrappers.
			lenStr = c.Type[len("LowCardinality(FixedString(") : len(c.Type)-2]
		}
		fixeSize, err := strconv.Atoi(lenStr)
		if err != nil {
			panic(err)
		}
		defaultValues = append(defaultValues,
			jen.Id(getStandardName(c.Name)).
				Op(":").Index().Byte().Parens(
				jen.Id("\""+strings.Repeat(" ", fixeSize)+"\""),
			),
		)
	}
	f.Func().
		Id("New" + structName).
		Params().
		Params(jen.Op("*").Id(structName)).
		Block(
			jen.Return().Op("&").Id(structName).Values(defaultValues...),
		).Line()
	if getter {
		for _, c := range columns {
			field := getFieldByType(c.Type)
			st.Line()
			st.Func().
				Params(jen.Id("t").Op("*").Id(structName)).
				Id("Get" + getStandardName(c.Name)).Params().Add(field).
				Block(
					jen.Return(jen.Id("t." + getStandardName(c.Name))),
				).Line()
		}
	}
	err := f.Save(strings.ToLower(structName) + "_model.go")
	if err != nil {
		log.Fatal(err)
	}
}
// getFieldByType maps a ClickHouse column type name to the jennifer statement
// for the corresponding Go field type. It recurses for wrapper types
// (LowCardinality, Nullable, SimpleAggregateFunction, Array, Tuple) and
// panics for unsupported types.
// FIX: a second, identical `Array(` branch after the Tuple handling was
// unreachable dead code (the first Array branch always returns) and has been
// removed; the magic index 15 is now spelled len("LowCardinality(").
func getFieldByType(chType string) *jen.Statement {
	switch chType {
	case "Int8":
		return jen.Int8()
	case "Int16":
		return jen.Int16()
	case "Int32":
		return jen.Int32()
	case "Int64":
		return jen.Int64()
	case "UInt8":
		return jen.Uint8()
	case "UInt16":
		return jen.Uint16()
	case "UInt32":
		return jen.Uint32()
	case "UInt64":
		return jen.Uint64()
	case "Float64":
		return jen.Float64()
	case "Float32":
		return jen.Float32()
	case "String":
		return jen.String()
	case "DateTime", "DateTime64", "Date32", "Date":
		return jen.Qual("time", "Time")
	case "IPv4":
		return jen.Qual("net", "IP")
	case "IPv6":
		return jen.Qual("net", "IP")
	case "UUID":
		return jen.Index(jen.Lit(16)).Byte()
	default:
		if strings.HasPrefix(chType, "DateTime(") {
			return jen.Qual("time", "Time")
		}
		if strings.HasPrefix(chType, "DateTime64(") {
			return jen.Qual("time", "Time")
		}
		// todo support Decimal Decimal128 and Decimal256
		if strings.HasPrefix(chType, "Decimal(") {
			return jen.Float64()
		}
		if strings.HasPrefix(chType, "LowCardinality(") {
			return getFieldByType(chType[len("LowCardinality(") : len(chType)-1])
		}
		if strings.HasPrefix(chType, "Enum8(") {
			return jen.Int8()
		}
		if strings.HasPrefix(chType, "Enum16(") {
			return jen.Int16()
		}
		if strings.HasPrefix(chType, "Nullable(") {
			return jen.Op("*").Add(getFieldByType(chType[len("Nullable(") : len(chType)-1]))
		}
		if strings.HasPrefix(chType, "SimpleAggregateFunction(") {
			return getFieldByType(getNestedType(chType[len("SimpleAggregateFunction("):]))
		}
		if strings.HasPrefix(chType, "FixedString(") {
			return jen.Index().Byte()
		}
		if strings.HasPrefix(chType, "Array(") {
			field := getFieldByType(chType[len("Array(") : len(chType)-1])
			if field == nil {
				return nil
			}
			return jen.Index().Add(field)
		}
		if strings.HasPrefix(chType, "Tuple(") {
			var openFunc int
			var fields []jen.Code
			cur := 0
			// for between `Tuple(` and `)`
			tupleTypes := chType[6 : len(chType)-1]
			idx := 1
			// Split on top-level ", " only; parentheses inside nested types
			// are tracked via openFunc so nested commas are skipped.
			for i, char := range tupleTypes {
				if char == ',' {
					if openFunc == 0 {
						fields = append(
							fields,
							jen.Id("Field"+strconv.Itoa(idx)).Add(getFieldByType(tupleTypes[cur:i])),
						)
						idx++
						cur = i + 2
					}
					continue
				}
				if char == '(' {
					openFunc++
					continue
				}
				if char == ')' {
					openFunc--
					continue
				}
			}
			fields = append(
				fields,
				jen.Id("Field"+strconv.Itoa(idx)).Add(getFieldByType(tupleTypes[cur:])),
			)
			return jen.Struct(fields...)
		}
	}
	panic("NOT support " + chType)
}
<|start_filename|>internal/readerwriter/consts.go<|end_filename|>
package readerwriter
const (
	// NONE compression type (raw, uncompressed block).
	// NOTE(review): byte values presumably mirror ClickHouse's
	// CompressionMethodByte wire values -- confirm against the server protocol.
	NONE = 0x02
	// LZ4 compression type
	LZ4 = 0x82
	// ZSTD compression type
	ZSTD = 0x90
)
const (
	// ChecksumSize is 128bits for cityhash102 checksum
	ChecksumSize = 16
	// CompressHeaderSize magic + compressed_size + uncompressed_size
	CompressHeaderSize = 1 + 4 + 4
	// HeaderSize for compress header (checksum followed by method/size header)
	HeaderSize = ChecksumSize + CompressHeaderSize
	// BlockMaxSize 1MB -- maximum uncompressed payload per block
	BlockMaxSize = 1 << 20
)
<|start_filename|>column/int256.go<|end_filename|>
package column
// NewInt256 returns a new Raw column sized for the Int256 ClickHouse
// DataType (32 bytes per value); values are handled as opaque fixed-size
// byte slices.
func NewInt256(nullable bool) *Raw {
	return NewRaw(Int256Size, nullable)
}
<|start_filename|>column/size.go<|end_filename|>
package column
// Fixed per-value byte widths of the ClickHouse wire encodings.
const (
	// Uint8Size data Size of Uint8 Column
	Uint8Size = 1
	// Uint16Size data Size of Uint16 Column
	Uint16Size = 2
	// Uint32Size data Size of Uint32 Column
	Uint32Size = 4
	// Uint64Size data Size of Uint64 Column
	Uint64Size = 8
	// Uint128Size data Size of Uint128 Column
	Uint128Size = 16
	// Uint256Size data Size of Uint256 Column
	Uint256Size = 32
	// Int8Size data Size of Int8 Column
	Int8Size = 1
	// Int16Size data Size of Int16 Column
	Int16Size = 2
	// Int32Size data Size of Int32 Column
	Int32Size = 4
	// Int64Size data Size of Int64 Column
	Int64Size = 8
	// Int128Size data Size of Int128 Column
	Int128Size = 16
	// Int256Size data Size of Int256 Column
	Int256Size = 32
	// Float32Size data Size of Float32 Column
	Float32Size = 4
	// Float64Size data Size of Float64 Column
	Float64Size = 8
	// DateSize data Size of Date Column
	DateSize = 2
	// Date32Size data Size of Date32 Column
	Date32Size = 4
	// DatetimeSize data Size of Datetime Column
	DatetimeSize = 4
	// Datetime64Size data Size of Datetime64 Column
	Datetime64Size = 8
	// IPv4Size data Size of IPv4 Column
	IPv4Size = 4
	// IPv6Size data Size of IPv6 Column
	IPv6Size = 16
	// Decimal32Size data Size of Decimal32 Column
	Decimal32Size = 4
	// Decimal64Size data Size of Decimal64 Column
	Decimal64Size = 8
	// Decimal128Size data Size of Decimal128 Column
	Decimal128Size = 16
	// Decimal256Size data Size of Decimal256 Column
	Decimal256Size = 32
	// ArraylenSize data Size of Arraylen Column (array offsets are uint64)
	ArraylenSize = 8
	// MaplenSize data Size of Maplen Column (map offsets are uint64)
	MaplenSize = 8
	// UUIDSize data Size of UUID Column
	UUIDSize = 16
)
<|start_filename|>column/int128.go<|end_filename|>
package column
// NewInt128 returns a new Raw column sized for the Int128 ClickHouse
// DataType (16 bytes per value).
func NewInt128(nullable bool) *Raw {
	return NewRaw(Int128Size, nullable)
}
<|start_filename|>column/uint128.go<|end_filename|>
package column
// NewUint128 returns a new Raw column sized for the UInt128 ClickHouse
// DataType (16 bytes per value).
func NewUint128(nullable bool) *Raw {
	return NewRaw(Uint128Size, nullable)
}
<|start_filename|>column/uint256.go<|end_filename|>
package column
// NewUint256 returns a new Raw column sized for the UInt256 ClickHouse
// DataType (32 bytes per value).
func NewUint256(nullable bool) *Raw {
	return NewRaw(Uint256Size, nullable)
}
<|start_filename|>column/helper.go<|end_filename|>
package column
// Table of powers of 10 for fast casting from floating types to decimal type
// representations (index = number of decimal places, up to 1e18).
var factors10 = []float64{
	1e0, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13,
	1e14, 1e15, 1e16, 1e17, 1e18,
}

// daySeconds is the number of seconds in one day (used for Date conversions).
const daySeconds = 24 * 60 * 60

// emptyByte is a shared 10 KiB zero-filled scratch buffer -- presumably used
// to pad/zero fixed-size writes; confirm at call sites.
var emptyByte = make([]byte, 1024*10)
<|start_filename|>doc_test.go<|end_filename|>
package chconn_test
import (
"context"
"fmt"
"os"
"time"
"github.com/vahid-sohrabloo/chconn/chpool"
"github.com/vahid-sohrabloo/chconn/column"
)
// Example demonstrates bulk insert and select of 100 million rows total
// (10 batches of 10 million) using raw columns and a connection pool.
func Example() {
	conn, err := chpool.Connect(context.Background(), os.Getenv("DATABASE_URL"))
	if err != nil {
		panic(err)
	}
	defer conn.Close()
	_, err = conn.Exec(context.Background(), `DROP TABLE IF EXISTS example_table`)
	if err != nil {
		panic(err)
	}
	_, err = conn.Exec(context.Background(), `CREATE TABLE example_table (
uint64 UInt64,
uint64_nullable Nullable(UInt64)
) Engine=Memory`)
	if err != nil {
		panic(err)
	}
	col1 := column.NewUint64(false)
	col2 := column.NewUint64(true)
	rows := 10000000 // 10 million rows per batch; 10 batches => 100M rows total
	numInsert := 10
	startInsert := time.Now()
	for i := 0; i < numInsert; i++ {
		// Reset reuses the columns' buffers between batches.
		col1.Reset()
		col2.Reset()
		for y := 0; y < rows; y++ {
			col1.Append(uint64(i))
			if i%2 == 0 {
				// Non-null value: mark present, then append the payload.
				col2.AppendIsNil(false)
				col2.Append(uint64(i))
			} else {
				// NULL: mark nil and append a placeholder to keep rows aligned.
				col2.AppendIsNil(true)
				col2.AppendEmpty()
			}
		}
		ctxInsert, cancelInsert := context.WithTimeout(context.Background(), time.Second*30)
		// insert data
		err = conn.Insert(ctxInsert, "INSERT INTO example_table (uint64,uint64_nullable) VALUES", col1, col2)
		if err != nil {
			cancelInsert()
			panic(err)
		}
		cancelInsert()
	}
	fmt.Println("inserted 100M rows in ", time.Since(startInsert))
	// select data
	col1Read := column.NewUint64(false)
	col2Read := column.NewUint64(true)
	ctxSelect, cancelSelect := context.WithTimeout(context.Background(), time.Second*30)
	defer cancelSelect()
	startSelect := time.Now()
	// run the select query (the statement streams blocks of rows)
	selectStmt, err := conn.Select(ctxSelect, "SELECT uint64,uint64_nullable FROM example_table")
	if err != nil {
		panic(err)
	}
	// make sure close the statement after you are done with it to back it to the pool
	defer selectStmt.Close()
	// next block of data
	// for more information about block, see: https://clickhouse.com/docs/en/development/architecture/#block
	var col1Data []uint64
	var col2DataNil []uint8
	var col2Data []uint64
	for selectStmt.Next() {
		err = selectStmt.ReadColumns(col1Read, col2Read)
		if err != nil {
			panic(err)
		}
		col1Data = col1Data[:0]
		col1Read.ReadAll(&col1Data)
		col2DataNil = col2DataNil[:0]
		col2Read.ReadAllNil(&col2DataNil)
		col2Data = col2Data[:0]
		col2Read.ReadAll(&col2Data)
	}
	// check errors
	if selectStmt.Err() != nil {
		panic(selectStmt.Err())
	}
	fmt.Println("selected 100M rows in ", time.Since(startSelect))
}
<|start_filename|>progress.go<|end_filename|>
package chconn
// Progress details of progress select query: counters streamed by the server
// while a query runs.
type Progress struct {
	ReadRows  uint64
	Readbytes uint64 // NOTE(review): misspelled (ReadBytes) but exported -- renaming would break callers.
	TotalRows uint64
	WriterRows   uint64 // NOTE(review): likely meant "WrittenRows"; kept for API compatibility.
	WrittenBytes uint64
}

// newProgress returns an empty Progress ready to be filled by read.
func newProgress() *Progress {
	return &Progress{}
}

// read decodes a progress packet (varint counters) from the connection.
// The write-info counters are only sent by servers at or above
// dbmsMinRevisionWithClientWriteInfo.
func (p *Progress) read(ch *conn) (err error) {
	if p.ReadRows, err = ch.reader.Uvarint(); err != nil {
		return &readError{"progress: read ReadRows", err}
	}
	if p.Readbytes, err = ch.reader.Uvarint(); err != nil {
		return &readError{"progress: read Readbytes", err}
	}
	if p.TotalRows, err = ch.reader.Uvarint(); err != nil {
		return &readError{"progress: read TotalRows", err}
	}
	if ch.serverInfo.Revision >= dbmsMinRevisionWithClientWriteInfo {
		if p.WriterRows, err = ch.reader.Uvarint(); err != nil {
			return &readError{"progress: read WriterRows", err}
		}
		if p.WrittenBytes, err = ch.reader.Uvarint(); err != nil {
			return &readError{"progress: read WrittenBytes", err}
		}
	}
	return nil
}
<|start_filename|>column/decimal128.go<|end_filename|>
package column
// NewDecimal128 returns a new Raw column sized for the Decimal128 ClickHouse
// DataType (16 bytes per value); scale handling is left to the caller.
func NewDecimal128(nullable bool) *Raw {
	return NewRaw(Decimal128Size, nullable)
}
<|start_filename|>cmd/chgogen/main.go<|end_filename|>
package main
import (
"context"
"encoding/json"
"io/ioutil"
"log"
"os"
"path/filepath"
flag "github.com/spf13/pflag"
"github.com/vahid-sohrabloo/chconn"
"github.com/vahid-sohrabloo/chconn/column"
)
// chColumns is one output column (name + ClickHouse type) of the query plan.
type chColumns struct {
	Name string `json:"Name"`
	Type string `json:"Type"`
}

// explainData mirrors the JSON emitted by ClickHouse's
// "EXPLAIN json = 1, actions = 1" statement; only Plan.Expression.Outputs is
// consumed by the generators, the rest is decoded for completeness.
type explainData []struct {
	Plan struct {
		NodeType   string `json:"Node Type"`
		Expression struct {
			Inputs []struct {
				Name string `json:"Name"`
				Type string `json:"Type"`
			} `json:"Inputs"`
			Actions []struct {
				NodeType         string `json:"Node Type"`
				ResultType       string `json:"Result Type"`
				Arguments        []int  `json:"Arguments"`
				RemovedArguments []int  `json:"Removed Arguments"`
				Result           int    `json:"Result"`
			} `json:"Actions"`
			Outputs   []chColumns `json:"Outputs"`
			Positions []int       `json:"Positions"`
			ProjectInput bool     `json:"Project Input"`
		} `json:"Expression"`
		Plans []struct {
			NodeType string `json:"Node Type"`
			Plans []struct {
				NodeType string `json:"Node Type"`
				ReadType string `json:"Read Type"`
				Parts    int    `json:"Parts"`
				Granules int    `json:"Granules"`
			} `json:"Plans"`
		} `json:"Plans"`
	} `json:"Plan"`
}
// main generates Go model/column/enum source files for the result set of a
// ClickHouse query. The query is read from stdin when piped, otherwise from
// the first positional argument; column names and types come from the
// server's "EXPLAIN json = 1, actions = 1 ..." output.
// FIX: corrected the user-visible -getter flag help text ("gatter").
func main() {
	getter := flag.Bool("getter", false, "generate getter for properties")
	structName := flag.String("name", "Table", "struct name")
	dir, err := os.Getwd()
	if err != nil {
		log.Fatal(err)
	}
	packageName := flag.String("package", filepath.Base(dir), "package name")
	flag.Parse()
	var query string
	file := os.Stdin
	fi, err := file.Stat()
	if err != nil {
		log.Fatal(err)
	}
	size := fi.Size()
	if size == 0 {
		// Nothing piped on stdin: take the query from the CLI argument.
		query = flag.Arg(0)
	} else {
		queryData, _ := ioutil.ReadAll(file)
		query = string(queryData)
	}
	ctx := context.Background()
	conn, err := chconn.Connect(ctx, os.Getenv("DATABASE_URL"))
	if err != nil {
		log.Fatal(err)
	}
	stmt, err := conn.Select(context.Background(), "EXPLAIN json = 1, actions = 1, description = 0, header = 0 "+query)
	if err != nil {
		log.Fatal(err)
	}
	col := column.NewString(false)
	var explain explainData
	for stmt.Next() {
		err = stmt.ReadColumns(col)
		if err != nil {
			log.Fatal(err)
		}
		// The EXPLAIN result is a single string cell containing the JSON plan.
		col.Next()
		err = json.Unmarshal(col.Value(), &explain)
		if err != nil {
			log.Fatal(err)
		}
	}
	if stmt.Err() != nil {
		log.Fatal(stmt.Err())
	}
	generateEnum(*packageName, *structName, explain[0].Plan.Expression.Outputs)
	generateModel(*packageName, *structName, *getter, explain[0].Plan.Expression.Outputs)
	generateColumns(*packageName, *structName, explain[0].Plan.Expression.Outputs)
}
<|start_filename|>cmd/chgogen/enum.go<|end_filename|>
package main
import (
"log"
"strings"
"github.com/dave/jennifer/jen"
)
// generateEnum emits "<structname>_enums.go" with a typed constant per
// Enum8/Enum16 member, named <Struct><Column><Member>. Nothing is written
// when no enum columns are present.
func generateEnum(packageName, structName string, columns []chColumns) {
	f := jen.NewFile(packageName)
	var hasEnum bool
	for _, c := range columns {
		if !strings.HasPrefix(c.Type, "Enum8(") && !strings.HasPrefix(c.Type, "Enum16(") {
			continue
		}
		hasEnum = true
		// Strip the "Enum8(" / "Enum16(" prefix and the trailing ")".
		startIndex := len("Enum16(")
		if strings.HasPrefix(c.Type, "Enum8(") {
			startIndex = len("Enum8(")
		}
		enums := strings.Split(c.Type[startIndex:len(c.Type)-1], ", ")
		values := make([]jen.Code, len(enums))
		for i, e := range enums {
			// Each entry looks like 'name' = value.
			e = strings.ReplaceAll(e, "'", "")
			parts := strings.Split(e, " = ")
			values[i] = jen.Id(getStandardName(structName) + getStandardName(c.Name) + getStandardName(parts[0]))
			if strings.HasPrefix(c.Type, "Enum8(") {
				values[i].(*jen.Statement).Int8()
			} else {
				values[i].(*jen.Statement).Int16()
			}
			values[i].(*jen.Statement).Op("=").Id(parts[1])
		}
		f.Const().Defs(values...)
	}
	if hasEnum {
		err := f.Save(strings.ToLower(structName) + "_enums.go")
		if err != nil {
			log.Fatal(err)
		}
	}
}
<|start_filename|>doc.go<|end_filename|>
// Package chconn is a low-level Clickhouse database driver.
/*
chconn is a pure Go driver for [ClickHouse] that use Native protocol
chconn aims to be low-level, fast, and performant.
If you have any suggestions or comments, please feel free to open an issue on this project's GitHub page!
*/
package chconn
<|start_filename|>server_info_test.go<|end_filename|>
package chconn
import (
"context"
"errors"
"io"
"os"
"testing"
"github.com/stretchr/testify/require"
)
// TestServerInfoError verifies handshake error reporting: a wrapping reader
// fails after numberValid successful reads, so each table entry targets one
// specific field of the server-info packet and asserts the matching error
// message.
func TestServerInfoError(t *testing.T) {
	startValidReader := 1
	tests := []struct {
		name        string
		wantErr     string
		numberValid int
	}{
		{
			name:        "server name",
			wantErr:     "ServerInfo: could not read server name",
			numberValid: startValidReader,
		}, {
			name:        "server major version",
			wantErr:     "ServerInfo: could not read server major version",
			numberValid: startValidReader + 2,
		}, {
			name:        "server minor version",
			wantErr:     "ServerInfo: could not read server minor version",
			numberValid: startValidReader + 3,
		}, {
			name:        "server revision",
			wantErr:     "ServerInfo: could not read server revision",
			numberValid: startValidReader + 4,
		}, {
			name:        "server timezone",
			wantErr:     "ServerInfo: could not read server timezone",
			numberValid: startValidReader + 7,
		}, {
			name:        "server display name",
			wantErr:     "ServerInfo: could not read server display name",
			numberValid: startValidReader + 9,
		}, {
			name:        "server version patch",
			wantErr:     "ServerInfo: could not read server version patch",
			numberValid: startValidReader + 11,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			config, err := ParseConfig(os.Getenv("CHX_TEST_TCP_CONN_STRING"))
			require.NoError(t, err)
			// Inject the failing reader so the connection dies mid-handshake.
			config.ReaderFunc = func(r io.Reader) io.Reader {
				return &readErrorHelper{
					err:         errors.New("timeout"),
					r:           r,
					numberValid: tt.numberValid,
				}
			}
			_, err = ConnectConfig(context.Background(), config)
			require.Error(t, err)
			readErr, ok := err.(*readError)
			require.True(t, ok)
			require.Equal(t, readErr.msg, tt.wantErr)
			require.EqualError(t, readErr.Unwrap(), "timeout")
		})
	}
}
<|start_filename|>column/decimal256.go<|end_filename|>
package column
// NewDecimal256 returns a new Raw column sized for the Decimal256 ClickHouse
// DataType (32 bytes per value); scale handling is left to the caller.
func NewDecimal256(nullable bool) *Raw {
	return NewRaw(Decimal256Size, nullable)
}
<|start_filename|>internal/cityhash102/doc.go<|end_filename|>
// Package cityhash102 COPY from https://github.com/zentures/cityhash/
/**
NOTE: The code is modified to be compatible with CityHash128 used in ClickHouse
COPY from https://github.com/ClickHouse/clickhouse-go/tree/master/lib/cityhash102
remove unused code
*/
package cityhash102
<|start_filename|>client_info.go<|end_filename|>
package chconn
import (
"os/user"
)
// ClientInfo Information about client for query.
// Some fields are passed explicitly from client and some are calculated automatically.
// Contains info about initial query source, for tracing distributed queries
// (where one query initiates many other queries).
type ClientInfo struct {
	InitialUser    string
	InitialQueryID string
	OSUser         string
	ClientHostname string
	ClientName     string
	ClientVersionMajor uint64
	ClientVersionMinor uint64
	ClientVersionPatch uint64
	ClientRevision     uint64
	QuotaKey           string
}

// Write Only values that are not calculated automatically or passed separately are serialized.
// Revisions are passed to use format that server will understand or client was used.
// Field order and revision gating must match the server's expectations.
func (c *ClientInfo) write(ch *conn) {
	// InitialQuery (query kind byte: 1 = initial query)
	ch.writer.Uint8(1)
	ch.writer.String(c.InitialUser)
	ch.writer.String(c.InitialQueryID)
	// Hard-coded initial address placeholder (loopback, port 0).
	ch.writer.String("[::ffff:127.0.0.1]:0")
	// iface type
	ch.writer.Uint8(1) // tcp
	ch.writer.String(c.OSUser)
	ch.writer.String(c.ClientHostname)
	ch.writer.String(c.ClientName)
	ch.writer.Uvarint(c.ClientVersionMajor)
	ch.writer.Uvarint(c.ClientVersionMinor)
	ch.writer.Uvarint(c.ClientRevision)
	if ch.serverInfo.Revision >= dbmsMinRevisionWithQuotaKeyInClientInfo {
		ch.writer.String(c.QuotaKey)
	}
	if ch.serverInfo.Revision >= dbmsMinRevisionWithVersionPatch {
		ch.writer.Uvarint(c.ClientVersionPatch)
	}
	if ch.serverInfo.Revision >= dbmsMinRevisionWithOpentelemetry {
		// TODO Support Opentelemetry later
		// Don't have OpenTelemetry header.
		ch.writer.Uint8(0)
	}
}

// fillOSUserHostNameAndVersionInfo populates OSUser from the current OS user
// (best effort; errors are ignored) and stamps the client version/revision
// constants this library advertises.
func (c *ClientInfo) fillOSUserHostNameAndVersionInfo() {
	u, err := user.Current()
	if err == nil {
		c.OSUser = u.Username
	}
	c.ClientVersionMajor = dbmsVersionMajor
	c.ClientVersionMinor = dbmsVersionMinor
	c.ClientVersionPatch = dbmsVersionPatch
	c.ClientRevision = dbmsVersionRevision
}
<|start_filename|>column/nullable.go<|end_filename|>
package column
import (
"io"
)
// nullable stores the per-row null-mask bytes that precede the data stream of
// a Nullable column (one byte per value; non-zero presumably marks NULL in
// ClickHouse's wire format -- confirm against the column Append/AppendIsNil
// call sites).
type nullable struct {
	column
}

// newNullable returns a null-mask column; each entry is a single byte.
func newNullable() *nullable {
	return &nullable{
		column: column{
			size: Uint8Size,
		},
	}
}

// Append adds one mask byte to the write buffer.
func (c *nullable) Append(v uint8) {
	c.writerData = append(c.writerData,
		v,
	)
}

// WriteTo writes the accumulated mask bytes to w and reports bytes written.
func (c *nullable) WriteTo(w io.Writer) (int64, error) {
	nw, err := w.Write(c.writerData)
	return int64(nw), err
}
<|start_filename|>internal/readerwriter/compress_writer.go<|end_filename|>
package readerwriter
import (
"encoding/binary"
"io"
"github.com/pierrec/lz4/v4"
"github.com/vahid-sohrabloo/chconn/internal/cityhash102"
)
// compressWriter buffers writes into fixed-size blocks and emits them as
// ClickHouse LZ4 compression frames (checksum + header + payload).
type compressWriter struct {
	writer io.Writer
	// data uncompressed
	data []byte
	// data position
	pos int
	// data compressed
	zdata []byte
}

// NewCompressWriter wrap the io.Writer.
// zdata is sized for the worst-case LZ4 output of a full block plus the
// frame header, so CompressBlock never runs out of room.
func NewCompressWriter(w io.Writer) io.Writer {
	p := &compressWriter{writer: w}
	p.data = make([]byte, BlockMaxSize)
	zlen := lz4.CompressBlockBound(BlockMaxSize) + HeaderSize
	p.zdata = make([]byte, zlen)
	return p
}
// Write buffers buf, flushing a complete compression frame whenever the
// internal buffer (BlockMaxSize bytes) fills. Data smaller than a block stays
// buffered until an explicit Flush.
func (cw *compressWriter) Write(buf []byte) (int, error) {
	var n int
	for len(buf) > 0 {
		// Accumulate the data to be compressed.
		m := copy(cw.data[cw.pos:], buf)
		cw.pos += m
		buf = buf[m:]
		if cw.pos == len(cw.data) {
			err := cw.Flush()
			if err != nil {
				return n, err
			}
		}
		n += m
	}
	return n, nil
}
// Flush compresses the buffered data into one frame -- a 16-byte cityhash102
// checksum, then method byte + compressed/uncompressed sizes, then the LZ4
// payload -- and writes it out. No-op when the buffer is empty.
// CONSISTENCY: the header offsets were hard-coded as 16/17/21; they are now
// derived from ChecksumSize (16), matching the constants this package already
// defines and uses below. Behavior is byte-identical.
func (cw *compressWriter) Flush() (err error) {
	if cw.pos == 0 {
		return
	}
	compressedSize, err := lz4.CompressBlock(cw.data[:cw.pos], cw.zdata[HeaderSize:], nil)
	if err != nil {
		return err
	}
	compressedSize += CompressHeaderSize
	// fill the header: method byte + compressed_size_32 + uncompressed_size_32
	cw.zdata[ChecksumSize] = LZ4
	binary.LittleEndian.PutUint32(cw.zdata[ChecksumSize+1:], uint32(compressedSize))
	binary.LittleEndian.PutUint32(cw.zdata[ChecksumSize+5:], uint32(cw.pos))
	// fill the checksum (computed over header + payload, as ClickHouse expects)
	checkSum := cityhash102.CityHash128(cw.zdata[ChecksumSize:], uint32(compressedSize))
	binary.LittleEndian.PutUint64(cw.zdata[0:], checkSum.Lower64())
	binary.LittleEndian.PutUint64(cw.zdata[8:], checkSum.Higher64())
	_, err = cw.writer.Write(cw.zdata[:compressedSize+ChecksumSize])
	cw.pos = 0
	return err
}
<|start_filename|>cmd/chgogen/helper.go<|end_filename|>
package main
import "strings"
// getNestedType extracts the inner type from a string of the form
// "name, Type)" (the remainder of e.g. "SimpleAggregateFunction(sum, UInt64)"
// after its prefix): everything after the first ", " and before the trailing
// ")". Panics when no comma is found.
// FIX: corrected the panic message typo ("Cannot found netsted").
func getNestedType(chType string) string {
	for i, v := range chType {
		if v == ',' {
			return chType[i+2 : len(chType)-1]
		}
	}
	panic("Cannot find nested type of " + chType)
}
// getStandardName converts a ClickHouse column name into an exported Go
// identifier: dots become underscores, then the result is CamelCased.
// The single name "f" is kept as-is (historical special case).
func getStandardName(name string) string {
	if name == "f" {
		return "f"
	}
	normalized := strings.ReplaceAll(name, ".", "_")
	return snakeCaseToCamelCase(normalized)
}
// snakeCaseToCamelCase converts "foo_bar_baz" to "FooBarBaz": the first
// character is upper-cased, each underscore is dropped and upper-cases the
// following rune. Returns "" for empty input.
// PERF: the original built the result with += string concatenation (O(n^2)
// allocations); a strings.Builder accumulates in O(n). The slightly odd
// first-character handling (byte-indexed) is preserved for ASCII inputs.
func snakeCaseToCamelCase(inputUnderScoreStr string) (camelCase string) {
	var b strings.Builder
	b.Grow(len(inputUnderScoreStr))
	isToUpper := false
	for k, v := range inputUnderScoreStr {
		if k == 0 {
			b.WriteString(strings.ToUpper(string(inputUnderScoreStr[0])))
			continue
		}
		if isToUpper {
			b.WriteString(strings.ToUpper(string(v)))
			isToUpper = false
		} else if v == '_' {
			isToUpper = true
		} else {
			b.WriteRune(v)
		}
	}
	return b.String()
}
<|start_filename|>internal/readerwriter/reader.go<|end_filename|>
package readerwriter
import (
"encoding/binary"
"io"
)
// Reader is a helper to read data from reader.
// It toggles between the raw stream and a lazily-created decompressing reader
// (see SetCompress); scratch is a reusable buffer for fixed-size decodes.
type Reader struct {
	mainReader     io.Reader
	input          io.Reader
	compressReader io.Reader
	scratch        [binary.MaxVarintLen64]byte
}

// NewReader get new Reader, initially reading the raw (uncompressed) stream.
func NewReader(input io.Reader) *Reader {
	return &Reader{
		input:      input,
		mainReader: input,
	}
}
// SetCompress sets the compression status: when enabled, subsequent reads go
// through a lazily-created (and then reused) decompressing reader; when
// disabled, they hit the raw stream again.
func (r *Reader) SetCompress(c bool) {
	if c {
		if r.compressReader == nil {
			r.compressReader = NewCompressReader(r.mainReader)
		}
		r.input = r.compressReader
		return
	}
	r.input = r.mainReader
}
// Uvarint reads a variable-length unsigned integer, delegating
// byte-at-a-time reads to r.ReadByte (io.ByteReader).
func (r *Reader) Uvarint() (uint64, error) {
	v, err := binary.ReadUvarint(r)
	return v, err
}
// Int32 reads a little-endian Int32 value.
func (r *Reader) Int32() (int32, error) {
	// On error Uint32 returns 0, so the converted value is 0 as well.
	u, err := r.Uint32()
	return int32(u), err
}
// Uint32 reads a little-endian Uint32 value via the scratch buffer.
func (r *Reader) Uint32() (uint32, error) {
	buf := r.scratch[:4]
	if _, err := io.ReadFull(r.input, buf); err != nil {
		return 0, err
	}
	return binary.LittleEndian.Uint32(buf), nil
}
// Uint64 reads a little-endian Uint64 value via the scratch buffer.
func (r *Reader) Uint64() (uint64, error) {
	buf := r.scratch[:8]
	if _, err := io.ReadFull(r.input, buf); err != nil {
		return 0, err
	}
	return binary.LittleEndian.Uint64(buf), nil
}
// FixedString reads exactly strlen bytes and returns them; a short
// read surfaces as io.ErrUnexpectedEOF.
func (r *Reader) FixedString(strlen int) ([]byte, error) {
	out := make([]byte, strlen)
	_, err := io.ReadFull(r.input, out)
	return out, err
}
// String reads a uvarint-length-prefixed string.
func (r *Reader) String() (string, error) {
	b, err := r.ByteString()
	if err != nil {
		return "", err
	}
	return string(b), nil
}
// ByteString reads a uvarint-length-prefixed string as raw bytes.
// A zero length yields an empty (non-nil) slice without touching the
// underlying reader.
func (r *Reader) ByteString() ([]byte, error) {
	n, err := r.Uvarint()
	switch {
	case err != nil:
		return nil, err
	case n == 0:
		return []byte{}, nil
	default:
		return r.FixedString(int(n))
	}
}
// ReadByte reads a single byte, implementing io.ByteReader (used by
// Uvarint via binary.ReadUvarint).
//
// Fix: the original called r.input.Read directly, but the io.Reader
// contract allows a Read to return (0, nil); in that case the old code
// returned a stale scratch byte with a nil error. io.ReadFull retries
// until one byte is read or a real error occurs.
func (r *Reader) ReadByte() (byte, error) {
	if _, err := io.ReadFull(r.input, r.scratch[:1]); err != nil {
		return 0, err
	}
	return r.scratch[0], nil
}
// Read implements io.Reader.
//
// NOTE(review): unlike the standard io.Reader contract, this always
// attempts to fill buf completely (io.ReadFull semantics) and reports
// io.ErrUnexpectedEOF on a short read — callers such as FixedString
// appear to rely on the full-buffer guarantee; confirm before changing.
func (r *Reader) Read(buf []byte) (int, error) {
	return io.ReadFull(r.input, buf)
}
<|start_filename|>config_test.go<|end_filename|>
package chconn
import (
"context"
"crypto/tls"
"errors"
"fmt"
"io/ioutil"
"os"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
var parseConfigTests = []struct {
name string
connString string
config *Config
}{
// Test all sslmodes
{
name: "sslmode not set (disable)",
connString: "clickhouse://vahid:secret@localhost:9000/mydb",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "localhost",
Port: 9000,
Database: "mydb",
ClientName: defaultClientName,
TLSConfig: nil,
RuntimeParams: map[string]string{},
},
},
{
name: "sslmode disable",
connString: "clickhouse://vahid:secret@localhost:9000/mydb?sslmode=disable",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "localhost",
ClientName: defaultClientName,
Port: 9000,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
},
},
{
name: "sslmode allow",
connString: "clickhouse://vahid:secret@localhost:9000/mydb?sslmode=allow",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "localhost",
Port: 9000,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
Fallbacks: []*FallbackConfig{
{
Host: "localhost",
Port: 9000,
TLSConfig: &tls.Config{
InsecureSkipVerify: true,
},
},
},
},
},
{
name: "sslmode prefer",
connString: "clickhouse://vahid:secret@localhost:9000/mydb?sslmode=prefer",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "localhost",
Port: 9000,
Database: "mydb",
ClientName: defaultClientName,
TLSConfig: &tls.Config{
InsecureSkipVerify: true,
},
RuntimeParams: map[string]string{},
Fallbacks: []*FallbackConfig{
{
Host: "localhost",
Port: 9000,
TLSConfig: nil,
},
},
},
},
{
name: "sslmode require",
connString: "clickhouse://vahid:secret@localhost:9000/mydb?sslmode=require",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "localhost",
Port: 9000,
Database: "mydb",
ClientName: defaultClientName,
RuntimeParams: map[string]string{},
TLSConfig: &tls.Config{
InsecureSkipVerify: true,
},
},
},
{
name: "sslmode verify-ca",
connString: "clickhouse://vahid:secret@localhost:9000/mydb?sslmode=verify-ca",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "localhost",
Port: 9000,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: &tls.Config{
InsecureSkipVerify: true,
},
RuntimeParams: map[string]string{},
},
},
{
name: "sslmode verify-full",
connString: "clickhouse://vahid:secret@localhost:9000/mydb?sslmode=verify-full",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "localhost",
Port: 9000,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: &tls.Config{ServerName: "localhost"},
RuntimeParams: map[string]string{},
},
},
{
name: "database url everything",
connString: "clickhouse://vahid:secret@localhost:9000/mydb?sslmode=disable&client_name=chxtest&extradata=test&connect_timeout=5",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "localhost",
Port: 9000,
Database: "mydb",
TLSConfig: nil,
ConnectTimeout: 5 * time.Second,
ClientName: "chxtest",
RuntimeParams: map[string]string{
"extradata": "test",
},
},
},
{
name: "database url missing password",
connString: "clickhouse://vahid@localhost:9000/mydb?sslmode=disable",
config: &Config{
User: "vahid",
Host: "localhost",
Port: 9000,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
},
},
{
name: "database url missing user and password",
connString: "clickhouse://localhost:9000/mydb?sslmode=disable",
config: &Config{
User: defaultUsername,
Host: "localhost",
Port: 9000,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
},
},
{
name: "database url missing port",
connString: "clickhouse://vahid:secret@localhost:9000/mydb?sslmode=disable",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "localhost",
Port: 9000,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
},
},
{
name: "database url clickhouse protocol",
connString: "clickhouse://vahid@localhost:9000/mydb?sslmode=disable",
config: &Config{
User: "vahid",
Host: "localhost",
Port: 9000,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
},
},
{
name: "database url IPv4 with port",
connString: "clickhouse://vahid@127.0.0.1:5433/mydb?sslmode=disable",
config: &Config{
User: "vahid",
Host: "127.0.0.1",
ClientName: defaultClientName,
Port: 5433,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
},
},
{
name: "database url IPv6 with port",
connString: "clickhouse://vahid@[2001:fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b]:5433/mydb?sslmode=disable",
config: &Config{
User: "vahid",
Host: "2001:db8::1",
Port: 5433,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
},
},
{
name: "database url IPv6 no port",
connString: "clickhouse://vahid@[2001:db8::1]/mydb?sslmode=disable",
config: &Config{
User: "vahid",
Host: "2001:db8::1",
Port: 9000,
Database: "mydb",
ClientName: defaultClientName,
TLSConfig: nil,
RuntimeParams: map[string]string{},
},
},
{
name: "DSN everything",
connString: "user=vahid password=secret host=localhost port=9000 dbname=mydb sslmode=disable client_name=chxtest connect_timeout=5",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "localhost",
Port: 9000,
Database: "mydb",
TLSConfig: nil,
ClientName: "chxtest",
ConnectTimeout: 5 * time.Second,
RuntimeParams: map[string]string{},
},
},
{
name: "DSN with escaped single quote",
connString: "user=vahid\\'s password=secret host=localhost port=9000 dbname=mydb sslmode=disable",
config: &Config{
User: "vahid's",
Password: "<PASSWORD>",
Host: "localhost",
Port: 9000,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
},
},
{
name: "DSN with escaped backslash",
connString: "user=vahid password=<PASSWORD>\\\\<PASSWORD> host=localhost port=9000 dbname=mydb sslmode=disable",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "localhost",
Port: 9000,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
},
},
{
name: "DSN with single quoted values",
connString: "user='vahid' host='localhost' dbname='mydb' sslmode='disable'",
config: &Config{
User: "vahid",
Host: "localhost",
Port: 9000,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
},
},
{
name: "DSN with single quoted value with escaped single quote",
connString: "user='vahid\\'s' host='localhost' dbname='mydb' sslmode='disable'",
config: &Config{
User: "vahid's",
Host: "localhost",
Port: 9000,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
},
},
{
name: "DSN with empty single quoted value",
connString: "user='vahid' password='' host='localhost' dbname='mydb' sslmode='disable'",
config: &Config{
User: "vahid",
Host: "localhost",
Port: 9000,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
},
},
{
name: "DSN with space between key and value",
connString: "user = 'vahid' password = '' host = 'localhost' dbname = 'mydb' sslmode='disable'",
config: &Config{
User: "vahid",
Host: "localhost",
Port: 9000,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
},
},
{
name: "URL multiple hosts",
connString: "clickhouse://vahid:secret@foo,bar,baz/mydb?sslmode=disable",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "foo",
Port: 9000,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
Fallbacks: []*FallbackConfig{
{
Host: "bar",
Port: 9000,
TLSConfig: nil,
},
{
Host: "baz",
Port: 9000,
TLSConfig: nil,
},
},
},
},
{
name: "URL multiple hosts and ports",
connString: "clickhouse://vahid:secret@foo:1,bar:2,baz:3/mydb?sslmode=disable",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "foo",
Port: 1,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
Fallbacks: []*FallbackConfig{
{
Host: "bar",
Port: 2,
TLSConfig: nil,
},
{
Host: "baz",
Port: 3,
TLSConfig: nil,
},
},
},
},
{
name: "DSN multiple hosts one port",
connString: "user=vahid password=secret host=foo,bar,baz port=9000 dbname=mydb sslmode=disable",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "foo",
Port: 9000,
ClientName: defaultClientName,
Database: "mydb",
TLSConfig: nil,
RuntimeParams: map[string]string{},
Fallbacks: []*FallbackConfig{
{
Host: "bar",
Port: 9000,
TLSConfig: nil,
},
{
Host: "baz",
Port: 9000,
TLSConfig: nil,
},
},
},
},
{
name: "DSN multiple hosts multiple ports",
connString: "user=vahid password=secret host=foo,bar,baz port=1,2,3 dbname=mydb sslmode=disable",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "foo",
Port: 1,
Database: "mydb",
TLSConfig: nil,
ClientName: defaultClientName,
RuntimeParams: map[string]string{},
Fallbacks: []*FallbackConfig{
{
Host: "bar",
Port: 2,
TLSConfig: nil,
},
{
Host: "baz",
Port: 3,
TLSConfig: nil,
},
},
},
},
{
name: "multiple hosts and fallback tsl",
connString: "user=vahid password=secret host=foo,bar,baz dbname=mydb sslmode=prefer",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "foo",
Port: 9000,
Database: "mydb",
ClientName: defaultClientName,
TLSConfig: &tls.Config{
InsecureSkipVerify: true,
},
RuntimeParams: map[string]string{},
Fallbacks: []*FallbackConfig{
{
Host: "foo",
Port: 9000,
TLSConfig: nil,
},
{
Host: "bar",
Port: 9000,
TLSConfig: &tls.Config{
InsecureSkipVerify: true,
}},
{
Host: "bar",
Port: 9000,
TLSConfig: nil,
},
{
Host: "baz",
Port: 9000,
TLSConfig: &tls.Config{
InsecureSkipVerify: true,
}},
{
Host: "baz",
Port: 9000,
TLSConfig: nil,
},
},
},
},
{
name: "enable compress",
connString: "user=vahid password=<PASSWORD> host=foo,bar,baz dbname=mydb sslmode=prefer compress=true",
config: &Config{
User: "vahid",
Password: "<PASSWORD>",
Host: "foo",
Port: 9000,
Database: "mydb",
Compress: true,
ClientName: defaultClientName,
TLSConfig: &tls.Config{
InsecureSkipVerify: true,
},
RuntimeParams: map[string]string{},
Fallbacks: []*FallbackConfig{
{
Host: "foo",
Port: 9000,
TLSConfig: nil,
},
{
Host: "bar",
Port: 9000,
TLSConfig: &tls.Config{
InsecureSkipVerify: true,
}},
{
Host: "bar",
Port: 9000,
TLSConfig: nil,
},
{
Host: "baz",
Port: 9000,
TLSConfig: &tls.Config{
InsecureSkipVerify: true,
}},
{
Host: "baz",
Port: 9000,
TLSConfig: nil,
},
},
},
},
}
// TestParseConfig runs every entry of the parseConfigTests table through
// ParseConfig and compares the result field-by-field via assertConfigsEqual.
func TestParseConfig(t *testing.T) {
	t.Parallel()
	for i, tt := range parseConfigTests {
		config, err := ParseConfig(tt.connString)
		// Keep checking the remaining cases even if one fails to parse.
		if !assert.Nilf(t, err, "Test %d (%s)", i, tt.name) {
			continue
		}
		assertConfigsEqual(t, tt.config, config, fmt.Sprintf("Test %d (%s)", i, tt.name))
	}
}
// TestParseConfigDSNWithTrailingEmptyEqualDoesNotPanic guards against a
// regression where a DSN made of bare "key=" pairs crashed the parser.
func TestParseConfigDSNWithTrailingEmptyEqualDoesNotPanic(t *testing.T) {
	_, err := ParseConfig("host= user= password= port= database=")
	require.NoError(t, err)
}
// TestParseConfigDSNLeadingEqual verifies that a DSN starting with '='
// (a value with no key) is rejected.
func TestParseConfigDSNLeadingEqual(t *testing.T) {
	_, err := ParseConfig("= user=vahid")
	require.Error(t, err)
}
// TestParseConfigDSNTrailingBackslash verifies that an unterminated
// escape at the end of a DSN is rejected with a descriptive error.
func TestParseConfigDSNTrailingBackslash(t *testing.T) {
	_, err := ParseConfig(`x=x\`)
	require.Error(t, err)
	assert.Contains(t, err.Error(), "invalid backslash")
}
// TestConfigCopyReturnsEqualConfig checks that Copy() produces a
// field-equal clone of a parsed config.
func TestConfigCopyReturnsEqualConfig(t *testing.T) {
	connString := "clickhouse://vahid:secret@localhost:9000/mydb?client_name=chxtest&search_path=myschema&connect_timeout=5"
	original, err := ParseConfig(connString)
	require.NoError(t, err)
	copied := original.Copy()
	assertConfigsEqual(t, original, copied, "Test Config.Copy() returns equal config")
}
// TestConfigCopyOriginalConfigDidNotChange checks that Copy() is a deep
// copy: mutating the clone (a scalar field and the RuntimeParams map)
// must leave the original untouched.
func TestConfigCopyOriginalConfigDidNotChange(t *testing.T) {
	connString := "clickhouse://vahid:secret@localhost:9000/mydb?client_name=chxtest&search_path=myschema&connect_timeout=5"
	original, err := ParseConfig(connString)
	require.NoError(t, err)
	copied := original.Copy()
	assertConfigsEqual(t, original, copied, "Test Config.Copy() returns equal config")
	copied.Port = uint16(5433)
	copied.RuntimeParams["foo"] = "bar"
	// The original must still hold its parsed values.
	assert.Equal(t, uint16(9000), original.Port)
	assert.Equal(t, "", original.RuntimeParams["foo"])
}
// TestConfigCopyCanBeUsedToConnect connects with a copied config against
// the server named by the CHX_TEST_TCP_CONN_STRING environment variable.
// NOTE(review): the connection returned by ConnectConfig is discarded and
// never closed — consider closing it to avoid leaking a socket per run.
func TestConfigCopyCanBeUsedToConnect(t *testing.T) {
	connString := os.Getenv("CHX_TEST_TCP_CONN_STRING")
	original, err := ParseConfig(connString)
	require.NoError(t, err)
	copied := original.Copy()
	assert.NotPanics(t, func() {
		_, err = ConnectConfig(context.Background(), copied)
	})
	assert.NoError(t, err)
}
// assertConfigsEqual compares two Configs field by field, labelling every
// failure with testName. Function-valued fields are only compared for
// nil-ness, and TLS configs are compared by their relevant fields.
//
// Fix: the two fallback-TLS assertions passed "%s - Fallback %d - ..."
// format strings but omitted the `i` argument, producing
// "%!d(MISSING)" in failure messages (and a go vet error).
func assertConfigsEqual(t *testing.T, expected, actual *Config, testName string) {
	if !assert.NotNil(t, expected) {
		return
	}
	if !assert.NotNil(t, actual) {
		return
	}

	assert.Equalf(t, expected.Host, actual.Host, "%s - Host", testName)
	assert.Equalf(t, expected.Database, actual.Database, "%s - Database", testName)
	assert.Equalf(t, expected.Port, actual.Port, "%s - Port", testName)
	assert.Equalf(t, expected.User, actual.User, "%s - User", testName)
	assert.Equalf(t, expected.Password, actual.Password, "%s - Password", testName)
	assert.Equalf(t, expected.ConnectTimeout, actual.ConnectTimeout, "%s - ConnectTimeout", testName)
	assert.Equalf(t, expected.ClientName, actual.ClientName, "%s - Client Name", testName)
	assert.Equalf(t, expected.RuntimeParams, actual.RuntimeParams, "%s - RuntimeParams", testName)

	// Can't test function equality, so just test that they are set or not.
	assert.Equalf(t, expected.ValidateConnect == nil, actual.ValidateConnect == nil, "%s - ValidateConnect", testName)
	assert.Equalf(t, expected.AfterConnect == nil, actual.AfterConnect == nil, "%s - AfterConnect", testName)

	if assert.Equalf(t, expected.TLSConfig == nil, actual.TLSConfig == nil, "%s - TLSConfig", testName) {
		if expected.TLSConfig != nil {
			assert.Equalf(t,
				expected.TLSConfig.InsecureSkipVerify,
				actual.TLSConfig.InsecureSkipVerify,
				"%s - TLSConfig InsecureSkipVerify",
				testName,
			)
			assert.Equalf(t,
				expected.TLSConfig.ServerName,
				actual.TLSConfig.ServerName,
				"%s - TLSConfig ServerName",
				testName,
			)
		}
	}

	if assert.Equalf(t, len(expected.Fallbacks), len(actual.Fallbacks), "%s - Fallbacks", testName) {
		for i := range expected.Fallbacks {
			assert.Equalf(t, expected.Fallbacks[i].Host, actual.Fallbacks[i].Host, "%s - Fallback %d - Host", testName, i)
			assert.Equalf(t, expected.Fallbacks[i].Port, actual.Fallbacks[i].Port, "%s - Fallback %d - Port", testName, i)

			if assert.Equalf(t,
				expected.Fallbacks[i].TLSConfig == nil,
				actual.Fallbacks[i].TLSConfig == nil,
				"%s - Fallback %d - TLSConfig",
				testName,
				i,
			) {
				if expected.Fallbacks[i].TLSConfig != nil {
					assert.Equalf(t,
						expected.Fallbacks[i].TLSConfig.InsecureSkipVerify,
						actual.Fallbacks[i].TLSConfig.InsecureSkipVerify,
						"%s - Fallback %d - TLSConfig InsecureSkipVerify", testName, i,
					)
					assert.Equalf(t,
						expected.Fallbacks[i].TLSConfig.ServerName,
						actual.Fallbacks[i].TLSConfig.ServerName,
						"%s - Fallback %d - TLSConfig ServerName",
						testName, i,
					)
				}
			}
		}
	}
}
// TestParseConfigEnv checks that ParseConfig("") picks connection
// parameters up from CH* environment variables. The current environment
// is snapshotted up front and restored when the test finishes.
// NOTE(review): variables that were originally unset are restored as
// empty strings rather than unset — confirm that is acceptable here.
func TestParseConfigEnv(t *testing.T) {
	tests := []struct {
		name    string
		envvars map[string]string
		config  *Config
	}{
		{
			// not testing no environment at all as that would use default host and that can vary.
			name:    "CHHOST only",
			envvars: map[string]string{"CHHOST": "172.16.58.3"},
			config: &Config{
				User:          defaultUsername,
				Host:          "172.16.58.3",
				Port:          9000,
				ClientName:    defaultClientName,
				Database:      defaultDatabase,
				TLSConfig:     nil,
				RuntimeParams: map[string]string{},
			},
		},
		{
			name: "All non-TLS environment",
			envvars: map[string]string{
				"CHHOST":            "172.16.58.3",
				"CHPORT":            "7777",
				"CHDATABASE":        "foo",
				"CHUSER":            "bar",
				"CHPASSWORD":        "<PASSWORD>",
				"CHCONNECT_TIMEOUT": "10",
				"CHSSLMODE":         "disable",
				"CHCLIENTNAME":      "chxtest",
			},
			config: &Config{
				Host:           "172.16.58.3",
				Port:           7777,
				Database:       "foo",
				User:           "bar",
				Password:       "<PASSWORD>",
				ConnectTimeout: 10 * time.Second,
				TLSConfig:      nil,
				ClientName:     "chxtest",
				RuntimeParams:  map[string]string{},
			},
		},
	}

	chEnvvars := []string{"CHHOST", "CHPORT", "CHDATABASE", "CHUSER", "CHPASSWORD", "CHCLIENTNAME", "CHSSLMODE", "CHCONNECT_TIMEOUT"}

	// Snapshot the current environment so it can be restored afterwards.
	savedEnv := make(map[string]string)
	for _, n := range chEnvvars {
		savedEnv[n] = os.Getenv(n)
	}
	defer func() {
		for k, v := range savedEnv {
			err := os.Setenv(k, v)
			if err != nil {
				t.Fatalf("Unable to restore environment: %v", err)
			}
		}
	}()

	for i, tt := range tests {
		// Start each case from a clean slate, then apply its variables.
		for _, n := range chEnvvars {
			err := os.Unsetenv(n)
			require.NoError(t, err)
		}
		for k, v := range tt.envvars {
			err := os.Setenv(k, v)
			require.NoError(t, err)
		}

		config, err := ParseConfig("")
		if !assert.Nilf(t, err, "Test %d (%s)", i, tt.name) {
			continue
		}

		assertConfigsEqual(t, tt.config, config, fmt.Sprintf("Test %d (%s)", i, tt.name))
	}
}
// TestParseConfigError exercises ParseConfig failure paths: malformed
// URLs, bad numeric fields, and every TLS misconfiguration. A temporary
// file with junk contents stands in for an invalid CA certificate.
func TestParseConfigError(t *testing.T) {
	t.Parallel()
	content := []byte("invalid tls")
	tmpInvalidTLS, err := ioutil.TempFile("", "invalidtls")
	if err != nil {
		t.Fatal(err)
	}
	defer os.Remove(tmpInvalidTLS.Name()) // clean up
	if _, err := tmpInvalidTLS.Write(content); err != nil {
		t.Fatal(err)
	}
	if err := tmpInvalidTLS.Close(); err != nil {
		t.Fatal(err)
	}
	// Each case asserts either the full error string (err) or, when the
	// interesting detail is wrapped, the unwrapped cause (errUnwarp).
	parseConfigErrorTests := []struct {
		name       string
		connString string
		err        string
		errUnwarp  string
	}{
		{
			name:       "invalid url",
			connString: "clickhouse://invalid\t",
			err:        "cannot parse `clickhouse://invalid\t`: failed to parse as URL (parse \"clickhouse://invalid\\t\": net/url: invalid control character in URL)", //nolint:lll //can't change line lengh
		}, {
			name:       "invalid port",
			connString: "port=invalid",
			errUnwarp:  "strconv.ParseUint: parsing \"invalid\": invalid syntax",
		}, {
			name:       "invalid port range",
			connString: "port=0",
			err:        "cannot parse `port=0`: invalid port (outside range)",
		}, {
			name:       "invalid connect_timeout",
			connString: "connect_timeout=200g",
			err:        "cannot parse `connect_timeout=200g`: invalid connect_timeout (strconv.ParseInt: parsing \"200g\": invalid syntax)",
		}, {
			name:       "negative connect_timeout",
			connString: "connect_timeout=-100",
			err:        "cannot parse `connect_timeout=-100`: invalid connect_timeout (negative timeout)",
		}, {
			name:       "negative sslmode",
			connString: "sslmode=invalid",
			err:        "cannot parse `sslmode=invalid`: failed to configure TLS (sslmode is invalid)",
		}, {
			name:       "fail load sslrootcert",
			connString: "sslrootcert=invalid_address sslmode=prefer",
			err:        "cannot parse `sslrootcert=invalid_address sslmode=prefer`: failed to configure TLS (unable to read CA file: open invalid_address: no such file or directory)", //nolint:lll //can't change line lengh
		}, {
			name:       "invalid sslrootcert",
			connString: "sslrootcert=" + tmpInvalidTLS.Name() + " sslmode=prefer",
			err:        "cannot parse `sslrootcert=" + tmpInvalidTLS.Name() + " sslmode=prefer`: failed to configure TLS (unable to add CA to cert pool)", //nolint:lll //can't change line lengh
		}, {
			name:       "not provide both sslcert and sskkey",
			connString: "sslcert=invalid_address sslmode=prefer",
			err:        "cannot parse `sslcert=invalid_address sslmode=prefer`: failed to configure TLS (both \"sslcert\" and \"sslkey\" are required)", //nolint:lll //can't change line lengh
		}, {
			name:       "invalid sslcert",
			connString: "sslcert=invalid_address sslkey=invalid_address sslmode=prefer",
			err:        "cannot parse `sslcert=invalid_address sslkey=invalid_address sslmode=prefer`: failed to configure TLS (unable to read cert: open invalid_address: no such file or directory)", //nolint:lll //can't change line lengh
		},
	}
	for i, tt := range parseConfigErrorTests {
		_, err := ParseConfig(tt.connString)
		if !assert.Errorf(t, err, "Test %d (%s)", i, tt.name) {
			continue
		}
		if tt.err != "" {
			if !assert.EqualError(t, err, tt.err, "Test %d (%s)", i, tt.name) {
				continue
			}
		} else {
			if !assert.EqualErrorf(t, errors.Unwrap(err), tt.errUnwarp, "Test %d (%s)", i, tt.name) {
				continue
			}
		}
	}
}
| vahid-sohrabloo/chpool |
<|start_filename|>psa/2021-08-02.html<|end_filename|>
<p>
UGWA has been updated for the 2021–22 school year. This should be the
last update for UGWA. Farewell!
</p>
| CoCowBubble/an-even-more-non-descriptive-title |
<|start_filename|>package.json<|end_filename|>
{
"name": "osssampleapp",
"version": "1.0.0",
"description": "Sample code to deploy node.js application using unified Yaml-defined CI/CD Pipelines of Azure DevOps",
"main": "server.js",
"dependencies": {
"express": "^4.14.0"
},
"devDependencies": {},
"scripts": {
"start": "node server.js"
},
"author": "cloudmelon",
"license": "MIT"
}
<|start_filename|>server.js<|end_filename|>
// Minimal Express app serving the static site in ./public.
var express = require('express');
var app = express();
// Honor the platform-assigned port (e.g. Azure App Service); fall back to 3000 locally.
var port = process.env.PORT || 3000;

app.use(express.static(__dirname + '/public'));

// Explicit route for '/'.
// Fix: res.render() requires a configured view engine and would throw at
// request time; res.sendFile() serves the static page directly.
app.get('/', function (req, res) {
    console.log('hello from server');
    res.sendFile(__dirname + '/public/index.html');
});

app.listen(port);
console.log('Server Listening at port'+port);
<|start_filename|>public/css/shop-homepage.css<|end_filename|>
/*!
* Start Bootstrap - Shop Homepage (http://startbootstrap.com/)
* Copyright 2013-2016 Start Bootstrap
* Licensed under MIT (https://github.com/BlackrockDigital/startbootstrap/blob/gh-pages/LICENSE)
*/
/* Pin the page to the viewport with no margins/padding so the
   full-screen canvas demo cannot scroll or shift. */
body,
html {
    margin: 0px;
    padding: 0px;
    position: fixed;
}
<|start_filename|>public/js/demo.js<|end_filename|>
window.onload = function() {
let c = init("canvas").c,
canvas = init("canvas").canvas,
w = (canvas.width = window.innerWidth),
h = (canvas.height = window.innerHeight);
//initiation
// Circumcenter of triangle ABC (intersection of the perpendicular
// bisectors), via the standard determinant formula.
function cc(A, B, C) {
    const d = 2 * (A.x * (B.y - C.y) + B.x * (C.y - A.y) + C.x * (A.y - B.y));
    const na = A.x * A.x + A.y * A.y;
    const nb = B.x * B.x + B.y * B.y;
    const nc = C.x * C.x + C.y * C.y;
    // Keep the (1/d)* form so floating-point results match exactly.
    return {
        x: (1 / d) * (na * (B.y - C.y) + nb * (C.y - A.y) + nc * (A.y - B.y)),
        y: (1 / d) * (na * (C.x - B.x) + nb * (A.x - C.x) + nc * (B.x - A.x))
    };
}
// Euclidean distance between points A and B.
function dist(A, B) {
    const dx = A.x - B.x;
    const dy = A.y - B.y;
    return Math.sqrt(Math.pow(dx, 2) + Math.pow(dy, 2));
}
// Mutable 2-D point.
class point {
    constructor(px, py) {
        this.x = px;
        this.y = py;
    }
    // Reposition in place (used on window resize).
    update(px, py) {
        this.x = px;
        this.y = py;
    }
}
// A triangle (a, b, c) that tracks its circumcenter and three derived
// "sub-centers" (s1..s3) used to spawn the next fractal iteration.
//
// Refactor: constructor, update() and move() previously duplicated
// ~15 lines of derived-value computation; that now lives once in
// recompute(). Dead commented-out code (t1-t3 variants, stroke-based
// show()) was removed.
class triangle {
    constructor(A, B, C, lw) {
        this.lw = lw; // line-width budget for this depth (halved per iteration)
        this.a = A;
        this.b = B;
        this.c = C;
        this.ty = this.c;
        this.recompute();
    }
    // Derive circumcenter, circumradius and the three offset centers
    // from the current vertices. Called by constructor/update/move.
    recompute() {
        this.s = cc(this.a, this.b, this.c);          // circumcenter
        this.x = dist(this.s, this.a);                // circumradius
        this.c1 = dist(this.a, this.b) / 2;           // half edge lengths
        this.c2 = dist(this.b, this.c) / 2;
        this.c3 = dist(this.c, this.a) / 2;
        this.a2ab = Math.atan2(this.a.y - this.b.y, this.a.x - this.b.x); // edge angles
        this.a2bc = Math.atan2(this.b.y - this.c.y, this.b.x - this.c.x);
        this.a2ca = Math.atan2(this.c.y - this.a.y, this.c.x - this.a.x);
        // Distance from circumcenter to each edge midpoint (Pythagoras).
        this.rab = Math.sqrt(this.x * this.x - this.c1 * this.c1);
        this.rbc = Math.sqrt(this.x * this.x - this.c2 * this.c2);
        this.rca = Math.sqrt(this.x * this.x - this.c3 * this.c3);
        // Offset centers: half-way toward each edge, perpendicular to it.
        this.s1 = {
            x: this.s.x + (this.rab / 2) * Math.cos(this.a2ab + Math.PI / 2),
            y: this.s.y + (this.rab / 2) * Math.sin(this.a2ab + Math.PI / 2)
        };
        this.s2 = {
            x: this.s.x + (this.rbc / 2) * Math.cos(this.a2bc + Math.PI / 2),
            y: this.s.y + (this.rbc / 2) * Math.sin(this.a2bc + Math.PI / 2)
        };
        this.s3 = {
            x: this.s.x + (this.rca / 2) * Math.cos(this.a2ca + 5 * Math.PI / 2),
            y: this.s.y + (this.rca / 2) * Math.sin(this.a2ca + 5 * Math.PI / 2)
        };
    }
    // Replace all three vertices (used on window resize).
    update(A, B, C) {
        this.a = A;
        this.b = B;
        this.c = C;
        this.ty = this.c;
        this.recompute();
    }
    // Drag only vertex c (follows the mouse / orbit target).
    move(m) {
        this.c = m;
        this.recompute();
    }
    // Spawn the next fractal depth: one triangle per (vertex, two
    // sub-centers) plus the inner sub-center triangle; recurse while
    // depth < 2, then render each child.
    iterate(it) {
        this.it = it;
        this.t4 = new triangle(this.a, this.s1, this.s3, this.lw / 2);
        this.t5 = new triangle(this.b, this.s2, this.s1, this.lw / 2);
        this.t6 = new triangle(this.c, this.s3, this.s2, this.lw / 2);
        this.t7 = new triangle(this.s1, this.s2, this.s3, this.lw / 2);
        if (it < 2) {
            this.t4.iterate(it + 1);
            this.t5.iterate(it + 1);
            this.t6.iterate(it + 1);
            this.t7.iterate(it + 1);
        }
        this.t4.show();
        this.t5.show();
        this.t6.show();
        this.t7.show();
    }
    // Plot the vertices as 2x2 white pixels and the four centers as
    // 1x1 cyan pixels on the shared canvas context `c`.
    show() {
        c.fillStyle = "white";
        c.fillRect(this.a.x - 1, this.a.y - 1, 2, 2);
        c.fillRect(this.b.x - 1, this.b.y - 1, 2, 2);
        c.fillRect(this.c.x - 1, this.c.y - 1, 2, 2);
        c.fillStyle = "#00ffff";
        c.fillRect(this.s.x - 0.5, this.s.y - 0.5, 1, 1);
        c.fillRect(this.s1.x - 0.5, this.s1.y - 0.5, 1, 1);
        c.fillRect(this.s2.x - 0.5, this.s2.y - 0.5, 1, 1);
        c.fillRect(this.s3.x - 0.5, this.s3.y - 0.5, 1, 1);
    }
}
// Scene setup: six points A..F on a circle of radius h/3 around the
// screen centre, the centre point G, six triangles fanning the hexagon
// (each sharing G as the draggable vertex), and the autonomous-orbit
// state (tx/ty target, spd phase).
let s = h/3,
    A = new point(
        w/2+s*Math.cos(0),
        h/2+s*Math.sin(0)),
    B = new point(
        w/2+s*Math.cos(Math.PI/3),
        h/2+s*Math.sin(Math.PI/3)),
    C = new point(
        w/2+s*Math.cos(2*Math.PI/3),
        h/2+s*Math.sin(2*Math.PI/3)),
    D = new point(
        w/2+s*Math.cos(Math.PI),
        h/2+s*Math.sin(Math.PI)),
    E = new point(
        w/2+s*Math.cos(4*Math.PI/3),
        h/2+s*Math.sin(4*Math.PI/3)),
    F = new point(
        w/2+s*Math.cos(5*Math.PI/3),
        h/2+s*Math.sin(5*Math.PI/3)),
    G = new point(
        w/2,
        h/2),
    lb = 1, // base line-width budget handed to each top-level triangle
    T = new triangle(A,B,G,lb),
    T2 = new triangle(B,C,G,lb),
    T3 = new triangle(C,D,G,lb),
    T4 = new triangle(D,E,G,lb),
    T5 = new triangle(E,F,G,lb),
    T6 = new triangle(F,A,G,lb),
    tx = w/2, // orbit target, starts at the centre
    ty = h/2,
    spd = 0;  // orbit phase, advanced each frame
// One animation step: drag every triangle's shared vertex toward the
// mouse when it is over the canvas, otherwise toward an elliptical
// orbit around the centre; then re-fractalize and render each fan.
function draw() {
    //animation
    if(mouse.x){
        // Pointer present: all six fans follow the cursor.
        T.move(mouse);
        T2.move(mouse);
        T3.move(mouse);
        T4.move(mouse);
        T5.move(mouse);
        T6.move(mouse);
    }else{
        // No pointer: follow the autonomous orbit target.
        T.move({x: tx,y: ty});
        T2.move({x: tx,y: ty});
        T3.move({x: tx,y: ty});
        T4.move({x: tx,y: ty});
        T5.move({x: tx,y: ty});
        T6.move({x: tx,y: ty});
        // Advance the orbit (slightly different x/y radii -> ellipse).
        tx = w/2+(s*Math.cos(Math.PI/6)-45)*Math.cos(spd);
        ty = h/2+(s*Math.cos(Math.PI/6)-70)*Math.sin(spd);
        spd+=0.05;
    }
    T.iterate(0);
    T2.iterate(0);
    T3.iterate(0);
    T4.iterate(0);
    T5.iterate(0);
    T6.iterate(0);
    T.show();
    T2.show();
    T3.show();
    T4.show();
    T5.show();
    T6.show();
}
// Pointer state: x/y are false while the cursor is outside the canvas,
// which draw() uses to switch between follow-mouse and orbit modes.
let mouse = {
    x: false,
    y: false
};
let last_mouse = {};

// Track the pointer in canvas-local coordinates.
canvas.addEventListener(
    "mousemove",
    function(e) {
        last_mouse.x = mouse.x;
        last_mouse.y = mouse.y;

        mouse.x = e.pageX - this.offsetLeft;
        mouse.y = e.pageY - this.offsetTop;
    },
    false
);

// Returning x to false re-enables the autonomous orbit.
canvas.addEventListener("mouseleave", function(e) {
    mouse.x = false;
});
// Grab the canvas element, size it to the viewport, paint the dark
// background, and return both the 2d context and the element.
function init(elemid) {
    const canvasEl = document.getElementById(elemid);
    const ctx = canvasEl.getContext("2d");
    const width = (canvasEl.width = window.innerWidth);
    const height = (canvasEl.height = window.innerHeight);
    ctx.fillStyle = "rgba(30,30,30,1)";
    ctx.fillRect(0, 0, width, height);
    return { c: ctx, canvas: canvasEl };
}
// NOTE(review): this looks like it was meant to be an IIFE polyfill
// (ending in `})();`) that assigns the native requestAnimationFrame or
// a setTimeout fallback. As written, window.requestAnimFrame is a
// function that *returns* the scheduler without ever invoking the
// callback, so `window.requestAnimFrame(loop)` inside loop() is
// effectively a no-op and the animation is actually driven by the
// setInterval at the bottom of this handler. Adding the missing `()`
// here without removing that setInterval would double-drive the loop —
// confirm the intent before changing either.
window.requestAnimFrame = (function() {
    return (
        window.requestAnimationFrame ||
        window.webkitRequestAnimationFrame ||
        window.mozRequestAnimationFrame ||
        window.oRequestAnimationFrame ||
        window.msRequestAnimationFrame ||
        function(callback) {
            window.setTimeout(callback);
        }
    );
});
// One frame: repaint the solid dark background, then draw the scene.
// NOTE(review): the requestAnimFrame call does not actually schedule
// anything (the shim above returns the scheduler without invoking it),
// so frame timing comes from the setInterval at the end of onload.
function loop() {
    window.requestAnimFrame(loop);
    c.fillStyle = "rgba(30,30,30,1)";
    c.fillRect(0, 0, w, h);
    draw();
}
// On resize: rescale the canvas, rebuild the hexagon geometry for the
// new viewport (radius s = h/3), reset the orbit target to the new
// centre, and render a frame immediately.
// Note the comma-operator chaining below — these are all one statement
// sequence; keep the order intact (points before triangles).
window.addEventListener("resize", function() {
    (w = canvas.width = window.innerWidth),
        (h = canvas.height = window.innerHeight);
    s = h/3,
    A.update(w/2+s*Math.cos(0),h/2+s*Math.sin(0));
    B.update(
        w/2+s*Math.cos(Math.PI/3),
        h/2+s*Math.sin(Math.PI/3)),
    C.update(
        w/2+s*Math.cos(2*Math.PI/3),
        h/2+s*Math.sin(2*Math.PI/3)),
    D.update(
        w/2+s*Math.cos(Math.PI),
        h/2+s*Math.sin(Math.PI)),
    E.update(
        w/2+s*Math.cos(4*Math.PI/3),
        h/2+s*Math.sin(4*Math.PI/3)),
    F.update(
        w/2+s*Math.cos(5*Math.PI/3),
        h/2+s*Math.sin(5*Math.PI/3)),
    G.update(
        w/2,
        h/2),
    T.update(A,B,G),
    T2.update(B,C,G),
    T3.update(C,D,G),
    T4.update(D,E,G),
    T5.update(E,F,G),
    T6.update(F,A,G),
    tx = w/2,
    ty = h/2,
    loop();
});
// Kick off rendering: one immediate frame, then a ~60 FPS interval.
// The interval is the effective driver — the requestAnimFrame shim
// above never schedules its callback.
loop();
setInterval(loop, 1000 / 60);
} | ronaldesp/oss-cicd-devops |
<|start_filename|>templates/invoice.html<|end_filename|>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>{{ title_html }}</title>
<style type="text/css">
div {
margin-top: 1em;
}
body {
font-size: 14px;
font-family: Arial;
color: #222;
margin: 0;
padding: 2em;
}
.client p {
padding-top: 1em;
}
#top-logo {
float: left;
margin-right: 1em;
width: 100px;
}
h2 {
margin:0;
}
#seller-title {
}
.invoice-info {
margin: 0;
float: right;
}
dl { width: 100%; overflow: hidden; padding: 0; margin: 0 }
dt { float: left; width: 80%; /* adjust the width; make sure the total of both is 100% */ padding: 0; margin: 0 }
dd { font-weight: bold; float: left; width: 20%; /* adjust the width; make sure the total of both is 100% */ padding: 0; margin: 0 }
.account .address {
margin-top: 0;
}
hr.topgap {
margin: 5em 0 0 0;
border: 0;
}
.align-right {
text-align: right;
}
.align-center {
text-align: center;
}
.seller_logo {
}
table.items {
width: 100%;
margin: 1em 0;
}
table.items tr:nth-child(even) {
background-color: #EEE;
}
table.items th, table.items td {
padding: 0.25em 0 0.25em 0;
}
.font-bold {
font-weight: bold;
}
table.items thead th {
border-bottom: 1px solid #333;
}
table.items thead {
text-align: left;
}
table.items thead tr {
margin: 1em 0;
}
table.items tbody tr td:last-child,
table.items thead tr th:last-child {
text-align: right;
}
.total-due {
padding-right:2em;
}
table.items tfoot td {
border: 3px double #333;
border-left: 0;
border-right: 0;
padding: 1em 0;
}
</style>
</head>
<body>
<div class="invoice-info">
Invoice ID: {{ invoice_id }}
</div>
<img src="{{ seller.logo }}" alt="{{ seller.name }}" class="seller_logo" id="top-logo" />
<h2 id="seller-title">{{ title }}</h2>
{{ seller.address }}
<div class="client">
<p><strong>BILL TO:</strong></p>
<h2>{{ buyer.name }}</h2>
{{ buyer.address }}
</div>
<dl class="purchase-info">
<dd>Date of Purchase:</dd>
<dt>{{ date_of_purchase or "..." }}</dt>
<dd>Purchase Order No.:</dd>
<dt>{{ purchase_order_no or "..." }}</dt>
<dd>Job No.:</dd>
<dt>{{ job_no or "..." }}</dt>
</dl>
<table class="items">
<thead>
<tr>{% for attr in attribute_keys %}
<th>{{ attr }}</th>{% endfor %}
</tr>
</thead>
<tbody>{% for item in items %}
<tr>{% for val in item.vals %}
<td>{{ item[val] }}</td>{% endfor %}
</tr>{% endfor %}
</tbody>
<tfoot class="align-right">
<tr>
<td colspan="{{ attribute_keys|length }}"><strong class="total-due">TOTAL DUE:</strong> {{ total }}</td>
</tr>
</tfoot>
</table>
<div class="align-center">TERMS: {{ terms.string }}</div>
<div class="remittance-info">
{% if seller.account.number %}
<strong>PLEASE REMIT PAYMENT BY WIRE TRANSFER TO:</strong>
{{ seller.account }}
{% endif %}
<p>For any payment questions, call {{ seller.address.phone }} or email <a href="mailto:{{ seller.address.email }}">{{ seller.address.email }}</a>.</p>
</div>
</body>
</html>
| lambdal/envois |
<|start_filename|>.repo-metadata.json<|end_filename|>
{
"name": "dialogflow",
"name_pretty": "Dialogflow",
"product_documentation": "https://www.dialogflow.com/",
"client_documentation": "https://cloud.google.com/python/docs/reference/dialogflow/latest",
"issue_tracker": "https://issuetracker.google.com/savedsearches/5300385",
"release_level": "stable",
"language": "python",
"library_type": "GAPIC_AUTO",
"repo": "googleapis/python-dialogflow",
"distribution_name": "google-cloud-dialogflow",
"api_id": "dialogflow.googleapis.com",
"requires_billing": true,
"default_version": "v2",
"codeowner_team": "@googleapis/cdpe-cloudai",
"api_shortname": "dialogflow"
}
| reichenbch/python-dialogflow |
<|start_filename|>realm-manager-example/src/main/java/com/zhuinden/realmmanagerexample/CustomApplication.java<|end_filename|>
package com.zhuinden.realmmanagerexample;
import android.app.Application;
import com.zhuinden.realmmanagerexample.automigration.AutoMigration;
import io.realm.Realm;
import io.realm.RealmConfiguration;
/**
* Created by Zhuinden on 2017.09.24..
*/
/**
 * Application subclass that bootstraps Realm and the Dagger singleton component
 * before any Activity runs.
 */
public class CustomApplication
        extends Application {
    @Override
    public void onCreate() {
        super.onCreate();
        // Realm must be initialized with a Context before any Realm instance is opened.
        Realm.init(this);
        Realm.setDefaultConfiguration(new RealmConfiguration.Builder() //
                .schemaVersion(11) //
                .migration(new AutoMigration()) //
                // Seed data, run only when the Realm file is first created.
                // NOTE(review): the same unmanaged Cat instance is reused for every
                // name; realm.insert() copies the object's state at call time, so this
                // should insert one row per CatNames value -- confirm against Realm docs.
                .initialData(realm -> {
                    Cat cat = new Cat();
                    for(CatNames catName : CatNames.values()) {
                        cat.setName(catName.getName());
                        realm.insert(cat);
                    }
                }) //
                .build());
        // Wire up the application-wide Dagger component for later injection.
        SingletonComponent singletonComponent = DaggerSingletonComponent.create();
        Injector.setComponent(singletonComponent);
    }
}
| Zhuinden/singleton-realm-manager |
<|start_filename|>modules/kernel/src/process/ProcessInit.cc<|end_filename|>
//===================================================================================================================
//
// ProcessInit.cc -- Initialize the process structures
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-21 Initial 0.2.0 ADCL Initial version
// 2018-Dec-02 Initial 0.2.0 ADCL Provide the details of this function
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
// 2019-Mar-15 Initial 0.3.2 ADCL Update for new Process_t
//
//===================================================================================================================
#include "cpu.h"
#include "heap.h"
#include "timer.h"
#include "process.h"
//
// -- we need the `mmuLvl1Table` variable from the loader, but do not want to include all of
// what we did in the loader
// --------------------------------------------------------------------------------------
extern archsize_t mmuLvl1Table;
//
// -- Initialize the process structures
// ---------------------------------
EXPORT LOADER
void ProcessInit(void)
{
    //
    // -- Build the Process_t structure for the code that is currently executing (the butler/kInit
    //    process). Check the allocation BEFORE the pointer is published or written through -- the
    //    previous version assigned it as the current thread and only then checked for NULL.
    //    ----------------------------------------------------------------------------------------
    Process_t *proc = NEW(Process_t);
    if (!assert(proc != NULL)) {
        CpuPanicPushRegs("Unable to allocate Current Process structure");
    }

    // -- zero the structure first, then publish it as the current thread
    kMemSetB(proc, 0, sizeof(Process_t));
    CurrentThreadAssign(proc);

    proc->topOfStack = 0;
    proc->virtAddrSpace = mmuLvl1Table;             // -- inherited from the loader's MMU setup
    proc->pid = scheduler.nextPID ++;               // -- this is the butler process ID
    proc->ssAddr = STACK_LOCATION;

    // -- set the process name
    proc->command = (char *)HeapAlloc(20, false);
    kMemSetB(proc->command, 0, 20);
    kStrCpy(proc->command, "kInit");

    proc->policy = POLICY_0;
    proc->priority = PTY_OS;
    proc->status = PROC_RUNNING;
    AtomicSet(&proc->quantumLeft, PTY_OS);
    proc->timeUsed = 0;
    proc->wakeAtMicros = 0;
    ListInit(&proc->stsQueue);
    ListInit(&proc->references.list);
    ProcessDoAddGlobal(proc);           // no lock required -- still single threaded

    CLEAN_SCHEDULER();
    CLEAN_PROCESS(proc);

    kprintf("ProcessInit() established the current process at %p for CPU%d\n", proc, thisCpu->cpuNum);

    //
    // -- Create an idle process for each CPU
    //    -----------------------------------
    for (int i = 0; i < cpus.cpusDiscovered; i ++) {
        // -- Note ProcessCreate() creates processes at the OS priority...
        kprintf("starting idle process %d\n", i);
        ProcessCreate("Idle Process", ProcessIdle);
    }

    thisCpu->lastTimer = TimerCurrentCount(timerControl);

    kprintf("ProcessInit() complete\n");
}
<|start_filename|>platform/bcm2836/pic/PicInit.cc<|end_filename|>
//===================================================================================================================
//
// PicInit.cc -- Initialize the rpi2b pic
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "timer.h"
//
// -- Initialize the pic
// ------------------
EXTERN_C EXPORT KERNEL
void _PicInit(PicDevice_t *dev, const char *name)
{
    // -- no device, nothing to initialize (the name parameter is not used here)
    if (!dev) return;

    Bcm2835Pic_t *picData = (Bcm2835Pic_t *)dev->device.deviceData;

    // -- NOTE(review): the register bases are captured *before* core 0 overwrites
    //    picLoc/timerLoc below, so all MMIO writes in this call go through the
    //    previously stored addresses -- confirm this ordering is intentional
    PicBase_t base1 = picData->picLoc;
    PicBase_t base2 = picData->timerLoc;
    int core = thisCpu->cpuNum;

    // -- controller-wide (one-time) initialization, performed only by core 0
    if (core == 0) {
        picData->picLoc = BCM2835_PIC;
        picData->timerLoc = BCM2835_TIMER;

        // -- for good measure, disable the FIQ
        MmioWrite(base1 + INT_FIQCTL, 0x0);
        MmioWrite(base2 + TIMER_LOCAL_CONTROL, 0x00000000);     // ensure the local timer is disabled

        // -- Disable all IRQs -- write to clear, anything that is high will be pulled low
        MmioWrite(base1 + INT_IRQDIS0, 0xffffffff);
        MmioWrite(base1 + INT_IRQDIS1, 0xffffffff);
        MmioWrite(base1 + INT_IRQDIS2, 0xffffffff);
    }

    // -- perform the per-core initialization (each register is an array indexed by core, 4 bytes apart)
    MmioWrite(base2 + TIMER_INTERRUPT_CONTROL + (core * 4), 0x00000002);       // select as IRQ for core
    MmioWrite(base2 + MAILBOX_INTERRUPT_CONTROL + (core * 4), 0x0000000f);     // Select IRQ handling for the IPI
    MmioWrite(base2 + TIMER_IRQ_SOURCE + (core * 4), 0x00000002);              // Select IRQ Source for the timer
}
<|start_filename|>platform/bcm2836/gpio/GpioEnablePin.cc<|end_filename|>
//===================================================================================================================
//
// GpioEnablePin.cc -- Enable a pin on the GPIO block
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "hardware.h"
//
// -- Select the alternate function for a Gpio pin
// --------------------------------------------
EXTERN_C EXPORT KERNEL
void _GpioEnablePin(GpioDevice_t *dev, GpioPin_t pin)
{
    // -- sanity checks: need a device and a pin in the BCM2835 range 0..53
    if (!dev) return;
    if (pin < 0 || pin > 53) return;

    volatile int i;            // make sure the compiler does not try to optimize this away

    int bank = pin / 32;       // -- 32 pins per pull-up/down clock register
    int shift = pin % 32;      // -- bit position within that register

    // -- pull-up/down sequence: write GPPUD (0 == disable pulls), wait,
    //    clock the selected pin, wait again (busy loops provide the required delay)
    MmioWrite(dev->base + GPIO_GPPUD, 0x00000000);
    for (i = 0; i < 200; i ++) {}
    // -- NOTE(review): pins 0-31 compute bank 0 and therefore write GPIO_GPPUDCLK1 + 0;
    //    verify that constant actually names the *first* clock register, otherwise every
    //    bank is shifted by one register -- TODO confirm against the BCM2835 datasheet
    MmioWrite(dev->base + GPIO_GPPUDCLK1 + bank * 4, 1 << shift);
    for (i = 0; i < 200; i ++) {}
//    MmioWrite(dev->base + GPIO_GPPUD, 0x00000000);
//    MmioWrite(dev->base + GPIO_GPPUDCLK1 + bank * 4, 0x00000000);
//    for (i = 0; i < 200; i ++) {}
}
<|start_filename|>modules/kernel/inc/elf.h<|end_filename|>
//===================================================================================================================
//
// elf.h -- The ELF Structures for loading ELF files
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// These are the common structures for 32- and 64-bit ELF executables
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ---------- ------- ------- ---- ----------------------------------------------------------------------------
// 2018-06-27 Initial 0.1.0 ADCL Initial version
//
//===================================================================================================================
#pragma once
#include "types.h"
//
// -- The number of identifying bytes
// -------------------------------
#define ELF_NIDENT 16
//
// -- The following are the meanings of the different positions in the eIdent field
// -----------------------------------------------------------------------------
enum {
EI_MAG0 = 0, // File Identification
EI_MAG1 = 1,
EI_MAG2 = 2,
EI_MAG3 = 3,
EI_CLASS = 4, // File Class
EI_DATA = 5, // Data Encoding
EI_VERSION = 6, // File Version
EI_OSABI = 7, // OS/ABI Identification
EI_ABIVERSION = 8, // ABI Version
EI_PAD = 9, // padding bytes in eIdent
};
//
// -- The following are the possible values for the ELF class, indicating what size the file objects
// ----------------------------------------------------------------------------------------------
enum {
ELFCLASS_NONE = 0, // Invalid
ELFCLASS_32 = 1, // 32-bit objects
ELFCLASS_64 = 2, // 64-bit objects
};
//
// -- The following are the possible values for the ELF Data encoding (big- or little-endian)
// ---------------------------------------------------------------------------------------
enum {
ELFDATA_NONE = 0, // Invalid
ELFDATA_LSB = 1, // Binary values are in little endian order
ELFDATA_MSB = 2, // Binary values are in big endian order
};
//
// -- The following are the possible values for the ELF Version, which only has 1 possible value
// ------------------------------------------------------------------------------------------
enum {
EV_CURRENT = 1, // This is the only valid value
};
//
// -- The following are the possible values for OS/ABI
// ------------------------------------------------
enum {
ELFOSABI_SYSV = 0, // System V ABI
ELFOSABI_HPUX = 1, // HP-UX Operating system
ELFOSABI_STANDALONE = 255, // Standalone Application
};
//
// -- The following are the defined types
// -----------------------------------
enum {
ET_NONE = 0, // No file type
ET_REL = 1, // Relocatable file
ET_EXEC = 2, // Executable file
ET_DYN = 3, // Dynamic or Shared object file
ET_CORE = 4, // Core file
ET_LOOS = 0xfe00, // Environment-specific use
ET_HIOS = 0xfeff,
ET_LOPROC = 0xff00, // Processor-specific use
ET_HIPROC = 0xffff,
};
//
// -- These are the program segment flags
// -----------------------------------
enum {
PF_X = 0x01, // The segment is executable
PF_W = 0x02, // The segment is writable
PF_R = 0x04, // The segment is readable
};
//
// -- Architecture-independent check for the ELF signature
// ----------------------------------------------------
#define HAS_ELF_MAGIC(x) ((x)->eIdent[EI_MAG0] == 0x7f && \
(x)->eIdent[EI_MAG1] == 'E' && \
(x)->eIdent[EI_MAG2] == 'L' && \
(x)->eIdent[EI_MAG3] == 'F')
//
// -- This is the common part of the Elf File Header and will used to complete a sanity check before continuing
// ---------------------------------------------------------------------------------------------------------
typedef struct ElfHdrCommon_t {
unsigned char eIdent[ELF_NIDENT];
elfHalf_t eType;
elfHalf_t eMachine;
elfWord_t eversion;
} __attribute__((packed)) ElfHdrCommon_t;
//
// -- This is the 32-bit ELF File Header Definition
// ---------------------------------------------
typedef struct Elf32EHdr_t {
    unsigned char eIdent[ELF_NIDENT];
    elfHalf_t eType;
    elfHalf_t eMachine;
    elfWord_t eversion;
    elf32Addr_t eEntry;
    elf32Off_t ePhOff;                  // Program Header offset
    elf32Off_t eShOff;                  // Section Header offset
    elfWord_t eFlags;
    elfHalf_t eHSize;                   // ELF Header Size (size of this header, not the program header)
    elfHalf_t ePhEntSize;               // Program Header Entry Size
    elfHalf_t ePhNum;                   // Program Header Entry Count
    elfHalf_t eShEntSize;               // Section Header Entry Size
    elfHalf_t eShNum;                   // Section Header Entry Count
    elfHalf_t eShStrNdx;                // Section Number for the string table
} __attribute__((packed)) Elf32EHdr_t;
//
// -- This is the 64-bit ELF File Header Definition
// ---------------------------------------------
typedef struct Elf64EHdr_t {
    unsigned char eIdent[ELF_NIDENT];
    elfHalf_t eType;
    elfHalf_t eMachine;
    elfWord_t eversion;
    elf64Addr_t eEntry;
    elf64Off_t ePhOff;                  // Program Header offset
    elf64Off_t eShOff;                  // Section Header offset
    elfWord_t eFlags;
    elfHalf_t eHSize;                   // ELF Header Size (size of this header, not the program header)
    elfHalf_t ePhEntSize;               // Program Header Entry Size
    elfHalf_t ePhNum;                   // Program Header Entry Count
    elfHalf_t eShEntSize;               // Section Header Entry Size
    elfHalf_t eShNum;                   // Section Header Entry Count
    elfHalf_t eShStrNdx;                // Section Number for the string table
} __attribute__((packed)) Elf64EHdr_t;
//
// -- This is the 32-bit ELF Program Header, which is needed to determine how to load the executable
// ----------------------------------------------------------------------------------------------
typedef struct Elf32PHdr_t {
elfWord_t pType; // Type of segment
elf32Off_t pOffset; // Offset in file
elf32Addr_t pVAddr; // Virtual Address in Memory
elf32Addr_t pPAddr; // Reserved or meaningless
elfWord_t pFileSz; // Size of segment in file
elfWord_t pMemSz; // Size of segment in memory
elfWord_t pFlags; // Segment Attributes
elfWord_t pAlign; // Alignment of segment
} __attribute__((packed)) Elf32PHdr_t;
//
// -- This is the 64-bit ELF Program Header, which is needed to determine how to load the executable
// ----------------------------------------------------------------------------------------------
typedef struct Elf64PHdr_t {
elfWord_t pType; // Type of segment
elfWord_t pFlags; // Segment Attributes
elf64Off_t pOffset; // Offset in file
elf64Addr_t pVAddr; // Virtual Address in Memory
elf64Addr_t pPAddr; // Reserved or meaningless
elfXWord_t pFileSz; // Size of segment in file
elfXWord_t pMemSz; // Size of segment in memory
elfXWord_t pAlign; // Alignment of segment
} __attribute__((packed)) Elf64PHdr_t;
<|start_filename|>arch/x86/cpu/ArchLateCpuInit.cc<|end_filename|>
//===================================================================================================================
//
// ArchLateCpuInit.cc -- Polish off the CPU structures for the x86 arch
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Feb-01 Initial v0.5.0f ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "cpu.h"
//
// -- Complete the CPU initialization
// -------------------------------
EXTERN_C EXPORT LOADER
void ArchLateCpuInit(int c)
{
    kprintf("Finalizing CPU initialization\n");
    // -- load the per-cpu GS segment selector and TSS selector for cpu `c`
    ArchGsLoad(cpus.perCpuData[c].gsSelector);
    ArchTssLoad(cpus.perCpuData[c].tssSelector);
}
<|start_filename|>platform/pc/inc/platform-serial.h<|end_filename|>
//===================================================================================================================
//
// platform-serial.h -- Serial definitions and functions for the x86 serial port
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-23 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#pragma once
#ifndef __SERIAL_H__
# error "Use #include \"serial.h\" and it will pick up this file; do not #include this file directly."
#endif
//
// -- on x86, this is the type we use to refer to the serial port
// -----------------------------------------------------------
typedef uint16_t SerialBase_t;
//
// -- Some constants used to help manage the serial port
// --------------------------------------------------
#define COM1                        (0x3f8)     // -- I/O base of the first PC serial port
#define SERIAL_DATA                 0           // -- RX (read) / TX (write) data register (DLAB=0)
#define SERIAL_DIVISOR_LSB          0           // -- baud-rate divisor, low byte (DLAB=1)
#define SERIAL_INTERRUPT_ENABLE     1           // -- interrupt enable register (DLAB=0)
#define SERIAL_DIVISOR_MSB          1           // -- baud-rate divisor, high byte (DLAB=1)
#define SERIAL_FIFO_CONTROL         2           // -- FIFO control register (write)
#define SERIAL_LINE_CONTROL         3           // -- line control register (word length, DLAB bit)
#define SERIAL_MODEM_CONTROL        4           // -- modem control register (DTR/RTS/OUT2)
#define SERIAL_LINE_STATUS          5           // -- line status register (TX-empty, data-ready bits)
<|start_filename|>platform/bcm2836/timer/TimerCurrentCount.cc<|end_filename|>
//===================================================================================================================
//
// TimerCurrentCount.cc -- Get the current count from the timer
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-06 Initial v0.6.0a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "timer.h"
//
// -- Get the number of ticks since boot
// ----------------------------------
EXTERN_C EXPORT KERNEL
uint64_t _TimerCurrentCount(TimerDevice_t *dev)
{
    // -- the count comes straight from the ARM CNTPCT counter register;
    //    the device parameter is unused here
    return READ_CNTPCT();
}
<|start_filename|>modules/kernel/src/pmm/PmmDoRemoveFrame.cc<|end_filename|>
//===================================================================================================================
//
// PmmDoRemoveFrame.cc -- This is a worker function to remove a frame from a given list and optionally clear it
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-11 Initial 0.3.1 ADCL Initial version
// 2020-Apr-12 #405 v0.6.1c ADCL Redesign the PMM to store the stack in the freed frames themselves
//
//===================================================================================================================
#include "types.h"
#include "spinlock.h"
#include "heap.h"
#include "pmm.h"
//
// -- Given the stack, remove a frame from the top of the stack (lock MUST be held to call)
// -------------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
frame_t _PmmDoRemoveFrame(PmmFrameInfo_t *stack, bool scrub)
{
    // -- an unmapped stack page means there is nothing to hand out; note that in
    //    this case the frame is NOT scrubbed (there is no frame)
    if (!MmuIsMapped((archsize_t)stack)) return 0;

    // -- take the highest frame in the top node and shrink the node by one
    frame_t frame = stack->frame + stack->count - 1;
    stack->count --;
    AtomicDec(&pmm.framesAvail);

    // -- when the node is exhausted, discard it from the stack
    if (stack->count == 0) {
        PmmPop(stack);
    }

    // -- scrub the frame if requested
    if (scrub) PmmScrubFrame(frame);

    return frame;
}
<|start_filename|>modules/kernel/src/process/ProcessReady.cc<|end_filename|>
//===================================================================================================================
//
// ProcessReady.cc -- Ready a process by putting it on the proper ready queue
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-30 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "lists.h"
#include "process.h"
//
// -- Make a process ready to run
// ---------------------------
EXPORT KERNEL
void ProcessDoReady(Process_t *proc)
{
#if DEBUG_ENABLED(ProcessDoReady)
    kprintf("Attempting to ready process at %p\n", proc);
#endif

    // -- the caller must already hold the scheduler lock; this only asserts it
    if (!assert(proc != NULL)) return;
    assert_msg(AtomicRead(&scheduler.schedulerLockCount) > 0, "Calling `ProcessDoReady()` without the proper lock");

    proc->status = PROC_READY;

    // -- enqueue onto the ready queue that matches the process priority
    switch(proc->priority) {
    case PTY_OS:
        Enqueue(&scheduler.queueOS, &proc->stsQueue);
        break;

    case PTY_HIGH:
        Enqueue(&scheduler.queueHigh, &proc->stsQueue);
        break;

    default:
        // in this case, we have a priority that is not right; assume normal from now on
        proc->priority = PTY_NORM;
        // ... fall through (deliberate: the repaired priority lands on the normal queue)

    case PTY_NORM:
        Enqueue(&scheduler.queueNormal, &proc->stsQueue);
        break;

    case PTY_LOW:
        Enqueue(&scheduler.queueLow, &proc->stsQueue);
        break;

    case PTY_IDLE:
        Enqueue(&scheduler.queueIdle, &proc->stsQueue);
        break;
    }
}
<|start_filename|>modules/kernel/src/loader/LoaderFunctionInit.cc<|end_filename|>
//===================================================================================================================
//
// LoaderFunctionsInit.cc -- Set up the physical address for any functions called by the loader.
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-12 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "serial.h"
#include "entry.h"
#include "loader.h"
//
// -- Perform this function initialization
// ------------------------------------
EXTERN_C EXPORT LOADER
void LoaderFunctionInit(void)
{
    // -- walk the table of registered function pointers between init_start and
    //    init_end, invoking each one in order
    for (FunctionPtr_t *fn = (FunctionPtr_t *)init_start; fn != (FunctionPtr_t *)init_end; ++ fn) {
        (*fn)();                // -- call the registered initialization function
    }
}
<|start_filename|>modules/kernel/src/hardware/mb1.cc<|end_filename|>
//===================================================================================================================
//
// mb1.c -- This is the parser for the Multiboot 1 information structure
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2017-Jun-05 Initial 0.1.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "serial.h"
#include "mmu.h"
#include "printf.h"
#include "hw-disc.h"
//
// -- This is the loaded modules block (which will repeat)
// ----------------------------------------------------
typedef struct Mb1Mods_t {
    uint32_t modStart;                  // -- physical start address of the loaded module
    uint32_t modEnd;                    // -- physical end address of the loaded module
    char *modIdent;                     // -- module identification string (physical pointer)
    uint32_t modReserved;               // -- reserved by the MB1 spec (set to 0 by the boot loader)
} __attribute__((packed)) Mb1Mods_t;
//
// -- Memory Map entries, which will repeat (pointer points to mmapAddr)
// ------------------------------------------------------------------
typedef struct Mb1MmapEntry_t {
    uint32_t mmapSize;                  // -- size of this entry, NOT counting this size field itself
    uint64_t mmapAddr;                  // -- base physical address of the region
    uint64_t mmapLength;                // -- length of the region in bytes
    uint32_t mmapType;                  // -- 1 == available RAM; all other values are reserved
} __attribute__((packed)) Mb1MmapEntry_t;
//
// -- This is the Multiboot 1 information structure as defined by the spec
// --------------------------------------------------------------------
typedef struct MB1 {
//
// -- These flags indicate which data elements have valid data
// --------------------------------------------------------
const uint32_t flags;
//
// -- The basic memory limits are valid when flag 0 is set; these values are in kilobytes
// -----------------------------------------------------------------------------------
const uint32_t availLowerMem;
const uint32_t availUpperMem;
//
// -- The boot device when flag 1 is set
// ----------------------------------
const uint32_t bootDev;
//
// -- The command line for this kernel when flag 2 is set
// ---------------------------------------------------
const uint32_t cmdLine;
//
// -- The loaded module list when flag 3 is set
// -----------------------------------------
const uint32_t modCount;
const uint32_t modAddr;
//
// -- The ELF symbol information (a.out-type symbols are not supported) when flag 5 is set
// ------------------------------------------------------------------------------------
const uint32_t shdrNum; // may still be 0 if not available
const uint32_t shdrSize;
const uint32_t shdrAddr;
const uint32_t shdrShndx;
//
// -- The Memory Map information when flag 6 is set
// ---------------------------------------------
const uint32_t mmapLen;
const uint32_t mmapAddr;
//
// -- The Drives information when flag 7 is set
// -----------------------------------------
const uint32_t drivesLen;
const uint32_t drivesAddr;
//
// -- The Config table when flag 8 is set
// -----------------------------------
const uint32_t configTable;
//
// -- The boot loader name when flag 9 is set
// ---------------------------------------
const uint32_t bootLoaderName;
//
// -- The APM table location when bit 10 is set
// -----------------------------------------
const uint32_t apmTable;
//
// -- The VBE interface information when bit 11 is set
// ------------------------------------------------
const uint32_t vbeControlInfo;
const uint32_t vbeModeInfo;
const uint16_t vbeMode;
const uint16_t vbeInterfaceSeg;
const uint16_t vbeInterfaceOff;
const uint16_t vbeInterfaceLen;
//
// -- The FrameBuffer information when bit 12 is set
// ----------------------------------------------
const uint64_t framebufferAddr;
const uint32_t framebufferPitch;
const uint32_t framebufferWidth;
const uint32_t framebufferHeight;
const uint8_t framebufferBpp;
const uint8_t framebufferType;
union {
struct {
const uint8_t framebufferRedFieldPos;
const uint8_t framebufferRedMaskSize;
const uint8_t framebufferGreenFieldPos;
const uint8_t framebufferGreenMaskSize;
const uint8_t framebufferBlueFieldPos;
const uint8_t framebufferBlueMaskSize;
};
struct {
const uint32_t framebufferPalletAddr;
const uint16_t framebufferPalletNumColors;
};
};
} __attribute__((packed)) MB1;
//
// -- This is the address of the MB1 Multiboot information structure
// --------------------------------------------------------------
EXTERN EXPORT LOADER_BSS MB1 *mb1Data;
//
// -- A quick MACRO to help determine if a flag is set
// ------------------------------------------------
#define CHECK_FLAG(f) (mb1Data->flags & (1<<f))
//
// -- Parse the Multiboot 1 header
// ----------------------------
EXTERN_C EXPORT LOADER
void Mb1Parse(void)
{
    // -- nothing to do when the boot loader did not provide an MB1 structure
    if (!mb1Data) return;

    kprintf("Found the mbi structure at %p\n", mb1Data);

    //
    // -- the mb1Data structure needs to be identity mapped (2 pages, in case it straddles a boundary)
    //    --------------------------------------------------------------------------------------------
    archsize_t mb1Page = (archsize_t)mb1Data;
    MmuMapToFrame(mb1Page, mb1Page >> 12, PG_KRN);
    MmuMapToFrame(mb1Page + PAGE_SIZE, (mb1Page + PAGE_SIZE) >> 12, PG_KRN);

    kprintf(" The flags are: %p\n", mb1Data->flags);

    //
    // -- Check for basic memory information
    //    ----------------------------------
    if (CHECK_FLAG(0)) {
        kprintf("Setting basic memory information\n");
        SetAvailLowerMem(mb1Data->availLowerMem);
        SetAvailUpperMem(mb1Data->availUpperMem);
    }

    //
    // -- Check for boot device information
    //    ---------------------------------
    if (CHECK_FLAG(1)) {
        // TODO: Implement this feature
    }

    //
    // -- Check for the command line -- we might have parameters to the loader
    //    NOTE(review): cmdLine is a uint32_t physical address printed via %s; this relies
    //    on a 32-bit identity-mapped environment -- confirm before reusing elsewhere
    //    --------------------------------------------------------------------
    if (CHECK_FLAG(2)) {
        kprintf("Identifying command line information: %s\n", mb1Data->cmdLine);
    }

    //
    // -- Check for the module information -- we will need this for the additional loaded modules (i.e. the kernel)
    //    ---------------------------------------------------------------------------------------------------------
    if (CHECK_FLAG(3)) {
        uint32_t i;
        Mb1Mods_t *m;

        kprintf("Module information present\n");

        for (m = (Mb1Mods_t *)mb1Data->modAddr, i = 0; i < mb1Data->modCount; i ++) {
            kprintf(" Found Module: %s\n", m[i].modIdent);
            kprintf(" .. Name is at : %p\n", m[i].modIdent);
            kprintf(" .. Start: %p\n", m[i].modStart);
            kprintf(" .. End: %p\n", m[i].modEnd);

            AddModule(m[i].modStart, m[i].modEnd, m[i].modIdent);
        }
    }

    //
    // -- We skip flag 4 since we will never be an a.out-type executable. Check for ELF symbols with flag 5
    //    --------------------------------------------------------------------------------------------------
    if (CHECK_FLAG(5)) {
        // TODO: Implement this feature
    }

    //
    // -- Check for Memory Map data, which we will require
    //    ------------------------------------------------
    if (CHECK_FLAG(6)) {
        kprintf("Setting memory map data\n");
        uint32_t size = mb1Data->mmapLen;
        Mb1MmapEntry_t *entry = (Mb1MmapEntry_t *)(((uint32_t)mb1Data->mmapAddr));
        // -- walk the entry list; mmapSize does not count its own 4 bytes, hence the "+ 4"
        while (size) {
            if (entry->mmapType == 1) {
                kprintf(" iterating in mmap\n");
                kprintf(" entry address is: %p\n", entry);
                kprintf(" entry type is: %x\n", entry->mmapType);
                kprintf(" entry base is: %p : %p\n", (uint32_t)(entry->mmapAddr>>32),
                        (uint32_t)entry->mmapAddr&0xffffffff);
                kprintf(" entry length is: %p : %p\n", (uint32_t)(entry->mmapLength>>32),
                        (uint32_t)entry->mmapLength&0xffffffff);
                kprintf(" entry size is: %p\n", entry->mmapSize);
            }
            // -- only type-1 regions are available RAM; track the highest address seen either way
            if (entry->mmapType == 1) AddAvailMem(entry->mmapAddr, entry->mmapLength);
            uint64_t newLimit = entry->mmapAddr + entry->mmapLength;
            if (newLimit > GetUpperMemLimit()) SetUpperMemLimit(newLimit);
            size -= (entry->mmapSize + 4);
            entry = (Mb1MmapEntry_t *)(((uint32_t)entry) + entry->mmapSize + 4);
        }
        kprintf("Memory Map is complete\n");
    }

    //
    // -- Check for the drives information
    //    --------------------------------
    if (CHECK_FLAG(7)) {
        // TODO: Implement this feature
    }

    //
    // -- Check for the config data information
    //    -------------------------------------
    if (CHECK_FLAG(8)) {
        // TODO: Implmement this feature
    }

    //
    // -- Check for the boot loader name (same %s-on-uint32_t caveat as the command line above)
    //    ------------------------------
    if (CHECK_FLAG(9)) {
        kprintf("Identifying bootloader: %s\n", mb1Data->bootLoaderName);
    }

    //
    // -- Check for the APM table
    //    -----------------------
    if (CHECK_FLAG(10)) {
        // TODO: Implmement this feature
    }

    //
    // -- Check for the VBE table
    //    -----------------------
    if (CHECK_FLAG(11)) {
        // TODO: Implmement this feature
    }

    //
    // -- Check for the framebuffer information (GRUB specific; see
    //    https://www.gnu.org/software/grub/manual/multiboot/multiboot.html)
    //    ------------------------------------------------------------------
    if (CHECK_FLAG(12)) {
        kprintf("Capturing framebuffer information\n");
        SetFrameBufferAddr((uint16_t *)mb1Data->framebufferAddr);
        SetFrameBufferPitch(mb1Data->framebufferPitch);
        SetFrameBufferWidth(mb1Data->framebufferWidth);
        SetFrameBufferHeight(mb1Data->framebufferHeight);
        SetFrameBufferBpp(mb1Data->framebufferBpp);
        SetFrameBufferType((FrameBufferType)mb1Data->framebufferType);

        kprintf("Frame Buffer is at: %p; The pitch is: %p; The height is: %p\n",
                mb1Data->framebufferAddr, mb1Data->framebufferPitch, mb1Data->framebufferHeight);
    }

    kprintf("Done parsing MB1 information\n");

    // -- done with the structure; drop the temporary identity mapping
    MmuUnmapPage(mb1Page);
    MmuUnmapPage(mb1Page + PAGE_SIZE);
}
<|start_filename|>platform/pc/inc/platform-acpi.h<|end_filename|>
//===================================================================================================================
//
// platform-acpi.h -- These are the structures and functions for interacting with ACPI for the kernel
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-05 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#pragma once
#ifndef __HARDWARE_H__
# error "Use #include \"hardware.h\" and it will pick up this file; do not #include this file directly."
#endif
#include "types.h"
#include "printf.h"
#include "mmu.h"
#include "cpu.h"
//
// -- Check if the ACPI is locatable for our kernel
// ---------------------------------------------
//
// -- Check that the ACPI structure lies in the physical window the kernel supports and, if so,
//    map the page that contains it; otherwise panic.
//    @param loc -- physical address of the ACPI structure being checked
//    NOTE: the original condition was `!((loc) >= ACPI_LO) && ((loc) <= ACPI_HI)`, which negated
//    only the lower bound -- addresses ABOVE ACPI_HI sailed through unmapped. The panic message
//    was also garbled ("is not is a supported").
//    ---------------------------------------------
EXPORT LOADER INLINE
void CheckAcpi(archsize_t loc) {
    if (!((loc >= ACPI_LO) && (loc <= ACPI_HI))) {
        CpuPanicPushRegs("PANIC: ACPI is not in a supported location");
    } else {
        // -- map the 4K page containing the structure (frame number = loc >> 12) for kernel use
        MmuMapToFrame(loc & 0xfffff000, loc >> 12, PG_KRN);
    }
}
//
// -- Macro to convert a char[4] into a uint_32
// -----------------------------------------
// -- Macro to convert a char[4] table signature into its little-endian uint32_t form.
//    Each byte is forced through uint8_t before widening so that a platform where plain
//    `char` is signed cannot sign-extend bytes >= 0x80 (the original `s[3]<<24` could);
//    the argument is also parenthesized so any expression may be passed.
//    -----------------------------------------
#define MAKE_SIG(s) (((uint32_t)(uint8_t)(s)[3]<<24)|((uint32_t)(uint8_t)(s)[2]<<16)|((uint32_t)(uint8_t)(s)[1]<<8)|(uint32_t)(uint8_t)(s)[0])
//
// -- this is the signature of the RSDP ("RSD PTR " laid out little-endian), expressed as a uint64_t
//    ----------------------------------------------------------
#define RSDP_SIG ((uint64_t)' '<<56|(uint64_t)'R'<<48|(uint64_t)'T'<<40|(uint64_t)'P'<<32\
        |(uint64_t)' '<<24|(uint64_t)'D'<<16|(uint64_t)'S'<<8|(uint64_t)'R')
//
// -- this is the structure known as the RSDP (Root System Description Pointer)
// -------------------------------------------------------------------------
typedef struct RSDP_t {
    union {
        char signature[8];          // -- should read "RSD PTR " (compare against RSDP_SIG above)
        uint64_t lSignature;        // -- the same 8 bytes viewed as a single little-endian uint64_t
    };
    uint8_t checksum;               // -- checksum byte covering the ACPI 1.0 portion of this table
    char oemid[6];                  // -- OEM identifier (fixed width; not NUL-terminated)
    uint8_t revision;               // -- presumably 0 = ACPI 1.0, 2 = ACPI 2.0+ -- confirm against spec
    uint32_t rsdtAddress;           // -- 32-bit physical address of the RSDT (see AcpiReadRsdt)
    uint32_t length;                // -- total structure length (2.0+ field)
    uint64_t xsdtAddress;           // -- 64-bit physical address of the XSDT (see AcpiReadXsdt)
    uint8_t extendedChecksum;       // -- checksum byte covering the extended (2.0+) fields
    uint8_t reserved[3];            // -- pads the structure out (layout is packed, 16-byte aligned)
} __attribute__((packed, aligned(16))) RSDP_t;
//
// -- This function will locate the RSDP if available and note it in the hardware discovery table
//    (returns a pointer to the RSDP, or presumably NULL when none is found -- confirm in the .cc)
//    -------------------------------------------------------------------------------------------
EXTERN_C EXPORT LOADER
RSDP_t *AcpiFindRsdp(void);
//
// -- This is the common structure members that occur in nearly all of the acpi tables (except rsdp)
// ----------------------------------------------------------------------------------------------
// -- Common header fields shared by nearly all ACPI tables (except the RSDP). Kept as a macro
//    rather than a base struct so the members expand inline and every table stays packed.
//    NOTE: no comments may appear inside the macro body -- they would break the '\' continuations.
#define ACPI_DESCRIPTION_HEADER \
    union { \
        char signature[4]; \
        uint32_t lSignature; \
    }; \
    uint32_t length; \
    uint8_t revision; \
    uint8_t checksum; \
    char oemid[6]; \
    uint64_t oemTableId; \
    uint32_t oemRevision; \
    uint32_t creatorId; \
    uint32_t creatorRevision
//
// -- Check the table to see if it is what we expect; note that this memory must be mapped before calling.
//    The unnamed uint32_t is presumably the expected signature value (cf. MAKE_SIG) -- confirm in the .cc.
//    ---------------------------------------------------------------------------------------------------
EXTERN_C EXPORT LOADER
bool AcpiCheckTable(archsize_t locn, uint32_t);
//
// -- get the table signature (and check its valid); return 0 if invalid
//    ------------------------------------------------------------------
EXTERN_C EXPORT LOADER
uint32_t AcpiGetTableSig(archsize_t loc);
//
// -- This is the Root System Description Table (RSDT)
// ------------------------------------------------
typedef struct RSDT_t {
    ACPI_DESCRIPTION_HEADER;        // -- common ACPI table header (signature presumably "RSDT")
    uint32_t entry[0];              // there may be 0 or several of these; length must be checked
} __attribute__((packed)) RSDT_t;
//
// -- read the rsdt table
//    -------------------
EXTERN_C EXPORT LOADER
bool AcpiReadRsdt(archsize_t loc);
//
// -- This is the Extended System Description Table (XSDT) -- the 64-bit-pointer counterpart of
//    the RSDT above (the original comment said "RSDT" here; that was a copy/paste slip)
//    ------------------------------------------------
typedef struct XSDT_t {
    ACPI_DESCRIPTION_HEADER;        // -- common ACPI table header (signature presumably "XSDT")
    uint64_t entry[0];              // there may be 0 or several of these; length must be checked
} __attribute__((packed)) XSDT_t;
//
// -- read the xsdt table
//    -------------------
EXTERN_C EXPORT LOADER
bool AcpiReadXsdt(archsize_t loc);
//
// -- The Multiple APIC Description Table (MADT)
//    ------------------------------------------
typedef struct MADT_t {
    ACPI_DESCRIPTION_HEADER;        // -- common ACPI table header
    uint32_t localIntCtrlAddr;      // -- physical address of the local interrupt controller
    uint32_t flags;                 // -- MADT flags (bit meanings per the ACPI spec; not decoded here)
    uint8_t intCtrlStructs[0];      // -- variable-length run of entries; each starts with a type byte
                                    //    (see MadtIcType) and a length byte -- bounded by `length`
} __attribute__((packed)) MADT_t;
//
// -- Read an ACPI MADT table
//    -----------------------
EXTERN_C EXPORT LOADER
void AcpiReadMadt(archsize_t loc);
//
// -- These are the types of Interrupt Controller Structure Types we can have
// -----------------------------------------------------------------------
// -- Values of the leading `type` byte of each entry in MADT_t.intCtrlStructs. Only the first
//    few have matching structure definitions below.
typedef enum {
    MADT_PROCESSOR_LOCAL_APIC = 0,          // -- see MadtLocalApic_t
    MADT_IO_APIC = 1,                       // -- see MadtIoApic_t
    MADT_INTERRUPT_SOURCE_OVERRIDE = 2,     // -- see MadtIntSrcOverride_t
    MADT_NMI_SOURCE = 3,                    // -- see MadtMNISource_t
    MADT_LOCAL_APIC_NMI = 4,                // -- see MadtLocalApicNMI_t
    MADT_LOCAL_APIC_ADDRESS_OVERRIDE = 5,
    MADT_IO_SAPIC = 6,
    MADT_LOCAL_SAPIC = 7,
    MADT_PLATFORM_INTERRUPT_SOURCES = 8,
    MADT_PROCESSOR_LOCAL_X2APIC = 9,
    MADT_LOCAL_X2APIC_NMI = 0xa,
    MADT_GIC = 0xb,
    MADT_GICD = 0xc,
} MadtIcType;
//
// -- Local Processor APIC structure
// ------------------------------
typedef struct MadtLocalApic_t {
    uint8_t type;               // -- MADT_PROCESSOR_LOCAL_APIC
    uint8_t len;                // -- length of this entry in bytes
    uint8_t procId;             // -- ACPI processor id
    uint8_t apicId;             // -- this processor's local APIC id
    uint32_t flags;             // 0b00000001 means the processor is enabled
} __attribute__((packed)) MadtLocalApic_t;
//
// -- I/O APIC Structure
//    ------------------
typedef struct MadtIoApic_t {
    uint8_t type;               // -- MADT_IO_APIC
    uint8_t len;                // -- length of this entry in bytes
    uint8_t apicId;             // -- this I/O APIC's id
    uint8_t reserved;
    uint32_t ioApicAddr;        // -- physical address of this I/O APIC's registers
    uint32_t gsiBase;           // -- first global system interrupt this I/O APIC handles
} __attribute__((packed)) MadtIoApic_t;
//
// -- Interrupt Source Override Structure
//    -----------------------------------
typedef struct MadtIntSrcOverride_t {
    uint8_t type;               // -- MADT_INTERRUPT_SOURCE_OVERRIDE
    uint8_t len;                // -- length of this entry in bytes
    uint8_t bus;                // -- fixed: 0 = ISA
    uint8_t source;             // -- bus-relative interrupt source being overridden
    uint32_t gsInt;             // -- the global system interrupt it maps to
    uint32_t flags;             // -- polarity/trigger flags (per the ACPI spec; not decoded here)
} __attribute__((packed)) MadtIntSrcOverride_t;
//
// -- NMI Interrupt Source Structure
//    (note: the identifier is spelled "MNI" -- a long-standing typo kept for compatibility)
//    ------------------------------
typedef struct MadtMNISource_t {
    uint8_t type;               // -- MADT_NMI_SOURCE
    uint8_t len;                // -- length of this entry in bytes
    uint16_t flags;             // -- polarity/trigger flags
    uint32_t gsInt;             // -- global system interrupt that is wired as NMI
} __attribute__((packed)) MadtMNISource_t;
//
// -- Local APIC NMI Structure
//    ------------------------
typedef struct MadtLocalApicNMI_t {
    uint8_t type;               // -- MADT_LOCAL_APIC_NMI
    uint8_t len;                // -- length of this entry in bytes
    uint8_t procId;             // -- 0xff is all procs
    uint16_t flags;             // -- polarity/trigger flags
    uint8_t localLINT;          // -- which local APIC LINT pin (0 or 1) carries the NMI
} __attribute__((packed)) MadtLocalApicNMI_t;
<|start_filename|>arch/arm/inc/arch-cpu-ops.h<|end_filename|>
//===================================================================================================================
//
// arch-cpu-ops.h -- Some specific CPU operations that will happen on the armv7
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Feb-09 Initial v0.5.0g ADCL Initial version
//
//===================================================================================================================
#ifndef __CPU_H__
# error "Do not include 'arch-cpu-ops.h' directly; include 'cpu.h' instead, which will pick up this file."
#endif
#include "types.h"
//
// -- These are the working marcos that will output the inline functions
// ------------------------------------------------------------------
// -- Builds the mrc/mcr operand string "pN,<op1>,%0,<crA>,<crB>,<op2>" with the preprocessor
//    stringize operator; used only by the CP_REG_* macros below.
#define CP_ADDR(cp,cr1,op1,cr2,op2) #cp "," #op1 ",%0," #cr1 "," #cr2 "," #op2

// -- Emit two readers for a cp15 register: Read<name>() is `volatile` (never elided or
//    reordered by the compiler) while Read<name>Relaxed() may be cached/moved.
//    NOTE: no comments inside the macro body -- they would break the '\' continuations.
#define CP_REG_RO(name,cp,cr1,op1,cr2,op2) \
    \
    EXTERN_C INLINE \
    uint32_t Read##name(void) { \
        uint32_t _val; \
        __asm__ volatile("mrc " CP_ADDR(cp,cr1,op1,cr2,op2) : "=r" (_val)); \
        return _val; \
    } \
    \
    EXTERN_C INLINE \
    uint32_t Read##name##Relaxed(void) { \
        uint32_t _val; \
        __asm__("mrc " CP_ADDR(cp,cr1,op1,cr2,op2) : "=r" (_val)); \
        return _val; \
    } \

// -- Emit two writers: Write<name>() follows the mcr with an isb so the change takes effect
//    before subsequent instructions; Write<name>Relaxed() omits the isb.
#define CP_REG_WO(name,cp,cr1,op1,cr2,op2) \
    \
    EXTERN_C INLINE \
    void Write##name(uint32_t val) { \
        __asm__ volatile("mcr " CP_ADDR(cp,cr1,op1,cr2,op2) :: "r" (val)); \
        __asm__ volatile("isb"); \
    } \
    \
    EXTERN_C INLINE \
    void Write##name##Relaxed(uint32_t val) { \
        __asm__ volatile("mcr " CP_ADDR(cp,cr1,op1,cr2,op2) :: "r" (val)); \
    } \

// -- Emit the full read/write accessor set for a read-write register.
#define CP_REG_RW(name,cp,cr1,op1,cr2,op2) \
    CP_REG_RO(name,cp,cr1,op1,cr2,op2) \
    CP_REG_WO(name,cp,cr1,op1,cr2,op2) \

//
// == These are the cp15 registers that are available to be read/written
// ==================================================================
//
// -- C0 :: Identification Registers
// ------------------------------
// -- Each invocation below expands (via CP_REG_RO/WO/RW above) into inline accessors named
//    after the register, e.g. CP_REG_RO(CTR,...) yields ReadCTR() and ReadCTRRelaxed().
CP_REG_RO(CTR, p15,c0,0,c0,1) // Cache Type Register
CP_REG_RO(MPIDR,p15,c0,0,c0,5) // Multiprocessor-Affinity Register
CP_REG_RO(CLIDR,p15,c0,1,c0,1) // Cache Level ID Register
//
// -- C1 :: System Control Registers
//    ------------------------------
CP_REG_RW(SCTLR,p15,c1,0,c0,0) // System Control Register
CP_REG_RW(CPACR,p15,c1,0,c0,2) // Coprocessor Access Control Register
CP_REG_RW(NSACR,p15,c1,0,c1,2) // Non-Secure Access Control Register
CP_REG_RW(HCPTR,p15,c1,4,c1,2) // Hyp Coprocessor Trap Register
//
// -- C2 & C3 :: Memory Protection and Control Registers
//    --------------------------------------------------
CP_REG_RW(TTBR0,p15,c2,0,c0,0) // Translation Table Base Register 0
CP_REG_RW(TTBR1,p15,c2,0,c0,1) // Translation Table Base Register 1
//
// -- C5 & C6 :: Memory System Fault Registers
//    ----------------------------------------
CP_REG_RW(DFSR,p15,c5,0,c0,0) // The Data Fault Status Register
CP_REG_RW(IFSR,p15,c5,0,c0,1) // THe Instruction Fault Status Register
CP_REG_RW(DFAR,p15,c6,0,c0,0) // The Data Fault Address Register
CP_REG_RW(IFAR,p15,c6,0,c0,2) // The Instruction Fault Address Register
//
// -- C7 :: Cache Maintenance, Address Translation, and other Functions
//    (write-only operations; the leading '_' names are wrapped by the
//    WriteXXX()/cache macros further down so they can be compiled out)
//    -----------------------------------------------------------------
CP_REG_WO(_ICIALLUIS,p15,c7,0,c1,0) // ICIALLUIS Branch Predictor
CP_REG_WO(_BPIALLIS,p15,c7,0,c1,6) // BPIALLIS Branch Predictor
CP_REG_WO(_ICIALLU,p15,c7,0,c5,0) // ICIALLU Branch Predictor
CP_REG_WO(_ICIMVAU,p15,c7,0,c5,1) // ICIMVAU Branch Predictor
CP_REG_WO(_BPIALL,p15,c7,0,c5,6) // BPIALL Branch Predictor
CP_REG_WO(_BPIMVA,p15,c7,0,c5,7) // BPIMVA Branch Predictor
CP_REG_WO(_DCIMVAC,p15,c7,0,c6,1) // DCIMVAC Cache Maintnenance
CP_REG_WO(_DCOSW,p15,c7,0,c6,2) // DCOSW Cache Maintenance
CP_REG_WO(_DCCMVAC,p15,c7,0,c10,1) // DCCMVAC Cache Maintenance
CP_REG_WO(_DCCSW,p15,c7,0,c10,2) // DCCSW Cache Maintenance
CP_REG_WO(_DCCMVAU,p15,c7,0,c11,1) // DCCMVAU Cache Maintenance
CP_REG_WO(_DCCIMVAC,p15,c7,0,c14,1) // DCCIMVAC Cache Maintenance
CP_REG_WO(_DCCISW,p15,c7,0,c14,2) // DCCISW Cache Maintenance
//
// -- C8 :: TLB Maintenance Functions
//    -------------------------------
CP_REG_WO(TLBIMVAA,p15,c8,0,c7,3) // TLB Invalidate by MVA ALL ASID
//
// -- C9 :: TCM Control and Performance Monitors
//    ------------------------------------------
//
// -- C10 :: Memory Mapping and TLB Control Registers
//    -----------------------------------------------
//
// -- C11 :: TCM DMA Registers
//    ------------------------
//
// -- C12 :: Security Extensions Registers
//    ------------------------------------
CP_REG_RW(VBAR,p15,c12,0,c0,0) // Vector Base Address Register
//
// -- C13 :: Process, Context, and Thread ID Registers
//    ------------------------------------------------
CP_REG_RW(TPIDRURO,p15,c13,0,c0,3) // User Read-Only Thread ID Register (current process)
CP_REG_RW(TPIDRPRW,p15,c13,0,c0,4) // Privilege Read-Write Thread ID Register (current CPU)
//
// -- C14 :: Generic Timer Extensions
// -------------------------------
//
// -- branch prediction maintenance
// -----------------------------
#if defined(ENABLE_BRANCH_PREDICTOR) && ENABLE_BRANCH_PREDICTOR == 1
// -- branch predictor enabled: friendly names forward to the raw cp15 writers above
# define WriteBPIMVA(mem) Write_BPIMVA(mem)
# define WriteBPIALL() Write_BPIALL(0)
# define WriteBPIALLIS() Write_BPIALLIS(0)
#else
// -- branch predictor maintenance compiled out: the calls vanish entirely
# define WriteBPIMVA(mem)
# define WriteBPIALL()
# define WriteBPIALLIS()
#endif
//
// -- Synchronization Barriers
//    ------------------------
#define SoftwareBarrier() __asm volatile("":::"memory")                 // -- compiler-only barrier
#define MemoryBarrier() __sync_synchronize()                            // -- full hardware barrier
#define EntireSystemMemoryBarrier() __asm volatile("dmb sy":::"memory") // -- dmb across the full system domain
#define MemoryResynchronization() __asm volatile("dsb":::"memory")
#define ClearInsutructionPipeline() __asm volatile("isb":::"memory")    // -- (sic: name misspelled project-wide)

// -- dsb + sev: make prior stores visible, then wake cores waiting in wfe
EXTERN_C INLINE
void SEV(void) { __asm volatile("dsb\nsev\n"); }

EXTERN_C INLINE
void DMB(void) { __asm volatile("dmb\n"); }
//
// -- a lightweight function to halt the cpu (wfi = wait for interrupt)
//    --------------------------------------
EXTERN_C INLINE
void HaltCpu(void) { __asm("wfi"); }
//
// -- cache maintenance functions
// ---------------------------
#if defined(ENABLE_CACHE) && ENABLE_CACHE == 1
// -- cache enabled: friendly names forward to the raw cp15 write accessors
# define WriteDCIMVAC(mem) Write_DCIMVAC(mem)
# define WriteDCOSW(sw) Write_DCOSW(sw)
# define WriteDCCMVAC(mem) Write_DCCMVAC(mem)
# define WriteDCCSW(sw) Write_DCCSW(sw)
# define WriteDCCMVAU(mem) Write_DCCMVAU(mem)
# define WriteDCCIMVAC(mem) Write_DCCIMVAC(mem)
# define WriteDCCISW(sw) Write_DCCISW(sw)
# define WriteICIALLUIS() Write_ICIALLUIS(0)
# define WriteICIALLU() Write_ICIALLU(0)
# define WriteICIMVAU(mem) Write_ICIMVAU(mem)

//
// -- Clean (write back) every data-cache line covering [mem, mem+len].
//    The start is aligned down to a cache-line boundary; barriers bracket the loop so the
//    maintenance is ordered against surrounding accesses.
//    -------------------------------------
EXTERN_C INLINE
void CleanCache(archsize_t mem, size_t len) {
    MemoryBarrier();

    archsize_t loc = mem & ~(CACHE_LINE_SIZE - 1);
    archsize_t end = mem + len;

    for ( ; loc <= end; loc += CACHE_LINE_SIZE) {
        WriteDCCMVAC(loc);
    }

    MemoryBarrier();
}

//
// -- Invalidate every data-cache line covering [mem, mem+len] (DCIMVAC) so subsequent reads
//    come from memory. Same alignment and barrier discipline as CleanCache() above.
//    -------------------------------------------------
EXTERN_C INLINE
void InvalidateCache(archsize_t mem, size_t len)
{
    MemoryBarrier();

    archsize_t loc = mem & ~(CACHE_LINE_SIZE - 1);
    archsize_t end = mem + len;

    for ( ; loc <= end; loc += CACHE_LINE_SIZE) {
        WriteDCIMVAC(loc);
    }

    MemoryBarrier();
}

#else

// -- cache disabled: every maintenance operation becomes a no-op
# define WriteDCIMVAC(mem)
# define WriteDCOSW(sw)
# define WriteDCCMVAC(mem)
# define WriteDCCSW(sw)
# define WriteDCCMVAU(mem)
# define WriteDCCIMVAC(mem)
# define WriteDCCISW(sw)
# define WriteICIALLUIS()
# define WriteICIALLU()
# define WriteICIMVAU(mem)
# define CleanCache(mem,len)
// -- FIX: this was `INVALIDATE_CACHE(mem,len)`, which left every call to InvalidateCache()
//    unresolved when ENABLE_CACHE == 0 (the x86 port defines InvalidateCache in both branches)
# define InvalidateCache(mem,len)
#endif
//
// -- This is a well-defined sequence to clean up after changing the translation tables
// ---------------------------------------------------------------------------------
EXTERN_C INLINE
void InvalidatePage(archsize_t vma) {
    // -- order all prior memory accesses before the TLB operation
    MemoryBarrier();
    // -- drop every TLB entry for this page, across all ASIDs (vma truncated to its 4K page base)
    WriteTLBIMVAA(vma & 0xfffff000);
    // -- flush the branch predictor (compiles to nothing when ENABLE_BRANCH_PREDICTOR is off)
    WriteBPIALL();
    MemoryBarrier();
    // -- isb: discard any instructions fetched under the stale translation
    ClearInsutructionPipeline();
}
<|start_filename|>modules/kernel/inc/msgq.h<|end_filename|>
//===================================================================================================================
//
// msgq.h -- This is the prototypes for message queues
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This is the kernel implementation of message queues.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- --------------------------------------------------------------------------
// 2020-Apr-09 Initial v0.6.1a ADCL Initial version
//
//===================================================================================================================
#pragma once
#define __MSGQ_H__
#include "types.h"
//
// -- These are the possible statuses of an individual queue
// ------------------------------------------------------
typedef enum {
    MSGQ_INITIALIZING = 0,      // -- queue is being constructed and is not yet usable
    MSGQ_ALLOCATED = 1,         // -- queue is live and available
    MSGQ_DELETE_PENDING = 2,    // -- queue is marked to be destroyed once its references drain
} MsgqStatus_t;
//
// -- Get a text description of the status
// ------------------------------------
EXTERN_C INLINE
const char *MsgqStatName(int s) {
    // -- map a MsgqStatus_t value onto a short human-readable tag for debug output
    switch (s) {
        case MSGQ_INITIALIZING:   return "INIT";
        case MSGQ_ALLOCATED:      return "ALLOC";
        case MSGQ_DELETE_PENDING: return "DLT PEND";
        default:                  return "UNKNOWN";
    }
}
//
// -- This is one individual message queue
// ------------------------------------
typedef struct MessageQueue_t {
    AtomicInt_t status;         // -- The current status of this queue; backed by MsgqStatus_t
    ListHead_t::List_t list;    // -- This list entry on the msgqList
    QueueHead_t queue;          // -- The queue of msg, has its own lock; queue has its own count
    ListHead_t procList;        // -- This is the list of processes with this queue open
    ListHead_t waiting;         // -- the list of processes waiting on this queue
} MessageQueue_t;
//
// -- This is list of all message queues
//    ----------------------------------
typedef ListHead_t MessageQueueList_t;  // -- contains its own lock
//
// -- This is an actual message in the queue
//    --------------------------------------
typedef struct Message_t {
    ListHead_t::List_t list;    // -- how the message is placed in queue
    size_t payloadSize;         // -- this is the size of the payload (specifically the struct below)
    struct {
        long type;              // -- Posix message type -- must be positive
        uint8_t data[0];        // -- Additional data (zero-length trailing array; storage follows the struct)
    } payload;
} Message_t;
//
// -- This is the pointer to the message queue structure
// --------------------------------------------------
EXTERN EXPORT KERNEL_BSS
MessageQueueList_t msgqList;
//
// -- Initialize the global Message Queue Structures
// ----------------------------------------------
EXTERN_C EXPORT LOADER
void MessageQueueInit(void);
//
// -- Allocate and create a new message queue
// ---------------------------------------
EXTERN_C EXPORT KERNEL
MessageQueue_t *MessageQueueCreate(void);
//
// -- Release the reference to this message queue; marking for deletion when the reference count is 0
// -----------------------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void MessageQueueRelease(MessageQueue_t *msgq);
//
// -- Mark the message queue for deletion
// -----------------------------------
EXTERN_C INLINE
void MessageQueueDelete(MessageQueue_t *msgq) {
    // -- flag the queue as going away, then drop this caller's reference; the actual teardown
    //    happens in MessageQueueRelease() once the reference count reaches 0
    AtomicSet(&msgq->status, MSGQ_DELETE_PENDING);
    MessageQueueRelease(msgq);
}
//
// -- This is the working function to send a message to a message queue
// -----------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void _MessageQueueSend(MessageQueue_t *msgq, long type, size_t sz, void *payload, bool lock);
//
// -- This is the normal API for sending a message
// --------------------------------------------
EXTERN_C INLINE
void MessageQueueSend(MessageQueue_t *msgq, long type, size_t sz, void *payload) {
    // -- public send API: defers to the worker with lock = true (presumably: take the
    //    queue's lock; confirm against _MessageQueueSend's implementation)
    _MessageQueueSend(msgq, type, sz, payload, true);
}
//
// -- Receive a message from a message queue, optionally blocking
// -----------------------------------------------------------
EXTERN_C EXPORT KERNEL
bool MessageQueueReceive(MessageQueue_t *msgq, long *type, size_t sz, void *payload, bool block);
<|start_filename|>platform/inc/pic.h<|end_filename|>
//===================================================================================================================
//
// pic.h -- These are the hardware abstractions for interacting with the Programmable Interrupt Controller
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#pragma once
#ifndef __PIC_H__
# define __PIC_H__
#endif
#include "types.h"
#include "hardware.h"
//
// -- A forward declaration
// ---------------------
struct PicDevice_t;
//
// -- get the platform-specific definitions
// -------------------------------------
#if __has_include("platform-pic.h")
# include "platform-pic.h"
#endif
#if __has_include("platform-apic.h")
# include "platform-apic.h"
#endif
//
// -- This is a control structure for the pic, all functions will be registered in this structure
// -------------------------------------------------------------------------------------------
typedef struct PicDevice_t {
    GenericDevice_t device;     // -- common device bookkeeping
    volatile bool ipiReady;     // -- set when the controller is able to deliver IPIs
    void (*PicInit)(PicDevice_t *, const char *);       // -- initialize the controller
    isrFunc_t (*PicRegisterHandler)(PicDevice_t *, Irq_t, int, isrFunc_t);
                                // -- install a handler for an IRQ/vector; returns an isrFunc_t
                                //    (presumably the handler previously installed)
    void (*PicMaskIrq)(PicDevice_t *, Irq_t);           // -- block delivery of an IRQ
    void (*PicUnmaskIrq)(PicDevice_t *, Irq_t);         // -- allow delivery of an IRQ
    void (*PicEoi)(PicDevice_t *, Irq_t);               // -- signal end-of-interrupt
    int (*PicDetermineIrq)(PicDevice_t *);              // -- identify which IRQ is being serviced
    void (*PicBroadcastIpi)(PicDevice_t *, int);        // -- broadcast an inter-processor interrupt
    void (*PicBroadcastInit)(PicDevice_t *, uint32_t);  // -- broadcast an INIT IPI (SMP bring-up)
    void (*PicBroadcastSipi)(PicDevice_t *, uint32_t, archsize_t);
                                // -- broadcast a startup IPI with a start address (SMP bring-up)
} PicDevice_t;
//
// -- The global PIC control structure holding pointers to all the proper functions.
// ------------------------------------------------------------------------------
EXTERN KERNEL_DATA
PicDevice_t *picControl;
//
// -- These are the common interface functions we will use to interact with the PIC. These functions are
// not safe in that they will not check for nulls before calling the function. Therefore, caller beware!
// ------------------------------------------------------------------------------------------------------
// -- Thin forwarding wrappers over the PicDevice_t function pointers (no NULL checks -- see above).
EXPORT INLINE
void PicInit(PicDevice_t *dev, const char *name) { dev->PicInit(dev, name); }

EXPORT INLINE
isrFunc_t PicRegisterHandler(PicDevice_t *dev, Irq_t irq, int vector, isrFunc_t handler) {
    return dev->PicRegisterHandler(dev, irq, vector, handler); }

EXPORT INLINE
void PicUnmaskIrq(PicDevice_t *dev, Irq_t irq) { dev->PicUnmaskIrq(dev, irq); }

EXPORT INLINE
void PicMaskIrq(PicDevice_t *dev, Irq_t irq) { dev->PicMaskIrq(dev, irq); }

EXPORT INLINE
void PicEoi(PicDevice_t *dev, Irq_t irq) { dev->PicEoi(dev, irq); }

// -- NOTE(review): the function pointer returns int but this wrapper returns archsize_t --
//    the result is implicitly widened; confirm that is intentional
EXPORT INLINE
archsize_t PicDetermineIrq(PicDevice_t *dev) { return dev->PicDetermineIrq(dev); }

// -- NOTE(review): `return` of a void expression -- legal, but inconsistent with the other void wrappers
EXPORT INLINE
void PicBroadcastIpi(PicDevice_t *dev, int ipi) { return dev->PicBroadcastIpi(dev, ipi); }
//
// -- Pick the correct PIC given what we have available
// -------------------------------------------------
EXTERN_C EXPORT LOADER
PicDevice_t *PicPick(void);
<|start_filename|>arch/x86/inc/arch-cpu-ops.h<|end_filename|>
//===================================================================================================================
//
// arch-cpu-ops.h -- Some specific CPU operations that will happen on the x86
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Feb-16 Initial v0.5.0g ADCL Initial version
//
//===================================================================================================================
#ifndef __CPU_H__
# error "Do not include 'arch-cpu-ops.h' directly; include 'cpu.h' instead, which will pick up this file."
#endif
#include "types.h"
//
// -- CPUID function -- lifted from: https://wiki.osdev.org/CPUID
// issue a single request to CPUID. Fits 'intel features', for instance note that even if only "eax" and "edx"
// are of interest, other registers will be modified by the operation, so we need to tell the compiler about it.
// -------------------------------------------------------------------------------------------------------------
EXPORT LOADER INLINE
void CPUID(int code, uint32_t *a, uint32_t *b, uint32_t *c, uint32_t *d) {
    // -- eax selects the leaf; all four result registers are stored through the out-pointers
    __asm volatile("cpuid\n" : "=a"(*a),"=b"(*b),"=c"(*c),"=d"(*d) : "a"(code)); }
//
// -- Model Specific Registers
//    ------------------------
EXPORT LOADER INLINE
uint64_t RDMSR(uint32_t r) {
    // -- rdmsr: ecx selects the MSR, result returns in edx:eax
    //    NOTE(review): the "%ebx" clobber looks unnecessary (rdmsr writes only eax/edx) but is harmless
    uint32_t _lo, _hi;
    __asm volatile("rdmsr\n" : "=a"(_lo),"=d"(_hi) : "c"(r) : "%ebx");
    return (((uint64_t)_hi) << 32) | _lo;
}

EXPORT LOADER INLINE
void WRMSR(uint32_t r, uint64_t v) {
    // -- wrmsr: write edx:eax into the MSR selected by ecx
    uint32_t _lo = (uint32_t)(v & 0xffffffff);
    uint32_t _hi = (uint32_t)(v >> 32);
    __asm volatile("wrmsr\n" : : "c"(r),"a"(_lo),"d"(_hi));
}
//
// -- Synchronization Barriers
// ------------------------
#define SoftwareBarrier() __asm volatile("":::"memory")                 // -- compiler-only barrier
#define MemoryBarrier() __sync_synchronize()                            // -- full hardware barrier
#define EntireSystemMemoryBarrier() __asm volatile("wbinvd":::"memory") // -- write back + invalidate all caches
#define MemoryResynchronization() __asm volatile("wbinvd":::"memory")
// -- reloading cr3 is a serializing operation (also flushes non-global TLB entries);
//    (sic: the name is misspelled project-wide, matching the ARM port)
#define ClearInsutructionPipeline() __asm volatile("mov %%cr3,%%eax\n mov %%eax,%%cr3":::"memory","%eax")
//
// -- a lightweight function to halt the cpu until the next interrupt
//    --------------------------------------
EXPORT INLINE
void HaltCpu(void) { __asm("hlt"); }
//
// -- cache maintenance functions
// ---------------------------
#if defined(ENABLE_CACHE) && ENABLE_CACHE == 1
// -- the x86 port has no range-based maintenance; a heavyweight wbinvd (via
//    MemoryResynchronization) stands in for both operations. mem/len are accepted
//    for interface parity with the ARM port but are unused.
EXTERN_C EXPORT INLINE
void CleanCache(archsize_t mem, size_t len) { MemoryResynchronization(); }

EXTERN_C EXPORT INLINE
void InvalidateCache(archsize_t mem, size_t len) { MemoryResynchronization(); }
#else
// -- cache disabled: keep the functions (so callers still type-check) but make them no-ops
EXTERN_C EXPORT INLINE
void CleanCache(archsize_t mem, size_t len) { }

EXTERN_C EXPORT INLINE
void InvalidateCache(archsize_t mem, size_t len) { }
#endif
<|start_filename|>modules/kernel/src/heap/HeapSplitAt.cc<|end_filename|>
//===================================================================================================================
//
// HeapSplitAt.cc -- Split an entry to the provided adjustedSize
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Split an entry to the provided adjustToSize. This includes adding a new entry for the newly created hole into
// the heap list.
//
// +------------------------------------------------------------------+
// | The entry before splitting. Split will occur at some location |
// | within the entry. |
// +------------------------------------------------------------------+
//
// +------------------+-----------------------------------------------+
// | The new header | A brand new entry inserted into the |
// | and block of | ordered list for the remaining free memory. |
// | memory. | |
// +------------------+-----------------------------------------------+
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-04 Initial version
// 2012-Sep-16 Leveraged from Century
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2018-Jun-01 Initial 0.1.0 ADCL Copied this file from century32 tp century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
//
// -- Split a block to the indicated size
// -----------------------------------
//
// -- Split the free block referenced by `entry` so its first adjustToSize bytes become an
//    allocated block (returned) and the remainder becomes a new hole that is re-inserted into
//    the ordered free list. adjustToSize is assumed to already include header/footer overhead
//    and to be smaller than the entry's current size (not re-checked here -- TODO confirm the
//    caller guarantees this).
//    -----------------------------------
KHeapHeader_t *HeapSplitAt(OrderedList_t *entry, size_t adjustToSize)
{
    KHeapHeader_t *newHdr, *oldHdr;
    KHeapFooter_t *newFtr, *oldFtr;
    size_t newSize;

    if (!assert(entry != NULL)) HeapError("NULL entry in HeapSplitAt()", "");
    HeapValidateHdr(entry->block, "HeapSplitAt()");
    HeapValidatePtr("HeapSplitAt()");

    // initialize the working variables
    oldHdr = entry->block;
    // -- footer of the original (whole) block; it will end up closing the new hole
    oldFtr = (KHeapFooter_t *)((byte_t *)oldHdr + oldHdr->size - sizeof(KHeapFooter_t));
    // -- the hole's header starts adjustToSize bytes into the original block...
    newHdr = (KHeapHeader_t *)((byte_t *)oldHdr + adjustToSize);
    // -- ...and the allocated front half gets its new footer immediately below that header
    newFtr = (KHeapFooter_t *)((byte_t *)newHdr - sizeof(KHeapFooter_t));
    newSize = oldHdr->size - adjustToSize;      // -- bytes remaining for the hole

    HeapReleaseEntry(entry); // release entry; will replace with back half

    // size the allocated block properly
    oldHdr->size = adjustToSize;
    oldHdr->_magicUnion.isHole = 0;             // -- front half is now allocated
    newFtr->hdr = oldHdr;                       // -- footer back-links to its header
    newFtr->_magicUnion.magicHole = oldHdr->_magicUnion.magicHole;

    // create the new hole and add it to the list
    newHdr->_magicUnion.magicHole = HEAP_MAGIC;
    newHdr->_magicUnion.isHole = 1;
    newHdr->size = newSize;
    oldFtr->_magicUnion.magicHole = newHdr->_magicUnion.magicHole;
    oldFtr->hdr = newHdr;                       // -- original footer now belongs to the hole
    (void)HeapNewListEntry(newHdr, 1);          // -- re-insert the hole into the ordered list

    // make sure we didn't make a mess
    HeapValidateHdr(oldHdr, "HeapSplitAt [oldHdr]");
    HeapValidateHdr(newHdr, "HeapSplitAt [newHdr]");

    // return the header to the allocated block
    return oldHdr;
}
<|start_filename|>modules/kernel/src/kprintf.cc<|end_filename|>
//===================================================================================================================
// kprintf.cc -- Write a formatted string to the serial port COM1 (like printf)
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Write a formatted string to the serial port. This function works similar to `printf()` from the C runtime
// library. I used to have a version publicly available, but I think it better to have a purpose-built version.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Jul-08 Initial 0.1.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "serial.h"
#include "spinlock.h"
#include "printf.h"
//
// -- This is the spinlock that is used to ensure that only one process can output to the serial port at a time
// ---------------------------------------------------------------------------------------------------------
EXPORT KERNEL_DATA Spinlock_t kprintfLock = {0};
//
// -- Output will be disabled until we have everything ready -- allowing for debug code in mixed-use functions
//    --------------------------------------------------------------------------------------------------------
EXPORT KERNEL_DATA bool kPrintfEnabled = false;
//
// -- Several flags
//    (collected into a per-conversion bitmask while scanning a format specifier; note that
//    kprintf() below currently honors only ZEROPAD and LARGE -- the rest are reserved)
//    -------------
enum {
    ZEROPAD = 1<<0, /* pad with zero */
    SIGN = 1<<1, /* unsigned/signed long */
    PLUS = 1<<2, /* show plus */
    SPACE = 1<<3, /* space if plus */
    LEFT = 1<<4, /* left justified */
    SPECIAL = 1<<5, /* 0x */
    LARGE = 1<<6, /* use 'ABCDEF' instead of 'abcdef' */
};
//
// -- Used for Hex numbers (lower- and upper-case digit lookup tables)
//    --------------------
static const char *digits = "0123456789abcdefghijklmnopqrstuvwxyz";
static const char *upper_digits = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";
//
// -- This is a printf()-like function to print to the serial port
// ------------------------------------------------------------
//
// -- This is a printf()-like function to print to the serial port.
//    Supported conversions: %% %c %s %p/%P %x/%X %d %u, with an optional leading '0'
//    (zero-pad, honored by %x/%X) and a parsed-but-ignored 'l' length modifier.
//    Returns the number of characters written.
//    Fixes over the original: %c now counts its character in `printed`; %d no longer
//    negates INT_MIN (undefined behavior); the hex digit table is reset for every
//    conversion so a %P/%X no longer leaves later conversions printing upper-case; and
//    the per-conversion flag word no longer shadows the spinlock's saved-interrupt `flags`.
//    ------------------------------------------------------------
int kprintf(const char *fmt, ...)
{
    // -- until the serial driver is initialized, suppress all output
    if (!kPrintfEnabled) return 0;

    // -- `flags` holds the saved interrupt state; the whole body runs under kprintfLock
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(kprintfLock) {
        int printed = 0;
        va_list args;
        va_start(args, fmt);

        for ( ; *fmt; fmt ++) {
            // -- for any character not a '%', just print the character
            if (*fmt != '%') {
                SerialPutChar(&debugSerial, *fmt);
                printed ++;
                continue;
            }

            // -- we know the character is a '%' char at this point
            fmt ++;
            if (!*fmt) goto exit;

            int fmtDefn = 1;            // -- characters consumed past the '%' (to back up on error)
            int fmtFlags = 0;           // -- per-conversion flag bits (ZEROPAD/LARGE/...)
            const char *dig = digits;   // -- reset each conversion; %P/%X switch to upper_digits
            bool isLong = false;
            (void)isLong;               // -- 'l' is parsed but not yet honored

            // -- we need to check for the format modifiers, starting with zero-fill
            if (*fmt == '0') {
                fmtFlags |= ZEROPAD;
                fmtDefn ++;
                fmt ++;
            }

            if (*fmt == 'l') {
                isLong = true;
                fmtDefn ++;
                fmt ++;
            }

            // -- now, get to the bottom of a formatted value
            switch (*fmt) {
            default:
                // -- unknown conversion: back up and emit the '%' literally
                fmt -= fmtDefn;
                // fall through

            case '%':
                SerialPutChar(&debugSerial, '%');
                printed ++;
                continue;

            case 'c': {
                int c = va_arg(args, int);
                SerialPutChar(&debugSerial, c & 0xff);
                printed ++;             // -- was missing in the original: %c never counted its output
                continue;
            }

            case 's': {
                char *s = va_arg(args, char *);
                if (!s) s = (char *)"<NULL>";
                while (*s) {
                    SerialPutChar(&debugSerial, *s ++);
                    printed ++;
                }
                continue;
            }

            case 'P':
                fmtFlags |= LARGE;
                dig = upper_digits;
                // fall through

            case 'p': {
                // -- pointers always print "0x" followed by every nibble (fixed width)
                archsize_t val = va_arg(args, archsize_t);
                SerialPutChar(&debugSerial, '0');
                SerialPutChar(&debugSerial, 'x');
                printed += 2;

                for (int j = sizeof(archsize_t) * 8 - 4; j >= 0; j -= 4) {
                    SerialPutChar(&debugSerial, dig[(val >> j) & 0x0f]);
                    printed ++;
                }
                break;
            }

            case 'X':
                fmtFlags |= LARGE;
                dig = upper_digits;
                // fall through

            case 'x': {
                // -- hex prints "0x" then the value; leading zeros are suppressed unless
                //    the '0' (ZEROPAD) modifier was given
                archsize_t val = va_arg(args, archsize_t);
                SerialPutChar(&debugSerial, '0');
                SerialPutChar(&debugSerial, 'x');
                printed += 2;
                bool allZero = true;

                for (int j = sizeof(archsize_t) * 8 - 4; j >= 0; j -= 4) {
                    int ch = (val >> j) & 0x0f;
                    if (ch != 0) allZero = false;
                    if (!allZero || (fmtFlags & ZEROPAD)) {
                        SerialPutChar(&debugSerial, dig[ch]);
                        printed ++;
                    }
                }

                // -- a value of 0 with no zero-pad still needs one digit
                if (allZero && !(fmtFlags & ZEROPAD)) {
                    SerialPutChar(&debugSerial, '0');
                    printed ++;
                }
                break;
            }

            case 'd': {
                // -- signed decimal; the magnitude is carried unsigned so negating the most
                //    negative int is well defined (the original `val = -val` overflowed)
                int val = va_arg(args, int);
                unsigned int mag;
                char buf[30];
                int i = 0;

                if (val < 0) {
                    SerialPutChar(&debugSerial, '-');
                    printed ++;
                    mag = (unsigned int)(-(val + 1)) + 1u;      // -- |val| without signed overflow
                } else {
                    mag = (unsigned int)val;
                }

                if (mag == 0) {
                    SerialPutChar(&debugSerial, '0');
                    printed ++;
                } else {
                    // -- collect digits least-significant first, then emit in reverse
                    while (mag) {
                        buf[i ++] = (mag % 10) + '0';
                        mag /= 10;
                    }

                    while (--i >= 0) {
                        SerialPutChar(&debugSerial, buf[i]);
                        printed ++;
                    }
                }
                break;
            }

            case 'u': {
                // -- unsigned decimal, same digit-buffer technique as %d
                unsigned int val = va_arg(args, unsigned int);
                char buf[30];
                int i = 0;

                if (val == 0) {
                    SerialPutChar(&debugSerial, '0');
                    printed ++;
                } else {
                    while (val) {
                        buf[i ++] = (val % 10) + '0';
                        val /= 10;
                    }

                    while (--i >= 0) {
                        SerialPutChar(&debugSerial, buf[i]);
                        printed ++;
                    }
                }
                break;
            }
            }
        }

exit:
        va_end(args);
        SPINLOCK_RLS_RESTORE_INT(kprintfLock, flags);
        return printed;
    }
}
<|start_filename|>arch/arm/ProcessNewStack.cc<|end_filename|>
//===================================================================================================================
//
// ProcessNewStack.cc -- for a new process, create its stack so we can return from ProcessSwitch()
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-16 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
#include "mmu.h"
#include "pmm.h"
#include "stacks.h"
#include "process.h"
//
// -- build the stack needed to start a new process
// ---------------------------------------------
EXTERN_C EXPORT KERNEL
frame_t ProcessNewStack(Process_t *proc, void (*startingAddr)(void))
{
    // -- Build the initial stack image for a brand-new process so that the first
    //    `ProcessSwitch()` into it "returns" first into `ProcessStart()` and then
    //    into `startingAddr()`.  Returns the physical frame backing the stack and
    //    records the process' initial stack pointer in `proc->topOfStack`.
    archsize_t *stack;
    // -- allocate the physical frames for the stack (2nd arg presumably 2^12 alignment -- TODO confirm)
    frame_t rv = PmmAllocAlignedFrames(STACK_SIZE / FRAME_SIZE, 12);
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(mmuStackInitLock) {
        // -- temporarily map the new frame at a well-known scratch address so we can write into it;
        //    the lock serializes use of that single scratch mapping across CPUs
        MmuMapToFrame(MMU_STACK_INIT_VADDR, rv, PG_KRN | PG_WRT);
        stack = (archsize_t *)(MMU_STACK_INIT_VADDR + STACK_SIZE);      // start at the top; stack grows down
//        *--stack = ProcessEnd;              // -- just in case, we will self-terminate
        //
        // -- OK, these 2 are backwards.  See arm `ProcessSwitch()` for more info.  I need to restore `startingAddr()`
        //    into `lr` before calling `ProcessStart()` so that I get returns in the right order.
        //    --------------------------------------------------------------------------------------------------------
        *--stack = (archsize_t)ProcessStart;            // -- initialize a new process
        *--stack = (archsize_t)startingAddr;            // -- this is the process starting point
        // -- the callee-saved register image `ProcessSwitch()` will pop; all start as zero
        *--stack = 0;           // -- ip
        *--stack = 0;           // -- r11
        *--stack = 0;           // -- r10
        *--stack = 0;           // -- r9
        *--stack = 0;           // -- r8
        *--stack = 0;           // -- r7
        *--stack = 0;           // -- r6
        *--stack = 0;           // -- r5
        *--stack = 0;           // -- r4
        MmuUnmapPage(MMU_STACK_INIT_VADDR);             // done writing; tear down the scratch mapping
        SPINLOCK_RLS_RESTORE_INT(mmuStackInitLock, flags);
    }
    archsize_t stackLoc = StackFind();          // get a new stack
    assert(stackLoc != 0);
    // -- rebase the pointer we built at the scratch mapping onto the stack's real virtual address
    proc->topOfStack = ((archsize_t)stack - MMU_STACK_INIT_VADDR) + stackLoc;
    MmuMapToFrame(stackLoc, rv, PG_KRN | PG_WRT);
    kprintf("the new process stack is located at %p (frame %p)\n", stackLoc, rv);
    return rv;
}
<|start_filename|>platform/pc/inc/platform-pic.h<|end_filename|>
//===================================================================================================================
//
// platform-pic.h -- Programmable Interrupt Controller definitions and functions for the x86
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#pragma once
#ifndef __PIC_H__
# error "Use #include \"pic.h\" and it will pick up this file; do not #include this file directly."
#endif
//
// -- on x86, this is the type we use to refer to the pic port
// --------------------------------------------------------
typedef uint16_t PicBase_t;
//
// -- This enum contains the IRQ numbers for the system (caution -- may vary by platform & PIC)
// These are initially derived from the IOAPIC redirection tables entries.
// -----------------------------------------------------------------------------------------
typedef enum {
    // -- the 16 legacy 8259(A)-compatible IRQ lines
    IRQ0 = 0,
    IRQ1 = 1,
    IRQ2 = 2,
    IRQ3 = 3,
    IRQ4 = 4,
    IRQ5 = 5,
    IRQ6 = 6,
    IRQ7 = 7,
    IRQ8 = 8,
    IRQ9 = 9,
    IRQ10 = 10,
    IRQ11 = 11,
    IRQ12 = 12,
    IRQ13 = 13,
    IRQ14 = 14,
    IRQ15 = 15,                 // this is the end of IRQs for the 8259(A) PIC
    // -- the entries below exist only with the IOAPIC (taken from its redirection table, per the
    //    note at the top of this enum); exact routing is board-specific -- TODO confirm per target
    PIRQ0 = 16,
    PIRQ1 = 17,
    PIRQ2 = 18,
    PIRQ3 = 19,
    MIRQ0 = 20,
    MIRQ1 = 21,
    GPIRQ = 22,
    SMI = 23,
    INTR = 24,
    IRQ_LAST,                   // THIS MUST BE THE LAST ENTRY!!  (used as the count of IRQ numbers)
} Irq_t;
//
// -- These are the possible pic drivers for the computer
// ---------------------------------------------------
EXTERN KERNEL_DATA
struct PicDevice_t pic8259;
EXTERN KERNEL_DATA
struct PicDevice_t ioapicDriver;
//
// -- This is the base location of the timer on x86
// ---------------------------------------------
#define PIC1 0x20
#define PIC2 0xa0
//
// -- Here are the offsets for the different ports of interest
// --------------------------------------------------------
#define PIC_MASTER_COMMAND 0x00
#define PIC_MASTER_DATA 0x01
#define PIC_SLAVE_COMMAND 0x00
#define PIC_SLAVE_DATA 0x01
//
// -- Here are the function prototypes that the operation functions need to conform to
// --------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void _PicInit(PicDevice_t *dev, const char *name);
EXTERN_C EXPORT KERNEL
isrFunc_t _PicRegisterHandler(PicDevice_t *dev, Irq_t irq, int vector, isrFunc_t handler);
EXTERN_C EXPORT KERNEL
void _PicUnmaskIrq(PicDevice_t *dev, Irq_t irq);
EXTERN_C EXPORT KERNEL
void _PicMaskIrq(PicDevice_t *dev, Irq_t irq);
EXTERN_C EXPORT KERNEL
void _PicEoi(PicDevice_t *dev, Irq_t irq);
<|start_filename|>modules/kernel/src/stacks/StackAlloc.cc<|end_filename|>
//===================================================================================================================
//
// StackAlloc.cc -- Allocate the stack associated with the address handed in
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Dec-01 Initial 0.4.6d ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "stacks.h"
//
// -- allocate a stack by setting the proper bit
// ------------------------------------------
EXPORT KERNEL
void StackDoAlloc(archsize_t stackBase)
{
    // -- Mark the kernel stack containing `stackBase` as allocated in the `stacks` bitmap.
    //    The address must fall inside the 4MB region reserved for stacks.
    if (!assert(stackBase >= STACK_LOCATION)) return;
    if (!assert(stackBase < STACK_LOCATION + (4 * 1024 * 1024))) return;

    // -- turn the address into a stack ordinal: align down to a stack boundary,
    //    rebase to the start of the region, and divide by the stack size
    archsize_t ordinal = (stackBase & ~(STACK_SIZE - 1)) - STACK_LOCATION;
    ordinal /= STACK_SIZE;

    // -- set the corresponding bit (32 stacks tracked per bitmap word)
    stacks[ordinal >> 5] |= (1 << (ordinal & 0x1f));
}
<|start_filename|>platform/bcm2836/pic/PicMaskIrq.cc<|end_filename|>
//===================================================================================================================
//
//  PicMaskIrq.cc -- Disable the PIC from passing along an IRQ
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "pic.h"
#include "printf.h"
//
// -- Disable the PIC from passing along an IRQ (some call it masking)
// ----------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void _PicMaskIrq(PicDevice_t *dev, int irq)
{
    // -- Disable (mask) the given IRQ so the PIC no longer delivers it.  Handles the 3 interrupt
    //    banks on this platform: bcm2836 per-core interrupts, bcm2835 ARM (basic) interrupts, and
    //    the bcm2835 GPU interrupts 0-63.  Silently ignores a NULL device or an out-of-range irq.
#if DEBUG_ENABLED(PicMaskIrq)
    kprintf("Entering PicMaskIrq\n");
#endif
    if (!dev) return;
    if (irq < 0 || irq > BCM2836_LAST_IRQ) return;

    Bcm2835Pic_t *picData = (Bcm2835Pic_t *)dev->device.deviceData;
    int shift;
    archsize_t addr;

#if DEBUG_ENABLED(PicMaskIrq)
    kprintf("Sanity checks qualify PicMaskIrq, irq %d\n", irq);
#endif

    if (irq >= BCM2836_CORE_BASE) {
        // -- per-core local interrupt: clear the enable bit in this core's control register
        //    (read-modify-write; this register is not a set/clear pair like the ones below)
        shift = irq - BCM2836_CORE_BASE;
        addr = (MMIO_VADDR + 0x01000060) + (thisCpu->cpuNum * 4);

#if DEBUG_ENABLED(PicMaskIrq)
        kprintf("... bcm2836 local core interrupt\n");
#endif

        MmioWrite(addr, MmioRead(addr) & ~(1 << shift));
        return;
    } else if (irq >= BCM2835_ARM_BASE) {
        // -- ARM (basic) interrupt: the bit position is relative to the ARM base
        //    (BUGFIX: was `irq - BCM2835_GPU_BASE1`, which computed the wrong bit for this bank)
        shift = irq - BCM2835_ARM_BASE;
        addr = picData->picLoc + INT_IRQDIS0;

#if DEBUG_ENABLED(PicMaskIrq)
        kprintf("... arm processor interrupt\n");
#endif
    } else {        // GPU IRQ 0-63
        // -- GPU interrupts live in 2 32-bit disable registers; pick the register and bit
        shift = irq % 32;
        addr = picData->picLoc + INT_IRQDIS1 + (4 * (irq / 32));

#if DEBUG_ENABLED(PicMaskIrq)
        kprintf("... bcm2835 GPU interrupt\n");
#endif
    }

    // -- the IRQ-disable registers are write-1-to-disable; no read-modify-write needed
    MmioWrite(addr, 1 << shift);

#if DEBUG_ENABLED(PicMaskIrq)
    kprintf("PicMaskIrq done\n");
#endif
}
<|start_filename|>modules/kernel/inc/interrupt.h<|end_filename|>
//===================================================================================================================
//
//  interrupt.h -- These are functions related to interrupts
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// These are function prototypes for interrupts management.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Oct-10 Initial 0.1.0 ADCL Initial version
// 2018-Nov-11 Initial 0.2.0 ADCL Rename this file to be more generic, better architecture abstraction
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#pragma once
#define __INTERRUPT_H__
#include "types.h"
#include "arch-interrupt.h"
//
// -- Unregister an ISR (fails if nothing is registered)
// --------------------------------------------------
EXTERN_C EXPORT KERNEL
void IsrUnregister(uint8_t interrupt);
//
// -- Register an ISR (fails if something is already registered)
// ----------------------------------------------------------
EXTERN_C EXPORT KERNEL
isrFunc_t IsrRegister(uint8_t interrupt, isrFunc_t func);
//
// -- Dump the CPU state
// ------------------
EXTERN_C EXPORT NORETURN KERNEL
void IsrDumpState(isrRegs_t *regs);
//
// -- The system call handler
// -----------------------
EXTERN_C EXPORT SYSCALL
void SyscallHandler(isrRegs_t *regs);
//
// -- An interrupt to handle TLB Flushes
// ----------------------------------
EXTERN_C EXPORT KERNEL
void IpiHandleTlbFlush(UNUSED(isrRegs_t *regs));
//
// -- An interrupt to handle debugger engagement
// ------------------------------------------
EXTERN_C EXPORT KERNEL
void IpiHandleDebugger(UNUSED(isrRegs_t *regs));
<|start_filename|>platform/pc/framebuffer/FrameBufferInit.cc<|end_filename|>
//===================================================================================================================
//
// FrameBufferInit.cc -- Frame buffer initialization for the console
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2017-May-03 Initial 0.0.0 ADCL Initial version
// 2018-Jun-13 Initial 0.1.0 ADCL Copied this file from century (fb.c) to century-os
// 2019-Feb-15 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "hw-disc.h"
#include "mmu.h"
#include "printf.h"
#include "fb.h"
//
// -- Initialize the additional frame buffer info
// -------------------------------------------
EXTERN_C EXPORT LOADER
void FrameBufferInit(void)
{
    // -- Map the linear frame buffer into its final virtual location and reset the console state.
    kprintf(".. Framebuffer located at: %p\n", GetFrameBufferAddr());
    kprintf(".. Framebuffer size: %p\n", GetFrameBufferPitch() * GetFrameBufferHeight());

    //
    // -- Map the frame buffer to its final location in virtual memory
    //    ------------------------------------------------------------
    kprintf("Mapping the Frame Buffer\n");
    archsize_t fbSize = GetFrameBufferPitch() * GetFrameBufferHeight();     // bytes to cover
    archsize_t frame = ((archsize_t)GetFrameBufferAddr()) >> 12;            // first physical frame

    for (archsize_t offset = 0; offset < fbSize; offset += PAGE_SIZE, frame ++) {
        MmuMapToFrame(MMU_FRAMEBUFFER + offset, frame, PG_KRN | PG_WRT | PG_DEVICE);
    }

    // -- goose the config to the correct fb address
    SetFrameBufferAddr((uint16_t *)MMU_FRAMEBUFFER);
    SetFgColor(0xffff);
    SetBgColor(0x1234);
    FrameBufferClear();
}
<|start_filename|>platform/bcm2836/inc/platform-serial.h<|end_filename|>
//===================================================================================================================
//
// platform-serial.h -- Serial definitions and functions for the bcm2835 serial port
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-23 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#pragma once
#ifndef __SERIAL_H__
# error "Use #include \"serial.h\" and it will pick up this file; do not #include this file directly."
#endif
#include "types.h"
//
// -- on bcm2835 SoC, this is the type we use to refer to the serial port
// -------------------------------------------------------------------
typedef archsize_t SerialBase_t;
//
// -- The following are the offsets that are used to manage the Serial Port
// ---------------------------------------------------------------------
#define AUX_IRQ (0) // Auxiliary Interrupt Status
//-------------------------------------------------------------------------------------------------------------------
#define AUXIRQ_UART (1<<0) // For AUX_IRQ, UART interrupt pending?
#define AUXIRQ_SPI1 (1<<1) // For AUX_IRQ, SPI1 interrupt pending?
#define AUXIRQ_SPI2 (1<<2) // For AUX_IRQ, SPI2 interrupt pending?
#define AUX_ENABLES (0x4) // Auxiliary Enables
//-------------------------------------------------------------------------------------------------------------------
#define AUXENB_UART (1<<0) // For AUX_IRQ, UART enable
#define AUXENB_SPI1 (1<<1) // For AUX_IRQ, SPI1 enable
#define AUXENB_SPI2 (1<<2) // For AUX_IRQ, SPI2 enable
//
// -- The auxiliary Mini UART
// -----------------------
#define AUX_MU_IO_REG (0x40) // Mini UART I/O Data
//-------------------------------------------------------------------------------------------------------------------
#define AUXMUART_TX (0xff) // Transmit data bits
#define AUXMUART_RX (0xff) // Receive data bits
#define AUX_MU_IER_REG (0x44) // Mini UART Interrupt Enable
//-------------------------------------------------------------------------------------------------------------------
#define AUXMUIER_RXENB (1<<1) // Generate interrupt when RX FIFO queue has data
#define AUXMUIER_TXENB (1<<0) // Generate interrupt when TX FIFO queue is empty
#define AUX_MU_IIR_REG (0x48)                   // Mini UART Interrupt Identify
//-------------------------------------------------------------------------------------------------------------------
#define AUXMUIIR_TXEMPTY (1<<1)                 // On read: bits [2:1] == 01 -> TX holding register is empty
#define AUXMUIIR_RXEMPTY (2<<1)                 // NOTE(review): per the BCM2835 datasheet, [2:1] == 10 on read means
                                                // the RX FIFO *holds* a valid byte -- name/comment look inverted; verify
#define AUXMUIIR_RXCLR (1<<1)                   // On write: clear the RX FIFO queue
#define AUXMUIIR_TXCLR (2<<1)                   // On write: clear the TX FIFO queue (comment previously said RX)
#define SH_AUXMUIIR(x) (((x)&0x3)<<1)           // Shift a 2-bit value into this field
#define AUX_MU_LCR_REG (0x4c) // Mini UART Line Control
//-------------------------------------------------------------------------------------------------------------------
#define AUXMULCR_DLAB (1<<7) // Set to access baud rate register; clear for operation
#define AUXMULCR_BRK (1<<6) // Set to indicate break conditions
#define AUXMULCR_SIZE (1<<0) // Data Size: 0 = 7-bits; 1 = 8 bits
#define AUX_MU_MCR_REG (0x50) // Mini UART Modem Control
//-------------------------------------------------------------------------------------------------------------------
#define AUXMUMCR_RTS (1<<1) // Clr if RTS it high; Set if RTS is low
#define AUX_MU_LSR_REG (0x54) // Mini UART Line Status
//-------------------------------------------------------------------------------------------------------------------
#define AUXMULSR_TXIDL (1<<6) // Set if TX FIFO is empty and transmitter is idle
#define AUXMULSR_TXRMPTY (1<<5) // Set if TX FIFO can accept at least 1 char
#define AUXMULSR_RXOVER (1<<1) // Set if RX FIFO overrun (recent chars discarded)
#define AUXMULSR_RXRDY (1<<0) // Set if RX FIFO has 1 char
#define AUX_MU_MSR_REG (0x58) // Mini UART Modem Status
//-------------------------------------------------------------------------------------------------------------------
#define AUXMUMSR_CTS (1<<5) // Set if CTS is low; clr if CTS hi
#define AUX_MU_SCRATCH (0x5c) // Mini UART Scratch
//-------------------------------------------------------------------------------------------------------------------
#define AUXMU_BYTE (0xff) // Single byte of storage
#define AUX_MU_CNTL_REG (0x60) // Mini UART Extra Control
//-------------------------------------------------------------------------------------------------------------------
#define AUXMUCTL_CTS (1<<7) // clear if CTS assert level low
#define AUXMUCTL_RTS (1<<6) // clear if RTS assert level low
#define AUXMUCTL_RTSAUTO (3<<4) // de-assert: 00: 3 Byt; 01: 2 Byt; 10: 1 Byt; 11: 4 Byt
#define AUXMUCTL_ENBCTSAUTO (1<<3) // enable auto flow control using CTS
#define AUXMUCTL_ENBRTSAUTO (1<<2) // enable auto flow control using RTS
#define AUXMUCTL_ENBTX (1<<1) // enable transmitter
#define AUCMUCTL_RNBRX (1<<0) // enable receiver
#define SH_AUXMUCTLRTS(x) (((x)&0x3)<<4) // adjust RTS-AUTO to the right bits
#define AUX_MU_STAT_REG (0x64) // Mini UART Extra Status
//-------------------------------------------------------------------------------------------------------------------
#define AUXMUST_TXLVL (0xf<<24) // TX FIFO level (0-8)
#define AUXMUST_RXLVL (0xf<<16) // RX FIFO level (0-8)
#define AUXMUST_TXDONE (1<<9) // Logical And of AUXMUST_TXEMPTY & AUXMUST_RXIDLE
#define AUXMUST_TXEMPTY (1<<8) // TX FIFO empty (AUXMUST_TXLVL == 0)
#define AUXMUST_CTS (1<<7) // Status of CTS line
#define AUXMUST_RTS (1<<6) // Status of RTS line
#define AUXMUST_TXFULL (1<<5) // TX FIFO Full
#define AUXMUST_RXOVER (1<<4) // Receiver Overrun
#define AUXMUST_TXIDLE (1<<3) // TX Idle (!AUXMUST_SPACE)
#define AUXMUST_RXIDLE (1<<2) // RXC Idle
#define AUXMUST_SPACE (1<<1) // TX FIFO has room
#define AUXMUST_AVAIL (1<<0) // RX FIFO has data
#define AUX_MU_BAUD_REG (0x68) // Mini UART Baudrate
//-------------------------------------------------------------------------------------------------------------------
#define AUXMU_BAUD (0xffff) // Baud Rate
<|start_filename|>modules/kernel/src/debugger/DebugPrompt.cc<|end_filename|>
//===================================================================================================================
//
// DebugPrompt.cc -- Prompt for a command and get the input into a global buffer
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-02 Initial v0.6.0a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "serial.h"
#include "debugger.h"
//
// -- Given the current command branch, prompt for and get the next command
// ---------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void DebugPrompt(DebuggerState_t state)
{
    // -- Show the prompt for the current debugger branch and read one command line
    //    from the debug serial port into the global `debugCommand` buffer.
    kprintf("\n (allowed: %s)\r" ANSI_CURSOR_UP(1), dbgPrompts[state].allowed);
    kprintf("%s :> ", dbgPrompts[state].branch);
    kMemSetB(debugCommand, 0, DEBUG_COMMAND_LEN);

    int cursor = 0;

    for ( ; ; ) {
        // -- keep one slot free so the buffer always stays NUL-terminated;
        //    when full, the last character simply gets overwritten
        if (cursor == DEBUG_COMMAND_LEN - 1) cursor = DEBUG_COMMAND_LEN - 2;

        // -- busy-wait for the next keystroke
        while (!SerialHasChar(&debugSerial)) { }
        char ch = (char)SerialGetChar(&debugSerial);

        // -- CR or LF finishes the command (but never return an empty buffer)
        if (ch == 13 || ch == 10) {
            if (*debugCommand == 0) continue;
            kprintf("\n" ANSI_ERASE_LINE);
            return;
        }

        // -- DEL rubs out the previous character, if there is one
        if (ch == 127) {
            if (cursor > 0) {
                cursor --;
                kprintf("\b \b");
                debugCommand[cursor] = '\0';
            }
            continue;
        }

        // -- all other control characters are ignored
        if (ch < ' ') continue;

        // -- store and echo the character
        debugCommand[cursor ++] = ch;
        kprintf("%c", ch);
    }
}
<|start_filename|>platform/inc/serial.h<|end_filename|>
//===================================================================================================================
//
// serial.h -- Serial debugging functions
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// These functions are used to send debugging information to the serial port.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Jun-27 Initial 0.1.0 ADCL Initial version
// 2018-Nov-13 Initial 0.2.0 ADCL Duplicate this file from libk, eliminating the libk version.
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#pragma once
#define __SERIAL_H__
#include "types.h"
#include "spinlock.h"
//
// -- go get any platform-specific definitions required for the serial port interface
// -------------------------------------------------------------------------------
#if __has_include("platform-serial.h")
# include "platform-serial.h"
#endif
//
// -- Now, carefully define a common interface for the serial port that can be used by both the loader and the
// kernel. There are 3 places we will want to be able to output to the serial port:
// 1) in the early loader before the kernel is mapped
// 2) in the loader after the kernel is mapped and can be called by far calls
// 3) in the kernel proper
//
// As such, not all the functions will be implemented with each location
// --------------------------------------------------------------------------------------------------------
typedef struct SerialDevice_t {
    SerialBase_t base;                                          // base address/port of the device (platform-specific type)
    Spinlock_t lock;                                            // serializes access to the port
    void (*SerialOpen)(struct SerialDevice_t *);                // initialize the port for use
    bool (*SerialHasChar)(struct SerialDevice_t *);             // is a received character waiting?
    bool (*SerialHasRoom)(struct SerialDevice_t *);             // can another character be queued for transmit?
    uint8_t (*SerialGetChar)(struct SerialDevice_t *);          // read one character (blocking behavior is driver-defined)
    void (*SerialPutChar)(struct SerialDevice_t *, uint8_t);    // write one character
    void (*SerialClose)(struct SerialDevice_t *);               // shut the port down
    void *platformData;                 // This will be used by the platform for additional info
} SerialDevice_t;
//
// -- Here, declare the different configurations of the serial port we will use
// -------------------------------------------------------------------------
EXTERN KERNEL_DATA
SerialDevice_t debugSerial;
//
// -- These are the common interface functions we will use to interact with the serial port. These functions are
// not safe in that they will not check for nulls before calling the function. Therefore, caller beware!
// -----------------------------------------------------------------------------------------------------------
// -- thin dispatch wrappers; NO NULL checks are performed (caller beware, per the note above)
EXTERN_C EXPORT INLINE
void SerialOpen(SerialDevice_t *dev) { dev->SerialOpen(dev); }                          // initialize the port
EXTERN_C EXPORT INLINE
void SerialClose(SerialDevice_t *dev) { dev->SerialClose(dev); }                        // shut the port down
EXTERN_C EXPORT INLINE
bool SerialHasRoom(SerialDevice_t *dev) { return dev->SerialHasRoom(dev); }             // room to transmit?
EXTERN_C EXPORT INLINE
bool SerialHasChar(SerialDevice_t *dev) { return dev->SerialHasChar(dev); }             // received byte waiting?
EXTERN_C EXPORT INLINE
uint8_t SerialGetChar(SerialDevice_t *dev) { return dev->SerialGetChar(dev); }          // read one byte
EXTERN_C EXPORT INLINE
void SerialPutChar(SerialDevice_t *dev, uint8_t ch) { dev->SerialPutChar(dev, ch); }    // send one byte
//
// -- Here are the function prototypes that the operation functions need to conform to
// --------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void _SerialOpen(SerialDevice_t *dev);
EXTERN_C EXPORT KERNEL
void _SerialClose(SerialDevice_t *dev);
EXTERN_C EXPORT KERNEL
bool _SerialHasRoom(SerialDevice_t *dev);
EXTERN_C EXPORT KERNEL
bool _SerialHasChar(SerialDevice_t *dev);
EXTERN_C EXPORT KERNEL
uint8_t _SerialGetChar(SerialDevice_t *dev);
EXTERN_C EXPORT KERNEL
void _SerialPutChar(SerialDevice_t *dev, uint8_t ch);
<|start_filename|>modules/kernel/inc/process.h<|end_filename|>
//===================================================================================================================
//
// process.h -- Structures for the Process management
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This file contains the structures and prototypes needed to manage processes. This file was copied from
// Century32 and will need to be updated for this kernel.
//
// The process structure will be allocated with the new process stack -- adding it to the overhead of the process
// itself. Since the stacks will be multiples of 4K, one extra page will be allocated for the process stucture
// overhead. This will be placed below the stack in a manner where a stack underflow will not impact this
// structure unless something really goes horrible and deliberately wrong. The scheduler will be able to check
// that the stack is within bounds properly and kill the process if needed (eventually anyway). This simple
// decision will eliminate the need for process structures or the need to allocate a process from the heap.
//
// There are several statuses for processes that should be noted. They are:
// * INIT -- the process is initializing and is not ready to run yet.
// * RUN -- the process is in a runnable state. In this case, the process may or may not be the current process.
// * END -- the process has ended and the Butler process needs to clean up the structures, memory, IPC, locks, etc.
// * MTXW -- the process is waiting for a mutex and is ineligible to run.
// * SEMW -- the process is waiting for a semaphore and is ineligible to run.
// * DLYW -- the process is waiting for a timed delay and is ineligible to run.
// * MSGW -- the process is waiting for the delivery of a message and is ineligible to run.
// * ZOMB -- the process has died at the OS level for some reason or violation and the Butler process is going to
// clean it up.
//
// Additionally, we are going to support the ability for a user to hold a process. In this case, the process will
// also be ineligible to run. These held processes will be moved into another list which will maintain the
// overall status of the process.
//
// The process priorities will serve 2 functions. It will 1) provide a sequence of what is eligibe to run and
// when from a scheduler perspective. It will also 2) provide the quantum duration for which a process is able to
// use the CPU. In this case, a higher priority process will be able use the CPU longer than a low priority
// process. Additionally, the Idle process is also the Butler process. When there is something that needs to
// be done, the Butler will artificially raise its CPU priority to be an OS process while it is completing this
// work. When complete the Butler will reduce its priority again.
//
// Finally, threads will be supported not yet be supported at the OS level. If needed, they will be added at a
// later time.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Sep-23 Initial version
// 2013-Aug-20 46 Added a reference to an allocated tty session
// 2013-Aug-22 58 Normalize the TTY functions to use tty#
// 2013-Aug-25 60 Create the ability to find a process quickly by PID
// 2013-Aug-25 67 Add a list of locks a process holds
// 2013-Aug-27 66 Added a Zombied process status
// 2013-Aug-27 36 Add messages to the process structure
// 2013-Sep-01 83 Add a spinlock to the PID list array (removed 2018-Oct-14)
// 2013-Sep-02 73 Add a lock to the process structure (removed 2018-Oct-14)
// 2013-Sep-03 73 Encapsulate Process Structure
// 2018-Oct-14 Initial 0.1.0 ADCL Copied this file from Century32 and cleaned it up for century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#pragma once
#include "types.h"
#include "printf.h"
#include "lists.h"
#include "cpu.h"
#include "timer.h"
#include "spinlock.h"
//
// -- For the scheduler structure, clean the cache pushing the changes to ram
// -----------------------------------------------------------------------
#define CLEAN_SCHEDULER() CleanCache((archsize_t)&scheduler, sizeof(Scheduler_t))
//
// -- For the scheduler structure, invalidate the cache forcing a re-read from ram
// ----------------------------------------------------------------------------
#define INVALIDATE_SCHEDULER() InvalidateCache((archsize_t)&scheduler, sizeof(Scheduler_t))
//
// -- for a Process structure, clean the cache pushing the changes to ram
// -------------------------------------------------------------------
#define CLEAN_PROCESS(proc) CleanCache((archsize_t)proc, sizeof(Process_t))
//
// -- for a Process structure, invalidate the cache forcing a re-read from ram
// ------------------------------------------------------------------------
#define INVALIDATE_PROCESS(proc) InvalidateCache((archsize_t)proc, sizeof(Process_t))
//
// -- This list is are the statuses available for a running process, indicating what queue it will be on.
// ---------------------------------------------------------------------------------------------------
// -- values are explicit since `ProcessSwitch()` (assembly) tests RUNNING/READY by number
typedef enum { PROC_INIT = 0,               // This is being created and is not on a queue yet
               PROC_RUNNING = 1,            // This is currently running (See ProcessSwitch!)
               PROC_READY = 2,              // This is ready to run (See ProcessSwitch!)
               PROC_TERM = 3,               // This has ended
               PROC_MTXW = 4,               // This is waiting for a Mutex lock and is on the waiting queue
               PROC_SEMW = 5,               // This is waiting on a Semaphore and is on the waiting queue
               PROC_DLYW = 6,               // This is waiting for a timed event and is on the waiting queue
               PROC_MSGW = 7,               // This is waiting for a message to be delivered and in on the waiting queue
} ProcStatus_t;
//
// -- Convert a ProcStatus_t to a string
// -----------------------------------
EXPORT INLINE
const char *ProcStatusStr(ProcStatus_t s) {
    // -- map each process status onto its human-readable name (debug output aid)
    switch (s) {
        case PROC_INIT:     return "INIT";
        case PROC_RUNNING:  return "RUNNING";
        case PROC_READY:    return "READY";
        case PROC_TERM:     return "TERM";
        case PROC_MTXW:     return "MTXW";
        case PROC_SEMW:     return "SEMW";
        case PROC_DLYW:     return "DLYW";
        case PROC_MSGW:     return "MSGW";
        default:            return "Unknown!";
    }
}
//
// -- This list is the policy choices for a running process; unused currently
// -----------------------------------------------------------------------
typedef enum { POLICY_0,            // placeholder scheduling policies -- unused currently (see Process_t.policy)
    POLICY_1,
    POLICY_2,
    POLICY_3,
} ProcPolicy_t;
//
// -- This is list is the priority of the process, which doubles as the quantum that will be given a process
// ------------------------------------------------------------------------------------------------------
typedef enum {
    // -- the numeric value doubles as the quantum given to a process of this priority
    PTY_IDLE = 1,                   // This is an idle priority process
    PTY_LOW = 5,                    // This is a low priority user process
    PTY_NORM = 10,                  // This is a normal user process
    PTY_HIGH = 20,                  // This is a high priority user process
    PTY_OS = 30,                    // This is an OS or Driver process
} ProcPriority_t;
//
// -- Convert a ProcPriority_t to a string
//    ------------------------------------
EXPORT INLINE
const char *ProcPriorityStr(ProcPriority_t p) {
    // -- map each process priority onto its human-readable name (debug output aid)
    switch (p) {
        case PTY_IDLE:  return "IDLE";
        case PTY_LOW:   return "LOW";
        case PTY_NORM:  return "NORMAL";
        case PTY_HIGH:  return "HIGH";
        case PTY_OS:    return "OS";
        default:        return "Unknown!";
    }
}
//
// -- This is a process structure
// ---------------------------
typedef struct Process_t {
    archsize_t topOfStack;          // This is the process current esp value (when not executing)
                                    // NOTE(review): the context-switch code likely assumes this member
                                    // sits at offset 0 -- confirm before reordering any fields
    archsize_t virtAddrSpace;       // This is the process top level page table
    ProcStatus_t status;            // This is the process status (determines which queue/list it is on)
    ProcPriority_t priority;        // This is the process priority
    volatile AtomicInt_t quantumLeft;   // This is the quantum remaining for the process (may be more than priority)
    PID_t pid;                      // This is the PID of this process
    archsize_t ssAddr;              // This is the address of the process stack
    char *command;                  // The identifying command, includes the terminating null
    ProcPolicy_t policy;            // This is the scheduling policy (unused currently)
    uint64_t timeUsed;              // This is the relative amount of CPU used
    uint64_t wakeAtMicros;          // Wake this process at or after this micros since boot
    ListHead_t::List_t stsQueue;    // This is the location on the current status queue
    ListHead_t::List_t globalList;  // This is the global list entry (see Scheduler_t.globalProcesses)
    int pendingErrno;               // this is the pending error number for a blocked process
    ListHead_t references;          // resources this process refers to; NOTE the lock is required to update this structure
} Process_t;
//
// -- These are the types of resources that can hold references
// ---------------------------------------------------------
typedef enum {
    REF_UNKNOWN,                    // not used / uninitialized
    REF_MSGQ,                       // Message Queue resource
} RefType_t;
//
// -- this structure will help keep track of the references held by a process, and what is resource is
// referred to by what process. the result is to resolve a many-to-may relationship.
// ------------------------------------------------------------------------------------------------
typedef struct Reference_t {
    RefType_t type;                 // which kind of resource is being referenced
    void *resAddr;                  // the address of the referenced resource
    Process_t *process;             // the process holding the reference
    ListHead_t::List_t procRefList;     // list linkage for the process side (presumably chained from Process_t.references)
    ListHead_t::List_t resourceRefBy;   // list linkage for the resource side (which processes refer to the resource)
} Reference_t;
//
// -- This structure encapsulates the whole of the scheduler
// ------------------------------------------------------
typedef struct Scheduler_t {
    // -- These fields can only be changed after ProcessLockAndPostpone(); SMP may change this
    PID_t nextPID;                  // the next pid number to allocate
    volatile uint64_t nextWake;     // the next tick-since-boot when a process needs to wake up
    // -- these fields will eventually be set up on a per-CPU basis
    volatile bool processChangePending; // whether there is a change that is pending
    archsize_t flags;               // the flags for the CPU when interrupts were disabled
    // -- This is a critical field controlled by its lock
    volatile AtomicInt_t schedulerLockCount;// the depth of the locks (re-entrant lock depth)
    volatile AtomicInt_t postponeCount; // the depth of the number of postpone requests
    int lockCpu;                    // the CPU that currently holds the lock (invalid when no lock is held)
    // -- and the different lists a process might be on, locks in each list will be used
    QueueHead_t queueOS;            // this is the queue for the OS tasks -- if it can run it does
    QueueHead_t queueHigh;          // this is the queue for High pty tasks
    QueueHead_t queueNormal;        // these are the typical tasks -- most non-OS tasks will be here
    QueueHead_t queueLow;           // low priority tasks which do not need cpu unless there is nothing else
    QueueHead_t queueIdle;          // idle priority tasks
    ListHead_t listBlocked;         // these are blocked tasks for any number of reasons
    ListHead_t listSleeping;        // these are sleeping tasks, which the timer interrupt will investigate
    ListHead_t listTerminated;      // these are terminated tasks, which are waiting to be torn down
    ListHead_t globalProcesses;     // this is the complete list of all processes regardless where they reside
} Scheduler_t;
//
// -- And the scheduler object itself
// -------------------------------
EXTERN EXPORT KERNEL_DATA
Scheduler_t scheduler;
EXTERN EXPORT KERNEL_DATA
Spinlock_t schedulerLock;
//
// -- Initialize the process structures
// ---------------------------------
EXTERN_C EXPORT LOADER
void ProcessInit(void);
//
// -- Scheduler locking, postponing, unlocking, and scheduling functions
// ------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void ProcessUnlockScheduler(void);
EXTERN_C EXPORT KERNEL
void ProcessUnlockAndSchedule(void);
EXTERN_C EXPORT KERNEL
void ProcessLockScheduler(bool save = true);
EXPORT INLINE
void ProcessLockAndPostpone(void) {
    // -- take the scheduler lock, then bump postponeCount so reschedule requests made
    //    while the lock is held are deferred (see ProcessUnlockAndSchedule())
    ProcessLockScheduler();
    AtomicInc(&scheduler.postponeCount);
    INVALIDATE_SCHEDULER();         // force a re-read of the scheduler structure from ram
}
//
// -- Functions to block the current process
// --------------------------------------
EXTERN_C EXPORT KERNEL
void ProcessDoBlock(ProcStatus_t reason);
EXPORT INLINE
void ProcessBlock(ProcStatus_t reason) {
    // -- block the current process (status `reason`) under the scheduler lock;
    //    the actual queue manipulation happens in ProcessDoBlock()
    ProcessLockAndPostpone();
    ProcessDoBlock(reason);
    ProcessUnlockAndSchedule();
}
//
// -- New task initialization tasks
// -----------------------------
EXTERN_C EXPORT KERNEL
void ProcessStart(void);
//
// -- Create a new process
// --------------------
EXTERN_C EXPORT KERNEL
Process_t *ProcessCreate(const char *name, void (*startingAddr)(void));
//
// -- Switch to a new process
// -----------------------
EXTERN_C EXPORT KERNEL
void ProcessSwitch(Process_t *proc);
//
// -- Create a new stack for a new process, and populate its contents
// ---------------------------------------------------------------
EXTERN_C EXPORT KERNEL
frame_t ProcessNewStack(Process_t *proc, void (*startingAddr)(void));
//
// -- Perform a scheduling exercise to determine the next process to run
// ------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void ProcessSchedule(void);
//
// -- Place a process on the correct ready queue
// ------------------------------------------
EXTERN_C EXPORT KERNEL
void ProcessDoReady(Process_t *proc);
EXPORT INLINE
void ProcessReady(Process_t *proc) {
    // -- locked wrapper around ProcessDoReady(): put `proc` on its ready queue
    ProcessLockAndPostpone();
    ProcessDoReady(proc);
    ProcessUnlockAndSchedule();
}
//
// -- Unblock a process
// -----------------
EXTERN_C EXPORT KERNEL
void ProcessDoUnblock(Process_t *proc);
EXPORT INLINE
void ProcessUnblock(Process_t *proc) {
    // -- locked wrapper around ProcessDoUnblock(): make `proc` runnable again
    ProcessLockAndPostpone();
    ProcessDoUnblock(proc);
    ProcessUnlockAndSchedule();
}
//
// -- Update the time used for a process
// ----------------------------------
EXTERN_C EXPORT KERNEL
void ProcessUpdateTimeUsed(void);
//
// -- Sleep until the we reach the number of micro-seconds since boot
// ---------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void ProcessDoMicroSleepUntil(uint64_t when);
EXPORT INLINE
void ProcessMicroSleepUntil(uint64_t when) {
    // -- sleep until `when` micro-seconds since boot, under the scheduler lock
    ProcessLockAndPostpone();
    ProcessDoMicroSleepUntil(when);
    ProcessUnlockAndSchedule();
}
EXPORT INLINE
void ProcessMicroSleep(uint64_t micros) {
    // -- relative sleep: current timer count plus `micros` micro-seconds
    ProcessMicroSleepUntil(TimerCurrentCount(timerControl) + micros);
}
EXPORT INLINE
void ProcessMilliSleep(uint64_t ms) {
    // -- relative sleep in milli-seconds (converted to micro-seconds)
    ProcessMicroSleepUntil(TimerCurrentCount(timerControl) + (ms * 1000));
}
EXPORT INLINE
void ProcessSleep(uint64_t secs) {
    // -- relative sleep in whole seconds (converted to micro-seconds)
    ProcessMicroSleepUntil(TimerCurrentCount(timerControl) + (secs * 1000000));
}
//
// -- Terminate a task
// ----------------
EXTERN_C EXPORT KERNEL
void ProcessTerminate(Process_t *proc);
//
// -- End current process
// -------------------
EXTERN_C EXPORT KERNEL
void ProcessEnd(void);
//
// -- remove the process for its list, if it is on one
// ------------------------------------------------
EXTERN_C EXPORT KERNEL
void ProcessListRemove(Process_t *proc);
//
// -- Idle when there is nothing to do
// --------------------------------
EXTERN_C EXPORT KERNEL
void ProcessIdle(void);
//
// -- Debugging functions to output the scheduler state
// -------------------------------------------------
EXTERN_C EXPORT KERNEL
void ProcessDoCheckQueue(void);
EXPORT INLINE
void ProcessCheckQueue(void) {
    // -- debugging aid: run the queue check under the scheduler lock
    ProcessLockAndPostpone();
    ProcessDoCheckQueue();
    ProcessUnlockAndSchedule();
}
//
// -- Add a process to the global process List
// ----------------------------------------
EXTERN_C INLINE
void ProcessDoAddGlobal(Process_t *proc) {
    // -- append `proc` to the global process list; the caller is expected to
    //    hold the scheduler lock (see ProcessAddGlobal() below)
    ListAddTail(&scheduler.globalProcesses, &proc->globalList);
}
EXTERN_C INLINE
void ProcessAddGlobal(Process_t *proc) {
    // -- locked wrapper around ProcessDoAddGlobal()
    ProcessLockAndPostpone();
    ProcessDoAddGlobal(proc);
    ProcessUnlockAndSchedule();
}
<|start_filename|>modules/kernel/src/process/ProcessIdle.cc<|end_filename|>
//===================================================================================================================
//
// ProcessIdle.cc -- This is an idle process to use on the CPU when there is nothing else to do
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Mar-27 Initial v0.5.1a ADCL Initial version
//
//===================================================================================================================
#include "cpu.h"
#include "heap.h"
#include "timer.h"
#include "process.h"
//
// -- Idle when there is nothing to do
// --------------------------------
EXTERN_C EXPORT KERNEL
void ProcessIdle(void)
{
    // -- demote this process to idle priority, then loop forever:
    //    enable interrupts and halt the cpu until the next interrupt arrives
    currentThread->priority = PTY_IDLE;
    while (true) {
        assert(currentThread->status == PROC_RUNNING);  // the idle process must always be runnable
        EnableInterrupts();
        HaltCpu();
    }
}
<|start_filename|>modules/kernel/src/hardware/hw-disc.cc<|end_filename|>
//===================================================================================================================
//
// hw-disc.cc -- hardware discovery structure implementation
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2017-Jun-07 Initial 0.1.0 ADCL Initial version
// 2019-Feb-14 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "hw-disc.h"
//
// -- This is the local version of what we have found for hardware; these will be located in the loader addr space.
// -------------------------------------------------------------------------------------------------------------
HIDDEN LOADER_BSS
HardwareDiscovery_t _localHwDisc;   // the backing storage (file-local, loader bss)
EXPORT LOADER_DATA
HardwareDiscovery_t *localHwDisc = &_localHwDisc;   // exported pointer used to reach the discovery data
<|start_filename|>platform/bcm2836/inc/platform-init.h<|end_filename|>
//===================================================================================================================
//
// platform-init.h -- Some platform-specific initialization routines
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-10 Initial v0.6.1b ADCL Initial version
//
//===================================================================================================================
#pragma once
#ifndef __HARDWARE_H__
# error "Use #include \"hardware.h\" and it will pick up this file; do not #include this file directly."
#endif
#include "types.h"
#define PlatformDiscovery()         // no additional platform discovery is needed (expands to nothing)
//
// -- all low memory is available on rpi2b -- except frame 0, which this check
//    deliberately reports as unavailable
//    ------------------------------------
#define LowMemCheck(frame) (frame?true:false)
<|start_filename|>modules/kernel/src/hardware/mb2.cc<|end_filename|>
//===================================================================================================================
//
// mb2.c -- This is the parser for the Multiboot 1 information structure
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2017-Jun-07 Initial 0.1.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "serial.h"
#include "mmu.h"
#include "printf.h"
#include "hw-disc.h"
//
// -- MB2 tag types
// -------------
enum {
    // -- tag type ids as defined by the Multiboot2 specification
    MB2_TAG_LAST_TAG = 0,           // terminates the tag list
    MB2_TAG_CMD_LINE = 1,
    MB2_TAG_LOADER = 2,
    MB2_TAG_MODULE = 3,
    MB2_TAG_BASIC_MEM = 4,
    MB2_TAG_BOOT_DEV = 5,
    MB2_TAG_MMAP = 6,
    MB2_TAG_VBE = 7,
    MB2_TAG_FRAMEBUFFER = 8,
    MB2_TAG_ELF_SYMS = 9,
    MB2_TAG_APM = 10,
    MB2_TAG_EFI32 = 11,
    MB2_TAG_EFI64 = 12,
    MB2_TAG_SMBIOS = 13,
    MB2_TAG_RSDP_V1 = 14,
    MB2_TAG_RSDP_V2 = 15,
    MB2_TAG_NET_INFO = 16,
    MB2_TAG_EFI_MMAP = 17,
    MB2_TAG_EFI_BOOT_SRV = 18,
    MB2_TAG_EFI_IMG_32 = 19,
    MB2_TAG_EFI_IMG_64 = 20,
    MB2_TAG_LOAD_ADDR = 21,
};
//
// -- The fixed multiboot info structure elements (precedes the tag list)
//    -------------------------------------------
typedef struct Mb2Fixed_t {
    uint32_t totalSize;             // total size of the mb2 info block, per the mb2 spec
    uint32_t reserved;
} __attribute__((packed)) Mb2Fixed_t;
//
// -- This is the basic tag header information -- every tag has one
//    -------------------------------------------------------------
typedef struct Mb2BasicTag_t {
    uint32_t type;                  // one of the MB2_TAG_* values above
    uint32_t size;                  // size of the entire tag, including this header (used to advance in Mb2Parse)
} __attribute__((packed)) Mb2BasicTag_t;
//
// -- The command line for the boot
//    -----------------------------
typedef struct Mb2CmdLine_t {
    Mb2BasicTag_t tag;              // type == 1
    char cmdLine[0];                // zero-length array (gcc extension): NUL-terminated string follows
} __attribute__((packed)) Mb2CmdLine_t;
//
// -- The bootloader name
//    -------------------
typedef struct Mb2Loader_t {
    Mb2BasicTag_t tag;              // type == 2
    char name[0];                   // NUL-terminated loader name follows
} __attribute__((packed)) Mb2Loader_t;
//
// -- A loaded module
//    ---------------
typedef struct Mb2Module_t {
    Mb2BasicTag_t tag;              // type == 3
    uint32_t modStart;              // start address of the module
    uint32_t modEnd;                // end address of the module
    char name[0];                   // NUL-terminated module name follows
} __attribute__((packed)) Mb2Module_t;
//
// -- Basic memory info
//    -----------------
typedef struct Mb2BasicMem_t {
    Mb2BasicTag_t tag;              // type == 4; size == 16
    uint32_t memLower;              // lower memory (kilobytes, per the mb2 spec)
    uint32_t memUpper;              // upper memory (kilobytes, per the mb2 spec)
} __attribute__((packed)) Mb2BasicMem_t;
//
// -- Boot device information
//    -----------------------
typedef struct Mb2BootDevice_t {
    Mb2BasicTag_t tag;              // type == 5; size == 20
    uint32_t biosDev;
    uint32_t partition;
    uint32_t subPartition;
} __attribute__((packed)) Mb2BootDevice_t;
//
// -- Memory Map
//    ----------
typedef struct Mb2MemMap_t {
    Mb2BasicTag_t tag;              // type == 6
    uint32_t entrySize;             // size of one entry below; the 16-byte header is NOT part of the entries
    uint32_t entryVersion;
    struct {
        uint64_t baseAddr;
        uint64_t length;
        uint32_t type;              // type 1 == available ram (see Mb2Parse)
        uint32_t reserved;
    } entries [0];                  // (tag.size - sizeof(Mb2MemMap_t)) / entrySize entries follow
} __attribute__((packed)) Mb2MemMap_t;
//
// -- The VBE Table
//    -------------
typedef struct Mb2VbeInfo_t {
    Mb2BasicTag_t tag;              // type == 7; size == 784
    uint16_t vbeMode;
    uint16_t vbeInterfaceSeg;
    uint16_t vbeInterfaceOff;
    uint16_t vbeInterfaceLen;
    uint8_t vbeControlInfo[512];
    uint8_t vbeModeInfo[256];
} __attribute__((packed)) Mb2VbeInfo_t;
//
// -- The FrameBuffer Info
//    --------------------
typedef struct Mb2FbInfo_t {
    Mb2BasicTag_t tag;              // type == 8
    uint64_t fbAddr;                // address of the framebuffer
    uint32_t fbPitch;               // bytes per scan line (per the mb2 spec)
    uint32_t fbWidth;
    uint32_t fbHeight;
    uint8_t fbBpp;                  // bits per pixel
    uint8_t fbType;                 // selects which union member below is valid
    union {
        struct {                    // when fbType == 0 (indexed palette)
            uint32_t palletColors;
            struct {
                uint8_t red;
                uint8_t green;
                uint8_t blue;
            } color [0];
        } pallet;
        struct {                    // when fbType == 1 (direct rgb)
            uint8_t redFieldPos;
            uint8_t redMaskSize;
            uint8_t greenFieldPos;
            uint8_t greenMaskSize;
            uint8_t blueFieldPos;
            uint8_t blueMaskSize;
        } rgb;
    };
} __attribute__((packed)) Mb2FbInfo_t;
//
// -- The ELF Symbols
//    ---------------
typedef struct Mb2ElfSymbols_t {
    Mb2BasicTag_t tag;              // type == 9
    uint16_t num;
    uint16_t entSize;
    uint16_t shndx;
    uint16_t reserved;
    uint8_t sectionHdrs[0];         // elf section headers follow
} __attribute__((packed)) Mb2ElfSymbols_t;
//
// -- The APM Table
//    -------------
typedef struct Mb2Apm_t {
    Mb2BasicTag_t tag;              // type == 10; size == 28
    uint16_t version;
    uint16_t cseg;
    uint32_t offset;
    uint16_t cseg16;
    uint16_t dseg;
    uint16_t flags;
    uint16_t csegLen;
    uint16_t cseg16Len;
    uint16_t dsegLen;
} __attribute__((packed)) Mb2Apm_t;
//
// -- EFI 32-bit system table pointer
//    -------------------------------
typedef struct Mb2Efi32_t {
    Mb2BasicTag_t tag;              // type == 11; size == 12
    uint32_t pointer;
} __attribute__((packed)) Mb2Efi32_t;
//
// -- EFI 64-bit system table pointer
//    -------------------------------
typedef struct Mb2Efi64_t {
    Mb2BasicTag_t tag;              // type == 12; size == 16
    uint64_t pointer;
} __attribute__((packed)) Mb2Efi64_t;
//
// -- SMBIOS Tables
//    -------------
typedef struct Mb2SmBios_t {
    Mb2BasicTag_t tag;              // type == 13
    uint8_t major;
    uint8_t minor;
    uint8_t reserved[6];
    uint8_t smBiosTables[0];        // the smbios tables follow (zero-length array)
} __attribute__((packed)) Mb2SmBios_t;
//
// -- RSDPv1 Tables
//    -------------
typedef struct Mb2RsdpV1_t {
    Mb2BasicTag_t tag;              // type == 14
    uint8_t rsdpV1Copy[0];          // a copy of the acpi v1 rsdp follows
} __attribute__((packed)) Mb2RsdpV1_t;
//
// -- RSDPv2 Tables
//    -------------
typedef struct Mb2RsdpV2_t {
    Mb2BasicTag_t tag;              // type == 15
    uint8_t rsdpV2Copy[0];          // a copy of the acpi v2 rsdp follows
} __attribute__((packed)) Mb2RsdpV2_t;
//
// -- Networking Information
//    ----------------------
typedef struct Mb2NetInfo_t {
    Mb2BasicTag_t tag;              // type == 16
    uint8_t dhcpAck[0];             // the dhcp ack follows
} __attribute__((packed)) Mb2NetInfo_t;
//
// -- EFI Memory Map
//    --------------
typedef struct Mb2EfiMemMap_t {
    Mb2BasicTag_t tag;              // type == 17
    uint32_t descriptorSize;
    uint32_t descriptorVer;
    uint8_t efiMemMap[0];           // the efi memory map follows
} __attribute__((packed)) Mb2EfiMemMap_t;
//
// -- EFI Boot Services Not Terminated (EFI Boot Services still available)
//    --------------------------------------------------------------------
typedef struct Mb2EfiBootServ_t {
    Mb2BasicTag_t tag;              // type == 18; size == 8 (presence-only tag; no payload)
} __attribute__((packed)) Mb2EfiBootServ_t;
//
// -- EFI 32-bit image handle pointer
//    -------------------------------
typedef struct Mb2EfiImage32_t {
    Mb2BasicTag_t tag;              // type == 19; size == 12
    uint32_t pointer;
} __attribute__((packed)) Mb2EfiImage32_t;
//
// -- EFI 64-bit image handle pointer
//    -------------------------------
typedef struct Mb2EfiImage64_t {
    Mb2BasicTag_t tag;              // type == 20; size == 16
    uint64_t pointer;
} __attribute__((packed)) Mb2EfiImage64_t;
//
// -- Image load phys address
//    -----------------------
typedef struct Mb2LoadPhysAddr_t {
    Mb2BasicTag_t tag;              // type == 21; size == 12
    uint32_t baseAddr;
} __attribute__((packed)) Mb2LoadPhysAddr_t;
//
// -- The multiboot 2 information structure
// -------------------------------------
EXTERN LOADER_BSS
void *mb2Data;                      // pointer to the multiboot 2 info block (NULL when not booted via mb2)
EXTERN LOADER_BSS
void *mb1Data;                      // the multiboot 1 info pointer; only printed for diagnostics in Mb2Parse
//
// -- Mb2Parse() -- Read the multiboot 2 information from the data provided and store it locally
// ------------------------------------------------------------------------------------------
EXTERN_C EXPORT LOADER
void Mb2Parse(void)
{
    // -- Walk the Multiboot2 information block tag by tag, recording anything
    //    of interest (modules, memory, framebuffer) in the local hw discovery.
    //    No-op when the loader did not hand over an mb2 structure.
    if (!mb2Data) return;

    // -- identity-map the info block so it can be read
    //    NOTE(review): only 2 pages are mapped; an info block larger than that
    //    would fault -- confirm the limit is safe for the boot loaders in use
    archsize_t mb2Page = (archsize_t)mb2Data;
    MmuMapToFrame(mb2Page, mb2Page >> 12, PG_KRN);
    MmuMapToFrame(mb2Page + PAGE_SIZE, (mb2Page + PAGE_SIZE) >> 12, PG_KRN);

    kprintf("Parsing MB2 Info at %p (MB1 info at %p)\n", mb2Data, mb1Data);
    kprintf(".. size = %x\n", ((uint32_t *)mb2Data)[0]);
    kprintf(".. resv = %x\n", ((uint32_t *)mb2Data)[1]);

    // -- the tag list starts right after the fixed header
    uint32_t locn = (uint32_t)mb2Data + sizeof(Mb2Fixed_t);
    bool lastTag = false;

    while (!lastTag) {
        Mb2BasicTag_t *tag = (Mb2BasicTag_t *)locn;
        kprintf("MB2 info: at %p: %x\n", tag, tag->type);

        switch (tag->type) {
        case MB2_TAG_LAST_TAG:
            kprintf(".. Last Tag\n");
            lastTag = true;
            break;

        case MB2_TAG_CMD_LINE:
            kprintf("%s\n", ((Mb2CmdLine_t *)locn)->cmdLine);
            break;

        case MB2_TAG_LOADER:
            kprintf("%s\n", ((Mb2Loader_t *)locn)->name);
            break;

        case MB2_TAG_MODULE: {
            kprintf("Module information present\n");
            Mb2Module_t *m = (Mb2Module_t *)locn;
            AddModule(m->modStart, m->modEnd, m->name);
            break;
        }

        case MB2_TAG_BASIC_MEM: {
            Mb2BasicMem_t *mem = (Mb2BasicMem_t *)locn;
            kprintf("Setting basic memory information\n");
            SetAvailLowerMem(mem->memLower);
            SetAvailUpperMem(mem->memUpper);
            break;
        }

        case MB2_TAG_BOOT_DEV: {
            kprintf(".. Boot Device\n");
            // Mb2BootDevice_t *dev = (Mb2BootDevice_t *)locn;
            // MbLocalSetBootDev(dev->biosDev, dev->partition, dev->subPartition, 0xffffffff);
            break;
        }

        case MB2_TAG_MMAP: {
            kprintf("Setting memory map data\n");
            Mb2MemMap_t *mmap = (Mb2MemMap_t *)locn;

            // -- FIX: the 16-byte tag header must be excluded before computing the
            //    entry count; the mb2 spec defines it as (size - header) / entry_size.
            //    The previous `tag->size / entrySize` included the header bytes in
            //    the count and could over-run the entry array for entry sizes <= 16.
            uint32_t s = (tag->size - sizeof(Mb2MemMap_t)) / mmap->entrySize;

            for (uint32_t i = 0; i < s; i ++) {
                // -- type 1 is available ram; everything else only grows the limit
                if (mmap->entries[i].type == 1) AddAvailMem(mmap->entries[i].baseAddr, mmap->entries[i].length);
                uint64_t newLimit = mmap->entries[i].baseAddr + mmap->entries[i].length;
                if (newLimit > GetUpperMemLimit()) SetUpperMemLimit(newLimit);
            }
            break;
        }

        case MB2_TAG_VBE: {
            kprintf(".. VBE info\n");
            // Mb2VbeInfo_t *vbe = (Mb2VbeInfo_t *)locn;
            // MbLocalSetVbe(vbe->vbeMode, vbe->vbeInterfaceSeg, vbe->vbeInterfaceOff, vbe->vbeInterfaceLen,
            //         vbe->vbeControlInfo, vbe->vbeModeInfo);
            break;
        }

        case MB2_TAG_FRAMEBUFFER: {
            Mb2FbInfo_t *fb = (Mb2FbInfo_t *)locn;
            SetFrameBufferAddr((uint16_t *)fb->fbAddr);
            SetFrameBufferPitch(fb->fbPitch);
            SetFrameBufferWidth(fb->fbWidth);
            SetFrameBufferHeight(fb->fbHeight);
            SetFrameBufferBpp(fb->fbBpp);
            SetFrameBufferType((FrameBufferType)fb->fbType);
            kprintf("Frame Buffer is at: %p; The pitch is: %p; The height is: %p\n",
                    (archsize_t)fb->fbAddr, (archsize_t)fb->fbPitch, (archsize_t)fb->fbHeight);
            break;
        }

        case MB2_TAG_ELF_SYMS: {
            kprintf(".. Elf Syms\n");
            // Mb2ElfSymbols_t *elf = (Mb2ElfSymbols_t *)locn;
            // MbLocalSetElfSyms(elf->num, elf->entSize, elf->shndx);
            break;
        }

        case MB2_TAG_APM: {
            kprintf(".. APM\n");
            // Mb2Apm_t *apm = (Mb2Apm_t *)locn;
            // MbLocalSetApm(apm->version, apm->cseg, apm->offset, apm->cseg16, apm->dseg, apm->flags, apm->csegLen,
            //         apm->cseg16Len, apm->dsegLen);
            break;
        }

        case MB2_TAG_EFI32:
            kprintf(".. EFI32 System Table\n");
            break;

        case MB2_TAG_EFI64:
            kprintf(".. EFI64 System Table\n");
            break;

        case MB2_TAG_SMBIOS:
            kprintf(".. SMBIOS Table\n");
            break;

        case MB2_TAG_RSDP_V1: {
            Mb2RsdpV1_t *rdsp = (Mb2RsdpV1_t *)locn;
            kprintf(".. RSDPV1 Table: %c%c%c%c%c%c%c%c\n", rdsp->rsdpV1Copy[0], rdsp->rsdpV1Copy[1],
                    rdsp->rsdpV1Copy[2], rdsp->rsdpV1Copy[3], rdsp->rsdpV1Copy[4], rdsp->rsdpV1Copy[5],
                    rdsp->rsdpV1Copy[6], rdsp->rsdpV1Copy[7]);
            break;
        }

        case MB2_TAG_RSDP_V2: {
            Mb2RsdpV2_t *rdsp = (Mb2RsdpV2_t *)locn;
            kprintf(".. RSDPV2Table: %c%c%c%c%c%c%c%c\n", rdsp->rsdpV2Copy[0], rdsp->rsdpV2Copy[1],
                    rdsp->rsdpV2Copy[2], rdsp->rsdpV2Copy[3], rdsp->rsdpV2Copy[4], rdsp->rsdpV2Copy[5],
                    rdsp->rsdpV2Copy[6], rdsp->rsdpV2Copy[7]);
            break;
        }

        case MB2_TAG_NET_INFO:
            kprintf(".. Network Information Table\n");
            break;

        case MB2_TAG_EFI_MMAP:
            kprintf(".. EFI Memory Map\n");
            break;

        case MB2_TAG_EFI_BOOT_SRV:
            kprintf(".. EFI Boot Services not Terminated\n");
            break;

        case MB2_TAG_EFI_IMG_32:
            kprintf(".. EFI 32-bit Image Pointer\n");
            break;

        case MB2_TAG_EFI_IMG_64:
            kprintf(".. EFI 64-bit Image Pointer\n");
            break;

        case MB2_TAG_LOAD_ADDR: {
            Mb2LoadPhysAddr_t *addr = (Mb2LoadPhysAddr_t *)locn;
            kprintf(".. Load Base Address: %p\n", addr->baseAddr);
            break;
        }

        default:
            kprintf("Unimplemented MB2 type: %x\n", tag->type);
            break;
        }

        // -- advance to the next tag; tags are padded so each starts on an
        //    8-byte boundary (the expression rounds tag->size up to a multiple of 8)
        locn += (tag->size + (~(tag->size - 1) & 0x7));
    }

    // -- done reading; tear down the temporary mapping
    MmuUnmapPage(mb2Page);
    MmuUnmapPage(mb2Page + PAGE_SIZE);
}
<|start_filename|>platform/bcm2836/gpio/GpioVars.cc<|end_filename|>
//===================================================================================================================
//
// GpioVars.cc -- These are the variables for the BCM2835 GPIO block
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "mmu.h"
#include "hardware.h"
//
// -- This is the device structure that will be used for the kernel to access the gpio
// --------------------------------------------------------------------------------
EXPORT KERNEL_DATA
GpioDevice_t kernelGpio = {
    .base = KRN_GPIO_BASE,          // mmio base address of the gpio block as seen by the kernel
    .GpioSelectAlt = _GpioSelectAlt,    // function pointers to the platform implementations
    .GpioEnablePin = _GpioEnablePin,
};
<|start_filename|>arch/arm/IsrDumpState.cc<|end_filename|>
//===================================================================================================================
//
// IsrDumpState.cc -- For exceptions, dump the current state of the processor from the registers
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-30 Initial 0.2.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "cpu.h"
#include "interrupt.h"
EXTERN_C EXPORT NORETURN KERNEL
void IsrDumpState(isrRegs_t *regs)
{
    // -- dump the register state captured at exception entry, then park the cpu;
    //    this function never returns (NORETURN)
    kprintf("At address: %p\n", regs);
    kprintf(" R0: %p R1: %p R2: %p\n", regs->r0, regs->r1, regs->r2);
    kprintf(" R3: %p R4: %p R5: %p\n", regs->r3, regs->r4, regs->r5);
    kprintf(" R6: %p R7: %p R8: %p\n", regs->r6, regs->r7, regs->r8);
    kprintf(" R9: %p R10: %p R11: %p\n", regs->r9, regs->r10, regs->r11);
    kprintf("R12: %p SP: %p LR_ret: %p\n", regs->r12, regs->sp_svc, regs->lr_ret);
    kprintf("SPSR_ret: %p type: %x\n", regs->spsr_ret, regs->type);
    kprintf("\nAdditional Data Points:\n");
    kprintf("User LR: %p User SP: %p\n", regs->lr_usr, regs->sp_usr);
    kprintf("Svc LR: %p\n", regs->lr_svc);
    // -- satisfy NORETURN: halt forever
    while (true) {
        Halt();
    }
}
<|start_filename|>modules/kernel/src/butler/ButlerCleanPmm.cc<|end_filename|>
//===================================================================================================================
//
// ButlerCleanPmm.cc -- Clean up a PMM frame, sanitizing it
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-10 Initial v0.6.1b ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "pmm.h"
#include "butler.h"
//
// -- The Butler has been notified of a PMM frame to clean
// ----------------------------------------------------
void ButlerCleanPmm(void)
{
    // -- delegate to the PMM to scrub (sanitize) the notified frame block
    //    NOTE(review): unlike most kernel function definitions in this project, this
    //    one carries no EXTERN_C EXPORT KERNEL decoration -- confirm that is intended
    PmmScrubBlock();
}
<|start_filename|>platform/pc/pic/PicUnmaskIrq.cc<|end_filename|>
//===================================================================================================================
//
// PicUnmaskIrq.cc -- Enable the PIC to pass along an IRQ
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "hardware.h"
#include "pic.h"
//
// -- Enable the PIC to pass along an IRQ (some call it unmasking)
// ------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void _PicUnmaskIrq(PicDevice_t *dev, Irq_t i)
{
    // -- guard clauses: a device is required and only irq lines 0..15 exist
    if (!dev) return;
    if (i < 0 || i > 15) return;

    // -- lines 0..7 live on the master PIC, 8..15 on the slave (re-based to 0..7)
    int line = (int)i;
    bool onSlave = (line >= 8);
    uint16_t port = onSlave ? (uint16_t)(PIC2 + PIC_SLAVE_DATA) : (uint16_t)(PIC1 + PIC_MASTER_DATA);
    if (onSlave) line -= 8;

    // -- clear just this line's mask bit, leaving the other lines untouched
    outb(port, inb(port) & ~(1 << line));
}
<|start_filename|>modules/kernel/src/debugger/DebugTimer.cc<|end_filename|>
//===================================================================================================================
//
// DebugTimer.cc -- Debug the timer across all cores
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-05 Initial v0.6.0a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "serial.h"
#include "process.h"
#include "debugger.h"
//
// -- Debug the timer over all CPUs
// -----------------------------
EXTERN_C EXPORT KERNEL
void DebugTimer(void)
{
    // -- interactive debugger loop for the timer module: prompt, parse, dispatch;
    //    returns only when the user exits back to the top-level debugger state
    while (true) {
        // -- only re-prompt when no partially-entered command is pending
        if (kStrLen(debugCommand) == 0) DebugPrompt(debugState);
        DebuggerCommand_t cmd = DebugParse(debugState);
        switch(cmd) {
        case CMD_EXIT:
            debugState = DBG_HOME;      // back to the top-level debugger
            return;
        case CMD_COUNTS:
            DebugTimerCounts();
            debugState = DBG_TIMER;
            break;
        case CMD_CONFIG:
            // DebugTimerConfig();      // not implemented yet
            debugState = DBG_TIMER;
            break;
        case CMD_ERROR:
        default:
            kprintf(ANSI_ATTR_BOLD ANSI_FG_RED
                    "Something went wrong (timer) -- a bug in the debugger is likely\n" ANSI_ATTR_NORMAL);
            continue;
        }
    }
}
<|start_filename|>platform/pc/apic/LApicBroadcastInit.cc<|end_filename|>
//===================================================================================================================
//
// LApicBroadcastInit.cc -- Broadcast an INIT IPI to all cores
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Jun-16 Initial 0.4.6 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "timer.h"
#include "hardware.h"
#include "pic.h"
//
// -- Broadcast an INIT to all CPUs (including myself)
// ------------------------------------------------
EXTERN_C EXPORT LOADER
void _LApicBroadcastInit(PicDevice_t *dev, uint32_t core)
{
    // -- guard against a missing device structure
    if (!dev) return;

    // -- the high half of the ICR carries the destination APIC ID
    LapicIcrHi_t hi = {
        .destination = (uint8_t)core,
    };

    // -- build the low half: an INIT IPI, physical destination mode, level asserted
    LapicIcrLo_t lo = {0};
    lo.deliveryMode = DELMODE_INIT;
    lo.destinationMode = 0;                 // physical destination mode
    lo.deliveryStatus = 1;                  // NOTE(review): the SDM documents this bit as read-only -- confirm intent
    lo.level = 1;                           // assert the INIT
    lo.trigger = 1;                         // level-triggered
    lo.destinationShorthand = 0b00;         // no shorthand; use the destination field in ICR_HI

    // -- write the high half first; writing the low half is what sends the IPI
    MmioWrite(LAPIC_MMIO + LAPIC_ICR_HI, hi.raw);
    MmioWrite(LAPIC_MMIO + LAPIC_ICR_LO, lo.raw);
}
<|start_filename|>arch/x86/mmu/MmuNewVirtualSpace.cc<|end_filename|>
//===================================================================================================================
//
// MmuNewVirtualSpace.cc -- For a new process, create the user virtual address space
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-16 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "spinlock.h"
#include "pmm.h"
#include "mmu.h"
//
// -- for x86, we need to copy the kernel address space from PD[512] to PD[1023]
// --------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
frame_t MmuNewVirtualSpace(frame_t stack)
{
    // NOTE(review): the `stack` parameter is unused here -- confirm whether callers expect it to be mapped
    frame_t rv = PmmAllocateFrame();        // frame that will hold the new Page Directory
    MmuClearFrame(rv);

    // -- the MMU_NEW_TABLE_INIT mapping is shared across callers, so serialize access to it
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(mmuTableInitLock) {
        MmuMapToFrame(MMU_NEW_TABLE_INIT, rv, PG_KRN | PG_WRT);

        PageEntry_t *tgtPD = (PageEntry_t *)MMU_NEW_TABLE_INIT;
        PageEntry_t *srcPD = (PageEntry_t *)PAGE_DIR_VADDR;

        // -- copy the kernel half of the address space (PD[512]..PD[1023]) into the new PD
        for (int i = 512; i < 1024; i ++) tgtPD[i] = srcPD[i];

        MmuUnmapPage(MMU_NEW_TABLE_INIT);
        SPINLOCK_RLS_RESTORE_INT(mmuTableInitLock, flags);
    }

    // -- return the frame of the new Page Directory
    return rv;
}
<|start_filename|>platform/bcm2836/pic/PicVars.cc<|end_filename|>
//===================================================================================================================
//
// PicVars.cc -- These are the variables for the bcm2835 Pic
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "interrupt.h"
#include "cpu.h"
#include "pic.h"
//
// -- This is the data that will be used to manage the pic
// ----------------------------------------------------
EXPORT KERNEL_DATA
Bcm2835Pic_t bcm2835Data = {
    .picLoc = BCM2835_PIC,                  // MMIO location of the interrupt controller
    .timerLoc = BCM2835_TIMER,              // MMIO location of the timer
};


//
// -- This is the device description for the BCM2835 PIC, consumed through the generic PicDevice_t interface
//    -------------------------------------------------------------------------------------------------------
EXPORT KERNEL_DATA
PicDevice_t picBcm2835 = {
    .device = { .deviceData = (DeviceData_t *)&bcm2835Data, },
    .ipiReady = false,                      // IPIs are not available until initialization completes
    .PicInit = _PicInit,
    .PicMaskIrq = _PicMaskIrq,
    .PicUnmaskIrq = _PicUnmaskIrq,
    .PicEoi = (void (*)(PicDevice_t *, Irq_t))EmptyFunction,    // no EOI action needed on this PIC
    .PicDetermineIrq = _PicDetermineIrq,
    .PicBroadcastIpi = _PicBroadcastIpi,
};


//
// -- This is the pic we are going to use
//    -----------------------------------
EXPORT KERNEL_DATA
PicDevice_t *picControl = &picBcm2835;


//
// -- An array of mailbox/IPI handlers, indexed by IPI message number
//    ---------------------------------------------------------------
EXPORT KERNEL_DATA
MbHandler_t mbHandlers[MAX_IPI] = {
    NULL,
    (MbHandler_t)Halt,                      // We are panicing all CPUs; do nothing
    IpiHandleTlbFlush,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    IpiHandleDebugger,
};                                          // unused slots stay NULL; the table holds MAX_IPI entries


//
// -- This is the number of cores that have responded to an IPI
//    ---------------------------------------------------------
EXPORT KERNEL_BSS
AtomicInt_t mb0Resp = {0};
<|start_filename|>platform/pc/cores/CoresStart.cc<|end_filename|>
//===================================================================================================================
//
// CoresStart.cc -- Start the cores for the x86-pc
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Jan-04 Initial v0.5.0d ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "timer.h"
#include "hardware.h"
#include "process.h"
#include "printf.h"
#include "pmm.h"
#include "pic.h"
//
// -- a declaration of the entry point for the cores
// -----------------------------------------------
EXTERN_C EXPORT KERNEL
void entryAp(void);
//
// -- start the other cores; remains in the kernel since we may want to do this later as well
// ---------------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void CoresStart(void)
{
    // -- nothing to do unless more than one core was discovered
    if (cpus.cpusDiscovered < 2) return;

    //
    // -- Load the trampoline code into the low 1MB of memory
    //    ---------------------------------------------------
    uint8_t *trampoline = (uint8_t *)X86_TRAMPOLINE;        // for S&G, start at 32K
    extern uint8_t _smpStart[];
    extern uint8_t _smpEnd[];

    // -- identity-map the trampoline page writable so the AP entry code can be copied into it
    MmuUnmapPage(X86_TRAMPOLINE);
    MmuMapToFrame(X86_TRAMPOLINE, X86_TRAMPOLINE >> 12, PG_KRN | PG_WRT);
    kprintf("Copying the AP entry code to %p\n", trampoline);
    kprintf("... start at %p\n", _smpStart);
    kprintf("... length is %p\n", _smpEnd - _smpStart);
    kMemMove(trampoline, _smpStart, _smpEnd - _smpStart);   // something in here is overwriting MMU tables
    kprintf("... moved...\n");

    // -- remap as read only!! (left disabled -- see the note above about MMU tables)
    // MmuUnmapPage(X86_TRAMPOLINE);
    // MmuMapToFrame(X86_TRAMPOLINE, X86_TRAMPOLINE >> 12, PG_KRN | PG_DEVICE);
    kprintf("Memory remapped\n");

    cpus.perCpuData[0].location = ArchCpuLocation();

    // -- start each AP in turn: mark it CPU_STARTING, send INIT then SIPI at the
    //    trampoline, then wait for the AP to change its own state before the next one
    for (int i = 1; i < cpus.cpusDiscovered; i ++) {
        cpus.cpuStarting = i;
        AtomicSet(&cpus.perCpuData[cpus.cpuStarting].state, CPU_STARTING);

        kprintf("Starting core %d \n", i);
        picControl->PicBroadcastInit(picControl, i);
        picControl->PicBroadcastSipi(picControl, i, (archsize_t)trampoline);

        // -- busy-wait until this AP reports it is no longer CPU_STARTING
        while (AtomicRead(&cpus.perCpuData[cpus.cpuStarting].state) == CPU_STARTING) {}
    }
}
<|start_filename|>modules/kernel/inc/syscall.h<|end_filename|>
//===================================================================================================================
// syscall.h -- This is the kernel internal definitions for handling system calls
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-02 Initial 0.1.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#pragma once
#include "types.h"
//
// -- System function 1: Receive a message
// ------------------------------------
EXTERN_C EXPORT SYSCALL
void SyscallReceiveMessage(isrRegs_t *regs);
//
// -- System function 2: Send a message
// ------------------------------------
EXTERN_C EXPORT SYSCALL
void SyscallSendMessage(isrRegs_t *regs);
<|start_filename|>modules/kernel/src/debugger/DebugMessageQueue.cc<|end_filename|>
//===================================================================================================================
//
// DebugMessageQueue.cc -- Debug the message queues
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-10 Initial v0.6.1a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "pic.h"
#include "debugger.h"
//
// -- Debug the message queues
// ------------------------
EXTERN_C EXPORT KERNEL
void DebugMsgq(void)
{
    // -- interactive loop: prompt, parse a command, dispatch it; only CMD_EXIT leaves
    for ( ; ; ) {
        // -- prompt only when no command text is already waiting to be parsed
        if (kStrLen(debugCommand) == 0) DebugPrompt(debugState);

        DebuggerCommand_t cmd = DebugParse(debugState);

        if (cmd == CMD_EXIT) {
            // -- return to the top-level debugger state
            debugState = DBG_HOME;
            return;
        } else if (cmd == CMD_STAT) {
            DebugMsgqStatus();
            debugState = DBG_MSGQ;
        } else if (cmd == CMD_SHOW) {
            debugState = DBG_MSGQ;
        } else {
            // -- CMD_ERROR and anything unrecognized lands here
            kprintf(ANSI_ATTR_BOLD ANSI_FG_RED
                    "Something went wrong (msgq) -- a bug in the debugger is likely\n" ANSI_ATTR_NORMAL);
        }
    }
}
<|start_filename|>modules/kernel/src/syscall/SyscallHandler.cc<|end_filename|>
//===================================================================================================================
//
// SyscallHandler.cc -- Handler for system calls
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Handle system calls
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-01 Initial 0.1.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "syscall.h"
#include "interrupt.h"
#include <errno.h>
//
// -- This is a static function to make sure there is always a function to call
// -------------------------------------------------------------------------
EXTERN_C HIDDEN SYSCALL
void SyscallNullHandler(isrRegs_t *regs)
{
    // -- report "function not implemented" through the syscall return register
    SYSCALL_RETURN(regs) = -ENOSYS;
}
//
// -- The ISR Handler Table
// ---------------------
HIDDEN SYSCALL_DATA
isrFunc_t syscallHandlers[] = {
    SyscallNullHandler,                     // Function 0: trivial call (always returns -ENOSYS)
    SyscallReceiveMessage,                  // Function 1: receive a message
    SyscallSendMessage,                     // Function 2: send a message
};
//
// -- This is the ISR Handler routine
// -------------------------------
EXTERN_C EXPORT SYSCALL
void SyscallHandler(isrRegs_t *regs)
{
    // -- number of entries in the dispatch table
    const uint32_t handlerCount = sizeof(syscallHandlers) / sizeof(isrFunc_t);
    uint32_t func = (uint32_t)SYSCALL_FUNC_NO(regs);

    // -- out-of-range function numbers (including negative ones, which wrap
    //    to large unsigned values) fall back to the -ENOSYS handler
    if (func >= handlerCount) {
        SyscallNullHandler(regs);
        return;
    }

    syscallHandlers[func](regs);
}
<|start_filename|>modules/kernel/src/msgq/MsgqVars.cc<|end_filename|>
//===================================================================================================================
//
// MsgqVars.cc -- Message Queue variables
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- --------------------------------------------------------------------------
// 2020-Apr-09 Initial v0.6.1a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "msgq.h"
//
// -- This is the pointer to the message queue structure
// --------------------------------------------------
EXPORT KERNEL_BSS
MessageQueueList_t msgqList;                // global list of all message queues; zero-initialized in .bss
<|start_filename|>modules/kernel/src/ipi/IpiHandleDebugger.cc<|end_filename|>
//===================================================================================================================
//
// IpiHandleDebugger.cc -- Stop the cores and handle debugger requests as required
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-03 Initial v0.6.0a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "pic.h"
#include "timer.h"
#include "debugger.h"
//
// -- Stop a core and wait for permission to continue
// -----------------------------------------------
EXTERN_C EXPORT KERNEL
void IpiHandleDebugger(isrRegs_t *regs)
{
    // -- check in so the initiating core knows this CPU is engaged
    AtomicInc(&debugCommunication.coresEngaged);

    switch(debugCommunication.command) {
    case DIPI_ENGAGE:
        // -- no action required; just hold the core until released below
        break;

    case DIPI_TIMER:
        // -- get the current timer from each core and report the results
        debugCommunication.timerValue[thisCpu->cpuNum] = TimerCurrentCount(timerControl);
        AtomicInc(&debugCommunication.coresResponded);
        break;

    default:
        // -- FIX: the format string has two conversions (%d, %d) but only one argument
        //    was passed; the CPU number was missing and the command printed in its place
        kprintf("\n\nCPU%d: Unimplemented Debugger command %d\n", thisCpu->cpuNum,
                debugCommunication.command);
        break;
    }

    // -- spin until the initiating core releases all cores, then acknowledge the IPI
    while (AtomicRead(&debugCommunication.coresEngaged) != 0) {}
    PicEoi(picControl, (Irq_t)0);
}
<|start_filename|>arch/x86/cpu/ArchGdtSetup.cc<|end_filename|>
//===================================================================================================================
//
// ArchGdtSetup.cc -- Initialize the GDT into its final location
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This function will be used to initialize the GDT to its final location. From the memory map located here,
// http://eryjus.ddns.net:3000/projects/century-os/wiki/Low_Memory_Usage_Map, the final GDT will be located at
// physical address `0x10000`. It may take up several frames depending on the number of CPUs that we support
// and/or the number of CPUs we discover. Currently, this is a small number of CPUs.
//
// The number of GDT entries we need is easily calculated: 9 + (CPU_count * 3). Each GDT Entry is 8 bytes long.
// Therefore, the number of CPUs we can support in a single frame is: floor(((4096 / 8) - 9) / 3) = 167. 167
// CPUs is quite simply a ridiculous number of CPUs at this juncture.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Jan-05 Initial v0.5.0e ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "mmu.h"
#include "printf.h"
#include "cpu.h"
//
// -- Initialize the GDT to its final location
// ----------------------------------------
EXTERN_C EXPORT LOADER
void ArchGdtSetup(void)
{
    // -- first, calculate the number of frames we need for the GDT (9 fixed entries + 3 per CPU, 8 bytes each)
    size_t gdtEntries = (cpus.cpusDiscovered * 3) + 9;
    size_t gdtFrames = ((gdtEntries * 8) / PAGE_SIZE) + ((gdtEntries * 8) & (PAGE_SIZE - 1) ? 1 : 0);   // ceiling

    // -- Now, we can map the GDT pages and clear the table
    archsize_t vGdt = X86_VIRT_GDT;
    frame_t fGdt = X86_PHYS_GDT >> 12;          // convert the physical address to a frame number

    for (size_t i = 0; i < gdtFrames; i ++, fGdt ++, vGdt += PAGE_SIZE) {
        MmuMapToFrame(vGdt, fGdt, PG_KRN | PG_WRT);
    }

    kMemSetB((void *)X86_VIRT_GDT, 0x00, gdtFrames * PAGE_SIZE);

    // -- Now, we start populating the GDT Entries -- first the 9 standard entries
    Descriptor_t *gdt = (Descriptor_t *)X86_VIRT_GDT;
    gdt[0] = NULL_GDT;                          // 0x00: NULL GDT Entry (required)
    gdt[1] = KCODE_GDT;                         // 0x08: kernel code
    gdt[2] = KDATA_GDT;                         // 0x10: kernel stack (and data)
    gdt[3] = UCODE_GDT;                         // 0x18: user code
    gdt[4] = UDATA_GDT;                         // 0x20: user stack (and data)
    gdt[5] = KDATA_GDT;                         // 0x28: kernel data
    gdt[6] = UDATA_GDT;                         // 0x30: user data
    gdt[7] = LCODE_GDT;                         // 0x38: loader code (not used)
    gdt[8] = LDATA_GDT;                         // 0x40: loader data and stack (not used)

    // -- now the TSS and gs segment selector for each CPU (Redmine #433 goes here)
    for (int i = 0; i < cpus.cpusDiscovered; i ++) {
        gdt[ 9 + (i * 3)] = GS_GDT((archsize_t)(&cpus.perCpuData[i].cpu));      // `gs` for this CPU
        gdt[10 + (i * 3)] = TSS32_GDT((archsize_t(&cpus.perCpuData[i].tss)));   // 32-bit TSS entry for this CPU
        gdt[11 + (i * 3)] = NULL_GDT;           // NULL TSS entry part 2 (reserved for 64-bit)
    }

    // -- Finally we need to load the new GDT; this packed structure is the operand for `lgdt`
    struct {
        uint16_t size;                          // limit: table size in bytes, minus 1
        uintptr_t loc;                          // linear address of the table
    } __attribute__((packed)) gdtRec = {
        (uint16_t)((gdtEntries * 8) - 1),
        X86_VIRT_GDT,
    };

    // -- load the GDT register
    ArchLoadGdt(&gdtRec);
    kprintf("Permanent GDT established\n");
}
<|start_filename|>modules/kernel/src/butler/ButlerMemCheck.cc<|end_filename|>
//===================================================================================================================
//
// ButlerMemCheck.cc -- Check the memory frame to see if it can be freed
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Up to 4MB, check the memory to see if it can be freed.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-11 Initial v0.6.1b ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "pmm.h"
#include "butler.h"
//
// -- Check the memory to see if it is eligible to be freed
// -----------------------------------------------------
EXTERN_C EXPORT LOADER
bool ButlerMemCheck(frame_t frame)
{
    // -- frames below 1MB (frame 0x100) have their own dedicated check
    if (frame < 0x100) return LowMemCheck(frame);

    archsize_t addr = frame << 12;              // convert the frame number to a physical address
    archsize_t kernelStart = 0x100000;          // the kernel image begins at 1MB
    archsize_t kernelEnd = krnStabPhys + krnStabSize;

    // -- anything inside the kernel image cannot be freed; everything else can
    return !(addr >= kernelStart && addr < kernelEnd);
}
<|start_filename|>modules/kernel/src/heap/HeapRemoveFromList.cc<|end_filename|>
//===================================================================================================================
//
// HeapRemoveFromList.cc -- Remove an ordered list entry from the list
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Remove an ordered list entry from the list.
//
// On calling, the contents of entry have the following characteristics:
// +-----------------------------------------------------------------+
// | Entry |
// +-----------------------------------------------------------------+
// | block -- points to proper header |
// | size -- the size of the block, with header/footer |
// | prev -- points to the previous (smaller) block (may be null) |
// | next -- points to the next (larger) block (may be null) |
// +-----------------------------------------------------------------+
// | block->entry -- is equal to the parm entry |
// +-----------------------------------------------------------------+
//
// On exit, the following characteristics:
// +-----------------------------------------------------------------+
// | Entry |
// +-----------------------------------------------------------------+
// | prev -- zero |
// | next -- zero |
// +-----------------------------------------------------------------+
// | block->entry -- unchanged (an important fact to remember) |
// +-----------------------------------------------------------------+
// | kHeap->heapMemory -- may be NULL on return if last entry is |
// | removed |
// | kHeap->heap512 -- may be NULL on return if last entry is |
// | removed or nothing is >= 512 bytes |
// | kHeap->heap4K -- may be NULL on return if last entry is |
// | removed or nothing is >= 4096 bytes |
// | kHeap->heap16K -- may be NULL on return if last entry is |
// | removed or nothing is >= 16384 bytes |
// +-----------------------------------------------------------------+
//
// if on entry, entry->next != null, then entry->next->prev is set to
// entry->prev.
//
// if on entry, entry->prev != null, then entry->prev->next is set to
// entry->next.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-12 Initial version
// 2012-Sep-19 Leveraged from Century
// 2018-Jun-01 Initial 0.1.0 ADCL Copied this file from century32 to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
//
// -- Remove an entry from the Ordered List
// -------------------------------------
void HeapRemoveFromList(OrderedList_t *entry)
{
    // -- a NULL entry is a heap-corruption-level error
    if (!assert(entry != NULL)) HeapError("NULL entry in HeapRemoveFromList()", "");
    HeapValidateHdr(entry->block, "HeapRemoveFromList()");

    // -- any size-bookmark pointer that references this entry advances to the next one
    if (kHeap->heapMemory == entry) kHeap->heapMemory = kHeap->heapMemory->next;
    if (kHeap->heap512 == entry) kHeap->heap512 = kHeap->heap512->next;
    if (kHeap->heap1K == entry) kHeap->heap1K = kHeap->heap1K->next;
    if (kHeap->heap4K == entry) kHeap->heap4K = kHeap->heap4K->next;
    if (kHeap->heap16K == entry) kHeap->heap16K = kHeap->heap16K->next;

    // -- unlink the entry from the doubly-linked ordered list and reset its links
    if (entry->next) entry->next->prev = entry->prev;
    if (entry->prev) entry->prev->next = entry->next;
    entry->next = entry->prev = 0;
}
<|start_filename|>modules/kernel/inc/cpu.h<|end_filename|>
//===================================================================================================================
//
// cpu.h -- Standard CPU functions
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// These are the common low-level functions that need to be implemented to manage the CPU resource by the OS.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-May-25 Initial 0.1.0 ADCL Initial version as I move functions from century32
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#pragma once
#define __CPU_H__
#include "types.h"
//
// -- This is the state of a CPU
// --------------------------
typedef enum {
    CPU_STOPPED = 0,                        // the core has not been started
    CPU_STARTING = 1,                       // startup has been initiated; the core has not yet checked in (see CoresStart())
    CPU_STARTED = 2,                        // the core is up and running (see NextCpu())
    CPU_BAD = 0xffff,                       // the core is unusable -- presumably failed to start; confirm usage
} CpuState_t;
//
// -- forward declare the process structure
// -------------------------------------
struct Process_t;
//
// -- Set up the common CPU elements across all archs. The actual ArchCpu_t structure will be defined
// in the arch-cpu.h include.
// ------------------------------------------------------------------------------------------------
#define COMMON_CPU_ELEMENTS \
    int cpuNum; /* this CPU's ordinal number (index into cpus.perCpuData) */ \
    archsize_t stackTop; /* presumably the top of this CPU's stack -- confirm */ \
    archsize_t location; /* arch-specific CPU location (set from ArchCpuLocation()) */ \
    struct AtomicInt_t state; /* holds a CpuState_t value; updated atomically */ \
    int kernelLocksHeld; /* count of kernel locks currently held by this CPU */ \
    bool reschedulePending; /* a reschedule was requested -- confirm when it is consumed */ \
    int disableIntDepth; /* nesting depth of interrupt-disable calls */ \
    ArchCpu_t *cpu; /* pointer back to this CPU's arch-specific structure */ \
    INT_UNSTABLE struct Process_t *process; /* the process currently running on this CPU */ \
    uint64_t lastTimer; /* last timer value observed -- presumably for time accounting; confirm */ \
    uint64_t cpuIdleTime; /* accumulated idle time -- confirm units against timer driver */ \
    frame_t stackFrame; /* physical frame backing this CPU's stack */
#if __has_include("arch-cpu.h")
# include "arch-cpu.h"
#endif
//
// -- Mark this CPU as started so the next one can be released
// --------------------------------------------------------
#define NextCpu(c) AtomicSet(&cpus.perCpuData[c].state, CPU_STARTED)
//
// -- Halt the CPU
// ------------
EXTERN_C EXPORT KERNEL
void Halt(void) __attribute__((noreturn));
//
// -- Panic-halt the OS, reporting the problems and the system state
// --------------------------------------------------------------
EXTERN_C EXPORT NORETURN KERNEL
void CpuPanic(const char *reason, isrRegs_t *regs);
//
// -- Panic-halt the OS, pushing the registers onto the stack
// -------------------------------------------------------
EXTERN_C EXPORT NORETURN KERNEL
void CpuPanicPushRegs(const char *reason);
//
// -- Enable interrupts if they are disabled; assembly language function
// ------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void EnableInterrupts(void);
//
// -- Disable interrupts and return the current flags state; assembly language function
// ---------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
archsize_t DisableInterrupts(void);
//
// -- Restore the flags state back to the provided state; note all flags are updates; assembly language function
// ----------------------------------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void RestoreInterrupts(archsize_t flg);
//
// -- Set a block of memory to the specified byte
// -------------------------------------------
EXTERN_C EXPORT KERNEL
void kMemSetB(void *buf, uint8_t wrd, size_t cnt);
//
// -- Set a block of memory to the specified word
// -------------------------------------------
EXTERN_C EXPORT KERNEL
void kMemSetW(void *buf, uint16_t wrd, size_t cnt);
//
// -- Move a block of memory from one location to another
// ---------------------------------------------------
EXTERN_C EXPORT KERNEL
void kMemMove(void *tgt, void *src, size_t cnt);
//
// -- Copy a string from one location to another
// ------------------------------------------
EXTERN_C EXPORT KERNEL
void kStrCpy(char *dest, const char *src);
//
// -- Copy a string from one location to another
// ------------------------------------------
EXTERN_C EXPORT KERNEL
int kStrCmp(const char *str1, const char *str2);
//
// -- Get the length of a string
// --------------------------
EXTERN_C EXPORT KERNEL
size_t kStrLen(const char *s);
//
// -- Start any APs that need to be started
// -------------------------------------
EXTERN_C EXPORT KERNEL
void CoresStart(void);
//
// -- Perform the initialization of the cpu data structure
// ----------------------------------------------------
EXTERN_C EXPORT LOADER
void CpuInit(void);
//
// -- A do-nothing function for use with drivers
// ------------------------------------------
EXTERN_C EXPORT KERNEL
void EmptyFunction(void);
//
// -- This structure defines all the data for all the cpus on the system
// ------------------------------------------------------------------
typedef struct Cpu_t {
    int cpusDiscovered;                     // number of CPUs found during hardware discovery
    SMP_UNSTABLE int cpusRunning;           // number of CPUs currently running
    SMP_UNSTABLE int cpuStarting;           // index of the CPU currently being started (see CoresStart())
    ArchCpu_t perCpuData[MAX_CPUS];         // per-CPU data, indexed by CPU number
} Cpu_t;
//
// -- A function to initialize the CPU structure for the cpu starting
// ---------------------------------------------------------------
EXTERN_C EXPORT KERNEL
archsize_t CpuMyStruct(void);
//
// -- This is the cpu abstraction variable structure
// ----------------------------------------------
EXTERN EXPORT KERNEL_BSS
Cpu_t cpus;
#include "atomic.h"
<|start_filename|>modules/kernel/src/msgq/MsgqRelease.cc<|end_filename|>
//===================================================================================================================
//
// MsgqRelease.cc -- Remove the reference to this message queue
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This is going to be a little tricky since we first need to prove that this process contains a reference to the
// Queue.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- --------------------------------------------------------------------------
// 2020-Apr-09 Initial v0.6.1a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
#include "process.h"
#include "cpu.h"
#include "spinlock.h"
#include "lists.h"
#include "msgq.h"
//
// -- Release the reference to this message queue; marking for deletion when the reference count is 0
// -----------------------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void MessageQueueRelease(MessageQueue_t *msgq)
{
    Reference_t *ref = NULL;

    // -- walk this process's reference list looking for a reference to this queue
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(currentThread->references.lock) {
        // FIX: `&currentThread` was mis-encoded as the mojibake "¤tThread"
        //      (an HTML "&curren;" entity swallowed the '&' and "curren"); restored here
        ListHead_t::List_t *wrk = currentThread->references.list.next;

        while (wrk != &currentThread->references.list) {
            Reference_t *r = FIND_PARENT(wrk, Reference_t, procRefList);

            if (r->resAddr == msgq) {
                // -- found it: unlink it from the process's reference list
                ListRemoveInit(&r->procRefList);
                currentThread->references.count --;
                ref = r;
                goto exit;
            }

            wrk = wrk->next;
        }

exit:
        SPINLOCK_RLS_RESTORE_INT(currentThread->references.lock, flags);
    }

    // -- if we did not find anything, we're done
    if (!ref) return;

    // -- now, we need to clean up the Message Queue
    flags = SPINLOCK_BLOCK_NO_INT(msgq->procList.lock) {
        // NOTE(review): this removes via procRefList again -- confirm whether the
        //               msgq-side list uses a different node member in Reference_t
        ListRemoveInit(&ref->procRefList);
        msgq->procList.count --;
        SPINLOCK_RLS_RESTORE_INT(msgq->procList.lock, flags);
    }

    FREE(ref);
}
<|start_filename|>modules/kernel/inc/spinlock.h<|end_filename|>
//===================================================================================================================
//
// spinlock.h -- Structures for spinlock management
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Oct-14 Initial 0.1.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#pragma once
#include "types.h"
#include "cpu.h"
//
// -- This macro basically disappears because but helps to delineate the block that requires the lock
//
// Block Usage:
// ------------
//
// Spinlock lock = {0};
// SPINLOCK_BLOCK(lock) {
// // Do some important stuff here...
// SPINLOCK_RLS(lock);
// }
//
// Note that in this context, the trailing ';' is required.
// -----------------------------------------------------------------------------------------------
#define SPINLOCK_BLOCK(lock) SpinLock(&(lock));
//
// -- This macro only exists so I do not need to type an '&' with each unlock
// -----------------------------------------------------------------------
#define SPINLOCK_RLS(lock) SpinUnlock(&(lock))
//
// -- This macro exists to help with code readability -- get a lock and save interrupts
//
// Block Usage:
// ------------
//
// Spinlock lock = {0};
//     archsize_t flags = SPINLOCK_BLOCK_NO_INT(lock) {
//         // Do some important stuff here...
//         SPINLOCK_RLS_RESTORE_INT(lock, flags);
//     }
//
// Note that in this context, the trailing ';' is required.
// ----------------------------------------------------------------------------------
#define SPINLOCK_BLOCK_NO_INT(lock) ({ \
archsize_t flags = DisableInterrupts(); \
SpinLock(&(lock)); \
flags; \
});
//
// -- This macro exists to help with code readability -- restore interrupts and release lock
// ---------------------------------------------------------------------------------------
#define SPINLOCK_RLS_RESTORE_INT(lock,f) do { \
SpinUnlock(&(lock)); \
RestoreInterrupts(f); \
} while (false)
//
// -- This is the spinlock structure which notes who holds the lock
// -------------------------------------------------------------
typedef struct Spinlock_t {
    SMP_UNSTABLE int lock;      // -- lock word: 0 = free, 1 = held; flipped atomically by SpinLock()/SpinUnlock()
} Spinlock_t;
//
// -- This inline function will lock a spinlock, busy looping indefinitely until a lock is obtained
// ---------------------------------------------------------------------------------------------
EXPORT INLINE
void SpinLock(Spinlock_t *lock) {
    // -- spin until we atomically flip the lock word from 0 (free) to 1 (held);
    //    a failed exchange stores the observed value into `expected`, so reset it each try
    int expected = 0;
    while (!__atomic_compare_exchange_n(&(lock->lock), &expected, 1, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)) {
        expected = 0;
    }
}
//
// -- This inline function will unlock a spinlock, clearing the lock holder
// ---------------------------------------------------------------------
EXPORT INLINE
void SpinUnlock(Spinlock_t *lock) {
    // -- atomically publish the unlocked (0) state so other cores may acquire the lock
    __atomic_store_n(&(lock->lock), 0, __ATOMIC_SEQ_CST);
}
//
// -- This inline function will determine if a spinlock is locked
// -----------------------------------------------------------
EXPORT INLINE
bool SpinlockIsLocked(Spinlock_t *lock) {
    // -- atomically sample the lock word; a value of 1 means some core currently holds it
    return __atomic_load_n(&(lock->lock), __ATOMIC_SEQ_CST) == 1;
}
//
// -- This is the lock that controls access to the address space for initializing the table
// -------------------------------------------------------------------------------------
EXTERN EXPORT KERNEL_DATA
Spinlock_t mmuTableInitLock;
//
// -- This is the lock that controls access to the address space for initializing the table
// -------------------------------------------------------------------------------------
EXTERN EXPORT KERNEL_DATA
Spinlock_t mmuStackInitLock;
//
// -- This macro will clean (flush) the cache for a Spinlock, making changes visible to all
// -------------------------------------------------------------------------------------
#define CLEAN_SPINLOCK(lock) CleanCache(lock, sizeof(Spinlock_t))
//
// -- This macro will invalidate the cache for a Spinlock, forcing it the be re-read from memory
// ------------------------------------------------------------------------------------------
#define INVALIDATE_SPINLOCK(lock) InvalidateCache(lock, sizeof(Spinlock_t))
<|start_filename|>platform/bcm2836/inc/platform-mailbox.h<|end_filename|>
//===================================================================================================================
//
// platform-mailbox.h -- Mailbox definitions and functions for the bcm2835
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// FrameBufferInit() will be called from the loader code, so this device will need to be available from the loader
// and the kernel.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#pragma once
#ifndef __HARDWARE_H__
# error "Use #include \"hardware.h\" and it will pick up this file; do not #include this file directly."
#endif
//
// -- Define a common interface for the GPIO functions that are needed
// ----------------------------------------------------------------
typedef struct MailboxDevice_t {
    archsize_t base;                                                            // -- MMIO base address of the mailbox block
    void (*MailboxSend)(struct MailboxDevice_t *, archsize_t, archsize_t);      // -- device-specific send (mailbox, message)
    archsize_t (*MailboxReceive)(struct MailboxDevice_t *, archsize_t);         // -- device-specific receive from a mailbox
} MailboxDevice_t;
//
// -- Here, declare the different configurations of the GPIO will use
// ---------------------------------------------------------------
EXTERN KERNEL_DATA
MailboxDevice_t kernelMailbox;
//
// -- These are the common interface functions we will use to interact with the GPIO. These functions are
// not safe in that they will not check for nulls before calling the function. Therefore, caller beware!
// -----------------------------------------------------------------------------------------------------------
EXPORT INLINE
void MailboxSend(MailboxDevice_t *dev, archsize_t mb, archsize_t msg) {
    // -- delegate to the device-specific send function; dev is not NULL-checked (caller beware!)
    dev->MailboxSend(dev, mb, msg);
}
EXPORT INLINE
archsize_t MailboxReceive(MailboxDevice_t *dev, archsize_t mb) {
    // -- delegate to the device-specific receive function; dev is not NULL-checked (caller beware!)
    return dev->MailboxReceive(dev, mb);
}
//
// -- Here are the function prototypes needed for these operations
// ------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void _MailboxSend(MailboxDevice_t *dev, archsize_t mb, archsize_t);
EXTERN_C EXPORT KERNEL
archsize_t _MailboxReceive(MailboxDevice_t *dev, archsize_t mb);
//
// -- Some mailbox address offsets
// ----------------------------
#define MB_READ (0x00) // MB: Receiving mail
#define MB_POLL (0x10) // MB: Read without receiving
#define MB_SENDER (0x14) // MB: Sender information
#define MB_STATUS (0x18) // MB: Information
#define MB_CONFIG (0x1c) // MB: Settings
#define MB_WRITE (0x20) // MB: Send mail
<|start_filename|>arch/arm/cpu/ArchFpuInit.cc<|end_filename|>
//===================================================================================================================
//
// ArchFpuInit.cc -- Initialize the core to handle FPU instructions
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Jun-16 Initial 0.4.6 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "platform.h"
#include "serial.h"
//
// -- Initialize the core to be able to use FPU instructions
// ------------------------------------------------------
EXTERN_C EXPORT LOADER
void ArchFpuInit(void)
{
    //
    // -- grant full access to coprocessors 10 and 11 (the VFP/FPU) in CPACR;
    //    the original code set (0b11<<20) and (0b11<<22) separately -- combined here
    //    into the single equivalent mask covering bits 20-23
    //    ------------------------------------------------------------------------
    WriteCPACR(ReadCPACR() | (0b1111 << 20));

    //
    // -- and enable the fpu (set the EN bit, bit 30, of FPEXC)
    //    -----------------------------------------------------
    WRITE_FPEXC(1 << 30);
}
<|start_filename|>modules/kernel/src/kInitAp.cc<|end_filename|>
//===================================================================================================================
//
// kInitAp.cc -- Kernel entry point for each AP -- bypassing the major structure init
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Feb-03 Initial v0.5.0f ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "cpu.h"
#include "platform.h"
#include "timer.h"
#include "process.h"
#include "heap.h"
#include "pic.h"
#include "entry.h"
#include "serial.h"
//
// -- This flag will indicate the point when we are ready to clean up
// ---------------------------------------------------------------
volatile bool startCleanup = false;
//
// -- This is the AP entry point.  Each AP arrives here on a shared temporary stack and must establish its own
//    process context before signalling the next CPU to start
//    --------------------------------------------------------------------------------------------------------
extern "C" EXPORT KERNEL
void kInitAp(void)
{
    // -- finish the architecture- and platform-specific setup for this core, then start its timer
    ArchLateCpuInit(cpus.cpuStarting);
    PlatformApInit();
    ApTimerInit(timerControl, 1000);

    kprintf("CPU %x running...\n", thisCpu->cpuNum);

    // -- build the initial Process_t for this AP
    //    (fix: check the allocation result BEFORE zeroing the structure; the original
    //    called kMemSetB() on `proc` and only then asserted it was not NULL)
    Process_t *proc = NEW(Process_t);
    assert(proc != NULL);
    kMemSetB(proc, 0, sizeof(Process_t));

    proc->pid = scheduler.nextPID ++;
    proc->ssAddr = thisCpu->stackFrame;
    proc->virtAddrSpace = mmuLvl1Table;

    // -- set the process name; the placeholder space in "kInitAp( )" (index 8) is
    //    replaced with this CPU's number
    proc->command = (char *)HeapAlloc(20, false);
    assert(proc->command != NULL);
    kMemSetB(proc->command, 0, 20);
    kStrCpy(proc->command, "kInitAp( )");
    proc->command[8] = thisCpu->cpuNum + '0';

    proc->policy = POLICY_0;
    proc->priority = PTY_OS;
    proc->status = PROC_RUNNING;
    AtomicSet(&proc->quantumLeft, PTY_OS);
    proc->timeUsed = 0;
    proc->wakeAtMicros = 0;
    ListInit(&proc->stsQueue);
    ListInit(&proc->references.list);

    kprintf("kInitAp() established the current process at %p for CPU%d\n", proc, thisCpu->cpuNum);
    CurrentThreadAssign(proc);

    kprintf("Assigning the starting timer for CPU%d\n", thisCpu->cpuNum);
    thisCpu->lastTimer = TimerCurrentCount(timerControl);

    // -- make the process globally visible and enable interrupts on this core
    kprintf("Enabling interrupts on CPU %d\n", thisCpu->cpuNum);
    kprintf("Cpus running is %d\n", cpus.cpusRunning);
    ProcessAddGlobal(proc);         // lock required
    EnableInterrupts();
    kprintf("Interrupts enabled on CPU %d\n", thisCpu->cpuNum);

    // -- release the next AP to start its own initialization
    NextCpu(cpus.cpuStarting);
    kprintf("CPU%d signalled the next CPU to start\n", thisCpu->cpuNum);

    // -- core 1 will be trying to clean up before core 3 is started; hold all cpus at this barrier until ready
    while (!startCleanup) {}

    // -- now immediately self-terminate so the scheduler can give this CPU to something else
    ProcessMicroSleep(0);
    ProcessEnd();

    assert(false);                  // -- ProcessEnd() must never return
    while (true) {}
}
<|start_filename|>platform/inc/platform.h<|end_filename|>
//===================================================================================================================
//
// platform.h -- These are the common functions for interacting with the platform
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-05 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#pragma once
#define __PLATFORM_H__
#include "types.h"
//
// -- This is the early platform initialization function
// --------------------------------------------------
EXTERN_C EXPORT LOADER
void PlatformEarlyInit(void);
//
// -- Complete the platform initialization
// ------------------------------------
EXTERN_C EXPORT LOADER
void PlatformInit(void);
//
// -- Complete the platform-specific initialization for the AP
// --------------------------------------------------------
EXTERN_C EXPORT KERNEL
void PlatformApInit(void);
<|start_filename|>modules/kernel/src/process/ProcessCreate.cc<|end_filename|>
//===================================================================================================================
//
// ProcessCreate.cc -- Create a new process, setting everything up to be able to schedule it
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-16 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
#include "mmu.h"
#include "printf.h"
#include "process.h"
//
// -- Create a new process and get it ready to be scheduled
// -----------------------------------------------------
EXPORT KERNEL
Process_t *ProcessCreate(const char *name, void (*startingAddr)(void))
{
    extern archsize_t mmuLvl1Table;

    Process_t *rv = NEW(Process_t);
    if (!assert_msg(rv != NULL, "Out of memory allocating a new Process_t")) {
        CpuPanicPushRegs("Out of memory allocating a new Process_t");
    }

    kMemSetB(rv, 0, sizeof(Process_t));
    rv->pid = scheduler.nextPID ++;

    // -- set the name of the process
    //    (fix: the original computed `kStrLen(name + 1)` -- the length of the string
    //    starting at its second character -- then wrote the terminator at [len + 1],
    //    one byte past the end of the allocation, and the kStrCpy() of the full name
    //    overflowed the buffer.  Allocate length + 1 and terminate inside the buffer.)
    int len = kStrLen(name) + 1;
    rv->command = (char *)HeapAlloc(len, false);
    kStrCpy(rv->command, name);
    rv->command[len - 1] = 0;           // -- guarantee NUL termination within the allocation

    rv->policy = POLICY_0;
    rv->priority = PTY_OS;
    rv->status = PROC_INIT;
    AtomicSet(&rv->quantumLeft, 0);
    rv->timeUsed = 0;
    rv->wakeAtMicros = 0;
    ListInit(&rv->stsQueue);
    ListInit(&rv->references.list);

    //
    // -- Construct the stack for the architecture
    //    ----------------------------------------
    rv->ssAddr = ProcessNewStack(rv, startingAddr);

    //
    // -- Construct the new address space for the process
    //    -----------------------------------------------
    rv->virtAddrSpace = mmuLvl1Table;

#if DEBUG_ENABLED(ProcessCreate)
    kprintf("ProcessCreate() created a new process at %p\n", rv);
#endif

    //
    // -- Put this process on the queue to execute
    //    ----------------------------------------
    ProcessLockAndPostpone();
    rv->status = PROC_READY;
    ProcessDoAddGlobal(rv);
    ProcessDoReady(rv);
    ProcessUnlockAndSchedule();

    return rv;
}
<|start_filename|>platform/bcm2836/pic/PicBroadcastIpi.cc<|end_filename|>
//===================================================================================================================
//
// PicBroadcastIpi.cc -- Broadcast an IPI to all CPUs
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Jun-08 Initial 0.4.5 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "pic.h"
//
// -- Broadcast an IPI to all CPUs (including myself)
// -----------------------------------------------
EXTERN_C EXPORT KERNEL
void _PicBroadcastIpi(PicDevice_t *dev, int ipi)
{
    // -- nothing to do without a device, or before the IPI mechanism is ready
    if (!dev) return;
    if (!dev->ipiReady) return;

    // -- count this core as having responded (1); the wait below runs until the counter
    //    reaches cpus.cpusRunning, so each receiving core presumably increments mb0Resp
    //    in its IPI handler -- TODO confirm against the receiver-side handler
    AtomicSet(&mb0Resp, 1);

#if DEBUG_ENABLED(PicBroadcastIpi)
    kprintf("For IPI broadcast Qualified on CPU %d\n", thisCpu->cpuNum);
#endif

    // -- write the IPI number into every other core's mailbox (mailboxes are 0x10 bytes apart)
    for (int i = 0; i < cpus.cpusRunning; i ++) {
        if (i != thisCpu->cpuNum) {
#if DEBUG_ENABLED(PicBroadcastIpi)
            kprintf("Sending to mailbox for cpu %d\n", i);
#endif
            MmioWrite(IPI_MAILBOX_BASE + (0x10 * i), (archsize_t)ipi);
        }
    }

    // -- busy-wait until every running CPU has acknowledged the broadcast
    while (AtomicRead(&mb0Resp) != cpus.cpusRunning) {}

#if DEBUG_ENABLED(PicBroadcastIpi)
    kprintf(".. Completed on CPU %d\n", thisCpu->cpuNum);
#endif
}
<|start_filename|>platform/pc/interrupts/IsrInt00.cc<|end_filename|>
//===================================================================================================================
//
// IsrInt00.cc -- Divide by 0 handler
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// The most basic divide by 0 exception handler
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Oct-10 Initial 0.1.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "interrupt.h"
//
// -- This is the ISR Handler routine
// -------------------------------
EXTERN_C EXPORT KERNEL
void IsrInt00(isrRegs_t *regs)
{
    // -- announce the divide-by-zero fault, then dump the register state at the exception
    const char *faultMsg = "\nDivide Overflow\n";
    kprintf(faultMsg);
    IsrDumpState(regs);
}
<|start_filename|>modules/kernel/src/heap/HeapFindHole.cc<|end_filename|>
//===================================================================================================================
//
// HeapFindHole.cc -- Find the smallest hole that has the size needed
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Find the smallest hole that has the size required. Align the block as necessary and ensure the remaining block
// is big enough.
//
// ------------------------------------------------------------------------------------------------------------------
//
// IMPORTANT PROGRAMMING NOTE:
// The calling function must guarantee that adjustedSize be >= sizeof(KHeapHeader) + sizeof(KHeapFooter) + 1.
// This function will not check its validity.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-02 Initial version
// 2012-Sep-16 Leveraged from Century
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2018-Jun-01 Initial 0.1.0 ADCL Copied this file from century32 to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "heap.h"
//
// -- Find the best fit hole in the list of holes
// -------------------------------------------
//
// -- Find the best fit hole in the list of holes
//    -------------------------------------------
OrderedList_t *HeapFindHole(size_t adjustedSize, bool align)
{
    OrderedList_t *wrk = NULL;
    size_t wrkSize;

    // -- First determine the right starting point for searching, so wrk is optimized
    //    for a faster search for the right size
    if (adjustedSize < 512) wrk = kHeap->heapMemory;
    else if (adjustedSize < 1024) wrk = kHeap->heap512;
    else if (adjustedSize < 4096) wrk = kHeap->heap1K;
    else if (adjustedSize < 16384) wrk = kHeap->heap4K;
    else wrk = kHeap->heap16K;

    while (wrk) {                           // while we have something to work with...
        if (wrk->size < adjustedSize) {
            wrk = wrk->next;
            continue;
        }

        // -- first entry of sufficient size and we are not aligning; use it
        if (!align) return wrk;

        // -- at this point, guaranteed to be looking for an aligned block;
        //    compute how much of the hole is consumed by moving up to the alignment boundary
        archsize_t adjustment = HeapCalcPageAdjustment(wrk) - (archsize_t)wrk->block;

        // -- check if the adjustment overruns the block BEFORE subtracting
        //    (fix: wrkSize is unsigned, so the old `wrk->size - adjustment <= 0` test could
        //    never catch an underflow -- an adjustment larger than the hole wrapped around
        //    to a huge value and could falsely satisfy the fit test below)
        if (adjustment >= wrk->size) {
            wrk = wrk->next;
            continue;
        }

        // -- wrkSize now has the available memory for the block after adjusting for page
        //    alignment; remember we pulled the size of the header out, so check for a fit
        wrkSize = wrk->size - adjustment;
        if (wrkSize >= adjustedSize - sizeof(KHeapHeader_t)) return wrk;

        // not big enough yet, move on
        wrk = wrk->next;
    }

    // no memory to allocate
    return NULL;
}
<|start_filename|>platform/pc/timer/TimerCurrentCount.cc<|end_filename|>
//===================================================================================================================
//
// TimerCurrentCount.cc -- Get the current count from the timer
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-19 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "timer.h"
//
// -- This is the number of ticks since boot
// --------------------------------------
EXPORT KERNEL_DATA
uint64_t microsSinceBoot = 0;
//
// -- Get the number of ticks since boot
// ----------------------------------
EXPORT KERNEL
uint64_t _TimerCurrentCount(TimerDevice_t *dev)
{
    // -- return the global tick counter; presumably maintained by the timer ISR (the
    //    device parameter is unused by this implementation)
    return microsSinceBoot;
}
<|start_filename|>platform/pc/serial/SerialVars.cc<|end_filename|>
//===================================================================================================================
//
// SerialVars.cc -- These are the variables for the Serial Port for x86
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-23 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "serial.h"
//
// -- This is the device description that will be used for outputting data to the debugging serial port
// -------------------------------------------------------------------------------------------------
EXPORT KERNEL_DATA
SerialDevice_t debugSerial = {
    .base = COM1,                       // -- use the first serial port for debug output
    .lock = {0},                        // -- the device lock starts out unlocked
    .SerialOpen = _SerialOpen,
    .SerialHasChar = _SerialHasChar,
    .SerialHasRoom = _SerialHasRoom,
    .SerialGetChar = _SerialGetChar,
    .SerialPutChar = _SerialPutChar,
};
<|start_filename|>arch/x86/inc/arch-interrupt.h<|end_filename|>
//===================================================================================================================
//
// arch-interrupt.cc -- These are functions related to interrupts for the i686 architecture
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// These are function prototypes for interrupts management
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-11 Initial 0.2.0 ADCL Initial version
// 2019-Feb-09 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#pragma once
#ifndef __INTERRUPT_H__
# error "Do not include 'arch-interrupt.h' directly. Include 'interrupt.h' and this file will be included"
#endif
#include "types.h"
//
// -- Set up an IDT gate
// ------------------
EXTERN_C EXPORT KERNEL
void ArchIdtSetGate(uint8_t num, archsize_t base, archsize_t sel, uint8_t flags);
//
// -- These functions are the specific Interrupt service routines (before the handler)
// --------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void IsrInt00(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt01(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt02(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt03(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt04(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt05(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt06(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt07(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt08(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt09(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt0a(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt0b(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt0c(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt0d(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt0e(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt0f(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt10(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt11(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt12(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt13(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt14(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt15(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt16(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt17(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt18(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt19(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt1a(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt1b(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt1c(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt1d(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt1e(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void IsrInt1f(isrRegs_t *regs);
EXTERN_C EXPORT KERNEL
void ArchIntNone(isrRegs_t *regs);
//
// -- A Local prototype to prevent the compiler from name mangling
// ------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void IsrHandler(isrRegs_t regs);
//
// -- These are some macros to assist in the system calls handling
// ------------------------------------------------------------
#define SYSCALL_FUNC_NO(regs) ((regs)->eax)
#define SYSCALL_RETURN(regs) ((regs)->eax)
#define SYSCALL_RCVMSG_PARM1(regs) ((regs)->edi)
#define SYSCALL_SNDMSG_PARM1(regs) ((regs)->edx)
#define SYSCALL_SNDMSG_PARM2(regs) ((regs)->edi)
<|start_filename|>modules/kernel/inc/types.h<|end_filename|>
//===================================================================================================================
//
// types.h -- Common type definitions for all architectures
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// These types are architecture independent. In the end, we add the architecture-specific types with the proper
// size.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-May-24 Initial 0.1.0 ADCL Initial version
// 2018-Nov-04 Initial 0.1.0 ADCL Added Compile Time Assertions from
// http://www.pixelbeat.org/programming/gcc/static_assert.html
// 2018-Nov-11 Initial 0.2.0 ADCL Address architecture abstraction issues
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#pragma once
#define __TYPES_H__
#include "constants.h"
#include "debug.h"
//
// -- these are the only 2 standard include files that are safe to include
// --------------------------------------------------------------------
#include <stdint.h>
#include <stddef.h>
//
// -- manage the release compiler flag
// --------------------------------
#if !defined(RELEASE)
# define RELEASE 0
#else
# if RELEASE > 1
# undef RELEASE
# define RELEASE 1
# endif
#endif
//
// -- some things to add readability/direction to the linker
// ------------------------------------------------------
#define EXPORT __attribute__((visibility("default")))
#define HIDDEN __attribute__((visibility("hidden")))
#define EXTERN extern
#define EXTERN_C EXTERN "C"
#define NORETURN __attribute__((noreturn))
#define INLINE inline __attribute__((always_inline))
#define ALIGN(x) __attribute__((aligned(x)))     /* fix: the GCC attribute is `aligned`, not `align` (which is silently ignored) */
#define INT_UNSTABLE volatile /* changed by an interrupt handler */
#define SMP_UNSTABLE volatile /* changed by another core */
#define THR_UNSTABLE volatile /* changed by another thread */
#define UNSTABLE volatile /* changed by 2 or more of the above */
//
// -- Things that might appear on the ENTRY section
// ---------------------------------------------
#define ENTRY __attribute__((section(".text.entry")))
#define ENTRY_DATA __attribute__((section(".data.entry")))
#define ENTRY_BSS __attribute__((section(".bss.entry")))
#define KERNEL __attribute__((section(".text")))
#define KERNEL_DATA __attribute__((section(".data")))
#define KERNEL_BSS __attribute__((section(".bss")))
#define LOADER __attribute__((section(".ldrtext")))
#define LOADER_DATA __attribute__((section(".ldrdata")))
#define LOADER_BSS __attribute__((section(".ldrbss")))
#define SYSCALL __attribute__((section(".text.syscall")))
#define SYSCALL_DATA __attribute__((section(".data.syscall")))
#define SYSCALL_BSS __attribute__((section(".bss.syscall")))
//
// -- Define UNUSED, based on which parser we are using
// -------------------------------------------------
#ifdef UNUSED
#elif defined(__GNUC__)
# define UNUSED(x) x __attribute__((unused))
#elif defined(__LCLINT__)
# define UNUSED(x) /*@unused@*/ x
#else
# define UNUSED(x) x
#endif
//
// -- some basic macros to help with coding
// -------------------------------------
#define ABS(x) ((x)>=0?(x):-(x))
#define MIN(x,y) ((x)<=(y)?(x):(y))
#define MAX(x,y) ((x)>=(y)?(x):(y))
/* adapted from http: *research.microsoft.com/... */
typedef char * va_list;
#define _INTSIZEOF(n) ((sizeof(n) + sizeof(int) - 1) & ~(sizeof(int) - 1))
#define va_start(ap,v) (ap = (va_list)&v + _INTSIZEOF(v))
#define va_arg(ap,t) (*(t *)((ap += _INTSIZEOF(t)) - _INTSIZEOF(t)))
#define va_end(ap) (ap = (va_list)0)
//
// -- Some compiler hints
// -------------------
#define likely(x) __builtin_expect((x),1)
#define unlikely(x) __builtin_expect((x),0)
//
// -- Some compile-time assertions to help with size checking!
// --------------------------------------------------------
/* Note we need the 2 concats below because arguments to ##
* are not expanded, so we need to expand __LINE__ with one indirection
* before doing the actual concatenation. */
#define ASSERT_CONCAT_(a, b) a##b
#define ASSERT_CONCAT(a, b) ASSERT_CONCAT_(a, b)
#define ct_assert(e) enum { ASSERT_CONCAT(assert_line_, __LINE__) = 1/(!!(e)) }
//
// -- Some additional runtime assertion checking; purposefully set up for use in conditions
// -------------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
bool AssertFailure(const char *expr, const char *msg, const char *file, int line);
#ifdef assert
# undef assert
#endif
#if RELEASE == 1
# define assert(e) true
# define assert_msg(e,m) true
#else
# define assert(e) (likely((e)) ? true : AssertFailure(#e, NULL, __FILE__, __LINE__))
# define assert_msg(e,m) (likely((e)) ? true : AssertFailure(#e, (m), __FILE__, __LINE__))
#endif
//
// -- Define the types that will be used by the ELF loader
// ----------------------------------------------------
typedef uint64_t elf64Addr_t;
typedef uint64_t elf64Off_t;
typedef uint32_t elf32Addr_t;
typedef uint32_t elf32Off_t;
typedef int64_t elfSXWord_t;
typedef uint64_t elfXWord_t;
typedef int32_t elfSWord_t;
typedef uint32_t elfWord_t;
typedef uint16_t elfHalf_t;
//
// -- Now include the architecture-specific types
// -------------------------------------------
#include "arch-types.h"
//
// -- This is a process ID (or PID) -- the same width regardless of arch
// ------------------------------------------------------------------
typedef uint32_t PID_t;
//
// -- This is the size of a frame for the PMM (which is tied to the address width for this architecture)
// --------------------------------------------------------------------------------------------------
typedef archsize_t frame_t;
//
// -- This is a generic byte definition
// ---------------------------------
typedef uint8_t byte_t;
//
// -- This is the type of a key used for the IPC calls
// ------------------------------------------------
typedef int32_t key_t;
//
// -- The current PID
// ---------------
EXTERN volatile KERNEL_BSS
PID_t currentPID;
//
// -- This is the prototype definition for an ISR handler routine
// -----------------------------------------------------------
typedef void (*isrFunc_t)(isrRegs_t *);
//
// -- The definition of a NULL ISR Handler Function
// ---------------------------------------------
const isrFunc_t NULL_ISR = (isrFunc_t)NULL;
//
// -- The ISR Handlers
// ----------------
EXTERN KERNEL_BSS
isrFunc_t isrHandlers[256];
#include "lists.h"
<|start_filename|>modules/kernel/src/cpu/CpuInit.cc<|end_filename|>
//===================================================================================================================
//
// CpuInit.cc -- Initialize the cpu structures
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Jan-21 Initial v0.5.0f ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "stacks.h"
#include "entry.h"
#include "mmu.h"
#include "pmm.h"
#include "pic.h"
#include "cpu.h"
//
// -- Initialize the cpus structure
// -----------------------------
EXTERN_C EXPORT LOADER
void CpuInit(void)
{
    // -- set up the per-cpu structures for every possible cpu; cpu0 keeps the
    //    loader's stack/frame while each other core gets a fresh mapped stack
    ArchEarlyCpuInit();

    for (int i = 0; i < MAX_CPUS; i ++) {
        // -- start with this stack
        archsize_t stack = STACK_LOCATION;
        frame_t frame = ldrStackFrame;          // -- frame for cpu0

        // -- other cores get a different stack, allocated and mapped here
        if (i > 0) {
            stack = StackFind();
            frame = PmmAllocateFrame();
            MmuMapToFrame(stack, frame, PG_KRN | PG_WRT);
        }

        cpus.perCpuData[i].cpuNum = i;
        cpus.perCpuData[i].location = -1;       // will be filled in later
        cpus.perCpuData[i].stackTop = stack + STACK_SIZE;
        // -- only cpus actually discovered are eligible to be started later
        AtomicSet(&cpus.perCpuData[i].state, (i < cpus.cpusDiscovered ? CPU_STOPPED : CPU_BAD));
        cpus.perCpuData[i].kernelLocksHeld = 0;
        cpus.perCpuData[i].reschedulePending = false;
        cpus.perCpuData[i].disableIntDepth = 0;
        cpus.perCpuData[i].cpu = &cpus.perCpuData[i];   // self-pointer back to this entry
        cpus.perCpuData[i].process = NULL;
        cpus.perCpuData[i].stackFrame = frame;

        kprintf("Calling per cpu(%d)\n", i);
        ArchPerCpuInit(i);
        kprintf("..back\n");
    }

    // -- cpu0 is the one executing this code, so mark it started
    AtomicSet(&cpus.perCpuData[0].state, CPU_STARTED);
    cpus.cpusRunning = 1;
    cpus.cpuStarting = -1;

    // -- the location will be done in `CoresStart()`
    ArchLateCpuInit(0);

    // (fixed log typo: "numer" -> "number")
    kprintf("Done with CPU setup!! CPU number is %d\n", thisCpu->cpuNum);
}
<|start_filename|>modules/kernel/src/heap/HeapMergeLeft.cc<|end_filename|>
//===================================================================================================================
//
// HeapMergeLeft.cc -- Merge the freeing block with the block to the left if free as well
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Merge the freeing block with the block to the left if free as well
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-26 Initial version
// 2012-Sep-16 Leveraged from Century
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2013-Sep-13 #74 Rewrite Debug.h to use assertions and write to TTY_LOG
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
//
// -- Merge this hole with the one on the left
// ----------------------------------------
OrderedList_t *HeapMergeLeft(KHeapHeader_t *hdr)
{
    KHeapFooter_t *leftFtr = NULL;
    KHeapHeader_t *leftHdr = NULL;
    KHeapFooter_t *thisFtr = NULL;

    if (!assert(hdr != NULL)) HeapError("Bad Header passed into HeapMergeLeft()", "");

    thisFtr = (KHeapFooter_t *)((char *)hdr + hdr->size - sizeof(KHeapFooter_t));
    leftFtr = (KHeapFooter_t *)((char *)hdr - sizeof(KHeapFooter_t));

    // -- Check that the left footer actually lies inside the heap BEFORE dereferencing it --
    //    may end in `#PF` for the first block otherwise.  (BUG FIX: the check previously
    //    tested `leftHdr`, which is still NULL at this point, so it never guarded the
    //    `leftFtr->hdr` dereference below.)
    if ((byte_t *)leftFtr < kHeap->strAddr) return 0;
    leftHdr = leftFtr->hdr;

    if (!leftHdr->_magicUnion.isHole) return 0; // make sure the left block is a hole

    // -- pull the left hole off the free list, absorb this block into it, and
    //    repoint this block's footer at the (now larger) left header
    HeapReleaseEntry(leftHdr->entry);
    leftHdr->size += hdr->size;
    thisFtr->hdr = leftHdr;
    leftHdr->_magicUnion.isHole = thisFtr->_magicUnion.isHole = 1;

    return HeapNewListEntry(leftHdr, 0);
}
<|start_filename|>arch/arm/inc/arch-cpu.h<|end_filename|>
//===================================================================================================================
//
// arch-cpu.h -- This file contains the definitions for setting up the ARM for RPi2b
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-11 Initial 0.2.0 ADCL Initial version
// 2018-Nov-13 Initial 0.2.0 ADCL Copy the MMIO functions from century into this file
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#ifndef __CPU_H__
# error "Do not include 'arch-cpu.h' directly; include 'cpu.h' instead, which will pick up this file."
#endif
#include "types.h"
#include "atomic.h"
//
// -- This is the abstraction of the CPU.
// -----------------------------------
typedef struct ArchCpu_t {
COMMON_CPU_ELEMENTS
} ArchCpu_t;
//
// -- Perform the Archictecture-Specifc CPU initialization required
// -------------------------------------------------------------
#define ArchEarlyCpuInit()
//
// -- Complete the final initialization for the CPU
// ---------------------------------------------
EXTERN_C EXPORT LOADER
void ArchLateCpuInit(int c);
//
// -- Complete the initialization for the arch-specific CPU elements
// --------------------------------------------------------------
#define ArchPerCpuInit(...)
//
// -- Arch Specific cpu location determination
// ----------------------------------------
#define ArchCpuLocation() ReadMPIDR()
//
// -- This is the max IOAPICs that can be defined for this arch
// ---------------------------------------------------------
#define MAX_IOAPIC 1
//
// -- This is the natural byte alignment for this architecture
// --------------------------------------------------------
#define BYTE_ALIGNMENT 4
//
// -- These macros assist with the management of the MMU mappings -- picking the address apart into indexes
// into the various tables
// -----------------------------------------------------------------------------------------------------
#define KRN_TTL1_ENTRY(a) (&((Ttl1_t *)ARMV7_TTL1_TABLE_VADDR)[(a) >> 20])
#define KRN_TTL1_ENTRY4(a) (&((Ttl1_t *)ARMV7_TTL1_TABLE_VADDR)[((a) >> 20) & 0xffc])
#define KRN_TTL2_MGMT(a) (&((Ttl2_t *)ARMV7_TTL2_MGMT)[(a) >> 22])
#define KRN_TTL2_ENTRY(a) (&((Ttl2_t *)ARMV7_TTL2_TABLE_VADDR)[(a) >> 12])
//
// -- a macro to read a 32-bit control register; `cp15Spec` is a string literal
//    pasted into the asm template to name the coprocessor register
//    -------------------------------------------------------------------------
#define MRC(cp15Spec) ({ \
    uint32_t _val; \
    __asm__ volatile("mrc " cp15Spec : "=r" (_val)); \
    _val; \
})
//
// -- a macro to read a 32-bit floating point register
//    ------------------------------------------------
#define VMRS(vfpspec) ({ \
    uint32_t _val; \
    __asm__ volatile("vmrs %0, " vfpspec : "=r" (_val)); \
    _val; \
})
//
// -- a macro to write a 32-bit control register; the trailing `isb` ensures the
//    system register update takes effect before subsequent instructions execute
//    --------------------------------------------------------------------------
#define MCR(cp15Spec,val) ({ \
    __asm__ volatile("mcr " cp15Spec :: "r" (val)); \
    __asm__ volatile("isb"); \
})
//
// -- a macro to write a 32-bit floating point register (also followed by `isb`)
//    --------------------------------------------------------------------------
#define VMSR(vfpspec,val) ({ \
    __asm__ volatile("vmsr " vfpspec ", %0" :: "r" (val)); \
    __asm__ volatile("isb"); \
})
//
// -- a macro to read a 64-bit control register; the two 32-bit halves are
//    recombined as (high << 32) | low
//    --------------------------------------------------------------------
#define MRRC(cp15Spec) ({ \
    uint32_t _lval, _hval; \
    __asm__ volatile("mrrc " cp15Spec : "=r" (_lval), "=r" (_hval)); \
    (((uint64_t)(_hval))<<32)|_lval; \
})
//
// -- a macro to write to a 64-bit control register
// ---------------------------------------------
// `val` is parenthesized in the expansion so that expressions such as
// `MCRR(spec, a + b)` split the whole 64-bit value, not just `b`
// (previously `val >> 32` expanded to `a + b >> 32` == `a + (b >> 32)`).
// NOTE(review): unlike MCR() there is no trailing `isb` here -- confirm
// callers issue their own barrier where ordering matters.
#define MCRR(cp15Spec,val) ({ \
    uint32_t _lval = (uint32_t)((val) & 0xffffffff); \
    uint32_t _hval = (uint32_t)((val) >> 32); \
    __asm__ volatile("mcrr " cp15Spec :: "r" (_lval), "r" (_hval)); \
})
//
// -- Access to the FPEXC register
// ----------------------------
#define FPEXC "fpexc"
#define READ_FPEXC() VMRS(FPEXC)
#define WRITE_FPEXC(val) VMSR(FPEXC,val)
//
// -- Initialize the core to use the FPU
// ----------------------------------
EXTERN_C EXPORT LOADER
void ArchFpuInit(void);
#define ApTimerInit(t,f) TimerInit(t, f)
//
// -- Include the arch-specific CPU operations
// ----------------------------------------
#include "arch-cpu-ops.h"
//
// -- Some optimizations for the elements we will get to frequently: the per-cpu
//    structure pointer is kept in TPIDRPRW and the current process pointer in
//    TPIDRURO, so each is a single register read away
//    --------------------------------------------------------------------------
#define thisCpu ((ArchCpu_t *)ReadTPIDRPRW())
#define currentThread ((Process_t *)ReadTPIDRURO())
// -- assign the current process, keeping the per-cpu structure and the
//    TPIDRURO register view of it in sync
EXTERN_C EXPORT INLINE
void CurrentThreadAssign(Process_t *p) { thisCpu->process = p; WriteTPIDRURO((archsize_t)p); }
//
// -- Bochs magic breakpoint (which will not work on arm)
// ---------------------------------------------------
#define BOCHS_BREAK
#define BOCHS_TOGGLE_INSTR
<|start_filename|>modules/kernel/src/debugger/DebugScheduler.cc<|end_filename|>
//===================================================================================================================
//
// DebugScheduler.cc -- Debug the scheduler
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// We have gotten to this point, we know we are debugging the scheduler.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-03 Initial v0.6.0a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "serial.h"
#include "process.h"
#include "debugger.h"
//
// -- Control where we go to debug the scheduler
// ------------------------------------------
EXTERN_C EXPORT KERNEL
void DebugScheduler(void)
{
    // -- read and dispatch scheduler-debugger commands until an exit-style
    //    command drops us back to the home debugger state
    while (true) {
        // -- only prompt when no partially-entered command is pending
        if (kStrLen(debugCommand) == 0) DebugPrompt(debugState);
        DebuggerCommand_t cmd = DebugParse(debugState);
        switch(cmd) {
        case CMD_EXIT:
            debugState = DBG_HOME;
            return;

        case CMD_SHOW:
            DebugSchedulerShow();
            debugState = DBG_SCHED;
            break;

        case CMD_STAT:
            DebugSchedulerStat();
            debugState = DBG_SCHED;
            break;

        case CMD_RUNNING:
            DebugSchedulerRunning();
            debugState = DBG_SCHED;
            break;

        case CMD_READY:
            // -- NOTE(review): sets DBG_HOME but `break`s (staying in this
            //    loop), while CMD_LIST below returns; confirm this asymmetry
            //    is intentional
            debugState = DBG_HOME;
            break;

        case CMD_LIST:
            debugState = DBG_HOME;
            return;

        case CMD_ERROR:
        default:
            kprintf(ANSI_ATTR_BOLD ANSI_FG_RED
                    "Something went wrong (scheduler) -- a bug in the debugger is likely\n" ANSI_ATTR_NORMAL);
            continue;
        }
    }
}
<|start_filename|>arch/x86/mmu/MmuInit.cc<|end_filename|>
//===================================================================================================================
//
// MmuInit.cc -- Complete the MMU initialization for the x86 architecture
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// The goal of this function is to make sure that MMU is ready to run all the kernel functions as well as the
// loader ones. We have already mapped the lower 4MB of memory and we should be able to use plenty of PMM frames
// to get the initialization complete.
//
// One thing I will want to watch for is that I am encroaching on the kernel. It is a possibility and so I want
// to build this check into the function so that I can panic the kernel if I encroach on the kernel data. This
// will be done by comparing to `_kernelEnd`.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-13 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "serial.h"
#include "mmu.h"
#include "entry.h"
#include "hw-disc.h"
#include "printf.h"
#include "loader.h"
//
// -- Complete the initialization of the Mmu for the loader to function properly
// --------------------------------------------------------------------------
EXTERN_C EXPORT LOADER
void MmuInit(void)
{
    kPrintfEnabled = true; // nothing required here to enable this
    //
    // -- Next up is the IVT -- which needs to be mapped. This one is rather trivial:
    //    a single kernel-writable page mapping for the interrupt vector table frame.
    //    ----------------------------------------------------------------------------
    MmuMapToFrame(EXCEPT_VECTOR_TABLE, intTableAddr, PG_KRN | PG_WRT);
}
<|start_filename|>modules/kernel/src/AssertFailure.cc<|end_filename|>
//===================================================================================================================
//
// AssertFailure.cc -- Handle outputting that an assertion failed
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Nov-29 Initial 0.4.6b ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
//
// -- Handle outputting that an assertion failed
// ------------------------------------------
EXPORT KERNEL
bool AssertFailure(const char *expr, const char *msg, const char *file, int line)
{
    // -- report the failing expression, its location, and the optional message
    //    (an absent message prints as an empty string rather than "(null)")
    const char *extra = (msg ? msg : "");
    kprintf("\n!!! ASSERT FAILURE !!!\n%s(%d) %s %s\n\n", file, line, expr, extra);

    // -- always return false in case this is used in a conditional
    return false;
}
<|start_filename|>platform/bcm2836/serial/SerialOpen.cc<|end_filename|>
//===================================================================================================================
//
// SerialOpen.cc -- Initialize a serial port for debugging output
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-13 Initial 0.2.0 ADCL Initial version -- leveraged out of century's `uart-dev.c`
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
// 2019-Feb-10 Initial 0.3.0 ADCL Remove the call to BusyWait() and replace with a simple loop
//
//===================================================================================================================
#include "types.h"
#include "hardware.h"
#include "serial.h"
//
// -- Initialize the UART Serial Port
// -------------------------------
EXTERN_C EXPORT KERNEL
void _SerialOpen(SerialDevice_t *dev)
{
    if (!dev) return;
    SerialBase_t base = dev->base;

    // -- must start by enabling the mini-UART; no register access will work until...
    MmioWrite(base + AUX_ENABLES, 1);

    // -- Disable all interrupts
    MmioWrite(base + AUX_MU_IER_REG, 0);

    // -- Reset the control register (TX/RX stay disabled while we program the device)
    MmioWrite(base + AUX_MU_CNTL_REG, 0);

    // -- Program the Line Control Register -- 8 bits, please
    MmioWrite(base + AUX_MU_LCR_REG, 3);

    // -- Program the Modem Control Register -- reset
    MmioWrite(base + AUX_MU_MCR_REG, 0);

    // -- Disable all interrupts -- again
    MmioWrite(base + AUX_MU_IER_REG, 0);

    // -- Clear all interrupts
    MmioWrite(base + AUX_MU_IIR_REG, 0xc6);

    // -- Set the BAUD to 115200 -- ((250,000,000/115200)/8)-1 = 270
    MmioWrite(base + AUX_MU_BAUD_REG, 270);

    // -- route the mini-UART onto GPIO pins 14/15 (alternate function 5) and enable them
    GpioDevice_t *gpio = (GpioDevice_t *)dev->platformData;
    GpioSelectAlt(gpio, GPIO14, ALT5);
    GpioSelectAlt(gpio, GPIO15, ALT5);
    GpioEnablePin(gpio, GPIO14);
    GpioEnablePin(gpio, GPIO15);

    // -- Enable TX/RX
    MmioWrite(base + AUX_MU_CNTL_REG, 3);

    // -- clear the input buffer: while LSR bit 0 reports data, read and discard it
    while ((MmioRead(base + AUX_MU_LSR_REG) & (1<<0)) != 0) MmioRead(base + AUX_MU_IO_REG);
}
<|start_filename|>modules/kernel/inc/stacks.h<|end_filename|>
//===================================================================================================================
//
// stacks.h -- Some helpers to managing kernel stacks
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// There are several kernel stack locations that need to be managed. These will all use the same address space.
// These functions will assist in this.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Dec-01 Initial 0.4.6d ADCL Initial version
//
//===================================================================================================================
#pragma once
#include "types.h"
#include "cpu.h"
#include "spinlock.h"
//
// -- The number of stacks we will manage, divided into 32-bit dwords
//    Notice the `+ 31`.. this will take care of rounding partial dwords
//    (4MB of total stack space, STACK_SIZE bytes each, 32 stacks per dword)
//    ------------------------------------------------------------------
#define STACK_COUNT ((((4 * 1024 * 1024) / STACK_SIZE) + 31) / 32)
//
// -- This will be the bitmap we will use to keep track of the stacks
//    ---------------------------------------------------------------
EXTERN EXPORT KERNEL_DATA
uint32_t stacks[STACK_COUNT];
//
// -- This is the lock that will protect the bitmap
//    ---------------------------------------------
EXTERN EXPORT KERNEL_DATA
Spinlock_t stackBitmapLock;
//
// -- Allocate a stack (lock-free worker; see StackAlloc for the locking wrapper)
//    ---------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void StackDoAlloc(archsize_t stackBase);
// -- Locking wrapper: SPINLOCK_BLOCK_NO_INT opens a block with the lock held and
//    interrupts disabled; SPINLOCK_RLS_RESTORE_INT releases the lock and restores
//    the saved interrupt state before the block closes
EXPORT INLINE
void StackAlloc(archsize_t stackBase) {
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(stackBitmapLock) {
        StackDoAlloc(stackBase);
        SPINLOCK_RLS_RESTORE_INT(stackBitmapLock, flags);
    }
}
//
// -- Release a stack
//    ---------------
EXTERN_C EXPORT KERNEL
void StackRelease(archsize_t stackBase);
//
// -- Find an available stack
//    -----------------------
EXTERN_C EXPORT KERNEL
archsize_t StackFind(void);
<|start_filename|>platform/pc/apic/IoApicEoi.cc<|end_filename|>
//===================================================================================================================
//
// IoApicEoi.cc -- Perform an EOI on the Local APIC
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-19 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "mmu.h"
#include "pic.h"
//
// -- End of interrupt signal
// -----------------------
EXTERN_C EXPORT KERNEL
void _IoApicEoi(PicDevice_t *dev, UNUSED(Irq_t irq))
{
    // -- without a device there is nothing to acknowledge
    if (dev == NULL) return;

    // -- the EOI register lives in the Local APIC; writing 0 signals end-of-interrupt
    IoApicDeviceData_t *apicData = (IoApicDeviceData_t *)dev->device.deviceData;
    MmioWrite(apicData->localApicBase + LAPIC_EOI, 0);
}
<|start_filename|>modules/kernel/src/frame-buffer/FrameBufferDrawChar.cc<|end_filename|>
//===================================================================================================================
//
// FrameBufferDrawChar.cc -- Draw a character on to the screen
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// *** PROGRAMMING NOTE ***
//
// This function has some important shortcomings, which are detailed here:
// * Line wrapping is not implemented
// * Screen scrolling is not implemented
//
// Therefore, only one page of data can be printed currently
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2017-May-03 Initial 0.0.0 ADCL Initial version
// 2018-Jun-13 Initial 0.1.0 ADCL Copied this file from century to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "hw-disc.h"
#include "printf.h"
#include "fb.h"
//
// -- This is the internally linked system monospace font
// ---------------------------------------------------
extern uint8_t systemFont[];
//
// -- Draw a character on the screen
// ------------------------------
void FrameBufferDrawChar(char ch)
{
    // -- off the bottom of the screen: drop the character (no scrolling yet).
    //    NOTE(review): HEIGHT is a macro here while the width checks below use
    //    GetFrameBufferWidth() -- confirm HEIGHT matches the real fb height
    if (GetRowPos() > HEIGHT / FONT_HEIGHT) return;
    // -- non-ASCII bytes: render a UTF-8 lead byte (0b11xxxxxx) as '?' and
    //    silently drop continuation bytes (0b10xxxxxx)
    if (ch & 0x80) {
        if ((ch & 0xc0) == 0xc0) ch = '?';
        else return;
    }
    // -- newline acts as carriage-return + line-feed
    if (ch == '\n') {
        SetColPos(0);
        SetRowPos(GetRowPos() + 1);
        return;
    }
    // -- tab advances to the next 8-column stop, wrapping to the next row if needed
    if (ch == '\t') {
        SetColPos(GetColPos() + (8 - (GetColPos() % 8)));
        if (GetColPos() > GetFrameBufferWidth() / FONT_WIDTH) {
            SetColPos(0);
            SetRowPos(GetRowPos() + 1);
        }
        return;
    }
    uint8_t *chImg = &systemFont[ch * FONT_HEIGHT]; // first the character image (16 rows per image)
    // -- destination pixel; the cast assumes a 16-bit-per-pixel framebuffer
    uint16_t *where = &((uint16_t *)GetFrameBufferAddr())[
            (GetRowPos() * GetFrameBufferWidth() * FONT_HEIGHT) + (GetColPos() * FONT_WIDTH)];
    // -- one font byte per glyph row; bit 0 is the leftmost pixel (c shifts right
    //    as the column index advances)
    for (int i = 0; i < FONT_HEIGHT; i ++, where += GetFrameBufferWidth()) {
        uint8_t c = chImg[i];
        for (int j = 0; j < FONT_WIDTH; j ++, c = c >> 1) {
            if (c & 0x01) where[j] = GetFgColor();
            else where[j] = GetBgColor();
        }
    }
    SetColPos(GetColPos() + 1);
}
<|start_filename|>modules/kernel/src/loader/LoaderMain.cc<|end_filename|>
//===================================================================================================================
//
// LoaderMain.cc -- The main routine for the loader module
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2017-Jun-07 Initial 0.0.0 ADCL Initial version
// 2018-Nov-11 Initial 0.2.0 ADCL Update the architecture abstraction for rpi2b
// 2019-Feb-10 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "hw-disc.h"
#include "pmm.h"
#include "serial.h"
#include "mmu.h"
#include "cpu.h"
#include "heap.h"
#include "fb.h"
#include "platform.h"
#include "entry.h"
#include "loader.h"
//
// -- The actual loader main function
// -------------------------------
EXTERN_C EXPORT LOADER NORETURN
void LoaderMain(archsize_t arg0, archsize_t arg1, archsize_t arg2)
{
    LoaderFunctionInit(); // go and initialize all the function locations
    MmuInit(); // Complete the MMU initialization for the loader
    PlatformEarlyInit();
    kprintf("Welcome\n");
    FrameBufferInit();
    HeapInit();
    PmmInit();
    PlatformInit();
    // -- NOTE(review): this assertion fires when stabEnd < 4MB, yet the message
    //    complains the kernel is *bigger* than 4MB -- one of the two looks
    //    inverted; confirm the meaning of `stabEnd` against the linker script
    if (!assert(stabEnd >= (4 * 1024 * 1024))) {
        kprintf("The kernel is bigger than 4MB; time to add more page mappings!\n");
        Halt();
    }
    // -- Theoretically, after this point, there should be very little architecture-dependent code
    JumpKernel(kInit, STACK_LOCATION);
    // -- if we ever get here, we have some big problems!
    assert_msg(false, "Returned from kInit() back to LoaderMain()!!!");
    while (1) {}
}
<|start_filename|>platform/pc/apic/IoApicRegisterHandler.cc<|end_filename|>
//===================================================================================================================
//
// IoApicRegisterHandler.cc -- Register a handler to take care of an IRQ
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-20 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "hw-disc.h"
#include "mmu.h"
#include "interrupt.h"
#include "pic.h"
//
// -- Register an IRQ handler
// -----------------------
EXTERN_C EXPORT KERNEL
isrFunc_t _IoApicRegisterHandler(PicDevice_t *dev, Irq_t irq, int vector, isrFunc_t handler)
{
    // -- validate everything up front; (isrFunc_t)-1 is the error return
    if (!dev) return (isrFunc_t)-1;
    if (!handler) return (isrFunc_t)-1;
    if (irq < 0 || irq > 23) return (isrFunc_t)-1;
    if (vector < 0 || vector > 255) return (isrFunc_t)-1;
    kprintf("Processing an audited request to map irq %x to vector %x\n", irq, vector);
    // -- keep the irq masked while its redirection entry is rewritten
    PicMaskIrq(dev, irq);
    Ioapicredtbl_t redir;
    redir.reg = 0; // start from a clean entry
    redir.intvec = vector;
    redir.delmod = DELMODE_FIXED;
    redir.destmod = 0; // physical cpu delivery
    redir.intpol = 1; // active low
    redir.triggerMode = 1; // level triggered
    redir.intMask = 1; // leave this masked!!
    redir.dest = 0; // NOTE(review): old comment said "apic id 1 for now" but 0 is written -- confirm
    IoApicDeviceData_t *data = (IoApicDeviceData_t *)dev->device.deviceData;
    archsize_t reg = IoApicRedir(data, irq);
    kprintf(".. the table register offset is %x\n", reg);
    kprintf(".. Expect to write %p and %p to the APIC registers\n", redir.reg0, redir.reg1);
    // -- each redirection entry spans two consecutive 32-bit registers
    IoapicWrite(data->ioapicBase, reg, redir.reg0);
    IoapicWrite(data->ioapicBase, reg + 1, redir.reg1);
    kprintf(".. the values of the APIC registers are now %p and %p\n", IoapicRead(data->ioapicBase, reg),
            IoapicRead(data->ioapicBase, reg + 1));
    // -- hook the vector, then unmask the irq; the previous handler is returned
    isrFunc_t rv = IsrRegister(vector, handler);
    PicUnmaskIrq(dev, irq);
    kprintf(".. Request complete\n");
    return rv;
}
<|start_filename|>platform/bcm2836/serial/SerialPutChar.cc<|end_filename|>
//===================================================================================================================
//
// SerialPutChar.cc -- Write a single character to the UART Serial Port
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-13 Initial 0.2.0 ADCL Initial version -- leveraged out of century's `uart-dev.c`
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "hardware.h"
#include "spinlock.h"
#include "serial.h"
// -- narrow the int to a byte and forward it to the debug serial device
EXTERN_C EXPORT LOADER
void ___SerialPutChar(int ch)
{
    debugSerial.SerialPutChar(&debugSerial, (uint8_t)ch);
}
//
// -- Write a single character to the UART
// ------------------------------------
EXTERN_C EXPORT KERNEL
void _SerialPutChar(SerialDevice_t *dev, uint8_t ch)
{
    if (!dev) return;
    // -- translate "\n" into "\r\n"; the recursive call emits the '\r' first
    if (ch == '\n') dev->SerialPutChar(dev, '\r');
    // -- take the device lock with interrupts disabled for the duration
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(dev->lock) {
        // -- busy-wait on LSR bit 5 (presumably transmitter-ready -- TODO confirm)
        while ((MmioRead(dev->base + AUX_MU_LSR_REG) & (1<<5)) == 0) { }
        MmioWrite(dev->base + AUX_MU_IO_REG, ch);
        SPINLOCK_RLS_RESTORE_INT(dev->lock, flags);
    }
}
<|start_filename|>platform/pc/timer/TimerInit.cc<|end_filename|>
//===================================================================================================================
//
// TimerInit.cc -- Initialize the Programmable Interrupt Timer (PIT)
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Note that this is only included for legacy reasons and Century-OS will prefer the Local APIC for the timer.
// For the moment, it is also used for initialization.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Sep-16 Leveraged from Royalty
// 2012-Sep-23 set new SpuriousIRQ handler
// 2013-Sep-03 #73 Encapsulate Process Structure
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2018-Oct-28 Initial 0.1.0 ADCL Copied this function from Century32 to Centrury-OS
// 2019-Feb-09 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "timer.h"
//
// -- This is the callback function that is triggered with each interrupt
// -------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void TimerCallBack(isrRegs_t *reg);
//
// -- Set the timer to fire at the desired frequency
// ----------------------------------------------
EXTERN_C EXPORT KERNEL
void _TimerInit(TimerDevice_t *dev, uint32_t frequency)
{
    if (!dev) return;
    if (frequency == 0) return;     // guard the divide below (and 0 Hz is meaningless)

    dev->pic = picControl;
    uint16_t port = dev->base;

    // -- 1193180 Hz is the PIT input clock; the divisor is programmed low byte
    //    then high byte (command 0x36 selects exactly that access mode)
    uint32_t divisor = 1193180 / frequency;
    uint8_t l = (uint8_t)(divisor & 0xff);
    uint8_t h = (uint8_t)((divisor >> 8) & 0xff);

    // -- hook the timer callback to IRQ0 (vector 32) before starting the counter
    PicRegisterHandler(dev->pic, IRQ0, 32, dev->TimerCallBack);

    outb(port + TIMER_COMMAND, 0x36);
    outb(port + TIMER_CHAN_0, l);
    outb(port + TIMER_CHAN_0, h);
}
<|start_filename|>platform/pc/serial/SerialPutChar.cc<|end_filename|>
//===================================================================================================================
//
// SerialPutChar.cc -- Output a single character to the serial port
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2017-Nov-11 Initial 0.0.0 ADCL Initial version -- well, documentated at the first time
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "hardware.h"
#include "spinlock.h"
#include "serial.h"
//
// -- Output a single character to the serial port
// --------------------------------------------
EXTERN_C EXPORT KERNEL
void _SerialPutChar(SerialDevice_t *dev, uint8_t ch)
{
    if (!dev) return;
    // -- translate "\n" into "\r\n"; the recursive call emits the '\r' first
    if (ch == '\n') dev->SerialPutChar(dev, '\r');
    // -- take the device lock with interrupts disabled for the duration
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(dev->lock) {
        while (!dev->SerialHasRoom(dev)) {}     // busy-wait for transmit space
        outb(dev->base + SERIAL_DATA, ch);
        SPINLOCK_RLS_RESTORE_INT(dev->lock, flags);
    }
}
<|start_filename|>modules/kernel/src/msgq/MsgqCreate.cc<|end_filename|>
//===================================================================================================================
//
// MsgqCreate.cc -- Find and create a new message queue, returning its pointer
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- --------------------------------------------------------------------------
// 2020-Apr-09 Initial v0.6.1a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
#include "process.h"
#include "cpu.h"
#include "spinlock.h"
#include "msgq.h"
//
// -- Create a message queue, register it with the current process, and publish it on the
//    global message-queue list.  Returns the newly allocated queue (asserts on OOM).
//    ----------------------
EXTERN_C EXPORT KERNEL
MessageQueue_t *MessageQueueCreate(void)
{
    archsize_t flags;

    // -- First create the message Queue and initialize it while it is still private to this CPU
    MessageQueue_t *rv = NEW(MessageQueue_t);
    assert(rv != NULL);
    AtomicSet(&rv->status, MSGQ_INITIALIZING);
    ListInit(&rv->list);
    ListInit(&rv->queue.list);
    rv->queue.count = 0;
    rv->queue.lock = {0};
    ListInit(&rv->procList.list);
    rv->procList.count = 0;
    rv->procList.lock = {0};
    ListInit(&rv->waiting.list);
    rv->waiting.count = 0;
    rv->waiting.lock = {0};

    // -- With that done, add the reference to the queue and to the Process
    Reference_t *ref = NEW(Reference_t);
    assert(ref != NULL);
    ref->type = REF_MSGQ;
    ListInit(&ref->procRefList);
    ListInit(&ref->resourceRefBy);
    ref->process = currentThread;
    ref->resAddr = rv;

    // -- link the reference into the current process's reference list
    //    (fixed: the '&' before currentThread had been corrupted into a '¤' mojibake character,
    //    which does not compile)
    flags = SPINLOCK_BLOCK_NO_INT(currentThread->references.lock) {
        ListAddTail(&currentThread->references, &ref->procRefList);
        SPINLOCK_RLS_RESTORE_INT(currentThread->references.lock, flags);
    }

    // -- link the same reference into the queue's list of referencing processes
    flags = SPINLOCK_BLOCK_NO_INT(rv->procList.lock) {
        ListAddTail(&rv->procList, &ref->resourceRefBy);
        SPINLOCK_RLS_RESTORE_INT(rv->procList.lock, flags);
    }

    // -- finally, we can add it to the msgq list and return its value; only once the status
    //    flips to MSGQ_ALLOCATED is the queue considered usable by others
    flags = SPINLOCK_BLOCK_NO_INT(msgqList.lock) {
        ListAddTail(&msgqList, &rv->list);
        msgqList.count ++;
        AtomicSet(&rv->status, MSGQ_ALLOCATED);
        SPINLOCK_RLS_RESTORE_INT(msgqList.lock, flags);
    }

    return rv;
}
<|start_filename|>arch/x86/mmu/MmuClearFrame.cc<|end_filename|>
//===================================================================================================================
//
// MmuClearFrame.cc -- Clear a frame before adding it into the paging structures formally
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Note that this function does not need to trigger a TLB flush on other cores since this is not a shared mapping.
// Only one CPU can get a lock to perform this function at a time, so by definition, no other cores require a TLB
// flush -- the state when the lock is released is the same as it was when the lock was obtained: nothing is mapped.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-06 Initial 0.3.0 ADCL Initial Version
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "spinlock.h"
#include "mmu.h"
//
// -- Mount a frame into the kernel address space and clear its contents
//    ------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void MmuClearFrame(frame_t frame)
{
    // -- only one CPU at a time may use the dedicated MMU_CLEAR_FRAME mapping window
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(frameClearLock) {
        PageEntry_t *pte = PT_ENTRY(MMU_CLEAR_FRAME);
        if (!pte->p) {
            // -- map the requested frame at the clearing address; writable, then flush this
            //    CPU's TLB entry for the window (no cross-CPU shootdown needed -- see header)
            pte->frame = frame;
            pte->rw = X86_MMU_WRITE;
            pte->us = X86_MMU_USER;
            pte->p = X86_MMU_PRESENT_TRUE;
            InvalidatePage(MMU_CLEAR_FRAME);
        }
        // -- NOTE(review): if the entry were unexpectedly still present, some other frame would
        //    be the one zeroed; the unconditional unmap below is what maintains the !p invariant
        //    between calls -- confirm
        kMemSetB((void *)MMU_CLEAR_FRAME, 0, FRAME_SIZE);
        MmuUnmapPage(MMU_CLEAR_FRAME);
        SPINLOCK_RLS_RESTORE_INT(frameClearLock, flags);
    }
}
<|start_filename|>arch/arm/cpu/ArchLateCpuInit.cc<|end_filename|>
//===================================================================================================================
//
// ArchLateCpuInit.cc -- Complete the final initialization for the CPU
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Jun-16 Initial 0.4.6 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "platform.h"
#include "serial.h"
#include "pic.h"
#include "cpu.h"
//
// -- Finish the last of the per-CPU initialization: bring up the FPU, then publish this
//    core's per-CPU data pointer in the TPIDRPRW register
EXTERN_C EXPORT LOADER
void ArchLateCpuInit(int c)
{
    // -- floating point must be usable before anything touches FP state
    ArchFpuInit();

    // -- presumably TPIDRPRW is how `thisCpu` is recovered from any context -- confirm in cpu.h
    uint32_t perCpuAddr = (uint32_t)cpus.perCpuData[c].cpu;
    WriteTPIDRPRW(perCpuAddr);
}
<|start_filename|>modules/kernel/src/process/ProcessUpdateTimeUsed.cc<|end_filename|>
//===================================================================================================================
//
// ProcessUpdateTimeUsed.cc -- Update the time used for the current process before changing
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-18 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "lists.h"
#include "timer.h"
#include "process.h"
//
// -- Capture the current timer count and charge the time elapsed since the last capture
//    either to the running process or, when no process is current, to the CPU's idle time
//    ---------------------------------------------------------------------------
EXPORT KERNEL
void ProcessUpdateTimeUsed(void)
{
    const uint64_t current = TimerCurrentCount(timerControl);
    const uint64_t delta = current - thisCpu->lastTimer;
    thisCpu->lastTimer = current;

    if (currentThread != NULL) {
        currentThread->timeUsed += delta;
    } else {
        thisCpu->cpuIdleTime += delta;
    }
}
<|start_filename|>platform/bcm2836/mailbox/MailboxSend.cc<|end_filename|>
//===================================================================================================================
//
// MailboxSend.cc -- Send a message to a mailbox
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Please note that this function will perform the adjustment between ARM/VC address space.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Jan-05 Initial 0.2.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "hardware.h"
//
// -- Send a message to the mailbox
//    -----------------------------
// NOTE(review): the kprintf tracing below looks like leftover debug chatter; consider removing
EXTERN_C EXPORT KERNEL
void _MailboxSend(MailboxDevice_t *dev, archsize_t mb, archsize_t msg)
{
    kprintf("Checking dev..\n");
    if (!dev) return;
    kprintf("Checking msg..\n");
    // -- the message must be 16-byte aligned; its low 4 bits are reserved for the channel
    if ((msg & 0x0f) != 0) return;
    kprintf("Checking mb..\n");
    // -- the mailbox (channel) number must fit in the low 4 bits
    if ((mb & 0xfffffff0) != 0) return;
    kprintf(".. Preparing to send data...\n");
    // -- spin until the FULL flag (bit 31) clears in the status register
    while (MmioRead(dev->base + MB_STATUS) & (1 << 31)) { }
    // -- adjust from the ARM address space to the VC address space (see file header)
    msg -= ARM_MAILBOX_OFFSET;
    MmioWrite(dev->base + MB_WRITE, msg | mb);
    kprintf(".. Data Sent...\n");
}
<|start_filename|>modules/kernel/src/interrupts/IsrRegister.cc<|end_filename|>
//===================================================================================================================
//
// IsrRegister.cc -- Register an ISR Handler to the table
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// The previous handler (if any) is unconditionally replaced; it is returned to the caller so it can be restored.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-21 Initial version
// 2012-Sep-16 Leveraged from Century
// 2018-Jul-06 Initial 0.1.0 ADCL Copied this file from century32 to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "printf.h"
#include "interrupt.h"
//
// -- Install a handler into the ISR table for a vector, returning whatever handler was
//    registered there before (so the caller may restore it later)
//    ------------------------------------------------
isrFunc_t IsrRegister(uint8_t interrupt, isrFunc_t func)
{
    kprintf("Request to map vector %x to function at %p\n", interrupt, func);

    // -- the table swap must be atomic with respect to interrupts on this CPU
    archsize_t flags = DisableInterrupts();
    isrFunc_t previous = isrHandlers[interrupt];
    isrHandlers[interrupt] = func;
    RestoreInterrupts(flags);

    return previous;
}
<|start_filename|>modules/kernel/src/process/ProcessUnblock.cc<|end_filename|>
//===================================================================================================================
//
// ProcessUnblock.cc -- Unblock a process
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-22 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "lists.h"
#include "process.h"
//
// -- Unblock a process
//    -----------------
EXPORT KERNEL
void ProcessDoUnblock(Process_t *proc)
{
    if (!assert(proc != NULL)) return;
    // -- the caller must already hold the scheduler lock; this function does not take it
    assert_msg(AtomicRead(&scheduler.schedulerLockCount) > 0,
            "Calling `ProcessDoUnblock()` without holding the proper lock");

    // -- mark the process ready and hand it to the scheduler's ready queue
    proc->status = PROC_READY;
    ProcessDoReady(proc);
}
<|start_filename|>arch/x86/mmu/MmuDumpTables.cc<|end_filename|>
//===================================================================================================================
//
// MmuDumpTables.cc -- Dump the Paging Tables for a virtual address
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2017-Jun-27 Initial 0.1.0 ADCL Initial version
// 2019-Apr-28 0.4.1 ADCL Resurrected from an old commit (and updated) for debugging
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "cpu.h"
#include "pmm.h"
#include "mmu.h"
//
// -- Walk and print the paging structures (PD then PT entry) that map a virtual address;
//    stops early if the page-directory entry is not present.  Debugging aid only.
//    ------------------------------------------
EXTERN_C EXPORT KERNEL
void MmuDumpTables(archsize_t addr)
{
    archsize_t pdVirt = RECURSIVE_PD_VADDR;

    kprintf("\nMmuDumpTables: Walking the page tables for address %p\n", addr);
    kprintf("Level Tabl-Addr Index Next Frame us rw pr\n");
    kprintf("----- ---------- ---------- ---------- -- -- --\n");

    // -- top 10 bits select the page-directory entry
    uint32_t idx = (addr >> 22) & 0x3ff;
    PageEntry_t *entry = &((PageEntry_t *)pdVirt)[idx];
    kprintf("PD %p %p %p %s %s %s\n", pdVirt, idx, entry->frame,
            entry->us ? "1" : "0", entry->rw ? "1" : "0", entry->p ? "1" : "0");

    if (!entry->p) return;

    // -- middle 10 bits select the page-table entry, reached through the recursive mapping
    PageEntry_t *pageTable = (PageEntry_t *)(RECURSIVE_VADDR + 0x1000 * idx);
    idx = (addr >> 12) & 0x3ff;
    entry = &pageTable[idx];
    kprintf("PT %p %p %p %s %s %s\n", pageTable, idx, entry->frame,
            entry->us ? "1" : "0", entry->rw ? "1" : "0", entry->p ? "1" : "0");
}
<|start_filename|>platform/pc/apic/LApicBroadcastIpi.cc<|end_filename|>
//===================================================================================================================
//
// LApicBroadcastIpi.cc -- Broadcast an IPI to all CPUs
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Jun-08 Initial 0.4.5 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "timer.h"
#include "hardware.h"
#include "pic.h"
//
// -- Broadcast an IPI to all CPUs except this one (the 0b11 ICR destination shorthand
//    written below is "all excluding self")
//    -----------------------------------------------
EXTERN_C EXPORT KERNEL
void _LApicBroadcastIpi(PicDevice_t *dev, int ipi)
{
#if DEBUG_ENABLED(LApicBroadcastIpi)
    kprintf("Entered %s on CPU %d for dev %p\n", __func__, thisCpu->cpuNum, dev);
#endif

    if (!dev) return;
    if (!dev->ipiReady) {
#if DEBUG_ENABLED(LApicBroadcastIpi)
        kprintf("IPI still not ready at %p\n", dev);
#endif
        return;
    }

#if DEBUG_ENABLED(LApicBroadcastIpi)
    kprintf(".. Qualified on CPU %d\n", thisCpu->cpuNum);
#endif

//    uint32_t icr = (0b11<<18) | (1<<14) | ipi;        // all except self | Assert | vector
    uint32_t icr = (0b11<<18) | ipi; // all except self | vector

    // -- write the high half first (destination field is unused with a shorthand), then the
    //    low half, which is the write that actually triggers the IPI
    MmioWrite(LAPIC_MMIO + LAPIC_ICR_HI, 0x00);
    MmioWrite(LAPIC_MMIO + LAPIC_ICR_LO, icr);

#if DEBUG_ENABLED(LApicBroadcastIpi)
    kprintf(".. The ESR report %p\n", MmioRead(LAPIC_MMIO + LAPIC_ESR));
    kprintf(".. Delivery status reports %p\n", MmioRead(LAPIC_MMIO + LAPIC_ICR_LO));
    kprintf(".. Completed on CPU %d\n", thisCpu->cpuNum);
#endif
}
<|start_filename|>modules/kernel/inc/lists.h<|end_filename|>
//===================================================================================================================
//
// lists.h -- Standard list for the entire kernel
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// The list structures in this file are standard for all lists in the entire Century-OS implementaiton. All lists
// will make use of these structures.
//
// The inspiration for this list structure and implementation is taken from the Linux list implementation.
// References can be found in the Linux Kernel Development book, chapter 6 and the linux source file at
// http://www.cs.fsu.edu/~baker/devices/lxr/http/source/linux/include/linux/list.h
//
// In short, the list implementation is a circular doubly linked list. As such there is no specific head and tail.
//
// ------------------------------------------------------------------------------------------------------------------
//
// IMPORTANT PROGRAMMING NOTE:
// These functions are not atomic. This means that all calling functions will also need to mantain locks on the
// structures and lists before maintaining the list. Put another way, the caller is required to ensure that
// nothing else changes the list while these functions are being executed.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2016-Sep-12 Initial 0.0.0 ADCL Initial version
// 2018-May-24 Initial 0.1.0 ADCL Copy this file from century to century-os
// 2018-Nov-09 0.1.0 ADCL Reformat the list structures to have a list head, creating a separate type.
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
// 2019-Mar-10 Initial 0.3.1 ADCL Establish QueueHead_t and StackHead_t types
//
//===================================================================================================================
#pragma once
#include "types.h"
#include "spinlock.h"
//
// -- This macro determines the offset of a member of a structure. The 'magic' of this macro is the calculation
// of an address as an offset from the address 0x00000000.
// (A hand-rolled equivalent of the standard `offsetof()`; the uint32_t cast assumes a 32-bit target.)
// ----------------------------------------------------------------------------------------------------------
#define MEMBER_OFFSET(type,member) ((uint32_t)(&((type *)0x00000000)->member))
//
// -- This macro determines the address of the parent of a member structure.
// Usage: FIND_PARENT(list->next, Process, global);
// (Equivalent to the Linux kernel's `container_of()`; relies on the GCC `typeof` and
// statement-expression extensions.)
// ----------------------------------------------------------------------
#define FIND_PARENT(ptr,type,member) ({ \
const typeof(((type *)0x00000000)->member) *__mptr = (ptr); \
(type *)((char *)__mptr - MEMBER_OFFSET(type,member)); })
//
// -- This is the header of the list. The nested List_t is the link node embedded in client
//    structures; the head carries the anchor node, a lock, and a caller-maintained count.
// -------------------------------
typedef struct ListHead_t {
    typedef struct List_t {
        struct List_t *prev;
        struct List_t *next;
    } List_t;

    List_t list;
    Spinlock_t lock; // -- this or a "bigger" lock must be obtained to change the list contents
    size_t count; // -- this is available for use by software; not used by `lists.h`
} ListHead_t;
//
// -- Declare and initialize a new List not in a structure
//    (the anchor node points back at itself -- an empty list; lock and count start at zero)
// ----------------------------------------------------
#define NEW_LIST(name) ListHead_t name = { { &(name.list), &(name.list) }, {0, 0}, 0 };
//
// -- Make a node an empty list: both of its links refer back to the node itself
// ------------------------------------
EXPORT INLINE
void ListInit(ListHead_t::List_t * const list) {
    list->prev = list;
    list->next = list;
}
//
// -- Low-level helper: splice node `nw` in between the adjacent nodes `pv` and `nx`
// ------------------------------------------
EXPORT INLINE
void __list_add(ListHead_t::List_t * const nw, ListHead_t::List_t * const pv, ListHead_t::List_t * const nx) {
    nx->prev = nw;
    nw->next = nx;
    nw->prev = pv;
    pv->next = nw;
}
//
// -- Low-level helper: unlink whatever lies between `pv` and `nx` by joining them directly
// -----------------------------------------------
EXPORT INLINE
void __list_del(ListHead_t::List_t * const pv, ListHead_t::List_t * const nx) {
    nx->prev = pv;
    pv->next = nx;
}
//
// -- Insert a node at the front of the list (immediately after the anchor)
// -----------------------------------------------------------
EXPORT INLINE
void ListAdd(ListHead_t * const head, ListHead_t::List_t * const nw) {
    ListHead_t::List_t *anchor = &head->list;
    __list_add(nw, anchor, anchor->next);
}
//
// -- Insert a node at the back of the list (immediately before the anchor)
// --------------------------------------------------------------
EXPORT INLINE
void ListAddTail(ListHead_t * const head, ListHead_t::List_t * const nw) {
    ListHead_t::List_t *anchor = &head->list;
    __list_add(nw, anchor->prev, anchor);
}
//
// -- Unlink a node from its list, leaving the node's own pointers NULL
// -----------------------------------------------------------------
EXPORT INLINE
void ListRemove(ListHead_t::List_t * const entry) {
    __list_del(entry->prev, entry->next);
    entry->next = 0;
    entry->prev = 0;
}
//
// -- Unlink a node from its list, then re-initialize it as its own empty list
// ----------------------------------------------------------------------------
EXPORT INLINE
void ListRemoveInit(ListHead_t::List_t * const entry) {
    __list_del(entry->prev, entry->next);
    ListInit(entry);
}
//
// -- A list is empty exactly when the anchor's next link points back at the anchor
// --------------------------------------------------------------------------------------
EXPORT INLINE
bool IsListEmpty(const ListHead_t * const head) {
    return (&head->list == head->list.next);
}
//
// -- An entry is last when following its next link lands back on the anchor
// -------------------------------------------------------------------------------------------
EXPORT INLINE
bool IsLastInList(const ListHead_t * const head, const ListHead_t::List_t * const entry) {
    return &head->list == entry->next;
}
//
// -- Detach an entry from wherever it lives and re-insert it at the front of `head`
// -------------------------------------------------------------
EXPORT INLINE
void ListMove(ListHead_t * const head, ListHead_t::List_t * const entry) {
    __list_del(entry->prev, entry->next);
    ListAdd(head, entry);
}
//
// -- Detach an entry from wherever it lives and re-insert it at the back of `head`
// -------------------------------------------------------
EXPORT INLINE
void ListMoveTail(ListHead_t * const head, ListHead_t::List_t * const entry) {
    __list_del(entry->prev, entry->next);
    ListAddTail(head, entry);
}
//
// -- Walk the whole list and count its nodes (O(n); independent of `head->count`)
// -------------------------------------
EXPORT INLINE
int ListCount(ListHead_t *const head) {
    int total = 0;
    for (ListHead_t::List_t *cur = head->list.next; cur != &head->list; cur = cur->next) {
        total ++;
    }
    return total;
}
//
// -- This is a queue; the next thing to operate on is at head
// --------------------------------------------------------
typedef ListHead_t QueueHead_t;

//
// -- Enqueue a node onto a queue (FIFO: new entries are added at the tail)
// ---------------------------
EXPORT INLINE
void Enqueue(QueueHead_t *head, ListHead_t::List_t *list) { ListAddTail(head, list); }
//
// -- This is a stack; the next thing to operate on is at head
// --------------------------------------------------------
typedef ListHead_t StackHead_t;

//
// -- Push a node onto a stack (LIFO: new entries are added at the head)
// ------------------------
EXPORT INLINE
void Push(StackHead_t *head, ListHead_t::List_t *list) { ListAdd(head, list); }
<|start_filename|>modules/kernel/src/process/ProcessBlock.cc<|end_filename|>
//===================================================================================================================
//
// ProcessBlock.cc -- Block a process
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Note that this function will leave the current process not on any queue. It is up to the calling procedure to
// manage the queue that this Process_t structure is left on.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-22 Initial 0.3.2 ADCL Initial version
// 2019-Nov-27 Initial 0.4.6 ADCL Reformat for a single spinlock lock
//
//===================================================================================================================
#include "types.h"
#include "lists.h"
#include "process.h"
//
// -- Block the current process
//    -------------------------
EXPORT KERNEL
void ProcessDoBlock(ProcStatus_t reason)
{
    // -- the reason must be one of the defined blocked-state values
    if (!assert(reason >= PROC_INIT && reason <= PROC_MSGW)) return;
    if (!assert(currentThread != 0)) return;
    // -- the caller must already hold the scheduler lock; this function does not take it
    assert_msg(AtomicRead(&scheduler.schedulerLockCount) > 0, "Calling `ProcessDoBlock()` without the proper lock");

    // -- record why we blocked, flag a reschedule, and hand control to the scheduler;
    //    the caller is responsible for whatever queue this Process_t ends up on (see header)
    currentThread->status = reason;
    currentThread->pendingErrno = 0;
    scheduler.processChangePending = true;
    ProcessSchedule();
}
<|start_filename|>arch/arm/inc/arch-mmu.h<|end_filename|>
//===================================================================================================================
//
// arch-mmu.h -- The rpi2b structures for interfacing with the Memory Management Unit (MMU)
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// The ARM architecture is different than the x86-family architecture. The MMU tables are called Translation
// Tables and there are 2 Levels: 1 and 2. There is 1 TTL1 table that is 16K long and up to 4096 TTL2 tables
// that are 1K long each. We are going to stuff 4 X 1K (consecutive) tables into a single 4K frame, mapping all
// 4K as a single operation, and we will aggregate 4 X 4K frames into one 16K aligned TTL1 table.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-11 Initial 0.2.0 ADCL Initial version
// 2018-Nov-14 Initial 0.2.0 ADCL Copied the structures from century
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#ifndef __MMU_H__
# error "Do not include 'arch-mmu-prevalent.h' directly; include 'mmu.h' instead, which will pick up this file."
#endif
#include <stdint.h>
#include <stddef.h>
#include <stdbool.h>
#include "types.h"
//
// -- The Translation Table Level 1 structure (TTL1). When valid, an entry points to a
//    1K-aligned TTL2 table (hence the 22-bit address field).
// ----------------------------------------------
typedef struct Ttl1_t {
    unsigned int fault : 2; // 00=fault; 01=TTL2 table address; 10 and 11 unused here
    unsigned int sbz : 3; // sbz = should be zero
    unsigned int domain : 4; // domain -- we will use 0b0000 for now
    unsigned int p : 1; // unimplemented in the rpi2b arch; use 0
    unsigned int ttl2 : 22; // the frame address of the ttl2 table (notice aligned to 1K)
} __attribute__((packed)) Ttl1_t;
//
// -- The Translation Table Level 2 structure (TTL2). Each valid entry maps one 4K small page.
// ----------------------------------------------
typedef struct Ttl2_t {
    unsigned int fault : 2; // 00=fault; 01=large page(not used); 1x=small page (x sets execute never)
    unsigned int b : 1; // buffered
    unsigned int c : 1; // cached
    unsigned int ap : 2; // access permissions
    unsigned int tex : 3; // Type Extension
    unsigned int apx : 1; // access permission extension
    unsigned int s : 1; // sharable
    unsigned int nG : 1; // not Global (per-process mapping when set)
    unsigned int frame : 20; // this is the final 4K frame address
} __attribute__((packed)) Ttl2_t;
//
// -- This is a function to create a new top-level paging structure
// -------------------------------------------------------------
EXTERN_C EXPORT KERNEL
frame_t MmuMakeNewTtl1Table(void);

// -- Dump the translation table entries mapping `addr` (debugging aid)
EXTERN_C EXPORT KERNEL
void MmuDumpTables(archsize_t addr);

//
// -- Several macros to help with debugging the MMU Tables. Each resolves to a pointer to the
//    TTL1/TTL2 entry that maps the given virtual address, via the table-mapping windows.
// ----------------------------------------------------
#define MMU_TTL1_ENTRY(addr) (&(((Ttl1_t *)ARMV7_TTL1_TABLE_VADDR)[addr >> 20]))
#define MMU_TTL2_ENTRY(addr) (&(((Ttl2_t *)ARMV7_TTL2_TABLE_VADDR)[addr >> 12]))
<|start_filename|>arch/arm/inc/arch-types.h<|end_filename|>
//===================================================================================================================
//
// arch-types.h -- Type definitions specific to rpi2b architectures
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// These types are architecture dependent.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-11 Initial 0.2.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#ifndef __TYPES_H__
# error "Do not include 'arch-types.h' directly; include 'types.h' instead, which will pick up this file."
#endif
//
// -- This is the address width size for this architecture (ARM here is 32-bit)
// ----------------------------------------------------
typedef uint32_t archsize_t;

//
// -- This is the equivalent to a port size for x86 (ARM devices are memory-mapped)
// ---------------------------------------------
typedef archsize_t devaddr_t;
//
// -- This is the order of the registers on the stack.
//    NOTE(review): this layout must match exactly what the assembly exception-entry code
//    pushes -- confirm against the ISR stubs before reordering any field.
// -----------------------------------------------
typedef struct isrRegs_t {
    archsize_t sp_usr;      // user-mode stack pointer
    archsize_t lr_usr;      // user-mode link register
    archsize_t sp_svc;      // supervisor-mode stack pointer
    archsize_t lr_svc;      // supervisor-mode link register
    archsize_t type;        // exception type tag
    archsize_t r0;
    archsize_t r1;
    archsize_t r2;
    archsize_t r3;
    archsize_t r4;
    archsize_t r5;
    archsize_t r6;
    archsize_t r7;
    archsize_t r8;
    archsize_t r9;
    archsize_t r10;
    archsize_t r11;
    archsize_t r12;
    archsize_t lr_ret;      // return address for the exception
    archsize_t spsr_ret;    // saved program status register to restore
} isrRegs_t;
<|start_filename|>platform/bcm2836/inc/platform-gpio.h<|end_filename|>
//===================================================================================================================
//
// platform-gpio.h -- Definitions and functions for the GPIO hardware for bcm2835
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#pragma once
#ifndef __HARDWARE_H__
# error "Use #include \"hardware.h\" and it will pick up this file; do not #include this file directly."
#endif
//
// -- This is the enumeration of the GPIO pins that are available for use (GPIO0..GPIO53,
//    numbered sequentially from 0)
// -------------------------------------------------------------------
typedef enum {
    GPIO0, GPIO1, GPIO2, GPIO3, GPIO4, GPIO5, GPIO6, GPIO7, GPIO8, GPIO9,
    GPIO10, GPIO11, GPIO12, GPIO13, GPIO14, GPIO15, GPIO16, GPIO17, GPIO18, GPIO19,
    GPIO20, GPIO21, GPIO22, GPIO23, GPIO24, GPIO25, GPIO26, GPIO27, GPIO28, GPIO29,
    GPIO30, GPIO31, GPIO32, GPIO33, GPIO34, GPIO35, GPIO36, GPIO37, GPIO38, GPIO39,
    GPIO40, GPIO41, GPIO42, GPIO43, GPIO44, GPIO45, GPIO46, GPIO47, GPIO48, GPIO49,
    GPIO50, GPIO51, GPIO52, GPIO53,
} GpioPin_t;
//
// -- These are the classes of alternate function that can be selected for each GPIO pin
// -----------------------------------------------------------------------------
typedef enum {
    ALT0, ALT1, ALT2, ALT3, ALT4, ALT5,
} GpioAlt_t;
//
// -- Define a common interface for the GPIO functions that are needed
// ----------------------------------------------------------------
typedef struct GpioDevice_t {
    archsize_t base;                                                    // -- MMIO base address of the GPIO block
    void (*GpioSelectAlt)(struct GpioDevice_t *, GpioPin_t, GpioAlt_t); // -- select an alternate function for a pin
    void (*GpioEnablePin)(struct GpioDevice_t *, GpioPin_t);            // -- enable a pin
    void (*GpioDisablePin)(struct GpioDevice_t *, GpioPin_t);           // -- disable a pin
} GpioDevice_t;
//
// -- Here, declare the different configurations of the GPIO the kernel will use
// ---------------------------------------------------------------
EXTERN KERNEL_DATA
GpioDevice_t kernelGpio;
//
// -- These are the common interface functions we will use to interact with the GPIO. These functions are
// not safe in that they will not check for nulls before calling the function. Therefore, caller beware!
// -----------------------------------------------------------------------------------------------------------
// -- forward to the device's alternate-function selector
EXPORT INLINE
void GpioSelectAlt(GpioDevice_t *dev, GpioPin_t pin, GpioAlt_t alt) { dev->GpioSelectAlt(dev, pin, alt); }

// -- forward to the device's pin-enable function
EXPORT INLINE
void GpioEnablePin(GpioDevice_t *dev, GpioPin_t pin) { dev->GpioEnablePin(dev, pin); }

// -- forward to the device's pin-disable function
EXPORT INLINE
void GpioDisablePin(GpioDevice_t *dev, GpioPin_t pin) { dev->GpioDisablePin(dev, pin); }
//
// -- Here are the function prototypes needed for these operations (the concrete bcm2835
//    implementations behind the device function pointers above)
// ------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void _GpioSelectAlt(GpioDevice_t *dev, GpioPin_t pin, GpioAlt_t alt);
EXTERN_C EXPORT KERNEL
void _GpioEnablePin(GpioDevice_t *dev, GpioPin_t pin);
EXTERN_C EXPORT KERNEL
void _GpioDisablePin(GpioDevice_t *dev, GpioPin_t pin);
//
// -- since there is a lot of duplication, reuse these values. These are the 3-bit GPFSELn
//    function-select codes from the BCM2835 peripherals manual.
// -------------------------------------------------------
#define GPIO_PININ (0b000) // Pin is an input
#define GPIO_PINOUT (0b001) // Pin is an output
#define GPIO_AFUNC0 (0b100) // Takes alternate function 0
#define GPIO_AFUNC1 (0b101) // Takes alternate function 1
#define GPIO_AFUNC2 (0b110) // Takes alternate function 2
#define GPIO_AFUNC3 (0b111) // Takes alternate function 3
#define GPIO_AFUNC4 (0b011) // Takes alternate function 4
#define GPIO_AFUNC5 (0b010) // Takes alternate function 5 (fixed: was 0b110, which is ALT2's code)
#define GPIO_FSEL0 (0x0) // GPIO Function Select 0
//-------------------------------------------------------------------------------------------------------------------
#define GPIOFSEL0_SEL9 (7<<27) // Function Select 9
#define GPIOFSEL0_SEL8 (7<<24) // Function Select 8
#define GPIOFSEL0_SEL7 (7<<21) // Function Select 7
#define GPIOFSEL0_SEL6 (7<<18) // Function Select 6
#define GPIOFSEL0_SEL5 (7<<15) // Function Select 5
#define GPIOFSEL0_SEL4 (7<<12) // Function Select 4
#define GPIOFSEL0_SEL3 (7<<9) // Function Select 3
#define GPIOFSEL0_SEL2 (7<<6) // Function Select 2
#define GPIOFSEL0_SEL1 (7<<3) // Function Select 1
#define GPIOFSEL0_SEL0 (7<<0) // Function Select 0
#define SH_SEL9(x) (((x)&0x7)<<27) // Shift to the proper bits
#define SH_SEL8(x) (((x)&0x7)<<24) // Shift to the proper bits
#define SH_SEL7(x) (((x)&0x7)<<21) // Shift to the proper bits
#define SH_SEL6(x) (((x)&0x7)<<18) // Shift to the proper bits
#define SH_SEL5(x) (((x)&0x7)<<15) // Shift to the proper bits
#define SH_SEL4(x) (((x)&0x7)<<12) // Shift to the proper bits
#define SH_SEL3(x) (((x)&0x7)<<9) // Shift to the proper bits
#define SH_SEL2(x) (((x)&0x7)<<6) // Shift to the proper bits
#define SH_SEL1(x) (((x)&0x7)<<3) // Shift to the proper bits
#define SH_SEL0(x) (((x)&0x7)<<0) // Shift to the proper bits
#define GPIO_FSEL1 (4) // GPIO Function Select 1
//-------------------------------------------------------------------------------------------------------------------
#define GPIOFSEL1_SEL19 (7<<27) // Function Select 19
#define GPIOFSEL1_SEL18 (7<<24) // Function Select 18
#define GPIOFSEL1_SEL17 (7<<21) // Function Select 17
#define GPIOFSEL1_SEL16 (7<<18) // Function Select 16
#define GPIOFSEL1_SEL15 (7<<15) // Function Select 15
#define GPIOFSEL1_SEL14 (7<<12) // Function Select 14
#define GPIOFSEL1_SEL13 (7<<9) // Function Select 13
#define GPIOFSEL1_SEL12 (7<<6) // Function Select 12
#define GPIOFSEL1_SEL11 (7<<3) // Function Select 11
#define GPIOFSEL1_SEL10 (7<<0) // Function Select 10
#define SH_SEL19(x) (((x)&0x7)<<27) // Shift to the proper bits
#define SH_SEL18(x) (((x)&0x7)<<24) // Shift to the proper bits
#define SH_SEL17(x) (((x)&0x7)<<21) // Shift to the proper bits
#define SH_SEL16(x) (((x)&0x7)<<18) // Shift to the proper bits
#define SH_SEL15(x) (((x)&0x7)<<15) // Shift to the proper bits
#define SH_SEL14(x) (((x)&0x7)<<12) // Shift to the proper bits
#define SH_SEL13(x) (((x)&0x7)<<9) // Shift to the proper bits
#define SH_SEL12(x) (((x)&0x7)<<6) // Shift to the proper bits
#define SH_SEL11(x) (((x)&0x7)<<3) // Shift to the proper bits
#define SH_SEL10(x) (((x)&0x7)<<0) // Shift to the proper bits
#define GPIO_FSEL2 (8) // GPIO Function Select 2
//-------------------------------------------------------------------------------------------------------------------
#define GPIOFSEL2_SEL29 (7<<27) // Function Select 29
#define GPIOFSEL2_SEL28 (7<<24) // Function Select 28
#define GPIOFSEL2_SEL27 (7<<21) // Function Select 27
#define GPIOFSEL2_SEL26 (7<<18) // Function Select 26
#define GPIOFSEL2_SEL25 (7<<15) // Function Select 25
#define GPIOFSEL2_SEL24 (7<<12) // Function Select 24
#define GPIOFSEL2_SEL23 (7<<9) // Function Select 23
#define GPIOFSEL2_SEL22 (7<<6) // Function Select 22
#define GPIOFSEL2_SEL21 (7<<3) // Function Select 21
#define GPIOFSEL2_SEL20 (7<<0) // Function Select 20
#define SH_SEL29(x) (((x)&0x7)<<27) // Shift to the proper bits
#define SH_SEL28(x) (((x)&0x7)<<24) // Shift to the proper bits
#define SH_SEL27(x) (((x)&0x7)<<21) // Shift to the proper bits
#define SH_SEL26(x) (((x)&0x7)<<18) // Shift to the proper bits
#define SH_SEL25(x) (((x)&0x7)<<15) // Shift to the proper bits
#define SH_SEL24(x) (((x)&0x7)<<12) // Shift to the proper bits
#define SH_SEL23(x) (((x)&0x7)<<9) // Shift to the proper bits
#define SH_SEL22(x) (((x)&0x7)<<6) // Shift to the proper bits
#define SH_SEL21(x) (((x)&0x7)<<3) // Shift to the proper bits
#define SH_SEL20(x) (((x)&0x7)<<0) // Shift to the proper bits
#define GPIO_FSEL3 (0xc) // GPIO Function Select 3
//-------------------------------------------------------------------------------------------------------------------
#define GPIOFSEL3_SEL39 (7<<27) // Function Select 39
#define GPIOFSEL3_SEL38 (7<<24) // Function Select 38
#define GPIOFSEL3_SEL37 (7<<21) // Function Select 37
#define GPIOFSEL3_SEL36 (7<<18) // Function Select 36
#define GPIOFSEL3_SEL35 (7<<15) // Function Select 35
#define GPIOFSEL3_SEL34 (7<<12) // Function Select 34
#define GPIOFSEL3_SEL33 (7<<9) // Function Select 33
#define GPIOFSEL3_SEL32 (7<<6) // Function Select 32
#define GPIOFSEL3_SEL31 (7<<3) // Function Select 31
#define GPIOFSEL3_SEL30 (7<<0) // Function Select 30
#define SH_SEL39(x) (((x)&0x7)<<27) // Shift to the proper bits
#define SH_SEL38(x) (((x)&0x7)<<24) // Shift to the proper bits
#define SH_SEL37(x) (((x)&0x7)<<21) // Shift to the proper bits
#define SH_SEL36(x) (((x)&0x7)<<18) // Shift to the proper bits
#define SH_SEL35(x) (((x)&0x7)<<15) // Shift to the proper bits
#define SH_SEL34(x) (((x)&0x7)<<12) // Shift to the proper bits
#define SH_SEL33(x) (((x)&0x7)<<9) // Shift to the proper bits
#define SH_SEL32(x) (((x)&0x7)<<6) // Shift to the proper bits
#define SH_SEL31(x) (((x)&0x7)<<3) // Shift to the proper bits
#define SH_SEL30(x) (((x)&0x7)<<0) // Shift to the proper bits
#define GPIO_FSEL4 (0x10) // GPIO Function Select 4
//-------------------------------------------------------------------------------------------------------------------
#define GPIOFSEL4_SEL49 (7<<27) // Function Select 49
#define GPIOFSEL4_SEL48 (7<<24) // Function Select 48
#define GPIOFSEL4_SEL47 (7<<21) // Function Select 47
#define GPIOFSEL4_SEL46 (7<<18) // Function Select 46
#define GPIOFSEL4_SEL45 (7<<15) // Function Select 45
#define GPIOFSEL4_SEL44 (7<<12) // Function Select 44
#define GPIOFSEL4_SEL43 (7<<9) // Function Select 43
#define GPIOFSEL4_SEL42 (7<<6) // Function Select 42
#define GPIOFSEL4_SEL41 (7<<3) // Function Select 41
#define GPIOFSEL4_SEL40 (7<<0) // Function Select 40
#define SH_SEL49(x) (((x)&0x7)<<27) // Shift to the proper bits
#define SH_SEL48(x) (((x)&0x7)<<24) // Shift to the proper bits
#define SH_SEL47(x) (((x)&0x7)<<21) // Shift to the proper bits
#define SH_SEL46(x) (((x)&0x7)<<18) // Shift to the proper bits
#define SH_SEL45(x) (((x)&0x7)<<15) // Shift to the proper bits
#define SH_SEL44(x) (((x)&0x7)<<12) // Shift to the proper bits
#define SH_SEL43(x) (((x)&0x7)<<9) // Shift to the proper bits
#define SH_SEL42(x) (((x)&0x7)<<6) // Shift to the proper bits
#define SH_SEL41(x) (((x)&0x7)<<3) // Shift to the proper bits
#define SH_SEL40(x) (((x)&0x7)<<0) // Shift to the proper bits
#define GPIO_FSEL5 (0x14) // GPIO Function Select 5
//-------------------------------------------------------------------------------------------------------------------
#define GPIOFSEL5_SEL53 (7<<9) // Function Select 53
#define GPIOFSEL5_SEL52 (7<<6) // Function Select 52
#define GPIOFSEL5_SEL51 (7<<3) // Function Select 51
#define GPIOFSEL5_SEL50 (7<<0) // Function Select 50
#define SH_SEL53(x) (((x)&0x7)<<9) // Shift to the proper bits
#define SH_SEL52(x) (((x)&0x7)<<6) // Shift to the proper bits
#define SH_SEL51(x) (((x)&0x7)<<3) // Shift to the proper bits
#define SH_SEL50(x) (((x)&0x7)<<0) // Shift to the proper bits
#define GPIO_SET0 (0x1c) // GPIO Pin Output Set 0
//-------------------------------------------------------------------------------------------------------------------
#define GPIOSET0_31 (1u<<31) // Set GPIO pin 31 (unsigned: 1<<31 overflows a 32-bit signed int -- UB)
#define GPIOSET0_30 (1<<30) // Set GPIO pin 30
#define GPIOSET0_29 (1<<29) // Set GPIO pin 29
#define GPIOSET0_28 (1<<28) // Set GPIO pin 28
#define GPIOSET0_27 (1<<27) // Set GPIO pin 27
#define GPIOSET0_26 (1<<26) // Set GPIO pin 26
#define GPIOSET0_25 (1<<25) // Set GPIO pin 25
#define GPIOSET0_24 (1<<24) // Set GPIO pin 24
#define GPIOSET0_23 (1<<23) // Set GPIO pin 23
#define GPIOSET0_22 (1<<22) // Set GPIO pin 22
#define GPIOSET0_21 (1<<21) // Set GPIO pin 21
#define GPIOSET0_20 (1<<20) // Set GPIO pin 20
#define GPIOSET0_19 (1<<19) // Set GPIO pin 19
#define GPIOSET0_18 (1<<18) // Set GPIO pin 18
#define GPIOSET0_17 (1<<17) // Set GPIO pin 17
#define GPIOSET0_16 (1<<16) // Set GPIO pin 16
#define GPIOSET0_15 (1<<15) // Set GPIO pin 15
#define GPIOSET0_14 (1<<14) // Set GPIO pin 14
#define GPIOSET0_13 (1<<13) // Set GPIO pin 13
#define GPIOSET0_12 (1<<12) // Set GPIO pin 12
#define GPIOSET0_11 (1<<11) // Set GPIO pin 11
#define GPIOSET0_10 (1<<10) // Set GPIO pin 10
#define GPIOSET0_9 (1<<9) // Set GPIO pin 9
#define GPIOSET0_8 (1<<8) // Set GPIO pin 8
#define GPIOSET0_7 (1<<7) // Set GPIO pin 7
#define GPIOSET0_6 (1<<6) // Set GPIO pin 6
#define GPIOSET0_5 (1<<5) // Set GPIO pin 5
#define GPIOSET0_4 (1<<4) // Set GPIO pin 4
#define GPIOSET0_3 (1<<3) // Set GPIO pin 3
#define GPIOSET0_2 (1<<2) // Set GPIO pin 2
#define GPIOSET0_1 (1<<1) // Set GPIO pin 1
#define GPIOSET0_0 (1<<0) // Set GPIO pin 0
#define GPIO_SET1 (0x20) // GPIO Pin Output Set 1
//-------------------------------------------------------------------------------------------------------------------
#define GPIOSET1_53 (1<<21) // Set GPIO pin 53
#define GPIOSET1_52 (1<<20) // Set GPIO pin 52
#define GPIOSET1_51 (1<<19) // Set GPIO pin 51
#define GPIOSET1_50 (1<<18) // Set GPIO pin 50
#define GPIOSET1_49 (1<<17) // Set GPIO pin 49
#define GPIOSET1_48 (1<<16) // Set GPIO pin 48
#define GPIOSET1_47 (1<<15) // Set GPIO pin 47
#define GPIOSET1_46 (1<<14) // Set GPIO pin 46
#define GPIOSET1_45 (1<<13) // Set GPIO pin 45
#define GPIOSET1_44 (1<<12) // Set GPIO pin 44
#define GPIOSET1_43 (1<<11) // Set GPIO pin 43
#define GPIOSET1_42 (1<<10) // Set GPIO pin 42
#define GPIOSET1_41 (1<<9) // Set GPIO pin 41
#define GPIOSET1_40 (1<<8) // Set GPIO pin 40
#define GPIOSET1_39 (1<<7) // Set GPIO pin 39
#define GPIOSET1_38 (1<<6) // Set GPIO pin 38
#define GPIOSET1_37 (1<<5) // Set GPIO pin 37
#define GPIOSET1_36 (1<<4) // Set GPIO pin 36
#define GPIOSET1_35 (1<<3) // Set GPIO pin 35
#define GPIOSET1_34 (1<<2) // Set GPIO pin 34
#define GPIOSET1_33 (1<<1) // Set GPIO pin 33
#define GPIOSET1_32 (1<<0) // Set GPIO pin 32
#define GPIO_CLR0 (0x28) // GPIO Pin Output Clear 0
//-------------------------------------------------------------------------------------------------------------------
#define GPIOCLR0_31 (1u<<31) // Clear GPIO pin 31 (unsigned: 1<<31 overflows a 32-bit signed int -- UB)
#define GPIOCLR0_30 (1<<30) // Clear GPIO pin 30
#define GPIOCLR0_29 (1<<29) // Clear GPIO pin 29
#define GPIOCLR0_28 (1<<28) // Clear GPIO pin 28
#define GPIOCLR0_27 (1<<27) // Clear GPIO pin 27
#define GPIOCLR0_26 (1<<26) // Clear GPIO pin 26
#define GPIOCLR0_25 (1<<25) // Clear GPIO pin 25
#define GPIOCLR0_24 (1<<24) // Clear GPIO pin 24
#define GPIOCLR0_23 (1<<23) // Clear GPIO pin 23
#define GPIOCLR0_22 (1<<22) // Clear GPIO pin 22
#define GPIOCLR0_21 (1<<21) // Clear GPIO pin 21
#define GPIOCLR0_20 (1<<20) // Clear GPIO pin 20
#define GPIOCLR0_19 (1<<19) // Clear GPIO pin 19
#define GPIOCLR0_18 (1<<18) // Clear GPIO pin 18
#define GPIOCLR0_17 (1<<17) // Clear GPIO pin 17
#define GPIOCLR0_16 (1<<16) // Clear GPIO pin 16
#define GPIOCLR0_15 (1<<15) // Clear GPIO pin 15
#define GPIOCLR0_14 (1<<14) // Clear GPIO pin 14
#define GPIOCLR0_13 (1<<13) // Clear GPIO pin 13
#define GPIOCLR0_12 (1<<12) // Clear GPIO pin 12
#define GPIOCLR0_11 (1<<11) // Clear GPIO pin 11
#define GPIOCLR0_10 (1<<10) // Clear GPIO pin 10
#define GPIOCLR0_9 (1<<9) // Clear GPIO pin 9
#define GPIOCLR0_8 (1<<8) // Clear GPIO pin 8
#define GPIOCLR0_7 (1<<7) // Clear GPIO pin 7
#define GPIOCLR0_6 (1<<6) // Clear GPIO pin 6
#define GPIOCLR0_5 (1<<5) // Clear GPIO pin 5
#define GPIOCLR0_4 (1<<4) // Clear GPIO pin 4
#define GPIOCLR0_3 (1<<3) // Clear GPIO pin 3
#define GPIOCLR0_2 (1<<2) // Clear GPIO pin 2
#define GPIOCLR0_1 (1<<1) // Clear GPIO pin 1
#define GPIOCLR0_0 (1<<0) // Clear GPIO pin 0
#define GPIO_CLR1 (0x2c) // GPIO Pin Output Clear 1
//-------------------------------------------------------------------------------------------------------------------
#define GPIOCLR1_53 (1<<21) // Clear GPIO pin 53
#define GPIOCLR1_52 (1<<20) // Clear GPIO pin 52
#define GPIOCLR1_51 (1<<19) // Clear GPIO pin 51
#define GPIOCLR1_50 (1<<18) // Clear GPIO pin 50
#define GPIOCLR1_49 (1<<17) // Clear GPIO pin 49
#define GPIOCLR1_48 (1<<16) // Clear GPIO pin 48
#define GPIOCLR1_47 (1<<15) // Clear GPIO pin 47
#define GPIOCLR1_46 (1<<14) // Clear GPIO pin 46
#define GPIOCLR1_45 (1<<13) // Clear GPIO pin 45
#define GPIOCLR1_44 (1<<12) // Clear GPIO pin 44
#define GPIOCLR1_43 (1<<11) // Clear GPIO pin 43
#define GPIOCLR1_42 (1<<10) // Clear GPIO pin 42
#define GPIOCLR1_41 (1<<9) // Clear GPIO pin 41
#define GPIOCLR1_40 (1<<8) // Clear GPIO pin 40
#define GPIOCLR1_39 (1<<7) // Clear GPIO pin 39
#define GPIOCLR1_38 (1<<6) // Clear GPIO pin 38
#define GPIOCLR1_37 (1<<5) // Clear GPIO pin 37
#define GPIOCLR1_36 (1<<4) // Clear GPIO pin 36
#define GPIOCLR1_35 (1<<3) // Clear GPIO pin 35
#define GPIOCLR1_34 (1<<2) // Clear GPIO pin 34
#define GPIOCLR1_33 (1<<1) // Clear GPIO pin 33
#define GPIOCLR1_32 (1<<0) // Clear GPIO pin 32
#define GPIO_LEV0 (0x34) // GPIO Pin Level 0
//-------------------------------------------------------------------------------------------------------------------
#define GPIOLVL0_31 (1u<<31) // Level GPIO pin 31 (unsigned: 1<<31 overflows a 32-bit signed int -- UB)
#define GPIOLVL0_30 (1<<30) // Level GPIO pin 30
#define GPIOLVL0_29 (1<<29) // Level GPIO pin 29
#define GPIOLVL0_28 (1<<28) // Level GPIO pin 28
#define GPIOLVL0_27 (1<<27) // Level GPIO pin 27
#define GPIOLVL0_26 (1<<26) // Level GPIO pin 26
#define GPIOLVL0_25 (1<<25) // Level GPIO pin 25
#define GPIOLVL0_24 (1<<24) // Level GPIO pin 24
#define GPIOLVL0_23 (1<<23) // Level GPIO pin 23
#define GPIOLVL0_22 (1<<22) // Level GPIO pin 22
#define GPIOLVL0_21 (1<<21) // Level GPIO pin 21
#define GPIOLVL0_20 (1<<20) // Level GPIO pin 20
#define GPIOLVL0_19 (1<<19) // Level GPIO pin 19
#define GPIOLVL0_18 (1<<18) // Level GPIO pin 18
#define GPIOLVL0_17 (1<<17) // Level GPIO pin 17
#define GPIOLVL0_16 (1<<16) // Level GPIO pin 16
#define GPIOLVL0_15 (1<<15) // Level GPIO pin 15
#define GPIOLVL0_14 (1<<14) // Level GPIO pin 14
#define GPIOLVL0_13 (1<<13) // Level GPIO pin 13
#define GPIOLVL0_12 (1<<12) // Level GPIO pin 12
#define GPIOLVL0_11 (1<<11) // Level GPIO pin 11
#define GPIOLVL0_10 (1<<10) // Level GPIO pin 10
#define GPIOLVL0_9 (1<<9) // Level GPIO pin 9
#define GPIOLVL0_8 (1<<8) // Level GPIO pin 8
#define GPIOLVL0_7 (1<<7) // Level GPIO pin 7
#define GPIOLVL0_6 (1<<6) // Level GPIO pin 6
#define GPIOLVL0_5 (1<<5) // Level GPIO pin 5
#define GPIOLVL0_4 (1<<4) // Level GPIO pin 4
#define GPIOLVL0_3 (1<<3) // Level GPIO pin 3
#define GPIOLVL0_2 (1<<2) // Level GPIO pin 2
#define GPIOLVL0_1 (1<<1) // Level GPIO pin 1
#define GPIOLVL0_0 (1<<0) // Level GPIO pin 0
#define GPIO_LEV1 (0x38) // GPIO Pin Level 1
//-------------------------------------------------------------------------------------------------------------------
#define GPIOLVL1_53 (1<<21) // Level GPIO pin 53
#define GPIOLVL1_52 (1<<20) // Level GPIO pin 52
#define GPIOLVL1_51 (1<<19) // Level GPIO pin 51
#define GPIOLVL1_50 (1<<18) // Level GPIO pin 50
#define GPIOLVL1_49 (1<<17) // Level GPIO pin 49
#define GPIOLVL1_48 (1<<16) // Level GPIO pin 48
#define GPIOLVL1_47 (1<<15) // Level GPIO pin 47
#define GPIOLVL1_46 (1<<14) // Level GPIO pin 46
#define GPIOLVL1_45 (1<<13) // Level GPIO pin 45
#define GPIOLVL1_44 (1<<12) // Level GPIO pin 44
#define GPIOLVL1_43 (1<<11) // Level GPIO pin 43
#define GPIOLVL1_42 (1<<10) // Level GPIO pin 42
#define GPIOLVL1_41 (1<<9) // Level GPIO pin 41
#define GPIOLVL1_40 (1<<8) // Level GPIO pin 40
#define GPIOLVL1_39 (1<<7) // Level GPIO pin 39
#define GPIOLVL1_38 (1<<6) // Level GPIO pin 38
#define GPIOLVL1_37 (1<<5) // Level GPIO pin 37
#define GPIOLVL1_36 (1<<4) // Level GPIO pin 36
#define GPIOLVL1_35 (1<<3) // Level GPIO pin 35
#define GPIOLVL1_34 (1<<2) // Level GPIO pin 34
#define GPIOLVL1_33 (1<<1) // Level GPIO pin 33
#define GPIOLVL1_32 (1<<0) // Level GPIO pin 32
#define GPIO_EDS0 (0x40) // GPIO Pin Event Detect Status 0
//-------------------------------------------------------------------------------------------------------------------
#define GPIOEDS0_31 (1u<<31) // Event Detected on GPIO pin 31 (unsigned: 1<<31 overflows a 32-bit signed int -- UB)
#define GPIOEDS0_30 (1<<30) // Event Detected on GPIO pin 30
#define GPIOEDS0_29 (1<<29) // Event Detected on GPIO pin 29
#define GPIOEDS0_28 (1<<28) // Event Detected on GPIO pin 28
#define GPIOEDS0_27 (1<<27) // Event Detected on GPIO pin 27
#define GPIOEDS0_26 (1<<26) // Event Detected on GPIO pin 26
#define GPIOEDS0_25 (1<<25) // Event Detected on GPIO pin 25
#define GPIOEDS0_24 (1<<24) // Event Detected on GPIO pin 24
#define GPIOEDS0_23 (1<<23) // Event Detected on GPIO pin 23
#define GPIOEDS0_22 (1<<22) // Event Detected on GPIO pin 22
#define GPIOEDS0_21 (1<<21) // Event Detected on GPIO pin 21
#define GPIOEDS0_20 (1<<20) // Event Detected on GPIO pin 20
#define GPIOEDS0_19 (1<<19) // Event Detected on GPIO pin 19
#define GPIOEDS0_18 (1<<18) // Event Detected on GPIO pin 18
#define GPIOEDS0_17 (1<<17) // Event Detected on GPIO pin 17
#define GPIOEDS0_16 (1<<16) // Event Detected on GPIO pin 16
#define GPIOEDS0_15 (1<<15) // Event Detected on GPIO pin 15
#define GPIOEDS0_14 (1<<14) // Event Detected on GPIO pin 14
#define GPIOEDS0_13 (1<<13) // Event Detected on GPIO pin 13
#define GPIOEDS0_12 (1<<12) // Event Detected on GPIO pin 12
#define GPIOEDS0_11 (1<<11) // Event Detected on GPIO pin 11
#define GPIOEDS0_10 (1<<10) // Event Detected on GPIO pin 10
#define GPIOEDS0_9 (1<<9) // Event Detected on GPIO pin 9
#define GPIOEDS0_8 (1<<8) // Event Detected on GPIO pin 8
#define GPIOEDS0_7 (1<<7) // Event Detected on GPIO pin 7
#define GPIOEDS0_6 (1<<6) // Event Detected on GPIO pin 6
#define GPIOEDS0_5 (1<<5) // Event Detected on GPIO pin 5
#define GPIOEDS0_4 (1<<4) // Event Detected on GPIO pin 4
#define GPIOEDS0_3 (1<<3) // Event Detected on GPIO pin 3
#define GPIOEDS0_2 (1<<2) // Event Detected on GPIO pin 2
#define GPIOEDS0_1 (1<<1) // Event Detected on GPIO pin 1
#define GPIOEDS0_0 (1<<0) // Event Detected on GPIO pin 0
#define GPIO_EDS1 (0x44) // GPIO Pin Event Detect Status 1
//-------------------------------------------------------------------------------------------------------------------
#define GPIOEDS1_53 (1<<21) // Event Detected on GPIO pin 53
#define GPIOEDS1_52 (1<<20) // Event Detected on GPIO pin 52
#define GPIOEDS1_51 (1<<19) // Event Detected on GPIO pin 51
#define GPIOEDS1_50 (1<<18) // Event Detected on GPIO pin 50
#define GPIOEDS1_49 (1<<17) // Event Detected on GPIO pin 49
#define GPIOEDS1_48 (1<<16) // Event Detected on GPIO pin 48
#define GPIOEDS1_47 (1<<15) // Event Detected on GPIO pin 47
#define GPIOEDS1_46 (1<<14) // Event Detected on GPIO pin 46
#define GPIOEDS1_45 (1<<13) // Event Detected on GPIO pin 45
#define GPIOEDS1_44 (1<<12) // Event Detected on GPIO pin 44
#define GPIOEDS1_43 (1<<11) // Event Detected on GPIO pin 43
#define GPIOEDS1_42 (1<<10) // Event Detected on GPIO pin 42
#define GPIOEDS1_41 (1<<9) // Event Detected on GPIO pin 41
#define GPIOEDS1_40 (1<<8) // Event Detected on GPIO pin 40
#define GPIOEDS1_39 (1<<7) // Event Detected on GPIO pin 39
#define GPIOEDS1_38 (1<<6) // Event Detected on GPIO pin 38
#define GPIOEDS1_37 (1<<5) // Event Detected on GPIO pin 37
#define GPIOEDS1_36 (1<<4) // Event Detected on GPIO pin 36
#define GPIOEDS1_35 (1<<3) // Event Detected on GPIO pin 35
#define GPIOEDS1_34 (1<<2) // Event Detected on GPIO pin 34
#define GPIOEDS1_33 (1<<1) // Event Detected on GPIO pin 33
#define GPIOEDS1_32 (1<<0) // Event Detected on GPIO pin 32
#define GPIO_REN0 (0x4c) // GPIO Pin Rising Edge Detect Enable 0 (GPREN0; GPEDS is the status register)
//-------------------------------------------------------------------------------------------------------------------
#define GPIOREN0_31 (1u<<31) // Set Rising Edge Detect on GPIO pin 31 (unsigned: 1<<31 overflows a 32-bit signed int -- UB)
#define GPIOREN0_30 (1<<30) // Set Rising Edge Detect on GPIO pin 30
#define GPIOREN0_29 (1<<29) // Set Rising Edge Detect on GPIO pin 29
#define GPIOREN0_28 (1<<28) // Set Rising Edge Detect on GPIO pin 28
#define GPIOREN0_27 (1<<27) // Set Rising Edge Detect on GPIO pin 27
#define GPIOREN0_26 (1<<26) // Set Rising Edge Detect on GPIO pin 26
#define GPIOREN0_25 (1<<25) // Set Rising Edge Detect on GPIO pin 25
#define GPIOREN0_24 (1<<24) // Set Rising Edge Detect on GPIO pin 24
#define GPIOREN0_23 (1<<23) // Set Rising Edge Detect on GPIO pin 23
#define GPIOREN0_22 (1<<22) // Set Rising Edge Detect on GPIO pin 22
#define GPIOREN0_21 (1<<21) // Set Rising Edge Detect on GPIO pin 21
#define GPIOREN0_20 (1<<20) // Set Rising Edge Detect on GPIO pin 20
#define GPIOREN0_19 (1<<19) // Set Rising Edge Detect on GPIO pin 19
#define GPIOREN0_18 (1<<18) // Set Rising Edge Detect on GPIO pin 18
#define GPIOREN0_17 (1<<17) // Set Rising Edge Detect on GPIO pin 17
#define GPIOREN0_16 (1<<16) // Set Rising Edge Detect on GPIO pin 16
#define GPIOREN0_15 (1<<15) // Set Rising Edge Detect on GPIO pin 15
#define GPIOREN0_14 (1<<14) // Set Rising Edge Detect on GPIO pin 14
#define GPIOREN0_13 (1<<13) // Set Rising Edge Detect on GPIO pin 13
#define GPIOREN0_12 (1<<12) // Set Rising Edge Detect on GPIO pin 12
#define GPIOREN0_11 (1<<11) // Set Rising Edge Detect on GPIO pin 11
#define GPIOREN0_10 (1<<10) // Set Rising Edge Detect on GPIO pin 10
#define GPIOREN0_9 (1<<9) // Set Rising Edge Detect on GPIO pin 9
#define GPIOREN0_8 (1<<8) // Set Rising Edge Detect on GPIO pin 8
#define GPIOREN0_7 (1<<7) // Set Rising Edge Detect on GPIO pin 7
#define GPIOREN0_6 (1<<6) // Set Rising Edge Detect on GPIO pin 6
#define GPIOREN0_5 (1<<5) // Set Rising Edge Detect on GPIO pin 5
#define GPIOREN0_4 (1<<4) // Set Rising Edge Detect on GPIO pin 4
#define GPIOREN0_3 (1<<3) // Set Rising Edge Detect on GPIO pin 3
#define GPIOREN0_2 (1<<2) // Set Rising Edge Detect on GPIO pin 2
#define GPIOREN0_1 (1<<1) // Set Rising Edge Detect on GPIO pin 1
#define GPIOREN0_0 (1<<0) // Set Rising Edge Detect on GPIO pin 0
#define GPIO_REN1 (0x50) // GPIO Pin Rising Edge Detect Enable 1 (GPREN1; GPEDS is the status register)
//-------------------------------------------------------------------------------------------------------------------
#define GPIOREN1_53 (1<<21) // Set Rising Edge Detect on GPIO pin 53
#define GPIOREN1_52 (1<<20) // Set Rising Edge Detect on GPIO pin 52
#define GPIOREN1_51 (1<<19) // Set Rising Edge Detect on GPIO pin 51
#define GPIOREN1_50 (1<<18) // Set Rising Edge Detect on GPIO pin 50
#define GPIOREN1_49 (1<<17) // Set Rising Edge Detect on GPIO pin 49
#define GPIOREN1_48 (1<<16) // Set Rising Edge Detect on GPIO pin 48
#define GPIOREN1_47 (1<<15) // Set Rising Edge Detect on GPIO pin 47
#define GPIOREN1_46 (1<<14) // Set Rising Edge Detect on GPIO pin 46
#define GPIOREN1_45 (1<<13) // Set Rising Edge Detect on GPIO pin 45
#define GPIOREN1_44 (1<<12) // Set Rising Edge Detect on GPIO pin 44
#define GPIOREN1_43 (1<<11) // Set Rising Edge Detect on GPIO pin 43
#define GPIOREN1_42 (1<<10) // Set Rising Edge Detect on GPIO pin 42
#define GPIOREN1_41 (1<<9) // Set Rising Edge Detect on GPIO pin 41
#define GPIOREN1_40 (1<<8) // Set Rising Edge Detect on GPIO pin 40
#define GPIOREN1_39 (1<<7) // Set Rising Edge Detect on GPIO pin 39
#define GPIOREN1_38 (1<<6) // Set Rising Edge Detect on GPIO pin 38
#define GPIOREN1_37 (1<<5) // Set Rising Edge Detect on GPIO pin 37
#define GPIOREN1_36 (1<<4) // Set Rising Edge Detect on GPIO pin 36
#define GPIOREN1_35 (1<<3) // Set Rising Edge Detect on GPIO pin 35
#define GPIOREN1_34 (1<<2) // Set Rising Edge Detect on GPIO pin 34
#define GPIOREN1_33 (1<<1) // Set Rising Edge Detect on GPIO pin 33
#define GPIOREN1_32 (1<<0) // Set Rising Edge Detect on GPIO pin 32
#define GPIO_FEN0 (0x58) // GPIO Pin Falling Edge Detect Enable 0 (GPFEN0; GPEDS is the status register)
//-------------------------------------------------------------------------------------------------------------------
#define GPIOFEN0_31 (1u<<31) // Set Falling Edge Detect on GPIO pin 31 (unsigned: 1<<31 overflows a 32-bit signed int -- UB)
#define GPIOFEN0_30 (1<<30) // Set Falling Edge Detect on GPIO pin 30
#define GPIOFEN0_29 (1<<29) // Set Falling Edge Detect on GPIO pin 29
#define GPIOFEN0_28 (1<<28) // Set Falling Edge Detect on GPIO pin 28
#define GPIOFEN0_27 (1<<27) // Set Falling Edge Detect on GPIO pin 27
#define GPIOFEN0_26 (1<<26) // Set Falling Edge Detect on GPIO pin 26
#define GPIOFEN0_25 (1<<25) // Set Falling Edge Detect on GPIO pin 25
#define GPIOFEN0_24 (1<<24) // Set Falling Edge Detect on GPIO pin 24
#define GPIOFEN0_23 (1<<23) // Set Falling Edge Detect on GPIO pin 23
#define GPIOFEN0_22 (1<<22) // Set Falling Edge Detect on GPIO pin 22
#define GPIOFEN0_21 (1<<21) // Set Falling Edge Detect on GPIO pin 21
#define GPIOFEN0_20 (1<<20) // Set Falling Edge Detect on GPIO pin 20
#define GPIOFEN0_19 (1<<19) // Set Falling Edge Detect on GPIO pin 19
#define GPIOFEN0_18 (1<<18) // Set Falling Edge Detect on GPIO pin 18
#define GPIOFEN0_17 (1<<17) // Set Falling Edge Detect on GPIO pin 17
#define GPIOFEN0_16 (1<<16) // Set Falling Edge Detect on GPIO pin 16
#define GPIOFEN0_15 (1<<15) // Set Falling Edge Detect on GPIO pin 15
#define GPIOFEN0_14 (1<<14) // Set Falling Edge Detect on GPIO pin 14
#define GPIOFEN0_13 (1<<13) // Set Falling Edge Detect on GPIO pin 13
#define GPIOFEN0_12 (1<<12) // Set Falling Edge Detect on GPIO pin 12
#define GPIOFEN0_11 (1<<11) // Set Falling Edge Detect on GPIO pin 11
#define GPIOFEN0_10 (1<<10) // Set Falling Edge Detect on GPIO pin 10
#define GPIOFEN0_9 (1<<9) // Set Falling Edge Detect on GPIO pin 9
#define GPIOFEN0_8 (1<<8) // Set Falling Edge Detect on GPIO pin 8
#define GPIOFEN0_7 (1<<7) // Set Falling Edge Detect on GPIO pin 7
#define GPIOFEN0_6 (1<<6) // Set Falling Edge Detect on GPIO pin 6
#define GPIOFEN0_5 (1<<5) // Set Falling Edge Detect on GPIO pin 5
#define GPIOFEN0_4 (1<<4) // Set Falling Edge Detect on GPIO pin 4
#define GPIOFEN0_3 (1<<3) // Set Falling Edge Detect on GPIO pin 3
#define GPIOFEN0_2 (1<<2) // Set Falling Edge Detect on GPIO pin 2
#define GPIOFEN0_1 (1<<1) // Set Falling Edge Detect on GPIO pin 1
#define GPIOFEN0_0 (1<<0) // Set Falling Edge Detect on GPIO pin 0
#define GPIO_FEN1 (0x5c) // GPIO Pin Falling Edge Detect Enable 1 (GPFEN1; GPEDS is the status register)
//-------------------------------------------------------------------------------------------------------------------
#define GPIOFEN1_53 (1<<21) // Set Falling Edge Detect on GPIO pin 53
#define GPIOFEN1_52 (1<<20) // Set Falling Edge Detect on GPIO pin 52
#define GPIOFEN1_51 (1<<19) // Set Falling Edge Detect on GPIO pin 51
#define GPIOFEN1_50 (1<<18) // Set Falling Edge Detect on GPIO pin 50
#define GPIOFEN1_49 (1<<17) // Set Falling Edge Detect on GPIO pin 49
#define GPIOFEN1_48 (1<<16) // Set Falling Edge Detect on GPIO pin 48
#define GPIOFEN1_47 (1<<15) // Set Falling Edge Detect on GPIO pin 47
#define GPIOFEN1_46 (1<<14) // Set Falling Edge Detect on GPIO pin 46
#define GPIOFEN1_45 (1<<13) // Set Falling Edge Detect on GPIO pin 45
#define GPIOFEN1_44 (1<<12) // Set Falling Edge Detect on GPIO pin 44
#define GPIOFEN1_43 (1<<11) // Set Falling Edge Detect on GPIO pin 43
#define GPIOFEN1_42 (1<<10) // Set Falling Edge Detect on GPIO pin 42
#define GPIOFEN1_41 (1<<9) // Set Falling Edge Detect on GPIO pin 41
#define GPIOFEN1_40 (1<<8) // Set Falling Edge Detect on GPIO pin 40
#define GPIOFEN1_39 (1<<7) // Set Falling Edge Detect on GPIO pin 39
#define GPIOFEN1_38 (1<<6) // Set Falling Edge Detect on GPIO pin 38
#define GPIOFEN1_37 (1<<5) // Set Falling Edge Detect on GPIO pin 37
#define GPIOFEN1_36 (1<<4) // Set Falling Edge Detect on GPIO pin 36
#define GPIOFEN1_35 (1<<3) // Set Falling Edge Detect on GPIO pin 35
#define GPIOFEN1_34 (1<<2) // Set Falling Edge Detect on GPIO pin 34
#define GPIOFEN1_33 (1<<1) // Set Falling Edge Detect on GPIO pin 33
#define GPIOFEN1_32 (1<<0) // Set Falling Edge Detect on GPIO pin 32
#define GPIO_HEN0 (0x64) // GPIO Pin High Detect Enable 0 (GPHEN0; GPEDS is the status register)
//-------------------------------------------------------------------------------------------------------------------
#define GPIOHEN0_31 (1u<<31) // Set High Detect on GPIO pin 31 (unsigned: 1<<31 overflows a 32-bit signed int -- UB)
#define GPIOHEN0_30 (1<<30) // Set High Detect on GPIO pin 30
#define GPIOHEN0_29 (1<<29) // Set High Detect on GPIO pin 29
#define GPIOHEN0_28 (1<<28) // Set High Detect on GPIO pin 28
#define GPIOHEN0_27 (1<<27) // Set High Detect on GPIO pin 27
#define GPIOHEN0_26 (1<<26) // Set High Detect on GPIO pin 26
#define GPIOHEN0_25 (1<<25) // Set High Detect on GPIO pin 25
#define GPIOHEN0_24 (1<<24) // Set High Detect on GPIO pin 24
#define GPIOHEN0_23 (1<<23) // Set High Detect on GPIO pin 23
#define GPIOHEN0_22 (1<<22) // Set High Detect on GPIO pin 22
#define GPIOHEN0_21 (1<<21) // Set High Detect on GPIO pin 21
#define GPIOHEN0_20 (1<<20) // Set High Detect on GPIO pin 20
#define GPIOHEN0_19 (1<<19) // Set High Detect on GPIO pin 19
#define GPIOHEN0_18 (1<<18) // Set High Detect on GPIO pin 18
#define GPIOHEN0_17 (1<<17) // Set High Detect on GPIO pin 17
#define GPIOHEN0_16 (1<<16) // Set High Detect on GPIO pin 16
#define GPIOHEN0_15 (1<<15) // Set High Detect on GPIO pin 15
#define GPIOHEN0_14 (1<<14) // Set High Detect on GPIO pin 14
#define GPIOHEN0_13 (1<<13) // Set High Detect on GPIO pin 13
#define GPIOHEN0_12 (1<<12) // Set High Detect on GPIO pin 12
#define GPIOHEN0_11 (1<<11) // Set High Detect on GPIO pin 11
#define GPIOHEN0_10 (1<<10) // Set High Detect on GPIO pin 10
#define GPIOHEN0_9 (1<<9) // Set High Detect on GPIO pin 9
#define GPIOHEN0_8 (1<<8) // Set High Detect on GPIO pin 8
#define GPIOHEN0_7 (1<<7) // Set High Detect on GPIO pin 7
#define GPIOHEN0_6 (1<<6) // Set High Detect on GPIO pin 6
#define GPIOHEN0_5 (1<<5) // Set High Detect on GPIO pin 5
#define GPIOHEN0_4 (1<<4) // Set High Detect on GPIO pin 4
#define GPIOHEN0_3 (1<<3) // Set High Detect on GPIO pin 3
#define GPIOHEN0_2 (1<<2) // Set High Detect on GPIO pin 2
#define GPIOHEN0_1 (1<<1) // Set High Detect on GPIO pin 1
#define GPIOHEN0_0 (1<<0) // Set High Detect on GPIO pin 0
#define GPIO_HEN1 (0x68) // GPIO Pin High Detect Enable 1 (GPHEN1; GPEDS is the status register)
//-------------------------------------------------------------------------------------------------------------------
#define GPIOHEN1_53 (1<<21) // Set High Detect on GPIO pin 53
#define GPIOHEN1_52 (1<<20) // Set High Detect on GPIO pin 52
#define GPIOHEN1_51 (1<<19) // Set High Detect on GPIO pin 51
#define GPIOHEN1_50 (1<<18) // Set High Detect on GPIO pin 50
#define GPIOHEN1_49 (1<<17) // Set High Detect on GPIO pin 49
#define GPIOHEN1_48 (1<<16) // Set High Detect on GPIO pin 48
#define GPIOHEN1_47 (1<<15) // Set High Detect on GPIO pin 47
#define GPIOHEN1_46 (1<<14) // Set High Detect on GPIO pin 46
#define GPIOHEN1_45 (1<<13) // Set High Detect on GPIO pin 45
#define GPIOHEN1_44 (1<<12) // Set High Detect on GPIO pin 44
#define GPIOHEN1_43 (1<<11) // Set High Detect on GPIO pin 43
#define GPIOHEN1_42 (1<<10) // Set High Detect on GPIO pin 42
#define GPIOHEN1_41 (1<<9) // Set High Detect on GPIO pin 41
#define GPIOHEN1_40 (1<<8) // Set High Detect on GPIO pin 40
#define GPIOHEN1_39 (1<<7) // Set High Detect on GPIO pin 39
#define GPIOHEN1_38 (1<<6) // Set High Detect on GPIO pin 38
#define GPIOHEN1_37 (1<<5) // Set High Detect on GPIO pin 37
#define GPIOHEN1_36 (1<<4) // Set High Detect on GPIO pin 36
#define GPIOHEN1_35 (1<<3) // Set High Detect on GPIO pin 35
#define GPIOHEN1_34 (1<<2) // Set High Detect on GPIO pin 34
#define GPIOHEN1_33 (1<<1) // Set High Detect on GPIO pin 33
#define GPIOHEN1_32 (1<<0) // Set High Detect on GPIO pin 32
#define GPIO_LEN0 (0x70) // GPIO Pin Low Detect Enable 0 (GPLEN0; GPEDS is the status register)
//-------------------------------------------------------------------------------------------------------------------
#define GPIOLEN0_31 (1u<<31) // Set Low Detect on GPIO pin 31 (unsigned literal: 1<<31 shifts into the sign bit of int, which is UB pre-C++20)
#define GPIOLEN0_30 (1<<30) // Set Low Detect on GPIO pin 30
#define GPIOLEN0_29 (1<<29) // Set Low Detect on GPIO pin 29
#define GPIOLEN0_28 (1<<28) // Set Low Detect on GPIO pin 28
#define GPIOLEN0_27 (1<<27) // Set Low Detect on GPIO pin 27
#define GPIOLEN0_26 (1<<26) // Set Low Detect on GPIO pin 26
#define GPIOLEN0_25 (1<<25) // Set Low Detect on GPIO pin 25
#define GPIOLEN0_24 (1<<24) // Set Low Detect on GPIO pin 24
#define GPIOLEN0_23 (1<<23) // Set Low Detect on GPIO pin 23
#define GPIOLEN0_22 (1<<22) // Set Low Detect on GPIO pin 22
#define GPIOLEN0_21 (1<<21) // Set Low Detect on GPIO pin 21
#define GPIOLEN0_20 (1<<20) // Set Low Detect on GPIO pin 20
#define GPIOLEN0_19 (1<<19) // Set Low Detect on GPIO pin 19
#define GPIOLEN0_18 (1<<18) // Set Low Detect on GPIO pin 18
#define GPIOLEN0_17 (1<<17) // Set Low Detect on GPIO pin 17
#define GPIOLEN0_16 (1<<16) // Set Low Detect on GPIO pin 16
#define GPIOLEN0_15 (1<<15) // Set Low Detect on GPIO pin 15
#define GPIOLEN0_14 (1<<14) // Set Low Detect on GPIO pin 14
#define GPIOLEN0_13 (1<<13) // Set Low Detect on GPIO pin 13
#define GPIOLEN0_12 (1<<12) // Set Low Detect on GPIO pin 12
#define GPIOLEN0_11 (1<<11) // Set Low Detect on GPIO pin 11
#define GPIOLEN0_10 (1<<10) // Set Low Detect on GPIO pin 10
#define GPIOLEN0_9 (1<<9) // Set Low Detect on GPIO pin 9
#define GPIOLEN0_8 (1<<8) // Set Low Detect on GPIO pin 8
#define GPIOLEN0_7 (1<<7) // Set Low Detect on GPIO pin 7
#define GPIOLEN0_6 (1<<6) // Set Low Detect on GPIO pin 6
#define GPIOLEN0_5 (1<<5) // Set Low Detect on GPIO pin 5
#define GPIOLEN0_4 (1<<4) // Set Low Detect on GPIO pin 4
#define GPIOLEN0_3 (1<<3) // Set Low Detect on GPIO pin 3
#define GPIOLEN0_2 (1<<2) // Set Low Detect on GPIO pin 2
#define GPIOLEN0_1 (1<<1) // Set Low Detect on GPIO pin 1
#define GPIOLEN0_0 (1<<0) // Set Low Detect on GPIO pin 0
#define GPIO_LEN1 (0x74) // GPIO Pin Low Detect Status 1
//-------------------------------------------------------------------------------------------------------------------
#define GPIOLEN1_53 (1<<21) // Set Low Detect on GPIO pin 53
#define GPIOLEN1_52 (1<<20) // Set Low Detect on GPIO pin 52
#define GPIOLEN1_51 (1<<19) // Set Low Detect on GPIO pin 51
#define GPIOLEN1_50 (1<<18) // Set Low Detect on GPIO pin 50
#define GPIOLEN1_49 (1<<17) // Set Low Detect on GPIO pin 49
#define GPIOLEN1_48 (1<<16) // Set Low Detect on GPIO pin 48
#define GPIOLEN1_47 (1<<15) // Set Low Detect on GPIO pin 47
#define GPIOLEN1_46 (1<<14) // Set Low Detect on GPIO pin 46
#define GPIOLEN1_45 (1<<13) // Set Low Detect on GPIO pin 45
#define GPIOLEN1_44 (1<<12) // Set Low Detect on GPIO pin 44
#define GPIOLEN1_43 (1<<11) // Set Low Detect on GPIO pin 43
#define GPIOLEN1_42 (1<<10) // Set Low Detect on GPIO pin 42
#define GPIOLEN1_41 (1<<9) // Set Low Detect on GPIO pin 41
#define GPIOLEN1_40 (1<<8) // Set Low Detect on GPIO pin 40
#define GPIOLEN1_39 (1<<7) // Set Low Detect on GPIO pin 39
#define GPIOLEN1_38 (1<<6) // Set Low Detect on GPIO pin 38
#define GPIOLEN1_37 (1<<5) // Set Low Detect on GPIO pin 37
#define GPIOLEN1_36 (1<<4) // Set Low Detect on GPIO pin 36
#define GPIOLEN1_35 (1<<3) // Set Low Detect on GPIO pin 35
#define GPIOLEN1_34 (1<<2) // Set Low Detect on GPIO pin 34
#define GPIOLEN1_33 (1<<1) // Set Low Detect on GPIO pin 33
#define GPIOLEN1_32 (1<<0) // Set Low Detect on GPIO pin 32
#define GPIO_AREN0 (0x7c) // GPIO Pin Async Rising Edge Detect Status 0
//-------------------------------------------------------------------------------------------------------------------
#define GPIOAREN0_31 (1u<<31) // Set Async Rising Edge Detect on GPIO pin 31 (unsigned literal avoids shifting 1 into the int sign bit)
#define GPIOAREN0_30 (1<<30) // Set Async Rising Edge Detect on GPIO pin 30
#define GPIOAREN0_29 (1<<29) // Set Async Rising Edge Detect on GPIO pin 29
#define GPIOAREN0_28 (1<<28) // Set Async Rising Edge Detect on GPIO pin 28
#define GPIOAREN0_27 (1<<27) // Set Async Rising Edge Detect on GPIO pin 27
#define GPIOAREN0_26 (1<<26) // Set Async Rising Edge Detect on GPIO pin 26
#define GPIOAREN0_25 (1<<25) // Set Async Rising Edge Detect on GPIO pin 25
#define GPIOAREN0_24 (1<<24) // Set Async Rising Edge Detect on GPIO pin 24
#define GPIOAREN0_23 (1<<23) // Set Async Rising Edge Detect on GPIO pin 23
#define GPIOAREN0_22 (1<<22) // Set Async Rising Edge Detect on GPIO pin 22
#define GPIOAREN0_21 (1<<21) // Set Async Rising Edge Detect on GPIO pin 21
#define GPIOAREN0_20 (1<<20) // Set Async Rising Edge Detect on GPIO pin 20
#define GPIOAREN0_19 (1<<19) // Set Async Rising Edge Detect on GPIO pin 19
#define GPIOAREN0_18 (1<<18) // Set Async Rising Edge Detect on GPIO pin 18
#define GPIOAREN0_17 (1<<17) // Set Async Rising Edge Detect on GPIO pin 17
#define GPIOAREN0_16 (1<<16) // Set Async Rising Edge Detect on GPIO pin 16
#define GPIOAREN0_15 (1<<15) // Set Async Rising Edge Detect on GPIO pin 15
#define GPIOAREN0_14 (1<<14) // Set Async Rising Edge Detect on GPIO pin 14
#define GPIOAREN0_13 (1<<13) // Set Async Rising Edge Detect on GPIO pin 13
#define GPIOAREN0_12 (1<<12) // Set Async Rising Edge Detect on GPIO pin 12
#define GPIOAREN0_11 (1<<11) // Set Async Rising Edge Detect on GPIO pin 11
#define GPIOAREN0_10 (1<<10) // Set Async Rising Edge Detect on GPIO pin 10
#define GPIOAREN0_9 (1<<9) // Set Async Rising Edge Detect on GPIO pin 9
#define GPIOAREN0_8 (1<<8) // Set Async Rising Edge Detect on GPIO pin 8
#define GPIOAREN0_7 (1<<7) // Set Async Rising Edge Detect on GPIO pin 7
#define GPIOAREN0_6 (1<<6) // Set Async Rising Edge Detect on GPIO pin 6
#define GPIOAREN0_5 (1<<5) // Set Async Rising Edge Detect on GPIO pin 5
#define GPIOAREN0_4 (1<<4) // Set Async Rising Edge Detect on GPIO pin 4
#define GPIOAREN0_3 (1<<3) // Set Async Rising Edge Detect on GPIO pin 3
#define GPIOAREN0_2 (1<<2) // Set Async Rising Edge Detect on GPIO pin 2
#define GPIOAREN0_1 (1<<1) // Set Async Rising Edge Detect on GPIO pin 1
#define GPIOAREN0_0 (1<<0) // Set Async Rising Edge Detect on GPIO pin 0
#define GPIO_AREN1 (0x80) // GPIO Pin Async Rising Edge Detect Status 1
//-------------------------------------------------------------------------------------------------------------------
#define GPIOAREN1_53 (1<<21) // Set Async Rising Edge Detect on GPIO pin 53
#define GPIOAREN1_52 (1<<20) // Set Async Rising Edge Detect on GPIO pin 52
#define GPIOAREN1_51 (1<<19) // Set Async Rising Edge Detect on GPIO pin 51
#define GPIOAREN1_50 (1<<18) // Set Async Rising Edge Detect on GPIO pin 50
#define GPIOAREN1_49 (1<<17) // Set Async Rising Edge Detect on GPIO pin 49
#define GPIOAREN1_48 (1<<16) // Set Async Rising Edge Detect on GPIO pin 48
#define GPIOAREN1_47 (1<<15) // Set Async Rising Edge Detect on GPIO pin 47
#define GPIOAREN1_46 (1<<14) // Set Async Rising Edge Detect on GPIO pin 46
#define GPIOAREN1_45 (1<<13) // Set Async Rising Edge Detect on GPIO pin 45
#define GPIOAREN1_44 (1<<12) // Set Async Rising Edge Detect on GPIO pin 44
#define GPIOAREN1_43 (1<<11) // Set Async Rising Edge Detect on GPIO pin 43
#define GPIOAREN1_42 (1<<10) // Set Async Rising Edge Detect on GPIO pin 42
#define GPIOAREN1_41 (1<<9) // Set Async Rising Edge Detect on GPIO pin 41
#define GPIOAREN1_40 (1<<8) // Set Async Rising Edge Detect on GPIO pin 40
#define GPIOAREN1_39 (1<<7) // Set Async Rising Edge Detect on GPIO pin 39
#define GPIOAREN1_38 (1<<6) // Set Async Rising Edge Detect on GPIO pin 38
#define GPIOAREN1_37 (1<<5) // Set Async Rising Edge Detect on GPIO pin 37
#define GPIOAREN1_36 (1<<4) // Set Async Rising Edge Detect on GPIO pin 36
#define GPIOAREN1_35 (1<<3) // Set Async Rising Edge Detect on GPIO pin 35
#define GPIOAREN1_34 (1<<2) // Set Async Rising Edge Detect on GPIO pin 34
#define GPIOAREN1_33 (1<<1) // Set Async Rising Edge Detect on GPIO pin 33
#define GPIOAREN1_32 (1<<0) // Set Async Rising Edge Detect on GPIO pin 32
#define GPIO_AFEN0 (0x88) // GPIO Pin Async Falling Edge Detect Status 0
//-------------------------------------------------------------------------------------------------------------------
#define GPIOAFEN0_31 (1u<<31) // Set Async Falling Edge Detect on GPIO pin 31 (unsigned literal avoids shifting 1 into the int sign bit)
#define GPIOAFEN0_30 (1<<30) // Set Async Falling Edge Detect on GPIO pin 30
#define GPIOAFEN0_29 (1<<29) // Set Async Falling Edge Detect on GPIO pin 29
#define GPIOAFEN0_28 (1<<28) // Set Async Falling Edge Detect on GPIO pin 28
#define GPIOAFEN0_27 (1<<27) // Set Async Falling Edge Detect on GPIO pin 27
#define GPIOAFEN0_26 (1<<26) // Set Async Falling Edge Detect on GPIO pin 26
#define GPIOAFEN0_25 (1<<25) // Set Async Falling Edge Detect on GPIO pin 25
#define GPIOAFEN0_24 (1<<24) // Set Async Falling Edge Detect on GPIO pin 24
#define GPIOAFEN0_23 (1<<23) // Set Async Falling Edge Detect on GPIO pin 23
#define GPIOAFEN0_22 (1<<22) // Set Async Falling Edge Detect on GPIO pin 22
#define GPIOAFEN0_21 (1<<21) // Set Async Falling Edge Detect on GPIO pin 21
#define GPIOAFEN0_20 (1<<20) // Set Async Falling Edge Detect on GPIO pin 20
#define GPIOAFEN0_19 (1<<19) // Set Async Falling Edge Detect on GPIO pin 19
#define GPIOAFEN0_18 (1<<18) // Set Async Falling Edge Detect on GPIO pin 18
#define GPIOAFEN0_17 (1<<17) // Set Async Falling Edge Detect on GPIO pin 17
#define GPIOAFEN0_16 (1<<16) // Set Async Falling Edge Detect on GPIO pin 16
#define GPIOAFEN0_15 (1<<15) // Set Async Falling Edge Detect on GPIO pin 15
#define GPIOAFEN0_14 (1<<14) // Set Async Falling Edge Detect on GPIO pin 14
#define GPIOAFEN0_13 (1<<13) // Set Async Falling Edge Detect on GPIO pin 13
#define GPIOAFEN0_12 (1<<12) // Set Async Falling Edge Detect on GPIO pin 12
#define GPIOAFEN0_11 (1<<11) // Set Async Falling Edge Detect on GPIO pin 11
#define GPIOAFEN0_10 (1<<10) // Set Async Falling Edge Detect on GPIO pin 10
#define GPIOAFEN0_9 (1<<9) // Set Async Falling Edge Detect on GPIO pin 9
#define GPIOAFEN0_8 (1<<8) // Set Async Falling Edge Detect on GPIO pin 8
#define GPIOAFEN0_7 (1<<7) // Set Async Falling Edge Detect on GPIO pin 7
#define GPIOAFEN0_6 (1<<6) // Set Async Falling Edge Detect on GPIO pin 6
#define GPIOAFEN0_5 (1<<5) // Set Async Falling Edge Detect on GPIO pin 5
#define GPIOAFEN0_4 (1<<4) // Set Async Falling Edge Detect on GPIO pin 4
#define GPIOAFEN0_3 (1<<3) // Set Async Falling Edge Detect on GPIO pin 3
#define GPIOAFEN0_2 (1<<2) // Set Async Falling Edge Detect on GPIO pin 2
#define GPIOAFEN0_1 (1<<1) // Set Async Falling Edge Detect on GPIO pin 1
#define GPIOAFEN0_0 (1<<0) // Set Async Falling Edge Detect on GPIO pin 0
#define GPIO_AFEN1 (0x8c) // GPIO Pin Async Falling Edge Detect Status 1
//-------------------------------------------------------------------------------------------------------------------
#define GPIOAFEN1_53 (1<<21) // Set Async Falling Edge Detect on GPIO pin 53
#define GPIOAFEN1_52 (1<<20) // Set Async Falling Edge Detect on GPIO pin 52
#define GPIOAFEN1_51 (1<<19) // Set Async Falling Edge Detect on GPIO pin 51
#define GPIOAFEN1_50 (1<<18) // Set Async Falling Edge Detect on GPIO pin 50
#define GPIOAFEN1_49 (1<<17) // Set Async Falling Edge Detect on GPIO pin 49
#define GPIOAFEN1_48 (1<<16) // Set Async Falling Edge Detect on GPIO pin 48
#define GPIOAFEN1_47 (1<<15) // Set Async Falling Edge Detect on GPIO pin 47
#define GPIOAFEN1_46 (1<<14) // Set Async Falling Edge Detect on GPIO pin 46
#define GPIOAFEN1_45 (1<<13) // Set Async Falling Edge Detect on GPIO pin 45
#define GPIOAFEN1_44 (1<<12) // Set Async Falling Edge Detect on GPIO pin 44
#define GPIOAFEN1_43 (1<<11) // Set Async Falling Edge Detect on GPIO pin 43
#define GPIOAFEN1_42 (1<<10) // Set Async Falling Edge Detect on GPIO pin 42
#define GPIOAFEN1_41 (1<<9) // Set Async Falling Edge Detect on GPIO pin 41
#define GPIOAFEN1_40 (1<<8) // Set Async Falling Edge Detect on GPIO pin 40
#define GPIOAFEN1_39 (1<<7) // Set Async Falling Edge Detect on GPIO pin 39
#define GPIOAFEN1_38 (1<<6) // Set Async Falling Edge Detect on GPIO pin 38
#define GPIOAFEN1_37 (1<<5) // Set Async Falling Edge Detect on GPIO pin 37
#define GPIOAFEN1_36 (1<<4) // Set Async Falling Edge Detect on GPIO pin 36
#define GPIOAFEN1_35 (1<<3) // Set Async Falling Edge Detect on GPIO pin 35
#define GPIOAFEN1_34 (1<<2) // Set Async Falling Edge Detect on GPIO pin 34
#define GPIOAFEN1_33 (1<<1) // Set Async Falling Edge Detect on GPIO pin 33
#define GPIOAFEN1_32 (1<<0) // Set Async Falling Edge Detect on GPIO pin 32
#define GPIO_GPPUD (0x94) // GPIO Pin Pull Up/Down Enable
//-------------------------------------------------------------------------------------------------------------------
#define GPIOPUD_OFF (0b00) // Disable Pull Up/Down Control
#define GPIOPUD_DOWN (0b01) // Enable Pull Down
#define GPIOPUD_UP (0b10) // Enable Pull Up
#define GPIO_GPPUDCLK1 (0x98) // GPIO Pin Pull Up/Down Enable Clock 0
//-------------------------------------------------------------------------------------------------------------------
#define GPIOCLK1_31 (1u<<31) // Assert Clock on GPIO pin 31 (unsigned literal avoids shifting 1 into the int sign bit)
#define GPIOCLK1_30 (1<<30) // Assert Clock on GPIO pin 30
#define GPIOCLK1_29 (1<<29) // Assert Clock on GPIO pin 29
#define GPIOCLK1_28 (1<<28) // Assert Clock on GPIO pin 28
#define GPIOCLK1_27 (1<<27) // Assert Clock on GPIO pin 27
#define GPIOCLK1_26 (1<<26) // Assert Clock on GPIO pin 26
#define GPIOCLK1_25 (1<<25) // Assert Clock on GPIO pin 25
#define GPIOCLK1_24 (1<<24) // Assert Clock on GPIO pin 24
#define GPIOCLK1_23 (1<<23) // Assert Clock on GPIO pin 23
#define GPIOCLK1_22 (1<<22) // Assert Clock on GPIO pin 22
#define GPIOCLK1_21 (1<<21) // Assert Clock on GPIO pin 21
#define GPIOCLK1_20 (1<<20) // Assert Clock on GPIO pin 20
#define GPIOCLK1_19 (1<<19) // Assert Clock on GPIO pin 19
#define GPIOCLK1_18 (1<<18) // Assert Clock on GPIO pin 18
#define GPIOCLK1_17 (1<<17) // Assert Clock on GPIO pin 17
#define GPIOCLK1_16 (1<<16) // Assert Clock on GPIO pin 16
#define GPIOCLK1_15 (1<<15) // Assert Clock on GPIO pin 15
#define GPIOCLK1_14 (1<<14) // Assert Clock on GPIO pin 14
#define GPIOCLK1_13 (1<<13) // Assert Clock on GPIO pin 13
#define GPIOCLK1_12 (1<<12) // Assert Clock on GPIO pin 12
#define GPIOCLK1_11 (1<<11) // Assert Clock on GPIO pin 11
#define GPIOCLK1_10 (1<<10) // Assert Clock on GPIO pin 10
#define GPIOCLK1_9 (1<<9) // Assert Clock on GPIO pin 9
#define GPIOCLK1_8 (1<<8) // Assert Clock on GPIO pin 8
#define GPIOCLK1_7 (1<<7) // Assert Clock on GPIO pin 7
#define GPIOCLK1_6 (1<<6) // Assert Clock on GPIO pin 6
#define GPIOCLK1_5 (1<<5) // Assert Clock on GPIO pin 5
#define GPIOCLK1_4 (1<<4) // Assert Clock on GPIO pin 4
#define GPIOCLK1_3 (1<<3) // Assert Clock on GPIO pin 3
#define GPIOCLK1_2 (1<<2) // Assert Clock on GPIO pin 2
#define GPIOCLK1_1 (1<<1) // Assert Clock on GPIO pin 1
#define GPIOCLK1_0 (1<<0) // Assert Clock on GPIO pin 0
#define GPIO_GPPUDCLK2 (0x9c) // GPIO Pin Pull Up/Down Enable Clock 1 (pins 32-53; GPPUDCLK1 in the BCM2835 datasheet)
//-------------------------------------------------------------------------------------------------------------------
#define GPIOCLK2_53 (1<<21) // Assert Clock on GPIO pin 53
#define GPIOCLK2_52 (1<<20) // Assert Clock on GPIO pin 52
#define GPIOCLK2_51 (1<<19) // Assert Clock on GPIO pin 51
#define GPIOCLK2_50 (1<<18) // Assert Clock on GPIO pin 50
#define GPIOCLK2_49 (1<<17) // Assert Clock on GPIO pin 49
#define GPIOCLK2_48 (1<<16) // Assert Clock on GPIO pin 48
#define GPIOCLK2_47 (1<<15) // Assert Clock on GPIO pin 47
#define GPIOCLK2_46 (1<<14) // Assert Clock on GPIO pin 46
#define GPIOCLK2_45 (1<<13) // Assert Clock on GPIO pin 45
#define GPIOCLK2_44 (1<<12) // Assert Clock on GPIO pin 44
#define GPIOCLK2_43 (1<<11) // Assert Clock on GPIO pin 43
#define GPIOCLK2_42 (1<<10) // Assert Clock on GPIO pin 42
#define GPIOCLK2_41 (1<<9) // Assert Clock on GPIO pin 41
#define GPIOCLK2_40 (1<<8) // Assert Clock on GPIO pin 40
#define GPIOCLK2_39 (1<<7) // Assert Clock on GPIO pin 39
#define GPIOCLK2_38 (1<<6) // Assert Clock on GPIO pin 38
#define GPIOCLK2_37 (1<<5) // Assert Clock on GPIO pin 37
#define GPIOCLK2_36 (1<<4) // Assert Clock on GPIO pin 36
#define GPIOCLK2_35 (1<<3) // Assert Clock on GPIO pin 35
#define GPIOCLK2_34 (1<<2) // Assert Clock on GPIO pin 34
#define GPIOCLK2_33 (1<<1) // Assert Clock on GPIO pin 33
#define GPIOCLK2_32 (1<<0) // Assert Clock on GPIO pin 32
<|start_filename|>platform/bcm2836/pic/PicDetermineIrq.cc<|end_filename|>
//===================================================================================================================
//
// PicDetermineIrq.cc -- Get the current IRQ from the PIC
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "timer.h"
#include "printf.h"
#include "pic.h"
//
// -- From an interrupt, determine what is the IRQ to handle
// ------------------------------------------------------
EXTERN_C EXPORT KERNEL
int _PicDetermineIrq(PicDevice_t *dev)
{
    // Without a device structure there is nothing to interrogate.
    if (!dev) return -1;

    Bcm2835Pic_t *pic = (Bcm2835Pic_t *)dev->device.deviceData;
    int cpuNum = thisCpu->cpuNum;

    //
    // -- First priority: this core's own interrupt sources (one source
    //    register per core, 4 bytes apart; only the low 8 bits matter)
    //    --------------------------------------------------------------
    archsize_t pending = MmioRead(pic->timerLoc + TIMER_IRQ_SOURCE + (cpuNum * 4)) & 0xff;
    archsize_t bit = __builtin_ffs(pending);        // 1-based index of lowest set bit; 0 if none
    if (bit) return BCM2836_CORE_BASE + (bit - 1);

    //
    // -- Next: the shared GPU interrupts 0-31
    //    -------------------------------------
    pending = MmioRead(pic->picLoc + INT_IRQPEND1);
    bit = __builtin_ffs(pending);
    if (bit) return BCM2835_GPU_BASE0 + (bit - 1);

    //
    // -- Then: the shared GPU interrupts 32-63
    //    --------------------------------------
    pending = MmioRead(pic->picLoc + INT_IRQPEND2);
    bit = __builtin_ffs(pending);
    if (bit) return BCM2835_GPU_BASE1 + (bit - 1);

    //
    // -- Nothing pending anywhere: treat as a spurious interrupt
    //    --------------------------------------------------------
    return (archsize_t)-1;
}
<|start_filename|>modules/kernel/src/stacks/StackVars.cc<|end_filename|>
//===================================================================================================================
//
// StackVars.cc -- Kernel Stack Variables
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Dec-01 Initial 0.4.6d ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "stacks.h"
//
// -- This is the bitmap we will use to keep track of the kernel stacks
//    (each bit presumably marks one stack slot as allocated -- confirm
//    against the stack allocation code)
// ------------------------------------------------------------------
EXPORT KERNEL_DATA uint32_t stacks[STACK_COUNT] = {0};
//
// -- This is the lock that will protect the stack bitmap above
// ---------------------------------------------
EXPORT KERNEL_DATA Spinlock_t stackBitmapLock = {0};
<|start_filename|>modules/kernel/src/debugger/DebuggerRelease.cc<|end_filename|>
//===================================================================================================================
//
// DebuggerRelease.cc -- Release the other cores from a stopped state
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-03 Initial v0.6.0a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "pic.h"
#include "debugger.h"
//
// -- Release the other cores from a stopped state
// --------------------------------------------
EXTERN_C EXPORT KERNEL
void DebuggerRelease(void)
{
    // Clear the engagement counter -- presumably the condition the other
    // cores spin on while they are stopped (confirm against DebuggerEngage()).
    AtomicSet(&debugCommunication.coresEngaged, 0);
    // Restore the interrupt state that was saved when the debugger engaged.
    RestoreInterrupts(debugCommunication.debuggerFlags);
}
<|start_filename|>modules/kernel/src/debugger/DebugMsgqStat.cc<|end_filename|>
//===================================================================================================================
//
// DebugMsgqStat.cc -- Dump the message queue list status
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-10 Initial v0.6.1a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "msgq.h"
#include "debugger.h"
//
// -- Print an individual Message Queue Stats
// ---------------------------------------
EXTERN_C HIDDEN KERNEL
void PrintMsgqStats(MessageQueue_t *q)
{
    // Emit one table row for queue `q`: address (bold), status name, then the
    // stated vs. independently-walked counts for both the message list and the
    // waiting list (a mismatch between stated and walked counts indicates
    // corruption).  Column widths line up with the header printed by
    // DebugMsgqStatus().
    // NOTE(review): DbgSpace(n, ...) appears to pad the printed field to n
    // columns -- confirm against its definition.
    kprintf(ANSI_ATTR_BOLD);
    DbgSpace(12, kprintf("| %p ", q));
    kprintf(ANSI_ATTR_NORMAL);
    DbgSpace(10, kprintf("| %s ", MsgqStatName(AtomicRead(&q->status))));
    DbgSpace(14, kprintf("| %d ", q->queue.count));         // message count as stated by the queue
    DbgSpace(12, kprintf("| %d ", ListCount(&q->queue)));   // message count computed by walking the list
    DbgSpace(14, kprintf("| %d ", q->waiting.count));       // waiter count as stated by the queue
    DbgSpace(12, kprintf("| %d ", ListCount(&q->waiting))); // waiter count computed by walking the list
    kprintf("|\n");
}
//
// -- Debug the message queues
// ------------------------
EXTERN_C EXPORT KERNEL
void DebugMsgqStatus(void)
{
    // Stop the other cores so the queue list cannot change under us.
    DebuggerEngage(DIPI_ENGAGE);

    kprintf("+------------+----------+--------------+------------+--------------+------------+\n");
    kprintf("| " ANSI_ATTR_BOLD ANSI_FG_BLUE "Queue Addr" ANSI_ATTR_NORMAL " | " ANSI_ATTR_BOLD ANSI_FG_BLUE
            "Status" ANSI_ATTR_NORMAL " | " ANSI_ATTR_BOLD ANSI_FG_BLUE "#Msgs Stated" ANSI_ATTR_NORMAL
            " | " ANSI_ATTR_BOLD ANSI_FG_BLUE "#Msgs Calc" ANSI_ATTR_NORMAL " | " ANSI_ATTR_BOLD ANSI_FG_BLUE
            "#Wait Stated" ANSI_ATTR_NORMAL " | " ANSI_ATTR_BOLD ANSI_FG_BLUE "#Wait Calc" ANSI_ATTR_NORMAL
            " |\n");
    kprintf("+------------+----------+--------------+------------+--------------+------------+\n");

    // Walk the global message queue list, printing one row per queue.
    for (ListHead_t::List_t *node = msgqList.list.next; node != &msgqList.list; node = node->next) {
        PrintMsgqStats(FIND_PARENT(node, MessageQueue_t, list));
    }

    kprintf("+------------+----------+--------------+------------+--------------+------------+\n");

    // Let the other cores resume.
    DebuggerRelease();
}
<|start_filename|>modules/kernel/src/cpu/CpuMyStruct.cc<|end_filename|>
//===================================================================================================================
//
// CpuMyStruct.cc -- For the APs, initialize the specific elements in the cpus array
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Feb-03 Initial v0.5.0f ADCL Initial version
//
//===================================================================================================================
#include "cpu.h"
#include "printf.h"
#include "pic.h"
//
// -- Complete the cpu structure initialization for this core
// -------------------------------------------------------
EXTERN_C EXPORT LOADER
archsize_t CpuMyStruct(void)
{
    // Locate the per-CPU slot reserved for the core that is currently starting
    // (cpuStarting is presumably set by the BSP before releasing this AP --
    // confirm against the CPU startup sequence).
    volatile ArchCpu_t *cpu = &cpus.perCpuData[cpus.cpuStarting];

    // Record where this core is located and count it among the running cores.
    cpu->location = ArchCpuLocation();
    cpus.cpusRunning ++;

    // Hand the address of this core's structure back to the caller.
    return (archsize_t)cpu;
}
<|start_filename|>platform/bcm2836/mailbox/MailboxReceive.cc<|end_filename|>
//===================================================================================================================
//
// MailboxReceive.cc -- Receive a message from a mailbox
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Please note that this function will perform the adjustment between ARM/VC address space.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Jan-05 Initial 0.2.0 ADCL Initial version
// 2019-Feb-15 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "hardware.h"
//
// -- Receive a message from the mailbox
// ----------------------------------
EXTERN_C EXPORT KERNEL
archsize_t _MailboxReceive(MailboxDevice_t *dev, uint32_t mailbox)
{
    // -- sanity checks: a device is required, and only channels 0-15 exist
    if (!dev) return -1;
    if (mailbox > 0x0f) return -1;

    kprintf(".. Preparing to receive data\n");

    for ( ; ; ) {
        // -- spin while status bit 30 is set (presumably the "empty" flag --
        //    confirm against the BCM2835 mailbox documentation)
        while (MmioRead(dev->base + MB_STATUS) & (1 << 30)) { }

        uint32_t data = MmioRead(dev->base + MB_READ);

        // -- the low nibble carries the channel; ignore other channels' messages
        if ((data & 0x0f) != mailbox) continue;

        kprintf(".. Data received\n");

        // -- strip the channel bits and adjust from VC to ARM address space
        return ((data & 0xfffffff0) + ARM_MAILBOX_OFFSET);
    }
}
<|start_filename|>platform/pc/init/PlatformEarlyInit.cc<|end_filename|>
//===================================================================================================================
//
// PlatformEarlyInit.cc -- Handle the early initialization for the pc platform
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This function is called after `MmuEarlyInit()`, so we expect to have access to kernel virtual memory addresses.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-05 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "hardware.h"
#include "hw-disc.h"
#include "serial.h"
#include "mmu.h"
#include "pic.h"
#include "interrupt.h"
#include "printf.h"
#include "platform.h"
//
// -- Handle the early initialization for the pc platform
// ---------------------------------------------------
EXTERN_C EXPORT LOADER
void PlatformEarlyInit(void)
{
    // Bring up the serial port first so every later step can emit debug output.
    SerialOpen(&debugSerial);        // initialize the serial port so we can output debug data
    kprintf("Hello...\n");
    // If the CPUID instruction is available, record that and collect its data.
    if (CheckCpuid() != 0) {
        SetCpuid(true);
        CollectCpuid();
    }
    HwDiscovery();
    // Locate the ACPI RSDP; without it we cannot enumerate CPUs, so fall back
    // to assuming a single CPU and skip straight to CPU initialization.
    RSDP_t *rsdp = AcpiFindRsdp();
    if (rsdp == NULL) {
        cpus.cpusDiscovered = 1;
        goto exit;
    }
    // -- temporarily map the acpi tables (maps rsdp's address onto its own
    //    frame, i.e. an identity-style mapping of that one page)
    MmuMapToFrame((archsize_t)rsdp, (frame_t)(((archsize_t)rsdp) >> 12), PG_KRN);
    // Prefer the 64-bit XSDT when present and readable; otherwise fall back to the RSDT.
    if ((rsdp->xsdtAddress != 0) && (AcpiReadXsdt(rsdp->xsdtAddress) == true)) {
        // -- do nothing here...
    } else {
        AcpiReadRsdt(rsdp->rsdtAddress);
        kprintf("The APIC base address is at %p\n", READ_APIC_BASE());
    }
    // Clamp the discovered CPU count to what this kernel build supports.
    if (cpus.cpusDiscovered > MAX_CPUS) cpus.cpusDiscovered = MAX_CPUS;
    cpus.cpusRunning = 1;        // only the boot CPU is running at this point
    // -- unmap the acpi tables
    MmuUnmapPage((archsize_t)rsdp);
exit:
    // -- Complete the CPU initialization
    CpuInit();
}
<|start_filename|>platform/pc/acpi/AcpiReadMadt.cc<|end_filename|>
//===================================================================================================================
//
// AcpiReadMadt.cc -- Read the MADT table and determine what we will do with the information
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-07 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "hw-disc.h"
#include "lists.h" // for MEMBER_OFFSET
#include "hardware.h"
//
// -- Read the ACPI MADT Table, and figure out what it means to CenturyOS
// -------------------------------------------------------------------
EXTERN_C EXPORT LOADER
void AcpiReadMadt(archsize_t loc)
{
    // `loc` is the (already-mapped) address of the MADT; walk its variable-
    // length Interrupt Controller structures and record what we support.
    MADT_t *madt = (MADT_t *)loc;

    kprintf(".... MADT table length is %p\n", madt->length);
    kprintf(".... MADT flags are %p\n", madt->flags);
    kprintf(".... MADT Local IC Address is %p\n", madt->localIntCtrlAddr);

    // The IC structures start right after the fixed MADT header fields.
    uint8_t *wrk = (uint8_t *)(loc + MEMBER_OFFSET(MADT_t,intCtrlStructs));
    uint8_t *first = wrk;

    while (wrk - first < (long)(madt->length - MEMBER_OFFSET(MADT_t,intCtrlStructs))) {
        // Each IC structure starts with: byte 0 = type, byte 1 = total length.
        uint8_t len = wrk[1];

        // -- Defend against a corrupt table: a zero-length entry would leave
        //    `wrk` unchanged and loop forever, so stop scanning instead.
        if (len == 0) {
            kprintf("!!!! MADT IC Table entry has a zero length; aborting MADT scan\n");
            break;
        }

        switch(wrk[0]) {
        case MADT_PROCESSOR_LOCAL_APIC:
            {
                // One entry per CPU core: count it and record its local APIC.
                MadtLocalApic_t *local = (MadtLocalApic_t *)wrk;
                cpus.cpusDiscovered ++;
                IncLocalApic();
                kprintf(".... MADT_PROCESSOR_LOCAL_APIC\n");
                kprintf("...... Proc ID %x; APIC ID %x; %s (%d found so far)\n", local->procId, local->apicId,
                        local->flags&1?"enabled":"disabled", cpus.cpusDiscovered);
            }
            break;

        case MADT_IO_APIC:
            {
                // An I/O APIC: register its MMIO address and GSI base.
                MadtIoApic_t *local = (MadtIoApic_t *)wrk;
                kprintf(".... MADT_IO_APIC\n");
                kprintf("...... APIC Addr: %p, Global Sys Int Base: %x\n", local->ioApicAddr, local->gsiBase);
                AddIoapic(local->ioApicAddr, local->gsiBase);
            }
            break;

        case MADT_INTERRUPT_SOURCE_OVERRIDE:
            {
                // ISA IRQ remapped to a different Global System Interrupt; log only.
                MadtIntSrcOverride_t *local = (MadtIntSrcOverride_t *)wrk;
                kprintf(".... MADT_INTERRUPT_SOURCE_OVERRIDE\n");
                kprintf("...... source: %x, Global Sys Int: %x\n", (uint32_t)local->source, local->gsInt);
                kprintf("...... Polarity: %x; Trigger Mode: %x\n", local->flags & 0x03, (local->flags >> 2) & 0x03);
            }
            break;

        case MADT_NMI_SOURCE:
            {
                // Non-maskable interrupt source; log only.
                MadtMNISource_t *local = (MadtMNISource_t *)wrk;
                kprintf(".... MADT_NMI_SOURCE\n");
                kprintf("...... Global Sys Int: %x\n", local->gsInt);
                kprintf("...... Polarity: %x; Trigger Mode: %x\n", local->flags & 0x03, (local->flags >> 2) & 0x03);
            }
            break;

        case MADT_LOCAL_APIC_NMI:
            {
                // Local APIC LINTn pin wired to NMI; log only.
                MadtLocalApicNMI_t *local = (MadtLocalApicNMI_t *)wrk;
                kprintf(".... MADT_LOCAL_APIC_NMI\n");
                kprintf("...... APIC Proc ID: %x; local INT#: %x\n", (uint32_t)local->procId, (uint32_t)local->localLINT);
                kprintf("...... Polarity: %x; Trigger Mode: %x\n", local->flags & 0x03, (local->flags >> 2) & 0x03);
            }
            break;

        // -- The remaining defined IC structure types are recognized but not
        //    supported by CenturyOS; report and skip them.
        case MADT_LOCAL_APIC_ADDRESS_OVERRIDE:
            kprintf("!!!! MADT IC Table Type MADT_LOCAL_APIC_ADDRESS_OVERRIDE is not supported\n");
            break;

        case MADT_IO_SAPIC:
            kprintf("!!!! MADT IC Table Type MADT_IO_SAPIC is not supported\n");
            break;

        case MADT_LOCAL_SAPIC:
            kprintf("!!!! MADT IC Table Type MADT_LOCAL_SAPIC is not supported\n");
            break;

        case MADT_PLATFORM_INTERRUPT_SOURCES:
            kprintf("!!!! MADT IC Table Type MADT_PLATFORM_INTERRUPT_SOURCES is not supported\n");
            break;

        case MADT_PROCESSOR_LOCAL_X2APIC:
            kprintf("!!!! MADT IC Table Type MADT_PROCESSOR_LOCAL_X2APIC is not supported\n");
            break;

        case MADT_LOCAL_X2APIC_NMI:
            kprintf("!!!! MADT IC Table Type MADT_LOCAL_X2APIC_NMI is not supported\n");
            break;

        case MADT_GIC:
            kprintf("!!!! MADT IC Table Type GIC is not supported\n");
            break;

        case MADT_GICD:
            kprintf("!!!! MADT IC Table Type GICD is not supported\n");
            break;

        default:
            kprintf("!!!! Unknown MADT IC Table Type: %x\n", (uint32_t)wrk[0]);
            break;
        }

        // Advance to the next IC structure using the entry's own length field.
        wrk += len;
    }
}
<|start_filename|>arch/x86/cpu/ArchIdtSetGate.cc<|end_filename|>
//===================================================================================================================
//
// ArchIdtSetGate.cc -- Set the gate in the IDT.
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This function sets up the IDT gate in the table itself
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-May-30 Initial 0.1.0 ADCL Initial version
// 2019-Feb-09 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "cpu.h"
//
// -- Construct a single IDT gate
// ---------------------------
EXTERN_C EXPORT KERNEL
void ArchIdtSetGate(uint8_t num, archsize_t base, archsize_t sel, uint8_t flags)
{
    // -- The IDT lives at a fixed virtual address; treat it as an array of gates.
    IdtEntry_t *idtEntries = (IdtEntry_t *)X86_VIRT_IDT;

    // -- NOTE: the original code guarded with `if (num > 255) return;`, but `num` is a
    //    uint8_t and can never exceed 255, so that check was dead code (and triggers a
    //    tautological-comparison warning); it has been removed.

    // -- split the 32-bit handler address across the low/high 16-bit fields of the gate
    idtEntries[num].baseLow = (uint16_t)(base & 0xffff);
    idtEntries[num].baseHigh = (uint16_t)((base >> 16) & 0xffff);
    idtEntries[num].sel = sel;          // -- code segment selector used when the gate fires
    idtEntries[num].always0 = 0;        // -- reserved byte; must be zero
    idtEntries[num].flags = flags;      // -- present/DPL/gate-type bits
}
<|start_filename|>modules/kernel/src/interrupts/IsrUnregister.cc<|end_filename|>
//===================================================================================================================
//
// IsrUnregister.cc -- Unregister an ISR Handler
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Jul-06 Initial 0.1.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "printf.h"
#include "interrupt.h"
//
// -- Remove an ISR handler from the handlers table
// ---------------------------------------------
void IsrUnregister(uint8_t interrupt)
{
archsize_t flags = DisableInterrupts();
if (isrHandlers[interrupt] == NULL_ISR) {
kprintf("When unregistering interrupt %d, no handler is registered\n", interrupt);
} else {
isrHandlers[interrupt] = NULL_ISR;
}
RestoreInterrupts(flags);
}
<|start_filename|>platform/pc/pic/PicEoi.cc<|end_filename|>
//===================================================================================================================
//
// PicEoi.cc -- Issue an end of interrupt to the interrupt controller
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "hardware.h"
#include "pic.h"
//
// -- Issue an EOI for the timer
// --------------------------
EXTERN_C EXPORT KERNEL
void _PicEoi(PicDevice_t *dev, Irq_t irq)
{
    // -- guard against a missing device structure
    if (!dev) return;

    // -- IRQs 8 and above are routed through the slave PIC, which needs its own EOI first
    bool onSlave = (irq >= 8);
    if (onSlave) {
        outb(PIC2 + PIC_SLAVE_COMMAND, 0x20);
    }

    // -- the master PIC always receives an EOI
    outb(PIC1 + PIC_MASTER_COMMAND, 0x20);
}
<|start_filename|>modules/kernel/src/pmm/PmmDoAllocAlignedFrames.cc<|end_filename|>
//===================================================================================================================
//
//  PmmDoAllocAlignedFrames.cc -- Allocate a number of frames (which could be 1 frame) and align the resulting block
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This is going to be messy. First, we cannot lock the table while we search since the time to search is going
// to take so long. We would block nearly everything for an extended period of time during a complicated
// algorithm. So, we are going to perform some of this work before we get the lock. To reduce the amount of
// opportunity for conflict (although it will not eliminate it), we will work from the bottom of the stack up.
//
// Once we find a candidate that should fit our needs, we will get the lock, double check our evaluation, and
// remove it from the stack.
//
// From there, it will be one of 4 scenarios:
//  1. the block is perfectly aligned and perfectly sized -- though the probability of this is very low
// 2. the block is perfectly aligned but has extra frames after it
// 3. the block is sized and aligned such that the last frames of the block are aligned and sized just right for
// our needs
// 4. the block is quite large and has extra frames both before and after it
//
// If this looks like the heap block splitting scenarios, it is the same.
//
// Once we have removed it from the stack, we will trim off (as appropriate) both the starting frames and the
// trailing frames, adding the extra frames back onto the top of the stack.
//
// This algorithm is inefficient since it must look through the 'small frame blocks' at the top of the stack
// before it gets to the bigger 'large frame blocks' at the bottom of the stack.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-11 Initial 0.3.1 ADCL Initial version
// 2020-Apr-12 #405 v0.6.1c ADCL Redesign the PMM to store the stack in the freed frames themselves
//
//===================================================================================================================
#include "types.h"
#include "spinlock.h"
#include "heap.h"
#include "pmm.h"
//
// -- Split the block as needed to pull out the proper alignment and size of frames
// -----------------------------------------------------------------------------
EXTERN_C HIDDEN KERNEL
frame_t PmmSplitBlock(PmmFrameInfo_t *stack, frame_t frame, size_t blockSize, frame_t atFrame, size_t count)
{
    // -- `frame` is the first frame of the free block and `blockSize` its length; `atFrame`
    //    is the (aligned) first frame we want to hand out.  Any leading and trailing frames
    //    are pushed back onto the stack as new, smaller free blocks.
    if (frame < atFrame) {
        // -- the leading piece spans [frame, atFrame), i.e. `atFrame - frame` frames.
        //    BUG FIX: the original code pushed a count of `atFrame - blockSize` (and
        //    subtracted the same), mixing a frame number with a block length.
        size_t leadSize = atFrame - frame;

        // -- Create a new block with the leading frames
        PmmPush(stack, frame, leadSize);

        // -- adjust the existing block
        frame = atFrame;
        blockSize -= leadSize;
    }

    // -- check for frames to remove at the end of this block; or free the block since it is not needed
    if (blockSize > count) {
        // -- adjust this block to remove what we want
        frame += count;
        blockSize -= count;

        // -- finally push this block back onto the stack
        PmmPush(stack, frame, blockSize);
    }

    // -- what is left at this point is `count` frames at `atFrame`; return this value
    return atFrame;
}
//
// -- This function is the working to find a frame that is properly aligned and allocate multiple contiguous frames
// -------------------------------------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
frame_t PmmDoAllocAlignedFrames(Spinlock_t *lock, PmmFrameInfo_t *stack, const size_t count, const size_t bitAlignment)
{
    // kprintf("Handling a request to allocate frames aligned to %d-bit precision\n", bitAlignment);

    //
    // -- start by determining the bits we cannot have enabled when we evaluate a frame;
    //    an alignment of 12 bits or less produces a 0 shift and therefore frameBits == 0
    //    -----------------------------------------------------------------------------
    frame_t frameBits = ~(((frame_t)-1) << (bitAlignment<12?0:bitAlignment-12));

    // -- if there is no alignment required, save the hassle
    if (frameBits == 0) return PmmAllocateFrame();

    frame_t rv = 0;

    // -- cannot walk a stack whose top node is not currently mapped
    if (!MmuIsMapped((archsize_t)stack)) return 0;

    archsize_t flags = SPINLOCK_BLOCK_NO_INT(*lock) {
        SPINLOCK_BLOCK(pmm.searchLock) {
            // -- map the top stack node at the dedicated search address and walk the list
            MmuMapToFrame((archsize_t)pmm.search, stack->frame, PG_WRT | PG_KRN);

            while(true) {
                frame_t end = pmm.search->frame + pmm.search->count - 1;
                frame_t next;

                // -- here we determine if the block is big enough: round the block's first frame
                //    up to the requested alignment and check that `count` frames still fit
                if (((pmm.search->frame + frameBits) & ~frameBits) + count - 1 <= end) {
                    // -- capture this node's fields before unmapping it
                    frame_t p = pmm.search->prev;
                    frame_t n = pmm.search->next;
                    frame_t f = pmm.search->frame;
                    size_t sz = pmm.search->count;

                    MmuUnmapPage((archsize_t)pmm.search);

                    // -- unlink the node from the doubly-linked stack: patch the next
                    //    node's `prev` and the previous node's `next`
                    if (n) {
                        MmuMapToFrame((archsize_t)pmm.search, n, PG_WRT | PG_KRN);
                        pmm.search->prev = p;
                        MmuUnmapPage((archsize_t)pmm.search);
                    }

                    if (p) {
                        MmuMapToFrame((archsize_t)pmm.search, p, PG_WRT | PG_KRN);
                        pmm.search->next = n;
                        MmuUnmapPage((archsize_t)pmm.search);
                    }

                    // -- trim leading/trailing frames back onto the stack; keep `count`
                    //    frames starting at the aligned address
                    rv = PmmSplitBlock(stack, f, sz, (f + frameBits) & ~frameBits, count);
                    goto exit;
                }

                // -- move to the next node
                next = pmm.search->next;
                MmuUnmapPage((archsize_t)pmm.search);

                // -- here we check if we made it to the end of the stack
                if (next) MmuMapToFrame((archsize_t)pmm.search, next, PG_WRT | PG_KRN);
                else goto exit;
            }

exit:
            SPINLOCK_RLS(pmm.searchLock);
        }

        SPINLOCK_RLS_RESTORE_INT(*lock, flags);
    }

    // NOTE(review): this kprintf fires on every aligned allocation (not commented out like
    //               the one at the top) -- confirm it is intentional and not leftover debug
    kprintf("Aligned PMM Allocation is finally returning frame %x\n", rv);
    return rv;
}
<|start_filename|>platform/pc/acpi/AcpiReadXsdt.cc<|end_filename|>
//===================================================================================================================
//
// AcpiReadXsdt.cc -- Validate and read the XSDT table
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-06 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "mmu.h"
#include "hardware.h"
//
// -- read the xsdt table
// -------------------
EXTERN_C EXPORT LOADER
bool AcpiReadXsdt(archsize_t loc)
{
    kprintf("Reading the XSDT\n");

    CheckAcpi(loc);

    // -- bail out unless the table passes validation for the "XSDT" signature
    if (!AcpiCheckTable(loc, MAKE_SIG("XSDT"))) {
        kprintf("The XSDT does not match the required checks\n");
        return false;
    }

    // -- the payload after the common ACPI header is an array of 64-bit table pointers
    XSDT_t *xsdt = (XSDT_t *)loc;
    uint32_t entryCount = (xsdt->length - ACPI_HDR_SIZE) / sizeof(uint64_t);
    kprintf("... checking %x entries\n", entryCount);

    for (uint32_t e = 0; e < entryCount; e ++) {
        uint64_t tableAddr = xsdt->entry[e];
        kprintf("The address for entry %x is %p\n", e, tableAddr);

        // -- skip NULL entries; read the signature of everything else
        if (tableAddr) AcpiGetTableSig(tableAddr);
    }

    return true;
}
<|start_filename|>platform/bcm2836/timer/TimerInit.cc<|end_filename|>
//===================================================================================================================
//
// TimerInit.cc -- Initialize the rpi2b timer
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-21 Initial 0.2.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "interrupt.h"
#include "printf.h"
#include "pic.h"
#include "timer.h"
//
// -- Set the timer to fire at the desired frequency
// ----------------------------------------------
EXTERN_C EXPORT KERNEL
void _TimerInit(TimerDevice_t *dev, uint32_t frequency)
{
    // -- nothing to do without a device structure
    if (!dev) return;

    // -- only the boot core (cpu 0) registers the ISR and computes the conversion factor
    if (thisCpu->cpuNum == 0) {
        IsrRegister(BCM2836_CORE_CNTPNSIRQ, dev->TimerCallBack);
        dev->factor = READ_CNTFRQ() / 1000000.0;        // -- hardware ticks per microsecond
        kprintf("IsrHandler registered\n");
    }

    // -- a clock frequency of 0 would make every interval calculation meaningless
    if (READ_CNTFRQ() == 0) {
        CpuPanicPushRegs("PANIC: Unable to determine the clock frequency (read as 0)\n");
    }

    //
    // -- So now, I should be able to calculate the desired interval.  This is done by taking the clock
    //    frequency and dividing it by the requested frequency.  i.e.: READ_CNTFRQ / frequency.
    //    ---------------------------------------------------------------------------------------------
    WRITE_CNTP_CVAL((uint64_t)-1);              // set the cval to its limit just to be in control

    // NOTE(review): the reload value is computed from the fixed base 1000000, not from
    //               READ_CNTFRQ() as the comment above describes -- confirm which is intended
    dev->reloadValue = 1000000 / frequency;

    PicInit(dev->pic, "PIC");                   // now, init the pic first
    WRITE_CNTP_TVAL(dev->reloadValue);          // -- arm the countdown timer
    WRITE_CNTP_CTL(1);                          // enable the timer

    PicUnmaskIrq(dev->pic, BCM2836_CORE_CNTPNSIRQ);
    kprintf("Timer Initialized\n");
}
<|start_filename|>platform/pc/init/PlatformApInit.cc<|end_filename|>
//===================================================================================================================
//
// PlatformApInit.cc -- Perform any AP core specific initialization for the platform
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Feb-29 Initial v0.5.0h ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "platform.h"
#include "serial.h"
#include "pic.h"
#include "cpu.h"
//
// -- Complete the platform initialization for the AP
// -----------------------------------------------
EXTERN_C EXPORT KERNEL
void PlatformApInit(void)
{
    // -- the only per-AP platform initialization needed is starting the local timer at 1000Hz
    TimerInit(timerControl, 1000);
}
<|start_filename|>platform/bcm2836/mailbox/MailboxVars.cc<|end_filename|>
//===================================================================================================================
//
// MailboxVars.cc -- These are the variables for the BCM2835 Mailboxes
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "hardware.h"
//
// -- This is the device structure that will be used for the kernel to access the gpio
// --------------------------------------------------------------------------------
EXPORT KERNEL_DATA
MailboxDevice_t kernelMailbox = {
    .base = KRN_MAILBOX_BASE,               // -- MMIO base address used for mailbox access
    .MailboxSend = _MailboxSend,            // -- registered send implementation
    .MailboxReceive = _MailboxReceive,      // -- registered receive implementation
};
<|start_filename|>arch/arm/DataAbortHandler.cc<|end_filename|>
//===================================================================================================================
//
// DataAbortHandler.cc -- Handle a data abort
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Dec-01 Initial 0.2.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "cpu.h"
#include "interrupt.h"
//
// -- Human-readable descriptions of the 32 ARM data-fault status codes; indexed by the
//    5-bit value assembled from the DFSR in DataAbortHandler() (bit patterns noted per entry)
//    ----------------------------------------------------------------------------------------
EXPORT KERNEL_DATA
const char *causes[] = {
    "Unknown",                                                              // 0b00000
    "Alignment Fault (fault on first lookup)",                              // 0b00001
    "Debug event",                                                          // 0b00010
    "Access Flag fault (First level)",                                      // 0b00011
    "Fault on instruction cache maintenance",                               // 0b00100
    "Translation fault (First level)",                                      // 0b00101
    "Access Flag fault (Second level)",                                     // 0b00110
    "Translation fault (Second level)",                                     // 0b00111
    "Synchronous external abort",                                           // 0b01000
    "Domain fault (First level)",                                           // 0b01001
    "Unknown",                                                              // 0b01010
    "Domain fault (Second level)",                                          // 0b01011
    "Synchronous external abort on translation table walk (First level)",   // 0b01100
    "Permission fault (First level)",                                       // 0b01101
    "Synchronous external abort on translation table walk (Second level)",  // 0b01110
    "Permission fault (Second level)",                                      // 0b01111
    "TLB conflict abort",                                                   // 0b10000
    "Unknown",                                                              // 0b10001
    "Unknown",                                                              // 0b10010
    "Unknown",                                                              // 0b10011
    "Implementation Defined Lockdown",                                      // 0b10100
    "Unknown",                                                              // 0b10101
    "Asynchronous external abort",                                          // 0b10110
    "Unknown",                                                              // 0b10111
    "Asynchronous parity error on memory access",                           // 0b11000
    "Synchronous parity error on memory access",                            // 0b11001
    "Implementation Defined coprocessor",                                   // 0b11010
    "Unknown",                                                              // 0b11011
    "Synchronous parity error on translation table walk (First level)",     // 0b11100
    "Unknown",                                                              // 0b11101
    "Synchronous parity error on translation table walk (Second level)",    // 0b11110
    "Unknown",                                                              // 0b11111
};
//
// -- Handle a data exception
// -----------------------
EXTERN_C EXPORT KERNEL
void DataAbortHandler(isrRegs_t *regs)
{
    archsize_t status = ReadDFSR();

    // -- the fault status is 5 bits: DFSR bit 10 supplies bit 4, DFSR bits 3:0 the rest
    int faultStatus = (int)(((status >> 10) & 1) << 4) | (int)(status & 0xf);

    kprintf("Data Exception:\n");
    kprintf(".. Data Fault Address: %p\n", ReadDFAR());
    kprintf(".. Data Fault Status Register: %p\n", status);
    kprintf(".. Fault status %x: %s\n", faultStatus, causes[faultStatus]);
    kprintf(".. Fault occurred because of a %s\n", ((status & (1 << 11)) != 0) ? "write" : "read");

    IsrDumpState(regs);
}
<|start_filename|>modules/kernel/src/heap/HeapAlignToPage.cc<|end_filename|>
//===================================================================================================================
//
// HeapAlignToPage.cc -- Align a block to a page boundary
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Split an entry to the first page boundary after allocating the header. This will result in a free block on the
// left of the page boundary. This block may be small and if so will need to be added to the previous block (which
// is allocated by definition) or at the beginning of the heap memory (special case).
//
// +------------------------------------------------------------------+
// | The entry before splitting. Split will occur at some location |
// | within the entry. |
// +------------------------------------------------------------------+
//
// One of 2 results will occur (as below):
//
// Page
// Boundary
// |
// |
// V
// +------------------+-----------------------------------------------+
// | A small block | A brand new entry inserted into the |
// | too small to | ordered list for the remaining free memory. |
// | add as a hole. | |
// +------------------+-----------------------------------------------+
// | A block of new | A brand new entry inserted into the |
// | free memory | ordered list for the remaining free memory. |
// | inserted to lst | |
// +------------------+-----------------------------------------------+
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-04 Initial version
// 2012-Jul-28 #53 Fix small blocks corruption
// 2012-Sep-16 Leveraged from Century
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2013-Sep-13 #74 Rewrite Debug.h to use assertions and write to TTY_LOG
// 2018-Jun-01 Initial 0.1.0 ADCL Copy this file from century32 to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "heap.h"
//
// -- Align a block to a Page boundary
// --------------------------------
OrderedList_t *HeapAlignToPage(OrderedList_t *entry)
{
    KHeapHeader_t *newHdr, *oldHdr;
    KHeapFooter_t *newFtr, *oldFtr;
    size_t leftSize, rightSize;
    OrderedList_t *ret;

    if (!assert(entry != 0)) HeapError("NULL entry in HeapAlignToPage()", "");
    HeapValidateHdr(entry->block, "HeapAlignToPage()");

    // initialize the working variables
    oldHdr = entry->block;

    // -- the new header is placed at the position computed by HeapCalcPageAdjustment();
    //    its footer (closing the left piece) sits immediately before it
    newHdr = (KHeapHeader_t *)(HeapCalcPageAdjustment(entry));
    newFtr = (KHeapFooter_t *)((char *)newHdr - sizeof(KHeapFooter_t));
    oldFtr = (KHeapFooter_t *)((char *)oldHdr + oldHdr->size - sizeof(KHeapFooter_t));

    // -- sizes of the two pieces, each measured header through footer inclusive
    leftSize = (char *)newFtr - (char *)oldHdr + sizeof(KHeapFooter_t);
    rightSize = (char *)oldFtr - (char *)newHdr + sizeof(KHeapFooter_t);

    HeapReleaseEntry(entry);        // will have better one(s) later

    // size the left block properly
    if (leftSize < MIN_HOLE_SIZE) {
        // -- the left piece is too small to stand as its own hole; fold its bytes into the
        //    block immediately preceding oldHdr (found via that block's footer back-pointer)
        KHeapHeader_t *wrkHdr;

        wrkHdr = ((KHeapFooter_t *)((byte_t *)oldHdr - sizeof(KHeapFooter_t )))->hdr;

        // -- only merge if the preceding block lies within the heap's managed range
        if ((byte_t *)wrkHdr >= kHeap->strAddr) {
            // -- grow the preceding block, preserving its footer contents at the new end
            KHeapFooter_t sav;
            KHeapFooter_t *tmp = (KHeapFooter_t *)((char *)wrkHdr + wrkHdr->size - sizeof(KHeapFooter_t));

            sav = *tmp;
            wrkHdr->size += leftSize;

            tmp = (KHeapFooter_t *)((char *)wrkHdr + wrkHdr->size - sizeof(KHeapFooter_t));
            *tmp = sav;
            HeapValidateHdr(wrkHdr, "Work Header in HeapAlignToPage()");
        }

        // -- signal "no left hole was created" for the validation at return
        oldHdr = 0;
    } else {
        // -- the left piece is big enough: mark it as a free hole and give it a list entry
        oldHdr->_magicUnion.magicHole = HEAP_MAGIC;
        oldHdr->_magicUnion.isHole = 1;
        oldHdr->size = leftSize;
        newFtr->hdr = oldHdr;
        newFtr->_magicUnion.magicHole = oldHdr->_magicUnion.magicHole;

        (void)HeapNewListEntry(oldHdr, 1);
        HeapValidateHdr(oldHdr, "Old Header in HeapAlignToPage() else");
    }

    // size the right block properly: it becomes the page-aligned free hole we return
    newHdr->_magicUnion.magicHole = HEAP_MAGIC;
    newHdr->_magicUnion.isHole = 1;
    newHdr->size = rightSize;
    oldFtr->hdr = newHdr;
    oldFtr->_magicUnion.magicHole = newHdr->_magicUnion.magicHole;

    ret = HeapNewListEntry(newHdr, 1);

    if (oldHdr) HeapValidateHdr(oldHdr, "Old Header in HeapAlignToPage() at return");
    HeapValidateHdr(newHdr, "New Header in HeapAlignToPage() at return");
    return ret;
}
<|start_filename|>platform/bcm2836/cores/CoresStart.cc<|end_filename|>
//===================================================================================================================
//
// CoresStart.cc -- Start the cores for the rpi2b
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Jan-04 Initial v0.5.0d ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "timer.h"
#include "hardware.h"
#include "pic.h"
#include "printf.h"
#include "mmu.h"
#include "process.h"
#include "cpu.h"
//
// -- a declaration of the entry point for the cores
// -----------------------------------------------
EXTERN_C EXPORT KERNEL
void entryAp(void);
//
// -- start the other cores; remains in the kernel since we may want to do this later as well
// ---------------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void CoresStart(void)
{
    // -- a single-core system has nothing to start
    if (cpus.cpusDiscovered < 2) return;

    // -- record where the boot core is before waking the others
    cpus.perCpuData[0].location = ArchCpuLocation();

    for (int core = 1; core < cpus.cpusDiscovered; core ++) {
        cpus.cpuStarting = core;
        AtomicSet(&cpus.perCpuData[core].state, CPU_STARTING);

        // -- each core has its own mailbox slot, 0x10 bytes apart
        kprintf("Starting core with message to %p\n", IPI_MAILBOX_BASE + 0x0c + (0x10 * core));
        MmioWrite(IPI_MAILBOX_BASE + 0x0c + (0x10 * core), (uint32_t)entryAp);
        SEV();

        kprintf(".. waiting for core to start...\n");

        // -- block until the AP reports it has left the CPU_STARTING state
        while (AtomicRead(&cpus.perCpuData[core].state) == CPU_STARTING) {
            ProcessMilliSleep(1);
        }
    }
}
<|start_filename|>modules/kernel/src/hardware/HwDiscovery.cc<|end_filename|>
//===================================================================================================================
//
// HwDiscovery.cc -- This source contains the i686 implementation of the hardware discovery.
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2017-Jun-09 Initial 0.1.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "hardware.h"
#include "hw-disc.h"
//
// -- Perform the hardware discovery
// ------------------------------
EXTERN_C EXPORT LOADER
void HwDiscovery(void)
{
    // -- start from a clean structure so every field reflects only this boot
    kMemSetB(localHwDisc, 0, sizeof(HardwareDiscovery_t));

    // -- parse both Multiboot 1 and Multiboot 2 information structures
    Mb1Parse();
    Mb2Parse();

    // -- finally let the platform add its own discovered details
    PlatformDiscovery();
}
<|start_filename|>arch/arm/mmu/MmuUnmapPage.cc<|end_filename|>
//===================================================================================================================
//
// MmuUnmapPage.cc -- Unmap a page in virtual address space, returning the frame in case something else needs done.
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-21 Initial 0.2.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "cpu.h"
#include "printf.h"
#include "pic.h"
#include "process.h"
#include "mmu.h"
//
// -- Check for the page and unmap if it is mapped.
// ---------------------------------------------
EXTERN_C EXPORT KERNEL
frame_t MmuUnmapPage(archsize_t addr)
{
    frame_t rv = 0;

    archsize_t flags = SPINLOCK_BLOCK_NO_INT(tlbFlush.lock) {
        // -- publish a sentinel address and notify the other cores a TLB flush is coming
        tlbFlush.addr = -1;
        PicBroadcastIpi(picControl, IPI_TLB_FLUSH);

        // -- locate the TTL1 (addr >> 20) and TTL2 (addr >> 12) entries for this address
        Ttl1_t *ttl1Table = (Ttl1_t *)(ARMV7_TTL1_TABLE_VADDR);
        Ttl1_t *ttl1Entry = &ttl1Table[addr >> 20];
        Ttl2_t *ttl2Tables = (Ttl2_t *)(ARMV7_TTL2_TABLE_VADDR);
        Ttl2_t *ttl2Entry = &ttl2Tables[addr >> 12];

        // -- if either level is a fault entry, the page is not mapped; return 0
        if (ttl1Entry->fault == ARMV7_MMU_FAULT) goto exit;
        if (ttl2Entry->fault == ARMV7_MMU_FAULT) goto exit;

        // -- capture the frame being released, then clear the whole TTL2 entry
        rv = ttl2Entry->frame;
        *(uint32_t *)ttl2Entry = 0;

exit:
        // -- clean the cache line holding the entry and drop this core's TLB entry
        WriteDCCMVAC((uint32_t)ttl2Entry);
        InvalidatePage(addr);

        //
        // -- Finally, wait for all the CPUs to complete the flush before continuing
        //    -----------------------------------------------------------------------
        AtomicSet(&tlbFlush.count, cpus.cpusRunning - 1);
        tlbFlush.addr = addr & ~(PAGE_SIZE - 1);
        while (AtomicRead(&tlbFlush.count) != 0 && picControl->ipiReady) {
            ProcessMilliSleep(150);
        }

        SPINLOCK_RLS_RESTORE_INT(tlbFlush.lock, flags);
    }

    return rv;
}
<|start_filename|>modules/kernel/src/heap/HeapFree.cc<|end_filename|>
//===================================================================================================================
//
// HeapFree.cc -- Free a block back into the heap
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Free a block back into the heap
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-26 Initial version
// 2012-Sep-16 Leveraged from Century
// 2013-Sep-01 #80 Re-implement Mutexes (that work now) (2018-05-31: removed)
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2013-Sep-13 #74 Rewrite Debug.h to use assertions and write to TTY_LOG
// 2018-May-31 Initial 0.1.0 ADCL Copied this file from century32 to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "spinlock.h"
#include "heap.h"
//
// -- This is the spinlock used to control single access to the heap
// --------------------------------------------------------------
extern Spinlock_t heapLock;
//
// -- Free a block of memory back to the heap
// ---------------------------------------
void HeapFree(void *mem)
{
    OrderedList_t *entry = 0;
    KHeapHeader_t *hdr;
    KHeapFooter_t *ftr;

    // -- freeing NULL is a harmless no-op
    if (!mem) return;

    archsize_t flags = SPINLOCK_BLOCK_NO_INT(heapLock) {
        // -- back up from the user pointer to the block header, then locate the footer
        hdr = (KHeapHeader_t *)((byte_t *)mem - sizeof(KHeapHeader_t));
        ftr = (KHeapFooter_t *)((byte_t *)hdr + hdr->size - sizeof(KHeapFooter_t));
        HeapValidateHdr(hdr, "Heap structures have been overrun by data!!");

        HeapCheckHealth();

        // -- silently ignore double-frees (already a hole) and corrupted blocks
        //    (bad magic numbers or a footer that does not point back to this header)
        if (hdr->_magicUnion.isHole) goto exit;
        if (hdr->_magicUnion.magicHole != HEAP_MAGIC || ftr->_magicUnion.magicHole != HEAP_MAGIC) goto exit;
        if (ftr->hdr != hdr) goto exit;

        HeapCheckHealth();

        // -- coalesce with free neighbors on either side
        entry = HeapMergeRight(hdr);
        HeapCheckHealth();

        // NOTE(review): this assignment overwrites HeapMergeRight()'s result before it is
        //               used -- confirm that losing the right-merge entry is intended
        entry = HeapMergeLeft(hdr);
        HeapCheckHealth();

        if (entry) hdr = entry->block;      // reset header if changed
        if (!entry) entry = hdr->entry;     // if nothing changes, get this entry

        // -- mark the (possibly merged) block as a hole and return it to the free list
        hdr->_magicUnion.isHole = ftr->_magicUnion.isHole = 1;

        if (entry) HeapAddToList(entry);    // now add to the ordered list
        else (void)HeapNewListEntry(hdr, 1);

exit:
        HeapCheckHealth();
        CLEAN_HEAP();
        SPINLOCK_RLS_RESTORE_INT(heapLock, flags);
    }
}
<|start_filename|>modules/kernel/inc/printf.h<|end_filename|>
//===================================================================================================================
//
// printf.h -- A printf()-like function to write output to the serial port
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-11 Initial 0.1.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#pragma once

#include "types.h"

//
// -- used to "turn on" kprintf() output; lives in kernel data so all code shares one flag
//    ------------------------------------------------------------------------------------
EXTERN EXPORT KERNEL_DATA
bool kPrintfEnabled;

//
// -- This function operates like printf(); takes a format string plus variadic arguments
//    and returns an int (presumably the character count, as printf does -- confirm)
//    -----------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
int kprintf(const char *fmt, ...);
<|start_filename|>platform/inc/timer.h<|end_filename|>
//===================================================================================================================
//
// timer.h -- The structures and function prototypes for the PIT timer
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
//  Note that the use of the PIT is dependent on the use of the PIC (Programmable Interrupt Controller)
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Oct-28 Initial 0.1.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#pragma once
#define __TIMER_H__
#include "types.h"
struct TimerDevice_t;
//
// -- get the platform-specific definitions
// -------------------------------------
#if __has_include("platform-timer.h")
# include "platform-timer.h"
#endif
#include "pic.h"
//
// -- This is a control structure for the timer, all functions will be registered in this structure
// ---------------------------------------------------------------------------------------------
typedef struct TimerDevice_t {
    TimerBase_t base;                                       // -- base location of the timer (platform-specific type)
    PicDevice_t *pic;                                       // -- the PIC this timer's interrupt is routed through
    archsize_t reloadValue;                                 // -- value used to re-arm the timer (see _TimerEoi)
    float factor;                                           // -- scaling factor; presumably ticks<->time conversion -- TODO confirm
    void (*TimerCallBack)(isrRegs_t *reg);                  // -- handler invoked when the timer fires
    void (*TimerInit)(struct TimerDevice_t *, uint32_t);    // -- initialize the timer to the requested frequency
    void (*TimerEoi)(struct TimerDevice_t *);               // -- acknowledge (end-of-interrupt) the timer interrupt
    void (*TimerPlatformTick)(struct TimerDevice_t *);      // -- platform-specific per-tick housekeeping
    uint64_t (*TimerCurrentCount)(struct TimerDevice_t *);  // -- read the current tick/counter value
} TimerDevice_t;
//
// -- The global timer control structure holding pointers to all the proper functions.
// --------------------------------------------------------------------------------
EXTERN KERNEL_DATA
TimerDevice_t *timerControl;
//
// -- These are the common interface functions we will use to interact with the timer. These functions are
// not safe in that they will not check for nulls before calling the function. Therefore, caller beware!
// ------------------------------------------------------------------------------------------------------
// -- dispatch to the device's init function; dev is not NULL-checked (caller beware)
EXPORT INLINE
void TimerInit(TimerDevice_t *dev, uint32_t freq) { dev->TimerInit(dev, freq); }
// -- dispatch to the device's end-of-interrupt function; dev is not NULL-checked (caller beware)
EXPORT INLINE
void TimerEoi(TimerDevice_t *dev) { dev->TimerEoi(dev); }
// -- dispatch to the device's per-tick housekeeping function; dev is not NULL-checked (caller beware)
EXPORT INLINE
void TimerPlatformTick(TimerDevice_t *dev) { dev->TimerPlatformTick(dev); }
// -- dispatch to the device's counter-read function; dev is not NULL-checked (caller beware)
EXPORT INLINE
uint64_t TimerCurrentCount(TimerDevice_t *dev) { return dev->TimerCurrentCount(dev); }
//
// -- Here are the function prototypes that the operation functions need to conform to
// --------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void TimerCallBack(isrRegs_t *reg);
EXTERN_C EXPORT KERNEL
void _TimerInit(TimerDevice_t *dev, uint32_t freq);
EXTERN_C EXPORT KERNEL
void _TimerEoi(TimerDevice_t *dev);
EXTERN_C EXPORT KERNEL
void _TimerPlatformTick(TimerDevice_t *dev);
EXTERN_C EXPORT KERNEL
uint64_t _TimerCurrentCount(TimerDevice_t *dev);
//
// -- Pick the correct Timer given what we have available
// ---------------------------------------------------
EXTERN_C EXPORT LOADER
TimerDevice_t *TimerPick(void);
<|start_filename|>modules/kernel/inc/fb.h<|end_filename|>
//===================================================================================================================
//
// fb.h -- Framebuffer functions
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Jun-13 Initial 0.1.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#pragma once
#include "types.h"
//
// -- Initialize the frame buffer
// ---------------------------
EXTERN_C EXPORT LOADER
void FrameBufferInit(void);
//
// -- Clear the frame buffer
// ----------------------
EXTERN_C EXPORT KERNEL
void FrameBufferClear(void);
//
// -- Parse an RGB color in the form '#ffffff' into an RGB color
// ----------------------------------------------------------
EXTERN_C EXPORT KERNEL
uint16_t FrameBufferParseRGB(const char *c);
//
// -- Draw a character on the screen
// ------------------------------
EXTERN_C EXPORT KERNEL
void FrameBufferDrawChar(char ch);
//
// -- Write a string to the screen
// ----------------------------
EXTERN_C EXPORT KERNEL
void FrameBufferPutS(const char *s);
//
// -- Output a hex string to the screen
// ---------------------------------
EXPORT INLINE
void FrameBufferPutHex(uint32_t val) {
    // -- emit a "0x" prefix, then all 8 nibbles of the value, most significant first
    const char *hexDigits = "0123456789abcdef";
    FrameBufferDrawChar('0');
    FrameBufferDrawChar('x');
    for (int shift = 28; shift >= 0; shift -= 4) {
        FrameBufferDrawChar(hexDigits[(val >> shift) & 0x0f]);
    }
}
<|start_filename|>modules/kernel/src/process/ProcessUnlockAndSchedule.cc<|end_filename|>
//===================================================================================================================
//
// ProcessUnlockAndSchedule.cc -- Exit a postponed schedule block and take care of any pending schedule changes
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-18 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "lists.h"
#include "timer.h"
#include "spinlock.h"
#include "process.h"
//
// -- decrease the lock count on the scheduler
// ----------------------------------------
EXPORT KERNEL
void ProcessUnlockAndSchedule(void)
{
    // -- a matching lock-and-postpone must have incremented this count first
    //    (fixed message typo: was "out if sync")
    assert_msg(AtomicRead(&scheduler.postponeCount) > 0, "postponeCount out of sync");

    if (AtomicDecAndTest0(&scheduler.postponeCount)) {
        // -- this was the outermost postpone block; honor any deferred reschedule
        if (scheduler.processChangePending) {
            scheduler.processChangePending = false;     // need to clear this to actually perform a change

#if DEBUG_ENABLED(ProcessUnlockAndSchedule)
            kprintf("Finally scheduling!\n");
#endif

            ProcessSchedule();
        }
    }

    ProcessUnlockScheduler();
}
<|start_filename|>platform/pc/timer/TimerVars.cc<|end_filename|>
//===================================================================================================================
//
// TimerVars.cc -- These are the variables for the x86 Timer
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "timer.h"
//
// -- This is the device description for the PIT
// ------------------------------------------
EXPORT KERNEL_DATA
TimerDevice_t timer8253Control = {
    .base = PC_TIMER,                           // -- the PIT's platform location
    .TimerCallBack = TimerCallBack,             // -- common tick handler
    .TimerInit = _TimerInit,
    .TimerEoi = _TimerEoi,
    .TimerPlatformTick = _TimerPlatformTick,
    .TimerCurrentCount = _TimerCurrentCount,
    // -- members not listed (pic, reloadValue, factor) are zero-initialized
};


//
// -- This is the timer controller we use for this runtime
//    ----------------------------------------------------
EXPORT KERNEL_DATA
TimerDevice_t *timerControl = &timer8253Control;    // -- on the PC this is always the 8253 PIT
<|start_filename|>platform/bcm2836/timer/TimerEoi.cc<|end_filename|>
//===================================================================================================================
//
// TimerEoi.cc -- Issue an end of interrupt for the timer
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-21 Initial 0.2.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "timer.h"
//
// -- Issue an EOI for the timer
// --------------------------
EXTERN_C EXPORT KERNEL
void _TimerEoi(TimerDevice_t *dev)
{
    // -- nothing to acknowledge without a device structure
    if (dev == nullptr) return;

    // -- re-arm the countdown and turn the timer back on
    WRITE_CNTP_TVAL(dev->reloadValue);
    WRITE_CNTP_CTL(1);
}
<|start_filename|>modules/kernel/src/msgq/MsgqInit.cc<|end_filename|>
//===================================================================================================================
//
// MsgqInit.cc -- Initialize the Message Queue global structures
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- --------------------------------------------------------------------------
// 2020-Apr-09 Initial v0.6.1a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
#include "butler.h"
#include "msgq.h"
//
// -- Initialize the global Message Queue Structures
// ----------------------------------------------
EXTERN_C EXPORT LOADER
void MessageQueueInit(void)
{
    // -- start with an empty, unlocked global message queue list
    msgqList.count = 0;
    msgqList.lock = {0};
    ListInit(&msgqList.list);

    // -- the butler's queue must exist before the first process terminates
    butlerMsgq = MessageQueueCreate();
}
<|start_filename|>arch/arm/ExceptionInit.cc<|end_filename|>
//===================================================================================================================
//
// ExceptionInit.cc -- This function will initialize the system to handle interrupts
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-27 Initial 0.2.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "printf.h"
#include "mmu.h"
#include "interrupt.h"
//
// -- This is a local prototype for a low level setup function
// --------------------------------------------------------
EXTERN_C EXPORT KERNEL
void IdtSetAddr(void);
EXTERN_C EXPORT KERNEL
uint32_t GetMode(void);
//
// -- This is the actual interrupt exception table
// --------------------------------------------
InterruptVector_t *exceptVect = (InterruptVector_t *)EXCEPT_VECTOR_TABLE;   // -- the table lives at a fixed, externally defined address
//
// -- These are the handlers that get control on an interrupt
// -------------------------------------------------------
EXTERN_C EXPORT KERNEL
void ResetTarget(void) __attribute__((noreturn));
EXTERN_C EXPORT KERNEL
void UndefinedTarget(void) __attribute__((noreturn));
EXTERN_C EXPORT KERNEL
void SuperTarget(void);
EXTERN_C EXPORT KERNEL
void PrefetchTarget(void) __attribute__((noreturn));
EXTERN_C EXPORT KERNEL
void DataAbortTarget(void) __attribute__((noreturn));
EXTERN_C EXPORT KERNEL
void IRQTarget(void);
EXTERN_C EXPORT KERNEL
void FIQTarget(void);
//
// -- Set up the Exception Vector Table
// ---------------------------------
EXTERN_C EXPORT LOADER
void ExceptionInit(void)
{
    // -- hand the vector table address to the CPU (low-level asm helper)
    IdtSetAddr();
    kprintf("Mode is: %x\n", GetMode());

    // -- every vector slot gets the same jump instruction; presumably it loads its
    //    branch address from the matching *Target word set below -- confirm against
    //    the IVEC_JUMP_ASM definition
    exceptVect->reset = IVEC_JUMP_ASM;
    exceptVect->undefined = IVEC_JUMP_ASM;
    exceptVect->supervisorCall = IVEC_JUMP_ASM;
    exceptVect->prefetchAbort = IVEC_JUMP_ASM;
    exceptVect->dataAbort = IVEC_JUMP_ASM;
    exceptVect->unused = IVEC_JUMP_ASM;
    exceptVect->irqInterrupt = IVEC_JUMP_ASM;
    exceptVect->fiqInterrupt = IVEC_JUMP_ASM;

    // -- the actual handler addresses, one per vector slot
    exceptVect->resetTarget = (archsize_t)ResetTarget;
    exceptVect->undefinedTarget = (archsize_t)UndefinedTarget;
    exceptVect->supervisorCallTarget = (archsize_t)SuperTarget;
    // -- NOTE(review): `perfetchAbortTarget` is the field's spelling in its external
    //    declaration; the typo must be fixed there, not here
    exceptVect->perfetchAbortTarget = (archsize_t)PrefetchTarget;
    exceptVect->dataAbortTarget = (archsize_t)DataAbortTarget;
    exceptVect->unusedTarget = (archsize_t)NULL;              // Never used
    exceptVect->irqInterruptTarget = (archsize_t)IRQTarget;
    exceptVect->fiqInterruptTarget = (archsize_t)FIQTarget;
}
<|start_filename|>platform/pc/interrupts/IsrHandler.cc<|end_filename|>
//===================================================================================================================
//
// IsrHandler.cc -- The common ISR handler routine
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// All ISRs are handled by a common service program. This is it. But it is currently a stub.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-May-29 Initial 0.1.0 ADCL Initial version
// 2019-Feb-09 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "printf.h"
#include "interrupt.h"
//
// -- The ISR Handler Table
// ---------------------
EXPORT KERNEL_BSS
isrFunc_t isrHandlers[256] = {NULL_ISR};    // -- one slot per vector; slot 0 set explicitly, the rest zero-initialized
//
// -- This is the common ISR Handler entry routine
// --------------------------------------------
EXTERN_C EXPORT KERNEL
void IsrHandler(isrRegs_t regs)
{
    // -- look up the handler registered for this interrupt number (single table read)
    isrFunc_t handler = isrHandlers[regs.intno];

    if (handler != NULL) {
        // -- fixed encoding corruption: `®s` was a mangled `&regs`
        handler(&regs);
    } else {
        // -- no handler registered: report the vector and halt this CPU
        kprintf("PANIC: Unhandled Interrupt #%x\n", regs.intno);
        CpuPanic("", &regs);
    }
}
<|start_filename|>platform/bcm2836/inc/platform-pic.h<|end_filename|>
//===================================================================================================================
//
// platform-pic.h -- Programmable Interrupt Controller definitions and functions for the bcm2835
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#pragma once
#ifndef __PIC_H__
# error "Use #include \"pic.h\" and it will pick up this file; do not #include this file directly."
#endif
#include "types.h"
//
// -- on x86, this is the type we use to refer to the pic port
// --------------------------------------------------------
typedef archsize_t PicBase_t;
typedef int Irq_t;
//
// -- These are the possible pic drivers for the computer
// ---------------------------------------------------
EXTERN KERNEL_DATA
struct PicDevice_t picBcm2835;
//
// -- This is the number of cores that have responded to an IPI
// ---------------------------------------------------------
EXTERN EXPORT KERNEL_BSS
AtomicInt_t mb0Resp;
//
// -- Define the pic data we need to keep track of
// --------------------------------------------
typedef struct Bcm2835Pic_t {
    GenericDevice_t base;       // -- common device header
    PicBase_t picLoc;           // -- location of the interrupt controller registers
    archsize_t timerLoc;        // -- location of the timer registers
} Bcm2835Pic_t;
//
// -- This is the handler for a mailbox ipi message
// ---------------------------------------------
typedef void (*MbHandler_t)(isrRegs_t *);
//
// -- And an array of handlers
// ------------------------
EXTERN EXPORT KERNEL_DATA
MbHandler_t mbHandlers[MAX_IPI]; // limit to 100 messages for now
//
// -- Handle messaged to mailbox 0
// ----------------------------
EXTERN_C EXPORT KERNEL
void PicMailbox0Handler(isrRegs_t *regs);
//
// -- Here are the function prototypes that the operation functions need to conform to
// --------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void _PicInit(PicDevice_t *dev, const char *name);
EXTERN_C EXPORT KERNEL
void _PicUnmaskIrq(PicDevice_t *dev, Irq_t irq);
EXTERN_C EXPORT KERNEL
void _PicMaskIrq(PicDevice_t *dev, Irq_t irq);
EXTERN_C EXPORT KERNEL
void _PicEoi(PicDevice_t *dev, Irq_t irq);
EXTERN_C EXPORT KERNEL
int _PicDetermineIrq(PicDevice_t *dev);
EXTERN_C EXPORT KERNEL
void _PicBroadcastIpi(PicDevice_t *dev, int ipi);
#define INT_IRQPEND0 (0x200) // The basic interrupt pending register
//-------------------------------------------------------------------------------------------------------------------
#define INTPND0IRQ62 (1<<20) // GPU IRQ 62
#define INTPND0IRQ57 (1<<19) // GPU IRQ 57
#define INTPND0IRQ56 (1<<18) // GPU IRQ 56
#define INTPND0IRQ55 (1<<17) // GPU IRQ 55
#define INTPND0IRQ54 (1<<16) // GPU IRQ 54
#define INTPND0IRQ53 (1<<15) // GPU IRQ 53
#define INTPND0IRQ19 (1<<14) // GPU IRQ 19
#define INTPND0IRQ18 (1<<13) // GPU IRQ 18
#define INTPND0IRQ10 (1<<12) // GPU IRQ 10
#define INTPND0IRQ9 (1<<11) // GPU IRQ 9
#define INTPND0IRQ7 (1<<10) // GPU IRQ 7
#define INTPND0IRQREG2 (1<<9) // Pending Register 0 IRQ
#define INTPND0IRQREG1 (1<<8) // Pending Register 1 IRQ
#define INTPND0IRQILL0 (1<<7) // Illegal Access type 0 IRQ
#define INTPND0IRQILL1 (1<<6) // Illegal Access type 1 IRQ
#define INTPND0IRQGPUH1 (1<<5) // GPU1 halted IRQ
#define INTPND0IRQGPUH0 (1<<4) // GPU0 halted IRQ
#define INTPND0IRQDOORB1 (1<<3) // ARM Doorbell 1
#define INTPND0IRQDOORB0 (1<<2) // ARM Doorbell 0
#define INTPND0IRQMAIL (1<<1) // ARM Mailbox IRQ
#define INTPND0IRQTIMER (1<<0) // ARM Timer IRQ
#define INT_IRQPEND1 (0x204) // IRQ pending 1
//-------------------------------------------------------------------------------------------------------------------
#define INTPND1IRQ31 (1<<31) // IRQ 31 pending
#define INTPND1IRQ30 (1<<30) // IRQ 30 pending
#define INTPND1IRQ29 (1<<29) // IRQ 29 pending
#define INTPND1IRQ28 (1<<28) // IRQ 28 pending
#define INTPND1IRQ27 (1<<27) // IRQ 27 pending
#define INTPND1IRQ26 (1<<26) // IRQ 26 pending
#define INTPND1IRQ25 (1<<25) // IRQ 25 pending
#define INTPND1IRQ24 (1<<24) // IRQ 24 pending
#define INTPND1IRQ23 (1<<23) // IRQ 23 pending
#define INTPND1IRQ22 (1<<22) // IRQ 22 pending
#define INTPND1IRQ21 (1<<21) // IRQ 21 pending
#define INTPND1IRQ20 (1<<20) // IRQ 20 pending
#define INTPND1IRQ19 (1<<19) // IRQ 19 pending
#define INTPND1IRQ18 (1<<18) // IRQ 18 pending
#define INTPND1IRQ17 (1<<17) // IRQ 17 pending
#define INTPND1IRQ16 (1<<16) // IRQ 16 pending
#define INTPND1IRQ15 (1<<15) // IRQ 15 pending
#define INTPND1IRQ14 (1<<14) // IRQ 14 pending
#define INTPND1IRQ13 (1<<13) // IRQ 13 pending
#define INTPND1IRQ12 (1<<12) // IRQ 12 pending
#define INTPND1IRQ11 (1<<11) // IRQ 11 pending
#define INTPND1IRQ10 (1<<10) // IRQ 10 pending
#define INTPND1IRQ9 (1<<9) // IRQ 9 pending
#define INTPND1IRQ8 (1<<8) // IRQ 8 pending
#define INTPND1IRQ7 (1<<7) // IRQ 7 pending
#define INTPND1IRQ6 (1<<6) // IRQ 6 pending
#define INTPND1IRQ5 (1<<5) // IRQ 5 pending
#define INTPND1IRQ4 (1<<4) // IRQ 4 pending
#define INTPND1IRQ3 (1<<3) // IRQ 3 pending
#define INTPND1IRQ2 (1<<2) // IRQ 2 pending
#define INTPND1IRQ1 (1<<1) // IRQ 1 pending
#define INTPND1IRQ0 (1<<0) // IRQ 0 pending
#define INT_IRQPEND2 (0x208) // IRQ pending 2
//-------------------------------------------------------------------------------------------------------------------
#define INTPND2IRQ63 (1<<31) // IRQ 63 pending
#define INTPND2IRQ62 (1<<30) // IRQ 62 pending
#define INTPND2IRQ61 (1<<29) // IRQ 61 pending
#define INTPND2IRQ60 (1<<28) // IRQ 60 pending
#define INTPND2IRQ59 (1<<27) // IRQ 59 pending
#define INTPND2IRQ58 (1<<26) // IRQ 58 pending
#define INTPND2IRQ57 (1<<25) // IRQ 57 pending
#define INTPND2IRQ56 (1<<24) // IRQ 56 pending
#define INTPND2IRQ55 (1<<23) // IRQ 55 pending
#define INTPND2IRQ54 (1<<22) // IRQ 54 pending
#define INTPND2IRQ53 (1<<21) // IRQ 53 pending
#define INTPND2IRQ52 (1<<20) // IRQ 52 pending
#define INTPND2IRQ51 (1<<19) // IRQ 51 pending
#define INTPND2IRQ50 (1<<18) // IRQ 50 pending
#define INTPND2IRQ49 (1<<17) // IRQ 49 pending
#define INTPND2IRQ48 (1<<16) // IRQ 48 pending
#define INTPND2IRQ47 (1<<15) // IRQ 47 pending
#define INTPND2IRQ46 (1<<14) // IRQ 46 pending
#define INTPND2IRQ45 (1<<13) // IRQ 45 pending
#define INTPND2IRQ44 (1<<12) // IRQ 44 pending
#define INTPND2IRQ43 (1<<11) // IRQ 43 pending
#define INTPND2IRQ42 (1<<10) // IRQ 42 pending
#define INTPND2IRQ41 (1<<9) // IRQ 41 pending
#define INTPND2IRQ40 (1<<8) // IRQ 40 pending
#define INTPND2IRQ39 (1<<7) // IRQ 39 pending
#define INTPND2IRQ38 (1<<6) // IRQ 38 pending
#define INTPND2IRQ37 (1<<5) // IRQ 37 pending
#define INTPND2IRQ36 (1<<4) // IRQ 36 pending
#define INTPND2IRQ35 (1<<3) // IRQ 35 pending
#define INTPND2IRQ34 (1<<2) // IRQ 34 pending
#define INTPND2IRQ33 (1<<1) // IRQ 33 pending
#define INTPND2IRQ32 (1<<0) // IRQ 32 pending
#define INT_FIQCTL (0x20c) // FIQ Control
//-------------------------------------------------------------------------------------------------------------------
#define INTFIQ_ENB (1<<7) // FIQ enable
#define INTFIQ_SRC (0x7f) // FIQ Source
#define INT_IRQENB1 (0x210) // IRQ Enable 1
//-------------------------------------------------------------------------------------------------------------------
#define INTENB1IRQ31 (1<<31) // IRQ 31 Enable
#define INTENB1IRQ30 (1<<30) // IRQ 30 Enable
#define INTENB1IRQ29 (1<<29) // IRQ 29 Enable
#define INTENB1IRQ28 (1<<28) // IRQ 28 Enable
#define INTENB1IRQ27 (1<<27) // IRQ 27 Enable
#define INTENB1IRQ26 (1<<26) // IRQ 26 Enable
#define INTENB1IRQ25 (1<<25) // IRQ 25 Enable
#define INTENB1IRQ24 (1<<24) // IRQ 24 Enable
#define INTENB1IRQ23 (1<<23) // IRQ 23 Enable
#define INTENB1IRQ22 (1<<22) // IRQ 22 Enable
#define INTENB1IRQ21 (1<<21) // IRQ 21 Enable
#define INTENB1IRQ20 (1<<20) // IRQ 20 Enable
#define INTENB1IRQ19 (1<<19) // IRQ 19 Enable
#define INTENB1IRQ18 (1<<18) // IRQ 18 Enable
#define INTENB1IRQ17 (1<<17) // IRQ 17 Enable
#define INTENB1IRQ16 (1<<16) // IRQ 16 Enable
#define INTENB1IRQ15 (1<<15) // IRQ 15 Enable
#define INTENB1IRQ14 (1<<14) // IRQ 14 Enable
#define INTENB1IRQ13 (1<<13) // IRQ 13 Enable
#define INTENB1IRQ12 (1<<12) // IRQ 12 Enable
#define INTENB1IRQ11 (1<<11) // IRQ 11 Enable
#define INTENB1IRQ10 (1<<10) // IRQ 10 Enable
#define INTENB1IRQ9 (1<<9) // IRQ 9 Enable
#define INTENB1IRQ8 (1<<8) // IRQ 8 Enable
#define INTENB1IRQ7 (1<<7) // IRQ 7 Enable
#define INTENB1IRQ6 (1<<6) // IRQ 6 Enable
#define INTENB1IRQ5 (1<<5) // IRQ 5 Enable
#define INTENB1IRQ4 (1<<4) // IRQ 4 Enable
#define INTENB1IRQ3 (1<<3) // IRQ 3 Enable
#define INTENB1IRQ2 (1<<2) // IRQ 2 Enable
#define INTENB1IRQ1 (1<<1) // IRQ 1 Enable
#define INTENB1IRQ0 (1<<0) // IRQ 0 Enable
#define INT_IRQENB2 (0x214) // IRQ Enable 2
//-------------------------------------------------------------------------------------------------------------------
#define INTENB2IRQ63 (1<<31) // IRQ 63 Enable
#define INTENB2IRQ62 (1<<30) // IRQ 62 Enable
#define INTENB2IRQ61 (1<<29) // IRQ 61 Enable
#define INTENB2IRQ60 (1<<28) // IRQ 60 Enable
#define INTENB2IRQ59 (1<<27) // IRQ 59 Enable
#define INTENB2IRQ58 (1<<26) // IRQ 58 Enable
#define INTENB2IRQ57 (1<<25) // IRQ 57 Enable
#define INTENB2IRQ56 (1<<24) // IRQ 56 Enable
#define INTENB2IRQ55 (1<<23) // IRQ 55 Enable
#define INTENB2IRQ54 (1<<22) // IRQ 54 Enable
#define INTENB2IRQ53 (1<<21) // IRQ 53 Enable
#define INTENB2IRQ52 (1<<20) // IRQ 52 Enable
#define INTENB2IRQ51 (1<<19) // IRQ 51 Enable
#define INTENB2IRQ50 (1<<18) // IRQ 50 Enable
#define INTENB2IRQ49 (1<<17) // IRQ 49 Enable
#define INTENB2IRQ48 (1<<16) // IRQ 48 Enable
#define INTENB2IRQ47 (1<<15) // IRQ 47 Enable
#define INTENB2IRQ46 (1<<14) // IRQ 46 Enable
#define INTENB2IRQ45 (1<<13) // IRQ 45 Enable
#define INTENB2IRQ44 (1<<12) // IRQ 44 Enable
#define INTENB2IRQ43 (1<<11) // IRQ 43 Enable
#define INTENB2IRQ42 (1<<10) // IRQ 42 Enable
#define INTENB2IRQ41 (1<<9) // IRQ 41 Enable
#define INTENB2IRQ40 (1<<8) // IRQ 40 Enable
#define INTENB2IRQ39 (1<<7) // IRQ 39 Enable
#define INTENB2IRQ38 (1<<6) // IRQ 38 Enable
#define INTENB2IRQ37 (1<<5) // IRQ 37 Enable
#define INTENB2IRQ36 (1<<4) // IRQ 36 Enable
#define INTENB2IRQ35 (1<<3) // IRQ 35 Enable
#define INTENB2IRQ34 (1<<2) // IRQ 34 Enable
#define INTENB2IRQ33 (1<<1) // IRQ 33 Enable
#define INTENB2IRQ32 (1<<0) // IRQ 32 Enable
#define INT_IRQENB0 (0x218) // Basic IRQ Enable
//-------------------------------------------------------------------------------------------------------------------
#define INTENB0IRQILL0 (1<<7) // Illegal Access type 0 IRQ Enable
#define INTENB0IRQILL1 (1<<6) // Illegal Access type 1 IRQ Enable
#define INTENB0IRQGPUH1 (1<<5) // GPU1 halted IRQ Enable
#define INTENB0IRQGPUH0 (1<<4) // GPU0 halted IRQ Enable
#define INTENB0IRQDOORB1 (1<<3) // ARM Doorbell 1 Enable
#define INTENB0IRQDOORB0 (1<<2) // ARM Doorbell 0 Enable
#define INTENB0IRQMAIL (1<<1) // ARM Mailbox IRQ Enable
#define INTENB0IRQTIMER (1<<0) // ARM Timer IRQ Enable
#define INT_IRQDIS1 (0x21c) // IRQ Disable 1
//-------------------------------------------------------------------------------------------------------------------
#define INTDIS1IRQ31 (1<<31) // IRQ 31 Disable
#define INTDIS1IRQ30 (1<<30) // IRQ 30 Disable
#define INTDIS1IRQ29 (1<<29) // IRQ 29 Disable
#define INTDIS1IRQ28 (1<<28) // IRQ 28 Disable
#define INTDIS1IRQ27 (1<<27) // IRQ 27 Disable
#define INTDIS1IRQ26 (1<<26) // IRQ 26 Disable
#define INTDIS1IRQ25 (1<<25) // IRQ 25 Disable
#define INTDIS1IRQ24 (1<<24) // IRQ 24 Disable
#define INTDIS1IRQ23 (1<<23) // IRQ 23 Disable
#define INTDIS1IRQ22 (1<<22) // IRQ 22 Disable
#define INTDIS1IRQ21 (1<<21) // IRQ 21 Disable
#define INTDIS1IRQ20 (1<<20) // IRQ 20 Disable
#define INTDIS1IRQ19 (1<<19) // IRQ 19 Disable
#define INTDIS1IRQ18 (1<<18) // IRQ 18 Disable
#define INTDIS1IRQ17 (1<<17) // IRQ 17 Disable
#define INTDIS1IRQ16 (1<<16) // IRQ 16 Disable
#define INTDIS1IRQ15 (1<<15) // IRQ 15 Disable
#define INTDIS1IRQ14 (1<<14) // IRQ 14 Disable
#define INTDIS1IRQ13 (1<<13) // IRQ 13 Disable
#define INTDIS1IRQ12 (1<<12) // IRQ 12 Disable
#define INTDIS1IRQ11 (1<<11) // IRQ 11 Disable
#define INTDIS1IRQ10 (1<<10) // IRQ 10 Disable
#define INTDIS1IRQ9 (1<<9) // IRQ 9 Disable
#define INTDIS1IRQ8 (1<<8) // IRQ 8 Disable
#define INTDIS1IRQ7 (1<<7) // IRQ 7 Disable
#define INTDIS1IRQ6 (1<<6) // IRQ 6 Disable
#define INTDIS1IRQ5 (1<<5) // IRQ 5 Disable
#define INTDIS1IRQ4 (1<<4) // IRQ 4 Disable
#define INTDIS1IRQ3 (1<<3) // IRQ 3 Disable
#define INTDIS1IRQ2 (1<<2) // IRQ 2 Disable
#define INTDIS1IRQ1 (1<<1) // IRQ 1 Disable
#define INTDIS1IRQ0 (1<<0) // IRQ 0 Disable
#define INT_IRQDIS2 (0x220) // IRQ Disable 2
//-------------------------------------------------------------------------------------------------------------------
#define INTDIS2IRQ63 (1<<31) // IRQ 63 Disable
#define INTDIS2IRQ62 (1<<30) // IRQ 62 Disable
#define INTDIS2IRQ61 (1<<29) // IRQ 61 Disable
#define INTDIS2IRQ60 (1<<28) // IRQ 60 Disable
#define INTDIS2IRQ59 (1<<27) // IRQ 59 Disable
#define INTDIS2IRQ58 (1<<26) // IRQ 58 Disable
#define INTDIS2IRQ57 (1<<25) // IRQ 57 Disable
#define INTDIS2IRQ56 (1<<24) // IRQ 56 Disable
#define INTDIS2IRQ55 (1<<23) // IRQ 55 Disable
#define INTDIS2IRQ54 (1<<22) // IRQ 54 Disable
#define INTDIS2IRQ53 (1<<21) // IRQ 53 Disable
#define INTDIS2IRQ52 (1<<20) // IRQ 52 Disable
#define INTDIS2IRQ51 (1<<19) // IRQ 51 Disable
#define INTDIS2IRQ50 (1<<18) // IRQ 50 Disable
#define INTDIS2IRQ49 (1<<17) // IRQ 49 Disable
#define INTDIS2IRQ48 (1<<16) // IRQ 48 Disable
#define INTDIS2IRQ47 (1<<15) // IRQ 47 Disable
#define INTDIS2IRQ46 (1<<14) // IRQ 46 Disable
#define INTDIS2IRQ45 (1<<13) // IRQ 45 Disable
#define INTDIS2IRQ44 (1<<12) // IRQ 44 Disable
#define INTDIS2IRQ43 (1<<11) // IRQ 43 Disable
#define INTDIS2IRQ42 (1<<10) // IRQ 42 Disable
#define INTDIS2IRQ41 (1<<9) // IRQ 41 Disable
#define INTDIS2IRQ40 (1<<8) // IRQ 40 Disable
#define INTDIS2IRQ39 (1<<7) // IRQ 39 Disable
#define INTDIS2IRQ38 (1<<6) // IRQ 38 Disable
#define INTDIS2IRQ37 (1<<5) // IRQ 37 Disable
#define INTDIS2IRQ36 (1<<4) // IRQ 36 Disable
#define INTDIS2IRQ35 (1<<3) // IRQ 35 Disable
#define INTDIS2IRQ34 (1<<2) // IRQ 34 Disable
#define INTDIS2IRQ33 (1<<1) // IRQ 33 Disable
#define INTDIS2IRQ32 (1<<0) // IRQ 32 Disable
#define INT_IRQDIS0 (0x224) // Basic IRQ Disable
//-------------------------------------------------------------------------------------------------------------------
#define INTDIS0IRQILL0 (1<<7) // Illegal Access type 0 IRQ Disable
#define INTDIS0IRQILL1 (1<<6) // Illegal Access type 1 IRQ Disable
#define INTDIS0IRQGPUH1 (1<<5) // GPU1 halted IRQ Disable
#define INTDIS0IRQGPUH0 (1<<4) // GPU0 halted IRQ Disable
#define INTDIS0IRQDOORB1 (1<<3) // ARM Doorbell 1 Disable
#define INTDIS0IRQDOORB0 (1<<2) // ARM Doorbell 0 Disable
#define INTDIS0IRQMAIL (1<<1) // ARM Mailbox IRQ Disable
#define INTDIS0IRQTIMER (1<<0) // ARM Timer IRQ Disable
<|start_filename|>modules/kernel/src/debugger/DebuggerEngage.cc<|end_filename|>
//===================================================================================================================
//
// DebuggerEngage.cc -- Signal the cores to stop processing until we release them
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-03 Initial v0.6.0a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "pic.h"
#include "debugger.h"
//
// -- Signal the other cores to stop and wait for confirmation that they have
// -----------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void DebuggerEngage(DbgIniCommand_t cmd)
{
    // -- disable interrupts on this core, remembering the prior state so it can
    //    presumably be restored when the debugger disengages -- TODO confirm
    debugCommunication.debuggerFlags = DisableInterrupts();

    // -- this core counts itself as engaged; the command must be published before
    //    the IPI so other cores see it when they respond
    AtomicSet(&debugCommunication.coresEngaged, 1);
    debugCommunication.command = cmd;

    // -- signal every other core...
    PicBroadcastIpi(picControl, IPI_DEBUGGER);

    // -- ...and spin until all running cores have checked in
    while (AtomicRead(&debugCommunication.coresEngaged) != cpus.cpusRunning) {}
}
<|start_filename|>modules/kernel/src/process/ProcessVars.cc<|end_filename|>
//===================================================================================================================
//
// ProcessVars.cc -- Global variables for process management
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Oct-14 Initial 0.1.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "process.h"
//
// -- This is the scheduler object
// ----------------------------
EXPORT KERNEL_DATA
Scheduler_t scheduler = {
    0,                      // nextPID
    0xffffffffffffffff,     // nextWake -- max value: nothing is sleeping yet
    false,                  // processChangePending
    0,                      // flags
    {0},                    // schedulerLockCount
    {0},                    // postponeCount
    -1,                     // lock CPU -- presumably -1 means "no CPU holds the lock"; confirm against lock code
    // -- NOTE: each queue/list below is initialized with next/prev both pointing at its own
    //    head, which is the empty state for a circular doubly-linked list
    {{&scheduler.queueOS.list, &scheduler.queueOS.list}, {0}, 0},      // the os ready queue
    {{&scheduler.queueHigh.list, &scheduler.queueHigh.list}, {0}, 0},      // the high ready queue
    {{&scheduler.queueNormal.list, &scheduler.queueNormal.list}, {0}, 0},      // the normal ready queue
    {{&scheduler.queueLow.list, &scheduler.queueLow.list}, {0}, 0},      // the low ready queue
    {{&scheduler.queueIdle.list, &scheduler.queueIdle.list}, {0}, 0},      // the idle ready queue
    {{&scheduler.listBlocked.list, &scheduler.listBlocked.list}, {0}, 0},      // the list of blocked processes
    {{&scheduler.listSleeping.list, &scheduler.listSleeping.list}, {0}, 0},      // the list of sleeping processes
    {{&scheduler.listTerminated.list, &scheduler.listTerminated.list}, {0}, 0},  // the list of terminated tasks
    {{&scheduler.globalProcesses.list, &scheduler.globalProcesses.list}, {0}, 0},  // the global process list
};

// -- coarse lock protecting the scheduler structure above
Spinlock_t schedulerLock = {0};
<|start_filename|>platform/pc/interrupts/IsrInt03.cc<|end_filename|>
//===================================================================================================================
//
// IsrInt03.cc -- This is a breakpoint
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This is the breakpoint and will be the entry into the debugger
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Oct-11 Initial 0.1.0 ADCL Initial version
// 2019-Feb-09 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "interrupt.h"
//
// -- Breakpoint Handler
// ------------------
EXTERN_C EXPORT KERNEL
void IsrInt03(isrRegs_t *regs)
{
    kprintf("\nBreakpoint\n");          // -- announce the trap
    IsrDumpState(regs);                 // -- dump the register state captured at the int3
}
<|start_filename|>arch/arm/mmu/MmuVirtToPhys.cc<|end_filename|>
//===================================================================================================================
//
// MmuVirtToPhys.cc -- Walk the page tables to convert a virtual address to a physical one
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Jan-12 Initial v0.5.0e ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "printf.h"
#include "mmu.h"
//
// -- Check for the page and unmap if it is mapped.
// ---------------------------------------------
EXTERN_C EXPORT KERNEL
archsize_t MmuVirtToPhys(void *addr)
{
    archsize_t virt = (archsize_t)addr;

    // -- locate the level-1 (section) entry for this address; each TTL1 entry covers 1MB
    Ttl1_t *ttl1Entry = &((Ttl1_t *)ARMV7_TTL1_TABLE_VADDR)[virt >> 20];
    if (ttl1Entry->fault == ARMV7_MMU_FAULT) return -1;

    // -- locate the level-2 (page) entry; each TTL2 entry covers 4KB
    Ttl2_t *ttl2Entry = &((Ttl2_t *)ARMV7_TTL2_TABLE_VADDR)[virt >> 12];
    if (ttl2Entry->fault == ARMV7_MMU_FAULT) return -1;

    // -- combine the physical frame with the page offset
    return (ttl2Entry->frame << 12) | (virt & 0xfff);
}
<|start_filename|>platform/bcm2836/pic/PicMailbox0Handler.cc<|end_filename|>
//===================================================================================================================
//
// PicMailbox0Handler.cc -- Handle a mailbox0 interrupt, decoding it and acking it
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Mar-01 Initial v0.5.0h ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "pic.h"
//
// -- Decode and handle a mailbox0 interrupt
// --------------------------------------
EXTERN_C EXPORT KERNEL
void PicMailbox0Handler(UNUSED(isrRegs_t *))
{
    // -- read the pending message number from this core's mailbox 0
    int msg = MmioRead(IPI_MAILBOX_ACK + ((thisCpu->cpuNum) * 0x10));

    AtomicInc(&mb0Resp);

    // -- only dispatch messages that fall inside the handler table; anything else
    //    is simply acknowledged below
    if (msg >= 0 && msg < MAX_IPI) {
        MbHandler_t handler = mbHandlers[msg];

        if (handler == NULL) {
            kprintf("PANIC: Unhandled Mailbox message %d\n", msg);
            CpuPanicPushRegs("");
        }

        handler(NULL);
    }

    // -- acknowledge the mailbox so the next IPI can be delivered
    MmioWrite(IPI_MAILBOX_ACK + (thisCpu->cpuNum * 0x10), 0xffffffff);
}
<|start_filename|>modules/kernel/src/process/ProcessEnd.cc<|end_filename|>
//===================================================================================================================
//
// ProcessEnd.cc -- End the current process by placing it on the Term queue and blocking
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-29 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "cpu.h"
#include "msgq.h"
#include "butler.h"
#include "process.h"
//
// -- End current process
// -------------------
EXTERN_C EXPORT KERNEL
void ProcessEnd(void)
{
    // -- take the scheduler lock and postpone rescheduling while the process is torn down
    ProcessLockAndPostpone();

    Process_t *proc = currentThread;

    // -- the process must not be on any status queue (stsQueue pointing at itself == unlinked)
    assert(proc->stsQueue.next == &proc->stsQueue);
    Enqueue(&scheduler.listTerminated, &proc->stsQueue);
    ProcessDoBlock(PROC_TERM);

    // -- send a message with the scheduler already locked (the leading '_' variant skips locking)
    _MessageQueueSend(butlerMsgq, BUTLER_CLEAN_PROCESS, 0, 0, false);

    // -- drop the lock; the pending block takes effect and another process is scheduled
    ProcessUnlockAndSchedule();
}
<|start_filename|>modules/kernel/src/msgq/MsgqReceive.cc<|end_filename|>
//===================================================================================================================
//
// MsgqReceive.cc -- Receive a message from a queue, blocking is allowed
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- --------------------------------------------------------------------------
// 2020-Apr-09 Initial v0.6.1a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
#include "process.h"
#include "msgq.h"
//
// -- Receive a message from a message queue, optionally blocking
// -----------------------------------------------------------
EXTERN_C EXPORT KERNEL
bool MessageQueueReceive(MessageQueue_t *msgq, long *type, size_t sz, void *payload, bool block)
{
    bool wait = true;           // -- assume we are going to wait
    archsize_t flags;

    do {
        // -- sequence is important, get the lock and then check if we have something
        flags = SPINLOCK_BLOCK_NO_INT(msgq->queue.lock);

        if (IsListEmpty(&msgq->queue)) {
            SPINLOCK_RLS_RESTORE_INT(msgq->queue.lock, flags);

            if (block) {
                // -- put ourselves on the waiting list and block until a message arrives
                //    (fixed: 'currentThread' was garbled by an HTML entity; lock idiom
                //    normalized to the explicit ';' form used for the queue lock above)
                flags = SPINLOCK_BLOCK_NO_INT(msgq->waiting.lock);
                Enqueue(&msgq->waiting, &currentThread->stsQueue);
                msgq->waiting.count ++;
                SPINLOCK_RLS_RESTORE_INT(msgq->waiting.lock, flags);

                ProcessBlock(PROC_MSGW);
            } else return false;
        } else {
            // -- we still hold the lock!!
            wait = false;
        }
    } while (wait);

    // -- at this point, we have the lock on the queue and we know we have something to pull
    Message_t *msg = FIND_PARENT(msgq->queue.list.next, Message_t, list);
    ListRemoveInit(&msg->list);
    msgq->queue.count --;

    // -- there is no need for the lock anymore
    SPINLOCK_RLS_RESTORE_INT(msgq->queue.lock, flags);

    // -- copy out the type and at most 'sz' bytes of payload, then free the message
    *type = msg->payload.type;
    sz = (sz < msg->payloadSize?sz:msg->payloadSize);
    if (sz) kMemMove(payload, msg->payload.data, sz);

    FREE(msg);

    return true;
}
<|start_filename|>platform/pc/inc/platform-apic.h<|end_filename|>
//===================================================================================================================
//
// platform-apic.h -- This is any of several IO APICs that may exist on the system board; typically 1 per bus
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-20 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#pragma once
#ifndef __PIC_H__
# error "Use #include \"pic.h\" and it will pick up this file; do not #include this file directly."
#endif
#include "types.h"
#include "timer.h"
//
// -- This is the data structure where we will store the values we need to manage the PIC
// -----------------------------------------------------------------------------------
typedef struct IoApicDeviceData_t {
    DeviceData_t work;                          // -- generic device data; presumably must stay first -- confirm
    archsize_t ioapicBase;                      // -- MMIO base of the IO APIC (IOREGSEL/IOWIN live here)
    archsize_t localApicBase;                   // -- MMIO base of the Local APIC registers
    archsize_t redirTableEntry[IRQ_LAST];       // -- per-IRQ redirection table register (see IoApicRedir())
} IoApicDeviceData_t;
//
// -- These are the registers that we will read and write
// ---------------------------------------------------
#define IOREGSEL (0x00)
#define IOWIN (0x10)
//
// -- These are the address offsets that are written to IOREGSEL
// ----------------------------------------------------------
#define IOAPICID (0x00)
#define IOAPICVER (0x01)
#define IOAPICARB (0x02)
// -- the IOREDTBL registers are in pairs!!
#define IOREDTBL0 (0x10)
#define IOREDTBL1 (0x12)
#define IOREDTBL2 (0x14)
#define IOREDTBL3 (0x16)
#define IOREDTBL4 (0x18)
#define IOREDTBL5 (0x1a)
#define IOREDTBL6 (0x1c)
#define IOREDTBL7 (0x1e)
#define IOREDTBL8 (0x20)
#define IOREDTBL9 (0x22)
#define IOREDTBL10 (0x24)
#define IOREDTBL11 (0x26)
#define IOREDTBL12 (0x28)
#define IOREDTBL13 (0x2a)
#define IOREDTBL14 (0x2c)
#define IOREDTBL15 (0x2e)
#define IOREDTBL16 (0x30)
#define IOREDTBL17 (0x32)
#define IOREDTBL18 (0x34)
#define IOREDTBL19 (0x36)
#define IOREDTBL20 (0x38)
#define IOREDTBL21 (0x3a)
#define IOREDTBL22 (0x3c)
#define IOREDTBL23 (0x3e)
//
// -- These are the offsets to the different Memory Mapped I/O registers for the Local APIC
// -------------------------------------------------------------------------------------
#define LAPIC_ID (0x020)
#define LAPIC_VERSION (0x030)
#define LAPIC_TPR (0x080)
#define LAPIC_APR (0x090)
#define LAPIC_PPR (0x0a0)
#define LAPIC_EOI (0x0b0)
#define LAPIC_RRD (0x0c0)
#define LAPIC_LOGICAL_DEST (0x0d0)
#define LAPIC_DEST_FMT (0x0e0)
#define LAPIC_SPURIOUS_VECT (0x0f0)
#define LAPIC_ISR_0 (0x100)
#define LAPIC_ISR_1 (0x110)
#define LAPIC_ISR_2 (0x120)
#define LAPIC_ISR_3 (0x130)
#define LAPIC_ISR_4 (0x140)
#define LAPIC_ISR_5 (0x150)
#define LAPIC_ISR_6 (0x160)
#define LAPIC_ISR_7 (0x170)
#define LAPIC_TMR_0 (0x180)
#define LAPIC_TMR_1 (0x190)
#define LAPIC_TMR_2 (0x1a0)
#define LAPIC_TMR_3 (0x1b0)
#define LAPIC_TMR_4 (0x1c0)
#define LAPIC_TMR_5 (0x1d0)
#define LAPIC_TMR_6 (0x1e0)
#define LAPIC_TMR_7 (0x1f0)
#define LAPIC_IRR_0 (0x200)
#define LAPIC_IRR_1 (0x210)
#define LAPIC_IRR_2 (0x220)
#define LAPIC_IRR_3 (0x230)
#define LAPIC_IRR_4 (0x240)
#define LAPIC_IRR_5 (0x250)
#define LAPIC_IRR_6 (0x260)
#define LAPIC_IRR_7 (0x270)
#define LAPIC_ESR (0x280)
#define LAPIC_ICR_LO (0x300)
#define LAPIC_ICR_HI (0x310)
#define LAPIC_LVT_TMR (0x320)
#define LAPIC_THERMAL (0x330)
#define LAPIC_LVT_PERF (0x340)
#define LAPIC_LVT_LINT0 (0x350)
#define LAPIC_LVT_LINT1 (0x360)
#define LAPIC_LVT_ERR (0x370)
#define LAPIC_TMRINITCNT (0x380)
#define LAPIC_TMRCURRCNT (0x390)
#define LAPIC_TMRDIV (0x3e0)
//
// -- This is the bitting for the IOREGSEL register
// ---------------------------------------------
typedef struct Ioregsel_t {
    union {
        struct {
            uint32_t address : 8;       // -- register offset to be accessed through the IOWIN window
            uint32_t reserved : 24;
        } __attribute__((packed));
        uint32_t reg;                   // -- raw 32-bit view for a single MMIO write
    };
} __attribute__((packed)) Ioregsel_t;
//
// -- this is the bitting for the IOAPICID register
// ---------------------------------------------
typedef struct Ioapicid_t {
    union {
        struct {
            uint32_t reserved : 24;
            uint32_t apicId : 4;        // -- the IO APIC's identification
            uint32_t reserved2 : 4;
        } __attribute__((packed));
        uint32_t reg;                   // -- raw 32-bit view
    };
} __attribute__((packed)) Ioapicid_t;
//
// -- this is the bitting for the IOAPICVER register
// ----------------------------------------------
typedef struct Ioapicver_t {
    union {
        struct {
            uint32_t version : 8;       // -- IO APIC implementation version
            uint32_t reserved : 8;
            uint32_t maxRedir : 8;      // -- highest redirection entry index (count - 1)
            uint32_t reserved2 : 8;
        } __attribute__((packed));
        uint32_t reg;                   // -- raw 32-bit view
    };
} __attribute__((packed)) Ioapicver_t;
//
// -- this is the bitting for the IOAPICARB register
// ----------------------------------------------
typedef struct Ioapicarb_t {
    union {
        struct {
            uint32_t reserved : 24;
            uint32_t arbId : 4;         // -- APIC bus arbitration ID
            uint32_t reserved2 : 4;
        } __attribute__((packed));
        uint32_t reg;                   // -- raw 32-bit view
    };
} __attribute__((packed)) Ioapicarb_t;
//
// -- This is the bitting for the 2 registers that make up an IOREDTBL register
// -------------------------------------------------------------------------
typedef struct Ioapicredtbl_t {
    union {
        struct {
            uint64_t intvec : 8;        // -- interrupt vector to deliver
            uint64_t delmod : 3;        // -- delivery mode (see DELMODE_* values)
            uint64_t destmod : 1;       // -- destination mode: physical vs logical
            uint64_t delivs : 1;        // -- delivery status (read-only per the datasheet)
            uint64_t intpol : 1;        // -- interrupt pin polarity
            uint64_t remoteIrr : 1;     // -- remote IRR (level-triggered bookkeeping)
            uint64_t triggerMode : 1;   // -- edge vs level trigger
            uint64_t intMask : 1;       // -- 1 == interrupt masked
            uint64_t reserved : 39;
            uint64_t dest : 8;          // -- destination APIC id (or logical set)
        } __attribute__((packed));
        struct {
            uint32_t reg0;          // when reading/writing 32-bit pairs, always do reg0 first!!
            uint32_t reg1;
        } __attribute__((packed));
        uint64_t reg;                   // -- full 64-bit view of the pair
    };
} __attribute__((packed)) Ioapicredtbl_t;
//
// -- These are the non-reserved delivery modes in the delmode field
// --------------------------------------------------------------
enum {
    DELMODE_FIXED = 0b000,          // -- deliver the vector in intvec as-is
    DELMODE_LOWEST = 0b001,         // -- lowest-priority delivery
    DELMODE_SMI = 0b010,            // -- system management interrupt
    DELMODE_NMI = 0b100,            // -- non-maskable interrupt
    DELMODE_INIT = 0b101,           // -- INIT IPI (used to reset APs)
    DELMODE_STARTUP = 0b110,        // -- STARTUP IPI (SIPI)
};
//
// -- The LocalAPIC ICR High DWORD structure
// --------------------------------------
typedef union LapicIcrHi_t {
    struct {
        uint32_t reserved : 24;
        uint32_t destination : 8;       // -- target APIC id for the IPI
    } __attribute__((packed));
    uint32_t raw;                       // -- raw 32-bit view for the MMIO write
} __attribute__((packed)) LapicIcrHi_t;
//
// -- The LocalAPIC ICR Low DWORD structure
// -------------------------------------
typedef union LapicIcrLo_t {
    struct {
        uint32_t vector : 8;                // -- vector (or startup page for SIPI)
        uint32_t deliveryMode : 3;          // -- see DELMODE_* values
        uint32_t destinationMode : 1;       // -- physical vs logical destination
        uint32_t deliveryStatus : 1;        // -- send pending flag (read-only)
        uint32_t pad1 : 1;
        uint32_t level : 1;                 // -- assert/de-assert (INIT)
        uint32_t trigger : 1;               // -- edge vs level (INIT)
        uint32_t pad2 : 2;
        uint32_t destinationShorthand : 2;  // -- self / all / all-but-self shorthand
        uint32_t pad3 : 12;
    } __attribute__((packed));
    uint32_t raw;                           // -- raw 32-bit view; writing this triggers the IPI
} __attribute__((packed)) LapicIcrLo_t;
//
// -- Here are the function prototypes that the operation functions need to conform to
// --------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void _IoApicInit(PicDevice_t *dev, const char *name);
EXTERN_C EXPORT KERNEL
isrFunc_t _IoApicRegisterHandler(PicDevice_t *, Irq_t, int, isrFunc_t);
EXTERN_C EXPORT KERNEL
void _IoApicUnmaskIrq(PicDevice_t *dev, Irq_t irq);
EXTERN_C EXPORT KERNEL
void _IoApicMaskIrq(PicDevice_t *dev, Irq_t irq);
EXTERN_C EXPORT KERNEL
void _IoApicEoi(PicDevice_t *dev, Irq_t irq);
EXTERN_C EXPORT KERNEL
void _LApicBroadcastIpi(PicDevice_t *dev, int ipi);
EXTERN_C EXPORT KERNEL
void _LApicBroadcastInit(PicDevice_t *dev, uint32_t core);
EXTERN_C EXPORT KERNEL
void _LApicBroadcastSipi(PicDevice_t *dev, uint32_t core, archsize_t addr);
//
// -- A helper function for translating an IRQ to a redir table entry
// ---------------------------------------------------------------
EXPORT INLINE
archsize_t IoApicRedir(IoApicDeviceData_t *data, Irq_t irq) { return data->redirTableEntry[irq]; }   // -- no bounds check: caller must pass irq < IRQ_LAST
//
// -- Local APIC Timer functions
// --------------------------
EXTERN_C EXPORT KERNEL
void _LApicInit(TimerDevice_t *dev, uint32_t frequency);
EXTERN_C EXPORT KERNEL
void _LApicEoi(TimerDevice_t *dev);
EXTERN_C EXPORT KERNEL
void _LApicPlatformTick(TimerDevice_t *dev);
EXTERN_C EXPORT KERNEL
uint64_t _LApicCurrentCount(TimerDevice_t *dev);
//
// -- These 2 inlines will assist in reading from/writing to the ioapic registers
// ---------------------------------------------------------------------------
EXPORT INLINE
uint32_t IoapicRead(archsize_t addr, uint32_t reg) {
    // -- select the register through IOREGSEL, then read it through the IOWIN window;
    //    two MMIO operations, so concurrent callers presumably need serialization -- confirm
    MmioWrite(addr + IOREGSEL, reg);
    return MmioRead(addr + IOWIN);
}
EXPORT INLINE
void IoapicWrite(archsize_t addr, uint32_t reg, uint32_t val) {
    // -- select the register through IOREGSEL, then write it through the IOWIN window
    MmioWrite(addr + IOREGSEL, reg);
    MmioWrite(addr + IOWIN, val);
}
<|start_filename|>modules/kernel/src/spinlock/SpinlockVars.cc<|end_filename|>
//===================================================================================================================
//
// SpinlockVars.cc -- Spinlock Global Variables
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-18 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "spinlock.h"
//
// -- This is a lock for use when initializing a stack
// ------------------------------------------------
EXPORT KERNEL_DATA
Spinlock_t mmuStackInitLock = {0};      // -- zero-initialized == unlocked

//
// -- This is a lock for use when initializing a level 2 table for the mmu
//    --------------------------------------------------------------------
EXPORT KERNEL_DATA
Spinlock_t mmuTableInitLock = {0};      // -- zero-initialized == unlocked
<|start_filename|>platform/pc/init/PicPick.cc<|end_filename|>
//===================================================================================================================
//
// PicPick.cc -- Make a decision on which PIC will be used
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-18 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "hw-disc.h"
#include "printf.h"
#include "platform.h"
#include "pic.h"
//
// -- This is the pic we are going to use
// -----------------------------------
EXPORT KERNEL_DATA
PicDevice_t *picControl = &pic8259;     // -- defaults to the legacy 8259; PicPick() may upgrade it
//
// -- Pick the best PIC we have available to us and set up to use that
// ----------------------------------------------------------------
EXTERN_C EXPORT LOADER
PicDevice_t *PicPick(void)
{
    // -- prefer an IO APIC when one was discovered; otherwise fall back on the 8259 PIC
    picControl = (GetIoapicCount() > 0) ? &ioapicDriver : &pic8259;

    //
    // -- Complete the initialization of whichever driver was selected
    //    ---------------------------
    PicInit(picControl, "PIC");

    return picControl;
}
<|start_filename|>arch/x86/mmu/MmuMapToFrame.cc<|end_filename|>
//===================================================================================================================
//
// MmuMapToFrame.cc -- Map a page to point to a physical frame
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This function will walk the current paging tables and insert tables as needed to map a virtual address or
// page to a physical frame. If the page is already mapped, it will not be replaced. Also, frame 0 is
// explicitly not allowed to be mapped. The loader takes care of this and there is no reason whatsoever why any
// other task should need to map to this frame.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-10 Initial 0.1.0 ADCL Initial version
// 2019-Feb-09 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "cpu.h"
#include "printf.h"
#include "pmm.h"
#include "pic.h"
#include "mmu.h"
#include "process.h"
//
// -- Map a page to a frame
// ---------------------
EXTERN_C EXPORT KERNEL
void MmuMapToFrame(archsize_t addr, frame_t frame, int flags)
{
#if DEBUG_ENABLED(MmuMapToFrame)
    kprintf("Mapping page %p to frame %x\n", addr, frame);
    kprintf("... Kernel: %s\n", flags&PG_KRN?"yes":"no");
    kprintf("... Device: %s\n", flags&PG_DEVICE?"yes":"no");
    kprintf("... Write.: %s\n", flags&PG_WRT?"yes":"no");
#endif

    // -- refuse to map frame 0 for security reasons; also refuse to map page 0
    if (!frame || !addr) {
        return;
    }

#if DEBUG_ENABLED(MmuMapToFrame)
    kprintf(".. %s sanity checks passed\n", __func__);
#endif

    // -- if the page directory entry is absent, build a fresh page table first
    PageEntry_t *pde = PD_ENTRY(addr);

    if (!pde->p) {
        frame_t fr = PmmAllocateFrame();
        MmuClearFrame(fr);                      // -- clear BEFORE installing so no stale entries are visible
        pde->frame = fr;
        pde->rw = X86_MMU_WRITE;                // -- directory level is permissive; page level restricts below
        pde->us = X86_MMU_USER;
        pde->p = X86_MMU_PRESENT_TRUE;
    }

    // -- NOTE(review): PT_ENTRY(addr) is presumably resolved through the recursive mapping and
    //    is only valid once the pde above is present -- confirm before reordering anything here
    PageEntry_t *pte = PT_ENTRY(addr);

    // -- an existing mapping is deliberately left alone (first mapping wins)
    if (pte->p) {
        return;
    }

    // -- finally we can map the page to the frame as requested
    pte->frame = frame;
    pte->rw = (flags & PG_WRT?X86_MMU_WRITE:X86_MMU_READ);
    pte->us = (flags & PG_KRN?X86_MMU_SUPERVISOR:X86_MMU_USER);
    pte->pcd = (flags & PG_DEVICE?X86_MMU_PCD_TRUE:X86_MMU_PCD_FALSE);      // -- cache disable for device mmio
    pte->pwt = (flags & PG_DEVICE?X86_MMU_PWT_ENABLED:X86_MMU_PWT_DISABLED);
    pte->p = X86_MMU_PRESENT_TRUE;
    // -- no TLB invalidation is performed here; the entry was previously not present

#if DEBUG_ENABLED(MmuMapToFrame)
    kprintf("... The contents of the PTE is at %p: %p\n", pte, ((*(uint32_t *)pte) & 0xffffffff));
#endif
}
<|start_filename|>modules/kernel/src/heap/HeapReleaseEntry.cc<|end_filename|>
//===================================================================================================================
//
// HeapReleaseEntry.cc -- Release an OrderedList Entry and put it back in the pool
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Release an OrderedList Entry and put it back in the pool of available entries to use.
//
// On calling, the contents of entry have the following characteristics:
// +-----------------------------------------------------------------+
// | Entry |
// +-----------------------------------------------------------------+
// | block -- points to proper header |
// | size -- the size of the block, with header/footer |
// | prev -- points to the previous (smaller) block (may be null) |
// | next -- points to the next (larger) block (may be null) |
// +-----------------------------------------------------------------+
// | block->entry -- is equal to the parm entry |
// +-----------------------------------------------------------------+
//
// On exit, the following characteristics:
// +-----------------------------------------------------------------+
// | Entry |
// +-----------------------------------------------------------------+
// | block -- zero |
// | size -- zero |
// | prev -- zero [through RemoveFromList()] |
// | next -- zero [through RemoveFromList()] |
// +-----------------------------------------------------------------+
// | block->entry -- zero |
// +-----------------------------------------------------------------+
//
// if on entry, entry->next != null, then entry->next->prev is set to entry->prev.
//
// if on entry, entry->prev != null, then entry->prev->next is set to entry->next.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-03 Initial version
// 2012-Sep-16 Leveraged from Century
// 2018-Jun-01 Initial 0.1.0 ADCL Copied this file from century32 to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
//
// -- Release an entry from the ordered list
// --------------------------------------
void HeapReleaseEntry(OrderedList_t *entry)
{
    // -- a NULL entry indicates a programming error / heap corruption
    if (!assert(entry != NULL)) HeapError("NULL entry in HeapReleaseEntry()", "");
    HeapValidateHdr(entry->block, "HeapReleaseEntry()");

    // -- if the entry is still linked anywhere (list neighbors or the block back-pointer),
    //    detach it before scrubbing its contents
    bool stillLinked = (entry->next || entry->prev || entry->block->entry);
    if (stillLinked) {
        HeapRemoveFromList(entry);
    }

    // -- scrub the entry; the back-pointer is cleared first, while entry->block is still valid
    entry->block->entry = 0;
    entry->block = 0;
    entry->size = 0;
}
<|start_filename|>modules/kernel/inc/pmm-msg.h<|end_filename|>
//===================================================================================================================
//
// pmm-msg.h -- These are the messages that the PMM will recognize
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-11 Initial 0.1.0 ADCL Initial version
//
//===================================================================================================================
#pragma once
#ifndef __PMM_H__
# error "Do not include 'pmm-msg.h' directly. It can be included through 'pmm.h'"
#endif
#include "types.h"
//
// -- These are the messages that the PMM manager will respond to
// -----------------------------------------------------------
// -- NOTE(review): meanings below are inferred from the names; confirm against the PMM handler
typedef enum {
    PMM_NOOP,           // -- no operation
    PMM_FREE_FRAME,     // -- release a single frame
    PMM_ALLOC_FRAME,    // -- allocate a single frame
    PMM_FREE_RANGE,     // -- release a contiguous range of frames
    PMM_ALLOC_RANGE,    // -- allocate a contiguous range of frames
    PMM_NEW_FRAME,
    PMM_INIT,
} PmmMessages_t;
<|start_filename|>modules/kernel/inc/loader.h<|end_filename|>
//===================================================================================================================
//
// loader.h -- These are functions that used to perform the loader functions
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Dec-16 Initial 0.5.0 ADCL Initial version
//
//===================================================================================================================
#pragma once
#include "types.h"
//
// -- this function will call the global initialization functions
// -----------------------------------------------------------
EXTERN_C EXPORT LOADER
void LoaderFunctionInit(void);
//
// -- Complete the initialization of the MMU
// --------------------------------------
EXTERN_C EXPORT LOADER
void MmuInit(void);
//
// -- This is the prototype for the loader main entry point
// -----------------------------------------------------
EXTERN_C EXPORT LOADER NORETURN
void LoaderMain(archsize_t arg0, archsize_t arg1, archsize_t arg2);
//
// -- This is the prototype function to jump into the kernel proper
// -------------------------------------------------------------
EXTERN_C EXPORT LOADER NORETURN
void JumpKernel(void (*addr)(), archsize_t stack) __attribute__((noreturn));
//
// -- This is the kernel function that will gain control (kernel entry point)
// -----------------------------------------------------------------------
EXTERN_C EXPORT KERNEL NORETURN
void kInit(void);
<|start_filename|>modules/kernel/inc/debugger.h<|end_filename|>
//===================================================================================================================
//
// debugger.h -- The header-level definitions and prototypes for the kernel debugger
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-02 Initial v0.6.0a ADCL Initial version
//
//===================================================================================================================
#pragma once
#include "types.h"
#include "printf.h"
//
// -- Some defines to make the debugger output pretty!
// ------------------------------------------------
#define ANSI_PASTE(x) #x
#define ANSI_ESC "\x1b["
#define ANSI_CLEAR ANSI_ESC "2J"
#define ANSI_SET_CURSOR(r,c) ANSI_ESC ANSI_PASTE(r) ";" ANSI_PASTE(c) "H"
#define ANSI_CURSOR_UP(x) ANSI_ESC ANSI_PASTE(x) "A"
#define ANSI_CURSOR_DOWN(x) ANSI_ESC ANSI_PASTE(x) "B"
#define ANSI_CURSOR_FORWARD(x) ANSI_ESC ANSI_PASTE(x) "C"
#define ANSI_CURSOR_BACKWARD(x) ANSI_ESC ANSI_PASTE(x) "D"
#define ANSI_CURSOR_SAVE ANSI_ESC "s"
#define ANSI_CURSOR_RESTORE ANSI_ESC "u"
#define ANSI_ERASE_LINE ANSI_ESC "K"
#define ANSI_ATTR_NORMAL ANSI_ESC "0m"
#define ANSI_ATTR_BOLD ANSI_ESC "1m"
#define ANSI_ATTR_BLINK ANSI_ESC "5m"
#define ANSI_ATTR_REVERSE ANSI_ESC "7m"
#define ANSI_FG_BLACK ANSI_ESC "30m"
#define ANSI_FG_RED ANSI_ESC "31m"
#define ANSI_FG_GREEN ANSI_ESC "32m"
#define ANSI_FG_YELLOW ANSI_ESC "33m"
#define ANSI_FG_BLUE ANSI_ESC "34m"
#define ANSI_FG_MAGENTA ANSI_ESC "35m"
#define ANSI_FG_CYAN ANSI_ESC "36m"
#define ANSI_FG_WHITE ANSI_ESC "37m"
#define ANSI_BG_BLACK ANSI_ESC "40m"
#define ANSI_BG_RED ANSI_ESC "41m"
#define ANSI_BG_GREEN ANSI_ESC "42m"
#define ANSI_BG_YELLOW ANSI_ESC "43m"
#define ANSI_BG_BLUE ANSI_ESC "44m"
#define ANSI_BG_MAGENTA ANSI_ESC "45m"
#define ANSI_BG_CYAN ANSI_ESC "46m"
#define ANSI_BG_WHITE ANSI_ESC "47m"
//
// -- These are the commands that are allowed to be sent to all the cores
// -------------------------------------------------------------------
typedef enum {
    DIPI_ENGAGE,        // -- stop the core until the debugger releases it (see DebuggerEngage())
    DIPI_TIMER,         // -- presumably a timer-related debugger request -- confirm against the IPI handler
} DbgIniCommand_t;
//
// -- The debugger command tree will be implemented as a state machine -- these are the states
// ----------------------------------------------------------------------------------------
typedef enum {
    DBG_HOME,           // -- top level of the command tree
    DBG_SCHED,          // -- scheduler submenu
    DBG_SCHED_RDY,      // -- scheduler ready-queue submenu
    DBG_SCHED_LIST,     // -- scheduler list submenu
    DBG_TIMER,          // -- timer submenu
    DBG_MSGQ,           // -- message queue submenu
} DebuggerState_t;
//
// -- The debugger commands
// ---------------------
typedef enum {
    CMD_ERROR,          // -- input did not parse to a known command
    // -- top-level branches
    CMD_SCHED,
    CMD_SHOW,
    CMD_STAT,
    CMD_RUNNING,
    // -- ready-queue selectors
    CMD_READY,
    CMD_ALL,
    CMD_OS,
    CMD_HIGH,
    CMD_NORM,
    CMD_LOW,
    CMD_IDLE,
    // -- process-list selectors
    CMD_LIST,
    CMD_BLOCKED,
    CMD_SLEEPING,
    CMD_ZOMBIE,
    CMD_HELP,
    CMD_EXIT,
    // -- timer commands
    CMD_TIMER,
    CMD_COUNTS,
    CMD_CONFIG,
    // -- message queue commands
    CMD_MSGQ,
} DebuggerCommand_t;
//
// -- The main debugger entry point
// -----------------------------
EXTERN_C EXPORT KERNEL
void DebugStart(void);
//
// -- Given the current command branch, prompt for and get the next command
// ---------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void DebugPrompt(DebuggerState_t state);
//
// -- Signal the other cores to stop and wait for confirmation that they have
// -----------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void DebuggerEngage(DbgIniCommand_t cmd);
//
// -- Release the other cores from a stopped state
// --------------------------------------------
EXTERN_C EXPORT KERNEL
void DebuggerRelease(void);
//
// -- Debug the scheduler
// -------------------
EXTERN_C EXPORT KERNEL
void DebugScheduler(void);
//
// -- Show the status of the scheduler queues
// ---------------------------------------
EXTERN_C EXPORT KERNEL
void DebugSchedulerStat(void);
//
// -- Dump the interesting values from the running processes on each CPU
// ------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void DebugSchedulerRunning(void);
//
// -- Show the status of the scheduler queues
// ---------------------------------------
EXTERN_C EXPORT KERNEL
void DebugSchedulerShow(void);
//
// -- Debug the timer
// ---------------
EXTERN_C EXPORT KERNEL
void DebugTimer(void);
//
// -- Debug the timer over all CPUs
// -----------------------------
EXTERN_C EXPORT KERNEL
void DebugTimerCounts(void);
//
// -- Dump the timer config
// ---------------------
EXTERN_C EXPORT KERNEL
void DebugTimerConfig(void);
//
// -- Debug the message queues
// ------------------------
EXTERN_C EXPORT KERNEL
void DebugMsgq(void);
//
// -- Debug the message queues
// ------------------------
EXTERN_C EXPORT KERNEL
void DebugMsgqStatus(void);
//
// -- Parse the entered command line for the next command
// ---------------------------------------------------
EXTERN_C EXPORT KERNEL
DebuggerCommand_t DebugParse(DebuggerState_t state);
//
// -- This is the current variable that identifies the current state
// --------------------------------------------------------------
EXTERN EXPORT KERNEL_BSS
DebuggerState_t debugState;
//
// -- the global command buffer
// -------------------------
EXTERN EXPORT KERNEL_BSS
char debugCommand[DEBUG_COMMAND_LEN];
//
// -- This is the structure the prompts are kept in
// ---------------------------------------------
typedef struct DebugPrompt_t {
const char *branch;
const char *allowed;
} DebugPrompt_t;
//
// -- these are the prompts for any given state
// -----------------------------------------
EXTERN EXPORT KERNEL_DATA
DebugPrompt_t dbgPrompts[];
//
// -- Pad the output with blanks from the printed position `p` out to column width `w`.
//    NOTE(review): this emits (w - p + 1) spaces because the loop runs while the
//    remainder is >= 0; nothing is printed when p already exceeds w -- confirm the
//    extra trailing space is intentional.
//    ---------------------------------------------------------------------------------
EXPORT INLINE
void DbgSpace(int w, int p) {
    for (int pad = w - p; pad >= 0; pad --) {
        kprintf(" ");
    }
}
//
// -- This structure is the communication structure between the cores when debugging
// ------------------------------------------------------------------------------
typedef struct DebugComm_t {
AtomicInt_t coresEngaged; // -- used to get the cores into the IPI for halting for a short time
archsize_t debuggerFlags; // -- flags on the core where debugger is running
DbgIniCommand_t command; // -- this is what the IPI is required to do on each core
AtomicInt_t coresResponded; // -- depending on command, may be used to indicate response provided
uint64_t timerValue[MAX_CPUS]; // -- the values for the timers for each CPU
} DebugComm_t;
//
// -- This is the actual debug communication structure
// ------------------------------------------------
EXTERN EXPORT KERNEL_BSS
DebugComm_t debugCommunication;
<|start_filename|>arch/x86/mmu/MmuIsMapped.cc<|end_filename|>
//===================================================================================================================
//
// MmuIsMapped.cc -- Determine if the address is mapped in the current address space
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-May-01 Initial 0.4.3 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "printf.h"
#include "mmu.h"
//
// -- Determine whether the given virtual address is mapped in the current address space.
//    The page table is reached through the recursive mapping at PAGE_TBL_VADDR, so the
//    page directory entry must be checked first: if no page table exists for this
//    address, touching PT_ENTRY(addr) would itself cause a page fault.
//    -----------------------------------------------------------------------------------
bool MmuIsMapped(archsize_t addr)
{
    // -- no page table present => the address cannot be mapped
    if (!PD_ENTRY(addr)->p) return false;

    return PT_ENTRY(addr)->p ? true : false;
}
<|start_filename|>modules/kernel/src/frame-buffer/FrameBufferParseRGB.cc<|end_filename|>
//===================================================================================================================
//
// FrameBufferParseRGB.cc -- Frame buffer initialization for the console
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2017-May-03 Initial 0.0.0 ADCL Initial version
// 2018-Jun-13 Initial 0.1.0 ADCL Copied this file from century (fb.c) to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "fb.h"
//
// -- an internal worker function that converts a single hex digit character to its value
//    (any character that is not a hex digit yields 0)
//    -----------------------------------------------------------------------------------
static inline uint16_t ParseHex(char c)
{
    if (c >= '0' && c <= '9') return (uint16_t)(c - '0');
    if (c >= 'a' && c <= 'f') return (uint16_t)(c - 'a' + 10);
    if (c >= 'A' && c <= 'F') return (uint16_t)(c - 'A' + 10);
    return 0;
}
//
// -- set the color code (uint16_t) to be the result of parsing the string ("#FFFFFF")
//    The input must be exactly '#' followed by 6 hex digits; the 8-bit channels are
//    packed into the RGB 5:6:5 format.  Any malformed input yields 0 (black).
// --------------------------------------------------------------------------------
uint16_t FrameBufferParseRGB(const char *c)
{
    // -- reject anything that is not exactly "#rrggbb"
    if (!c || *c != '#' || kStrLen(c) != 7) return 0;

    // -- assemble each 8-bit channel from its pair of hex digits
    uint16_t red   = (uint16_t)((ParseHex(c[1]) << 4) | ParseHex(c[2]));
    uint16_t green = (uint16_t)((ParseHex(c[3]) << 4) | ParseHex(c[4]));
    uint16_t blue  = (uint16_t)((ParseHex(c[5]) << 4) | ParseHex(c[6]));

    // -- keep 5 (red), 6 (green), and 5 (blue) significant bits and pack them
    return (uint16_t)(((red >> 3) << 11) | ((green >> 2) << 5) | (blue >> 3));
}
<|start_filename|>arch/x86/inc/arch-cpu.h<|end_filename|>
//===================================================================================================================
//
// arch-cpu.h -- This file contains the definitions for setting up the x86 32-bit CPUs
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This file contains the structures related to managing the x86 32-bit CPUs. Not included here are the mmu or
// interrupt handling structures. This is intended to be the basic structures for getting the CPU into Protected
// Mode.
//
// So, to be clear about interrupts, included here are the structures for the setup of interrupts to be taken
// (as there may be several errors that need to be handled), but not the actual handling of these interrupts.
// What I am going for here is the Descriptor Table Entries.
//
// There are several function that are also needed to be implemented for Arch-specific setup. Several of these
// will be `#define`-type macros.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-May-30 Initial 0.1.0 ADCL Copied this file from century to century-os
// 2019-Feb-09 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
//
// -- Perform the required housekeeping
// ---------------------------------
#pragma once
#ifndef __TYPES_H__
# error "Missing include 'types.h' at the top of the #include list."
#endif
#ifndef __CPU_H__
# error "Do not include 'arch-cpu.h' directly; include 'cpu.h' instead, which will pick up this file."
#endif
#include "atomic.h"
//
// -- This is the Task State-Segment structure.  The field layout is fixed by the
//    IA-32 architecture (32-bit TSS).  When hardware task switching is not used,
//    the CPU consults only esp0/ss0 (ring-0 stack on privilege transition) and
//    the I/O map base.
// ----------------------------------------
typedef struct Tss_t {
uint32_t prev_tss;      // -- selector of the previously executed task (hardware task linking)
uint32_t esp0;          // -- stack pointer loaded on a transition to ring 0
uint32_t ss0;           // -- stack segment loaded on a transition to ring 0
uint32_t esp1;          // -- ring-1 stack pointer/segment (architectural; rings 1/2 rarely used)
uint32_t ss1;
uint32_t esp2;          // -- ring-2 stack pointer/segment
uint32_t ss2;
uint32_t cr3;           // -- page-directory base for the task (hardware task-switch state)
uint32_t eip;           // -- saved instruction pointer
uint32_t eflags;        // -- saved flags register
uint32_t eax;           // -- saved general-purpose registers (eax..edi)
uint32_t ecx;
uint32_t edx;
uint32_t ebx;
uint32_t esp;
uint32_t ebp;
uint32_t esi;
uint32_t edi;
uint32_t es;            // -- saved segment registers (es..gs)
uint32_t cs;
uint32_t ss;
uint32_t ds;
uint32_t fs;
uint32_t gs;
uint32_t ldt;           // -- LDT selector for the task
uint16_t trap;          // -- bit 0 set: raise a debug exception on a switch to this task
uint16_t ioMap;         // -- offset from the TSS base to the I/O permission bitmap
uint32_t ssp;           // -- shadow stack pointer (used by CET-capable CPUs)
} __attribute__((packed)) Tss_t;
//
// -- This is the abstraction of the x86 CPU -- the per-CPU data block
// --------------------------------------
typedef struct ArchCpu_t {
COMMON_CPU_ELEMENTS;        // -- arch-neutral per-CPU fields (expanded from cpu.h)
Tss_t tss;                  // -- this CPU's Task State Segment
archsize_t gsSelector;      // -- GDT selector loaded into %gs for per-CPU access (see GS_GDT/ArchGsLoad)
archsize_t tssSelector;     // -- GDT selector for this CPU's TSS (see TSS32_GDT/ArchTssLoad)
} ArchCpu_t;
//
// -- This is a descriptor used for the GDT and LDT
// ---------------------------------------------
typedef struct Descriptor_t {
unsigned int limitLow : 16; // Low bits (15-0) of segment limit
unsigned int baseLow : 16; // Low bits (15-0) of segment base address
unsigned int baseMid : 8; // Middle bits (23-16) of segment base address
unsigned int type : 4; // Segment type (see GDT_* constants)
unsigned int s : 1; // 0 = system, 1 = application (1 for code/data)
unsigned int dpl : 2; // Descriptor Privilege Level
unsigned int p : 1; // Present (must be 1)
unsigned int limitHi : 4; // High bits (19-16) of segment limit
unsigned int avl : 1; // Unused (available for software use)
unsigned int bit64 : 1; // 1 = 64-bit segment
unsigned int db : 1; // 0 = 16-bit segment, 1 = 32-bit segment
unsigned int g : 1; // Granularity: limit scaled by 4K when set
unsigned int baseHi : 8; // High bits (31-24) of segment base address
} Descriptor_t;
//
// -- A helper macro use to define the NULL Selector
// ----------------------------------------------
#define NULL_GDT {0}
//
// -- A helper macro used to define the kernel code
// 0x00 c f 9 a 00 0000 ffff
// ---------------------------------------------
#define KCODE_GDT { \
.limitLow = 0xffff, \
.baseLow = 0, \
.baseMid = 0, \
.type = 0x0a, \
.s = 1, \
.dpl = 0, \
.p = 1, \
.limitHi = 0xf, \
.avl = 0, \
.bit64 = 0, \
.db = 1, \
.g = 1, \
.baseHi = 0, \
}
//
// -- A helper macro used to define the kernel data
// 0x00 c f 9 2 00 0000 ffff
// ---------------------------------------------
#define KDATA_GDT { \
.limitLow = 0xffff, \
.baseLow = 0, \
.baseMid = 0, \
.type = 0x02, \
.s = 1, \
.dpl = 0, \
.p = 1, \
.limitHi = 0xf, \
.avl = 0, \
.bit64 = 0, \
.db = 1, \
.g = 1, \
.baseHi = 0, \
}
//
// -- A helper macro used to define the user code segment
//    NOTE(review): the descriptor is built with .dpl = 0, identical to KCODE_GDT --
//    confirm whether user segments are intended to be DPL 3
//    0x00 c f 9 a 00 0000 ffff
// ---------------------------------------------
#define UCODE_GDT { \
.limitLow = 0xffff, \
.baseLow = 0, \
.baseMid = 0, \
.type = 0x0a, \
.s = 1, \
.dpl = 0, \
.p = 1, \
.limitHi = 0xf, \
.avl = 0, \
.bit64 = 0, \
.db = 1, \
.g = 1, \
.baseHi = 0, \
}
//
// -- A helper macro used to define the user data segment
//    NOTE(review): the descriptor is built with .dpl = 0, identical to KDATA_GDT --
//    confirm whether user segments are intended to be DPL 3
//    0x00 c f 9 2 00 0000 ffff
// ---------------------------------------------
#define UDATA_GDT { \
.limitLow = 0xffff, \
.baseLow = 0, \
.baseMid = 0, \
.type = 0x02, \
.s = 1, \
.dpl = 0, \
.p = 1, \
.limitHi = 0xf, \
.avl = 0, \
.bit64 = 0, \
.db = 1, \
.g = 1, \
.baseHi = 0, \
}
//
// -- A helper macro to define a segment selector specific to the per-cpu data for a given CPU.
// -----------------------------------------------------------------------------------------
#define GS_GDT(locn) { \
.limitLow = 7, \
.baseLow = ((locn) & 0xffff), \
.baseMid = (((locn) >> 16) & 0xff), \
.type = 0x02, \
.s = 1, \
.dpl = 0, \
.p = 1, \
.limitHi = 0, \
.avl = 0, \
.bit64 = 0, \
.db = 1, \
.g = 0, \
.baseHi = (((locn) >> 24) & 0xff), \
}
//
// -- A helper macro used to define the kernel code
// 0x00 c f 9 a 00 0000 ffff
// ---------------------------------------------
#define LCODE_GDT { \
.limitLow = 0xffff, \
.baseLow = 0, \
.baseMid = 0, \
.type = 0x0a, \
.s = 1, \
.dpl = 0, \
.p = 1, \
.limitHi = 0xf, \
.avl = 0, \
.bit64 = 0, \
.db = 1, \
.g = 1, \
.baseHi = 0, \
}
//
// -- A helper macro used to define the kernel data
// 0x00 c f 9 2 00 0000 ffff
// ---------------------------------------------
#define LDATA_GDT { \
.limitLow = 0xffff, \
.baseLow = 0, \
.baseMid = 0, \
.type = 0x02, \
.s = 1, \
.dpl = 0, \
.p = 1, \
.limitHi = 0xf, \
.avl = 0, \
.bit64 = 0, \
.db = 1, \
.g = 1, \
.baseHi = 0, \
}
//
// -- A helper macro used to define a 32-bit TSS descriptor (system segment,
//    type 0x9 = available 32-bit TSS) for the Tss_t located at address `locn`
// ---------------------------------------------
#define TSS32_GDT(locn) { \
.limitLow = ((sizeof(Tss_t) - 1) & 0xffff), \
.baseLow = ((locn) & 0xffff), \
.baseMid = (((locn) >> 16) & 0xff), \
.type = 0x9, \
.s = 0, \
.dpl = 0, \
.p = 1, \
.limitHi = (((sizeof(Tss_t) - 1) >> 16) & 0xf), \
.avl = 0, \
.bit64 = 0, \
.db = 0, \
.g = 0, \
.baseHi = (((locn) >> 24) & 0xff), \
}
//
// -- Some cute optimizations for accessing the CPU elements. The "asm(gs:0)" tells gcc that when you want
// to read this variable, it is found at the offset 0 from the start of the gs section. So the key here
// is going to be to set gs properly. This will be one during initialization.
// -----------------------------------------------------------------------------------------------------
EXTERN ArchCpu_t *thisCpu asm("%gs:0");
EXTERN struct Process_t *currentThread asm("%gs:4");
EXTERN_C EXPORT INLINE
void CurrentThreadAssign(Process_t *p) { currentThread = p; }
//
// -- Perform the architecture-specific CPU initialization
// ----------------------------------------------------
EXTERN_C EXPORT LOADER
void ArchEarlyCpuInit(void);
EXTERN_C EXPORT LOADER
void ArchLateCpuInit(int c);
//
// -- Perform some arch-specific initialization
// -----------------------------------------
EXTERN_C EXPORT LOADER
void ArchPerCpuInit(int i);
//
// -- Perform the setup for the permanent GDT
// ---------------------------------------
EXTERN_C EXPORT LOADER
void ArchGdtSetup(void);
//
// -- Perform the setup for the permanent IDT
// ---------------------------------------
EXTERN_C EXPORT LOADER
void ArchIdtSetup(void);
//
// -- Load GS from the per-cpu struct
// -------------------------------
EXTERN_C EXPORT LOADER
void ArchGsLoad(archsize_t sel);
//
// -- Load TSS from the per-cpu struct
// --------------------------------
EXTERN_C EXPORT LOADER
void ArchTssLoad(archsize_t sel);
//
// -- Arch Specific cpu location determination
// ----------------------------------------
#define ArchCpuLocation() MmioRead(LAPIC_MMIO + LAPIC_ID)
// -- TODO: relocate these constants
//
// -- This is the max IOAPICs that can be defined for this arch
// ---------------------------------------------------------
#define MAX_IOAPIC 64
//
// -- This is the natural byte alignment for this architecture
// --------------------------------------------------------
#define BYTE_ALIGNMENT 4
//
// -- This is the location of the Page Directory and Page Tables
// ----------------------------------------------------------
#define PAGE_DIR_VADDR 0xfffff000
#define PAGE_TBL_VADDR 0xffc00000
//
// -- These macros help assist with the management of the MMU mappings -- separating the address components
// into the indexes of the separate tables
// -----------------------------------------------------------------------------------------------------
#define PD_ENTRY(a) (&((PageEntry_t *)PAGE_DIR_VADDR)[(a) >> 22])
#define PT_ENTRY(a) (&((PageEntry_t *)PAGE_TBL_VADDR)[(a) >> 12])
//
// -- This is the size of the short TSS stack
// ---------------------------------------
#define TSS_STACK_SIZE 512
//
// -- These are critical CPU structure locations
// ------------------------------------------
const archsize_t TSS_ADDRESS = 0xff401080;
//
// -- Load the task register
// ----------------------
EXTERN_C EXPORT KERNEL
void Ltr(uint16_t tr);
//
// -- Change the page directory to the physical address provided
// ----------------------------------------------------------
EXTERN_C EXPORT KERNEL
void MmuSwitchPageDir(archsize_t physAddr);
//
// -- Access macros for the APIC
// --------------------------
#define APIC_BASE (0x1b)
#define READ_APIC_BASE() RDMSR(APIC_BASE)
#define WRITE_APIC_BASE(v) WRMSR(APIC_BASE,v)
//
// -- Bochs magic breakpoint
// ----------------------
#define BOCHS_BREAK __asm volatile("xchg %bx,%bx")
#define BOCHS_TOGGLE_INSTR __asm volatile("xchg %edx,%edx")
//
// -- Get the CR3 value
// -----------------
EXTERN_C EXPORT KERNEL
archsize_t GetCr3(void);
//
// -- Check if CPUID is supported
// ---------------------------
EXTERN_C EXPORT KERNEL
int CheckCpuid(void);
//
// -- Collect the CPUID information
// -----------------------------
EXTERN_C EXPORT KERNEL
void CollectCpuid(void);
//
// -- Load the GDT and set it up
// --------------------------
EXTERN_C EXPORT KERNEL
void ArchLoadGdt(void *);
//
// -- Load the IDT
// ------------
EXTERN_C EXPORT KERNEL
void ArchLoadIdt(void *);
#define ApTimerInit(t,f)
//
// -- Include the arch-specific CPU operations
// ----------------------------------------
#include "arch-cpu-ops.h"
<|start_filename|>modules/kernel/inc/heap.h<|end_filename|>
//===================================================================================================================
//
// heap.h -- Kernel Heap structures and functions
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This files contains the structures and definitions needed to manage and control the heap in Century.
//
// The basis for the design is lifted from Century32 (a 32-bit Hobby OS).
//
// There are several structures that are used and maintained with the heap management. The heap structure itself
// is nothing more than a doubly linked list of free blocks of memory. This linked list is also ordered based on
// the size of the free block of memory. Pointers are setup in the heap structure to point to blocks of certain
// sizes in an attempt to speed up the allocation and deallocation process. These pointers are at:
// * the beginning of the heap (of course)
// * >= 512 bytes
// * >= 1K bytes
// * >= 4K bytes
// * >= 16K bytes
//
// When a block of memory is requested, the size is first increased to cover the size of the header and footer as
// well as adjusted up to the allocation alignment. So, if 1 byte is requested (unlikely, but great for
// illustration purposes), the size is increased to HEAP_SMALLEST and then the size of the header (KHeapHdr_size),
// the size of the footer (KHeapFtr_size), and then aligned to the next 8 byte boundary up.
//
// Free blocks are maintained in the heap structure as an ordered list by size, from smallest to biggest. In
// addition, when the ordered list is searched for the "best fit" (that is the class of algorithm used here), if
// the adjusted request is >= 16K, then the search starts at the 16K pointer; >= 4K but < 16K, then the search
// starts at the 4K pointer; >= 1K but < 4K, then the search starts at the 1K pointer; >= 512 bytes but < 1K, then
// the search starts at the 512 bytes pointer; and, all other searches < 512 bytes are stated at the beginning.
//
// Note that if there are no memory blocks < 512 bytes, but blocks >= 512 bytes, then the beginning of the ordered
// list will point to the first block no matter the size. The rationale for this is simple: a larger block can
// always be split to fulfill a request.
//
// On the other hand, if there are no blocks >= 16K bytes is size, then the >= 16K pointer will be NULL. Again,
// the rationale is simple: we cannot add up blocks to make a 16K block, so other measures need to be taken (create
// more heap memory or return failure).
//
// Finally, the dedicated ordered list array is going to be eliminated in this implementation. Instead it will be
// included as part of the header structure. This change will allow for more than a fixed number of free blocks.
// This should also simplify the implementation as well.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jun-30 Initial version
// 2012-Sep-16 Leveraged from Century
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2018-May-30 Initial 0.1.0 ADCL Copied this file from century32 to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#pragma once
#include "types.h"
#define DEBUG_HEAP 1
//
// -- Set DEBUG_HEAP to 1 to enable debugging
// ---------------------------------------
#if RELEASE == 1
# if defined(DEBUG_HEAP)
# undef DEBUG_HEAP
# endif
#endif
#if !defined(DEBUG_HEAP)
# define DEBUG_HEAP 0
#else
# if DEBUG_HEAP != 0
# undef DEBUG_HEAP
# define DEBUG_HEAP 1
# endif
#endif
//
// -- Define some quick macros to help with managing the heap
//    -------------------------------------------------------
#define HEAP_SMALLEST 32                                    // -- smallest payload carved out of the heap
#define HEAP_MAGIC ((uint32_t)0xBAB6BADC)                   // -- sentinel stamped into every header/footer
#define HEAP_CHECK(x) (((x) & 0xfffffffe) == HEAP_MAGIC)    // -- compare ignoring the low isHole bit
// -- the smallest hole worth keeping: one header, one footer, and the smallest payload
//    (fix: the original summed sizeof(KHeapHeader_t) twice; the file header above
//    describes header + footer)
#define MIN_HOLE_SIZE (sizeof(KHeapHeader_t) + sizeof(KHeapFooter_t) + HEAP_SMALLEST)
#define HEAP_MIN_SIZE 0x00010000                            // -- initial heap size (64 KiB)
#define HEAP_SIZE_INCR HEAP_MIN_SIZE                        // -- the heap grows in 64 KiB increments
#define ORDERED_LIST_STATIC (1024)
//
// -- forward declare the OrderedList structure
// -----------------------------------------
struct OrderedList_t;
//
// -- This is the heap block header, used to manage a block of memory in the heap
// ---------------------------------------------------------------------------
typedef struct KHeapHeader_t {
union {
struct {
uint32_t isHole : 1; // == 1 if this is a hole (not used)
uint32_t magic : 31; // magic number (BAB6 BADC when bit 0 is forced to 0)
};
uint32_t magicHole; // this is the aggregate of the bit fields
} _magicUnion;
struct OrderedList_t *entry; // pointer to the OrderedList entry if hole; NULL if allocated
size_t size; // this size includes the size of the header and footer
} __attribute__((packed)) KHeapHeader_t;
//
// -- This is the heap block footer, used in conjunction with the heap header to manage the heap memory
// -------------------------------------------------------------------------------------------------
typedef struct KHeapFooter_t {
union {
struct {
uint32_t isHole : 1; // the field is the header is the one used
uint32_t magic : 31; // magic number (0xBAB6_BADC when bit 0 is forced to 0)
};
uint32_t magicHole; // this is the aggregate of the bit fields
} _magicUnion;
KHeapHeader_t *hdr; // pointer back to the header
} __attribute__((packed)) KHeapFooter_t;
//
// -- This is a compare function prototype declaration for ordering blocks
// --------------------------------------------------------------------
typedef int (*cmpFunc)(KHeapHeader_t *, KHeapHeader_t *);
//
// -- The heap is implemented as an ordered list for a best-fit implementation
// -----------------------------------------------------------------------
typedef struct OrderedList_t {
KHeapHeader_t *block; // pointer to the block of heap memory
size_t size; // the size of the memory pointed to
struct OrderedList_t *prev; // pointer to the previous entry
struct OrderedList_t *next; // pointer to the next entry
} OrderedList_t;
//
// -- This is the heap control structure, maintaining the heap integrity
// ------------------------------------------------------------------
typedef struct KHeap_t {
OrderedList_t *heapMemory; // the start of all heap memory lists < 512 bytes
OrderedList_t *heap512; // the start of heap memory >= 512 bytes
OrderedList_t *heap1K; // the start of heap memory >= 1K bytes
OrderedList_t *heap4K; // the start of heap memory >= 4K bytes
OrderedList_t *heap16K; // the start of heap memory >= 16K bytes
byte_t *strAddr; // the start address of the heap
byte_t *endAddr; // the ending address of the heap
byte_t *maxAddr; // the max address to which the heap can grow
} KHeap_t;
//
// -- Global heap variable
// --------------------
EXTERN KERNEL_DATA
KHeap_t *kHeap;
//
// -- Add an entry of available memory to the ordered list of free memory by size
// ---------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void HeapAddToList(OrderedList_t *entry);
//
// -- Align an ordered list free memory block to a page boundary,
// creating a free block ahead of the aligned block
// -----------------------------------------------------------
EXTERN_C EXPORT KERNEL
OrderedList_t *HeapAlignToPage(OrderedList_t *entry);
//
// -- Allocate memory from the heap
// ------------------------------
EXTERN_C EXPORT KERNEL
void *HeapAlloc(size_t size, bool align);
//
// -- Calculate how to adjust a block to align it to the frame
// --------------------------------------------------------
EXTERN_C EXPORT KERNEL
size_t HeapCalcPageAdjustment(OrderedList_t *entry);
//
// -- Find a hole of the appropriate size (best fit method)
// -----------------------------------------------------
EXTERN_C EXPORT KERNEL
OrderedList_t *HeapFindHole(size_t adjustedSize, bool align);
//
// -- Free a block of memory
// ----------------------
EXTERN_C EXPORT KERNEL
void HeapFree(void *mem);
//
// -- Initialize the Heap
// -------------------
EXTERN_C EXPORT LOADER
void HeapInit(void);
//
// -- Insert a newly freed block into the ordered list
// ------------------------------------------------
EXTERN_C EXPORT KERNEL
OrderedList_t *HeapNewListEntry(KHeapHeader_t *hdr, bool add);
//
// -- Merge a free block with a free block on the immediate left if it is really free
// -------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
OrderedList_t *HeapMergeLeft(KHeapHeader_t *hdr);
//
// -- Merge a free block with a free block on the immediate right if it is really free
// --------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
OrderedList_t *HeapMergeRight(KHeapHeader_t *hdr);
//
// -- Release a entry from the ordered list
// -------------------------------------
EXTERN_C EXPORT KERNEL
void HeapReleaseEntry(OrderedList_t *entry);
//
// -- Remove an entry from the list
// -----------------------------
EXTERN_C EXPORT KERNEL
void HeapRemoveFromList(OrderedList_t *entry);
//
// -- Expand the heap size (we have the heap lock)
// --------------------------------------------
EXTERN_C EXPORT KERNEL
size_t HeapExpand(void);
//
// -- Split a block into 2 blocks, creating ordered list entries for each
// -------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
KHeapHeader_t *HeapSplitAt(OrderedList_t *entry, size_t adjustToSize);
//
// -- Debugging functions to validate the header of a block
// -----------------------------------------------------
#if DEBUG_HEAP == 1
EXTERN_C EXPORT KERNEL
void HeapValidateHdr(KHeapHeader_t *hdr, const char *from);
#else
# define HeapValidateHdr(h,f) (void)0
#endif
//
// -- Debugging function to validate the heap structure itself
// --------------------------------------------------------
#if DEBUG_HEAP == 1
EXTERN_C EXPORT KERNEL
void HeapValidatePtr(const char *from);
#else
# define HeapValidatePtr(f) (void)0
#endif
//
// -- Debugging function to monitor the health of the heap
// ----------------------------------------------------
#if DEBUG_HEAP == 1
EXTERN_C EXPORT KERNEL
void HeapCheckHealth(void);
#else
# define HeapCheckHealth() (void)0
#endif
//
// -- Panic error function when the heap has a problem
// ------------------------------------------------
EXTERN_C EXPORT KERNEL NORETURN
void HeapError(const char *from, const char *desc);
//
// -- A quick macro to make coding easier and more readable
// -----------------------------------------------------
#define NEW(tp) ({tp* rv = (tp *)HeapAlloc(sizeof(tp), false); if (!rv) HeapCheckHealth(); rv;})
#define FREE(ptr) HeapFree(ptr)
//
// -- manage cache for the heap
// -------------------------
#define CLEAN_HEAP() CleanCache((archsize_t)kHeap, sizeof(KHeap_t))
#define INVALIDATE_HEAP() InvalidateCache(kHeap, sizeof(KHeap_t))
//
// -- manage cache for an entry
// -------------------------
#define CLEAN_ENTRY(ent) CleanCache((archsize_t)ent, sizeof(OrderedList_t))
#define INVALIDATE_ENTRY(ent) InvalidateCache(ent, sizeof(OrderedList_t))
<|start_filename|>arch/arm/UndefinedHandler.cc<|end_filename|>
//===================================================================================================================
//
// UndefinedHandler.cc -- Handle an undefined instruction
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Dec-01 Initial 0.2.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "cpu.h"
#include "interrupt.h"
//
// -- Handle an undefined-instruction exception: report the fault and dump the
//    saved register state to the kernel log.  Note that this handler only
//    reports -- it returns without terminating the offending task.
// -------------------------------
EXTERN_C EXPORT KERNEL
void UndefinedHandler(isrRegs_t *regs)
{
kprintf("Undefined Instruction:\n");
IsrDumpState(regs);
}
<|start_filename|>arch/arm/mmu/MmuMapToFrame.cc<|end_filename|>
//===================================================================================================================
//
// MmuMapToFrame.cc -- Map a page to point to a physical frame
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This function will walk the current paging tables and insert tables as needed to map a virtual address or
// page to a physical frame. If the page is already mapped, it will not be replaced. Also, frame 0 is
// explicitly not allowed to be mapped. The loader takes care of this and there is no reason whatsoever why any
// other task should need to map to this frame.
//
// Note that this function is called before any serial port mapping/setup is complete; therefore, not debugging
// code can exist in this function yet.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-10 Initial 0.1.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "cpu.h"
#include "printf.h"
#include "pmm.h"
#include "process.h"
#include "spinlock.h"
#include "pic.h"
#include "mmu.h"
//
// -- Helper function to create and map a new TTL2 table for the given address.
//    Allocates and clears a fresh frame, records it in the TTL2 management area,
//    and returns the frame so the CALLER can wire it into the TTL1 table.
// ----------------------------------------------------------------------------------------------
EXTERN_C HIDDEN KERNEL
frame_t MmuMakeTtl2Table(archsize_t addr, int flags)
{
//
// -- We have been asked to create a new TTL2 table.  We got here, so we know we need a frame.
//    Go get it.
// ----------------------------------------------------------------------------------------
frame_t frame = PmmAllocateFrame();
MmuClearFrame(frame);
//
// -- The next order of business is to map this into the Management table.  This needs to be done for
//    every new table, so there is nothing to check -- we know we need to do this.
//    NOTE(review): the entry cleaned below is the one for MMU_CLEAR_FRAME while the TLB
//    invalidation targets addr -- confirm this pairing is intentional.
// -----------------------------------------------------------------------------------------------
Ttl2_t *ttl2Entry = KRN_TTL2_ENTRY(MMU_CLEAR_FRAME);
WriteDCCMVAC((uint32_t)ttl2Entry);      // -- clean the data cache line (ARM DCCMVAC) holding the entry
InvalidatePage(addr);                   // -- drop any stale TLB entry for addr
ttl2Entry = KRN_TTL2_MGMT(addr);
ttl2Entry->frame = frame;
ttl2Entry->s = ARMV7_SHARABLE_TRUE;
ttl2Entry->apx = ARMV7_MMU_APX_FULL_ACCESS;
ttl2Entry->ap = ARMV7_MMU_AP_FULL_ACCESS;
ttl2Entry->tex = ARMV7_MMU_TEX_NORMAL;
ttl2Entry->c = ARMV7_MMU_CACHED;
ttl2Entry->b = ARMV7_MMU_BUFFERED;
ttl2Entry->nG = ARMV7_MMU_GLOBAL;
ttl2Entry->fault = ARMV7_MMU_DATA_PAGE;     // -- set last: a non-FAULT value marks the entry usable
WriteDCCMVAC((uint32_t)ttl2Entry);      // -- push the completed entry out of the data cache
InvalidatePage(addr);
//
// -- Note that we will not actually map this into the TTL1 table.  The calling function holds that
//    responsibility.  Therefore, the only thing left to do is to return the frame we have allocated
//    and prepared to be a TTL2 table.
// ----------------------------------------------------------------------------------------------
WriteBPIALLIS();        // -- invalidate the branch predictor (inner shareable) per ARMv7 maintenance rules
MemoryBarrier();
return frame;
}
//==================================================================================================================
//
// -- Map a page to a frame
// ---------------------
EXTERN_C EXPORT KERNEL
void MmuMapToFrame(archsize_t addr, frame_t frame, int flags)
{
    // -- refuse to map frame 0 for security reasons; address 0 is likewise rejected
    if (!frame || !addr) {
        return;
    }
    //
    // -- The first order of business is to check if we have a TTL2 table for this address.  We will know this
    //    by checking the TTL1 Entry and checking the fault field.
    //    ----------------------------------------------------------------------------------------------------
//    kprintf("Checking for TTL1 entry (%p; %x)....\n", addr, TTL1_ENTRY(addr, flags)->fault);
    if (KRN_TTL1_ENTRY(addr)->fault == ARMV7_MMU_FAULT) {
//        kprintf("TTL1 entry is not mapped to a TTL2 table; creating\n");
        frame_t ttl2 = MmuMakeTtl2Table(addr, flags);
        // -- one 4KB frame holds 4 1KB TTL2 tables, so 4 consecutive TTL1 entries are
        //    pointed at the 4 quarters of the new frame ((ttl2 << 2) + i)
        Ttl1_t *ttl1Entry = KRN_TTL1_ENTRY4(addr);
        for (int i = 0; i < 4; i ++) {
            ttl1Entry[i].ttl2 = (ttl2 << 2) + i;
            ttl1Entry[i].fault = ARMV7_MMU_TTL2;
            // -- clean the cache line for the updated entry and invalidate its translation
            WriteDCCMVAC((uint32_t)&ttl1Entry[i]);
            InvalidatePage((uint32_t)&ttl1Entry[i]);
            MemoryBarrier();
        }
    }
    //
    // -- At this point, we know we have a ttl2 table and the management entries are all set up properly.  It
    //    is just a matter of mapping the address.
    //    ---------------------------------------------------------------------------------------------------
//    kprintf("Checking for TTL2 entry....\n");
    Ttl2_t *ttl2Entry = KRN_TTL2_ENTRY(addr);
//    kprintf("ttl2Entry has been set: %p\n", ttl2Entry);
    // -- an already-mapped page is deliberately left alone (see the file header)
    if (ttl2Entry->fault != ARMV7_MMU_FAULT) return;
//    kprintf("mapping the page\n");
    WriteDCCMVAC((uint32_t)ttl2Entry);
    InvalidatePage(addr);
    // -- build the entry: device pages get strongly-ordered TEX/uncached/unbuffered attributes
    ttl2Entry->frame = frame;
    ttl2Entry->s = ARMV7_SHARABLE_TRUE;
    ttl2Entry->apx = ARMV7_MMU_APX_FULL_ACCESS;
    ttl2Entry->ap = ARMV7_MMU_AP_FULL_ACCESS;
    ttl2Entry->tex = (flags&PG_DEVICE?ARMV7_MMU_TEX_DEVICE:ARMV7_MMU_TEX_NORMAL);
    ttl2Entry->c = (flags&PG_DEVICE?ARMV7_MMU_UNCACHED:ARMV7_MMU_CACHED);
    ttl2Entry->b = (flags&PG_DEVICE?ARMV7_MMU_UNBUFFERED:ARMV7_MMU_BUFFERED);
    ttl2Entry->nG = ARMV7_MMU_GLOBAL;
    // -- NOTE(review): MmuMakeTtl2Table uses ARMV7_MMU_DATA_PAGE while this uses
    //    ARMV7_MMU_CODE_PAGE -- confirm the distinction (or lack of one) is intended
    ttl2Entry->fault = ARMV7_MMU_CODE_PAGE;
    // -- clean the cache line and invalidate the TLB entry so the new mapping takes effect
    WriteDCCMVAC((uint32_t)ttl2Entry);
    InvalidatePage(addr);
    MemoryBarrier();
}
<|start_filename|>platform/pc/acpi/AcpiFindRsdp.cc<|end_filename|>
//===================================================================================================================
//
//  AcpiFindRsdp.cc -- Find the Root System Description Pointer (RSDP), which is really a table.
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
//  The RSDP can be in one of 2 places:
// 1) The first 1K of the EBDA
// 2) Between 0xe0000 and 0xfffff
//
// In either case, the value must be on a 16-byte boundary.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-05 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "hw-disc.h"
#include "printf.h"
#include "mmu.h"
#include "hardware.h"
//
// -- check a pointer to see if it really qualifies as a RSDP
// -------------------------------------------------------
EXTERN_C HIDDEN LOADER
bool IsRsdp(RSDP_t *rsdp)
{
    // -- a NULL pointer can never be a valid RSDP
    if (!rsdp) return false;

    // -- the structure is valid when the first 20 bytes sum to 0 modulo 256
    const uint8_t *bytes = (const uint8_t *)rsdp;
    uint8_t sum = 0;

    for (int remaining = 20; remaining > 0; remaining --) {
        sum += *bytes ++;
    }

    return sum == 0;
}
//
// -- Locate the Root System Description Table
// ----------------------------------------
EXTERN_C EXPORT LOADER
RSDP_t * AcpiFindRsdp(void)
{
    archsize_t wrk = GetEbda() & ~0x000f;       // EBDA base aligned down to a 16-byte boundary
    archsize_t end = wrk + 1024;                // only the first 1KB of the EBDA is searched
    archsize_t pg = wrk & ~0xfff;               // the page containing the search window
    RSDP_t *rsdp;
    RSDP_t *rv = NULL;

    //
    // -- Search 1: the first 1KB of the EBDA, when the BIOS reported one.  The page is mapped
    //    only for the duration of the search.  (The original code mapped/unmapped page 0 even
    //    when no EBDA was reported; the map/unmap pair is now guarded by the same check.)
    //    NOTE(review): if the 1KB window crosses a page boundary only the first page is mapped
    //    -- this matches the original behavior; confirm the EBDA never straddles a page.
    //    -------------------------------------------------------------------------------------
    if (wrk != 0) {
        MmuMapToFrame(pg, pg >> 12, PG_KRN);

        while (wrk < end) {
            rsdp = (RSDP_t *)wrk;

            if (rsdp->lSignature == RSDP_SIG && IsRsdp(rsdp)) {
                kprintf("RSDP found at address %p\n", wrk);
                SetRsdp(wrk);
                MmuUnmapPage(pg);
                return rsdp;        // NOTE(review): pointer into a page just unmapped -- original behavior
            }

            wrk += 16;
        }

        MmuUnmapPage(pg);
    }

    //
    // -- Search 2: the BIOS area from 0xe0000 to 0xfffff, again on 16-byte boundaries.
    //    Map the whole range up front, search it, then unmap the whole range.
    //    ------------------------------------------------------------------------------
    wrk = 0xe0000;
    end = 0xfffff;

    for (pg = wrk; pg < end; pg += PAGE_SIZE) {
        MmuMapToFrame(pg, pg >> 12, PG_KRN);
    }

    while (wrk < end) {
        rsdp = (RSDP_t *)wrk;

        if (rsdp->lSignature == RSDP_SIG && IsRsdp(rsdp)) {
            kprintf("RSDP found at address %p\n", wrk);
            kprintf(".. Version %x\n", rsdp->revision);
            kprintf(".. Rsdt Address %p\n", rsdp->rsdtAddress);
            kprintf(".. Xsdt Address %p\n", rsdp->xsdtAddress);
            SetRsdp(wrk);
            rv = rsdp;
            break;
        }

        wrk += 16;
    }

    // -- unmap everything mapped above; the original started this loop at `wrk` (the point
    //    where the search stopped), leaking every mapping below it
    for (pg = 0xe0000; pg < end; pg += PAGE_SIZE) {
        MmuUnmapPage(pg);
    }

    return rv;
}
<|start_filename|>modules/kernel/inc/hw-disc.h<|end_filename|>
//===================================================================================================================
//
// hw-disc.h -- Locally formatted copies of the multiboot information
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Jun-03 Initial 0.1.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#pragma once
#include "types.h"
#include "cpu.h"
//
// -- Since this is all statically allocated, set some reasonable limits
// ------------------------------------------------------------------
#define NUM_MMAP_ENTRIES (25)
#define MAX_MODULES (10)
#define MODULE_IDENT_LEN (12)
//
// -- The values for the frame buffer type
// ------------------------------------
typedef enum {
    FB_PALLET = 0,          // palette/indexed color (spelling kept from the original source)
    FB_RGB = 1,             // direct RGB pixels
    FB_EGA = 2,             // EGA text mode
} FrameBufferType;          // NOTE(review): values appear to mirror the Multiboot framebuffer types -- confirm
//
// -- This is the structure that will hold loaded module info
// -------------------------------------------------------
typedef struct Module_t {
    uint32_t modStart;                      // module start address (narrowed from the uint64_t passed to AddModule)
    uint32_t modEnd;                        // module end address (narrowed likewise)
    uint32_t modHdrSize;                    // size of the module header (see SetModuleHdrSize)
    // archsize_t cr3;
    archsize_t entry;                       // module entry point (see SetModuleEntry)
    char modIdent[MODULE_IDENT_LEN];        // this will point to at-risk memory
} Module_t;
//
// -- This is the structure that will hold the memory map data
// --------------------------------------------------------
typedef struct MMap_t {
    uint64_t baseAddr;      // physical base address of an available memory region
    uint64_t length;        // length of the region in bytes
} MMap_t;
//
// -- This structure will hold a stage-3 loader local copy of the provided multiboot information.
// -------------------------------------------------------------------------------------------
typedef struct HardwareDiscovery_t {
    //
    // -- The BIOS information
    //    --------------------
    archsize_t ebdaLocation;        // Extended BIOS Data Area address (0 == none reported)
    uint16_t com1;                  // serial port addresses (0 == not present)
    uint16_t com2;
    uint16_t com3;
    uint16_t com4;
    uint16_t lpt1;                  // parallel port addresses (0 == not present)
    uint16_t lpt2;
    uint16_t lpt3;
    uint16_t videoPort;             // video port address (0 == not present)
    //
    // -- the memory limit information
    //    ----------------------------
    bool memLimitsAvail;            // set true by SetAvailLowerMem()/SetAvailUpperMem()
    uint32_t availLowerMem;
    uint32_t availUpperMem;
    uint64_t upperMemLimit;
    //
    // -- the memory map information
    //    --------------------------
    bool memMapAvail;               // set true by AddAvailMem()
    int memMapCount;                // number of valid entries in mmap[]
    MMap_t mmap[NUM_MMAP_ENTRIES];
    //
    // -- the module information
    //    ----------------------
    bool modAvail;                  // set true by AddModule()
    int modCount;                   // number of valid entries in mods[]
    Module_t mods[MAX_MODULES];
    frame_t modHighestFrame;        // highest frame occupied by any module (see UpdateModHighestFrame)
    //
    // -- the Physical Memory Manager location and other relevant info
    //    ------------------------------------------------------------
    uint32_t *pmmBitmap;
    size_t pmmFrameCount;
    //
    // -- FrameBufferInformation
    //    ----------------------
    // -- Frame Buffer Info
    bool frameBufferAvail;          // set true by SetFrameBufferAddr()
    uint16_t *fbAddr;
    uint32_t fbPitch;
    uint32_t fbWidth;
    uint32_t fbHeight;
    uint8_t fbBpp;
    FrameBufferType fbType;
    //---------------------------------
    //
    // -- The console properties; which will be also passed to the kernel
    //    ---------------------------------------------------------------
    uint16_t bgColor;
    uint16_t fgColor;
    uint16_t rowPos;
    uint16_t colPos;
    //
    // -- The location of the ACPI tables
    //    -------------------------------
    archsize_t rsdp;
    //
    // -- CPUID Data -- raw register values captured per leaf
    //    (NOTE(review): leaf 08h is not captured -- confirm intentional)
    //    ----------
    bool cpuidSupported;
    uint32_t cpuid00eax;
    uint32_t cpuid00ebx;
    uint32_t cpuid00ecx;
    uint32_t cpuid00edx;
    uint32_t cpuid01eax;
    uint32_t cpuid01ebx;
    uint32_t cpuid01ecx;
    uint32_t cpuid01edx;
    uint32_t cpuid02eax;
    uint32_t cpuid02ebx;
    uint32_t cpuid02ecx;
    uint32_t cpuid02edx;
    uint32_t cpuid03eax;
    uint32_t cpuid03ebx;
    uint32_t cpuid03ecx;
    uint32_t cpuid03edx;
    uint32_t cpuid04eax;
    uint32_t cpuid04ebx;
    uint32_t cpuid04ecx;
    uint32_t cpuid04edx;
    uint32_t cpuid05eax;
    uint32_t cpuid05ebx;
    uint32_t cpuid05ecx;
    uint32_t cpuid05edx;
    uint32_t cpuid06eax;
    uint32_t cpuid06ebx;
    uint32_t cpuid06ecx;
    uint32_t cpuid06edx;
    uint32_t cpuid07eax;
    uint32_t cpuid07ebx;
    uint32_t cpuid07ecx;
    uint32_t cpuid07edx;
    uint32_t cpuid09eax;
    uint32_t cpuid09ebx;
    uint32_t cpuid09ecx;
    uint32_t cpuid09edx;
    uint32_t cpuid0aeax;
    uint32_t cpuid0aebx;
    uint32_t cpuid0aecx;
    uint32_t cpuid0aedx;
    uint32_t cpuid0beax;
    uint32_t cpuid0bebx;
    uint32_t cpuid0becx;
    uint32_t cpuid0bedx;
    //
    // -- Local APIC info
    //    ---------------
    size_t lapicCount;              // number of local APICs discovered (see IncLocalApic)
    //
    // -- IO APIC Address and Count
    //    -------------------------
    int ioApicCount;                // number of valid entries in ioApic[]
    struct {
        archsize_t addr;            // IO APIC base address
        int gsiBase;                // first Global System Interrupt handled by this IO APIC
    } ioApic[MAX_IOAPIC];
} HardwareDiscovery_t __attribute__((aligned(4096)));
//
// -- We will work with a local copy and then copy the ending structure to its final location
// ---------------------------------------------------------------------------------------
EXTERN EXPORT LOADER_DATA
HardwareDiscovery_t *localHwDisc;
//
// -- A compile time sanity check -- if this throws an error, the structure will not fit in a page and will cause
// problems for both the loader and the kernel. The fix will be to move some things around in memory and make
// room for the larger structure
// -----------------------------------------------------------------------------------------------------------
static_assert(sizeof(HardwareDiscovery_t) <= 4096, \
"The size of the Hardware Discovery stucture is more than 1 page long. Something must be done...");
//
// -- Hardware discovery function to collect the hardware inventory
// -------------------------------------------------------------
EXTERN_C EXPORT LOADER
void HwDiscovery(void);
//
// -- Read the MB1 structures and place the important bits in the localHwDisc structure
// ---------------------------------------------------------------------------------
EXTERN_C EXPORT LOADER
void Mb1Parse(void);
//
// -- Read the MB2 structures and place the important bits in the localHwDisc structure
// ---------------------------------------------------------------------------------
EXTERN_C EXPORT LOADER
void Mb2Parse(void);
//
// -- BIOS Data Area
// --------------
// -- availability checks: a port is considered present when its recorded address is non-zero
EXPORT LOADER INLINE
bool IsEbdaAvail(void) { return localHwDisc->ebdaLocation != 0; }
EXPORT LOADER INLINE
bool IsCom1Avail(void) { return localHwDisc->com1 != 0; }
EXPORT LOADER INLINE
bool IsCom2Avail(void) { return localHwDisc->com2 != 0; }
EXPORT LOADER INLINE
bool IsCom3Avail(void) { return localHwDisc->com3 != 0; }
EXPORT LOADER INLINE
bool IsCom4Avail(void) { return localHwDisc->com4 != 0; }
EXPORT LOADER INLINE
bool IsLpt1Avail(void) { return localHwDisc->lpt1 != 0; }
EXPORT LOADER INLINE
bool IsLpt2Avail(void) { return localHwDisc->lpt2 != 0; }
EXPORT LOADER INLINE
bool IsLpt3Avail(void) { return localHwDisc->lpt3 != 0; }
EXPORT LOADER INLINE
bool IsVideoAvail(void) { return localHwDisc->videoPort != 0; }
// -- simple get/set pairs over the BIOS data captured in localHwDisc
EXPORT LOADER INLINE
void SetEbda(archsize_t e) { localHwDisc->ebdaLocation = e; }
EXPORT LOADER INLINE
archsize_t GetEbda(void) { return localHwDisc->ebdaLocation; }
EXPORT LOADER INLINE
void SetCom1(devaddr_t p) { localHwDisc->com1 = p; }
EXPORT LOADER INLINE
devaddr_t GetCom1(void) { return localHwDisc->com1; }
EXPORT LOADER INLINE
void SetCom2(devaddr_t p) { localHwDisc->com2 = p; }
EXPORT LOADER INLINE
devaddr_t GetCom2(void) { return localHwDisc->com2; }
EXPORT LOADER INLINE
void SetCom3(devaddr_t p) { localHwDisc->com3 = p; }
EXPORT LOADER INLINE
devaddr_t GetCom3(void) { return localHwDisc->com3; }
EXPORT LOADER INLINE
void SetCom4(devaddr_t p) { localHwDisc->com4 = p; }
EXPORT LOADER INLINE
devaddr_t GetCom4(void) { return localHwDisc->com4; }
EXPORT LOADER INLINE
void SetLpt1(devaddr_t p) { localHwDisc->lpt1 = p; }
EXPORT LOADER INLINE
devaddr_t GetLpt1(void) { return localHwDisc->lpt1; }
EXPORT LOADER INLINE
void SetLpt2(devaddr_t p) { localHwDisc->lpt2 = p; }
EXPORT LOADER INLINE
devaddr_t GetLpt2(void) { return localHwDisc->lpt2; }
EXPORT LOADER INLINE
void SetLpt3(devaddr_t p) { localHwDisc->lpt3 = p; }
EXPORT LOADER INLINE
devaddr_t GetLpt3(void) { return localHwDisc->lpt3; }
EXPORT LOADER INLINE
void SetVideo(uint16_t p) { localHwDisc->videoPort = p; }
EXPORT LOADER INLINE
uint16_t GetVideo(void) { return localHwDisc->videoPort; }
//
// -- Basic memory limits (where flag 0 is set)
// -----------------------------------------
EXPORT LOADER INLINE
bool AreMemLimitsAvail(void) { return localHwDisc->memLimitsAvail; }
// -- either setter marks the limits as available as a side effect
EXPORT LOADER INLINE
void SetAvailLowerMem(uint32_t l) { localHwDisc->availLowerMem = l; localHwDisc->memLimitsAvail = true; }
EXPORT LOADER INLINE
uint32_t GetAvailLowerMem(void) { return localHwDisc->availLowerMem; }
EXPORT LOADER INLINE
void SetAvailUpperMem(uint32_t l) { localHwDisc->availUpperMem = l; localHwDisc->memLimitsAvail = true; }
EXPORT LOADER INLINE
uint32_t GetAvailUpperMem(void) { return localHwDisc->availUpperMem; }
EXPORT LOADER INLINE
void SetUpperMemLimit(uint64_t l) { localHwDisc->upperMemLimit = l; }
EXPORT LOADER INLINE
uint64_t GetUpperMemLimit(void) { return localHwDisc->upperMemLimit; }
//
// -- Module Data
// -----------
EXPORT LOADER INLINE
bool HaveModData(void) { return localHwDisc->modAvail; }
EXPORT LOADER INLINE
int GetModCount(void) { return localHwDisc->modCount; }
// -- NOTE: "Hightest" typo is kept -- renaming would break existing callers
EXPORT LOADER INLINE
frame_t GetModHightestFrame(void) { return localHwDisc->modHighestFrame; }
// -- track the highest frame any module occupies (monotonically increasing)
EXPORT LOADER INLINE
void UpdateModHighestFrame(frame_t frame) {
    if (frame > localHwDisc->modHighestFrame) localHwDisc->modHighestFrame = frame;
}
// -- Record a loaded module: start/end addresses and its identifier string.
//    NOTE: modStart/modEnd are uint32_t fields, so the 64-bit arguments are narrowed here.
//    NOTE(review): kStrCpy into modIdent[MODULE_IDENT_LEN] has no visible length limit -- confirm
//    ident is always shorter than MODULE_IDENT_LEN.
EXPORT LOADER INLINE
void AddModule(uint64_t at, uint64_t end, char *ident) {
    // -- guard the statically-sized array; modules beyond the limit are silently dropped
    if (localHwDisc->modCount >= MAX_MODULES) return;
    localHwDisc->mods[localHwDisc->modCount].modStart = at;
    localHwDisc->mods[localHwDisc->modCount].modEnd = end;
    kStrCpy(localHwDisc->mods[localHwDisc->modCount ++].modIdent, ident);
    UpdateModHighestFrame(end >> 12);       // end >> 12 == frame containing the last byte
    localHwDisc->modAvail = true;
}
EXPORT LOADER INLINE
void SetModuleHdrSize(int i, size_t s) { localHwDisc->mods[i].modHdrSize = s; }
//EXPORT LOADER INLINE
//void SetModuleCr3(int i, archsize_t cr3) { localHwDisc->mods[i].cr3 = cr3; }
EXPORT LOADER INLINE
void SetModuleEntry(int i, archsize_t entry) { localHwDisc->mods[i].entry = entry; }
// -- the stored fields are uint32_t; these getters widen back to uint64_t
EXPORT LOADER INLINE
uint64_t GetAvailModuleStart(int i) { return localHwDisc->mods[i].modStart; }
EXPORT LOADER INLINE
uint64_t GetAvailModuleEnd(int i) { return localHwDisc->mods[i].modEnd; }
EXPORT LOADER INLINE
char *GetAvailModuleIdent(int i) { return localHwDisc->mods[i].modIdent; }
//
// -- Memory Map data
// ---------------
EXPORT LOADER INLINE
bool HaveMMapData(void) { return localHwDisc->memMapAvail; }
EXPORT LOADER INLINE
int GetMMapEntryCount(void) { return localHwDisc->memMapCount; }
// -- Record an available physical memory region reported by the boot loader.
EXPORT LOADER INLINE
void AddAvailMem(uint64_t at, uint64_t len) {
    // -- guard the statically-sized array; regions beyond the limit are silently dropped
    if (localHwDisc->memMapCount >= NUM_MMAP_ENTRIES) return;
    localHwDisc->mmap[localHwDisc->memMapCount].baseAddr = at;
    localHwDisc->mmap[localHwDisc->memMapCount ++].length = len;
    localHwDisc->memMapAvail = true;
}
EXPORT LOADER INLINE
uint64_t GetAvailMemStart(int i) { return localHwDisc->mmap[i].baseAddr; }
EXPORT LOADER INLINE
uint64_t GetAvailMemLength(int i) { return localHwDisc->mmap[i].length; }
//
// -- Physical Memory Manager Bitmap location
//    ---------------------------------------
EXPORT LOADER INLINE
void SetPmmBitmap(uint32_t *l) { localHwDisc->pmmBitmap = l; }
EXPORT LOADER INLINE
uint32_t *GetPmmBitmap(void) { return localHwDisc->pmmBitmap; }
EXPORT LOADER INLINE
void SetPmmFrameCount(size_t c) { localHwDisc->pmmFrameCount = c; }
EXPORT LOADER INLINE
size_t GetPmmFrameCount(void) { return localHwDisc->pmmFrameCount; }
//
// -- Frame Buffer Management & Screen output management
// --------------------------------------------------
EXPORT LOADER INLINE
bool IsFrameBufferAvail(void) { return localHwDisc->frameBufferAvail; }
// -- setting the address is what marks the frame buffer as available
EXPORT LOADER INLINE
void SetFrameBufferAddr(uint16_t *a) { localHwDisc->fbAddr = a; localHwDisc->frameBufferAvail = true; }
EXPORT LOADER INLINE
uint16_t *GetFrameBufferAddr(void) { return localHwDisc->fbAddr; }
EXPORT LOADER INLINE
void SetFrameBufferPitch(uint32_t p) { localHwDisc->fbPitch = p; }
EXPORT LOADER INLINE
uint32_t GetFrameBufferPitch(void) { return localHwDisc->fbPitch; }
EXPORT LOADER INLINE
void SetFrameBufferWidth(uint32_t w) { localHwDisc->fbWidth = w; }
EXPORT LOADER INLINE
uint32_t GetFrameBufferWidth(void) { return localHwDisc->fbWidth; }
EXPORT LOADER INLINE
void SetFrameBufferHeight(uint32_t h) { localHwDisc->fbHeight = h; }
EXPORT LOADER INLINE
uint32_t GetFrameBufferHeight(void) { return localHwDisc->fbHeight; }
EXPORT LOADER INLINE
void SetFrameBufferBpp(uint8_t b) { localHwDisc->fbBpp = b; }
EXPORT LOADER INLINE
uint8_t GetFrameBufferBpp(void) { return localHwDisc->fbBpp; }
EXPORT LOADER INLINE
void SetFrameBufferType(FrameBufferType t) { localHwDisc->fbType = t; }
EXPORT LOADER INLINE
FrameBufferType GetFrameBufferType(void) { return localHwDisc->fbType; }
//
// -- Console properties that are passed to the kernel
// ------------------------------------------------
EXPORT LOADER INLINE
void SetBgColor(uint16_t c) { localHwDisc->bgColor = c; }
EXPORT LOADER INLINE
uint16_t GetBgColor(void) { return localHwDisc->bgColor; }
EXPORT LOADER INLINE
void SetFgColor(uint16_t c) { localHwDisc->fgColor = c; }
EXPORT LOADER INLINE
uint16_t GetFgColor(void) { return localHwDisc->fgColor; }
EXPORT LOADER INLINE
void SetColPos(uint16_t p) { localHwDisc->colPos = p; }
EXPORT LOADER INLINE
uint16_t GetColPos(void) { return localHwDisc->colPos; }
EXPORT LOADER INLINE
void SetRowPos(uint16_t p) { localHwDisc->rowPos = p; }
EXPORT LOADER INLINE
uint16_t GetRowPos(void) { return localHwDisc->rowPos; }
//
// -- access to the rsdp member
// -------------------------
EXPORT LOADER INLINE
void SetRsdp(archsize_t p) { localHwDisc->rsdp = p; }
EXPORT LOADER INLINE
archsize_t GetRsdp(void) { return localHwDisc->rsdp; }
//
// -- access to cpuid support
//    -----------------------
EXPORT LOADER INLINE
void SetCpuid(bool c) { localHwDisc->cpuidSupported = c; }
EXPORT LOADER INLINE
bool GetCpuid(void) { return localHwDisc->cpuidSupported; }
// -- tests bit 9 of the captured CPUID.01h:EDX value (the on-chip APIC flag, per the name)
EXPORT LOADER INLINE
bool HasLocalApic(void) { return !!(localHwDisc->cpuid01edx & (1<<9)); }
//
// -- access to the local APIC fields
// -------------------------------
EXPORT LOADER INLINE
void IncLocalApic(void) { localHwDisc->lapicCount ++; }
EXPORT LOADER INLINE
size_t GetLocalApicCount(void) { return localHwDisc->lapicCount; }
//
// -- access to the IO APIC
// ---------------------
// -- Record a discovered IO APIC (base address and first GSI it services).
EXPORT LOADER INLINE
void AddIoapic(archsize_t addr, int gsi) {
    // -- guard the statically-sized array; IO APICs beyond the limit are silently dropped
    if (localHwDisc->ioApicCount >= MAX_IOAPIC) return;
    localHwDisc->ioApic[localHwDisc->ioApicCount ++] = { addr, gsi};
}
EXPORT LOADER INLINE
int GetIoapicCount(void) { return localHwDisc->ioApicCount; }
EXPORT LOADER INLINE
archsize_t GetIoapicAddr(int i) { return localHwDisc->ioApic[i].addr; }
EXPORT LOADER INLINE
int GetIoapicGsi(int i) { return localHwDisc->ioApic[i].gsiBase; }
<|start_filename|>modules/kernel/src/debugger/DebugSchedulerShow.cc<|end_filename|>
//===================================================================================================================
//
// DebugSchedulerShow.cc -- List all running processes
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-07 Initial v0.6.0a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "process.h"
#include "printf.h"
#include "debugger.h"
//
// -- Print the details of this process
// ---------------------------------
EXTERN_C HIDDEN KERNEL
void PrintProcessRow(Process_t *proc)
{
    // -- column widths passed to DbgSpace line up with the table borders
    //    printed by DebugSchedulerShow (25/8/10/10/12/20 characters)
    kprintf("| " ANSI_ATTR_BOLD);
    DbgSpace(25, kprintf("%s ", proc->command));
    kprintf(ANSI_ATTR_NORMAL);
    DbgSpace( 8, kprintf("| %d ", proc->pid));
    DbgSpace(10, kprintf("| %s ", ProcPriorityStr(proc->priority)));
    DbgSpace(10, kprintf("| %s ", ProcStatusStr(proc->status)));
    DbgSpace(12, kprintf("| %p ", proc));
    // -- the 64-bit timeUsed is printed as two 32-bit halves (high then low)
    DbgSpace(20, kprintf("| %p %p ", (uint32_t)(proc->timeUsed >> 32), (uint32_t)proc->timeUsed));
    kprintf("|\n");
}
//
// -- Show the status of the scheduler queues
// ---------------------------------------
EXTERN_C EXPORT KERNEL
void DebugSchedulerShow(void)
{
    // -- park the other cores so the process list cannot change while it is walked
    DebuggerEngage(DIPI_ENGAGE);

    // -- clear the screen and draw the table header
    kprintf(ANSI_CLEAR ANSI_SET_CURSOR(0,0));
    kprintf("+---------------------------+--------+----------+----------+------------+-----------------------+\n");
    kprintf("| " ANSI_ATTR_BOLD ANSI_FG_BLUE "Command" ANSI_ATTR_NORMAL " | "
            ANSI_ATTR_BOLD ANSI_FG_BLUE "PID" ANSI_ATTR_NORMAL " | " ANSI_ATTR_BOLD ANSI_FG_BLUE "Priority"
            ANSI_ATTR_NORMAL " | " ANSI_ATTR_BOLD ANSI_FG_BLUE "Status" ANSI_ATTR_NORMAL " | "
            ANSI_ATTR_BOLD ANSI_FG_BLUE "Address" ANSI_ATTR_NORMAL " | " ANSI_ATTR_BOLD ANSI_FG_BLUE
            "Time Used" ANSI_ATTR_NORMAL " |\n");
    kprintf("+---------------------------+--------+----------+----------+------------+-----------------------+\n");

    // -- one row per process on the global list (the list head itself is the sentinel)
    for (ListHead_t::List_t *cursor = scheduler.globalProcesses.list.next;
            cursor != &scheduler.globalProcesses.list;
            cursor = cursor->next) {
        PrintProcessRow(FIND_PARENT(cursor, Process_t, globalList));
    }

    kprintf("+---------------------------+--------+----------+----------+------------+-----------------------+\n");

    // -- let the other cores continue
    DebuggerRelease();
}
<|start_filename|>arch/x86/cpu/ArchIdtSetup.cc<|end_filename|>
//===================================================================================================================
//
// ArchIdtSetup.cc -- Build the IDT Table in-place
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This function will build the IDT table in-place at location 0x00000800.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-May-30 Initial 0.1.0 ADCL Initial version
// 2019-Feb-09 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "mmu.h"
#include "cpu.h"
#include "printf.h"
#include "interrupt.h"
//
// -- The ISR entry points as functions declared
// ------------------------------------------
EXTERN_C EXPORT KERNEL
void isr0 (void);
EXTERN_C EXPORT KERNEL
void isr1 (void);
EXTERN_C EXPORT KERNEL
void isr2 (void);
EXTERN_C EXPORT KERNEL
void isr3 (void);
EXTERN_C EXPORT KERNEL
void isr4 (void);
EXTERN_C EXPORT KERNEL
void isr5 (void);
EXTERN_C EXPORT KERNEL
void isr6 (void);
EXTERN_C EXPORT KERNEL
void isr7 (void);
EXTERN_C EXPORT KERNEL
void isr8 (void);
EXTERN_C EXPORT KERNEL
void isr9 (void);
EXTERN_C EXPORT KERNEL
void isr10 (void);
EXTERN_C EXPORT KERNEL
void isr11 (void);
EXTERN_C EXPORT KERNEL
void isr12 (void);
EXTERN_C EXPORT KERNEL
void isr13 (void);
EXTERN_C EXPORT KERNEL
void isr14 (void);
EXTERN_C EXPORT KERNEL
void isr15 (void);
EXTERN_C EXPORT KERNEL
void isr16 (void);
EXTERN_C EXPORT KERNEL
void isr17 (void);
EXTERN_C EXPORT KERNEL
void isr18 (void);
EXTERN_C EXPORT KERNEL
void isr19 (void);
EXTERN_C EXPORT KERNEL
void isr20 (void);
EXTERN_C EXPORT KERNEL
void isr21 (void);
EXTERN_C EXPORT KERNEL
void isr22 (void);
EXTERN_C EXPORT KERNEL
void isr23 (void);
EXTERN_C EXPORT KERNEL
void isr24 (void);
EXTERN_C EXPORT KERNEL
void isr25 (void);
EXTERN_C EXPORT KERNEL
void isr26 (void);
EXTERN_C EXPORT KERNEL
void isr27 (void);
EXTERN_C EXPORT KERNEL
void isr28 (void);
EXTERN_C EXPORT KERNEL
void isr29 (void);
EXTERN_C EXPORT KERNEL
void isr30 (void);
EXTERN_C EXPORT KERNEL
void isr31 (void);
EXTERN_C EXPORT SYSCALL
void isr100(void);
EXTERN_C EXPORT KERNEL
void irq0 (void);
EXTERN_C EXPORT KERNEL
void irq1 (void);
EXTERN_C EXPORT KERNEL
void irq2 (void);
EXTERN_C EXPORT KERNEL
void irq3 (void);
EXTERN_C EXPORT KERNEL
void irq4 (void);
EXTERN_C EXPORT KERNEL
void irq5 (void);
EXTERN_C EXPORT KERNEL
void irq6 (void);
EXTERN_C EXPORT KERNEL
void irq7 (void);
EXTERN_C EXPORT KERNEL
void irq8 (void);
EXTERN_C EXPORT KERNEL
void irq9 (void);
EXTERN_C EXPORT KERNEL
void irq10 (void);
EXTERN_C EXPORT KERNEL
void irq11 (void);
EXTERN_C EXPORT KERNEL
void irq12 (void);
EXTERN_C EXPORT KERNEL
void irq13 (void);
EXTERN_C EXPORT KERNEL
void irq14 (void);
EXTERN_C EXPORT KERNEL
void irq15 (void);
EXTERN_C EXPORT KERNEL
void irq16 (void);
EXTERN_C EXPORT KERNEL
void irq17 (void);
EXTERN_C EXPORT KERNEL
void irq18 (void);
EXTERN_C EXPORT KERNEL
void irq19 (void);
EXTERN_C EXPORT KERNEL
void irq20 (void);
EXTERN_C EXPORT KERNEL
void irq21 (void);
EXTERN_C EXPORT KERNEL
void irq22 (void);
EXTERN_C EXPORT KERNEL
void irq23 (void);
EXTERN_C EXPORT KERNEL
void irq240(void);
EXTERN_C EXPORT KERNEL
void irq241(void);
EXTERN_C EXPORT KERNEL
void irq242(void);
EXTERN_C EXPORT KERNEL
void irq243(void);
EXTERN_C EXPORT KERNEL
void irq244(void);
EXTERN_C EXPORT KERNEL
void irq245(void);
EXTERN_C EXPORT KERNEL
void irq246(void);
EXTERN_C EXPORT KERNEL
void irq247(void);
EXTERN_C EXPORT KERNEL
void irq248(void);
EXTERN_C EXPORT KERNEL
void irq249(void);
EXTERN_C EXPORT KERNEL
void irq250(void);
EXTERN_C EXPORT KERNEL
void irq251(void);
EXTERN_C EXPORT KERNEL
void irq252(void);
EXTERN_C EXPORT KERNEL
void irq253(void);
EXTERN_C EXPORT KERNEL
void irq254(void);
EXTERN_C EXPORT KERNEL
void irq255(void);
//
// -- Build the parts of the IDT we are going to use so far
// -----------------------------------------------------
EXTERN_C EXPORT LOADER
void ArchIdtSetup(void)
{
    kprintf("Initializing the IDT properly\n");
    // -- map the IDT's physical frame to its virtual address and zero all 256 entries
    MmuMapToFrame(X86_VIRT_IDT, X86_PHYS_IDT >> 12, PG_WRT | PG_KRN);
    kMemSetB((void *)X86_VIRT_IDT, 0, sizeof(IdtEntry_t) * 256);
    // -- default every vector to the do-nothing handler so no gate is ever left invalid
    for (int i = 0; i < 256; i ++) {
        ArchIdtSetGate(i, (archsize_t)ArchIntNone, 0x08, 0x8e);
    }
    // -- CPU exception vectors 0-31 (selector 0x08, flags 0x8e: present, ring-0 interrupt gate)
    ArchIdtSetGate( 0, (uint32_t)isr0 , 0x08, 0x8e);
    ArchIdtSetGate( 1, (uint32_t)isr1 , 0x08, 0x8e);
    ArchIdtSetGate( 2, (uint32_t)isr2 , 0x08, 0x8e);
    ArchIdtSetGate( 3, (uint32_t)isr3 , 0x08, 0x8e);
    ArchIdtSetGate( 4, (uint32_t)isr4 , 0x08, 0x8e);
    ArchIdtSetGate( 5, (uint32_t)isr5 , 0x08, 0x8e);
    ArchIdtSetGate( 6, (uint32_t)isr6 , 0x08, 0x8e);
    ArchIdtSetGate( 7, (uint32_t)isr7 , 0x08, 0x8e);
    ArchIdtSetGate( 8, (uint32_t)isr8 , 0x08, 0x8e);
    ArchIdtSetGate( 9, (uint32_t)isr9 , 0x08, 0x8e);
    ArchIdtSetGate(10, (uint32_t)isr10, 0x08, 0x8e);
    ArchIdtSetGate(11, (uint32_t)isr11, 0x08, 0x8e);
    ArchIdtSetGate(12, (uint32_t)isr12, 0x08, 0x8e);
    ArchIdtSetGate(13, (uint32_t)isr13, 0x08, 0x8e);
    ArchIdtSetGate(14, (uint32_t)isr14, 0x08, 0x8e);
    ArchIdtSetGate(15, (uint32_t)isr15, 0x08, 0x8e);
    ArchIdtSetGate(16, (uint32_t)isr16, 0x08, 0x8e);
    ArchIdtSetGate(17, (uint32_t)isr17, 0x08, 0x8e);
    ArchIdtSetGate(18, (uint32_t)isr18, 0x08, 0x8e);
    ArchIdtSetGate(19, (uint32_t)isr19, 0x08, 0x8e);
    ArchIdtSetGate(20, (uint32_t)isr20, 0x08, 0x8e);
    ArchIdtSetGate(21, (uint32_t)isr21, 0x08, 0x8e);
    ArchIdtSetGate(22, (uint32_t)isr22, 0x08, 0x8e);
    ArchIdtSetGate(23, (uint32_t)isr23, 0x08, 0x8e);
    ArchIdtSetGate(24, (uint32_t)isr24, 0x08, 0x8e);
    ArchIdtSetGate(25, (uint32_t)isr25, 0x08, 0x8e);
    ArchIdtSetGate(26, (uint32_t)isr26, 0x08, 0x8e);
    ArchIdtSetGate(27, (uint32_t)isr27, 0x08, 0x8e);
    ArchIdtSetGate(28, (uint32_t)isr28, 0x08, 0x8e);
    ArchIdtSetGate(29, (uint32_t)isr29, 0x08, 0x8e);
    ArchIdtSetGate(30, (uint32_t)isr30, 0x08, 0x8e);
    ArchIdtSetGate(31, (uint32_t)isr31, 0x08, 0x8e);
    // -- hardware IRQ vectors 32-55
    ArchIdtSetGate(32, (uint32_t)irq0 , 0x08, 0x8e);
    ArchIdtSetGate(33, (uint32_t)irq1 , 0x08, 0x8e);
    ArchIdtSetGate(34, (uint32_t)irq2 , 0x08, 0x8e);
    ArchIdtSetGate(35, (uint32_t)irq3 , 0x08, 0x8e);
    ArchIdtSetGate(36, (uint32_t)irq4 , 0x08, 0x8e);
    ArchIdtSetGate(37, (uint32_t)irq5 , 0x08, 0x8e);
    ArchIdtSetGate(38, (uint32_t)irq6 , 0x08, 0x8e);
    ArchIdtSetGate(39, (uint32_t)irq7 , 0x08, 0x8e);
    ArchIdtSetGate(40, (uint32_t)irq8 , 0x08, 0x8e);
    ArchIdtSetGate(41, (uint32_t)irq9 , 0x08, 0x8e);
    ArchIdtSetGate(42, (uint32_t)irq10, 0x08, 0x8e);
    ArchIdtSetGate(43, (uint32_t)irq11, 0x08, 0x8e);
    ArchIdtSetGate(44, (uint32_t)irq12, 0x08, 0x8e);
    ArchIdtSetGate(45, (uint32_t)irq13, 0x08, 0x8e);
    ArchIdtSetGate(46, (uint32_t)irq14, 0x08, 0x8e);
    ArchIdtSetGate(47, (uint32_t)irq15, 0x08, 0x8e);
    ArchIdtSetGate(48, (uint32_t)irq16, 0x08, 0x8e);
    ArchIdtSetGate(49, (uint32_t)irq17, 0x08, 0x8e);
    ArchIdtSetGate(50, (uint32_t)irq18, 0x08, 0x8e);
    ArchIdtSetGate(51, (uint32_t)irq19, 0x08, 0x8e);
    ArchIdtSetGate(52, (uint32_t)irq20, 0x08, 0x8e);
    ArchIdtSetGate(53, (uint32_t)irq21, 0x08, 0x8e);
    ArchIdtSetGate(54, (uint32_t)irq22, 0x08, 0x8e);
    ArchIdtSetGate(55, (uint32_t)irq23, 0x08, 0x8e);
    // -- syscall gate: DPL 3 (0x8e|0x60) so it is available from user space
    //    NOTE(review): selector 0x0b (0x08 | RPL 3) -- confirm this is the intended selector
    ArchIdtSetGate(100, (uint32_t)isr100, 0x0b, 0x8e|0x60);            // available from user space
    // -- high vectors 240-255 (IPIs and the like)
    ArchIdtSetGate(240, (uint32_t)irq240, 0x08, 0x8e);
    ArchIdtSetGate(241, (uint32_t)irq241, 0x08, 0x8e);
    ArchIdtSetGate(242, (uint32_t)irq242, 0x08, 0x8e);
    ArchIdtSetGate(243, (uint32_t)irq243, 0x08, 0x8e);
    ArchIdtSetGate(244, (uint32_t)irq244, 0x08, 0x8e);
    ArchIdtSetGate(245, (uint32_t)irq245, 0x08, 0x8e);
    ArchIdtSetGate(246, (uint32_t)irq246, 0x08, 0x8e);
    ArchIdtSetGate(247, (uint32_t)irq247, 0x08, 0x8e);
    ArchIdtSetGate(248, (uint32_t)irq248, 0x08, 0x8e);
    ArchIdtSetGate(249, (uint32_t)irq249, 0x08, 0x8e);
    ArchIdtSetGate(250, (uint32_t)irq250, 0x08, 0x8e);
    ArchIdtSetGate(251, (uint32_t)irq251, 0x08, 0x8e);
    ArchIdtSetGate(252, (uint32_t)irq252, 0x08, 0x8e);
    ArchIdtSetGate(253, (uint32_t)irq253, 0x08, 0x8e);
    ArchIdtSetGate(254, (uint32_t)irq254, 0x08, 0x8e);
    ArchIdtSetGate(255, (uint32_t)irq255, 0x08, 0x8e);
    // -- Finally we need to load the new IDT (the limit is size-1 per the lidt convention)
    //    (note: this comment previously said "GDT" in error)
    struct {
        uint16_t size;
        uintptr_t loc;
    } __attribute__((packed)) idtRec = {
        (uint16_t)((sizeof(IdtEntry_t) * 256) - 1),
        X86_VIRT_IDT,
    };
    ArchLoadIdt(&idtRec);
    // -- Register the individual ISR routines for the 32 exception vectors
    IsrRegister(0x00, IsrInt00);
    IsrRegister(0x01, IsrInt01);
    IsrRegister(0x02, IsrInt02);
    IsrRegister(0x03, IsrInt03);
    IsrRegister(0x04, IsrInt04);
    IsrRegister(0x05, IsrInt05);
    IsrRegister(0x06, IsrInt06);
    IsrRegister(0x07, IsrInt07);
    IsrRegister(0x08, IsrInt08);
    IsrRegister(0x09, IsrInt09);
    IsrRegister(0x0a, IsrInt0a);
    IsrRegister(0x0b, IsrInt0b);
    IsrRegister(0x0c, IsrInt0c);
    IsrRegister(0x0d, IsrInt0d);
    IsrRegister(0x0e, IsrInt0e);
    IsrRegister(0x0f, IsrInt0f);
    IsrRegister(0x10, IsrInt10);
    IsrRegister(0x11, IsrInt11);
    IsrRegister(0x12, IsrInt12);
    IsrRegister(0x13, IsrInt13);
    IsrRegister(0x14, IsrInt14);
    IsrRegister(0x15, IsrInt15);
    IsrRegister(0x16, IsrInt16);
    IsrRegister(0x17, IsrInt17);
    IsrRegister(0x18, IsrInt18);
    IsrRegister(0x19, IsrInt19);
    IsrRegister(0x1a, IsrInt1a);
    IsrRegister(0x1b, IsrInt1b);
    IsrRegister(0x1c, IsrInt1c);
    IsrRegister(0x1d, IsrInt1d);
    IsrRegister(0x1e, IsrInt1e);
    IsrRegister(0x1f, IsrInt1f);
    // -- and the syscall handler on vector 100
    IsrRegister(100, SyscallHandler);
}
<|start_filename|>platform/pc/pic/PicRegisterHandler.cc<|end_filename|>
//===================================================================================================================
//
// PicRegisterHandler.cc -- Register a handler to take care of an IRQ
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-24 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "interrupt.h"
#include "printf.h"
#include "pic.h"
//
// -- Register an IRQ handler
// -----------------------
EXTERN_C EXPORT KERNEL
isrFunc_t _PicRegisterHandler(PicDevice_t *dev, Irq_t irq, int vector, isrFunc_t handler)
{
    // -- validate every argument before touching any interrupt state; any bad input
    //    returns the error sentinel without side effects
    bool badArgs = (!dev) || (!handler)
            || (irq < 0) || (irq > 15)
            || (vector < 0) || (vector > 255);
    if (badArgs) return (isrFunc_t)-1;

    kprintf("Processing an audited request to map irq %x to vector %x\n", irq, vector);

    // -- keep the IRQ masked while the handler table is being updated so a stray
    //    interrupt cannot race the registration
    PicMaskIrq(dev, irq);
    isrFunc_t previous = IsrRegister(vector, handler);
    PicUnmaskIrq(dev, irq);

    kprintf(".. Request complete\n");

    // -- hand back whatever handler was registered before (if any)
    return previous;
}
<|start_filename|>modules/kernel/src/process/ProcessCheckQueue.cc<|end_filename|>
//===================================================================================================================
//
// ProcessCheckQueue.cc -- Debugging function used to output the state of the scheduler queues
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-28 Initial v0.5.1a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "lists.h"
#include "printf.h"
#include "process.h"
//
// -- Output the state of the scheduler
// ---------------------------------
void ProcessDoCheckQueue(void)
{
    // -- take the scheduler lock (and postpone rescheduling) so the queues cannot
    //    change while their contents are being dumped
    ProcessLockAndPostpone();
    kprintf("Dumping the status of the scheduler on CPU%d\n", thisCpu->cpuNum);
    kprintf("The scheduler is %s\n", schedulerLock.lock?"locked":"unlocked");
    if (schedulerLock.lock) kprintf("... on CPU%d\n", scheduler.lockCpu);

    // -- ProcessLockAndPostpone() above acquired the lock on this CPU, so both must hold
    assert(schedulerLock.lock != 0);
    assert(scheduler.lockCpu == thisCpu->cpuNum);

    kprintf(".. postpone count %d\n", AtomicRead(&scheduler.postponeCount));
    kprintf(".. currently, a reschedule is %spending\n", scheduler.processChangePending ? "" : "not ");

    // -- per-priority ready queues, highest priority first, then the terminated list
    kprintf(".. OS Queue process count: %d\n", ListCount(&scheduler.queueOS));
    kprintf(".. High Queue process count: %d\n", ListCount(&scheduler.queueHigh));
    kprintf(".. Normal Queue process count: %d\n", ListCount(&scheduler.queueNormal));
    kprintf(".. Low Queue process count: %d\n", ListCount(&scheduler.queueLow));
    kprintf(".. Idle Queue process count: %d\n", ListCount(&scheduler.queueIdle));
    kprintf(".. There are %d processes on the terminated list\n", ListCount(&scheduler.listTerminated));

    // -- release the lock and perform any reschedule that was postponed above
    ProcessUnlockAndSchedule();
}
<|start_filename|>modules/kernel/src/heap/HeapValidateHeader.cc<|end_filename|>
//===================================================================================================================
//
// HeapValidateHdr.cc -- Validate heap header
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Validate the heap header, heap footer, and if is a hole the ordered list entry.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-02 Initial version
// 2012-Sep-16 Leveraged from Century
// 2012-Sep-23 Removed DUMP() define
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2018-May-31 Initial 0.1.0 ADCL Copied this file from century32 to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
#if DEBUG_HEAP == 1
//
// -- Validate a heap header block to ensure it has not been overrun
// --------------------------------------------------------------
void HeapValidateHdr(KHeapHeader_t *hdr, const char *from)
{
    // -- a NULL header cannot be validated at all
    if (!hdr) HeapError(from, "Unable to validate NULL header");

    // -- the footer lives at the very end of the block the header describes
    KHeapFooter_t *ftr = (KHeapFooter_t *)((char *)hdr + hdr->size - sizeof(KHeapFooter_t));

    // -- bit 0 of the magic word doubles as the hole flag, so mask it off before
    //    comparing against HEAP_MAGIC (header first, then footer)
    if ((hdr->_magicUnion.magicHole & 0xfffffffe) != HEAP_MAGIC) HeapError(from, "Invalid Heap Header Magic Number");
    if ((ftr->_magicUnion.magicHole & 0xfffffffe) != HEAP_MAGIC) HeapError(from, "Invalid Heap Footer Magic Number");

    // -- header and footer must agree exactly, including the hole flag bit
    if (hdr->_magicUnion.magicHole != ftr->_magicUnion.magicHole) HeapError(from, "Header/Footer Magic Number/Hole mismatch");

    // -- a hole must appear on the ordered free list; an allocated block must not
    if (hdr->_magicUnion.isHole == 1 && hdr->entry == 0) HeapError(from, "Heap hole has no ordered list entry");
    if (hdr->_magicUnion.isHole == 0 && hdr->entry != 0) HeapError(from, "Heap allocated block has an ordered list entry");

    // -- when a free-list entry exists, it must reference this block and match its size
    if (hdr->entry && hdr->entry->block != hdr) HeapError(from, "Entry does not point to this header");
    if (hdr->entry && hdr->entry->size != hdr->size) HeapError(from, "Header/Entry size mismatch");
}
#endif
<|start_filename|>modules/kernel/src/butler/ButlerCleanProcess.cc<|end_filename|>
//===================================================================================================================
//
// ButlerCleanProcess.cc -- Clean up a terminated process, with all the required cleanup
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-14 Initial v0.6.1d ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "pmm.h"
#include "process.h"
#include "heap.h"
#include "butler.h"
//
// -- The Butler has been notified of a Process to clean
// --------------------------------------------------
void ButlerCleanProcess(void)
{
    // kprintf("Starting to clean a process up\n");
    Process_t *dlt = NULL;

    // -- pop one terminated process off the scheduler's terminated list, under the
    //    scheduler lock with interrupts blocked
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(schedulerLock) {
        if (!IsListEmpty(&scheduler.listTerminated)) {
            dlt = FIND_PARENT(scheduler.listTerminated.list.next, Process_t, stsQueue);
            ListRemoveInit(&dlt->stsQueue);
            ListRemoveInit(&dlt->globalList);
        }
        SPINLOCK_RLS_RESTORE_INT(schedulerLock, flags);
    }

    // -- nothing was on the terminated list; nothing to do
    if (!dlt) return;

    // -- from here, there is a process to clean up; since it is not on any queue, we own the structure
    //    we are starting by cleaning up all references
    while (!IsListEmpty(&dlt->references)) {
        Reference_t *ref = FIND_PARENT(dlt->references.list.next, Reference_t, procRefList);
        ListRemoveInit(&ref->procRefList);

        // -- now, we can remove the reference from the resource once we know what it is
        switch (ref->type) {
        case REF_MSGQ: {
            // -- detach this process from the message queue's subscriber list, under
            //    that queue's own lock, and drop the subscriber count to match
            MessageQueue_t *msgq = (MessageQueue_t *)ref->resAddr;
            archsize_t flags = SPINLOCK_BLOCK_NO_INT(msgq->procList.lock) {
                ListRemoveInit(&ref->resourceRefBy);
                msgq->procList.count --;
                SPINLOCK_RLS_RESTORE_INT(msgq->procList.lock, flags);
            }
            FREE(ref);
            break;
        }

        default:
            assert_msg(false, "Unsupported reference type to clean up");
            break;
        }
    }

    // -- resources are released; work on cleaning up virtual memory
    // -- TODO: Implement this

    // -- Clean up the stack
    //    NOTE(review): only the single page containing topOfStack (rounded down to the
    //    stack base) is unmapped and freed here -- confirm stacks are one page
    if (dlt->topOfStack > STACK_BASE) {
        PmmReleaseFrame(MmuUnmapPage(dlt->topOfStack & ~(STACK_SIZE - 1)));
    }

    // -- Finally, free up the process memory
    FREE(dlt);
}
<|start_filename|>modules/kernel/src/mmu/MmuVars.cc<|end_filename|>
//===================================================================================================================
//
// MmuVars.cc -- Common variables for the MMU
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Dec-22 Initial v0.5.0b ADCL Initial Version
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "spinlock.h"
#include "mmu.h"
//
// -- This spinlock is used to control access to the address space to clear the frame
//    (it serializes use of the shared scratch mapping used for frame clearing; the
//    KERNEL_BSS placement presumably means it starts zeroed -- confirm macro semantics)
//    --------------------------------------------------------------------------------
EXPORT KERNEL_BSS
Spinlock_t frameClearLock;


//
// -- This is used to control the flushes for the TLB buffer
//    ------------------------------------------------------
EXPORT KERNEL_BSS
TlbFlush_t tlbFlush;
<|start_filename|>arch/x86/cpu/ArchPerCpuInit.cc<|end_filename|>
//===================================================================================================================
//
// ArchPerCpuInit.cc -- Initialize the arch-specific per cpu elementss
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Feb-01 Initial v0.5.0f ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "cpu.h"
//
// -- Both the gs and the TSS need to be initialized for this CPU
// -----------------------------------------------------------
EXTERN_C EXPORT LOADER
void ArchPerCpuInit(int i)
{
    // -- each CPU owns a run of 3 consecutive descriptor slots starting at index 9;
    //    a selector is its descriptor index shifted left 3 bits
    int firstSlot = (i * 3) + 9;
    cpus.perCpuData[i].gsSelector = (firstSlot + 0) << 3;
    cpus.perCpuData[i].tssSelector = (firstSlot + 1) << 3;

    kprintf("!!>> [%d]: Setting the gs selector to %x and the tss selector to %x\n", i,
            cpus.perCpuData[i].gsSelector, cpus.perCpuData[i].tssSelector);
}
<|start_filename|>platform/pc/apic/IoApicUnmaskIrq.cc<|end_filename|>
//===================================================================================================================
//
// IoApicUnmaskIrq.cc -- Unmask an IRQ so that it is effectively enabled
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-20 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "hw-disc.h"
#include "mmu.h"
#include "interrupt.h"
#include "pic.h"
//
// -- Enable an IRQ by unmasking it
// -----------------------------
EXTERN_C EXPORT KERNEL
void _IoApicUnmaskIrq(PicDevice_t *dev, Irq_t irq)
{
    // -- ignore bad devices and IRQs outside the IOAPIC's 24 redirection entries
    if (!dev) return;
    if (irq < 0 || irq > 23) return;

    IoApicDeviceData_t *data = (IoApicDeviceData_t *)dev->device.deviceData;
    archsize_t base = data->ioapicBase;
    archsize_t redirReg = IoApicRedir(data, irq);

    // -- read the redirection entry, clear bit 16 (the mask bit), and write it back,
    //    which enables delivery of this IRQ
    archsize_t entry = IoapicRead(base, redirReg);
    entry &= ~(1 << 16);
    IoapicWrite(base, redirReg, entry);
}
<|start_filename|>modules/kernel/src/pmm/PmmReleaseFrameRange.cc<|end_filename|>
//===================================================================================================================
//
// PmmReleaseFrameRange.cc -- Release a frame and place it in the scrub queue
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// The design of the PMM has changed. I am now storing the PMM data in the frames themselves. Therefore, the
// frame will need to be mapped into the `insert` member and then the stack info populated from the values passed
// in, along with the frame number from the next member in the stack. Then the frame will need to be unmapped
// and then remapped to the top of the proper stack.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-11 Initial 0.3.1 ADCL Initial version
// 2020-Apr-12 #405 v0.6.1c ADCL Redesign the PMM to store the stack in the freed frames themselves
//
//===================================================================================================================
#include "types.h"
#include "spinlock.h"
#include "heap.h"
#include "butler.h"
#include "msgq.h"
#include "pmm.h"
//
// -- Add the frames to the scrub queue
// ---------------------------------
EXTERN_C EXPORT KERNEL
void PmmReleaseFrameRange(const frame_t frame, const size_t count)
{
    // -- push the whole range onto the scrub stack under the scrub lock
    //    (NOTE(review): an earlier comment said "2 locks"; only scrubLock is taken
    //    here -- the second lock, pmm.insertLock, is taken inside PmmPush)
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(pmm.scrubLock) {
        PmmPush(pmm.scrubStack, frame, count);
        SPINLOCK_RLS_RESTORE_INT(pmm.scrubLock, flags);
    }

    // -- account for the newly available frames and wake the Butler to scrub them
    AtomicAdd(&pmm.framesAvail, count);
    MessageQueueSend(butlerMsgq, BUTLER_CLEAN_PMM, 0, 0);
}
<|start_filename|>arch/arm/mmu/MmuNewVirtualSpace.cc<|end_filename|>
//===================================================================================================================
//
// MmuNewVirtualSpace.cc -- For a new process, create the user virtual address space
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-16 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "spinlock.h"
#include "pmm.h"
#include "mmu.h"
//
// -- for arm, all we need is a blank address space
// ---------------------------------------------
EXTERN_C EXPORT KERNEL
frame_t MmuNewVirtualSpace(frame_t stack)
{
    // -- allocate 4 contiguous frames aligned to 2^14 (16KB) for the new top-level tables;
    //    note the `stack` parameter is not used on this architecture
    frame_t tableBase = PmmAllocAlignedFrames(4, 14);

    // -- scrub each frame so the new address space starts out completely empty
    for (frame_t f = tableBase; f < tableBase + 4; f ++) {
        MmuClearFrame(f);
    }

    return tableBase;
}
<|start_filename|>platform/bcm2836/framebuffer/FrameBufferInit.cc<|end_filename|>
//===================================================================================================================
//
// FrameBufferInit.cc -- Frame buffer initialization for the console (rpi2b version)
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This function interacts with the mailbox. For this to work, it requires a 16-byte aligned chunck of memory.
// I want to be able to allocate this in the .bss section, so here is how this works.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Jan-05 Initial 0.2.0 ADCL Initial version
// 2019-Feb-15 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "hw-disc.h"
#include "printf.h"
#include "fb.h"
#include "mmu.h"
#include "hardware.h"
//
// -- this is the buffer for the mailbox
// ----------------------------------
EXPORT LOADER_BSS
uint32_t mbBuf[64] __attribute__((aligned(16)));
//
// -- Initialize the additional frame buffer info
// -------------------------------------------
EXTERN_C EXPORT LOADER
void FrameBufferInit(void)
{
    // -- Build a single mailbox property message containing every tag needed to size,
    //    allocate, and query the frame buffer, then exchange it with the GPU on channel 8
    kprintf("Setting up the frame buffer\n");
    uint16_t *fb;

    kMemSetB(mbBuf, 0, sizeof(mbBuf));
    mbBuf[0] = 32 * 4;           // total message size in bytes (32 words used of the 64-word buffer)
    mbBuf[1] = 0;                // This is a request code
    mbBuf[2] = 0x00048003;       // Set the physical width/height
    mbBuf[3] = 8;                // 8 byte request/reply
    mbBuf[4] = 0;                // indicate this is a request
    mbBuf[5] = WIDTH;            // requested width in pixels
    mbBuf[6] = HEIGHT;           // requested height in pixels
    mbBuf[7] = 0x00048004;       // Set the virtual width/height
    mbBuf[8] = 8;                // 8 byte request/reply
    mbBuf[9] = 0;                // indicate this is a request
    mbBuf[10] = WIDTH;           // virtual width matches the physical width
    mbBuf[11] = HEIGHT;          // virtual height matches the physical height
    mbBuf[12] = 0x00048005;      // Set the color depth
    mbBuf[13] = 4;               // 4 byte request/reply
    mbBuf[14] = 0;               // indicate this is a request
    mbBuf[15] = DEPTH;           // color depth in bits per pixel (16-bit color)
    mbBuf[16] = 0x00048009;      // Set the virtual offset
    mbBuf[17] = 8;               // 8 byte request/reply
    mbBuf[18] = 0;               // indicate this is a request
    mbBuf[19] = 0;               // offset at 0,0
    mbBuf[20] = 0;
    mbBuf[21] = 0x00040001;      // Allocate the frame buffer
    mbBuf[22] = 8;               // 8 byte request/reply
    mbBuf[23] = 0;               // indicate this is a request
    mbBuf[24] = 0;               // fb addr (filled in by the GPU)
    mbBuf[25] = 0;               // fb size (filled in by the GPU)
    mbBuf[26] = 0x00040008;      // Get the pitch
    mbBuf[27] = 4;               // 4 byte request/reply
    mbBuf[28] = 0;               // indicate this is a request
    mbBuf[29] = 0;               // pitch returned here
    mbBuf[30] = 0;               // last tag
    mbBuf[31] = 0;               // clear one more anyway

    // -- the GPU reads physical RAM: flush the CPU cache before sending, and
    //    invalidate after the reply so the GPU-written values are visible
    kprintf("The physical address of the buffer at %p is %p\n", mbBuf, MmuVirtToPhys(mbBuf));
    CleanCache((archsize_t)mbBuf, sizeof(mbBuf));
    MailboxSend(&kernelMailbox, 8, MmuVirtToPhys(mbBuf));
    MailboxReceive(&kernelMailbox, 8);
    InvalidateCache((archsize_t)mbBuf, sizeof(mbBuf));

    // -- NOTE(review): ARM_MAILBOX_OFFSET presumably converts the GPU bus address in
    //    mbBuf[24] to an ARM physical address -- confirm against the constant's definition
    fb = (uint16_t *)(mbBuf[24] + ARM_MAILBOX_OFFSET);
    SetFrameBufferAddr(fb);
    SetFrameBufferHeight(HEIGHT);
    SetFrameBufferWidth(WIDTH);
    SetFrameBufferBpp(DEPTH);
    // -- NOTE(review): the fallback pitch WIDTH*DEPTH mixes pixels with *bits* per pixel
    //    (a byte pitch would be WIDTH*DEPTH/8); harmless while the GPU returns a real
    //    pitch in mbBuf[29], but confirm before relying on the fallback
    SetFrameBufferPitch(mbBuf[29]?mbBuf[29]:WIDTH*DEPTH);

    kprintf(".. Framebuffer located at: %p\n", GetFrameBufferAddr());
    kprintf(".. Framebuffer size: %p\n", GetFrameBufferPitch() * GetFrameBufferHeight());

    //
    // -- Map the frame buffer to its final location in virtual memory, one page at a
    //    time, covering pitch * height bytes starting at the GPU-provided frame
    //    ---------------------------------------------------------------------------
    kprintf("Mapping the Frame Buffer\n");
    for (archsize_t fbVirt = MMU_FRAMEBUFFER, fbFrame = ((archsize_t)GetFrameBufferAddr()) >> 12,
            fbEnd = fbVirt + (GetFrameBufferPitch() * GetFrameBufferHeight());
            fbVirt < fbEnd; fbVirt += PAGE_SIZE, fbFrame ++) {
        MmuMapToFrame(fbVirt, fbFrame, PG_KRN | PG_WRT | PG_DEVICE);
    }

    // -- goose the config to the correct fb address
    SetFrameBufferAddr((uint16_t *)MMU_FRAMEBUFFER);
    SetFgColor(0xffff);
    SetBgColor(0x1234);
    FrameBufferClear();
}
<|start_filename|>modules/kernel/src/stacks/StackFind.cc<|end_filename|>
//===================================================================================================================
//
// StackFind.cc -- Find an available stack for use
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Dec-01 Initial 0.4.6d ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "spinlock.h"
#include "printf.h"
#include "stacks.h"
//
// -- Find an available stack and return its base address
// ---------------------------------------------------
EXPORT KERNEL
archsize_t StackFind(void)
{
    // -- result: base address of the allocated stack, or 0 when every slot is in use
    archsize_t rv = 0;

    archsize_t flags = SPINLOCK_BLOCK_NO_INT(stackBitmapLock) {
        bool found = false;

        // -- scan the bitmap one 32-bit word at a time, skipping fully-allocated words
        for (int word = 0; word < STACK_COUNT && !found; word ++) {
            if (stacks[word] == (archsize_t)-1) continue;       // every bit set; nothing free here

            // -- a clear bit marks a free slot; claim the first one
            for (int bit = 0; bit < 32; bit ++) {
                if ((stacks[word] & (1 << bit)) == 0) {
                    rv = STACK_LOCATION + (STACK_SIZE * ((word * 32) + bit));
                    StackDoAlloc(rv);
                    found = true;
                    break;
                }
            }
        }

        SPINLOCK_RLS_RESTORE_INT(stackBitmapLock, flags);
    }

    return rv;
}
<|start_filename|>platform/bcm2836/gpio/GpioSelectAlt.cc<|end_filename|>
//===================================================================================================================
//
// GpioSelectAlt.cc -- Select the alternate function for a GPIO pin
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "hardware.h"
//
// -- Select the alternate function for a Gpio pin
// --------------------------------------------
EXTERN_C EXPORT KERNEL
void _GpioSelectAlt(GpioDevice_t *dev, GpioPin_t pin, GpioAlt_t alt)
{
    // -- validate the device, the pin (BCM2835 exposes GPIO 0-53), and the alternate function
    if (!dev) return;
    if (pin < 0 || pin > 53) return;
    if (alt < 0 || alt > 5) return;

    int bank = pin / 10;                // one GPFSELn register covers 10 pins
    // -- each pin owns a 3-bit function field, so the field offset is (pin % 10) * 3;
    //    the previous `<< 3` multiplied by 8, corrupting the wrong bits and shifting
    //    past bit 31 (undefined behavior) for pins where (pin % 10) >= 4
    int shift = (pin % 10) * 3;

    // -- read-modify-write: clear the pin's 3-bit field, then set the requested function
    archsize_t sel = MmioRead(dev->base + GPIO_FSEL0 + (bank * 4));
    sel &= ~(7 << shift);
    sel |= (alt << shift);
    MmioWrite(dev->base + GPIO_FSEL0 + (bank * 4), sel);
}
<|start_filename|>modules/kernel/src/pmm/PmmPush.cc<|end_filename|>
//===================================================================================================================
//
// PmmPush.cc -- Push a new node of blocks onto a stack of frames
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Pushing a node is no longer a trivial task. So, a function is added to complete this work and maintain
// code readability.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-12 #405 v0.6.1c ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "spinlock.h"
#include "mmu.h"
#include "pmm.h"
//
// -- Push a new node onto the stack; stack must be locked to call this function
// --------------------------------------------------------------------------
void PmmPush(PmmFrameInfo_t *stack, frame_t frame, size_t count)
{
    // kprintf("Pushing a node onto the stack at %p (frame: %x; size: %d)\n", stack, frame, count);

    // -- The PMM stores its stack nodes inside the free frames themselves.  `pmm.insert`
    //    is a dedicated scratch virtual address: map the frame being pushed there so its
    //    node fields can be written.  The scratch mapping is shared, hence its own lock.
    // -- decorate the frame with the proper info to push onto the stack
    SPINLOCK_BLOCK(pmm.insertLock) {
        MmuMapToFrame((archsize_t)pmm.insert, frame, PG_KRN | PG_WRT);
        pmm.insert->frame = frame;      // first frame of this free run
        pmm.insert->count = count;      // number of contiguous frames in the run
        pmm.insert->prev = 0;           // this node becomes the new top, so no predecessor

        // -- `stack` is a fixed virtual address that is mapped only while the stack is
        //    non-empty; when mapped, link the new node in front of the current top
        if (MmuIsMapped((archsize_t)stack)) {
            pmm.insert->next = stack->frame;
            stack->prev = frame;
            MmuUnmapPage((archsize_t)stack);    // the old top no longer sits at the top-of-stack address
        } else {
            pmm.insert->next = 0;               // stack was empty; this is the only node
        }

        MmuUnmapPage((archsize_t)pmm.insert);
        SPINLOCK_RLS(pmm.insertLock);
    }

    // -- finally, push the new node
    MmuMapToFrame((archsize_t)stack, frame, PG_WRT | PG_KRN);
}
<|start_filename|>platform/bcm2836/pic/PicUnmaskIrq.cc<|end_filename|>
//===================================================================================================================
//
// PicUnmaskIrq.cc -- Enable the PIC to pass along an IRQ
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "pic.h"
#include "printf.h"
//
// -- Enable the PIC to pass along an IRQ (some call it unmasking)
// ------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void _PicUnmaskIrq(PicDevice_t *dev, int irq)
{
#if DEBUG_ENABLED(PicUnmaskIrq)
    kprintf("Entering PicUnmaskIrq\n");
#endif

    // -- validate the device and the IRQ number before touching any registers
    if (!dev) return;
    if (irq < 0 || irq > BCM2836_LAST_IRQ) return;

    Bcm2835Pic_t *picData = (Bcm2835Pic_t *)dev->device.deviceData;
    int shift;
    archsize_t addr;

#if DEBUG_ENABLED(PicUnmaskIrq)
    kprintf("Sanity checks qualify PicUnmaskIrq, irq %d\n", irq);
#endif

    // -- three register banks, selected by IRQ range: per-core (bcm2836 local),
    //    ARM basic, and the two GPU pending banks.  Unmasking means writing a 1 to
    //    the *enable* register for the bit; writing the disable registers (as the
    //    previous code did) would mask the IRQ instead.
    if (irq >= BCM2836_CORE_BASE) {
        shift = irq - BCM2836_CORE_BASE;
        addr = (MMIO_VADDR + 0x01000060) + (thisCpu->cpuNum * 4);

#if DEBUG_ENABLED(PicUnmaskIrq)
        kprintf("... bcm2836 local core interrupt\n");
#endif
    } else if (irq >= BCM2835_ARM_BASE) {
        shift = irq - BCM2835_ARM_BASE;         // bit position within the basic-IRQ register
        addr = picData->picLoc + INT_IRQENB0;   // basic (ARM) IRQ enable register

#if DEBUG_ENABLED(PicUnmaskIrq)
        kprintf("... arm processor interrupt\n");
#endif
    } else { // GPU IRQ 0-63
        shift = irq % 32;
        addr = picData->picLoc + INT_IRQENB1 + (4 * (irq / 32));    // GPU IRQ enable 1/2

#if DEBUG_ENABLED(PicUnmaskIrq)
        kprintf("... bcm2835 GPU interrupt\n");
#endif
    }

#if DEBUG_ENABLED(PicUnmaskIrq)
    kprintf("Enabling IRQ bit %x at address %p\n", shift, addr);
#endif

    // -- these registers are write-1-to-set, so no read-modify-write is needed
    MmioWrite(addr, 1 << shift);

#if DEBUG_ENABLED(PicUnmaskIrq)
    kprintf("Done\n");
#endif
}
<|start_filename|>modules/kernel/src/heap/HeapError.cc<|end_filename|>
//===================================================================================================================
//
// HeapError.cc -- When a Heap Error occurs, kill the kernel
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// When a Heap Error occurs, kill the kernel printing the problem description
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Sep-23 Initial Version
// 2018-May-31 Initial 0.1.0 ADCL Copied this file from century32 (__HeapError.c)
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "printf.h"
#include "heap.h"
//
// -- Panic the kernel as the result of a heap error
// ----------------------------------------------
void HeapError(const char *from, const char *desc)
{
    // -- the heap is corrupt: stop taking interrupts so nothing else allocates on this CPU
    DisableInterrupts();
    kprintf("Heap Error!!! %s - %s\n", from, desc);
    // -- dump the registers and panic; this call is expected not to return
    CpuPanicPushRegs("");
}
<|start_filename|>platform/pc/inc/platform-io.h<|end_filename|>
//===================================================================================================================
//
// platform-io.h -- These are additional I/O functions that are use for x86
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-23 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#pragma once
#ifndef __HARDWARE_H__
# error "Use #include \"hardware.h\" and it will pick up this file; do not #include this file directly."
#endif
//
// -- Get a byte from an I/O Port
// ---------------------------
EXPORT INLINE
uint8_t inb(uint16_t port) {
    // -- read one byte from the requested legacy I/O port; the "=a" constraint
    //    binds the result to AL, as the `in` instruction requires
    uint8_t value;
    asm volatile ( "inb %1, %0" : "=a"(value) : "Nd"(port) );
    return value;
}
//
// -- Output a byte to an I/O Port
// ----------------------------
EXPORT INLINE
void outb(uint16_t port, uint8_t val) {
    // -- write one byte to the requested legacy I/O port; "a" binds the value to AL
    //    and "Nd" allows either an immediate port number or DX
    asm volatile ( "outb %0, %1" : : "a"(val), "Nd"(port) );
}
<|start_filename|>arch/x86/inc/arch-types.h<|end_filename|>
//===================================================================================================================
//
// arch-types.h -- Type definitions specific to i686 architectures
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// These types are architecture dependent.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-May-25 Initial 0.1.0 ADCL Initial
// 2018-Nov-11 Initial 0.2.0 ADCL Address architecture abstraction issues
// 2019-Feb-09 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#ifndef __TYPES_H__
# error "Do not include 'arch-types.h' directly; include 'types.h' instead, which will pick up this file."
#endif
//
// -- This is the address width size for this architecture
//    ----------------------------------------------------
typedef uint32_t archsize_t;        // i686 is a 32-bit architecture


//
// -- This is the size of a frame for the PMM (which is tied to the address width for this architecture)
//    --------------------------------------------------------------------------------------------------
typedef archsize_t frame_t;         // a frame number fits in an address-sized integer


//
// -- This is the equivalent to a port size for x86
//    ---------------------------------------------
typedef archsize_t devaddr_t;
//
// -- This is the order of the registers on the stack
// -----------------------------------------------
// -- This structure overlays the CPU state saved on the stack by the interrupt entry code;
//    the field order here therefore matches the order the values sit on the stack and
//    MUST NOT be changed independently of the ISR entry stubs.
typedef struct isrRegs_t {
    archsize_t ss;                      // stack segment
    archsize_t gs;                      // additional segment registers
    archsize_t fs;
    archsize_t es;
    archsize_t ds;                      // data segment
    archsize_t cr3;                     // control register 3 -- paging structures base
    archsize_t cr2;                     // control register 2 -- faulting address on a page fault
    archsize_t cr0;                     // control register 0 -- processor control flags
    archsize_t edi;                     // general purpose registers
    archsize_t esi;
    archsize_t ebp;
    archsize_t esp;
    archsize_t ebx;
    archsize_t edx;
    archsize_t ecx;
    archsize_t eax;
    archsize_t intno;                   // interrupt vector number
    archsize_t ackIRQ;                  // NOTE(review): presumably whether/which IRQ to acknowledge -- confirm in the ISR stubs
    archsize_t errcode;                 // error code (the CPU pushes one for some exceptions)
    archsize_t eip;                     // interrupted instruction pointer (CPU-pushed)
    archsize_t cs;                      // code segment at the time of the interrupt (CPU-pushed)
    archsize_t eflags;                  // flags at the time of the interrupt (CPU-pushed)
} isrRegs_t;
//
// -- The Interrupt Descriptor Table Entry
// ------------------------------------
// -- x86 gate descriptor layout; the packing and field widths are mandated by hardware
typedef struct IdtEntry_t {
    uint16_t baseLow;                   // handler address bits 0-15
    uint16_t sel;                       // code segment selector to load for the handler
    uint8_t always0;                    // reserved byte; must remain 0
    uint8_t flags;                      // present bit, DPL, and gate type
    uint16_t baseHigh;                  // handler address bits 16-31
} __attribute__((packed)) IdtEntry_t;
<|start_filename|>platform/pc/init/LowMemChk.cc<|end_filename|>
//===================================================================================================================
//
// LowMemCheck.cc -- Determine if this low memory is used or available
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-10 Initial v0.6.1b ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "hw-disc.h"
//
// -- Determine whether a given low-memory (< 1MB) frame is available for use on pc
//    (the old header comment mentioning rpi2b was a copy/paste leftover)
//    -----------------------------------------------------------------------------
EXTERN_C EXPORT LOADER
bool LowMemCheck(frame_t frame)
{
    // -- the NULL frame is abandoned; it is never handed out
    if (frame == 0) return false;

    // -- The GDT is not available (frame 0x10 == physical address 0x10000)
    if (frame == 0x10) return false;

    // -- The IDT is not available (frame 0x09 == physical address 0x9000)
    if (frame == 0x09) return false;

    // -- The Trampoline is not available (frame 0x08 == physical address 0x8000)
    if (frame == 0x08) return false;

    // -- Frames above the start of the EBDA are not available
    //    NOTE(review): '>' lets the frame containing the EBDA base itself pass as
    //    available; confirm whether '>=' was intended here
    if (frame > GetEbda() >> 12) return false;

    // -- everything else is available
    return true;
}
<|start_filename|>modules/kernel/src/heap/HeapValidatePtr.cc<|end_filename|>
//===================================================================================================================
//
// ValidateHeapPtr.cc -- Validate the heap pointer.
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Validate the heap pointer.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-02 Initial version
// 2012-Sep-16 Leveraged from Century
// 2012-Sep-23 Removed DUMP() define
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2018-Jun-01 Initial 0.1.0 ADCL Copied this file from century32 to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
#if DEBUG_HEAP == 1
//
// -- Check the heap structure
// ------------------------
void HeapValidatePtr(const char *from)
{
    // -- a heap with no memory list cannot be walked; report the problem and bail out
    if (!kHeap->heapMemory) {
        HeapError(from, "Start of heapMemory is empty");
        return;
    }

    // -- otherwise validate starting with the first block header on the heap
    HeapValidateHdr(kHeap->heapMemory->block, from);
}
#endif
<|start_filename|>arch/x86/inc/arch-mmu.h<|end_filename|>
//===================================================================================================================
//
// arch-mmu-kernel.h -- This is the kernel MMU manager header specific to i686
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- --------------------------------------------------------------------------
// 2018-Nov-21 Initial 0.1.0 ADCL Initial version
// 2019-Feb-09 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#pragma once
#ifndef __MMU_H__
# error "Do not include 'arch-mmu.h' directly. Include 'mmu.h' and this file will be included"
#endif
#include "types.h"
//
// -- This is the recursive mapping location
// --------------------------------------
const archsize_t RECURSIVE_VADDR = 0xffc00000;
const archsize_t RECURSIVE_PD_VADDR = 0xfffff000;
//
// -- This is a 32-bit page entry for both the page directory and the page tables
// ---------------------------------------------------------------------------
// -- One 32-bit x86 paging entry; the same layout serves both PD and PT entries
typedef struct PageEntry_t {
    unsigned int p : 1;                 // Is the page present?
    unsigned int rw : 1;                // set to 1 to allow writes
    unsigned int us : 1;                // 0=Supervisor; 1=user
    unsigned int pwt : 1;               // Page Write Through
    unsigned int pcd : 1;               // Page-level cache disable
    unsigned int a : 1;                 // accessed
    unsigned int d : 1;                 // dirty (needs to be written for a swap)
    unsigned int pat : 1;               // Page Attribute Table bit (set to 0 here; must be 0 for tables)
    unsigned int g : 1;                 // Global (set to 0)
    unsigned int k : 1;                 // Is this a kernel page? (software-defined; hardware leaves bits 9-11 to software)
    unsigned int avl : 2;               // Available for software use
    unsigned int frame : 20;            // This is the 4K aligned page frame address (or table address)
} __attribute__((packed)) PageEntry_t;
//
// -- These are the helper functions to make MMU management nearly painless
// ---------------------------------------------------------------------
EXPORT INLINE
int MmuGetPDIndexFromAddr(archsize_t addr) {
    // -- bits 31:22 of a virtual address select one of the 1024 Page Directory entries
    return (int)((addr >> 22) & 0x3ff);
}

EXPORT INLINE
int MmuGetPTIndexFromAddr(archsize_t addr) {
    // -- bits 21:12 of a virtual address select one of the 1024 Page Table entries
    return (int)((addr >> 12) & 0x3ff);
}

EXPORT INLINE
PageEntry_t *MmuGetPDAddress(void) {
    // -- with the recursive mapping in place, the Page Directory appears at a fixed address
    return (PageEntry_t *)RECURSIVE_PD_VADDR;
}

EXPORT INLINE
PageEntry_t *MmuGetPTAddress(archsize_t addr) {
    // -- each Page Table occupies one 4K page inside the recursive mapping window
    archsize_t tableOffset = ((archsize_t)MmuGetPDIndexFromAddr(addr)) * 0x1000;
    return (PageEntry_t *)(RECURSIVE_VADDR + tableOffset);
}

EXPORT INLINE
PageEntry_t *MmuGetPDEntry(archsize_t addr) {
    // -- the PD entry is the PD base plus the directory index
    return MmuGetPDAddress() + MmuGetPDIndexFromAddr(addr);
}

EXPORT INLINE
PageEntry_t *MmuGetPTEntry(archsize_t addr) {
    // -- the PT entry is the PT base plus the table index
    return MmuGetPTAddress(addr) + MmuGetPTIndexFromAddr(addr);
}
EXTERN_C EXPORT KERNEL
void InvalidatePage(archsize_t addr);
EXTERN_C EXPORT KERNEL
void MmuDumpTables(archsize_t addr);
<|start_filename|>arch/arm/inc/arch-interrupt.h<|end_filename|>
//===================================================================================================================
//
// arch-interrupt.cc -- These are functions related to interrupts for the rpi2b architecture
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// These are function prototypes for interrupts management
//
// The interrupt vector deserves some discussion. This is documented in the ARM Cortex-A Programmers Reference
// section 11.1.1. This vector table is located at address 0xffff0000. Each entry in the table is a 4-byte
// instruction. A simple branch to a relative offset requires the offset to be within 24 bits. Since the
// kernel is located it 0xc0000000, this is not the case.
//
// The alternative is to use a long (32-bit) jump where the target is relative to the pc register. For this
// approach to work, the jump target address must be stored close (12 bits offset). By putting these jump targets
// immediately above the Interrupt Vector table, I meet that requirement. Now, I can jump to an address that is
// stored relative to the pc register in a single instruction, and that target is a 32-bit address location.
//
// To accomplish this, I have an instruction that I will place in the Interrupt Vector portion, IVEC_JUMP_ASM.
// This instruction is a pre-assembled instruction `ldr pc,[pc,#0x18]`. Now, the astute reader will note that the
// actual offset in the table is 0x20 bytes but I am only offsetting the instruction by 0x18 bytes. The ARM ARM
// section A5.2.2 notes that when the base register for the addressing mode calculation, "the value is the
// address of the instruction plus eight." Thus, the 8-byte difference.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-11 Initial 0.2.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#ifndef __INTERRUPT_H__
# error "Do not include 'arch-interrupt.h' directly. Include 'interrupt.h' and this file will be included"
#endif
#include "types.h"
#include "cpu.h"
#include "printf.h"
//
// -- This is the code that will be placed in each of the vector locations (not the targets)
// Note that this is written in little-endian order for: 0xe59ff018
// --------------------------------------------------------------------------------------
#define IVEC_JUMP_ASM (0xe59ff018)
//
// -- This structure is the interrupt vector table for the rpi2b
// ----------------------------------------------------------
// -- The first eight words are the vector slots (each holds the IVEC_JUMP_ASM
//    instruction); the following eight words hold the 32-bit handler addresses those
//    instructions load pc-relative (see the file header above for the offset math)
typedef struct InterruptVector_t {
    archsize_t reset;                       // exception vector slots (instructions)
    archsize_t undefined;
    archsize_t supervisorCall;
    archsize_t prefetchAbort;
    archsize_t dataAbort;
    archsize_t unused;
    archsize_t irqInterrupt;
    archsize_t fiqInterrupt;
    archsize_t resetTarget;                 // handler addresses loaded by the slots above
    archsize_t undefinedTarget;
    archsize_t supervisorCallTarget;
    archsize_t perfetchAbortTarget;         // (sic: "perfetch" -- kept as-is; renaming would break existing references)
    archsize_t dataAbortTarget;
    archsize_t unusedTarget;
    archsize_t irqInterruptTarget;
    archsize_t fiqInterruptTarget;
} __attribute__((packed)) InterruptVector_t;
//
// -- Build the IDT and populate its gates; initialize the handlers to NULL
// ---------------------------------------------------------------------
EXTERN_C EXPORT LOADER
void ExceptionInit(void);
//
// -- A Local prototype to prevent the compiler from name mangling
// ------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void IsrHandler(isrRegs_t *regs);
//
// -- These are some macros to assist in the system calls handling
// ------------------------------------------------------------
#define SYSCALL_FUNC_NO(regs) ((regs)->type)
#define SYSCALL_RETURN(regs) ((regs)->r0)
#define SYSCALL_RCVMSG_PARM1(regs) ((regs)->r0)
#define SYSCALL_SNDMSG_PARM1(regs) ((regs)->r0)
#define SYSCALL_SNDMSG_PARM2(regs) ((regs)->r1)
<|start_filename|>arch/x86/mmu/MmuUnmapPage.cc<|end_filename|>
//===================================================================================================================
//
// MmuUnmapPage.cc -- Unmap a page in virtual address space, returning the frame in case something else needs done.
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This function will walk the current paging tables and remove the page from the virtual address space if it is
// mapped. If not, frame 0 is returned as an invalid page mapping. Note that frame 0 really does exist and
// really is mapped into this space but will never be unmapped. Since it also contains the GDT, IDT, and TSS, it
// is also the least likely to be attempted to be unmapped and has a carefully located position in the virtual
// address space. The chances of the kernel doing this legitimately are nearly NULL.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-10 Initial 0.1.0 ADCL Initial version
// 2019-Feb-09 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "cpu.h"
#include "pic.h"
#include "printf.h"
#include "process.h"
#include "mmu.h"
//
// -- Check for the page and unmap if it is mapped.
// ---------------------------------------------
EXTERN_C EXPORT KERNEL
frame_t MmuUnmapPage(archsize_t addr)
{
    frame_t rv;             // the frame that was mapped at addr (returned to the caller)

    archsize_t flags = SPINLOCK_BLOCK_NO_INT(tlbFlush.lock) {
        // -- publish "no address yet" (-1) and notify every other CPU a TLB flush is coming
        //    NOTE(review): the IPI is broadcast before tlbFlush.count is set below; this
        //    assumes the IPI_TLB_FLUSH handler spins on tlbFlush.addr until a real address
        //    is published -- confirm against the handler
        tlbFlush.addr = -1;
        PicBroadcastIpi(picControl, IPI_TLB_FLUSH);

        // -- capture the mapped frame, clear the whole page table entry, and flush
        //    this core's TLB for the page
        rv = PT_ENTRY(addr)->frame;
        *(uint32_t *)PT_ENTRY(addr) = 0;
        InvalidatePage(addr);

        //
        // -- Finally, wait for all the CPUs to complete the flush before continuing
        //    (publishing the page-aligned address releases the waiting cores)
        //    -----------------------------------------------------------------------
        AtomicSet(&tlbFlush.count, cpus.cpusRunning - 1);
        tlbFlush.addr = addr & ~(PAGE_SIZE - 1);
        while (AtomicRead(&tlbFlush.count) != 0 && picControl->ipiReady) {
            ProcessMilliSleep(150);
        }

        SPINLOCK_RLS_RESTORE_INT(tlbFlush.lock, flags);
    }

    return rv;
}
<|start_filename|>arch/arm/mmu/MmuDumpTables.cc<|end_filename|>
//===================================================================================================================
//
// MmuDumpTables.cc -- Dump the Paging Tables for a virtual address
//
// Copyright (c) 2017-2019 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-19 Initial 0.2.0 ADCL Initial version
// 2019-Dec-31 Initial v0.5.0c ADCL Recover from the old rpi2b loader and refactor for the kernel
//
//===================================================================================================================
#include "types.h"
#include "entry.h"
#include "printf.h"
#include "mmu.h"
//
// -- Dump the MMU Tables for a specific address
// ------------------------------------------
EXTERN_C EXPORT KERNEL
void MmuDumpTables(archsize_t addr)
{
    kprintf("\nMmuDumpTables: Walking the page tables for address %p\n", addr);
    kprintf("Level Tabl-Addr Index Entry Addr Next PAddr fault\n");
    kprintf("----- ---------- ---------- ---------- ---------- -----\n");

    // -- level 1: each TTL1 entry covers a 1MB section, so bits 31:20 index the table
    Ttl1_t *t1 = MMU_TTL1_ENTRY(addr);

    kprintf("TTL1 %p %d %p %p %x\n", mmuLvl1Table, addr >> 20, t1, t1->ttl2 << 10, t1->fault);

    // -- a zero fault field is treated as "no TTL2 table for this section"
    //    NOTE(review): fault is used as non-zero == present here -- confirm the encoding
    if (!t1->fault) return;

    // -- level 2: the TTL2 table base is stored shifted right by 10 bits; bits 19:12
    //    of the address select one of the 256 entries in that table
    archsize_t t2tab = (t1->ttl2 << 10);
    int i = (addr >> 12) & 0xff;
    Ttl2_t *t2 = MMU_TTL2_ENTRY(addr);

    kprintf("TTL2 %p %d %p %p %x\n", t2tab, i, t2, t2->frame, t2->fault);
}
<|start_filename|>modules/kernel/inc/pmm.h<|end_filename|>
//===================================================================================================================
//
// pmm.h -- The Physical Memory Manager header
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Jun-10 Initial 0.1.0 ADCL Initial version
// 2019-Feb-14 Initial 0.3.0 ADCL Relocated
// 2019-Mar-10 Initial 0.3.1 ADCL Rebuild the PMM to be managed by a stack
// 2020-Apr-12 #405 v0.6.1c ADCL Redesign the PMM to store the stack in the freed frames themselves
//
//===================================================================================================================
#pragma once
#define __PMM_H__
#include "types.h"
#include "lists.h"
#include "printf.h"
#include "hw-disc.h"
#include "spinlock.h"
#include "mmu.h"
#include "pmm-msg.h"
//
// -- This is the new PMM frame information structure -- contains info about this block of frames
// -------------------------------------------------------------------------------------------
typedef struct PmmFrameInfo_t {
    frame_t frame;          // first frame of this contiguous block
    size_t count;           // number of contiguous frames in the block
    frame_t prev;           // previous node in the stack (nodes are stored in the freed frames themselves)
    frame_t next;           // next node in the stack
} PmmFrameInfo_t;
//
// -- This is the new PMM itself
// --------------------------
typedef struct Pmm_t {
AtomicInt_t framesAvail; // -- this is the total number of frames available in the 3 stacks
Spinlock_t lowLock; // -- This lock protects lowStack
PmmFrameInfo_t *lowStack;
Spinlock_t normLock; // -- This lock protects normStack
PmmFrameInfo_t *normStack;
Spinlock_t scrubLock; // -- This lock protects scrubStack
PmmFrameInfo_t *scrubStack;
Spinlock_t searchLock; // -- This lock protects search only; get lowLock or normLock also
PmmFrameInfo_t *search;
Spinlock_t insertLock; // -- Protects insert only; one of low-, norm-, or scrubLock as well
PmmFrameInfo_t *insert;
} Pmm_t;
//
// -- This variable is the actual Physical Memory Manager data
// --------------------------------------------------------
EXTERN EXPORT KERNEL_DATA
Pmm_t pmm;
//
// -- Has the PMM been initialized properly for use?
// ----------------------------------------------
EXTERN EXPORT KERNEL_DATA
bool pmmInitialized;
//
// -- The early frame initialization
// ------------------------------
EXTERN EXPORT LOADER_DATA
archsize_t earlyFrame;
//
// -- Pop a node off the stack; stack must be locked to call this function
// --------------------------------------------------------------------
void PmmPop(PmmFrameInfo_t *stack);
//
// -- Push a new node onto the stack; stack must be locked to call this function
// --------------------------------------------------------------------------
void PmmPush(PmmFrameInfo_t *stack, frame_t frame, size_t count);
//
// -- add the frames to an existing block if possible, returning if the operation was successful
// ------------------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void PmmAddToStackNode(Spinlock_t *lock, PmmFrameInfo_t *stack, frame_t frame, size_t count);
//
// -- This is the worker function to find a block and allocate it
// -----------------------------------------------------------
EXTERN_C EXPORT KERNEL
frame_t PmmDoAllocAlignedFrames(Spinlock_t *lock, PmmFrameInfo_t *stack, const size_t count, const size_t bitAlignment);
//
// -- This is the worker function to find a block and allocate it
// -----------------------------------------------------------
EXTERN_C EXPORT KERNEL
frame_t _PmmDoRemoveFrame(PmmFrameInfo_t *stack, bool scrub);
//
// -- Allocate a frame from the pmm
// -----------------------------
EXTERN_C EXPORT KERNEL
frame_t PmmAllocateFrame(void);
//
// -- Allocate a frame from low memory in the pmm
// -------------------------------------------
EXTERN_C EXPORT INLINE
frame_t PmmAllocateLowFrame(void) {
    frame_t rv;             // frame returned by the worker; assigned inside the lock block

    archsize_t flags = SPINLOCK_BLOCK_NO_INT(pmm.lowLock) {
        // -- pop one frame from the low-memory stack; `false` == do not scrub it here
        rv = _PmmDoRemoveFrame(pmm.lowStack, false);
        SPINLOCK_RLS_RESTORE_INT(pmm.lowLock, flags);
    }

    return rv;
}
//
// -- Allocate a block of aligned frames; bitAlignment is the significance of the alignment (min is 12 bits)
// ------------------------------------------------------------------------------------------------------
EXTERN_C EXPORT INLINE
frame_t PmmAllocAlignedFrames(const size_t count, const size_t bitAlignment) {
    // -- delegate to the common worker, operating on the normal-memory stack
    return PmmDoAllocAlignedFrames(&pmm.normLock, pmm.normStack, count, bitAlignment);
}
//
// -- Same as above but from low mem; bitAlignment is significance of the alignment (min is 12 bits)
// ----------------------------------------------------------------------------------------------
EXTERN_C EXPORT INLINE
frame_t PmmAllocAlignedLowFrames(const size_t count, const size_t bitAlignment) {
    // -- delegate to the common worker, operating on the low-memory stack
    return PmmDoAllocAlignedFrames(&pmm.lowLock, pmm.lowStack, count, bitAlignment);
}
//
// -- Release a block of frames (very useful during initialization)
// -------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void PmmReleaseFrameRange(const frame_t frame, const size_t count);
//
// -- Release a single frame
// ----------------------
EXTERN_C EXPORT INLINE
void PmmReleaseFrame(const frame_t frame) {
    // -- releasing a single frame is just releasing a range of length 1
    PmmReleaseFrameRange(frame, 1);
}
//
// -- Scrub a frame in preparation the next allocation (includes clearing the frame)
// ------------------------------------------------------------------------------
EXTERN_C EXPORT INLINE
void PmmScrubFrame(const frame_t frame) {
    // -- scrubbing a frame means clearing its contents via the MMU helper
    MmuClearFrame(frame);
}
//
// -- This is the function to scrub a single block from the scrubStack
// ----------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void PmmScrubBlock(void);
//
// -- Initialize the PMM
// ------------------
EXTERN_C EXPORT LOADER
void PmmInit(void);
//
// -- Allocate an early frame before the PMM is put in charge
// -------------------------------------------------------
EXTERN_C EXPORT ENTRY
frame_t NextEarlyFrame(void);
//
// -- Clean/Invalidate PMM Manager structure
// --------------------------------------
#define CLEAN_PMM() CleanCache((archsize_t)&pmm, sizeof(Pmm_t))
#define INVALIDATE_PMM() InvalidateCache(&pmm, sizeof(Pmm_t))
<|start_filename|>arch/x86/CollectCpuid.cc<|end_filename|>
//===================================================================================================================
//
// CollectCpuid.cc -- Collect the information from Cpuid -- we will look at it later
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-10 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "hw-disc.h"
#include "cpu.h"
//
// -- Collect and store the CPUID info
// --------------------------------
EXTERN_C EXPORT LOADER
void CollectCpuid(void)
{
    // -- Leaf 0x00 returns the highest supported standard leaf in eax (plus the
    //    vendor string in ebx/edx/ecx); every other leaf is gated on that maximum
    CPUID(0x00, &localHwDisc->cpuid00eax, &localHwDisc->cpuid00ebx, &localHwDisc->cpuid00ecx,
            &localHwDisc->cpuid00edx);

    int max = localHwDisc->cpuid00eax;

    if (max >= 1) {
        CPUID(0x01, &localHwDisc->cpuid01eax, &localHwDisc->cpuid01ebx, &localHwDisc->cpuid01ecx,
                &localHwDisc->cpuid01edx);
    }

    if (max >= 2) {
        CPUID(0x02, &localHwDisc->cpuid02eax, &localHwDisc->cpuid02ebx, &localHwDisc->cpuid02ecx,
                &localHwDisc->cpuid02edx);
    }

    if (max >= 3) {
        CPUID(0x03, &localHwDisc->cpuid03eax, &localHwDisc->cpuid03ebx, &localHwDisc->cpuid03ecx,
                &localHwDisc->cpuid03edx);
    }

    if (max >= 4) {
        CPUID(0x04, &localHwDisc->cpuid04eax, &localHwDisc->cpuid04ebx, &localHwDisc->cpuid04ecx,
                &localHwDisc->cpuid04edx);
    }

    if (max >= 5) {
        // -- BUGFIX: this branch previously re-issued leaf 0x01, storing the wrong
        //    data in the cpuid05* fields; it must query leaf 0x05 (MONITOR/MWAIT)
        CPUID(0x05, &localHwDisc->cpuid05eax, &localHwDisc->cpuid05ebx, &localHwDisc->cpuid05ecx,
                &localHwDisc->cpuid05edx);
    }

    if (max >= 6) {
        CPUID(0x06, &localHwDisc->cpuid06eax, &localHwDisc->cpuid06ebx, &localHwDisc->cpuid06ecx,
                &localHwDisc->cpuid06edx);
    }

    if (max >= 7) {
        CPUID(0x07, &localHwDisc->cpuid07eax, &localHwDisc->cpuid07ebx, &localHwDisc->cpuid07ecx,
                &localHwDisc->cpuid07edx);
    }

    // -- (leaf 0x08 is reserved and intentionally not collected)

    if (max >= 9) {
        CPUID(0x09, &localHwDisc->cpuid09eax, &localHwDisc->cpuid09ebx, &localHwDisc->cpuid09ecx,
                &localHwDisc->cpuid09edx);
    }

    if (max >= 0xa) {
        CPUID(0x0a, &localHwDisc->cpuid0aeax, &localHwDisc->cpuid0aebx, &localHwDisc->cpuid0aecx,
                &localHwDisc->cpuid0aedx);
    }

    if (max >= 0xb) {
        CPUID(0x0b, &localHwDisc->cpuid0beax, &localHwDisc->cpuid0bebx, &localHwDisc->cpuid0becx,
                &localHwDisc->cpuid0bedx);
    }
}
<|start_filename|>arch/arm/mmu/MmuClearFrame.cc<|end_filename|>
//===================================================================================================================
//
// MmuClearFrame.cc -- Clear a frame before adding it into the paging structures formally
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Note that this function does not need to trigger a TLB flush on other cores since this is not a shared mapping.
// Only one CPU can get a lock to perform this function at a time, so by definition, no other cores require a TLB
// flush -- the state when the lock is released is the same as it was when the lock was obtained: nothing is mapped.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-06 Initial 0.3.0 ADCL Initial Version
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "spinlock.h"
#include "mmu.h"
//
// -- Mount a frame into the kernel address space and clear its contents
// ------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void MmuClearFrame(frame_t frame)
{
    //
    // -- The next order of business is to map this frame to clear it.  We carefully chose this
    //    location to be in the same TTL2 table as the management addresses for the TTL1 table.  However,
    //    it is a critical section and needs to be synchronized.  Therefore, obtain a lock before
    //    attempting to use that address.  This will always be done in kernel space.
    //    -----------------------------------------------------------------------------------------------
    Ttl2_t *ttl2Entry = KRN_TTL2_ENTRY(MMU_CLEAR_FRAME);
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(frameClearLock) {
        // -- clean the cache line holding the entry and drop any stale TLB entry first
        WriteDCCMVAC((uint32_t)ttl2Entry);
        InvalidatePage(MMU_CLEAR_FRAME);

        // -- build a cached, buffered, full-access data-page mapping for the target frame
        ttl2Entry->frame = frame;
        ttl2Entry->s = ARMV7_SHARABLE_TRUE;
        ttl2Entry->apx = ARMV7_MMU_APX_FULL_ACCESS;
        ttl2Entry->ap = ARMV7_MMU_AP_FULL_ACCESS;
        ttl2Entry->tex = ARMV7_MMU_TEX_NORMAL;
        ttl2Entry->c = ARMV7_MMU_CACHED;
        ttl2Entry->b = ARMV7_MMU_BUFFERED;
        ttl2Entry->nG = ARMV7_MMU_GLOBAL;
        ttl2Entry->fault = ARMV7_MMU_DATA_PAGE;         // written last: makes the entry valid

        // -- push the new entry to memory, refresh the TLB, zero the frame, then unmap it
        WriteDCCMVAC((uint32_t)ttl2Entry);
        InvalidatePage(MMU_CLEAR_FRAME);
        kMemSetB((void *)MMU_CLEAR_FRAME, 0, FRAME_SIZE);
        MmuUnmapPage(MMU_CLEAR_FRAME);

        SPINLOCK_RLS_RESTORE_INT(frameClearLock, flags);
    }
}
<|start_filename|>modules/kernel/src/timer/TimerCallBack.cc<|end_filename|>
//===================================================================================================================
//
// TimerCallBack.cc -- Handle the interrupt when the timer fires.
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This IRQ is used as the scheduler for preemptive multitasking. Since this is used as IRQ0, there is an EOI that
// needs to be issued to the interrupt controller. This can get messy, so I want to walk it through a bit here.
//
// First, recall that there is a context for the running process. This process was interrupted by the CPU and
// has all of that interrupt material on the user stack. The interrupt also disabled interrupts after storing the
// flags register on the stack. At the same time the PIC/APIC will be waiting for confirmation that the IRQ has
// been serviced.
//
// Then, in this context, the EOI is issued, allowing additional IRQs to be processed. But interrupts are still
// disabled, so we are still in control of the CPU. `ProcessReschedule()` is called to determine the next process to
// give to the CPU. Finally, `ProcessSwitch()` is called to actually perform the context switch to a new process.
// This context switch will also save all the necessary registers and flags on the stack (duplicating them if they
// are already there).
//
// At this point, interrupts are still disabled. To complicate matters, the new process may or may not have been
// interrupted, but rather voluntarily gave up the CPU because it was blocked. The point is the path out of any
// particular context switch (an unblocked process) will not be the same as the path in (an interrupt). But,
// that does not cause a problem since the stack was changed as well and the `ret` opcodes will hand control back
// to the proper functions. Eventually, the flags will be popped off the stack (at least once) and will then
// restore the interrupts for that context. This will then allow additional IRQs to interrupt the CPU.
//
// It took me a while to get my head around all this, as I naturally want everything to be symmetric and this is
// not.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Sep-16 Leveraged from Royalty
// 2012-Sep-23 set new SpuriousIRQ handler
// 2013-Sep-03 #73 Encapsulate Process Structure
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2018-Oct-28 Initial 0.1.0 ADCL Copied this function from Century32 to Century-OS
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "process.h"
#include "interrupt.h"
#include "serial.h"
#include "hardware.h"
#include "timer.h"
//
// -- Handle a timer IRQ
// ------------------
EXPORT KERNEL
void TimerCallBack(UNUSED(isrRegs_t *reg))
{
    uint64_t now = 0;

    TimerEoi(timerControl);             // take care of this while interrupts are disabled!

    ProcessLockAndPostpone();

#if DEBUG_ENABLED(TimerCallBack)
    kprintf("handling timer\n");
#endif

    // -- only cpu 0 drives the platform tick, and only when one is installed
    //    NOTE(review): the guard reads the member `timerControl->TimerPlatformTick` but the
    //    call goes through `TimerPlatformTick(timerControl)` -- presumably a wrapper/macro
    //    dispatching through that member; confirm they refer to the same function
    if (timerControl->TimerPlatformTick && thisCpu->cpuNum == 0) TimerPlatformTick(timerControl);

    //
    // -- here we look for any sleeping tasks to wake
    //    -------------------------------------------
    now = TimerCurrentCount(timerControl);
    if (now >= scheduler.nextWake && IsListEmpty(&scheduler.listSleeping) == false) {
        uint64_t newWake = (uint64_t)-1;            // sentinel: no future wake time found yet

        //
        // -- loop through and find the processes to wake up
        //    ----------------------------------------------
        ListHead_t::List_t *list = scheduler.listSleeping.list.next;
        while (list != &scheduler.listSleeping.list) {
            ListHead_t::List_t *next = list->next;  // must be saved before it is changed below
            Process_t *wrk = FIND_PARENT(list, Process_t, stsQueue);

            // -- wake anything past due; otherwise track the earliest remaining wake time
            if (now >= wrk->wakeAtMicros) {
                wrk->wakeAtMicros = 0;
                ListRemoveInit(&wrk->stsQueue);
                ProcessDoUnblock(wrk);
            } else if (wrk->wakeAtMicros < newWake) newWake = wrk->wakeAtMicros;

            list = next;
        }

        scheduler.nextWake = newWake;
    }

    //
    // -- adjust the quantum and see if it is time to change tasks
    //    --------------------------------------------------------
    if (currentThread != NULL) {
        // -- when the running thread's quantum is used up, request a reschedule; the
        //    actual switch happens in ProcessUnlockAndSchedule() below
        if (AtomicDec(&currentThread->quantumLeft) <= 0) {
#if DEBUG_ENABLED(TimerCallBack)
            kprintf("Preempt\n");
#endif
            scheduler.processChangePending = true;
        }
    }

    ProcessUnlockAndSchedule();
}
<|start_filename|>platform/pc/acpi/AcpiGetTableSig.cc<|end_filename|>
//===================================================================================================================
//
// AcpiGetTableSig.cc -- Get the signature of the table at an address
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-06 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "mmu.h"
#include "hardware.h"
//
// -- get the table signature (and check its valid); return 0 if invalid
//    ------------------------------------------------------------------
//    `loc` is the physical/identity-mapped address of a candidate ACPI table.
//    The 4-byte signature at that address is read, validated via
//    AcpiCheckTable(), logged, and returned; 0 is returned for any address
//    that is NULL, cannot be mapped, or fails validation.
EXTERN_C EXPORT LOADER
uint32_t AcpiGetTableSig(archsize_t loc)
{
    kprintf("Checking ACPI table at %p\n", loc);

    // -- reject a NULL table pointer *before* touching the MMU; previously the
    //    page was mapped first, which wired frame 0 into the address space for
    //    a NULL pointer before the check could skip it
    if (loc == 0) {
        kprintf("... not mapped: skipping!\n");
        return 0;
    }

    // -- identity-map the page holding the table if it is not already mapped
    if (!MmuIsMapped(loc)) {
        MmuMapToFrame(loc, loc >> 12, PG_KRN);
    }

    // -- if the mapping still did not take, the table cannot be read
    if (!MmuIsMapped(loc)) {
        kprintf("... not mapped: skipping!\n");
        return 0;
    }

    // -- the signature is the first 4 bytes of every ACPI table header
    uint32_t rv = *((uint32_t *)loc);

    if (!AcpiCheckTable(loc, rv)) return 0;

    // -- log the table type; only the MADT ("APIC") is actually consumed here
    switch(rv) {
    case MAKE_SIG("APIC"):
        kprintf(".. APIC: Multiple APIC Description Table\n");
        AcpiReadMadt(loc);
        break;

    case MAKE_SIG("BERT"):
        kprintf(".. BERT: Boot Error Record Table\n");
        break;

    case MAKE_SIG("BGRT"):
        kprintf(".. BGRT: Boot Graphics Resource Table\n");
        break;

    case MAKE_SIG("BOOT"):
        kprintf(".. BOOT: Simple Boot Flag Table\n");
        break;

    case MAKE_SIG("CPEP"):
        kprintf(".. CPEP: Corrected Platform Error Polling Table\n");
        break;

    case MAKE_SIG("CSRT"):
        kprintf(".. CSRT: Core System Resource Table\n");
        break;

    case MAKE_SIG("DBG2"):
        kprintf(".. DBG2: Debug Port Table 2\n");
        break;

    case MAKE_SIG("DBGP"):
        kprintf(".. DBGP: Debug Port Table\n");
        break;

    case MAKE_SIG("DMAR"):
        kprintf(".. DMAR: DMA Remapping Table\n");
        break;

    case MAKE_SIG("DRTM"):
        kprintf(".. DRTM: Dynamic Root of Trust for Measurement Table\n");
        break;

    case MAKE_SIG("DSDT"):
        kprintf(".. DSDT: Differentiated System Description Table\n");
        break;

    case MAKE_SIG("ECDT"):
        kprintf(".. ECDT: Embedded Controller Boot Resources Table\n");
        break;

    case MAKE_SIG("EINJ"):
        kprintf(".. EINJ: Error Injection Table\n");
        break;

    case MAKE_SIG("ERST"):
        kprintf(".. ERST: Error Record Serialization Table\n");
        break;

    case MAKE_SIG("ETDT"):
        kprintf(".. ETDT: Event Timer Description Table (Obsolete)\n");
        break;

    case MAKE_SIG("FACP"):
        kprintf(".. FACP: Fixed ACPI Description Table (FADT)\n");
        break;

    case MAKE_SIG("FACS"):
        kprintf(".. FACS: Firmware ACPI Control Structure\n");
        break;

    case MAKE_SIG("FPDT"):
        kprintf(".. FPDT: Firmware Performance Data Table\n");
        break;

    case MAKE_SIG("GTDT"):
        kprintf(".. GTDT: Generic Timer Description Table\n");
        break;

    case MAKE_SIG("HEST"):
        kprintf(".. HEST: Hardware Error Source Table\n");
        break;

    case MAKE_SIG("HPET"):
        kprintf(".. HPET: High Performance Event Timer\n");
        break;

    case MAKE_SIG("IBFT"):
        kprintf(".. IBFT: iSCSI Boot Firmware Table\n");
        break;

    case MAKE_SIG("IVRS"):
        kprintf(".. IVRS: I/O Virtualization Reporting Structure\n");
        break;

    case MAKE_SIG("MCFG"):
        kprintf(".. MCFG: PCI Express memory mapped configuration space base address Description Table\n");
        break;

    case MAKE_SIG("MCHI"):
        kprintf(".. MCHI: Management Controller Host Interface Table\n");
        break;

    case MAKE_SIG("MPST"):
        kprintf(".. MPST: Memory Power State Table\n");
        break;

    case MAKE_SIG("MSCT"):
        // -- fixed: this message was missing its terminating newline
        kprintf(".. MSCT: Maximum System Characteristics Table\n");
        break;

    case MAKE_SIG("MSDM"):
        kprintf(".. MSDM: Microsoft Data Management Table\n");
        break;

    case MAKE_SIG("OEM0"):
    case MAKE_SIG("OEM1"):
    case MAKE_SIG("OEM2"):
    case MAKE_SIG("OEM3"):
    case MAKE_SIG("OEM4"):
    case MAKE_SIG("OEM5"):
    case MAKE_SIG("OEM6"):
    case MAKE_SIG("OEM7"):
    case MAKE_SIG("OEM8"):
    case MAKE_SIG("OEM9"):
    case MAKE_SIG("OEMA"):
    case MAKE_SIG("OEMB"):
    case MAKE_SIG("OEMC"):
    case MAKE_SIG("OEMD"):
    case MAKE_SIG("OEME"):
    case MAKE_SIG("OEMF"):
    case MAKE_SIG("OEMG"):
    case MAKE_SIG("OEMH"):
    case MAKE_SIG("OEMI"):
    case MAKE_SIG("OEMJ"):
    case MAKE_SIG("OEMK"):
    case MAKE_SIG("OEML"):
    case MAKE_SIG("OEMM"):
    case MAKE_SIG("OEMN"):
    case MAKE_SIG("OEMO"):
    case MAKE_SIG("OEMP"):
    case MAKE_SIG("OEMQ"):
    case MAKE_SIG("OEMR"):
    case MAKE_SIG("OEMS"):
    case MAKE_SIG("OEMT"):
    case MAKE_SIG("OEMU"):
    case MAKE_SIG("OEMV"):
    case MAKE_SIG("OEMW"):
    case MAKE_SIG("OEMX"):
    case MAKE_SIG("OEMY"):
    case MAKE_SIG("OEMZ"):
        kprintf(".. OEMx: OEM Specific Information Table\n");
        break;

    case MAKE_SIG("PMTT"):
        kprintf(".. PMTT: Platform Memory Topology Table\n");
        break;

    case MAKE_SIG("PSDT"):
        kprintf(".. PSDT: Persistent System Description Table\n");
        break;

    case MAKE_SIG("RASF"):
        kprintf(".. RASF: ACPI RAS Feature Table\n");
        break;

    case MAKE_SIG("RSDT"):
        kprintf(".. RSDT: Root System Description Table\n");
        break;

    case MAKE_SIG("SBST"):
        kprintf(".. SBST: Smart Battery Specification Table\n");
        break;

    case MAKE_SIG("SLIC"):
        kprintf(".. SLIC: Microsoft Software Licensing Table Specification\n");
        break;

    case MAKE_SIG("SLIT"):
        kprintf(".. SLIT: System Locality Distance Information Table\n");
        break;

    case MAKE_SIG("SPCR"):
        kprintf(".. SPCR: Serial Port Console Redirection Table\n");
        break;

    case MAKE_SIG("SPMI"):
        kprintf(".. SPMI: Server Platform Management Interface Table\n");
        break;

    case MAKE_SIG("SRAT"):
        kprintf(".. SRAT: System Resource Affinity Table\n");
        break;

    case MAKE_SIG("SSDT"):
        kprintf(".. SSDT: Secondary System Description Table\n");
        break;

    case MAKE_SIG("TCPA"):
        kprintf(".. TCPA: Trusted Computing Platform Alliance Capabilities Table\n");
        break;

    case MAKE_SIG("TPM2"):
        kprintf(".. TPM2: Trusted Platform Module 2 Table\n");
        break;

    case MAKE_SIG("UEFI"):
        kprintf(".. UEFI: UEFI ACPI Data Table\n");
        break;

    case MAKE_SIG("WAET"):
        kprintf(".. WAET: Windows ACPI Emulated Deviced Table\n");
        break;

    case MAKE_SIG("WDAT"):
        kprintf(".. WDAT: Watch Dog Action Table\n");
        break;

    case MAKE_SIG("XSDT"):
        kprintf(".. XSDT: Extended System Description Table\n");
        break;

    default:
        {
            // -- split the 4-byte signature into printable characters,
            //    low byte first (signatures are stored little-endian)
            char l1 [2] = {0};
            char l2 [2] = {0};
            char l3 [2] = {0};
            char l4 [2] = {0};

            l4[0] = (rv >> 24) & 0xff;
            l3[0] = (rv >> 16) & 0xff;
            l2[0] = (rv >> 8) & 0xff;
            l1[0] = rv & 0xff;

            kprintf(".. The table for this signature is invalid; ignoring table %s%s%s%s\n", l1, l2, l3, l4);
            return 0;
        }
    }

    return rv;
}
<|start_filename|>platform/pc/interrupts/IsrInt14.cc<|end_filename|>
//===================================================================================================================
//
// IsrInt14.cc -- This is Intel Reserved Interrupt
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// An unused Interrupt
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Oct-11 Initial 0.1.0 ADCL Initial version
// 2019-Feb-09 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "interrupt.h"
//
// -- Handler for reserved interrupt vector 14 -- nothing should ever raise this
//    vector, so report the event and dump the captured register state
//    ---------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void IsrInt14(isrRegs_t *regs)
{
    // -- announce the unexpected interrupt on the debug console...
    kprintf("\nRESERVED INT14\n");

    // -- ...and dump the register snapshot taken at interrupt entry
    IsrDumpState(regs);
}
<|start_filename|>platform/pc/init/PlatformInit.cc<|end_filename|>
//===================================================================================================================
//
// PlatformInit.cc -- Handle the initialization for the pc platform
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Complete the platform initialization.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-18 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "pic.h"
//
// -- Complete the platform initialization
//    ------------------------------------
EXTERN_C EXPORT LOADER
void PlatformInit(void)
{
    // -- select the PIC and timer implementations appropriate for this system
    PicPick();
    TimerPick();

    // -- the SMP path depends on the Local APIC timer; if it was not selected,
    //    degrade gracefully to a single-CPU configuration
    bool haveLapicTimer = assert_msg(timerControl == &lapicTimerControl, "LAPIC timer required");
    if (!haveLapicTimer) {
        cpus.cpusDiscovered = 1;
        cpus.cpusRunning = 1;
    }
}
<|start_filename|>arch/x86/ProcessNewStack.cc<|end_filename|>
//===================================================================================================================
//
// ProcessNewStack.cc -- for a new process, create its stack so we can return from ProcessSwitch()
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-16 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
#include "mmu.h"
#include "pmm.h"
#include "stacks.h"
#include "process.h"
//
// -- build the stack needed to start a new process
//    ---------------------------------------------
//    `proc` is the process being created; `startingAddr` is the entry point it
//    will begin executing at. Returns the first physical frame of the new stack.
//    The stack is seeded so that the first context switch "returns" into
//    ProcessStart and then into startingAddr.
EXPORT KERNEL
frame_t ProcessNewStack(Process_t *proc, void (*startingAddr)(void))
{
    archsize_t *stack;

    // -- allocate contiguous frames for the whole stack, aligned on a 2^12 boundary
    frame_t rv = PmmAllocAlignedFrames(STACK_SIZE / FRAME_SIZE, 12);

    // -- the stack is built through one shared temporary mapping; the spinlock
    //    (taken with interrupts disabled) serializes use of that mapping address
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(mmuStackInitLock) {
        MmuMapToFrame(MMU_STACK_INIT_VADDR, rv, PG_KRN | PG_WRT);

        // -- start at the top and push the initial "return" frame; the pushed
        //    values are what ProcessSwitch() will pop on the first switch-in
        stack = (archsize_t *)(MMU_STACK_INIT_VADDR + STACK_SIZE);
        // *--stack = ProcessEnd; // -- just in case, we will self-terminate
        *--stack = (archsize_t)startingAddr; // -- this is the process starting point
        *--stack = (archsize_t)ProcessStart; // -- initialize a new process
        *--stack = 0; // -- ebx
        *--stack = 0; // -- esi
        *--stack = 0; // -- edi
        *--stack = 0; // -- ebp

        MmuUnmapPage(MMU_STACK_INIT_VADDR);
        SPINLOCK_RLS_RESTORE_INT(mmuStackInitLock, flags);
    }

    // -- pick the permanent virtual location for this stack
    archsize_t stackLoc = StackFind(); // get a new stack
    assert(stackLoc != 0);

    // -- translate the stack pointer from the temporary mapping into the
    //    permanent location, then map the frame there
    proc->topOfStack = ((archsize_t)stack - MMU_STACK_INIT_VADDR) + stackLoc;
    // -- NOTE(review): only one frame is mapped here even though
    //    STACK_SIZE / FRAME_SIZE frames were allocated -- confirm that
    //    STACK_SIZE == FRAME_SIZE or that the remaining pages are mapped elsewhere
    MmuMapToFrame(stackLoc, rv, PG_KRN | PG_WRT);

    kprintf("the new process stack is located at %p (frame %p)\n", stackLoc, rv);

    return rv;
}
<|start_filename|>modules/kernel/src/debugger/DebugSchedulerStat.cc<|end_filename|>
//===================================================================================================================
//
// DebugSchedulerStat.cc -- Show the status of the scheduler (counts)
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-03 Initial v0.6.0a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "process.h"
#include "printf.h"
#include "debugger.h"
//
// -- Show the status of the scheduler queues
//    ---------------------------------------
//    Debugger command handler: dumps the scheduler lock state, which CPUs have
//    a process running, and the element count of every scheduler queue/list.
EXTERN_C EXPORT KERNEL
void DebugSchedulerStat(void)
{
    // -- park the other cores so the scheduler state cannot change mid-dump
    DebuggerEngage(DIPI_ENGAGE);

    kprintf(ANSI_FG_BLUE ANSI_ATTR_BOLD "Dumping the status of the scheduler:\n" ANSI_ATTR_NORMAL);

    // -- check the overall status of the scheduler (technically should never be
    //    locked here, since the debugger itself does not hold it)
    if (schedulerLock.lock) {
        kprintf("The scheduler is " ANSI_FG_RED ANSI_ATTR_BOLD "locked" ANSI_ATTR_NORMAL " by CPU%d\n",
                scheduler.lockCpu);
        kprintf(" The process running on this CPU is %p (%s)\n",
                cpus.perCpuData[scheduler.lockCpu].process, cpus.perCpuData[scheduler.lockCpu].process->command);
    } else {
        kprintf("The scheduler is " ANSI_FG_GREEN ANSI_ATTR_BOLD "unlocked" ANSI_ATTR_NORMAL"\n");
    }

    // -- report, per CPU, whether a process is currently assigned to it
    for (int i = 0; i < cpus.cpusRunning; i ++) {
        if (cpus.perCpuData[i].process) {
            kprintf(ANSI_ATTR_BOLD "CPU%d" ANSI_FG_GREEN " does " ANSI_ATTR_NORMAL
                    "have a process running on it\n", i);
        } else {
            kprintf(ANSI_ATTR_BOLD "CPU%d" ANSI_FG_RED " does not " ANSI_ATTR_NORMAL
                    "have a process running on it\n", i);
        }
    }

    // -- dump the element count of each priority queue and status list
    kprintf(ANSI_ATTR_BOLD " OS Queue" ANSI_ATTR_NORMAL " process count: " ANSI_ATTR_BOLD "%d\n"
            ANSI_ATTR_NORMAL, ListCount(&scheduler.queueOS));
    kprintf(ANSI_ATTR_BOLD " High Queue" ANSI_ATTR_NORMAL " process count: " ANSI_ATTR_BOLD "%d\n"
            ANSI_ATTR_NORMAL, ListCount(&scheduler.queueHigh));
    kprintf(ANSI_ATTR_BOLD " Normal Queue" ANSI_ATTR_NORMAL " process count: " ANSI_ATTR_BOLD "%d\n"
            ANSI_ATTR_NORMAL, ListCount(&scheduler.queueNormal));
    kprintf(ANSI_ATTR_BOLD " Low Queue" ANSI_ATTR_NORMAL " process count: " ANSI_ATTR_BOLD "%d\n"
            ANSI_ATTR_NORMAL, ListCount(&scheduler.queueLow));
    kprintf(ANSI_ATTR_BOLD " Idle Queue" ANSI_ATTR_NORMAL " process count: " ANSI_ATTR_BOLD "%d\n"
            ANSI_ATTR_NORMAL, ListCount(&scheduler.queueIdle));
    kprintf(ANSI_ATTR_BOLD " Blocked List" ANSI_ATTR_NORMAL " process count: " ANSI_ATTR_BOLD "%d\n"
            ANSI_ATTR_NORMAL, ListCount(&scheduler.listBlocked));
    kprintf(ANSI_ATTR_BOLD " Sleeping List" ANSI_ATTR_NORMAL " process count: " ANSI_ATTR_BOLD "%d\n"
            ANSI_ATTR_NORMAL, ListCount(&scheduler.listSleeping));
    kprintf(ANSI_ATTR_BOLD "Terminated List" ANSI_ATTR_NORMAL " process count: " ANSI_ATTR_BOLD "%d\n"
            ANSI_ATTR_NORMAL, ListCount(&scheduler.listTerminated));

    // -- let the other cores resume
    DebuggerRelease();
}
<|start_filename|>platform/bcm2836/serial/SerialHasChar.cc<|end_filename|>
//===================================================================================================================
//
// SerialHasChar.cc -- Determine if there is data in the buffer to be read
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-02 Initial v0.6.0a ADCL Initial Version
//
//===================================================================================================================
#include "types.h"
#include "hardware.h"
#include "serial.h"
//
// -- determine whether the receive buffer holds at least one character to read
//    (tests bit 0 -- "data ready" -- of the mini-UART line status register;
//    the original comment described the transmit side, which was misleading)
//    --------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
bool _SerialHasChar(SerialDevice_t *dev)
{
    // -- a missing device can never have data pending
    if (!dev) return false;

    // -- LSR bit 0 set => at least one received byte is waiting
    return (MmioRead(dev->base + AUX_MU_LSR_REG) & 1) != 0;
}
<|start_filename|>modules/kernel/src/debugger/DebugVars.cc<|end_filename|>
//===================================================================================================================
//
// DebugVars.cc -- Variables used by the debugger
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-02 Initial v0.6.0a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "debugger.h"
//
// -- This is the current variable that identifies the current state
//    (used as the index into dbgPrompts[] below)
//    --------------------------------------------------------------
EXPORT KERNEL_BSS
DebuggerState_t debugState;

//
// -- This is the buffer for the command being entered
//    ------------------------------------------------
EXPORT KERNEL_BSS
char debugCommand[DEBUG_COMMAND_LEN];

//
// -- For each state, this is the visual representation where on the command tree the user is and what the
//    valid commands are (indexed by state).
//    ----------------------------------------------------------------------------------------------------
EXPORT KERNEL_DATA
DebugPrompt_t dbgPrompts[] {
    // -- location prompt, allowed commands
    {"-", "scheduler,timer,msgq"}, // -- DBG_HOME
    {"sched", "show,status,run,ready,list,exit"}, // -- DBG_SCHED
    {"sched:ready", "all,os,high,normal,low,idle,exit"}, // -- DBG_SCHED_RDY
    {"sched:list", "blocked,sleeping,zombie,exit"}, // -- DBG_SCHED_LIST
    {"timer", "counts,config,exit"}, // -- DBG_TIMER
    {"msgq", "status,show,exit"}, // -- DBG_MSGQ
};

//
// -- This is the actual debug communication structure
//    (zero-initialized; shared between the cores participating in a debug stop)
//    ---------------------------------------------------------------------------
EXPORT KERNEL_BSS
DebugComm_t debugCommunication = {0};
<|start_filename|>modules/kernel/inc/mmu.h<|end_filename|>
//===================================================================================================================
//
// mmu.h -- This is the kernel MMU manager header.
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// All of these functions are written so that PD[1023] is recursively mapped. As a result, we know some things
// are in certain locations. For example, the Page Directory itself starts at virtual address 0xfffff000, no
// matter what process we are in. The Page Tables will be located at 0xffc00000 each. This then means that in
// order to manage any given Page Table structure, the calculations are consistent -- which differs from the
// loader MMU requirements. In particular, I do not need to know which cr3 I am managing.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- --------------------------------------------------------------------------
// 2018-Nov-10 Initial 0.1.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#pragma once
#define __MMU_H__
#include <stdint.h>
#include <stddef.h>
#include <stdbool.h>
#include "types.h"
#include "spinlock.h"
#include "arch-mmu.h"
//
// -- Some constants to help with mapping flags
//    -----------------------------------------
enum {
    PG_KRN = 0x00000001, // -- map as a kernel (supervisor) page
    PG_WRT = 0x00000002, // -- map the page writable
    PG_DEVICE = 0x80000000, // used for ARM
};

//
// -- This structure is used to trigger TLB flushes across multiple cores
//    -------------------------------------------------------------------
typedef struct TlbFlush_t {
    Spinlock_t lock; // -- serializes flush requests
    volatile archsize_t addr; // -- the address being flushed, visible to all cores
    AtomicInt_t count; // -- presumably counts the cores yet to acknowledge -- confirm in the IPI handler
} TlbFlush_t;

//
// -- this is the structure to control the TLB flushes
//    ------------------------------------------------
EXTERN EXPORT KERNEL_BSS
TlbFlush_t tlbFlush;

//
// -- With the page table structures given, map a virtual address to a physical frame
//    -------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void MmuMapToFrame(archsize_t addr, frame_t frame, int flags);

//
// -- Unmap a page from the page table; returns the frame that was mapped there
//    --------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
frame_t MmuUnmapPage(archsize_t addr);

//
// -- Clear a frame before formally adding it to the paging tables
//    ------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void MmuClearFrame(frame_t frame);

//
// -- Check of the address is mapped in the current paging tables
//    ------------------------------------------------------------
EXTERN_C EXPORT KERNEL
bool MmuIsMapped(archsize_t addr);

//
// -- Create a new set of paging tables for a new process
//    ---------------------------------------------------
EXTERN_C EXPORT KERNEL
frame_t MmuNewVirtualSpace(frame_t stack);

//
// -- Convert a virtual address to physical for the current paging tables
//    returns -1 if not mapped, which should be an invalid (unaligned) address for most archs
//    ---------------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
archsize_t MmuVirtToPhys(void *addr);
//
// -- Check a structure to see if it is fully mapped
//    `a` is the starting address and `z` its size in bytes; evaluates true only
//    if every page the span touches is mapped. The arguments are parenthesized
//    in the expansion so expressions such as `ptr + 1` are cast as a whole
//    (the cast otherwise binds tighter than `+`, yielding the wrong address).
//    NOTE: `a` and `z` are still evaluated more than once -- avoid side effects.
//    ----------------------------------------------
#define IS_MAPPED(a,z) ({ \
    bool __rv = true; \
    for (archsize_t __va = ((archsize_t)(a)) & ~0x0fff; __va <= (((archsize_t)(a)) + (z)); __va += PAGE_SIZE) { \
        __rv = __rv && MmuIsMapped(__va); \
    } \
    __rv; })
//
// -- The spinlock for clearing a page before giving it to the MMU
//    (presumably guards the temporary mapping used by MmuClearFrame -- confirm there)
//    ------------------------------------------------------------
EXTERN EXPORT KERNEL_DATA
Spinlock_t frameClearLock;
<|start_filename|>platform/pc/apic/LApicInit.cc<|end_filename|>
//===================================================================================================================
//
// LApicInit.cc -- Initialize the Local APIC (timer)
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-24 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "interrupt.h"
#include "printf.h"
#include "mmu.h"
#include "cpu.h"
#include "pic.h"
//
// -- Handler for the Local APIC spurious interrupt vector; emits a single
//    marker character so the event is visible on the debug console
//    ---------------------------------------------------------------------
EXTERN_C HIDDEN KERNEL
void LApicSpurious(isrRegs_t *regs)
{
    // -- the register snapshot is irrelevant for a spurious interrupt
    kprintf("!");
}
//
// -- Placeholder handler used while calibrating the timer; the interrupt only
//    needs to be acknowledged by the dispatch path, so the body is empty
//    -------------------------------------------------------------------------
EXTERN_C HIDDEN KERNEL
void LApicInitTimeout(isrRegs_t *regs)
{
    // -- intentionally empty
}
//
// -- Initialize the Local APIC part of the split architecture
//    --------------------------------------------------------
//    `dev` is the timer device to wire up; `freq` is the desired tick
//    frequency. CPU0 additionally programs the APIC base MSR, registers the
//    ISRs, and calibrates the timer against the PIT; the resulting divider is
//    kept in a function-static so the other CPUs reuse it.
EXTERN_C EXPORT LOADER
void _LApicInit(TimerDevice_t *dev, uint32_t freq)
{
    // -- the PIT-calibrated count; computed once on CPU0, reused by later CPUs
    static uint64_t factor = 0;

    if (!dev) return;

    int cpu = thisCpu->cpuNum;

    kprintf("Local APIC Init\n");

    // -- get the per cpu address
    dev->pic = picControl;
    dev->base = (TimerBase_t)LAPIC_MMIO;
    archsize_t base = dev->base;

    // -- one-time (CPU0-only) global setup of the APIC base address
    if (cpu == 0) {
        MmuDumpTables(LAPIC_MMIO);

        //
        // -- Take care of the initialization for the Local APIC address
        //    ----------------------------------------------------------
        kprintf("Base is %p whereas LAPIC_MMIO is %p\n", base, LAPIC_MMIO);
        kprintf("The value coming from MSR `0x1b` is: %p\n", RDMSR(0x1b));
        kprintf("The address of the target location is: %p\n", LAPIC_MMIO);

        // -- MSR 0x1b is IA32_APIC_BASE: keep the low flag bits, set the base
        //    address and bit 11 (APIC global enable)
        WRMSR(0x1b, LAPIC_MMIO | (1<<11) | (RDMSR(0x1b) & 0xfff)); // -- include global enable just in case
        kprintf("The updated value fpr MSR `0x1b` is: %p\n", RDMSR(0x1b));

        MmuMapToFrame(LAPIC_MMIO, LAPIC_MMIO >> 12, PG_DEVICE | PG_KRN | PG_WRT);
    }

    //
    // -- SW enable the Local APIC timer
    //    ------------------------------
    MmioWrite(base + LAPIC_ESR, 0);

    kprintf(".. Before setting the spurious interrupt at %p, the value is %p\n", base + LAPIC_SPURIOUS_VECT,
            MmioRead(base + LAPIC_SPURIOUS_VECT));
    __asm volatile("nop\n");
    // -- spurious vector 39 (0x27); bit 8 is the APIC software-enable bit
    MmioWrite(base + LAPIC_SPURIOUS_VECT, 39 | (1<<8));
    __asm volatile("nop\n");
    kprintf(".. After setting the spurious interrupt at %p, the value is %p\n", base + LAPIC_SPURIOUS_VECT,
            MmioRead(base + LAPIC_SPURIOUS_VECT));

    kprintf(".. The LAPIC error register is: %p\n", MmioRead(base + LAPIC_ESR));

    if (cpu == 0) {
        IsrRegister(32, LApicInitTimeout); // this is temporary until we get calibrated
        IsrRegister(39, LApicSpurious);
    }

    // -- here we initialize the LAPIC to a defined state -- taken from Century32
    MmioWrite(base + LAPIC_DEST_FMT, 0xffffffff); // ipi flat model??
    MmioWrite(base + LAPIC_LOGICAL_DEST, MmioRead(base + LAPIC_LOGICAL_DEST) | (1<<24)); // set logical apic to 1
    MmioWrite(base + LAPIC_LVT_TMR, (1<<16)); // mask the timer during setup
    MmioWrite(base + LAPIC_LVT_PERF, (1<<16)); // -- mask the remaining LVT entries as well
    MmioWrite(base + LAPIC_LVT_LINT0, (1<<16));
    MmioWrite(base + LAPIC_LVT_LINT1, (1<<16));
    MmioWrite(base + LAPIC_LVT_ERR, (1<<16));
    MmioWrite(base + LAPIC_TPR, 0); // -- accept all interrupt priorities
    MmioWrite(base + LAPIC_TMRDIV, 0x03); // divide value is 16
    MmioWrite(base + LAPIC_LVT_TMR, 32); // timer is vector 32; now unmasked

    // -- enable the PIC timer in one-shot mode (CPU0 calibration only)
    if (cpu == 0) {
        outb(0x61, (inb(0x61) & 0xfd) | 1);
        outb(0x43, 0xb2);

        //
        // -- So, here is the math:
        //    We need to divide the clock by 20 to have a value large enough to get a decent time.
        //    So, we will be measuring 1/20th of a second.
        // -- 1193180 Hz / 20 == 59659 cycles == e90b cycles
        outb(0x42, 0x0b);
        inb(0x60); // short delay
        // -- NOTE(review): port 0x60 is the keyboard data port; an I/O delay is
        //    more commonly done via port 0x80 -- confirm this is intentional
        outb(0x42, 0xe9);

        // -- now reset the PIT timer and start counting
        uint8_t tmp = inb(0x61) & 0xfe;
        outb(0x61, tmp);
        outb(0x61, tmp | 1);

        kprintf("Prior to calibration, the timer count is %p\n", MmioRead(base + LAPIC_TMRCURRCNT));

        // -- reset the APIC counter to -1
        MmioWrite(base + LAPIC_TMRINITCNT, 0xffffffff);

        kprintf("During calibration, the timer count is %p\n", MmioRead(base + LAPIC_TMRCURRCNT));

        // -- wait for the PIT output (port 0x61 bit 5) to signal the interval elapsed
        while (!(inb(0x61) & 0x20)) {} // -- busy wait here
        MmioWrite(base + LAPIC_LVT_TMR, (1<<16));

        // -- disable the PIC
        outb(0x21, 0xff);
        outb(0xa1, 0xff);

        //
        // -- Now we can calculate the cpu frequency, converting back to a full second
        //    ------------------------------------------------------------------------
        uint64_t cpuFreq = (0xffffffff - MmioRead(base + LAPIC_TMRCURRCNT)) * 16 * 20;
        factor = cpuFreq / freq / 16;

        // -- the init-count register is 32 bits, so the divider must fit in 32 bits
        if ((factor >> 32) != 0) {
            CpuPanicPushRegs("PANIC: The factor is too large for the architecture!\n");
        }

        kprintf("So, the calculated clock divider is %p\n", (uint32_t)factor);
    }

    // -- This will also unmask IRQ0 with the PIC, so nothing else should be needed
    PicRegisterHandler(dev->pic, IRQ0, 32, dev->TimerCallBack);

    //
    // -- Now, program the Timer
    //    ----------------------
    MmioWrite(base + LAPIC_TMRINITCNT, factor);
    MmioWrite(base + LAPIC_LVT_TMR, 32 | (0b01<<17)); // -- bit 17 selects periodic mode
}
<|start_filename|>modules/kernel/src/debugger/DebugSchedulerRun.cc<|end_filename|>
//===================================================================================================================
//
// DebugSchedulerRun.cc -- Dump the information from the processes currently running on each CPU
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// The 'interesting' bits of information I want to dump are:
// * Process Address
// * PID
// * Command
// * Virtual Address Space
// * Base Stack frame
// * Status
// * Priority
// * Quantum left
// * Time Used
// * Wake At (should be 0 or -1 -- I cannot recall)
// * Whether it is on a queue
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-04 Initial v0.6.0a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "serial.h"
#include "process.h"
#include "debugger.h"
//
// -- This function will clear the screen and print out the data headings
//    -------------------------------------------------------------------
//    Draws the fixed left-hand label column; PrintProcess() then draws one
//    box per CPU to the right of it.
EXTERN_C HIDDEN KERNEL
void PrintHeadings(void)
{
// -- shorthand for the bold-blue label attribute and the reset attribute
#define B ANSI_ATTR_BOLD ANSI_FG_BLUE
#define N ANSI_ATTR_NORMAL

    // -- clear the screen and home the cursor before drawing the label box
    kprintf(ANSI_CLEAR ANSI_SET_CURSOR(0,0));
    kprintf("+------------------------+\n");
    kprintf("| " B "CPU" N " |\n");
    kprintf("+------------------------+\n");
    kprintf("| " B "Process Address:" N " |\n");
    kprintf("| " B "Process ID:" N " |\n");
    kprintf("| " B "Command:" N " |\n");
    kprintf("| " B "Virtual Address Space:" N " |\n");
    kprintf("| " B "Base Stack Frame:" N " |\n");
    kprintf("| " B "Status:" N " |\n");
    kprintf("| " B "Priority:" N " |\n");
    kprintf("| " B "Quantum Left:" N " |\n");
    kprintf("| " B "Time Used:" N " |\n");
    kprintf("| " B "Wake At:" N " |\n");
    kprintf("| " B "Queue Status:" N " |\n");
    kprintf("+------------------------+\n");

// -- keep the shorthand local to this function
#undef B
#undef N
}
//
// -- Output the interesting values for a CPU
//    ---------------------------------------
//    Draws one column-box for `cpu` containing the fields of `proc`, aligned
//    against the label column produced by PrintHeadings(). Does nothing when
//    no process is running on the CPU.
EXTERN_C HIDDEN KERNEL
void PrintProcess(int cpu, volatile Process_t *proc)
{
    if (!proc) return;

    // -- horizontal offset of this CPU's box: 25 columns of labels plus
    //    20 columns per preceding CPU ("\x1b[%dC" moves the cursor right)
    int fwd = (cpu * 20) + 25;

    kprintf(ANSI_SET_CURSOR(0,0));
    kprintf("\x1b[%dC+-------------------+\n", fwd);
    kprintf("\x1b[%dC| " ANSI_ATTR_BOLD "CPU%d" ANSI_ATTR_NORMAL " |\n", fwd, cpu);
    kprintf("\x1b[%dC+-------------------+\n", fwd);
    // -- each row: move right, open the cell, pad the value to 17 columns, close it
    kprintf("\x1b[%dC| ", fwd); DbgSpace(17, kprintf("%x", proc)); kprintf("|\n");
    kprintf("\x1b[%dC| ", fwd); DbgSpace(17, kprintf("%d", proc->pid)); kprintf("|\n");
    kprintf("\x1b[%dC| ", fwd); DbgSpace(17, kprintf("%s", proc->command)); kprintf("|\n");
    kprintf("\x1b[%dC| ", fwd); DbgSpace(17, kprintf("%x", proc->virtAddrSpace)); kprintf("|\n");
    kprintf("\x1b[%dC| ", fwd); DbgSpace(17, kprintf("%x", proc->ssAddr)); kprintf("|\n");
    kprintf("\x1b[%dC| ", fwd); DbgSpace(17, kprintf("%s", ProcStatusStr(proc->status))); kprintf("|\n");
    kprintf("\x1b[%dC| ", fwd); DbgSpace(17, kprintf("%s", ProcPriorityStr(proc->priority))); kprintf("|\n");
    kprintf("\x1b[%dC| ", fwd); DbgSpace(17, kprintf("%d", AtomicRead(&proc->quantumLeft))); kprintf("|\n");
    kprintf("\x1b[%dC| ", fwd); DbgSpace(17, kprintf("%d", (uint32_t)proc->timeUsed)); kprintf("|\n");
    kprintf("\x1b[%dC| ", fwd); DbgSpace(17, kprintf("%d", (uint32_t)proc->wakeAtMicros)); kprintf("|\n");
    // -- a self-linked stsQueue node means the process is not enqueued anywhere
    if (proc->stsQueue.next == &proc->stsQueue) {
        kprintf("\x1b[%dC| " ANSI_FG_GREEN ANSI_ATTR_BOLD "Not on a queue" ANSI_ATTR_NORMAL " |\n", fwd);
    } else {
        kprintf("\x1b[%dC| " ANSI_FG_RED ANSI_ATTR_BOLD "On some queue" ANSI_ATTR_NORMAL " |\n", fwd);
    }
    kprintf("\x1b[%dC+-------------------+\n", fwd);
}
//
// -- Dump the interesting values from the running processes on each CPU
//    ------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void DebugSchedulerRunning(void)
{
    // -- park the other cores so the per-CPU data cannot change while printing
    DebuggerEngage(DIPI_ENGAGE);

    // -- draw the label column first, then one box per running CPU
    PrintHeadings();

    int cpu = 0;
    while (cpu < cpus.cpusRunning) {
        PrintProcess(cpu, cpus.perCpuData[cpu].process);
        cpu ++;
    }

    // -- let the other cores resume
    DebuggerRelease();
}
<|start_filename|>platform/bcm2836/init/PlatformInit.cc<|end_filename|>
//===================================================================================================================
//
// PlatformInit.cc -- Handle the initialization for the rpi2b platform
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Complete the platform initialization.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-18 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "pic.h"
#include "interrupt.h"
#include "platform.h"
//
// -- Complete the platform initialization
//    ------------------------------------
EXTERN_C EXPORT LOADER
void PlatformInit(void)
{
    // -- the exception vectors must be installed before any interrupt can be taken
    ExceptionInit();

    // -- enable the core mailbox-0 IRQ and hook its handler onto vector 0x64
    //    NOTE(review): the IRQ is unmasked before the handler is registered;
    //    presumably interrupts are still disabled at the CPU here -- confirm
    //    against the boot sequence
    PicUnmaskIrq(picControl, BCM2836_CORE_MAILBOX0);
    IsrRegister(0x64, PicMailbox0Handler);
}
<|start_filename|>platform/pc/pic/PicVars.cc<|end_filename|>
//===================================================================================================================
//
// PicVars.cc -- These are the variables for the x86 Pic
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "pic.h"
//
// -- This is the device description that is used to output data to the serial port during loader initialization
// ----------------------------------------------------------------------------------------------------------
EXPORT KERNEL_DATA
PicDevice_t pic8259 = {
    .ipiReady = false,      // -- starts false; flipped true later once the other cores are up (see kInit: picControl->ipiReady)
    .PicInit = _PicInit,
    .PicRegisterHandler = _PicRegisterHandler,
    .PicMaskIrq = _PicMaskIrq,
    .PicUnmaskIrq = _PicUnmaskIrq,
    .PicEoi = _PicEoi,
    .PicBroadcastIpi = (void (*)(PicDevice_t *, int))EmptyFunction,     // -- the legacy 8259 has no IPI mechanism; stubbed as a no-op
};
<|start_filename|>platform/pc/acpi/AcpiReadRsdt.cc<|end_filename|>
//===================================================================================================================
//
// AcpiReadRsdt.cc -- Validate and read the RSDT table
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-06 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "mmu.h"
#include "hardware.h"
//
// -- read the rsdt table
// -------------------
EXTERN_C EXPORT LOADER
bool AcpiReadRsdt(archsize_t loc)
{
    kprintf("Reading the RSDT\n");

    // -- make sure the location is usable, then verify the table signature/checksum
    CheckAcpi(loc);
    if (!AcpiCheckTable(loc, MAKE_SIG("RSDT"))) {
        kprintf("The RSDT does not match the required checks\n");
        return false;
    }

    // -- the RSDT body is an array of 32-bit pointers to the other ACPI tables;
    //    visit each non-NULL entry
    RSDT_t *table = (RSDT_t *)loc;
    uint32_t count = (table->length - ACPI_HDR_SIZE) / sizeof(uint32_t);

    for (uint32_t n = 0; n < count; n ++) {
        kprintf("The address for entry %x is %p\n", n, table->entry[n]);
        if (table->entry[n] != 0) AcpiGetTableSig(table->entry[n]);
    }

    return true;
}
<|start_filename|>platform/bcm2836/inc/platform-timer.h<|end_filename|>
//===================================================================================================================
//
// platform-timer.h -- Timer definitions and functions for the bcm2835
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#pragma once
#ifndef __TIMER_H__
# error "Use #include \"timer.h\" and it will pick up this file; do not #include this file directly."
#endif
#include "cpu.h"
//
// -- the type used to refer to the timer base; for this bcm2836 port it is just an
//    address-sized value (an earlier note here said "on x86" -- copy-paste leftover)
//    -----------------------------------------------------------------------------
typedef archsize_t TimerBase_t;
//
// -- Read the low level timer value
//    ------------------------------
struct TimerDevice_t;
EXTERN_C EXPORT KERNEL
uint64_t SysTimerCount(struct TimerDevice_t *);
//
// -- These are the register offsets (from the timer base) we will use for the timer
//    ------------------------------------------------------------------------------
#define TIMER_CONTROL 0x00
#define TIMER_PRESCALAR 0x08
#define TIMER_LOCAL_INT_ROUTING 0x24
#define TIMER_LOCAL_CONTROL 0x34
#define TIMER_WRITE_FLAGS 0x38
#define TIMER_INTERRUPT_CONTROL 0x40
#define MAILBOX_INTERRUPT_CONTROL 0x50
#define TIMER_IRQ_SOURCE 0x60
#define TIMER_FIQ_SOURCE 0x70
//
// -- CP15 encodings for the ARM generic-timer registers, accessed through the
//    MRC/MCR (32-bit) and MRRC/MCRR (64-bit) macros
//    -------------------------------------------------------------------------
#define CNTFRQ "p15, 0, %0, c14, c0, 0"     // -- counter-timer frequency register
#define READ_CNTFRQ() MRC(CNTFRQ)
#define CNTP_CTL "p15, 0, %0, c14, c2, 1"   // -- physical timer control register
#define READ_CNTP_CTL() MRC(CNTP_CTL)
#define WRITE_CNTP_CTL(val) MCR(CNTP_CTL,val)
#define CNTP_TVAL "p15, 0, %0, c14, c2, 0"  // -- physical timer value (countdown) register
#define READ_CNTP_TVAL() MRC(CNTP_TVAL)
#define WRITE_CNTP_TVAL(val) MCR(CNTP_TVAL,val)
#define CNTPCT "p15, 0, %0, %1, c14"        // -- 64-bit physical counter (read-only; MRRC form)
#define READ_CNTPCT() MRRC(CNTPCT)
#define CNTP_CVAL "p15, 2, %0, %1, c14"     // -- 64-bit physical compare value (MRRC/MCRR form)
#define READ_CNTP_CVAL() MRRC(CNTP_CVAL)
#define WRITE_CNTP_CVAL(val) MCRR(CNTP_CVAL,val)
<|start_filename|>modules/kernel/src/frame-buffer/FrameBufferClear.cc<|end_filename|>
//===================================================================================================================
//
// FrameBufferClear.cc -- Clear the frame buffer, setting the contents to the bgcolor
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2017-May-03 Initial 0.0.0 ADCL Initial version
// 2018-Jun-13 Initial 0.1.0 ADCL Copied this function from century to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "hw-disc.h"
#include "printf.h"
#include "fb.h"
//
// -- Clear the screen defined by the frame buffer
// --------------------------------------------
void FrameBufferClear(void)
{
//
// -- calculate the number of 16-bit words to write (rows * cols)
// -----------------------------------------------------------
size_t cnt = GetFrameBufferHeight() * GetFrameBufferWidth();
uint16_t *b = (uint16_t *)GetFrameBufferAddr();
kprintf("Attempting the clear the monitor screen at address %p\n", b);
kMemSetW(b, GetBgColor(), cnt);
SetRowPos(0);
SetColPos(0);
kprintf(".. Done!\n");
}
<|start_filename|>modules/kernel/src/heap/HeapExpand.cc<|end_filename|>
//===================================================================================================================
//
// HeapExpand.cc -- Expand the size of the heap
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-12 Initial v0.6.1b ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "pmm.h"
#include "printf.h"
#include "mmu.h"
#include "heap.h"
//
// -- Expand the heap size (we have the heap lock)
// --------------------------------------------
EXTERN_C EXPORT KERNEL
size_t HeapExpand(void)
{
    // -- heap expansion is currently disabled: this early return short-circuits the
    //    function, leaving everything below unreachable until the TODO is resolved
    // TODO: remove the following line
    return 0;

    if (!assert_msg(kHeap->endAddr < kHeap->maxAddr, "All Heap memory allocated; unable to create more")) {
        return 0;
    }

    kprintf("Expanding heap...\n");

    size_t rv = 0;                                      // -- number of bytes actually added
    byte_t *newEnd = kHeap->endAddr + HEAP_SIZE_INCR;   // -- proposed new end, capped at maxAddr below

    if (newEnd > kHeap->maxAddr) newEnd = kHeap->maxAddr;

    // -- `>> 12` assumes 4KB pages; consistent with the PAGE_SIZE stepping below -- TODO confirm
    kprintf(".. new end will be %p (%d additional pages)\n", newEnd, (newEnd - kHeap->endAddr) >> 12);

    // -- allocate and map one frame at a time until the new end address is reached
    while (kHeap->endAddr < newEnd) {
        kprintf(".. getting a frame...\n");
        frame_t frame = PmmAllocateFrame();
        kprintf(".. mapping\n");
        MmuMapToFrame((archsize_t)kHeap->endAddr, frame, PG_KRN | PG_WRT);
        kprintf(".. done\n");
        kHeap->endAddr += PAGE_SIZE;
        rv += PAGE_SIZE;
    }

    kprintf("Heap expanded by %d bytes\n", rv);
    return rv;
}
<|start_filename|>modules/kernel/src/debugger/DebugParse.cc<|end_filename|>
//===================================================================================================================
//
// DebugParse.cc -- Parse the buffer for a command, and return its token
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// So, a programmer's note: This function is incredibly inefficient!! It's horrible. A better solution would be
// to use flex to tokenize an input stream. However, for this to work, I would need to prepare several C-runtime
// library functions to support the flex functions. I do not yet have enough of the kernel available to support
// that effort. So, instead, this is currently a brute force method.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-03 Initial v0.6.0a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "debugger.h"
//
// -- Parse the entered command line for the next command
// ---------------------------------------------------
EXTERN_C EXPORT KERNEL
DebuggerCommand_t DebugParse(DebuggerState_t state)
{
    DebuggerCommand_t rv = CMD_ERROR;

    // -- down-case the first word of the command; the first space (if any) is
    //    replaced with '\0' so the word can be compared as a stand-alone string
    for (size_t i = 0; i < kStrLen(debugCommand); i ++ ) {
        if (debugCommand[i] >= 'A' and debugCommand[i] <= 'Z') {
            debugCommand[i] = debugCommand[i] - 'A' + 'a';
        }

        if (debugCommand[i] == ' ') {
            debugCommand[i] = '\0';
            break;
        }
    }

    // -- A few global commands, accepted regardless of the debugger state
    if (kStrCmp(debugCommand, "help") == 0) { rv = CMD_HELP; goto exit; }
    if (kStrCmp(debugCommand, "?") == 0) { rv = CMD_HELP; goto exit; }
    if (kStrCmp(debugCommand, "exit") == 0) { rv = CMD_EXIT; goto exit; }
    if (kStrCmp(debugCommand, "x") == 0) { rv = CMD_EXIT; goto exit; }
    if (kStrCmp(debugCommand, "quit") == 0) { rv = CMD_EXIT; goto exit; }
    if (kStrCmp(debugCommand, "q") == 0) { rv = CMD_EXIT; goto exit; }

    // -- the remaining commands are only valid in specific states
    switch (state) {
    case DBG_HOME:
        if (kStrCmp(debugCommand, "sched") == 0) { rv = CMD_SCHED; goto exit; }
        if (kStrCmp(debugCommand, "scheduler") == 0) { rv = CMD_SCHED; goto exit; }
        if (kStrCmp(debugCommand, "timer") == 0) { rv = CMD_TIMER; goto exit; }
        if (kStrCmp(debugCommand, "msgq") == 0) { rv = CMD_MSGQ; goto exit; }

        break;

    case DBG_SCHED:
        if (kStrCmp(debugCommand, "show") == 0) { rv = CMD_SHOW; goto exit; }
        if (kStrCmp(debugCommand, "stat") == 0) { rv = CMD_STAT; goto exit; }
        if (kStrCmp(debugCommand, "status") == 0) { rv = CMD_STAT; goto exit; }
        if (kStrCmp(debugCommand, "run") == 0) { rv = CMD_RUNNING; goto exit; }
        if (kStrCmp(debugCommand, "running") == 0) { rv = CMD_RUNNING; goto exit; }
        if (kStrCmp(debugCommand, "ready") == 0) { rv = CMD_READY; goto exit; }
        if (kStrCmp(debugCommand, "list") == 0) { rv = CMD_LIST; goto exit; }

        break;

    case DBG_TIMER:
        if (kStrCmp(debugCommand, "counts") == 0) { rv = CMD_COUNTS; goto exit; }
        if (kStrCmp(debugCommand, "count") == 0) { rv = CMD_COUNTS; goto exit; }
        if (kStrCmp(debugCommand, "cnt") == 0) { rv = CMD_COUNTS; goto exit; }
        if (kStrCmp(debugCommand, "configuration") == 0) { rv = CMD_CONFIG; goto exit; }
        if (kStrCmp(debugCommand, "config") == 0) { rv = CMD_CONFIG; goto exit; }
        if (kStrCmp(debugCommand, "conf") == 0) { rv = CMD_CONFIG; goto exit; }
        if (kStrCmp(debugCommand, "cfg") == 0) { rv = CMD_CONFIG; goto exit; }

        // NOTE(review): there is no `break` here, so DBG_TIMER falls through into
        // DBG_MSGQ below -- "show"/"stat"/"status" are therefore also accepted in
        // the timer state. Confirm this fall-through is intentional.
    case DBG_MSGQ:
        if (kStrCmp(debugCommand, "show") == 0) { rv = CMD_SHOW; goto exit; }
        if (kStrCmp(debugCommand, "stat") == 0) { rv = CMD_STAT; goto exit; }
        if (kStrCmp(debugCommand, "status") == 0) { rv = CMD_STAT; goto exit; }

        break;

    default:
        kprintf(ANSI_FG_RED ANSI_ATTR_BOLD "\n\n!! Unimplemented state!!\n" ANSI_ATTR_NORMAL);
        break;
    }

exit:
    // -- unknown command: report it and wipe the buffer so nothing is carried over
    if (rv == CMD_ERROR) {
        kprintf("Invalid command. Available commands are below the input line. Use 'help' for a detailed description.\n");
        kMemSetB(debugCommand, 0, DEBUG_COMMAND_LEN);
        return CMD_ERROR;
    }

    // -- shift any text remaining after the first word's NUL up to the front of the
    //    buffer, and zero-fill the tail, leaving the rest of the line for the caller
    int len = kStrLen(debugCommand);
    kMemMove(debugCommand, debugCommand + len + 1, kStrLen(debugCommand + len + 1) + 1);
    kMemSetB(debugCommand + kStrLen(debugCommand), 0, DEBUG_COMMAND_LEN - kStrLen(debugCommand));

    return rv;
}
<|start_filename|>modules/kernel/src/heap/HeapNewListEntry.cc<|end_filename|>
//===================================================================================================================
//
// HeapNewListEntry.cc -- Create an OrderedList entry for the KHeapHeader pointer provided
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Create an OrderedList entry for the KHeapHeader pointer provided
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-03 Initial version
// 2012-Sep-16 Leveraged from Century
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2018-Jun-01 Initial 0.1.0 ADCL Copied this file from century32 to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
//
// -- Create a new list entry for the hole
// ------------------------------------
OrderedList_t *HeapNewListEntry(KHeapHeader_t *hdr, bool add)
{
    extern OrderedList_t fixedList[ORDERED_LIST_STATIC];

    assert(hdr != NULL);

    // -- scan the static pool for an unused slot; a NULL block pointer marks a free one
    for (int idx = 0; idx < ORDERED_LIST_STATIC; idx ++) {
        OrderedList_t *found = &fixedList[idx];
        if (found->block) continue;

        // -- claim the slot and wire it to the header it describes
        found->block = hdr;
        found->size = hdr->size;
        found->next = found->prev = 0;
        hdr->entry = found;

        // -- optionally insert it into the ordered free list right away
        if (add) HeapAddToList(found);
        HeapValidateHdr(hdr, "Created HeapNewListEntry()");
        return found;
    }

    // -- the static pool is exhausted
    HeapError("Unable to allocate a free OrderedList entry", "");
    return 0;
}
<|start_filename|>modules/kernel/src/kInit.cc<|end_filename|>
//===================================================================================================================
//
// kInit.cc -- Initialize the kernel structures
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Initialize the kernel structures, preparing to formally start the OS.
//
// Initialization is going to be broken into several phases. At a high level, the phases are:
// 1) Required initialization to put the processor into a known and common state
// 2) OS Structure Initialization
// 3) Service Interrupts and hardware discovery
// 4) Full interrupts enabled and user space initialization
// 5) Become the butler process
//
// The above is a starting point and will be expanded as we add support
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Sep-15 Initial Initial version -- leveraged from Century: kmain
// 2013-Sep-01 #82 Add a mutex for screen operations (2018-05-25: temporarily removed)
// 2013-Sep-03 #73 Encapsulate Process Structure
// 2013-Sep-13 #101 Resolve issues splint exposes
// 2018-May-25 0.1.0 ADCL Copy this file from century32 to century-os
// 2018-Jul-01 Initial 0.1.0 ADCL Refactor this function to be strictly the kernel (not the loader)
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "lists.h"
#include "cpu.h"
#include "fb.h"
#include "hw-disc.h"
#include "interrupt.h"
#include "hardware.h"
#include "printf.h"
#include "heap.h"
#include "process.h"
#include "timer.h"
#include "pmm.h"
#include "msgq.h"
#include "serial.h"
#include "debugger.h"
#include "msgq.h"
#include "butler.h"
//
// -- the flag which will indicate it is time to clean up
// ---------------------------------------------------
extern volatile bool startCleanup;
//
// -- A couple of local prototypes
// ----------------------------
extern "C" void kInit(void);
void PmmStart(Module_t *);

// -- test processes and their message queues (created in kInit(); see StartA/StartB).
//    NOTE(review): A and B are declared but never assigned by kInit() below -- confirm
//    whether they are still needed.
EXPORT KERNEL_DATA
Process_t *A;               // -- intended for the "Process A" test process
EXPORT KERNEL_DATA
Process_t *B;               // -- intended for the "Process B" test process
EXPORT KERNEL_DATA
Process_t *debugger;        // -- the kernel debugger process (assigned in kInit)
int semid;                  // NOTE(review): unused here and not tagged EXPORT KERNEL_DATA like its neighbors -- confirm
EXPORT KERNEL_DATA
MessageQueue_t *q1;         // -- StartA -> StartB request queue
EXPORT KERNEL_DATA
MessageQueue_t *q2;         // -- StartB -> StartA reply queue
EXTERN_C EXPORT KERNEL
void StartA(void)
{
    // -- message-queue ping-pong test: send on q1, block for the reply on q2, nap
    for ( ; ; ) {
        long payload = 0;
        MessageQueueSend(q1, payload, 0, 0);
        MessageQueueReceive(q2, &payload, 0, 0, true);
        ProcessSleep(1);
    }
}
EXTERN_C EXPORT KERNEL
void StartB(void)
{
    // -- mirror of StartA: block on q1, nap, then answer on q2
    for ( ; ; ) {
        long payload = 0;
        MessageQueueReceive(q1, &payload, 0, 0, true);
        ProcessSleep(1);
        MessageQueueSend(q2, payload, 0, 0);
    }
}
// -- shared state for AtomicsTest(): a spinlock-guarded counter, an atomic counter,
//    and bookkeeping for instance numbering and completion
EXPORT KERNEL_DATA
Spinlock_t testLock = {0};          // -- guards testval in part 2 of the test
EXPORT KERNEL_DATA
volatile int testval = 0;           // -- spinlock-protected counter
EXPORT KERNEL_DATA
AtomicInt_t atomVal = {0};          // -- atomically updated counter
EXPORT KERNEL_DATA
AtomicInt_t done = {0};             // -- number of test instances that have finished
EXPORT KERNEL_DATA
AtomicInt_t instance = {0};         // -- next test-instance number to hand out
EXTERN_C EXPORT KERNEL
void AtomicsTest(void)
{
    // -- give each instance an alternating role: odd instances increment, even ones
    //    decrement, so the counters should net out to 0 across paired instances
    int odd = AtomicInc(&instance) % 2;

    // -- spin until all CPUs are up so the test is genuinely concurrent
    //    (NOTE(review): the CPU count is hard-coded to 4 here)
    while (cpus.cpusRunning != 4) {}

    // -- part 1: hammer the shared counter with atomic inc/dec
    for (int i = 0; i < 1000000; i ++) {
        if (odd) {
            AtomicInc(&atomVal);
        } else {
            AtomicDec(&atomVal);
        }
    }

    // -- part 2: same contention pattern, but protected by a spinlock (with
    //    interrupts disabled while the lock is held)
    for (int i = 0; i < 1000000; i ++) {
        archsize_t flags = SPINLOCK_BLOCK_NO_INT(testLock) {
            if (odd) {
                testval ++;
            } else {
                testval --;
            }
        } SPINLOCK_RLS_RESTORE_INT(testLock, flags);
    }

    // -- report this instance as finished
    AtomicInc(&done);
}
//
// -- This is the main entry point for the kernel, starting with initialization
// -------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL NORETURN
void kInit(void)
{
    //
    // -- Phase 1: Required by the processor to setup the proper state
    //    Greet the user from the kernel (serial console first, then frame buffer).
    //    ------------------------------------------------------------
    kprintf("\x1b[0;1;31;40mWelcome to CenturyOS\x1b[0m -- a hobby operating system\n");
    kprintf(" (initializing...)\n");

    SetBgColor(FrameBufferParseRGB("#404040"));
    SetFgColor(0xffff);
    FrameBufferClear();
    FrameBufferPutS("Welcome to CenturyOS -- a hobby operating system\n");
    FrameBufferPutS(" (initializing...)\n");
    FrameBufferPutS("The RSDP is located at "); FrameBufferPutHex(GetRsdp()); FrameBufferDrawChar('\n');

    //
    // -- Phase 2: Required OS Structure Initialization
    //    ---------------------------------------------
    ProcessInit();
    TimerInit(timerControl, 1000);          // -- 1000 is presumably the tick frequency; confirm against TimerInit()

    // -- dump structure offsets; useful for keeping the hand-written assembly in sync
    kprintf("Reporting interesting Process_t offsets:\n");
    kprintf(" Top of Stack: %x\n", offsetof(Process_t, topOfStack));
    kprintf(" Virtual Address Space: %x\n", offsetof(Process_t, virtAddrSpace));
    kprintf(" Process Status: %x\n", offsetof(Process_t, status));
    kprintf(" Process Priority: %x\n", offsetof(Process_t, priority));
    kprintf(" Process Quantum Left: %x\n", offsetof(Process_t, quantumLeft));
    kprintf("Reporting interesting Scheduler_t offsets:\n");
    kprintf(" Next PID to assign: %x\n", offsetof(Scheduler_t, nextPID));
    kprintf(" Next wake timer tick: %x\n", offsetof(Scheduler_t, nextWake));
    kprintf(" Process Change Pending flag: %x\n", offsetof(Scheduler_t, processChangePending));
    kprintf(" Process Lock Count: %x\n", offsetof(Scheduler_t, schedulerLockCount));
    kprintf(" Postpone Count: %x\n", offsetof(Scheduler_t, postponeCount));
    kprintf("Reporting interesting perCPU offsets:\n");
    kprintf(" Current Process: %x\n", offsetof(ArchCpu_t, process));

    MessageQueueInit();

    kprintf("Enabling interrupts now\n");
    EnableInterrupts();
    //BOCHS_TOGGLE_INSTR;
    CoresStart();                           // -- bring the application cores online
    picControl->ipiReady = true;            // -- IPIs may be delivered from this point on

    //
    // -- Phase 3: Service Interrupts only enabled, not ready for all interrupts
    //    Includes hardware initialization
    //    ----------------------------------------------------------------------

    //
    // -- Phase 4: Full interrupts enabled, user space prepared
    //    Includes loading and starting device drivers
    //    -----------------------------------------------------
    q1 = MessageQueueCreate();
    q2 = MessageQueueCreate();

    kprintf("Starting drivers and other kernel processes\n");
    ProcessCreate("Process A", StartA);
    ProcessCreate("Process B", StartB);
    debugger = ProcessCreate("Kernel Debugger", DebugStart);

    //
    // -- Phase 5: Assume the butler process role
    //    ---------------------------------------
    startCleanup = true;                    // -- signal the butler it may start cleaning up
    Butler();                               // -- never returns; kInit is NORETURN
}
<|start_filename|>modules/kernel/src/process/ProcessLockScheduler.cc<|end_filename|>
//===================================================================================================================
//
// ProcessLockScheduler.cc -- Lock the scheduler for manipulation
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Nov-25 Initial 0.4.6a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "lists.h"
#include "timer.h"
#include "spinlock.h"
#include "process.h"
//
// -- Lock the scheduler in preparation for changes
// ---------------------------------------------
EXPORT KERNEL
void ProcessLockScheduler(bool save)
{
    // -- take the scheduler spinlock with interrupts disabled; `flags` captures the
    //    pre-lock interrupt state so it can be restored at unlock time
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(schedulerLock);
    scheduler.lockCpu = thisCpu->cpuNum;    // -- record which CPU owns the lock

    // -- only the outermost lock saves the interrupt flags; nested calls must not
    //    overwrite them or the matching unlock would restore the wrong state
    if (AtomicRead(&scheduler.schedulerLockCount) == 0) {
//        kprintf("Scheduler locked on CPU%d\n", thisCpu->cpuNum);
        if (save) scheduler.flags = flags;
    }

    // -- track lock nesting depth
    AtomicInc(&scheduler.schedulerLockCount);
}
<|start_filename|>modules/kernel/src/heap/HeapAddToList.cc<|end_filename|>
//===================================================================================================================
//
// HeapAddToList.cc -- Add a new Ordered List Entry into the list in the proper place
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Add a new Ordered List Entry into the list in the proper place
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-11 Initial version
// 2012-Sep-16 Leveraged from Century
// 2018-Jun-01 Initial 0.1.0 ADCL Copied this file from century32 to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
//
// -- Add an ordered list entry to the heap structures
// ------------------------------------------------
// -- Insert `entry` into the size-ordered free list, then repair the optimized
//    starting-point pointers (heap512/heap1K/heap4K/heap16K).
void HeapAddToList(OrderedList_t *entry)
{
    OrderedList_t *wrk, *sav = 0;       // -- wrk: scan cursor; sav: last node visited (for append)
    size_t size;

    if (!assert(entry != NULL)) HeapError("NULL entry in HeapAddToList()", "");
    HeapValidateHdr(entry->block, "HeapAddToList()");
    // cannot validate heap ptrs as may be empty

    size = entry->size;

    // assume that we are starting at the beginning
    wrk = kHeap->heapMemory;

    if (wrk) {
        // -- skip ahead using the best size-bucket pointer that applies
        if (size >= 512 && kHeap->heap512) wrk = kHeap->heap512;
        if (size >= 1024 && kHeap->heap1K) wrk = kHeap->heap1K;
        if (size >= 4096 && kHeap->heap4K) wrk = kHeap->heap4K;
        if (size >= 16384 && kHeap->heap16K) wrk = kHeap->heap16K;
    } else {
        // special case, nothing in the Ordered List; make it right and leave
        kHeap->heapMemory = entry;
        entry->next = entry->prev = 0;

        if (size >= 512) kHeap->heap512 = entry;
        if (size >= 1024) kHeap->heap1K = entry;
        if (size >= 4096) kHeap->heap4K = entry;
        if (size >= 16384) kHeap->heap16K = entry;
        goto out;
    }

    // in theory, wrk is now optimized for a faster search for the right size
    while (wrk) { // while we have something to work with...
        if (wrk->size < size) {
            sav = wrk;
            wrk = wrk->next;
            continue;
        }

        // at this point, we need to insert before wrk
        // NOTE(review): if wrk is the list head, kHeap->heapMemory is NOT updated
        // to point at the newly inserted entry -- confirm the head pointer is
        // maintained elsewhere (or that this case cannot occur)
        entry->next = wrk;
        entry->prev = wrk->prev;
        if (entry->next) entry->next->prev = entry;
        if (entry->prev) entry->prev->next = entry;
        break;
    }

    // check if we need to add to the end -- special case
    if (!wrk) {
        sav->next = entry;
        entry->prev = sav;
        entry->next = 0;
    }

    // entry inserted; now fix-up the optimized pointers; start with NULLs
    if (!kHeap->heap512 && size >= 512) kHeap->heap512 = entry;
    if (!kHeap->heap1K && size >= 1024) kHeap->heap1K = entry;
    if (!kHeap->heap4K && size >= 4096) kHeap->heap4K = entry;
    if (!kHeap->heap16K && size >= 16384) kHeap->heap16K = entry;

    // fixup the pointer for >= 512 bytes -- walk back one node if it also qualifies
    if (kHeap->heap512) {
        if (kHeap->heap512->prev && kHeap->heap512->prev->size >= 512) {
            kHeap->heap512 = kHeap->heap512->prev;
        }
    }

    // fixup the pointer for >= 1024 bytes
    if (kHeap->heap1K) {
        if (kHeap->heap1K->prev && kHeap->heap1K->prev->size >= 1024) {
            kHeap->heap1K = kHeap->heap1K->prev;
        }
    }

    // fixup the pointer for >= 4096 bytes
    if (kHeap->heap4K) {
        if (kHeap->heap4K->prev && kHeap->heap4K->prev->size >= 4096) {
            kHeap->heap4K = kHeap->heap4K->prev;
        }
    }

    // fixup the pointer for >= 16384 bytes
    if (kHeap->heap16K) {
        if (kHeap->heap16K->prev && kHeap->heap16K->prev->size >= 16384) {
            kHeap->heap16K = kHeap->heap16K->prev;
        }
    }

out:
    HeapValidatePtr("HeapAddToList()");
    HeapValidateHdr(entry->block, "HeapAddToList() at exit");
}
<|start_filename|>arch/arm/mmu/MmuInit.cc<|end_filename|>
//===================================================================================================================
//
// MmuyInit.cc -- Complete the MMU initialization for the arm architecture
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// The goal of this function is to make sure that MMU is fully mapped. Now that we have access to upper memory
// functions, we will use them to complete the mappings still pening. These are:
// * MMIO space
// * Interrupt Vector Table (map to existing frame but in upper address space)
// * Frame buffer
//
// All other addresses should be mapped properly before handing control to the loader.
//
// While the MMU is up and running, the OS structures to manage the MMU are not set up yet. The purpose of this
// function is to make sure the MMU is in a state the kernel can take over this responsibility. There are still
// several things that need to still take place before I can turn over responsibilty to the kernel:
// * The management mappings need to be completed to the proper locations (both the ttl1 and ttl2 tables)
// * The frame buffer needs to get mapped
// * The MMIO addresses need to be mapped to the kernel locations
// * Map the exception vector table (VBAR)
// * Map a Kernel Stack
//
// It is prudent to document how to take an address and pick it apart into the different components that are needed
// for managing the paging tables. There are several to consider:
// 1. The index into the TTL1 table
// 2. Given the TTL2 Table address, the index into the TTL2 table
// 3. Given the address, the overall index into the TTL2 management table which starts at 0xffc00000
//
// So for an address, it breaks down like this:
//
//
// +-------- This is the offset into the TTL2 table -- Each table is 1K, so there are 256 entries
// --
// 0x12345678
// --- ---
// | +------ This is the offset into the frame -- handled by the MMU
// |
// +----------- This is the offset into the TTL1 table -- used to determine the address of the TTL2 table
//
// Now, the index into the overall TTL2 table managed at address 0xffc00000, the top 20 bits will index into that
// table. Checking our math here, the TTL2 table is 4MB long; each entry 4 bytes. So, there are 0x100000
// entries. 1MB can be represented by 20 bits (5 nibbles).
//
// So, I also want to make sure I have the structure documented here. I am going to do my best to draw with ASCII
// art. This has been completed in the `entry.s` file.
//
// The TTL1 table is located at over 4 pages from 0xff404000 to 0xff407000 inclusive. There are 2 blocks of
// TTL1 entries we will be concerned with here: 0xff4-0xff7 and 0xffc-0xfff. The first group is needed to map
// the TTL1 table for management -- keep in mind here we will only map 4 pages. The second group is needed to
// map the TTL2 table for management. This will be 4MB of mappings and will be an entire frame of TTL2 tables
// (which is really 4 tables by the way).
//
// +-------//-------++-------//-------++-------//-------++---------------------//---------------------------+
// | || || || |.|F|F|F|F|.|F|F|F|F|
// | 0xff404000 || 0xff405000 || 0xff406000 || 0xff407000 |.|F|F|F|F|.|F|F|F|F|
// | || || || |.|4|5|6|7|.|C|D|E|F|
// +-------//-------++-------//-------++-------//-------++---------------------//---------------------------+
//
// So, the TTL1 management table will look like this:
//
// 0xff400000:
// ff4___________________ ff5___ ff6___ ff7___
// +-------------------//-+--//--+--//--+--//--+ * Entry 04 will point to the frame for 0xff404000
// |-|-|-|-|0|0|0|0|.| | | | | * Entry 05 will point to the frame for 0xff405000
// |-|-|-|-|4|5|6|7|.| | | | | * Entry 06 will point to the frame for 0xff406000
// +-------------------//-+--//--+--//--+--//--+ * Entry 07 will point to the frame for 0xff407000
//
// This then leaves the TTL2 management addresses. This is a 4MB block that needs to be managed. This area can
// be managed with a single frame or 4 TTL2 tables inserted into the TTL1 table at indices 0xffc, ffd, ffe, fff.
// So, this is the last group above. This will look like the following:
//
// 0xffc00000:
// ffc___ ffd___ ffe___ fff
// +--//--+--//--+--//--+--//------------------------+ * Entry fc will not point to anything on init
// | | | | |F|F|F|F| * Entry fd will not point to anything on init
// | | | | |C|D|E|F| * Entry fe will not point to anything on init
// +--//--+--//--+--//--+--//------------------------+ * Entry ff will be recursively pointed to this frame
//
// Now, this is not to say that not other entries will be initialized. Quite the contrary. I am just saying that
// the other entries are not needed for managing the paging tables.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-13 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "serial.h"
#include "mmu.h"
#include "hw-disc.h"
#include "printf.h"
#include "entry.h"
#include "loader.h"
//
// -- Complete the initialization of the Mmu for the loader to function properly
// --------------------------------------------------------------------------
EXTERN_C EXPORT LOADER
void MmuInit(void)
{
    //
    // -- Map the exception vector table (VBAR) onto the frame that already holds it.
    //    ----------------------------------------------------------------------------
    MmuMapToFrame(EXCEPT_VECTOR_TABLE, intTableAddr, PG_KRN | PG_WRT);

    //
    // -- Walk the MMIO range and map every page into kernel space as device memory.
    //    Note the asymmetric stepping: the physical side appears to count in frame
    //    numbers (MmuMapToFrame() takes a frame, hence the `++`) while the virtual
    //    side advances a full page each iteration.
    //    ----------------------------------------------------------------------------
    archsize_t vAddr = MMIO_VADDR;
    for (archsize_t frame = MMIO_LOADER_LOC; frame < MMIO_LOADER_END; frame ++, vAddr += PAGE_SIZE) {
        MmuMapToFrame(vAddr, frame, PG_KRN | PG_DEVICE | PG_WRT);
    }

    // -- with the MMIO space mapped, kprintf() output can be enabled
    kPrintfEnabled = true;
}
<|start_filename|>modules/kernel/src/pmm/PmmAddToStackNode.cc<|end_filename|>
//===================================================================================================================
//
// PmmAddToStackNode.cc -- Given the stack provided, see if the frame can be added to one of the blocks
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// The problem with this function is that a newly freed frame or block of frames may be adjacent to up to 2
// other frame blocks that are already in the stack. To deal with this, the best thing to do would be to traverse
// the stack and check for both conditions. However, the challenge with that is twofold:
// 1) the implementation requires the TLB to be flushed on all CPUs for each mapping change, which is expensive
// 2) this function is called from the butler, which is running at a low priority and will have a lock for an
// extended period of time.
//
// The previous implementation struggled with the same problem, and I had decided to leave this work to the butler
// to clean up. At the moment, I will do the same thing.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-12 Initial 0.3.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "spinlock.h"
#include "lists.h"
#include "heap.h"
#include "pmm.h"
//
// -- Search through the stack and see if the frame can be added to an existing block; create a new one if not.
// ---------------------------------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void PmmAddToStackNode(Spinlock_t *lock, PmmFrameInfo_t *stack, frame_t frame, size_t count)
{
    // -- Push the freed block onto the given stack while holding its lock.  Note that despite the
    //    function's name, no search for adjacent blocks is performed here (see the file header for
    //    why); coalescing of neighboring blocks is deferred to the butler.
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(*lock) {
        PmmPush(stack, frame, count);
        SPINLOCK_RLS_RESTORE_INT(*lock, flags);
    }
}
<|start_filename|>modules/kernel/src/pmm/PmmInit.cc<|end_filename|>
//===================================================================================================================
//
//  PmmInit.cc -- Initialize the Physical Memory Manager's internal OS structure
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This function will initialize the Physical Memory Manager (PMM). The PMM is implemented as a bitmap, where
// a bit flag in an array of bits will indicate if the frame is available or taken.
//
// There are several steps that are needed to be completed to completely initialize the PMM. These are:
// 1. Determine where and how big to make the PMM -- this is dependent on the upper memory limit which will
// determine the number of frames we need to keep track of. Allocate this memory space in the physical memory.
// 2. Set every thing to be allocated. This is a necessary step as we cannot guarantee that the multiboot
// information contains all the unusable holes in memory. So we will assume that unless explicitly available,
// the frame is not available.
// 3. Set all the available memory from the multiboot information to be available. This will be all the volatile
// memory on the system.
//  4. Go through and mark all the frames that have been used appropriately. This will be the loader, several
// additional OS structures, the loaded modules, and even the video buffer. This will include the bitmap
// itself.
//
// A change has been made so that I no longer need to worry about storing the pmm bitmap in low memory on x86.
// Therefore, this function simplifies greatly.
//
// At the same time, I have been able to remove the crappy pre-allocation of heap frames. Since we have the
// ability to allocate now in real time. I have eliminated the chicken-and-egg problem I had before.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2017-Jun-11 Initial 0.1.0 ADCL Initial version
// 2019-Feb-14 Initial 0.3.0 ADCL Relocated
// 2020-Apr-12 #405 v0.6.1c ADCL Redesign the PMM to store the stack in the freed frames themselves
//
//===================================================================================================================
#include "types.h"
#include "serial.h"
#include "printf.h"
#include "cpu.h"
#include "heap.h"
#include "hw-disc.h"
#include "pmm.h"
#define DEBUG_PMM 1
#ifndef DEBUG_PMM
# define DEBUG_PMM 0
#endif
//
// -- initialize the physical portion of the memory manager
// -----------------------------------------------------
EXTERN_C EXPORT LOADER
void PmmInit(void)
{
    extern bool pmmInitialized;

    kprintf("Starting PMM initialization\n");

    // -- Sanity check -- we cannot continue without a memory map
    if (!HaveMMapData()) {
        CpuPanicPushRegs("PANIC: Unable to determine memory map; Century OS cannot initialize\n\n");
    }

    //
    // -- Now simply loop through the memory map and add the blocks to the scrubStack.  The only catch here is that
    //    we will not deal with the first 4MB of memory, saving that for the cleanup after boot.
    //    ---------------------------------------------------------------------------------------------------------
    for (int i = 0; i < GetMMapEntryCount(); i ++) {
        uint64_t start = GetAvailMemStart(i);
        uint64_t end = start + GetAvailMemLength(i);

        if ((start & 0xffffffff00000000) != 0) continue;        // -- if it is above 32-bit space, it is not usable
        if (end > 0x100000000) end = 0x100000000;               // -- clip at 4GB (yes, this is a 9-digit hex number!!)

        frame_t frame = (archsize_t)(start >> 12);
        size_t count = (archsize_t)((end - start) >> 12);

        // -- skip any block that lies entirely at or below the early-frame boundary.  The previous test
        //    (`frame < earlyFrame && count < earlyFrame`) wrongly skipped blocks that start below the
        //    boundary but extend past it whenever their count also happened to be below earlyFrame.
        if (frame + count <= earlyFrame) continue;
        if (frame < earlyFrame) {
            // -- trim the portion below the boundary; the remainder is released normally
            count -= (earlyFrame - frame);
            frame = earlyFrame;
        }

        kprintf("Releasing block of memory from frame %x for a count of %x frames\n", frame, count);

        //
        // -- since we are guaranteed to be above 1MB, this is all the normal queue
        //    NOTE(review): each iteration re-maps pmm.normStack onto the new frame and zeroes the
        //    links; presumably PmmPush/CLEAN_PMM semantics make this the intended stack
        //    initialization -- confirm against the PMM redesign (#405)
        //    ---------------------------------------------------------------------
        MmuMapToFrame((archsize_t)pmm.normStack, frame, PG_KRN | PG_WRT);
        pmm.normStack->frame = frame;
        pmm.normStack->count = count;
        pmm.normStack->next = 0;
        pmm.normStack->prev = 0;
    }

    CLEAN_PMM();

    //
    // -- TODO: Do I need to address the framebuffer here?
    //    ------------------------------------------------
    kprintf("The frame buffer is located at %p\n", GetFrameBufferAddr());

    pmmInitialized = true;
    kprintf("Physical Memory Manager Initialized\n");
}
<|start_filename|>modules/kernel/src/ipi/IpiHandleTlbFlush.cc<|end_filename|>
//===================================================================================================================
//
// IpiHandleTlbFlush.cc -- Handle the actual TLB flush for an address
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Feb-24 Initial v0.3.0h ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "mmu.h"
#include "pic.h"
#include "process.h"
#include "printf.h"
//
// -- Handle the actual TLB flush, waiting for the address to flush
// -------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void IpiHandleTlbFlush(isrRegs_t *regs)
{
    // -- wait for the address to be given to the CPU; (archsize_t)-1 is the "not yet set" sentinel
    //    NOTE(review): this spin assumes tlbFlush.addr is declared volatile/atomic at its definition,
    //    otherwise the compiler could hoist the load out of the loop -- confirm at the declaration
    while (tlbFlush.addr == (archsize_t)-1) {}

#if DEBUG_ENABLED(IpiHandleTlbFlush)
    kprintf("Flushing TLB on CPU %d\n", thisCpu->cpuNum);
#endif

    // -- invalidate the TLB entry on this core, then count this CPU as done for the initiator
    InvalidatePage(tlbFlush.addr);
    AtomicDec(&tlbFlush.count);

    // -- acknowledge the IPI to the interrupt controller
    PicEoi(picControl, (Irq_t)0);
}
<|start_filename|>arch/x86/IsrDumpState.cc<|end_filename|>
//===================================================================================================================
//
// IsrDumpState.cc -- For exceptions, dump the current state of the processor from the registers
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-02 Initial version
// 2012-Sep-16 Leveraged from Century
// 2012-Sep-23 Removed DUMP() define
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2018-Jun-01 Initial 0.1.0 ADCL Copied this file from century32 to century-os
// 2019-Feb-09 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "cpu.h"
#include "interrupt.h"
//
// -- Dump the full processor state from the captured registers, then halt this CPU forever
//    -------------------------------------------------------------------------------------
void IsrDumpState(isrRegs_t *regs)
{
    // -- before the per-CPU structures exist, only the boot CPU can be running; report 0 then
    int cpuNo = (cpus.cpusRunning > 1 ? thisCpu->cpuNum : 0);
    kprintf("CPU: %d\n", cpuNo);

    // -- general purpose registers
    kprintf("EAX: %p EBX: %p ECX: %p\n", regs->eax, regs->ebx, regs->ecx);
    kprintf("EDX: %p ESI: %p EDI: %p\n", regs->edx, regs->esi, regs->edi);
    kprintf("EBP: %p ESP: %p SS: %x\n", regs->ebp, regs->esp, regs->ss);

    // -- instruction pointer and flags
    kprintf("EIP: %p EFLAGS: %p\n", regs->eip, regs->eflags);

    // -- segment and control registers
    kprintf("CS: %x DS: %x ES: %x FS: %x GS: %x\n",
            regs->cs, regs->ds, regs->es, regs->fs, regs->gs);
    kprintf("CR0: %p CR2: %p CR3: %p\n", regs->cr0, regs->cr2, regs->cr3);

    // -- the trap number and error code pushed by the exception
    kprintf("Trap: %x Error: %x\n\n", regs->intno, regs->errcode);

    // -- this function never returns; park the CPU
    for (;;) {
        Halt();
    }
}
<|start_filename|>modules/kernel/src/cpu/CpuPanic.cc<|end_filename|>
//===================================================================================================================
//
// CpuPanic.cc -- Panic halt all CPUs, printing the register contents of this CPU
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Mar-04 Initial v0.5.0h ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "pic.h"
#include "interrupt.h"
#include "cpu.h"
//
// -- Panic halt all CPUs
// -------------------
EXTERN_C EXPORT NORETURN KERNEL
void CpuPanic(const char *reason, isrRegs_t *regs)
{
DisableInterrupts();
PicBroadcastIpi(picControl, IPI_PANIC);
kprintf("\n%s\n\n", reason);
IsrDumpState(regs);
}
<|start_filename|>arch/arm/IsrHandler.cc<|end_filename|>
//===================================================================================================================
//
// IsrHandler.cc -- The common ISR handler routine
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// All ISRs are handled by a common service program. This is it. But it is currently a stub.
//
// IRQ 0-63 are for the normal IRQs. Then, there are 8 additional IRQs (64-71) for some additional interrupts.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Dec-06 Initial 0.2.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "hardware.h"
#include "printf.h"
#include "timer.h"
#include "pic.h"
#include "interrupt.h"
//
// -- The ISR Handler Table -- one handler pointer per interrupt vector, each initialized
//    to the NULL_ISR sentinel until a handler is registered
//    -----------------------------------------------------------------------------------
EXPORT KERNEL_BSS
isrFunc_t isrHandlers[256] = {NULL_ISR};
//
// -- This is the common ISR Handler entry routine
// --------------------------------------------
EXTERN_C EXPORT KERNEL
void IsrHandler(isrRegs_t *regs)
{
    // -- ask the interrupt controller which interrupt fired; -1 indicates a spurious interrupt
    int intno = PicDetermineIrq(picControl);
    if (intno == -1) return;

#if DEBUG_ENABLED(IsrHandler)
    // skip timer irq
    if (intno != 97) kprintf("good interrupt on cpu %d: %d\n", thisCpu->cpuNum, intno);
#endif

    // -- dispatch to the registered handler; an unhandled interrupt is fatal
    isrFunc_t handler = isrHandlers[intno];
    if (handler != NULL) {
        handler(regs);
    } else {
        kprintf("PANIC: Unhandled interrupt: %x\n", intno);
        CpuPanic("", regs);
    }
}
<|start_filename|>platform/pc/inc/platform-timer.h<|end_filename|>
//===================================================================================================================
//
// platform-timer.h -- Timer definitions and functions for the x86
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#pragma once
#ifndef __TIMER_H__
# error "Use #include \"timer.h\" and it will pick up this file; do not #include this file directly."
#endif
//
// -- on x86, this is the type we use to refer to the timer port
//    ----------------------------------------------------------
typedef archsize_t TimerBase_t;

//
// -- These are the offsets we will use for the timer -- the 3 PIT channels and the
//    command register, relative to the device base port
//    -----------------------------------------------
#define TIMER_CHAN_0        0x00
#define TIMER_CHAN_1        0x01
#define TIMER_CHAN_2        0x02
#define TIMER_COMMAND       0x03

//
// -- These are the timer device structures -- the local APIC timer and the legacy 8253 PIT
//    -------------------------------------------------------------------------------------
EXTERN KERNEL_DATA
TimerDevice_t lapicTimerControl;

EXTERN KERNEL_DATA
TimerDevice_t timer8253Control;
<|start_filename|>modules/kernel/src/pmm/PmmScrubBlock.cc<|end_filename|>
//===================================================================================================================
//
//  PmmScrubBlock.cc -- Scrub the frames in the scrubStack
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-12 Initial 0.3.1 ADCL Initial version
// 2020-Apr-12 #405 v0.6.1c ADCL Redesign the PMM to store the stack in the freed frames themselves
//
//===================================================================================================================
#include "types.h"
#include "spinlock.h"
#include "lists.h"
#include "heap.h"
#include "pmm.h"
//
// -- Clean a block of physical memory of all its data
// ------------------------------------------------
EXTERN_C EXPORT KERNEL
void PmmScrubBlock(void)
{
    // -- quickly check if there is something to do; we will redo the check when we get the lock
    //    (the scrub stack's top-of-stack page appears to be mapped only when the stack is
    //    non-empty -- confirm against PmmPush/PmmPop)
    if (!MmuIsMapped((archsize_t)pmm.scrubStack)) return;

    frame_t frame = 0;          // -- 0 doubles as the "nothing popped" sentinel below
    size_t count = 0;

    archsize_t flags = SPINLOCK_BLOCK_NO_INT(pmm.scrubLock) {
        // -- double check just in case something changed while we waited for the lock
        if (MmuIsMapped((archsize_t)pmm.scrubStack)) {
            frame = pmm.scrubStack->frame;
            count = pmm.scrubStack->count;

            // -- the block leaves the available pool while it is being scrubbed
            AtomicSub(&pmm.framesAvail, count);
            PmmPop(pmm.scrubStack);
        }
        SPINLOCK_RLS_RESTORE_INT(pmm.scrubLock, flags);
    }

    // -- if we found nothing to do, return
    if (frame == 0) return;

    // -- here we scrub the frames in the block; deliberately done outside the lock since
    //    this is the slow part
    for (size_t i = 0; i < count; i ++) PmmScrubFrame(frame + i);

    // -- return the clean block: frames below 0x100 go to the low-memory stack,
    //    everything else to the normal stack
    if (frame < 0x100) {
        PmmAddToStackNode(&pmm.lowLock, pmm.lowStack, frame, count);
    } else {
        PmmAddToStackNode(&pmm.normLock, pmm.normStack, frame, count);
    }
}
<|start_filename|>modules/kernel/src/cpu/CpuVars.cc<|end_filename|>
//===================================================================================================================
//
//  CpuVars.cc -- CPU Abstraction Variables
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Feb-02 Initial v0.5.0f ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "stacks.h"
#include "mmu.h"
#include "pmm.h"
#include "cpu.h"
//
// -- The global CPU abstraction structure.  This lives in initialized data (KERNEL_DATA);
//    only the boot CPU is running at load time, hence cpusRunning == 1.
//    ------------------------------------------------------------------------------------
EXPORT KERNEL_DATA
Cpu_t cpus = {
    .cpusRunning = 1,
};
<|start_filename|>platform/pc/apic/LApicEoi.cc<|end_filename|>
//===================================================================================================================
//
// LApicEoi.cc -- Signal End of Interrupt to the Local APIC
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-26 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "interrupt.h"
#include "mmu.h"
#include "printf.h"
#include "cpu.h"
#include "pic.h"
//
// -- Signal EOI to the Local APIC
// ----------------------------
EXTERN_C EXPORT KERNEL
void _LApicEoi(TimerDevice_t *dev)
{
    // -- a single write of 0 to the local APIC EOI register acknowledges the interrupt;
    //    silently ignore a missing device
    if (dev != NULL) {
        MmioWrite(dev->base + LAPIC_EOI, 0);
    }
}
<|start_filename|>modules/kernel/src/pmm/PmmAllocateFrame.cc<|end_filename|>
//===================================================================================================================
//
// PmmAllocateFrame.cc -- Allocate a normal frame (not low memory required)
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// So the algorithm here is relatively simple. Under most circumstances, we will allocate from the
// normal stack of free frames. However, if that is empty, then we will check the scrub queue for
// something that we can clean up ourselves. Finally if that is empty, we will go to the low
// memory stack. Finally, if there is nothing there, we will report we are out of memory.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-11 Initial 0.3.1 ADCL Initial version
// 2020-Apr-12 #405 v0.6.1c ADCL Redesign the PMM to store the stack in the freed frames themselves
//
//===================================================================================================================
#include "types.h"
#include "spinlock.h"
#include "heap.h"
#include "pmm.h"
//
// -- Allocate a frame and return it
// ------------------------------
EXTERN_C EXPORT KERNEL
frame_t PmmAllocateFrame(void)
{
    // -- before the PMM structures exist, fall back to the early-boot allocator
    //    (the >> 12 suggests NextEarlyFrame() returns a byte address -- confirm)
    if (unlikely(!pmmInitialized)) {
        // kprintf("The PMM is not yet initialized; returning an early frame\n");
        return (NextEarlyFrame() >> 12);
    }

    // kprintf("Allcoating a PMM frame\n");
    frame_t rv = 0;         // assume we will not find anything; 0 means "no frame"
    archsize_t flags;

    //
    // -- check the normal stack for a frame to allocate
    //    ----------------------------------------------
    flags = SPINLOCK_BLOCK_NO_INT(pmm.normLock) {
        rv = _PmmDoRemoveFrame(pmm.normStack, false);
        SPINLOCK_RLS_RESTORE_INT(pmm.normLock, flags);
    }
    if (rv != 0) return rv;

    //
    // -- check the scrub queue for a frame to allocate
    //    --------------------------------------------------------------------------------------------------
    flags = SPINLOCK_BLOCK_NO_INT(pmm.scrubLock) {
        rv = _PmmDoRemoveFrame(pmm.scrubStack, true);       // -- scrub the frame when it is removed
        SPINLOCK_RLS_RESTORE_INT(pmm.scrubLock, flags);
    }
    if (rv != 0) return rv;

    //
    // -- last resort: check the low stack for a frame to allocate
    //    -------------------------------------------
    return PmmAllocateLowFrame();
}
<|start_filename|>arch/arm/mmu/MmuIsMapped.cc<|end_filename|>
//===================================================================================================================
//
// MmuIsMapped.cc -- Determine if the address is mapped in the current address space
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-May-01 Initial 0.4.3 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "printf.h"
#include "mmu.h"
//
// -- Determine whether the given virtual address is mapped in the current address space
//    ----------------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL
bool MmuIsMapped(archsize_t addr)
{
    // -- locate the TTL1 (1MB section) and TTL2 (4KB page) entries that translate this address
    Ttl1_t *level1 = &((Ttl1_t *)ARMV7_TTL1_TABLE_VADDR)[addr >> 20];
    Ttl2_t *level2 = &((Ttl2_t *)ARMV7_TTL2_TABLE_VADDR)[addr >> 12];

    // -- mapped only when neither level reports a fault; short-circuit keeps the TTL2
    //    read from happening when the TTL1 entry already faults
    return (level1->fault != ARMV7_MMU_FAULT) && (level2->fault != ARMV7_MMU_FAULT);
}
<|start_filename|>platform/bcm2836/timer/TimerVars.cc<|end_filename|>
//===================================================================================================================
//
// TimerVars.cc -- These are the variables for the bcm2835 Timer
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "timer.h"
//
// -- This is the device description for the bcm2835 system timer (function pointers bind
//    the generic timer interface to the bcm2835-specific implementations)
//    ----------------------------------------------------------------------------------------------------------
EXPORT KERNEL_DATA
TimerDevice_t _timerControl = {
    .base = BCM2835_TIMER,
    .pic = &picBcm2835,
    .TimerCallBack = TimerCallBack,
    .TimerInit = _TimerInit,
    .TimerEoi = _TimerEoi,
//    .TimerPlatformTick = _TimerPlatformTick,
    .TimerCurrentCount = _TimerCurrentCount,
};

//
// -- This is the pointer to the structure we will really use
//    -------------------------------------------------------
EXPORT KERNEL_DATA
TimerDevice_t *timerControl = &_timerControl;
<|start_filename|>modules/kernel/src/butler/ButlerInit.cc<|end_filename|>
//===================================================================================================================
//
// ButlerInit.cc -- Initialize the Butler process and perform the initial cleanup
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Notice that this function appears in the kernel section, not the loader section as with all the other init jobs.
// The reason is that while this will be called once, it will also remove the loader section from memory and free
// the PMM frames, meaning it would be destroying itself. Undesirable results.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-10 Initial v0.6.1b ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "process.h"
#include "msgq.h"
#include "mmu.h"
#include "pmm.h"
#include "spinlock.h"
#include "heap.h"
#include "butler.h"
//
// -- The name of the butler process (assigned to currentThread->command in ButlerInit())
//    ------------------------------
EXPORT KERNEL_DATA
const char *butlerName = "Butler";
//
// -- Initialize the Butler and perform the initial cleanup
// -----------------------------------------------------
EXTERN_C EXPORT KERNEL
void ButlerInit(void)
{
    // -- capture the linker-provided section symbols into kernel globals; these record what
    //    memory belongs to the kernel and must never be freed

    // -- kernel text location
    EXTERN uint8_t txtStart[];
    EXTERN uint8_t txtEnd[];
    EXTERN archsize_t txtPhys;
    EXTERN archsize_t txtSize;

    krnKernelTextStart = txtStart;
    krnKernelTextEnd = txtEnd;
    krnKernelTextPhys = txtPhys;
    krnKernelTextSize = txtSize;

    // -- kernel data location
    EXTERN uint8_t dataStart[];
    EXTERN uint8_t bssEnd[];
    EXTERN archsize_t dataPhys;
    EXTERN archsize_t dataSize;

    krnKernelDataStart = dataStart;
    krnKernelDataEnd = bssEnd;
    krnKernelDataPhys = dataPhys;
    krnKernelDataSize = dataSize;

    // -- kernel syscall location
    EXTERN uint8_t sysStart[];
    EXTERN uint8_t sysEnd[];
    EXTERN archsize_t sysPhys;
    EXTERN archsize_t sysSize;

    krnSyscallStart = sysStart;
    krnSyscallEnd = sysEnd;
    krnSyscallPhys = sysPhys;
    krnSyscallSize = sysSize;

    // -- stab location
    EXTERN uint8_t stabStart[];
    EXTERN uint8_t stabEnd[];
    EXTERN archsize_t stabPhys;
    EXTERN archsize_t stabSize;

    krnStabStart = stabStart;
    krnStabEnd = stabEnd;
    krnStabPhys = stabPhys;
    krnStabSize = stabSize;

    // -- data needed to clean up the entry point; copied into locals because the sections
    //    they describe are unmapped and freed below
    EXTERN uint8_t mbStart[];
    EXTERN uint8_t mbEnd[];
    EXTERN archsize_t mbPhys;

    uint8_t *krnMbStart = mbStart;
    uint8_t *krnMbEnd = mbEnd;
    archsize_t krnMbPhys = mbPhys;

    // -- data needed to clean up the loader
    EXTERN uint8_t ldrStart[];
    EXTERN uint8_t ldrEnd[];
    EXTERN archsize_t ldrPhys;

    uint8_t *krnLdrStart = ldrStart;
    uint8_t *krnLdrEnd = ldrEnd;
    archsize_t krnLdrPhys = ldrPhys;

    // -- data needed to clean up the smp block (better be 1 page: only one frame is released)
    EXTERN uint8_t smpStart[];
    EXTERN archsize_t smpPhys;
    EXTERN archsize_t smpSize;

    uint8_t *krnSmpStart = smpStart;
    archsize_t krnSmpPhys = smpPhys;
    archsize_t krnSmpSize = smpSize;

    // -- Change our identity: rename the current thread and drop its priority, with
    //    interrupts disabled around the update
    archsize_t flags = DisableInterrupts();
    currentThread->command = (char *)butlerName;        // usually heap memory, conversion required
    currentThread->priority = PTY_LOW;
    RestoreInterrupts(flags);

    //
    // -- up to this point we have had access to the multiboot entry code; not any more
    //    -----------------------------------------------------------------------------
    // -- unmap any memory below 1 MB
    for (archsize_t addr = 0; addr < 0x100000; addr += PAGE_SIZE) {
        if (MmuIsMapped(addr)) {
            MmuUnmapPage(addr);
        }
    }

    // -- free any available memory < 4MB; ButlerMemCheck() decides which frames are safe
    for (frame_t frame = 0; frame < 0x400; frame ++) {
        if (ButlerMemCheck(frame)) PmmReleaseFrame(frame);
    }

    //
    // -- up to this point, we have access to all the loader code; not any more
    //    ---------------------------------------------------------------------
    // -- Clean up the SMP code
    if (krnSmpSize) {
        MmuUnmapPage((archsize_t)krnSmpStart);
        PmmReleaseFrame(krnSmpPhys >> 12);
    }

    // -- Clean up the loader, one page at a time
    while (krnLdrStart < krnLdrEnd) {
        MmuUnmapPage((archsize_t)krnLdrStart);
        PmmReleaseFrame(krnLdrPhys >> 12);
        krnLdrStart += PAGE_SIZE;
        krnLdrPhys += PAGE_SIZE;
    }

    // -- Clean up the multiboot entry
    while (krnMbStart < krnMbEnd) {
        MmuUnmapPage((archsize_t)krnMbStart);
        PmmReleaseFrame(krnMbPhys >> 12);
        krnMbStart += PAGE_SIZE;
        krnMbPhys += PAGE_SIZE;
    }
}
<|start_filename|>platform/bcm2836/init/PlatformEarlyInit.cc<|end_filename|>
//===================================================================================================================
//
// PlatformEarlyInit.cc -- Handle the early initialization for the bcm2835 platform
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This function is called after `MmuEarlyInit()`, so we expect to have access to kernel virtual memory addresses.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-05 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "serial.h"
#include "hw-disc.h"
#include "cpu.h"
#include "printf.h"
#include "platform.h"
//
// -- Handle the early initialization for the bcm2836 platform
//    --------------------------------------------------------
EXTERN_C EXPORT LOADER
void PlatformEarlyInit(void)
{
    // -- bring up the serial port first so debug output works from here on
    SerialOpen(&debugSerial);
    kprintf("Hello...\n");

    // -- gather the hardware configuration
    HwDiscovery();

    // -- at some point, this will come from the DTB
    cpus.cpusDiscovered = 4;
    cpus.cpusRunning = 1;

    // -- never claim more CPUs than the kernel was built to manage
    cpus.cpusDiscovered = (cpus.cpusDiscovered > MAX_CPUS ? MAX_CPUS : cpus.cpusDiscovered);

    CpuInit();
}
<|start_filename|>modules/kernel/src/pmm/PmmVars.cc<|end_filename|>
//===================================================================================================================
//
// PmmVars.cc -- Global variables for the PMM
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-11 Initial 0.3.1 ADCL Initial version
// 2020-Apr-12 #405 v0.6.1c ADCL Redesign the PMM to store the stack in the freed frames themselves
//
//===================================================================================================================
#include "mmu.h"
#include "pmm.h"
//
// -- Has the PMM been initialized properly for use?
// ----------------------------------------------
EXPORT KERNEL_DATA
bool pmmInitialized = false;
//
// -- This is the structure for managing the PMM
// ------------------------------------------
EXPORT KERNEL_DATA
Pmm_t pmm = {
.framesAvail = {0},
.lowLock = {0},
.lowStack = (PmmFrameInfo_t *)MMU_PMM_LOW_TOS,
.normLock = {0},
.normStack = (PmmFrameInfo_t *)MMU_PMM_NORM_TOS,
.scrubLock = {0},
.scrubStack = (PmmFrameInfo_t *)MMU_PMM_SCRUB_TOS,
.searchLock = {0},
.search = (PmmFrameInfo_t *)MMU_PMM_SEARCH_TOS,
.insertLock = {0},
.insert = (PmmFrameInfo_t *)MMU_PMM_INSERT,
};
<|start_filename|>modules/kernel/src/syscall/SyscallSendMessage.cc<|end_filename|>
//===================================================================================================================
//
// SyscallSendMessage.cc -- SYSCALL for POSIX `msgsnd()`
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// SYSCALL to send a message
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Nov-02 Initial 0.1.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "process.h"
#include "interrupt.h"
#include <errno.h>
//
// -- SYSCALL to send a message
// ----------------------------
EXTERN_C EXPORT SYSCALL
void SyscallSendMessage(isrRegs_t *regs)
{
    // NOTE(review): the entire handler is compiled out (`#if 0`), so this syscall is
    // currently a no-op and leaves SYSCALL_RETURN(regs) untouched -- confirm intended
#if 0
    PID_t pid = SYSCALL_SNDMSG_PARM1(regs);
    Message_t *msg = (Message_t *)SYSCALL_SNDMSG_PARM2(regs);
    Process_t *proc = ProcessGetStruct(pid);

    // -- a NULL message buffer is invalid
    if (msg == NULL) {
        SYSCALL_RETURN(regs) = -EINVAL;
        return;
    }

    // -- an unknown target PID is invalid
    if (proc == NULL) {
        SYSCALL_RETURN(regs) = -EINVAL;
        return;
    }

    SYSCALL_RETURN(regs) = MessageSend(pid, (Message_t *)msg);
#endif
}
<|start_filename|>modules/kernel/src/heap/HeapInit.cc<|end_filename|>
//===================================================================================================================
//
// HeapInit.cc -- Create and initialize the internal heap structures
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Create and initialize the internal Century heap structures.
//
// Please note that we are allocating a starting block of memory statically. This block is called
// `heapMemoryBlock`. The loader will have allocated frames for it but this heap is not located in the right area
// of virtual address. So, part of the responsibility of this initialization step is to unmap these from the
// kernel binary and then remap them into the Heap virtual address space at 0xd0000000. By doing this, the
// kernel should be able to get enough heap operational to begin to send messages, and then add more then the
// PMM is operational.
//
// Now, since I am moving the heap from the end of the kernel (which is how this was originally written), to a
// standalone block of virtual address space, there are some changes that will need to be made. Fortunately, the
// design allows for this change relatively easily.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jun-30 Initial version
// 2012-Sep-16 Leveraged from Century
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2018-Jun-01 Initial 0.1.0 ADCL Copied this file from century32 to century-os
// 2018-Nov-10 Initial 0.1.0 ADCL Move the heap memory into its own dedicate virtual address space
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "serial.h"
#include "printf.h"
#include "pmm.h"
#include "mmu.h"
#include "heap.h"
//
// -- This is how much of the heap we will allocate at compile time. This will really be frames that will be moved
// during initialization. This is 64K.
// -------------------------------------------------------------------------------------------------------------
#define INITIAL_HEAP (4096*16)
//
// -- some local and global variables
// -------------------------------
archsize_t heapStart = MMU_HEAP_START; // this is the start in virtual address space
OrderedList_t fixedList[ORDERED_LIST_STATIC];
bool fixedListUsed = 0;
static KHeap_t _heap;
KHeap_t *kHeap = &_heap;
//
// -- Initialize the heap structures
// ------------------------------
void HeapInit(void)
{
#if DEBUG_HEAP == 1
    kprintf("Start heap initialization\n");
#endif
    archsize_t vAddr = heapStart;
    archsize_t vLimit = vAddr + INITIAL_HEAP;

    // -- back the initial 64K of heap virtual space with freshly allocated frames,
    //    one 4K page at a time
    for ( ; vAddr < vLimit; vAddr += 0x1000) {
        MmuMapToFrame(vAddr, PmmAllocateFrame(), PG_KRN | PG_WRT);
    }

    // -- Set up the heap structure and list of open blocks
    KHeapFooter_t *tmpFtr;

    kMemSetB(fixedList, 0, sizeof(fixedList));

    // -- Build the first free block which is all allocated
    //    (the entire initial heap is one big hole)
    fixedList[0].block = (KHeapHeader_t *)heapStart;
    fixedList[0].next = 0;
    fixedList[0].prev = 0;
    fixedList[0].size = INITIAL_HEAP;

    _heap.strAddr = (byte_t *)heapStart;
    _heap.endAddr = ((byte_t *)_heap.strAddr) + fixedList[0].size;
    _heap.maxAddr = (byte_t *)FRAME_BUFFER_VADDR;       // heap may grow up to this address

    // -- every size-bucketed free-list pointer initially references the single big hole
    _heap.heapMemory = _heap.heap512 = _heap.heap1K =
            _heap.heap4K = _heap.heap16K = &fixedList[0];

    // -- stamp the header of the hole: magic number, hole flag, size, and back-pointer
    //    to its ordered-list entry
    fixedList[0].block->_magicUnion.magicHole = HEAP_MAGIC;
    fixedList[0].block->_magicUnion.isHole = 1;
    fixedList[0].block->size = fixedList[0].size;
    fixedList[0].block->entry = &fixedList[0];

    // -- the footer occupies the last bytes of the block and points back at the header
    tmpFtr = (KHeapFooter_t *)(((char *)fixedList[0].block) +
            fixedList[0].size - sizeof(KHeapFooter_t));
    tmpFtr->_magicUnion.magicHole = fixedList[0].block->_magicUnion.magicHole;
    tmpFtr->hdr = fixedList[0].block;

    fixedListUsed = 1;
    kHeap = &_heap;

#if DEBUG_HEAP == 1
    kprintf("Heap Created\n");
    kprintf(" Heap Start Location: %p\n", kHeap->strAddr);
    kprintf(" Current Heap Size..: %p\n", fixedList[0].size);
    kprintf(" Heap End Location..: %p\n", kHeap->endAddr);
#endif
}
<|start_filename|>platform/pc/acpi/AcpiCheckTable.cc<|end_filename|>
//===================================================================================================================
//
// AcpiCheckTable.cc -- Check the table signature and checksum to confirm it is a valid table.
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-06 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "mmu.h"
#include "hardware.h"
//
// -- Check the table to see if it is what we expect; note that this memory must be mapped before calling
// ---------------------------------------------------------------------------------------------------
//
// -- Verify an ACPI table: its leading signature must match `sig` and all of its
//    bytes must sum to zero modulo 256. The first page at `locn` must already be
//    mapped; subsequent pages of a long table are identity-mapped on the fly.
//    -----------------------------------------------------------------------------
EXTERN_C EXPORT LOADER
bool AcpiCheckTable(archsize_t locn, uint32_t sig)
{
    uint8_t *tbl = (uint8_t *)locn;

    kprintf(".. Checking the ACPI table....\n");

    // -- the first 4 bytes of every ACPI table are its signature
    if (*((uint32_t *)locn) != sig) {
        kprintf(".. (signature check fails)\n");
        return false;
    }

    // -- the 32-bit table length immediately follows the signature
    uint32_t size = *((uint32_t *)(locn + 4));
    kprintf(".. Checking %x bytes of the table\n", size);

    // -- walk every byte, identity-mapping any not-yet-mapped page as we go,
    //    accumulating the checksum
    archsize_t checksum = 0;
    for (uint32_t i = 0; i < size; i ++) {
        archsize_t loc = (archsize_t)(&tbl[i]);

        if (!MmuIsMapped(loc)) {
            MmuMapToFrame(loc, loc >> 12, PG_KRN);
        }

        checksum += tbl[i];
    }

    // -- a valid table checksums to 0 mod 256
    return (checksum & 0xff) == 0;
}
<|start_filename|>modules/kernel/inc/entry.h<|end_filename|>
//===================================================================================================================
//
// entry.h -- These are some things presented in the entry section
//
// Copyright (c) 2017-2019 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Dec-16 Initial 0.5.0 ADCL Initial version
//
//===================================================================================================================
#pragma once
#include "types.h"
//
// -- Allocate an early frame before the PMM is put in charge
// -------------------------------------------------------
EXTERN_C EXPORT ENTRY
frame_t NextEarlyFrame(void);
//
// -- This is the mmu table level 1 physical address
// ---------------------------------------------
EXTERN ENTRY_DATA
archsize_t mmuLvl1Table;
//
// -- This is the interrupt table physical address
// --------------------------------------------
EXTERN ENTRY_DATA
archsize_t intTableAddr;
//
// -- This is the stack frame we used for the loader
// ----------------------------------------------
EXTERN ENTRY_DATA
frame_t ldrStackFrame;
//
// -- These are the linker-provided symbols for the entry section
// -----------------------------------------------------------
EXTERN ENTRY_DATA
archsize_t mbStart;
EXTERN ENTRY_DATA
archsize_t mbEnd;
EXTERN ENTRY_DATA
archsize_t mbSize;
EXTERN ENTRY_DATA
archsize_t mbPhys;
#define mbPhysStart (mbPhys)
#define mbPhysEnd (mbPhys + mbSize)
//
// -- These are the linker-provided symbols for the loader section
// ------------------------------------------------------------
EXTERN ENTRY_DATA
archsize_t ldrStart;
EXTERN ENTRY_DATA
archsize_t ldrEnd;
EXTERN ENTRY_DATA
archsize_t ldrSize;
EXTERN ENTRY_DATA
archsize_t ldrPhys;
EXTERN ENTRY_DATA
archsize_t ldrVirt;
#define ldrPhysStart (ldrPhys)
#define ldrPhysEnd (ldrPhys + ldrSize)
#define ldrVirtStart (ldrVirt)
#define ldrVirtEnd (ldrEnd)
//
// -- These are the linker-provided symbols for the syscall purgatory section
// -----------------------------------------------------------------------
EXTERN ENTRY_DATA
archsize_t sysStart;
EXTERN ENTRY_DATA
archsize_t sysEnd;
EXTERN ENTRY_DATA
archsize_t sysSize;
EXTERN ENTRY_DATA
archsize_t sysPhys;
EXTERN ENTRY_DATA
archsize_t sysVirt;
#define sysPhysStart (sysPhys)
#define sysPhysEnd (sysPhys + sysSize)
#define sysVirtStart (sysVirt)
#define sysVirtEnd (sysEnd)
//
// -- These are the linker-provided symbols for the kernel code section
// -----------------------------------------------------------------
EXTERN ENTRY_DATA
archsize_t txtStart;
EXTERN ENTRY_DATA
archsize_t txtEnd;
EXTERN ENTRY_DATA
archsize_t txtSize;
EXTERN ENTRY_DATA
archsize_t txtPhys;
EXTERN ENTRY_DATA
archsize_t txtVirt;
#define txtPhysStart (txtPhys)
#define txtPhysEnd (txtPhys + txtSize)
#define txtVirtStart (txtVirt)
#define txtVirtEnd (txtEnd)
//
// -- These are the linker-provided symbols for the smp trampoline section
// -------------------------------------------------------------------
EXTERN ENTRY_DATA
archsize_t smpStart;
EXTERN ENTRY_DATA
archsize_t smpEnd;
EXTERN ENTRY_DATA
archsize_t smpSize;
EXTERN ENTRY_DATA
archsize_t smpPhys;
EXTERN ENTRY_DATA
archsize_t smpVirt;
#define smpPhysStart (smpPhys)
#define smpPhysEnd (smpPhys + smpSize)
#define smpVirtStart (smpVirt)
#define smpVirtEnd (smpEnd)
//
// -- These are the linker-provided symbols for the kernel data section
// -----------------------------------------------------------------
EXTERN ENTRY_DATA
archsize_t dataStart;
EXTERN ENTRY_DATA
archsize_t dataEnd;
EXTERN ENTRY_DATA
archsize_t dataSize;
EXTERN ENTRY_DATA
archsize_t dataPhys;
EXTERN ENTRY_DATA
archsize_t dataVirt;
#define dataPhysStart (dataPhys)
#define dataPhysEnd (dataPhys + dataSize)
#define dataVirtStart (dataVirt)
#define dataVirtEnd (dataEnd)
//
// -- These are the linker-provided symbols for the kernel bss section
// ----------------------------------------------------------------
EXTERN ENTRY_DATA
archsize_t bssStart;
EXTERN ENTRY_DATA
archsize_t bssEnd;
EXTERN ENTRY_DATA
archsize_t bssSize;
EXTERN ENTRY_DATA
archsize_t bssPhys;
EXTERN ENTRY_DATA
archsize_t bssVirt;
#define bssPhysStart (bssPhys)
#define bssPhysEnd (bssPhys + bssSize)
#define bssVirtStart (bssVirt)
#define bssVirtEnd (bssEnd)
//
// -- These are the linker-provided symbols for the stab section
// ----------------------------------------------------------
EXTERN ENTRY_DATA
archsize_t stabStart;
EXTERN ENTRY_DATA
archsize_t stabEnd;
EXTERN ENTRY_DATA
archsize_t stabSize;
EXTERN ENTRY_DATA
archsize_t stabPhys;
EXTERN ENTRY_DATA
archsize_t stabVirt;
#define stabPhysStart (stabPhys)
#define stabPhysEnd (stabPhys + stabSize)
#define stabVirtStart (stabVirt)
#define stabVirtEnd (stabEnd)
//
// -- This is an array of function pointers that need to be called to initialize some data
// ------------------------------------------------------------------------------------
typedef void (*FunctionPtr_t)(void);
// -- these 2 addresses bound the array
EXTERN LOADER_DATA
FunctionPtr_t const init_start[], init_end[];
<|start_filename|>modules/kernel/src/butler/Butler.cc<|end_filename|>
//===================================================================================================================
//
//  Butler.cc -- This is the main butler process
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-10 Initial v0.6.1b ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "msgq.h"
#include "butler.h"
//
// -- The main butler process, dispatching tasks to complete
// ------------------------------------------------------
//
// -- The main butler process: initialize, then loop forever dispatching cleanup
//    work as messages arrive on the butler's queue.
//    ---------------------------------------------------------------------------
EXTERN_C EXPORT KERNEL NORETURN
void Butler(void)
{
    long msgType;

//    kprintf("Assuming the Butler role!\n");
    ButlerInit();
//    kprintf(".. Butler Initialization complete\n");

    for (;;) {
        // -- block until we have something to do
        MessageQueueReceive(butlerMsgq, &msgType, 0, 0, true);
//        kprintf("Have a message to process...\n");

        if (msgType == BUTLER_CLEAN_PMM) {
            ButlerCleanPmm();
        } else if (msgType == BUTLER_CLEAN_PROCESS) {
            ButlerCleanProcess();
        } else {
            // -- any other message type is a programming error
            assert(false);
        }
    }
}
<|start_filename|>modules/kernel/src/heap/HeapAlloc.cc<|end_filename|>
//===================================================================================================================
//
// HeapAlloc.cc -- Allocate a number of bytes from the heap
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Allocate a number of bytes from the heap, returning a pointer to the block of memory requested. This block of
// memory is adjusted for the number of bytes in the header block; the pointer is the first byte beyond the header.
//
// The following conditions are accounted for in this function:
// 1. A hole is found that is EXACTLY the size needed (rare) -- allocate it
// 2. A hole is found that is slightly larger than needed, but not enough space to realistically leave another
// hole behind -- allocate the hole
// 3. A hole is found to be too big -- split the hole and allocate the correct amount of heap
// 4. A hole that is not enough can be found -- return 0
//
// When a request for memory must be page aligned:
// 5. A hole before the allocated memory is too small -- add it to the previous block
// 6. A hole after the allocated memory is too small -- allocate it with the requested memory
// 7. Both the 2 situations above -- completed both actions
// 8. A hole is too big -- split it accordingly taking into account the above
//
// TODO: Fix potential memory leak when multiple small alignments get added to previous blocks that will never
// be deallocated.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-02 Initial version
// 2012-Aug-15 #44 Implement Enhancement #44 (Use Mutex to lock) (2018-05-31 - removed)
// 2012-Sep-16 Leveraged from Century
// 2013-Sep-01 #80 Re-implement Mutexes (that work now) (commented again)
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2013-Sep-13 #74 Rewrite Debug.h to use assertions and write to TTY_LOG
// 2018-May-31 Initial 0.1.0 ADCL Copied this file from century32 to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "spinlock.h"
#include "heap.h"
// -- serializes all heap mutations
Spinlock_t heapLock = {0};
//
// -- Alloc a block of memory from the heap. Returns a pointer just past the block
//    header, or 0 when the heap cannot satisfy the request even after expanding.
//    When `align` is set, the returned pointer is page-aligned.
//    -------------------------------------------------------------------------------
void *HeapAlloc(size_t size, bool align)
{
    // -- the spinlock macro opens a block; interrupts stay disabled until the
    //    matching SPINLOCK_RLS_RESTORE_INT on every exit path
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(heapLock) {
        size_t adjustedSize;
        OrderedList_t *entry;
        KHeapHeader_t *hdr;

        if (size < HEAP_SMALLEST) size = HEAP_SMALLEST;     // must allocate at least 1 byte

        // -- round the request up to the next BYTE_ALIGNMENT boundary
        if (size & (BYTE_ALIGNMENT - 1)) {
            size += BYTE_ALIGNMENT;
            size &= ~(BYTE_ALIGNMENT - 1);
        }

        // -- account for the bookkeeping surrounding the user data
        adjustedSize = size + sizeof(KHeapHeader_t) + sizeof(KHeapFooter_t);

again:
        entry = HeapFindHole(adjustedSize, align);

        // -- are we out of memory?
        if (!entry) {
            HeapCheckHealth();
            // -- try growing the heap once; on success retry the search
            if(HeapExpand()) goto again;

            SPINLOCK_RLS_RESTORE_INT(heapLock, flags);
            return 0;
        }

        HeapValidateHdr(entry->block, "HeapAlloc()");
        hdr = entry->block;

        // if we are aligning, take care of it now
        if (align) {
            entry = HeapAlignToPage(entry);     // must reset entry
            if (!entry) {
                HeapCheckHealth();
                if(HeapExpand()) goto again;

                SPINLOCK_RLS_RESTORE_INT(heapLock, flags);
                return 0;
            }
            HeapValidateHdr(entry->block, "HeapAlloc() after alignment");
            hdr = entry->block;
        }

        // perfect fit -OR- just a little too big (leftover too small to form a hole).
        // BUGFIX: this previously computed `adjustedSize - hdr->size`, which underflows
        // (size_t) whenever the hole is larger than the request, so the "slightly too
        // big" case never matched and tiny remainders were handed to HeapSplitAt().
        if (hdr->size == adjustedSize || hdr->size - adjustedSize < MIN_HOLE_SIZE) {
            KHeapFooter_t *ftr;
            ftr = (KHeapFooter_t *)((byte_t *)hdr + hdr->size - sizeof(KHeapFooter_t));

            HeapReleaseEntry(entry);        // remove the hole from the ordered free lists
            hdr->_magicUnion.isHole = 0;
            ftr->_magicUnion.isHole = 0;

            HeapValidateHdr(hdr, "Resulting Header before return (good size)");
            HeapCheckHealth();
            CLEAN_HEAP();

            SPINLOCK_RLS_RESTORE_INT(heapLock, flags);
            return (void *)((byte_t *)hdr + sizeof(KHeapHeader_t));
        }

        // the only thing left is that it is too big and needs to be split
        hdr = HeapSplitAt(entry, adjustedSize);     // var entry is no longer valid after call
        HeapValidatePtr("HeapAlloc()");
        HeapValidateHdr(hdr, "Resulting Header before return (big size)");
        HeapCheckHealth();
        CLEAN_HEAP();

        SPINLOCK_RLS_RESTORE_INT(heapLock, flags);
        return (void *)((byte_t *)hdr + sizeof(KHeapHeader_t));
    }
}
<|start_filename|>modules/kernel/src/process/ProcessStart.cc<|end_filename|>
//===================================================================================================================
//
// ProcessStart.cc -- Perform the startup tasks for a new process
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This function will be called for every new process in order to make sure that all new processes have
// the proper initialization completed.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-16 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "cpu.h"
#include "process.h"
//
// -- complete any new task initialization
// ------------------------------------
EXPORT KERNEL
void ProcessStart(void)
{
    // -- a brand-new process arrives here holding the scheduler lock exactly once
    //    (taken on its behalf during the context switch); verify before releasing
    assert_msg(AtomicRead(&scheduler.schedulerLockCount) > 0,
            "`ProcessStart()` is executing for a new process without holding the proper lock");
    assert_msg(AtomicRead(&scheduler.schedulerLockCount) == 1,
            "`ProcessStart()` is executing while too many locks are held");

    // -- hand back the lock and allow preemption; the process body runs after this
    ProcessUnlockScheduler();
    EnableInterrupts();
}
<|start_filename|>platform/pc/apic/ApicVars.cc<|end_filename|>
//===================================================================================================================
//
// ApicVars.cc -- These are the variables for the x86 APIC
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-19 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "timer.h"
#include "pic.h"
//
// -- This is the structure for the data needed by this driver
// --------------------------------------------------------
EXPORT KERNEL_DATA
IoApicDeviceData_t ioapicData = {
    // -- maps each abstract IRQ number to its IO APIC redirection-table register;
    //    note IRQ0 uses IOREDTBL2 and INTR uses IOREDTBL0, IRQ2 is unused (0)
    .redirTableEntry = {
        IOREDTBL2,          // IRQ0
        IOREDTBL1,          // IRQ1
        0,                  // IRQ2
        IOREDTBL3,          // IRQ3
        IOREDTBL4,          // IRQ4
        IOREDTBL5,          // IRQ5
        IOREDTBL6,          // IRQ6
        IOREDTBL7,          // IRQ7
        IOREDTBL8,          // IRQ8
        IOREDTBL9,          // IRQ9
        IOREDTBL10,         // IRQ10
        IOREDTBL11,         // IRQ11
        IOREDTBL12,         // IRQ12
        IOREDTBL13,         // IRQ13
        IOREDTBL14,         // IRQ14
        IOREDTBL15,         // IRQ15
        IOREDTBL16,         // PIRQ0
        IOREDTBL17,         // PIRQ1
        IOREDTBL18,         // PIRQ2
        IOREDTBL19,         // PIRQ3
        IOREDTBL20,         // MIRQ0
        IOREDTBL21,         // MIRQ1
        IOREDTBL22,         // GPIRQ
        IOREDTBL23,         // SMI
        IOREDTBL0,          // INTR
    },
};
//
// -- This is the device description for the IO APIC
//    ----------------------------------------------
//    Function-pointer table binding the generic PIC interface to the IO APIC /
//    local APIC implementations.
EXPORT KERNEL_DATA
PicDevice_t ioapicDriver = {
    .device = {
        .name = {'a', 'p', 'i', 'c', '\0'},
        .deviceData = (DeviceData_t)&ioapicData,
    },
    .ipiReady = false,                          // IPIs become available after PicInit
    .PicInit = _IoApicInit,
    .PicRegisterHandler = _IoApicRegisterHandler,
    .PicMaskIrq = _IoApicMaskIrq,
    .PicUnmaskIrq = _IoApicUnmaskIrq,
    .PicEoi = _IoApicEoi,
    .PicBroadcastIpi = _LApicBroadcastIpi,      // IPI operations go through the local APIC
    .PicBroadcastInit = _LApicBroadcastInit,
    .PicBroadcastSipi = _LApicBroadcastSipi,
};
//
// -- This is the device description for the local apic timer
//    -------------------------------------------------------
EXPORT KERNEL_DATA
TimerDevice_t lapicTimerControl = {
    .TimerCallBack = TimerCallBack,
    .TimerInit = _LApicInit,
    .TimerEoi = _LApicEoi,
    .TimerPlatformTick = _TimerPlatformTick,
    .TimerCurrentCount = _TimerCurrentCount,
};
<|start_filename|>modules/kernel/src/process/ProcessUnlockScheduler.cc<|end_filename|>
//===================================================================================================================
//
// ProcessUnlockScheduler.cc -- Unlock the scheduler after manipulation
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Nov-26 Initial 0.4.6a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "lists.h"
#include "timer.h"
#include "spinlock.h"
#include "process.h"
//
// -- Unlock the scheduler after changes
// ----------------------------------
EXPORT KERNEL
void ProcessUnlockScheduler(void)
{
    // -- the lock count must be positive or lock/unlock calls are unbalanced
    //    (BUGFIX: assert message read "out if sync")
    assert_msg(AtomicRead(&scheduler.schedulerLockCount) > 0, "schedulerLockCount out of sync");

    // -- the lock nests; only the outermost unlock releases the spinlock and
    //    restores the interrupt state captured when it was first taken
    if (AtomicDecAndTest0(&scheduler.schedulerLockCount)) {
//        kprintf("Scheduler unlocked on CPU%d\n", thisCpu->cpuNum);
        SPINLOCK_RLS_RESTORE_INT(schedulerLock, scheduler.flags);
    }
}
<|start_filename|>modules/kernel/src/heap/HeapMergeRight.cc<|end_filename|>
//===================================================================================================================
//
// HeapMergeRight.cc -- Merge the freeing block with the block to the right if free as well
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Merge the freeing block with the block to the right if free as well
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-26 Initial version
// 2012-Sep-16 Leveraged from Century
// 2012-Sep-23 #90 Fixed issue with calc'ing the right footer
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2013-Sep-13 #74 Rewrite Debug.h to use assertions and write to TTY_LOG
// 2018-Sep-01 Initial 0.1.0 ADCL Copy this file from century32 to century-os
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
//
// -- Merge a new hole with the existing holes on the right side of this one in memory
// -------------------------------------------------------------------------------
//
// -- Merge a freeing block with the hole to its right (if that neighbor is a hole),
//    returning a new ordered-list entry for the combined hole, or 0 when no merge
//    is possible.
//    -------------------------------------------------------------------------------
OrderedList_t *HeapMergeRight(KHeapHeader_t *hdr)
{
    KHeapFooter_t *rightFtr;
    KHeapHeader_t *rightHdr;

    if (!assert(hdr != NULL)) HeapError("Bad Header passed into HeapMergeRight()", "");

    rightHdr = (KHeapHeader_t *)((byte_t *)hdr + hdr->size);

    // -- BUGFIX: bounds-check BEFORE dereferencing rightHdr; when `hdr` is the last
    //    block in the heap, rightHdr points past endAddr and reading rightHdr->size
    //    would touch memory outside the heap
    if ((byte_t *)rightHdr + sizeof(KHeapHeader_t) > kHeap->endAddr) return 0;

    rightFtr = (KHeapFooter_t *)((byte_t *)rightHdr + rightHdr->size - sizeof(KHeapFooter_t));

    // -- the neighbor's footer must also lie fully within the heap
    if ((byte_t *)rightFtr + sizeof(KHeapFooter_t) > kHeap->endAddr) return 0;
    HeapValidateHdr(rightHdr, "rightHeader in HeapMergeRight()");

    if (!rightHdr->_magicUnion.isHole) return 0;    // make sure the right block is a hole

    // -- absorb the right hole: drop its free-list entry, grow this block over it,
    //    and repoint its footer back at this header
    HeapReleaseEntry(rightHdr->entry);
    hdr->size += rightHdr->size;
    rightFtr->hdr = hdr;
    hdr->_magicUnion.isHole = rightFtr->_magicUnion.isHole = 1;

    return HeapNewListEntry(hdr, 0);
}
<|start_filename|>modules/kernel/src/process/ProcessListRemove.cc<|end_filename|>
//===================================================================================================================
//
// ProcessListRemove.cc -- Remove a process from whatever list it is on
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-30 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "lists.h"
#include "spinlock.h"
#include "process.h"
//
// -- Remove the process from whatever list it is on, ensuring proper locking
// -----------------------------------------------------------------------
EXPORT KERNEL
void ProcessListRemove(Process_t *proc)
{
    if (!assert(proc != NULL)) return;

    // -- is it already not on a list? (a node linked to itself is detached)
    if (proc->stsQueue.next == &proc->stsQueue) return;

    //
    // -- Is this process on a queue? A running process is not queued anywhere;
    //    every queued status is removed the same way. (The original code had a
    //    nested switch on proc->priority whose four branches were identical --
    //    collapsed here with no behavior change.)
    //    ----------------------------------------------------------------------
    if (proc->status != PROC_RUNNING) {
        switch (proc->status) {
        case PROC_DLYW:
        case PROC_MSGW:
        case PROC_MTXW:
        case PROC_SEMW:
        case PROC_TERM:
        case PROC_READY:
            ListRemoveInit(&proc->stsQueue);
            break;

        default:
            // do nothing for any other status
            break;
        }
    }
}
<|start_filename|>modules/kernel/src/heap/HeapCheckHealth.cc<|end_filename|>
//===================================================================================================================
//
// HeapCheckHealth.cc -- Check the health of the heap as we make changes
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Perform several sanity checks on the heap in order to verify its integrity is still good.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-May-31 Initial 0.1.0 ADCL Initial version (copied out of century32 -- HeapDump.c)
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "heap.h"
#if DEBUG_HEAP == 1
//
// -- Execute some sanity checks on the overall heap structures
// ---------------------------------------------------------
void HeapCheckHealth(void)
{
    KHeapHeader_t *block;
    KHeapFooter_t *ftr;

    // -- running tallies gathered while walking every block in the heap
    uint32_t numBlocks = 0;
    uint32_t numAlloc = 0;
    uint32_t numFree = 0;
    uint32_t numCorrupt = 0;
    uint32_t ttlAlloc = 0;
    uint32_t ttlFree = 0;
    uint32_t largeSize = 0;

    block = (KHeapHeader_t *)kHeap->strAddr;

    // guaranteed to be at least 1 block
    do {
        ftr = (KHeapFooter_t *)((char*)block + block->size - sizeof(KHeapFooter_t));

        // count the number of blocks regardless of status
        numBlocks ++;

        // now determine if block is corrupt
        // (masking 0xfffffffe: the low bit of magicHole apparently carries the
        //  isHole flag, so it is excluded from the magic comparison)
        if ((block->_magicUnion.magicHole & 0xfffffffe) != HEAP_MAGIC ||
                (ftr->_magicUnion.magicHole & 0xfffffffe) != HEAP_MAGIC) {
            numCorrupt ++;
        } else if (block->_magicUnion.magicHole != ftr->_magicUnion.magicHole) {
            // -- header and footer magic must agree exactly (including the hole bit)
            numCorrupt ++;
        } else if (ftr->hdr != block) {
            // -- footer must point back at its own header
            numCorrupt ++;

        // now check for free
        } else if (block->_magicUnion.isHole == 1) {
            // -- a hole must have a free-list entry; track the largest hole seen
            if (block->entry != 0) {
                numFree ++;
                ttlFree += block->size;

                if (block->size > largeSize) {
                    largeSize = block->size;
                }
            } else {
                numCorrupt ++;
            }

        // now check for alloc
        } else if (block->_magicUnion.isHole == 0) {
            // -- an allocated block must NOT have a free-list entry
            if (block->entry == 0) {
                numAlloc ++;
                ttlAlloc += block->size;
            } else {
                numCorrupt ++;
            }
        }

        block = (KHeapHeader_t *)((char *)block + block->size);
    } while ((byte_t *)block < kHeap->endAddr);

    // -- a healthy heap simply returns; any corruption hangs the CPU here so the
    //    state can be inspected in a debugger
    if (!numCorrupt) return;
    else while (1);
}
#endif
<|start_filename|>modules/kernel/src/process/ProcessMicroSleepUntil.cc<|end_filename|>
//===================================================================================================================
//
// ProcessMicroSleepUntil.cc -- Sleep until we get to the requested micros since boot
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Oct-14 Initial 0.1.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "process.h"
//
// -- sleep until we get to the number of micros since boot
// -----------------------------------------------------
EXPORT KERNEL
void ProcessDoMicroSleepUntil(uint64_t when)
{
    // -- caller must hold the scheduler lock; we are about to touch scheduler state
    assert_msg(AtomicRead(&scheduler.schedulerLockCount) > 0,
            "Calling `ProcessDoMicroSleepUntil()` without the proper lock");
    assert_msg(currentThread != NULL, "scheduler.currentProcess is NULL");

    // -- the requested wake time has already passed; nothing to do
    if (when <= TimerCurrentCount(timerControl)) return;

    currentThread->wakeAtMicros = when;

    // -- keep the scheduler's next-wake time as the earliest pending wake-up
    if (when < scheduler.nextWake) scheduler.nextWake = when;

    // -- park this process on the sleeping list and block until woken
    //    (BUGFIX: this line was corrupted to `¤tThread` -- an HTML-entity
    //    mangling of `&currentThread` -- which does not compile)
    Enqueue(&scheduler.listSleeping, &currentThread->stsQueue);
    ProcessDoBlock(PROC_DLYW);
}
<|start_filename|>modules/kernel/src/butler/ButlerVars.cc<|end_filename|>
//===================================================================================================================
//
// ButlerVars.cc -- Variables used by the Butler
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-11 Initial v0.6.1b ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "msgq.h"
#include "butler.h"
//
// -- This is the message queue the butler will use
// ---------------------------------------------
EXPORT KERNEL_BSS
MessageQueue_t *butlerMsgq;             // -- the queue the butler listens on (extern'd in butler.h)

//
// -- These are several memory locations that were provided by the linker that we may want to keep track of
//    -----------------------------------------------------------------------------------------------------

// -- kernel .text section: start/end virtual addresses, physical address, and size
EXPORT KERNEL_BSS
uint8_t *krnKernelTextStart;
EXPORT KERNEL_BSS
uint8_t *krnKernelTextEnd;
EXPORT KERNEL_BSS
archsize_t krnKernelTextPhys;
EXPORT KERNEL_BSS
archsize_t krnKernelTextSize;

// -- kernel data section: start/end virtual addresses, physical address, and size
EXPORT KERNEL_BSS
uint8_t *krnKernelDataStart;
EXPORT KERNEL_BSS
uint8_t *krnKernelDataEnd;
EXPORT KERNEL_BSS
archsize_t krnKernelDataPhys;
EXPORT KERNEL_BSS
archsize_t krnKernelDataSize;

// -- syscall section: start/end virtual addresses, physical address, and size
EXPORT KERNEL_BSS
uint8_t *krnSyscallStart;
EXPORT KERNEL_BSS
uint8_t *krnSyscallEnd;
EXPORT KERNEL_BSS
archsize_t krnSyscallPhys;
EXPORT KERNEL_BSS
archsize_t krnSyscallSize;

// -- stab section: start/end virtual addresses, physical address, and size
EXPORT KERNEL_BSS
uint8_t *krnStabStart;
EXPORT KERNEL_BSS
uint8_t *krnStabEnd;
EXPORT KERNEL_BSS
archsize_t krnStabPhys;
EXPORT KERNEL_BSS
archsize_t krnStabSize;
<|start_filename|>modules/kernel/src/debugger/DebugEntry.cc<|end_filename|>
//===================================================================================================================
//
// DebugStart.cc -- This is the entry point for the kernel debugger
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-02 Initial v0.6.0a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "serial.h"
#include "process.h"
#include "debugger.h"
//
// -- This is the main entry point for the kernel debugger
// ----------------------------------------------------
//
// -- Main entry point for the kernel debugger: loop forever, prompting for and
//    dispatching debugger commands.
EXTERN_C EXPORT KERNEL
void DebugStart(void)
{
    EnableInterrupts();

    // -- we want the highest chance of getting CPU time!
    currentThread->priority = PTY_OS;

    debugState = DBG_HOME;

    kprintf(ANSI_CLEAR ANSI_SET_CURSOR(0,0) ANSI_FG_RED ANSI_ATTR_BOLD
            "Welcome to the Century-OS kernel debugger\n" ANSI_ATTR_NORMAL);

    while (true) {
        DebugPrompt(debugState);
        DebuggerCommand_t cmd = DebugParse(debugState);

        switch(cmd) {
        case CMD_EXIT:
            // -- return to the home state and wipe the command buffer
            debugState = DBG_HOME;
            kMemSetB(debugCommand, 0, DEBUG_COMMAND_LEN);
            continue;

        case CMD_SCHED:
            debugState = DBG_SCHED;
            DebugScheduler();
            continue;

        case CMD_TIMER:
            debugState = DBG_TIMER;
            DebugTimer();
            continue;

        case CMD_MSGQ:
            debugState = DBG_MSGQ;
            DebugMsgq();
            continue;

        case CMD_ERROR:
        default:
            kprintf("\n\n" ANSI_ATTR_BOLD ANSI_FG_RED
                    "Something went wrong (main) -- a bug in the debugger is likely\n" ANSI_ATTR_NORMAL);
            continue;
        }
        // -- NOTE: a trailing `if (cmd == CMD_ERROR) continue;` was removed here: it was
        //    unreachable because every switch case (including default) ends in `continue`.
    }
}
<|start_filename|>arch/x86/cpu/ArchEarlyCpuInit.cc<|end_filename|>
//===================================================================================================================
//
// ArchEarlyCpuInit.cc -- Initialize the CPU structures for the x86 arch
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This will have several responsibilities;
// 1) Create the permanent GDT Structure in low memory
// 2) Enable the permanent GDT for CPU 0
// 3) Create the permanent IDT Structure in low memory (copy from existing)
// 4) Enable the permanent IDT for CPU 0
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Feb-01 Initial v0.5.0f ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "cpu.h"
//
// -- Complete the CPU initialization
// -------------------------------
EXTERN_C EXPORT KERNEL
void ArchEarlyCpuInit(void)
{
    kprintf("Completing CPU initialization\n");

    // -- install the permanent GDT, then the permanent IDT, for the boot CPU
    //    (see the file header: structures are built in low memory and then enabled)
    ArchGdtSetup();
    ArchIdtSetup();
}
<|start_filename|>platform/pc/apic/IoApicInit.cc<|end_filename|>
//===================================================================================================================
//
// IoApicInit.cc -- Initialize the IO APIC
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-20 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "interrupt.h"
#include "hw-disc.h"
#include "mmu.h"
#include "pic.h"
//
// -- Handle a spurious interrupt from the 8259 -- just in case
// ----------------------------------------------------------
EXTERN_C HIDDEN KERNEL
void SpurriousPic(isrRegs_t *regs) { }      // -- deliberately empty: a spurious 8259 interrupt needs no action
//
// -- Initialize the IO APIC
// ----------------------
EXTERN_C EXPORT LOADER
void _IoApicInit(PicDevice_t *dev, const char *name)
{
    // -- cap the number of IOAPICs handled at the compiled-in maximum
    int count = GetIoapicCount();
    if (count > MAX_IOAPIC) {
        kprintf("WARNING! The number of IOAPICs is greater than the compiled support\n");
        kprintf(" %x are supported; %x were found\n", MAX_IOAPIC, count);
        count = MAX_IOAPIC;
    }

    //
    // -- For some buggy implementations, remap the 8259 PIC to space out of the way.
    //    The sequence below looks like the standard 8259 init handshake (0x11 = begin
    //    init; then vector offsets 0xf0/0xf8; then cascade wiring 0x04/0x02; then
    //    0x01 = 8086 mode), with all IRQs masked before and after -- TODO confirm
    //    against the 8259A datasheet.
    // --------------------------------------------------------------------------
    outb(PIC1 + PIC_MASTER_DATA, 0xff);       // Disable all IRQs
    outb(PIC2 + PIC_SLAVE_DATA, 0xff);        // Disable all IRQs
    outb(PIC1 + PIC_MASTER_COMMAND, 0x11);
    outb(PIC2 + PIC_SLAVE_COMMAND, 0x11);
    outb(PIC1 + PIC_MASTER_DATA, 0xf0);
    outb(PIC2 + PIC_SLAVE_DATA, 0xf8);
    outb(PIC1 + PIC_MASTER_DATA, 0x04);
    outb(PIC2 + PIC_SLAVE_DATA, 0x02);
    outb(PIC1 + PIC_MASTER_DATA, 0x01);
    outb(PIC2 + PIC_SLAVE_DATA, 0x01);
    outb(PIC1 + PIC_MASTER_DATA, 0xff);       // Disable all IRQs
    outb(PIC2 + PIC_SLAVE_DATA, 0xff);        // Disable all IRQs

    // -- register the IPI handlers, and point every vector the remapped 8259 might
    //    still raise (0xf2..0xfe) at the do-nothing spurious handler
    IsrRegister(IPI_PANIC, (isrFunc_t)Halt);
    IsrRegister(IPI_TLB_FLUSH, IpiHandleTlbFlush);
    IsrRegister(0xf2, SpurriousPic);
    IsrRegister(0xf3, SpurriousPic);
    IsrRegister(0xf4, SpurriousPic);
    IsrRegister(0xf5, SpurriousPic);
    IsrRegister(0xf6, SpurriousPic);
    IsrRegister(0xf7, SpurriousPic);
    IsrRegister(0xf8, SpurriousPic);
    IsrRegister(0xf9, SpurriousPic);
    IsrRegister(0xfa, SpurriousPic);
    IsrRegister(0xfb, SpurriousPic);
    IsrRegister(0xfc, SpurriousPic);
    IsrRegister(0xfd, SpurriousPic);
    IsrRegister(0xfe, SpurriousPic);
    IsrRegister(IPI_DEBUGGER, IpiHandleDebugger);

    IoApicDeviceData_t *data = (IoApicDeviceData_t*)dev->device.deviceData;

    // -- map each IOAPIC's MMIO page, then dump its ID, version, and redirection table
    for (int i = 0; i < count; i ++) {
        archsize_t addr = GetIoapicAddr(i);
        Ioapicid_t apicid;
        Ioapicver_t apicver;
        Ioapicredtbl_t apicredir;

        MmuMapToFrame(addr, addr>>12, PG_DEVICE | PG_KRN | PG_WRT);

        apicid.reg = IoapicRead(addr,IOAPICID);
        apicver.reg = IoapicRead(addr, IOAPICVER);

        kprintf("IOAPIC located at: %p\n", addr);
        kprintf("  The APIC ID is %x\n", apicid.apicId);
        kprintf("  The APIC Version is %x; the max redir is %x\n", apicver.version, apicver.maxRedir);

        // -- each redirection entry is a pair of 32-bit registers
        for (int j = 0; j <= apicver.maxRedir; j ++) {
            apicredir.reg0 = IoapicRead(addr, IOREDTBL0 + (j * 2));
            apicredir.reg1 = IoapicRead(addr, IOREDTBL0 + (j * 2) + 1);
            kprintf("  Redirection table entry %x: %p %p\n", j, apicredir.reg1, apicredir.reg0);
        }

        data->ioapicBase = addr;            // TODO: fix this; assumes 1 IOAPIC
    }

    // -- MSR 0x1b holds the local APIC base; mask down to the page address and map it
    data->localApicBase = RDMSR(0x1b) & 0xfffff000;
    MmuMapToFrame(data->localApicBase, data->localApicBase>>12, PG_DEVICE | PG_KRN | PG_WRT);
}
<|start_filename|>modules/kernel/src/pmm/PmmPop.cc<|end_filename|>
//===================================================================================================================
//
// PmmPop.cc -- Pop a node of blocks off a stack of frames
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Popping a node is no longer a trivial task. So, a function is added to complete this work and maintain
// code readability.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-12 #405 v0.6.1c ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "spinlock.h"
#include "mmu.h"
#include "pmm.h"
//
// -- Pop a node off the stack; stack must be locked to call this function
// --------------------------------------------------------------------
void PmmPop(PmmFrameInfo_t *stack)
{
    // kprintf("Popping a node off the stack at %p\n", stack);

    // -- nothing mapped at the stack address means there is nothing to pop
    if (!MmuIsMapped((archsize_t)stack)) return;

    // -- remember the next frame before tearing down the current top node
    frame_t nx = stack->next;

    // -- clear out the data elements! -- prev is already 0
    stack->count = 0;
    stack->frame = 0;
    stack->next = 0;

    // -- unmap the old top node...
    MmuUnmapPage((archsize_t)stack);

    // -- ...and, when another node exists, map it at the same address as the new top
    if (nx) {
        MmuMapToFrame((archsize_t)stack, nx, PG_KRN | PG_WRT);
        stack->prev = 0;            // -- the new top has no predecessor
    }
}
<|start_filename|>modules/kernel/src/process/ProcessSchedule.cc<|end_filename|>
//===================================================================================================================
//
// ProcessSchedule.cc -- Select the next process to schedule and switch to it
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-18 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "lists.h"
#include "process.h"
//
// -- Find the next process to give the CPU to
// ----------------------------------------
HIDDEN KERNEL
Process_t *ProcessNext(ProcPriority_t pty)
{
#if DEBUG_ENABLED(ProcessNext)
    kprintf("From within ProcessNext():\n");
    ProcessDoCheckQueue();
#endif

    // -- walk the ready queues from highest to lowest priority; the OS queue is
    //    always eligible, while each lower queue is only used when its priority is
    //    at least `pty`; returns NULL when nothing is runnable
    if (IsListEmpty(&scheduler.queueOS) == false) {
#if DEBUG_ENABLED(ProcessNext)
        kprintf("..OS\n");
#endif
        return FIND_PARENT(scheduler.queueOS.list.next, Process_t, stsQueue);
    } else if (IsListEmpty(&scheduler.queueHigh) == false && PTY_HIGH >= pty) {
#if DEBUG_ENABLED(ProcessNext)
        kprintf("..High\n");
#endif
        return FIND_PARENT(scheduler.queueHigh.list.next, Process_t, stsQueue);
    } else if (IsListEmpty(&scheduler.queueNormal) == false && PTY_NORM >= pty) {
#if DEBUG_ENABLED(ProcessNext)
        kprintf("..Normal\n");
#endif
        return FIND_PARENT(scheduler.queueNormal.list.next, Process_t, stsQueue);
    } else if (IsListEmpty(&scheduler.queueLow) == false && PTY_LOW >= pty) {
#if DEBUG_ENABLED(ProcessNext)
        kprintf("..Low\n");
#endif
        return FIND_PARENT(scheduler.queueLow.list.next, Process_t, stsQueue);
    } else if (IsListEmpty(&scheduler.queueIdle) == false && PTY_IDLE >= pty) {
#if DEBUG_ENABLED(ProcessNext)
        kprintf("..Idle\n");
#endif
        return FIND_PARENT(scheduler.queueIdle.list.next, Process_t, stsQueue);
    } else {
        // kprintf("FATAL: CPU%d: ", thisCpu->cpuNum);
        // CpuPanicPushRegs("Nothing available to schedule!!");
        return NULL;
    }
}
//
// -- pick the next process to execute and execute it; ProcessLockScheduler() must be called before calling
// -----------------------------------------------------------------------------------------------------
//
// -- Pick the next process to execute and switch to it; ProcessLockScheduler()
//    must be held by the caller.
EXPORT KERNEL
void ProcessSchedule(void)
{
    assert_msg(AtomicRead(&scheduler.schedulerLockCount) > 0,
            "Calling `ProcessSchedule()` without holding the proper lock");
    if (!assert(currentThread != NULL)) {
        CpuPanicPushRegs("currentThread is NULL entering ProcessSchedule");
    }

    Process_t *next = NULL;
    ProcessUpdateTimeUsed();

    // -- while rescheduling is postponed, just note that a change is pending and leave
    if (AtomicRead(&scheduler.postponeCount) != 0) {
#if DEBUG_ENABLED(ProcessSchedule)
        kprintf("Postponing a reschedule\n");
#endif
        // -- fixed encoding corruption: `¤tThread` was a mangled `&currentThread`
        if (currentThread && AtomicRead(&currentThread->quantumLeft) < 0) {
            scheduler.processChangePending = true;
        }
        return;
    }

    next = ProcessNext(currentThread?currentThread->priority:PTY_IDLE);
    if (next != NULL) {
        ProcessListRemove(next);
        assert(AtomicRead(&scheduler.postponeCount) == 0);
#if DEBUG_ENABLED(ProcessSchedule)
        kprintf("CPU%d: preparing to change to process at %p\n", thisCpu->cpuNum, next);
#endif
        ProcessSwitch(next);
    } else if (currentThread->status == PROC_RUNNING) {
        // -- Do nothing; the current process can continue; reset quantum
        //    (fixed the same `&currentThread` encoding corruption here)
        AtomicAdd(&currentThread->quantumLeft, currentThread->priority);
        return;
    } else {
        // -- No tasks available; so we go into idle mode
        Process_t *save = currentThread;        // we will save this process for later
        CurrentThreadAssign(NULL);              // nothing is running!

        do {
            // -- -- temporarily unlock the scheduler and enable interrupts for the timer to fire
            ProcessUnlockScheduler();
            EnableInterrupts();
            HaltCpu();
            DisableInterrupts();
            ProcessLockScheduler(false);        // make sure that this does not overwrite the process's flags
            next = ProcessNext(PTY_IDLE);
        } while (next == NULL);

        ProcessListRemove(next);

        // -- restore the current Process and change if needed
        ProcessUpdateTimeUsed();
        CurrentThreadAssign(save);
        AtomicSet(&next->quantumLeft, next->priority);

        if (next != currentThread) ProcessSwitch(next);
    }
}
<|start_filename|>modules/kernel/src/debugger/DebugTimerCounts.cc<|end_filename|>
//===================================================================================================================
//
// DebugTimerCounts.cc -- Debug the timer counts across all cores
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-05 Initial v0.6.0a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "serial.h"
#include "process.h"
#include "debugger.h"
//
// -- Debug the timer over all CPUs
// -----------------------------
EXTERN_C EXPORT KERNEL
void DebugTimerCounts(void)
{
    while (true) {
        // -- this CPU counts as the first responder
        AtomicSet(&debugCommunication.coresResponded, 1);

        // -- signal the other cores to record their timer counts, record ours,
        //    then spin until every running core has reported in
        DebuggerEngage(DIPI_TIMER);
        debugCommunication.timerValue[thisCpu->cpuNum] = TimerCurrentCount(timerControl);
        while (AtomicRead(&debugCommunication.coresResponded) != cpus.cpusRunning) {}
        DebuggerRelease();

        // -- now we have the values -- dump them
        kprintf(ANSI_CLEAR ANSI_SET_CURSOR(0,0) ANSI_FG_BLUE ANSI_ATTR_BOLD
                "Current Timer Counts (press <Enter> to exit)\n" ANSI_ATTR_NORMAL);

        for (int i = 0; i < cpus.cpusRunning; i ++) {
            kprintf("CPU%d | %d\n", i, (uint32_t)debugCommunication.timerValue[i]);
        }

        // -- a CR or LF waiting on the debug serial port exits the display loop
        while (SerialHasChar(&debugSerial)) {
            uint8_t ch = SerialGetChar(&debugSerial);
            if (ch == 10 || ch == 13) {
                return;
            }
        }

        // -- sleep briefly before refreshing the display
        ProcessSleep(1);
    }
}
<|start_filename|>platform/pc/apic/LApicBroadcastSipi.cc<|end_filename|>
//===================================================================================================================
//
// LApicBroadcastSipi.cc -- Broadcast a Startup IPI (SIPI) to all cores
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Jun-16 Initial 0.4.6 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "timer.h"
#include "hardware.h"
#include "pic.h"
//
// -- Send a Startup IPI (SIPI) to the given core
// --------------------------------------------
EXTERN_C EXPORT KERNEL
void _LApicBroadcastSipi(PicDevice_t *dev, uint32_t core, archsize_t addr)
{
    if (!dev) return;

    // -- the high ICR dword carries the destination core id
    LapicIcrHi_t hi = {
        .destination = (uint8_t)core,
    };

    // -- the low ICR dword; the SIPI vector is the page number of the start address,
    //    so `addr` must be page-aligned and below 1MB for the vector to be meaningful
    LapicIcrLo_t lo = {0};
    lo.vector = (addr >> 12) & 0xff;
    lo.deliveryMode = DELMODE_STARTUP;
    lo.destinationMode = 0;
    lo.deliveryStatus = 1;              // NOTE(review): delivery status is read-only per the SDM -- confirm intent
    lo.level = 1;
    lo.trigger = 1;                     // NOTE(review): SIPIs are normally edge-triggered -- confirm this is wanted
    lo.destinationShorthand = 0b00;     // -- no shorthand: deliver only to the `destination` field above
    MmioWrite(LAPIC_MMIO + LAPIC_ICR_HI, hi.raw);
    MmioWrite(LAPIC_MMIO + LAPIC_ICR_LO, lo.raw);   // -- writing the low dword is what sends the IPI
}
<|start_filename|>modules/kernel/src/heap/HeapCalcPageAdjustment.cc<|end_filename|>
//===================================================================================================================
//
// HeapCalcPageAdjustment.cc -- Calculate the adjustment to align the pointer to a page boundary
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// Calculate the adjustment to align the pointer to a page boundary, not including the header (i.e. after the
// header)
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2012-Jul-26 Initial version
// 2012-Sep-16 Leveraged from Century
// 2013-Sep-12 #101 Resolve issues splint exposes
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "heap.h"
//
// -- Calculate the adjustment needed to align to a page
// --------------------------------------------------
size_t HeapCalcPageAdjustment(OrderedList_t *entry)
{
    assert(entry != NULL);

    // -- the address where usable memory would begin (just past the header)
    archsize_t addr = (archsize_t)entry->block + sizeof(KHeapHeader_t);

    // -- round that address up to the next page boundary (a no-op when already aligned)
    addr = (addr + 0xfff) & ~((archsize_t)0xfff);

    // -- back off by the header size so the header ends exactly at the page boundary
    return addr - sizeof(KHeapHeader_t);
}
<|start_filename|>modules/kernel/src/process/ProcessTerminate.cc<|end_filename|>
//===================================================================================================================
//
// ProcessTerminate.cc -- End a task by placing it on the terminated queue
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-29 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "printf.h"
#include "cpu.h"
#include "process.h"
//
// -- Terminate a task
// ----------------
EXTERN_C EXPORT KERNEL
void ProcessTerminate(Process_t *proc)
{
    // -- this function is known to be broken and is deliberately disabled: the
    //    assert fires and the `return` makes everything below unreachable until
    //    the implementation is reworked
    assert_msg(false, "`ProcessTerminate() is flawed!! do not use");
    return;

    if (!assert(proc != NULL)) return;

    ProcessLockAndPostpone();

    kprintf("Terminating process at address %p on CPU%d\n", proc, thisCpu->cpuNum);
    kprintf(".. this process is %sRunning\n", proc == currentThread ? "" : "not ");

    if (proc == currentThread) {
        kprintf(".. ending the current process\n");
        // -- the running process must not already be on a status queue
        assert(proc->stsQueue.next == &proc->stsQueue);
        Enqueue(&scheduler.listTerminated, &proc->stsQueue);
        ProcessDoBlock(PROC_TERM);
    } else {
        kprintf(".. terminating another process\n");    // -- fixed typo: was "termianting"
        ProcessListRemove(proc);
        Enqueue(&scheduler.listTerminated, &proc->stsQueue);
        proc->status = PROC_TERM;
    }

    kprintf(".. terminated; giving up the CPU\n");
    ProcessUnlockAndSchedule();
}
<|start_filename|>platform/pc/timer/TimerPlatformTick.cc<|end_filename|>
//===================================================================================================================
//
// TimerPlatformTick.cc -- This is the update that needs to take place with every tick of the timer.
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Mar-19 Initial 0.3.2 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "timer.h"
//
// -- Account for the elapsed time on each timer tick
//    -----------------------------------------------
EXPORT KERNEL
void _TimerPlatformTick(UNUSED(TimerDevice_t *dev))
{
    // -- each platform tick accounts for 1000 micro-seconds (1ms) of elapsed time
    extern uint64_t microsSinceBoot;
    microsSinceBoot += 1000;
}
<|start_filename|>platform/inc/hardware.h<|end_filename|>
//===================================================================================================================
//
// hardware.h -- These are the hardware abstractions for interacting with any of the platform devices
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// This header and the types and constants herein are used to provide a layer of abstraction between the kernel
// code and the platform implementation. The key to success here is that all of the hardware needs to have a
// common interface to the kernel. For example, all the serial functions need to look the same across all
// platforms. This means that on BCM2835 where it is accessed by MMIO, and x86 where it is accessed by I/O port,
// the function prototypes need to be the same. Complicate that by the fact that I will likely call each of these
// (at least for the serial port) from both the loader and from the kernel and there is some thought that needs to
// go into this effort.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-23 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#pragma once
#define __HARDWARE_H__
#include "types.h"
#include "lists.h"
//
// -- Write a 32-bit value to a Memory Mapped I/O Register
//    ----------------------------------------------------
EXPORT INLINE
void MmioWrite(archsize_t regLocation, uint32_t data)
{
    *reinterpret_cast<volatile uint32_t *>(regLocation) = data;
}

//
// -- Write a 64-bit value to a Memory Mapped I/O Register
//    ----------------------------------------------------
EXPORT INLINE
void MmioWrite64(archsize_t regLocation, uint64_t data)
{
    *reinterpret_cast<volatile uint64_t *>(regLocation) = data;
}

//
// -- Read a 32-bit value from a Memory Mapped I/O Register
//    -----------------------------------------------------
EXPORT INLINE
uint32_t MmioRead(archsize_t regLocation)
{
    return *reinterpret_cast<volatile uint32_t *>(regLocation);
}

//
// -- Read a 64-bit value from a Memory Mapped I/O Register
//    -----------------------------------------------------
EXPORT INLINE
uint64_t MmioRead64(archsize_t regLocation)
{
    return *reinterpret_cast<volatile uint64_t *>(regLocation);
}
//
// -- This is the base of a device data structure; this structure will need to be included
// as the first data member (not a pointer) of any device's specific data.
// ------------------------------------------------------------------------------------
typedef void *DeviceData_t;
//
// -- This structure is the basis for any device that is managed in Century; this structure will also
// need to be included as the first data member (not a pointer) of any device's structure.
// -----------------------------------------------------------------------------------------------
typedef struct GenericDevice_t {
    struct GenericDevice_t *parent;     // -- the device this one hangs off of (presumably NULL at the root -- confirm)
    ListHead_t::List_t siblings;        // -- list node linking this device among its parent's children
    ListHead_t children;                // -- head of the list of this device's child devices
    char name[MAX_DEV_NAME];            // -- human-readable device name
    DeviceData_t deviceData;            // -- driver-specific data (always the first member of the driver struct)
} GenericDevice_t;
//
// -- Include any platform-dependent I/O functions that might be required
// -------------------------------------------------------------------
#if __has_include("platform-acpi.h")
# include "platform-acpi.h"
#endif
#if __has_include("platform-io.h")
# include "platform-io.h"
#endif
#if __has_include("platform-gpio.h")
# include "platform-gpio.h"
#endif
#if __has_include("platform-mailbox.h")
# include "platform-mailbox.h"
#endif
#if __has_include("platform-init.h")
# include "platform-init.h"
#endif
<|start_filename|>modules/kernel/src/frame-buffer/FrameBufferPutS.cc<|end_filename|>
//===================================================================================================================
//
// FrameBufferPutS.cc -- Write a string to the frame buffer
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Jun-13 Initial 0.1.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "hw-disc.h"
#include "fb.h"
//
// -- Write a string to the frame buffer
// ----------------------------------
//
// -- Draw each character of the NUL-terminated string onto the frame buffer in order.
void FrameBufferPutS(const char *s)
{
    for (const char *cursor = s; *cursor != '\0'; ++cursor) {
        FrameBufferDrawChar(*cursor);
    }
}
<|start_filename|>modules/kernel/inc/butler.h<|end_filename|>
//===================================================================================================================
//
// butler.h -- The butler process
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-10 Initial v0.6.1b ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "process.h"
#include "msgq.h"
//
// -- These are the messages that the butler knows how to handle
// ----------------------------------------------------------
typedef enum {
    BUTLER_CLEAN_PMM,           // -- a PMM frame has been handed to the butler to clean
    BUTLER_CLEAN_PROCESS,       // -- a process has been handed to the butler to clean up
} ButlerTask_t;

//
// -- The name of the butler process
// ------------------------------
EXTERN EXPORT KERNEL_DATA
const char *butlerName;

//
// -- This is the message queue the butler will use
// ---------------------------------------------
EXTERN EXPORT KERNEL_BSS
MessageQueue_t *butlerMsgq;

//
// -- These are several memory locations that were provided by the linker that we may want to keep track of
//    (start/end virtual addresses plus the physical address and size of each section; defined in ButlerVars.cc)
// -----------------------------------------------------------------------------------------------------
EXTERN EXPORT KERNEL_BSS
uint8_t *krnKernelTextStart;
EXTERN EXPORT KERNEL_BSS
uint8_t *krnKernelTextEnd;
EXTERN EXPORT KERNEL_BSS
archsize_t krnKernelTextPhys;
EXTERN EXPORT KERNEL_BSS
archsize_t krnKernelTextSize;
EXTERN EXPORT KERNEL_BSS
uint8_t *krnKernelDataStart;
EXTERN EXPORT KERNEL_BSS
uint8_t *krnKernelDataEnd;
EXTERN EXPORT KERNEL_BSS
archsize_t krnKernelDataPhys;
EXTERN EXPORT KERNEL_BSS
archsize_t krnKernelDataSize;
EXTERN EXPORT KERNEL_BSS
uint8_t *krnSyscallStart;
EXTERN EXPORT KERNEL_BSS
uint8_t *krnSyscallEnd;
EXTERN EXPORT KERNEL_BSS
archsize_t krnSyscallPhys;
EXTERN EXPORT KERNEL_BSS
archsize_t krnSyscallSize;
EXTERN EXPORT KERNEL_BSS
uint8_t *krnStabStart;
EXTERN EXPORT KERNEL_BSS
uint8_t *krnStabEnd;
EXTERN EXPORT KERNEL_BSS
archsize_t krnStabPhys;
EXTERN EXPORT KERNEL_BSS
archsize_t krnStabSize;

//
// -- Initialize the Butler and perform the initial cleanup
// -----------------------------------------------------
EXTERN_C EXPORT KERNEL
void ButlerInit(void);

//
// -- The main butler process, dispatching tasks to complete
// ------------------------------------------------------
EXTERN_C EXPORT KERNEL NORETURN
void Butler(void);

//
// -- Check the memory to see if it is eligible to be freed
// -----------------------------------------------------
EXTERN_C EXPORT LOADER
bool ButlerMemCheck(frame_t frame);

//
// -- The Butler has been notified of a PMM frame to clean
// ----------------------------------------------------
void ButlerCleanPmm(void);

//
// -- The Butler has been notified of a Process to clean
// --------------------------------------------------
void ButlerCleanProcess(void);
<|start_filename|>platform/pc/serial/SerialOpen.cc<|end_filename|>
//===================================================================================================================
//
// SerialOpen.cc -- Initialize and open a serial port for debugging output
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2017-Jun-07 Initial 0.0.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "hardware.h"
#include "serial.h"
//
// -- Initialize the serial port for debugging output
// -----------------------------------------------
EXTERN_C EXPORT KERNEL
void _SerialOpen(SerialDevice_t *dev)
{
    // -- program the UART at dev->base for 115200-8-N-1 with FIFOs enabled;
    //    register names suggest a 16550-style part -- the register writes below
    //    must happen in this order (DLAB must be set before the divisor bytes)
    if (!dev) return;
    SerialBase_t base = dev->base;

    outb(base + SERIAL_INTERRUPT_ENABLE, 0x00);     // Disable all interrupts
    outb(base + SERIAL_LINE_CONTROL, 0x80);         // Enable DLAB (set baud rate divisor)
    outb(base + SERIAL_DIVISOR_LSB, 0x01);          // Set divisor to 1 (lo byte) 115200 baud
    outb(base + SERIAL_DIVISOR_MSB, 0x00);          //                  (hi byte)
    outb(base + SERIAL_LINE_CONTROL, 0x03);         // 8 bits, no parity, one stop bit
    outb(base + SERIAL_FIFO_CONTROL, 0xC7);         // Enable FIFO, clear them, with 14-byte threshold
    outb(base + SERIAL_MODEM_CONTROL, 0x0B);        // IRQs enabled, RTS/DSR set
}
<|start_filename|>modules/kernel/src/msgq/MsgqSend.cc<|end_filename|>
//===================================================================================================================
//
// MsgqSend.cc -- Send a message to a queue and wake up all waiting processes
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// -----------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- --------------------------------------------------------------------------
// 2020-Apr-09 Initial v0.6.1a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "heap.h"
#include "process.h"
#include "msgq.h"
//
// -- Send a message to a message queue (all pre-checks completed)
// ------------------------------------------------------------
EXTERN_C EXPORT KERNEL
void _MessageQueueSend(MessageQueue_t *msgq, long type, size_t sz, void *payload, bool lock)
{
    // -- construct the message: header plus an inline copy of the payload
    size_t size = sz + sizeof(Message_t);           // -- adjust for the overhead
    Message_t *msg = (Message_t *)HeapAlloc(size, false);
    if (!msg) return;                               // -- fix: do not dereference a failed allocation
    ListInit(&msg->list);
    msg->payloadSize = sz + sizeof(long);           // -- payload size counts the leading `type` word
    msg->payload.type = type;
    if (sz) kMemMove(msg->payload.data, payload, sz);
    // -- queue the message under the queue lock with interrupts disabled
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(msgq->queue.lock) {
        Enqueue(&msgq->queue, &msg->list);
        msgq->queue.count ++;
        SPINLOCK_RLS_RESTORE_INT(msgq->queue.lock, flags);
    }
    // -- release anything waiting for something in the queue and let the scheduler sort it all out
    if (lock) ProcessLockAndPostpone();
    flags = SPINLOCK_BLOCK_NO_INT(msgq->waiting.lock) {
        while (IsListEmpty(&msgq->waiting) == false) {
            Process_t *proc = FIND_PARENT(msgq->waiting.list.next, Process_t, stsQueue);
            ListRemoveInit(&proc->stsQueue);
            msgq->waiting.count --;
            ProcessDoReady(proc);
        }
        SPINLOCK_RLS_RESTORE_INT(msgq->waiting.lock, flags);
    }
    if (lock) ProcessUnlockAndSchedule();
}
<|start_filename|>platform/bcm2836/serial/SerialGetChar.cc<|end_filename|>
//===================================================================================================================
//
// SerialGetChar.cc -- Get a single character from the serial port
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2020-Apr-02 Initial v0.6.6a ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "hardware.h"
#include "spinlock.h"
#include "serial.h"
//
// -- Get a single character from the serial port (blocking read)
//    -----------------------------------------------------------
EXTERN_C EXPORT KERNEL
uint8_t _SerialGetChar(SerialDevice_t *dev)
{
    if (!dev) return 0;
    uint8_t rv;
    // -- hold the device lock with interrupts off; busy-wait until the
    //    mini-UART line status register reports data ready (bit 0), then
    //    read the byte from the IO register
    archsize_t flags = SPINLOCK_BLOCK_NO_INT(dev->lock) {
        while ((MmioRead(dev->base + AUX_MU_LSR_REG) & 1) == 0) { }
        rv = MmioRead(dev->base + AUX_MU_IO_REG);
        SPINLOCK_RLS_RESTORE_INT(dev->lock, flags);
    }
    return rv;
}
<|start_filename|>platform/pc/pic/PicInit.cc<|end_filename|>
//===================================================================================================================
//
// PicInit.cc -- Initialize the PIC
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Feb-24 Initial 0.3.0 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "hardware.h"
#include "pic.h"
//
// -- Initialize the x86 8259 PIC -- note, this is not the IOAPIC
// -----------------------------------------------------------
EXTERN_C EXPORT KERNEL
void _PicInit(PicDevice_t *dev, const char *name)
{
    // NOTE: `name` is currently unused by this initialization path.
    if (!dev) return;
    archsize_t flags = DisableInterrupts();
    // -- Remap the irq table, even though we may not be using it.
    //    The write sequence below follows the standard 8259A ICW1..ICW4
    //    protocol and must be performed in exactly this order.
    outb(PIC1 + PIC_MASTER_DATA, 0xff);         // Disable all IRQs
    outb(PIC2 + PIC_SLAVE_DATA, 0xff);          // Disable all IRQs
    outb(PIC1 + PIC_MASTER_COMMAND, 0x11);      // ICW1: begin init, ICW4 will follow
    outb(PIC2 + PIC_SLAVE_COMMAND, 0x11);       // ICW1: same for the slave PIC
    outb(PIC1 + PIC_MASTER_DATA, 0x20);         // ICW2: master vector offset 0x20 (IRQ0 -> int 32)
    outb(PIC2 + PIC_SLAVE_DATA, 0x28);          // ICW2: slave vector offset 0x28 (IRQ8 -> int 40)
    outb(PIC1 + PIC_MASTER_DATA, 0x04);         // ICW3: slave attached on master line 2
    outb(PIC2 + PIC_SLAVE_DATA, 0x02);          // ICW3: slave cascade identity = 2
    outb(PIC1 + PIC_MASTER_DATA, 0x01);         // ICW4: 8086/88 mode
    outb(PIC2 + PIC_SLAVE_DATA, 0x01);          // ICW4: 8086/88 mode
    RestoreInterrupts(flags);
}
<|start_filename|>platform/pc/init/TimerPick.cc<|end_filename|>
//===================================================================================================================
//
// TimerPick.cc -- Make a decision on which Timer will be used
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2019-Apr-26 Initial 0.4.1 ADCL Initial version
//
//===================================================================================================================
#include "types.h"
#include "hw-disc.h"
#include "printf.h"
#include "timer.h"
//
// -- pick the timer device we will use
// ---------------------------------
EXTERN_C EXPORT LOADER
TimerDevice_t *TimerPick(void)
{
    kprintf("Picking a timer to use...\n");
    // -- prefer the Local APIC timer when one was discovered during
    //    hardware discovery; otherwise fall back to the legacy 8253.
    //    The selected timer is initialized later, in kInit().
    timerControl = (GetLocalApicCount() > 0) ? &lapicTimerControl : &timer8253Control;
    return timerControl;
}
| eryjus/century-os |
<|start_filename|>src/glue.c<|end_filename|>
#include <emscripten.h>
#include <errno.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include "ext2fs.h"
#include "glue.h"
// Subset of Linux open(2) flag bits (octal constants, matching the values
// in <asm-generic/fcntl.h>); the JavaScript callers pass these in directly.
#define O_WRONLY 00000001
#define O_RDWR 00000002
#define O_CREAT 00000100
#define O_EXCL 00000200
#define O_TRUNC 00001000
#define O_APPEND 00002000
#define O_DIRECTORY 00200000
#define O_NOATIME 01000000
// Access js stuff from C
//
// Append `len` bytes at heap address `value` (copied out of the emscripten
// heap into a fresh Buffer) to the JS array registered under `array_id`.
// Used by the readdir callback to accumulate directory entry names.
EM_JS(void, array_push_buffer, (int array_id, char* value, int len), {
	const heapBuffer = Module.getBuffer(value, len);
	const buffer = Buffer.alloc(len);
	heapBuffer.copy(buffer);
	Module.getObject(array_id).push(buffer);
});
// Read `count` blocks starting at `block` into `data`. A negative count is
// interpreted as a raw byte count (libext2fs io_manager convention).
// Returns 0 on success or Module.EIO on any JS-side failure.
EM_JS(errcode_t, blk_read, (int disk_id, short block_size, unsigned long block, unsigned long count, void *data), {
	return Asyncify.handleAsync(async () => {
		const offset = block * block_size;
		const size = count < 0 ? -count : count * block_size;
		const buffer = Module.getBuffer(data, size);
		const disk = Module.getObject(disk_id);
		try {
			await disk.read(buffer, 0, buffer.length, offset);
			return 0;
		} catch (error) {
			return Module.EIO;
		}
	});
});
// Write `count` blocks (or -count bytes) from `data` to the JS disk.
EM_JS(errcode_t, blk_write, (int disk_id, short block_size, unsigned long block, unsigned long count, const void *data), {
	return Asyncify.handleAsync(async () => {
		const offset = block * block_size;
		const size = count < 0 ? -count : count * block_size;
		const buffer = Module.getBuffer(data, size);
		const disk = Module.getObject(disk_id);
		try {
			await disk.write(buffer, 0, buffer.length, offset);
			return 0;
		} catch (error) {
			return Module.EIO;
		}
	});
});
// Forward a discard/TRIM request for the given block range to the JS disk.
EM_JS(errcode_t, discard, (int disk_id, short block_size, unsigned long block, unsigned long count), {
	return Asyncify.handleAsync(async () => {
		const disk = Module.getObject(disk_id);
		const offset = block * block_size;
		const size = count < 0 ? -count : count * block_size;
		try {
			await disk.discard(offset, size);
			return 0;
		} catch (error) {
			return Module.EIO;
		}
	});
});
// Ask the JS disk implementation to flush any buffered writes.
EM_JS(errcode_t, flush, (int disk_id), {
	return Asyncify.handleAsync(async () => {
		const disk = Module.getObject(disk_id);
		try {
			await disk.flush();
			return 0;
		} catch (error) {
			return Module.EIO;
		}
	});
});
// ------------------------
// Utils ------------------
// Resolve an absolute path to its inode number, following the usual
// directory walk from the filesystem root. Returns 0 when the path does
// not resolve (0 is never a valid inode number).
ext2_ino_t string_to_inode(ext2_filsys fs, const char *str) {
	ext2_ino_t ino;
	if (ext2fs_namei(fs, EXT2_ROOT_INO, EXT2_ROOT_INO, str, &ino)) {
		return 0;
	}
	return ino;
}
int copy_filename_to_result(
struct ext2_dir_entry *dirent,
int offset,
int blocksize,
char *buf,
void *priv_data // this is the js array_id
) {
size_t len = ext2fs_dirent_name_len(dirent);
if (
(strncmp(dirent->name, ".", len) != 0) &&
(strncmp(dirent->name, "..", len) != 0)
) {
array_push_buffer((int)priv_data, dirent->name, len);
}
return 0;
}
// Resolve the inode of the directory containing `path` by truncating the
// path just after its last '/'. Returns 0 when the path has no slash or
// the parent does not resolve.
ext2_ino_t get_parent_dir_ino(ext2_filsys fs, const char* path) {
	const char* last_slash = strrchr(path, '/');
	if (last_slash == NULL) {
		return 0;
	}
	// Keep everything up to and including the final slash.
	size_t parent_len = (size_t)(last_slash - path) + 1;
	char* parent_path = strndup(path, parent_len);
	ext2_ino_t parent_ino = string_to_inode(fs, parent_path);
	free(parent_path);
	return parent_ino;
}
// Return a pointer to the final path component inside `path` (no copy is
// made). Returns NULL when the path contains no '/' or ends with one
// (i.e. when it does not name a file).
char* get_filename(const char* path) {
	char* slash = strrchr((char*)path, (int)'/');
	if (slash == NULL || slash[1] == '\0') {
		return NULL;
	}
	return slash + 1;
}
// Map Node-style open(2) flag bits onto the subset libext2fs understands.
// Only write access and creation are meaningful to ext2fs_file_open();
// everything else (O_TRUNC, O_APPEND, O_EXCL, ...) is emulated by the
// callers in this file.
unsigned int translate_open_flags(unsigned int js_flags) {
	unsigned int ext2_flags = 0;
	if (js_flags & (O_WRONLY | O_RDWR)) {
		ext2_flags |= EXT2_FILE_WRITE;
	}
	if (js_flags & O_CREAT) {
		ext2_flags |= EXT2_FILE_CREATE;
	}
	return ext2_flags;
}
// Create a new regular file at `path` with the given permission `mode`,
// returning the freshly allocated inode number through `ino`.
// The sequence mirrors what mke2fs/fuse2fs do: allocate an inode, link it
// into the parent directory (expanding the directory once if it is full),
// mark it used in the bitmaps, then write out a zero-length inode.
errcode_t create_file(ext2_filsys fs, const char* path, unsigned int mode, ext2_ino_t* ino) {
	// Returns a >= 0 error code
	errcode_t ret = 0;
	ext2_ino_t parent_ino = get_parent_dir_ino(fs, path);
	if (parent_ino == 0) {
		return ENOTDIR;
	}
	ret = ext2fs_new_inode(fs, parent_ino, mode, 0, ino);
	if (ret) return ret;
	char* filename = get_filename(path);
	if (filename == NULL) {
		// This should never happen.
		return EISDIR;
	}
	ret = ext2fs_link(fs, parent_ino, filename, *ino, EXT2_FT_REG_FILE);
	if (ret == EXT2_ET_DIR_NO_SPACE) {
		// The parent directory is full: grow it by one block and retry once.
		ret = ext2fs_expand_dir(fs, parent_ino);
		if (ret) return ret;
		ret = ext2fs_link(fs, parent_ino, filename, *ino, EXT2_FT_REG_FILE);
	}
	if (ret) return ret;
	if (ext2fs_test_inode_bitmap2(fs->inode_map, *ino)) {
		printf("Warning: inode already set\n");
	}
	// Claim the inode in the in-memory allocation stats/bitmaps.
	ext2fs_inode_alloc_stats2(fs, *ino, +1, 0);
	struct ext2_inode inode;
	memset(&inode, 0, sizeof(inode));
	// Permission bits come from `mode`; force the file-type bits to "regular file".
	inode.i_mode = (mode & ~LINUX_S_IFMT) | LINUX_S_IFREG;
	inode.i_atime = inode.i_ctime = inode.i_mtime = time(0);
	inode.i_links_count = 1;
	ret = ext2fs_inode_size_set(fs, &inode, 0);	// TODO: update size? also on write?
	if (ret) return ret;
	if (ext2fs_has_feature_inline_data(fs->super)) {
		inode.i_flags |= EXT4_INLINE_DATA_FL;
	} else if (ext2fs_has_feature_extents(fs->super)) {
		// Initialize an (empty) extent tree for the new inode.
		ext2_extent_handle_t handle;
		inode.i_flags &= ~EXT4_EXTENTS_FL;
		ret = ext2fs_extent_open2(fs, *ino, &inode, &handle);
		if (ret) return ret;
		ext2fs_extent_free(handle);
	}
	ret = ext2fs_write_new_inode(fs, *ino, &inode);
	if (ret) return ret;
	if (inode.i_flags & EXT4_INLINE_DATA_FL) {
		ret = ext2fs_inline_data_init(fs, *ino);
		if (ret) return ret;
	}
	return 0;
}
// Refresh the selected timestamps (a = atime, c = ctime, m = mtime) of the
// inode backing `file` to "now", bump the inode version, and write the
// inode back to disk. Returns a >= 0 ext2fs error code.
errcode_t update_xtime(ext2_file_t file, bool a, bool c, bool m) {
	errcode_t err = ext2fs_read_inode(file->fs, file->ino, &(file->inode));
	if (err) return err;
	struct timespec now;
	clock_gettime(CLOCK_REALTIME, &now);
	if (a) file->inode.i_atime = now.tv_sec;
	if (c) file->inode.i_ctime = now.tv_sec;
	if (m) file->inode.i_mtime = now.tv_sec;
	increment_version(&(file->inode));
	return ext2fs_write_inode(file->fs, file->ino, &(file->inode));
}
// Combine the high and low 32-bit halves of a 64-bit value.
// Fix: return unsigned long long instead of unsigned long -- on ILP32
// targets (emscripten/wasm32) unsigned long is 32 bits wide, so the old
// signature silently truncated the high half. Widening the return type is
// backward compatible for existing callers.
unsigned long long getUInt64Number(unsigned long long hi, unsigned long long lo) {
	return lo | (hi << 32);
}
// ------------------------
// Call these from js -----
// We can read and write C memory from js but we can't read & write js Buffer data from C so we need a way to allocate & free C memory from js.
// That way we can allocate C memory from js, pass it to a C function to be written to and read it in js.
// Allocate `length` bytes on the emscripten heap. Returns NULL on failure;
// the JS caller owns the buffer and must release it via free_from_js().
char* malloc_from_js(int length) {
	return malloc(length);
}
// Release a buffer previously obtained from malloc_from_js().
void free_from_js(char *data) {
	free(data);
}
// Open the filesystem on the JS disk registered under `disk_id` and load
// its block/inode bitmaps. On success returns the ext2_filsys pointer cast
// to a long; on failure returns a negative ext2fs error code.
errcode_t node_ext2fs_mount(int disk_id) {
	ext2_filsys fs;
	// The io_manager "name" argument smuggles the disk id across as a
	// decimal string; js_open_entry() parses it back with sscanf.
	// Fix: the old buffer (sizeof(void*)*2+3 == 11 bytes on wasm32) could
	// not hold every 32-bit decimal value; 16 bytes always can, and
	// snprintf can never overflow it.
	char disk_id_str[16];
	snprintf(disk_id_str, sizeof(disk_id_str), "%d", disk_id);
	errcode_t ret = ext2fs_open(
		disk_id_str,          // name
		EXT2_FLAG_RW,         // flags
		0,                    // superblock
		0,                    // block_size
		get_js_io_manager(),  // manager
		&fs                   // ret_fs
	);
	if (ret) {
		return -ret;
	}
	ret = ext2fs_read_bitmaps(fs);
	if (ret) {
		// Fix: don't leak the open filesystem handle on a failed mount.
		ext2fs_close(fs);
		return -ret;
	}
	return (long)fs;
}
// Discard (TRIM) every block that is not marked in-use in the block bitmap.
// The loop accumulates runs of free blocks and issues one discard per run;
// iterating one past the last block (blk == count) exists purely to flush
// the final run. Returns 0 or a negative ext2fs error code.
errcode_t node_ext2fs_trim(ext2_filsys fs) {
	unsigned int start, blk, count;
	errcode_t ret;
	if (!fs->block_map) {
		if ((ret = ext2fs_read_block_bitmap(fs))) {
			return -ret;
		}
	}
	start = fs->super->s_first_data_block;
	count = fs->super->s_blocks_count;
	for (blk = start; blk <= count; blk++) {
		// Check for either the last iteration or a used block
		if (blk == count || ext2fs_test_block_bitmap(fs->block_map, blk)) {
			// [start, blk) is a run of free blocks -- discard it if non-empty.
			if (start < blk) {
				if ((ret = io_channel_discard(fs->io, start, blk - start))) {
					return -ret;
				}
			}
			start = blk + 1;
		}
	}
	return 0;
}
// List the entries of the directory at `path`, pushing each name (except
// "." and "..") onto the JS array registered under `array_id`.
// Returns 0 or a negative errno/ext2fs error code.
errcode_t node_ext2fs_readdir(ext2_filsys fs, char* path, int array_id) {
	ext2_ino_t ino = string_to_inode(fs, path);
	if (ino == 0) {
		return -ENOENT;
	}
	ext2_file_t file;
	errcode_t ret = ext2fs_file_open(
		fs,
		ino, // inode,
		0, // flags TODO
		&file
	);
	if (ret) return -ret;
	ret = ext2fs_check_directory(fs, ino);
	if (ret) {
		// Fix: the old code leaked `file` on this path and on success.
		ext2fs_file_close(file);
		return -ret;
	}
	char* block_buf = (char*)malloc(fs->blocksize);
	if (block_buf == NULL) {
		// Fix: malloc failure was previously passed straight into
		// ext2fs_dir_iterate().
		ext2fs_file_close(file);
		return -ENOMEM;
	}
	ret = ext2fs_dir_iterate(
		fs,
		ino,
		0, // flags
		block_buf,
		copy_filename_to_result,
		(void*)array_id
	);
	free(block_buf);
	ext2fs_file_close(file);
	return -ret;
}
// Open (and optionally create/truncate) the file at `path`.
// `flags` uses the Linux open(2) bit values defined at the top of this
// file; `mode` supplies permissions when the file is created. On success
// returns the ext2_file_t handle cast to a long; otherwise a negative
// errno/ext2fs error code.
long node_ext2fs_open(ext2_filsys fs, char* path, unsigned int flags, unsigned int mode) {
	// TODO: O_NOFOLLOW, O_SYMLINK
	ext2_ino_t ino = string_to_inode(fs, path);
	errcode_t ret;
	if (ino == 0) {
		if (!(flags & O_CREAT)) {
			return -ENOENT;
		}
		ret = create_file(fs, path, mode, &ino);
		if (ret) return -ret;
	} else if (flags & O_EXCL) {
		// NOTE(review): this fails with EEXIST even when O_CREAT is absent;
		// POSIX leaves O_EXCL-without-O_CREAT undefined, so the behavior is
		// kept as-is.
		return -EEXIST;
	}
	if ((flags & O_DIRECTORY) && ext2fs_check_directory(fs, ino)) {
		return -ENOTDIR;
	}
	ext2_file_t file;
	ret = ext2fs_file_open(fs, ino, translate_open_flags(flags), &file);
	if (ret) return -ret;
	if (flags & O_TRUNC) {
		ret = ext2fs_file_set_size2(file, 0);
		if (ret) {
			// Fix: don't leak the freshly opened handle when truncation fails.
			ext2fs_file_close(file);
			return -ret;
		}
	}
	return (long)file;
}
// Read up to `length` bytes from an open file into `buffer`.
// `position` is an absolute byte offset; callers pass -1 for "current
// position" -- `position` is unsigned, so the comparison below relies on
// -1 wrapping to ULONG_MAX. Returns bytes read or a negative error code.
long node_ext2fs_read(
	ext2_file_t file,
	int flags,
	char *buffer,
	unsigned long length, // requested length
	unsigned long position // position in file, -1 for current position
) {
	errcode_t ret = 0;
	if ((flags & O_WRONLY) != 0) {
		// Don't try to read write only files.
		return -EBADF;
	}
	if (position != -1) {
		ret = ext2fs_file_llseek(file, position, EXT2_SEEK_SET, NULL);
		if (ret) return -ret;
	}
	unsigned int got;
	ret = ext2fs_file_read(file, buffer, length, &got);
	if (ret) return -ret;
	if ((flags & O_NOATIME) == 0) {
		// Mirror POSIX semantics: a successful read refreshes atime.
		ret = update_xtime(file, true, false, false);
		if (ret) return -ret;
	}
	return got;
}
// Write `length` bytes from `buffer` into an open file.
// `position` is an absolute byte offset, or -1 (wrapping, since it is
// unsigned) for "current position"; O_APPEND always seeks to EOF first.
// Returns bytes written or a negative error code.
long node_ext2fs_write(
	ext2_file_t file,
	int flags,
	char *buffer,
	unsigned long length, // requested length
	unsigned long position // position in file, -1 for current position
) {
	if ((flags & (O_WRONLY | O_RDWR)) == 0) {
		// Don't try to write to readonly files.
		return -EBADF;
	}
	errcode_t ret = 0;
	if ((flags & O_APPEND) != 0) {
		// append mode: seek to the end before each write
		ret = ext2fs_file_llseek(file, 0, EXT2_SEEK_END, NULL);
	} else if (position != -1) {
		ret = ext2fs_file_llseek(file, position, EXT2_SEEK_SET, NULL);
	}
	if (ret) return -ret;
	unsigned int written;
	ret = ext2fs_file_write(file, buffer, length, &written);
	if (ret) return -ret;
	if ((flags & O_CREAT) != 0) {
		// NOTE(review): ctime/mtime are refreshed only when the fd was
		// opened with O_CREAT; plain O_WRONLY/O_RDWR writes leave the
		// timestamps untouched -- confirm this is intentional.
		ret = update_xtime(file, false, true, true);
		if (ret) return -ret;
	}
	return written;
}
// Create a directory at `path` with permissions `mode`.
// ext2fs_mkdir() writes the new inode with default permissions, so the
// inode is re-read afterwards to apply the caller's mode bits while
// preserving the directory file-type bits. Returns 0 or a negative error.
errcode_t node_ext2fs_mkdir(
	ext2_filsys fs,
	const char *path,
	int mode
) {
	ext2_ino_t parent_ino = get_parent_dir_ino(fs, path);
	if (parent_ino == 0) {
		return -ENOTDIR;
	}
	char* filename = get_filename(path);
	if (filename == NULL) {
		// This should never happen.
		return -EISDIR;
	}
	ext2_ino_t newdir;
	errcode_t ret;
	ret = ext2fs_new_inode(
		fs,
		parent_ino,
		LINUX_S_IFDIR,
		NULL,
		&newdir
	);
	if (ret) return -ret;
	ret = ext2fs_mkdir(fs, parent_ino, newdir, filename);
	if (ret) return -ret;
	struct ext2_inode inode;
	ret = ext2fs_read_inode(fs, newdir, &inode);
	if (ret) return -ret;
	// Keep the S_IFDIR type bits, replace the permission bits with `mode`.
	inode.i_mode = (mode & ~LINUX_S_IFMT) | LINUX_S_IFDIR;
	ret = ext2fs_write_inode(fs, newdir, &inode);
	return -ret;
}
// Remove the directory entry at `path`. With rmdir == true only a
// directory may be removed (else ENOTDIR); with rmdir == false only a
// non-directory may be removed (else EISDIR). Returns 0 or a negative
// errno/ext2fs error code.
errcode_t node_ext2fs_unlink(
	ext2_filsys fs,
	const char *path,
	bool rmdir
) {
	if (strlen(path) == 0) {
		return -ENOENT;
	}
	ext2_ino_t ino = string_to_inode(fs, path);
	if (ino == 0) {
		return -ENOENT;
	}
	bool is_dir = (ext2fs_check_directory(fs, ino) == 0);
	if (rmdir && !is_dir) {
		return -ENOTDIR;
	}
	if (!rmdir && is_dir) {
		return -EISDIR;
	}
	ext2_ino_t parent_ino = get_parent_dir_ino(fs, path);
	if (parent_ino == 0) {
		return -ENOENT;
	}
	return -ext2fs_unlink(fs, parent_ino, NULL, ino, 0);
}
// Replace the permission bits of the inode behind `file` with `mode`,
// preserving the file-type (S_IFMT) bits. Returns 0 or a negative error.
errcode_t node_ext2fs_chmod(
	ext2_file_t file,
	int mode
) {
	errcode_t ret = ext2fs_read_inode(file->fs, file->ino, &(file->inode));
	if (ret) return -ret;
	// Keep only the format bits, then merge in the requested permissions.
	unsigned int fmt_bits = file->inode.i_mode & LINUX_S_IFMT;
	file->inode.i_mode = fmt_bits | (mode & ~LINUX_S_IFMT);
	increment_version(&(file->inode));
	return -ext2fs_write_inode(file->fs, file->ino, &(file->inode));
}
// Set the owner uid/gid of the inode behind `file`. Only the low 16 bits
// are stored in the classic inode fields. Returns 0 or a negative error.
errcode_t node_ext2fs_chown(
	ext2_file_t file,
	int uid,
	int gid
) {
	// TODO handle 32 bit {u,g}ids
	errcode_t ret = ext2fs_read_inode(file->fs, file->ino, &(file->inode));
	if (ret) return -ret;
	// keep only the lower 16 bits
	file->inode.i_uid = (uid & 0xFFFF);
	file->inode.i_gid = (gid & 0xFFFF);
	increment_version(&(file->inode));
	return -ext2fs_write_inode(file->fs, file->ino, &(file->inode));
}
// Close an open file handle; negative ext2fs error code on failure.
errcode_t node_ext2fs_close(ext2_file_t file) {
	return -ext2fs_file_close(file);
}
// --- stat(2)-style accessors ------------------------------------------
// Each returns one field of the in-memory inode cached on an open file
// handle; the JS side assembles these into an fs.Stats-like object.
int node_ext2fs_stat_i_mode(ext2_file_t file) {
	return file->inode.i_mode;
}
int node_ext2fs_stat_i_links_count(ext2_file_t file) {
	return file->inode.i_links_count;
}
int node_ext2fs_stat_i_uid(ext2_file_t file) {
	return file->inode.i_uid;
}
int node_ext2fs_stat_i_gid(ext2_file_t file) {
	return file->inode.i_gid;
}
int node_ext2fs_stat_blocksize(ext2_file_t file) {
	return file->fs->blocksize;
}
// File size combines the low and high 32-bit halves of the inode size.
unsigned long node_ext2fs_stat_i_size(ext2_file_t file) {
	return getUInt64Number(file->inode.i_size_high, file->inode.i_size);
}
int node_ext2fs_stat_ino(ext2_file_t file) {
	return file->ino;
}
int node_ext2fs_stat_i_blocks(ext2_file_t file) {
	return file->inode.i_blocks;
}
int node_ext2fs_stat_i_atime(ext2_file_t file) {
	return file->inode.i_atime;
}
int node_ext2fs_stat_i_mtime(ext2_file_t file) {
	return file->inode.i_mtime;
}
int node_ext2fs_stat_i_ctime(ext2_file_t file) {
	return file->inode.i_ctime;
}
// Flush and close the filesystem; negative ext2fs error code on failure.
errcode_t node_ext2fs_umount(ext2_filsys fs) {
	return -ext2fs_close(fs);
}
//-------------------------------------------
// The JS-side disk id is smuggled through the channel's private_data
// pointer (stored there by js_open_entry below); it indexes the JS object
// registry managed by Module.setObject()/getObject().
int get_disk_id(io_channel channel) {
	return (int)channel->private_data;
}
// io_manager open() entry point: `disk_id_str` is the decimal disk id
// string produced by node_ext2fs_mount(); parse it back and stash it in
// the new channel's private_data.
static errcode_t js_open_entry(const char *disk_id_str, int flags, io_channel *channel) {
	io_channel io = NULL;
	errcode_t ret = ext2fs_get_mem(sizeof(struct struct_io_channel), &io);
	if (ret) {
		return ret;
	}
	memset(io, 0, sizeof(struct struct_io_channel));
	io->magic = EXT2_ET_MAGIC_IO_CHANNEL;
	io->manager = get_js_io_manager();
	sscanf(disk_id_str, "%d", (int*)&io->private_data);
	*channel = io;
	return 0;
}
// io_manager close(): release the channel structure itself.
static errcode_t js_close_entry(io_channel channel) {
	return ext2fs_free_mem(&channel);
}
// Record the block size chosen by libext2fs; reads/writes scale by it.
static errcode_t set_blksize(io_channel channel, int blksize) {
	channel->block_size = blksize;
	return 0;
}
// 32-bit block read/write entry points: delegate to the async JS disk.
static errcode_t js_read_blk_entry(io_channel channel, unsigned long block, int count, void *data) {
	int disk_id = get_disk_id(channel);
	return blk_read(disk_id, channel->block_size, block, count, data);
}
static errcode_t js_write_blk_entry(io_channel channel, unsigned long block, int count, const void *data) {
	int disk_id = get_disk_id(channel);
	return blk_write(disk_id, channel->block_size, block, count, data);
}
// Ask the JS disk to flush any buffered writes.
static errcode_t js_flush_entry(io_channel channel) {
	int disk_id = get_disk_id(channel);
	return flush(disk_id);
}
// 64-bit variants forward to the 32-bit implementations.
// NOTE(review): the 64-bit block number is narrowed to unsigned long here;
// on ILP32 targets this truncates for very large disks -- confirm block
// counts stay within 32 bits.
static errcode_t js_read_blk64_entry(io_channel channel, unsigned long long block, int count, void *data) {
	return js_read_blk_entry(channel, block, count, data);
}
static errcode_t js_write_blk64_entry(io_channel channel, unsigned long long block, int count, const void *data) {
	return js_write_blk_entry(channel, block, count, data);
}
// Forward a discard/TRIM request to the JS disk.
static errcode_t js_discard_entry(io_channel channel, unsigned long long block, unsigned long long count) {
	int disk_id = get_disk_id(channel);
	return discard(disk_id, channel->block_size, block, count);
}
// Readahead hint is ignored.
static errcode_t js_cache_readahead_entry(io_channel channel, unsigned long long block, unsigned long long count) {
	return 0;
}
// io_manager zeroout(): write `count` blocks of zeros starting at `block`,
// then discard the range so the backing store can release it.
static errcode_t js_zeroout_entry(io_channel channel, unsigned long long block, unsigned long long count) {
	int disk_id = get_disk_id(channel);
	// `count` is unsigned, so the old `count < 0` byte-count branch was
	// dead code; the size is always a whole number of blocks.
	unsigned long long size = count * channel->block_size;
	// calloc replaces malloc+memset; fix: the buffer was never freed, and
	// an allocation failure was never checked.
	char *data = (char *)calloc(1, size);
	if (data == NULL) {
		return EXT2_ET_NO_MEMORY;
	}
	errcode_t ret = blk_write(disk_id, channel->block_size, block, count, data);
	free(data);
	if (ret) return ret;
	return discard(disk_id, channel->block_size, block, count);
}
// The single io_manager instance handed to libext2fs; its function table
// points at the js_* entry points above.
struct struct_io_manager js_io_manager;

// Return the JavaScript-backed io_manager, filling in its vtable on first
// use. The magic field doubles as the "already initialized" flag, so
// repeated calls no longer redo every assignment (the old version also
// carried a stray ';' after the function body, removed here).
io_manager get_js_io_manager() {
	if (js_io_manager.magic != EXT2_ET_MAGIC_IO_MANAGER) {
		js_io_manager.magic = EXT2_ET_MAGIC_IO_MANAGER;
		js_io_manager.name = "JavaScript IO Manager";
		js_io_manager.open = js_open_entry;
		js_io_manager.close = js_close_entry;
		js_io_manager.set_blksize = set_blksize;
		js_io_manager.read_blk = js_read_blk_entry;
		js_io_manager.write_blk = js_write_blk_entry;
		js_io_manager.flush = js_flush_entry;
		js_io_manager.read_blk64 = js_read_blk64_entry;
		js_io_manager.write_blk64 = js_write_blk64_entry;
		js_io_manager.discard = js_discard_entry;
		js_io_manager.cache_readahead = js_cache_readahead_entry;
		js_io_manager.zeroout = js_zeroout_entry;
	}
	return &js_io_manager;
}
<|start_filename|>lib/ext2fs.js<|end_filename|>
'use strict';
const { DiskWrapper } = require('./disk');
const createFs = require('./fs');
const Module = require('./libext2fs');
const { ccallThrowAsync } = require('./util');
// Resolves once the emscripten runtime has finished loading; every exported
// entry point awaits this before calling into the wasm module.
const ready = new Promise((resolve) => {
	Module.onRuntimeInitialized = resolve;
});
exports.mount = async function(disk, offset = 0) {
await ready;
const wrapper = new DiskWrapper(disk, offset);
const diskId = Module.setObject(wrapper);
let fsPointer;
try {
fsPointer = await ccallThrowAsync('node_ext2fs_mount', 'number', ['number'], [diskId]);
} catch (error) {
Module.deleteObject(diskId);
throw error;
}
const fs = createFs(fsPointer);
fs.trim = async () => {
await ccallThrowAsync('node_ext2fs_trim', 'number', ['number'], [fsPointer]);
};
fs.diskId = diskId;
return fs;
};
// Tear down a mounted filesystem: close any file descriptors the fs layer
// still tracks, run the native umount, then drop the disk wrapper from the
// JS object registry.
exports.umount = async function(fs) {
	await fs.closeAllFileDescriptors();
	await ccallThrowAsync('node_ext2fs_umount', 'number', ['number'], [fs.fsPointer]);
	Module.deleteObject(fs.diskId);
};
// Mount `disk`, run `fn(fs)`, and always umount afterwards -- even when
// `fn` throws. Resolves with `fn`'s return value.
exports.withMountedDisk = async function(disk, offset, fn) {
	const fs = await exports.mount(disk, offset);
	try {
		return await fn(fs);
	} finally {
		await exports.umount(fs);
	}
};
<|start_filename|>test/index.js<|end_filename|>
'use strict';
/*global it describe*/
const assert = require('assert');
const Bluebird = require('bluebird');
const filedisk = require('file-disk');
const { createReadStream } = require('fs');
const pathModule = require('path');
const stream = require('stream');
const ext2fs = require('..');
// Each image contains 5 files named 1, 2, 3, 4, 5 and containing
// 'one\n', 'two\n', 'three\n', 'four\n', 'five\n' respectively.
// Maps test label -> fixture image filename under test/fixtures/.
const IMAGES = {
	'ext2': 'ext2.img',
	'ext3': 'ext3.img',
	'ext4': 'ext4.img',
	'ext4-4k-block-size': 'ext4-4k-block-size.img'
};
// Render a stat mode as an `ls -l` style string, e.g. "-rwxr-xr-x".
function humanFileMode(fs, stats) {
	let out = stats.isDirectory() ? 'd' : '-';
	for (const actor of ['USR', 'GRP', 'OTH']) {
		for (const action of ['R', 'W', 'X']) {
			const mask = fs.constants[`S_I${action}${actor}`];
			out += (stats.mode & mask) !== 0 ? action.toLowerCase() : '-';
		}
	}
	return out;
}
// Register one mocha test per fixture image, handing `fn` a read-only
// FileDisk opened on that image.
function testOnAllDisks(fn) {
	for (const [name, imageFile] of Object.entries(IMAGES)) {
		it(name, async () => {
			const path = pathModule.join(__dirname, 'fixtures', imageFile);
			await filedisk.withOpenFile(path, 'r', async (fd) => {
				const disk = new filedisk.FileDisk(fd, true, true);
				// `disk.imageName` will be useful in tests that have different
				// results depending on the image.
				disk.imageName = name;
				await fn(disk);
			});
		});
	}
}
// Like testOnAllDisks, but additionally mounts the image and promisifies
// the resulting fs (multiArgs: each callback resolves to an array of the
// callback's values).
function testOnAllDisksMount(fn) {
	testOnAllDisks(async (disk) => {
		await ext2fs.withMountedDisk(disk, 0, async (fs) => {
			// Might be useful to get the disk name
			fs.disk = disk;
			await fn(Bluebird.promisifyAll(fs, { multiArgs: true }));
		});
	});
}
// Drain `stream` and resolve with its full contents once it closes:
// a Buffer when `buffer` is true, otherwise a string.
function readStream(stream, buffer = false) {
	return new Promise((resolve, reject) => {
		const chunks = [];
		stream.on('data', (chunk) => chunks.push(chunk));
		stream.on('error', reject);
		stream.on('close', () => {
			resolve(buffer ? Buffer.concat(chunks) : chunks.join(''));
		});
	});
}
// Resolve when `stream` emits 'close'; reject on 'error'. Any data the
// stream produces is discarded.
function waitStream(stream) {
	return new Promise((resolve, reject) => {
		stream.on('error', reject);
		stream.on('close', resolve);
	});
}
function createReadableStreamFromString(s) {
const readable = new stream.Readable();
readable._read = () => {};
readable.push(s);
readable.push(null);
return readable;
}
describe('ext2fs', () => {
describe('disk errors', () => {
testOnAllDisks(async (disk) => {
disk.read = () => {
throw new Error("can't read");
};
try {
await ext2fs.mount(disk, 0);
assert(false);
} catch(err) {
assert.strictEqual(err.errno, 29);
assert.strictEqual(err.code, 'EIO');
}
});
});
describe('offset', () => {
it('offset', async () => {
const path = pathModule.join(__dirname, 'fixtures', IMAGES['ext4']);
const data = await readStream(createReadStream(path), true);
const offset = 2048;
const buffer = Buffer.allocUnsafe(data.length + offset);
data.copy(buffer, offset);
const disk = new filedisk.BufferDisk(buffer, true, true);
await ext2fs.withMountedDisk(disk, offset, async (fs) => {
fs = Bluebird.promisifyAll(fs, { multiArgs: true });
const [files] = await fs.readdirAsync('/');
files.sort();
assert.deepEqual(files, [ '1', '2', '3', '4', '5', 'lost+found' ]);
});
});
});
describe('mount, open, read, close, umount', () => {
testOnAllDisksMount(async (fs) => {
const buffer = Buffer.allocUnsafe(4);
const [fd] = await fs.openAsync('/1', 'r');
const [bytesRead, buf] = await fs.readAsync(fd, buffer, 0, 4, 0);
assert.strictEqual(bytesRead, 4);
assert.strictEqual(buf.toString(), 'one\n');
await fs.closeAsync(fd);
});
});
describe('mount, stat, umount', () => {
testOnAllDisksMount(async (fs) => {
const [stats] = await fs.statAsync('/2');
assert.strictEqual(stats.dev, 0);
assert.strictEqual(stats.mode, 33188);
assert.strictEqual(stats.nlink, 1);
assert.strictEqual(stats.uid, 1000);
assert.strictEqual(stats.gid, 1000);
assert.strictEqual(stats.rdev, 0);
if (fs.disk.imageName === 'ext4-4k-block-size') {
assert.strictEqual(stats.blksize, 4096);
assert.strictEqual(stats.blocks, 8);
} else {
assert.strictEqual(stats.blksize, 1024);
assert.strictEqual(stats.blocks, 2);
}
assert.strictEqual(stats.size, 4);
assert.strictEqual(
stats.atime.getTime(),
(new Date('2017-05-23T18:56:45.000Z')).getTime()
);
assert.strictEqual(
stats.mtime.getTime(),
(new Date('2017-05-22T13:02:28.000Z')).getTime()
);
assert.strictEqual(
stats.ctime.getTime(),
(new Date('2017-05-23T18:56:47.000Z')).getTime()
);
assert.strictEqual(
stats.birthtime.getTime(),
(new Date('2017-05-23T18:56:47.000Z')).getTime()
);
});
});
describe('mount, open, fstat, close, umount', () => {
testOnAllDisksMount(async (fs) => {
const [fd] = await fs.openAsync('/2', fs.constants.O_RDONLY | fs.constants.O_NOATIME);
const [stats] = await fs.fstatAsync(fd);
assert.strictEqual(stats.dev, 0);
assert.strictEqual(stats.mode, 33188);
assert.strictEqual(stats.nlink, 1);
assert.strictEqual(stats.uid, 1000);
assert.strictEqual(stats.gid, 1000);
assert.strictEqual(stats.rdev, 0);
if (fs.disk.imageName === 'ext4-4k-block-size') {
assert.strictEqual(stats.blksize, 4096);
assert.strictEqual(stats.blocks, 8);
} else {
assert.strictEqual(stats.blksize, 1024);
assert.strictEqual(stats.blocks, 2);
}
assert.strictEqual(stats.size, 4);
assert.strictEqual(
stats.atime.getTime(),
(new Date('2017-05-23T18:56:45.000Z')).getTime()
);
assert.strictEqual(
stats.mtime.getTime(),
(new Date('2017-05-22T13:02:28.000Z')).getTime()
);
assert.strictEqual(
stats.ctime.getTime(),
(new Date('2017-05-23T18:56:47.000Z')).getTime()
);
assert.strictEqual(
stats.birthtime.getTime(),
(new Date('2017-05-23T18:56:47.000Z')).getTime()
);
await fs.closeAsync(fd);
});
});
describe('mount, open, write, fstat, close, open, read, fstat, close, umount', () => {
testOnAllDisksMount(async (fs) => {
const string = 'hello';
const buf = Buffer.from(string);
const [fd] = await fs.openAsync('/2', 'w');
await fs.writeAsync(fd, buf, 0, buf.length, 0);
const [stats] = await fs.fstatAsync(fd);
// ctime, mtime and birthtime should change
const now = Date.now();
assert(now - stats.ctime.getTime() < 3000);
assert(now - stats.mtime.getTime() < 3000);
assert(now - stats.birthtime.getTime() < 3000);
assert.strictEqual(stats.size, 5);
await fs.closeAsync(fd);
const [fd2] = await fs.openAsync('/2', 'r');
const [bytesRead, buf2] = await fs.readAsync(fd2, buf, 0, buf.length, 0);
assert.strictEqual(bytesRead, 5);
assert.strictEqual(buf2.toString(), string);
const [stats2] = await fs.fstatAsync(fd2);
assert(Date.now() - stats2.atime.getTime() < 1000);
assert.strictEqual(stats2.size, 5);
await fs.closeAsync(fd2);
});
});
describe('mount, open, write string, read, close, umount', () => {
testOnAllDisksMount(async (fs) => {
const string = 'hello';
const buffer = Buffer.alloc(string.length);
const [fd] = await fs.openAsync('/9', 'w+');
const [bytesWritten, s] = await fs.writeAsync(fd, string);
assert.strictEqual(bytesWritten, string.length);
assert.strictEqual(s, string);
const [bytesRead, buf] = await fs.readAsync(fd, buffer, 0, buffer.length, 0);
assert.strictEqual(bytesRead, 5);
assert.strictEqual(buf.toString(), string);
await fs.closeAsync(fd);
});
});
// Full round-trip: write a file, snapshot its Stats, close, reopen, and verify
// the Stats survive the close/reopen cycle and the content reads back intact.
describe('mount, create, write, fstat, close, open, fstat, read, close, umount', () => {
const path = '/6';
const content = 'six\n';
testOnAllDisksMount(async (fs) => {
const buf = Buffer.from(content);
const [fd] = await fs.openAsync(path, 'w+', 0o777);
const [bytesWritten] = await fs.writeAsync(fd, buf, 0, buf.length, 0);
assert.strictEqual(bytesWritten, buf.length);
assert.strictEqual(buf.toString(), content);
const [statsBeforeClose] = await fs.fstatAsync(fd);
await fs.closeAsync(fd);
const [fd2] = await fs.openAsync(path, 'r');
const [stats] = await fs.fstatAsync(fd2);
// compare the 2 Stats objects
let value, otherValue;
for (let key of Object.keys(statsBeforeClose)) {
value = statsBeforeClose[key];
otherValue = stats[key];
if (value instanceof Date) {
// Dates compared by epoch millis, not by object identity.
value = value.getTime();
otherValue = otherValue.getTime();
}
assert.strictEqual(value, otherValue);
}
assert(stats.isFile());
assert.strictEqual(stats.dev, 0);
assert.strictEqual(stats.nlink, 1);
assert.strictEqual(stats.uid, 0);
assert.strictEqual(stats.gid, 0);
assert.strictEqual(stats.rdev, 0);
// Block count depends on the filesystem block size (512-byte units reported).
if (fs.disk.imageName === 'ext4-4k-block-size') {
assert.strictEqual(stats.blocks, 8);
} else {
assert.strictEqual(stats.blocks, 2);
}
assert.strictEqual(stats.size, content.length);
assert.strictEqual(humanFileMode(fs, stats), '-rwxrwxrwx');
// Timestamps should be "recent" (within 3 seconds of now).
const now = Date.now();
assert(now - stats.atime.getTime() < 3000);
assert(now - stats.ctime.getTime() < 3000);
assert(now - stats.mtime.getTime() < 3000);
assert(now - stats.birthtime.getTime() < 3000);
buf.fill(0);
const [bytesRead] = await fs.readAsync(fd2, buf, 0, 1024, 0);
assert.strictEqual(bytesRead, content.length);
assert.strictEqual(buf.toString(), content);
await fs.closeAsync(fd2);
});
});
// readFile convenience wrapper.
describe('mount, readFile, umount', () => {
testOnAllDisksMount(async (fs) => {
const [data] = await fs.readFileAsync('/1', 'utf8');
assert.strictEqual(data, 'one\n');
});
});
// Root directory listing of the fixture images.
describe('mount, readdir, umount', () => {
testOnAllDisksMount(async (fs) => {
const [filenames] = await fs.readdirAsync('/');
filenames.sort();
assert.deepEqual(
filenames,
[ '1', '2', '3', '4', '5', 'lost+found' ]
);
});
});
// Larger (1 MiB) write/read round-trip through the same fd.
describe('mount, create, write 1M, read 1M, close, umount', () => {
testOnAllDisksMount(async (fs) => {
const size = Math.pow(1024, 2);
const buf = Buffer.allocUnsafe(size);
buf.fill(1);
const [fd] = await fs.openAsync('/8', 'w+');
const [bytesWritten] = await fs.writeAsync(fd, buf, 0, size, 0);
assert.strictEqual(bytesWritten, size);
buf.fill(0);
const [bytesRead] = await fs.readAsync(fd, buf, 0, size, 0);
const buf2 = Buffer.allocUnsafe(size);
buf2.fill(1);
assert.strictEqual(bytesRead, size);
assert(buf.equals(buf2));
await fs.closeAsync(fd);
});
});
// Error paths: errno/code pairs follow the WASI numbering (see lib/wasi.js).
describe('open non existent file for reading', () => {
testOnAllDisksMount(async (fs) => {
try {
await fs.openAsync('/7', 'r');
assert(false);
} catch (err) {
assert.strictEqual(err.errno, 44);
assert.strictEqual(err.code, 'ENOENT');
}
});
});
describe('create file in non existent folder', () => {
testOnAllDisksMount(async (fs) => {
try {
await fs.openAsync('/7/8', 'w');
assert(false);
} catch(err) {
assert.strictEqual(err.errno, 54);
assert.strictEqual(err.code, 'ENOTDIR');
}
});
});
describe('rmdir', () => {
testOnAllDisksMount(async (fs) => {
await fs.rmdirAsync('/lost+found');
const [files] = await fs.readdirAsync('/');
files.sort();
assert.deepEqual(files, [ '1', '2', '3', '4', '5' ]);
});
});
describe('rmdir a folder that does not exist', () => {
testOnAllDisksMount(async (fs) => {
try {
await fs.rmdirAsync('/no-such-folder');
assert(false);
} catch(error) {
assert.strictEqual(error.code, 'ENOENT');
assert.strictEqual(error.errno, 44);
}
});
});
describe('rmdir a file', () => {
testOnAllDisksMount(async (fs) => {
try {
await fs.rmdirAsync('/1');
assert(false);
} catch(error) {
assert.strictEqual(error.code, 'ENOTDIR');
assert.strictEqual(error.errno, 54);
}
});
});
// unlink accepts both string and Buffer paths.
describe('unlink', () => {
testOnAllDisksMount(async (fs) => {
await fs.unlinkAsync('/1');
await fs.unlinkAsync(Buffer.from('/2'));
const [files] = await fs.readdirAsync('/');
files.sort();
assert.deepEqual(files, [ '3', '4', '5', 'lost+found' ]);
});
});
describe('unlink a file that does not exist', () => {
testOnAllDisksMount(async (fs) => {
try {
await fs.unlinkAsync('/no-such-file');
assert(false);
} catch(error) {
assert.strictEqual(error.code, 'ENOENT');
assert.strictEqual(error.errno, 44);
}
});
});
describe('unlink a directory', () => {
testOnAllDisksMount(async (fs) => {
try {
await fs.unlinkAsync('/lost+found');
assert(false);
} catch(error) {
assert.strictEqual(error.code, 'EISDIR');
assert.strictEqual(error.errno, 31);
}
});
});
// access() with no mode argument only checks existence.
describe('access', () => {
testOnAllDisksMount(async (fs) => {
await fs.accessAsync('/1');
});
});
// X_OK must fail when no execute bit is set on the file.
describe('execute access on a file that can not be executed', () => {
testOnAllDisksMount(async (fs) => {
try {
await fs.accessAsync('/1', fs.constants.X_OK);
assert(false);
} catch(error) {
assert.strictEqual(error.code, 'EACCES');
assert.strictEqual(error.errno, 2);
}
});
});
describe('mkdir', () => {
testOnAllDisksMount(async (fs) => {
await fs.mkdirAsync('/new-folder');
const [files] = await fs.readdirAsync('/');
files.sort();
assert.deepEqual(
files,
[ '1', '2', '3', '4', '5', 'lost+found', 'new-folder' ]
);
});
});
// Trailing slashes must be stripped before the path reaches the C layer.
describe('mkdir with slashes at the end of the path', () => {
testOnAllDisksMount(async (fs) => {
await fs.mkdirAsync(Buffer.from('/new-folder//////'));
const [files] = await fs.readdirAsync('/');
files.sort();
assert.deepEqual(
files,
[ '1', '2', '3', '4', '5', 'lost+found', 'new-folder' ]
);
});
});
describe('unlink in a directory that is not /', () => {
testOnAllDisksMount(async (fs) => {
await fs.mkdirAsync('/new-folder-2');
const [files] = await fs.readdirAsync('/');
files.sort();
assert.deepEqual(
files,
[ '1', '2', '3', '4', '5', 'lost+found', 'new-folder-2' ]
);
// Also test trailing slashes removal
await fs.writeFileAsync('/new-folder-2/filename////', 'some-data');
const [files2] = await fs.readdirAsync('/new-folder-2');
assert.deepEqual(files2, ['filename']);
await fs.unlinkAsync('/new-folder-2/filename');
const [files3] = await fs.readdirAsync('/new-folder-2');
assert.deepEqual(files3, []);
});
});
// 0o467 => d r-- rw- rwx once rendered by humanFileMode.
describe('mkdir specific mode', () => {
testOnAllDisksMount(async (fs) => {
await fs.mkdirAsync('/new-folder', 0o467);
const [files] = await fs.readdirAsync('/');
files.sort();
assert.deepEqual(
files,
[ '1', '2', '3', '4', '5', 'lost+found', 'new-folder' ]
);
const [stats] = await fs.statAsync('/new-folder');
assert.strictEqual(humanFileMode(fs, stats), 'dr--rw-rwx');
});
});
// Writing through a read-only fd must raise EBADF, not corrupt the file.
describe('write in a readonly file', () => {
testOnAllDisksMount(async (fs) => {
const [fd] = await fs.openAsync('/1', 'r');
try {
await fs.writeAsync(fd, 'two');
assert(false);
} catch(error) {
assert.strictEqual(error.errno, 8);
assert.strictEqual(error.code, 'EBADF');
}
await fs.closeAsync(fd);
});
});
describe('read a writeonly file', () => {
testOnAllDisksMount(async (fs) => {
const [fd] = await fs.openAsync('/1', 'w');
try {
await fs.readFileAsync(fd, 'utf8');
assert(false);
} catch(error) {
assert.strictEqual(error.errno, 8);
assert.strictEqual(error.code, 'EBADF');
}
await fs.closeAsync(fd);
});
});
// 'a+' appends on write but still allows positioned reads from offset 0.
describe('append mode', () => {
testOnAllDisksMount(async (fs) => {
const [fd] = await fs.openAsync('/1', 'a+');
const text = 'two\n';
const [bytesWritten, data] = await fs.writeAsync(fd, text);
assert.strictEqual(bytesWritten, text.length);
assert.strictEqual(data, text);
const buffer = Buffer.alloc(16);
const [bytesRead, data2] = await fs.readAsync(fd, buffer, 0, buffer.length, 0);
assert.strictEqual(bytesRead, 8);
const dataStr = data2.slice(0, bytesRead).toString();
assert.strictEqual(dataStr, 'one\ntwo\n');
await fs.closeAsync(fd);
});
});
describe('readdir a folder that does not exist', () => {
testOnAllDisksMount(async (fs) => {
try {
await fs.readdirAsync('/no-such-folder');
assert(false);
} catch(error) {
assert.strictEqual(error.errno, 44);
assert.strictEqual(error.code, 'ENOENT');
}
});
});
// fchmod / chmod permutations, for files and directories.
describe('fchmod', () => {
testOnAllDisksMount(async (fs) => {
const [fd] = await fs.openAsync('/1', 'r');
await fs.fchmodAsync(fd, 0o777);
const [stats] = await fs.fstatAsync(fd);
assert.strictEqual(humanFileMode(fs, stats), '-rwxrwxrwx');
await fs.closeAsync(fd);
});
});
describe('fchmod 2', () => {
testOnAllDisksMount(async (fs) => {
const [fd] = await fs.openAsync('/1', 'r');
await fs.fchmodAsync(fd, 0o137);
const [stats] = await fs.fstatAsync(fd);
assert.strictEqual(humanFileMode(fs, stats), '---x-wxrwx');
await fs.closeAsync(fd);
});
});
describe('fchmod a folder', () => {
testOnAllDisksMount(async (fs) => {
const [fd] = await fs.openAsync('/lost+found', 'r');
await fs.fchmodAsync(fd, 0o137);
const [stats] = await fs.fstatAsync(fd);
assert.strictEqual(humanFileMode(fs, stats), 'd--x-wxrwx');
await fs.closeAsync(fd);
});
});
describe('chmod', () => {
testOnAllDisksMount(async (fs) => {
const path = '/1';
await fs.chmodAsync(path, 0o777);
const [stats] = await fs.statAsync(path);
assert.strictEqual(humanFileMode(fs, stats), '-rwxrwxrwx');
});
});
describe('chmod 2', () => {
testOnAllDisksMount(async (fs) => {
const path = '/1';
await fs.chmodAsync(path, 0o137);
const [stats] = await fs.statAsync(path);
assert.strictEqual(humanFileMode(fs, stats), '---x-wxrwx');
});
});
describe('chmod a folder', () => {
testOnAllDisksMount(async (fs) => {
const path = '/lost+found';
await fs.chmodAsync(path, 0o137);
const [stats] = await fs.statAsync(path);
assert.strictEqual(humanFileMode(fs, stats), 'd--x-wxrwx');
});
});
// Ownership changes through an fd and through a path.
describe('fchown', () => {
testOnAllDisksMount(async (fs) => {
const [fd] = await fs.openAsync('/1', 'r');
await fs.fchownAsync(fd, 2000, 3000);
const [stats] = await fs.fstatAsync(fd);
assert.strictEqual(stats.uid, 2000);
assert.strictEqual(stats.gid, 3000);
await fs.closeAsync(fd);
});
});
describe('chown', () => {
testOnAllDisksMount(async (fs) => {
const path = '/1';
await fs.chownAsync(path, 2000, 3000);
const [stats] = await fs.statAsync(path);
assert.strictEqual(stats.uid, 2000);
assert.strictEqual(stats.gid, 3000);
});
});
// Open-flag behaviors: exclusive create, directory-only, truncate-on-open.
describe('O_EXCL', () => {
testOnAllDisksMount(async (fs) => {
try {
await fs.openAsync('/1', 'wx');
assert(false);
} catch(err) {
assert.strictEqual(err.code, 'EEXIST');
assert.strictEqual(err.errno, 20);
}
});
});
describe('O_DIRECTORY', () => {
testOnAllDisksMount(async (fs) => {
try {
await fs.openAsync('/1', fs.constants.O_DIRECTORY);
assert(false);
} catch(err) {
assert.strictEqual(err.code, 'ENOTDIR');
assert.strictEqual(err.errno, 54);
}
});
});
describe('O_TRUNC', () => {
testOnAllDisksMount(async (fs) => {
const path = '/1';
const [fd] = await fs.openAsync(path, 'w');
await fs.closeAsync(fd);
const [content] = await fs.readFileAsync(path, 'utf8');
assert.strictEqual(content, '');
});
});
// umount must be safe even with fds left open; closeAllFileDescriptors is the
// hook the umount path uses to release them.
describe('close all fds on umount', () => {
testOnAllDisks(async (disk) => {
const fs = Bluebird.promisifyAll(await ext2fs.mount(disk), { multiArgs: true });
await fs.openAsync('/1', 'r');
await fs.openAsync('/2', 'r');
await fs.openAsync('/3', 'r');
// this is the function called before umount
await fs.closeAllFileDescriptors();
assert.strictEqual(fs.openFiles.size, 0);
await ext2fs.umount(fs);
});
});
// Every fd-taking call must reject unknown fds with EBADF.
describe('close bad fd', () => {
testOnAllDisksMount(async (fs) => {
const badFd = 9000;
const buf = Buffer.alloc(8);
let calls = [
fs.closeAsync(badFd),
fs.fchmodAsync(badFd, 0o777),
fs.fchownAsync(badFd, 2000, 3000),
fs.fstatAsync(badFd),
fs.readAsync(badFd, buf, 0, buf.length, 0),
fs.writeAsync(badFd, buf, 0, buf.length, 0)
];
calls = calls.map(async (p) => {
try {
await p;
assert(false);
} catch(err) {
assert.strictEqual(err.code, 'EBADF');
assert.strictEqual(err.errno, 8);
}
});
await Promise.all(calls);
});
});
// Exercises the operation queue: concurrent opens must serialize cleanly.
describe('create 20 files at once', () => {
testOnAllDisksMount(async (fs) => {
const promises = [];
for (let i=0; i<20; i++) {
promises.push(fs.openAsync('/file_number_' + i, 'w'));
}
await Promise.all(promises);
});
});
describe('createReadStream', () => {
testOnAllDisksMount(async (fs) => {
const contents = await readStream(fs.createReadStream('/1'));
assert.strictEqual(contents, 'one\n');
});
});
describe('createWriteStream', () => {
testOnAllDisksMount(async (fs) => {
const path = '/1';
const newContent = 'wololo';
const output = fs.createWriteStream(path);
const input = createReadableStreamFromString(newContent);
input.pipe(output);
await waitStream(output);
const [contents] = await fs.readFileAsync(path, 'utf8');
assert.strictEqual(contents, newContent);
});
});
describe('writeFile and readFile', () => {
const filename = '/config.txt';
const content = 'content\n';
const encoding = 'utf8';
testOnAllDisksMount(async (fs) => {
await fs.writeFileAsync(filename, content, encoding);
const [data] = await fs.readFileAsync(filename, encoding);
assert.strictEqual(data, content);
});
});
// trim() discards unused blocks; expected used ranges differ per image format.
describe('trim', () => {
testOnAllDisks(async (disk) => {
const blockSize = 512;
await ext2fs.withMountedDisk(disk, 0, async (fs) => {
await fs.trim();
});
const ranges = await disk.getRanges(blockSize);
if (disk.imageName === 'ext2') {
assert.strictEqual(ranges.length, 3);
assert.strictEqual(ranges[0].offset, 0);
assert.strictEqual(ranges[0].length, 152576);
assert.strictEqual(ranges[1].offset, 164864);
assert.strictEqual(ranges[1].length, 2048);
assert.strictEqual(ranges[2].offset, 3146752);
assert.strictEqual(ranges[2].length, 5120);
} else if (disk.imageName === 'ext3') {
assert.strictEqual(ranges.length, 2);
assert.strictEqual(ranges[0].offset, 0);
assert.strictEqual(ranges[0].length, 1208320);
assert.strictEqual(ranges[1].offset, 3146752);
assert.strictEqual(ranges[1].length, 5120);
} else if (disk.imageName === 'ext4') {
assert.strictEqual(ranges.length, 2);
assert.strictEqual(ranges[0].offset, 0);
assert.strictEqual(ranges[0].length, 1208320);
assert.strictEqual(ranges[1].offset, 3146752);
assert.strictEqual(ranges[1].length, 5120);
}
});
});
});
<|start_filename|>Makefile<|end_filename|>
# Verbosity switch: `make V=1` prints the full commands instead of terse lines.
# NOTE(review): E is '@echo' in both branches; only Q (command-hiding) changes.
V =
ifeq ($(strip $(V)),)
E = @echo
Q = @
else
E = @echo
Q =
endif
# Locations of the vendored e2fsprogs sources and of our WASM glue code.
srcdir=deps/e2fsprogs/
libext2fsdir=$(srcdir)lib/ext2fs/
prejs=src/pre.js
glue=src/glue
# Compile with Emscripten; config/ supplies the HAVE_CONFIG_H headers.
CC = emcc
CFLAGS = -DHAVE_CONFIG_H \
-I$(srcdir)/lib \
-Iconfig/common \
-Iconfig/emscripten \
-O3
# ASYNCIFY lets the listed JS I/O callbacks suspend/resume C code; the
# EXPORTED_FUNCTIONS list is every C entry point lib/binding.js ccalls.
JSFLAGS = \
-s ASYNCIFY \
-s ASYNCIFY_IMPORTS="['blk_read', 'blk_write', 'discard', 'flush']" \
-s EXPORTED_FUNCTIONS="['_malloc_from_js', '_free_from_js', '_node_ext2fs_mount', '_node_ext2fs_trim', '_node_ext2fs_readdir', '_node_ext2fs_open', '_node_ext2fs_read', '_node_ext2fs_write', '_node_ext2fs_unlink', '_node_ext2fs_chmod', '_node_ext2fs_chown', '_node_ext2fs_mkdir', '_node_ext2fs_close', '_node_ext2fs_umount', '_node_ext2fs_stat_i_mode', '_node_ext2fs_stat_i_links_count', '_node_ext2fs_stat_i_uid', '_node_ext2fs_stat_i_gid', '_node_ext2fs_stat_blocksize', '_node_ext2fs_stat_ino', '_node_ext2fs_stat_i_size', '_node_ext2fs_stat_i_blocks', '_node_ext2fs_stat_i_atime', '_node_ext2fs_stat_i_mtime', '_node_ext2fs_stat_i_ctime']" \
-s EXPORTED_RUNTIME_METHODS="['ccall']" \
--pre-js $(prejs)
# Object list: the ext2fs library proper plus the com_err support library.
OBJS= \
$(libext2fsdir)alloc.o \
$(libext2fsdir)alloc_sb.o \
$(libext2fsdir)alloc_stats.o \
$(libext2fsdir)alloc_tables.o \
$(libext2fsdir)atexit.o \
$(libext2fsdir)badblocks.o \
$(libext2fsdir)bb_inode.o \
$(libext2fsdir)bitmaps.o \
$(libext2fsdir)bitops.o \
$(libext2fsdir)blkmap64_ba.o \
$(libext2fsdir)blkmap64_rb.o \
$(libext2fsdir)blknum.o \
$(libext2fsdir)block.o \
$(libext2fsdir)bmap.o \
$(libext2fsdir)check_desc.o \
$(libext2fsdir)closefs.o \
$(libext2fsdir)crc16.o \
$(libext2fsdir)crc32c.o \
$(libext2fsdir)csum.o \
$(libext2fsdir)dblist.o \
$(libext2fsdir)dblist_dir.o \
$(libext2fsdir)dir_iterate.o \
$(libext2fsdir)dirblock.o \
$(libext2fsdir)dirhash.o \
$(libext2fsdir)expanddir.o \
$(libext2fsdir)ext_attr.o \
$(libext2fsdir)extent.o \
$(libext2fsdir)fallocate.o \
$(libext2fsdir)fileio.o \
$(libext2fsdir)finddev.o \
$(libext2fsdir)flushb.o \
$(libext2fsdir)freefs.o \
$(libext2fsdir)gen_bitmap.o \
$(libext2fsdir)gen_bitmap64.o \
$(libext2fsdir)get_num_dirs.o \
$(libext2fsdir)get_pathname.o \
$(libext2fsdir)getsectsize.o \
$(libext2fsdir)getsize.o \
$(libext2fsdir)i_block.o \
$(libext2fsdir)icount.o \
$(libext2fsdir)ind_block.o \
$(libext2fsdir)initialize.o \
$(libext2fsdir)inline.o \
$(libext2fsdir)inline_data.o \
$(libext2fsdir)inode.o \
$(libext2fsdir)io_manager.o \
$(libext2fsdir)ismounted.o \
$(libext2fsdir)link.o \
$(libext2fsdir)llseek.o \
$(libext2fsdir)lookup.o \
$(libext2fsdir)mkdir.o \
$(libext2fsdir)mkjournal.o \
$(libext2fsdir)mmp.o \
$(libext2fsdir)namei.o \
$(libext2fsdir)native.o \
$(libext2fsdir)newdir.o \
$(libext2fsdir)openfs.o \
$(libext2fsdir)progress.o \
$(libext2fsdir)punch.o \
$(libext2fsdir)rbtree.o \
$(libext2fsdir)read_bb.o \
$(libext2fsdir)read_bb_file.o \
$(libext2fsdir)res_gdt.o \
$(libext2fsdir)rw_bitmaps.o \
$(libext2fsdir)sha512.o \
$(libext2fsdir)swapfs.o \
$(libext2fsdir)symlink.o \
$(libext2fsdir)unlink.o \
$(libext2fsdir)valid_blk.o \
$(libext2fsdir)version.o \
$(libext2fsdir)../et/error_message.o \
$(libext2fsdir)../et/et_name.o \
$(libext2fsdir)../et/init_et.o \
$(libext2fsdir)../et/com_err.o \
$(libext2fsdir)../et/com_right.o
all: lib/libext2fs.js
# Generic C compile rule for the vendored sources.
%.o: %.c
$(E) " CC $<"
$(Q) $(CC) $(CFLAGS) -c $< -o $@
# The glue object additionally depends on its header.
$(glue).o: $(glue).c $(glue).h
$(E) " CC $<"
$(Q) $(CC) $(CFLAGS) -c $< -o $@
# Final link: produces lib/libext2fs.js + lib/libext2fs.wasm.
lib/libext2fs.js: $(OBJS) $(glue).o $(prejs)
$(E) " JSGEN $@"
$(Q) $(CC) $(CFLAGS) $(JSFLAGS) $(OBJS) $(glue).o -o $@
clean:
rm -f $(OBJS) $(glue).o lib/libext2fs.js lib/libext2fs.wasm
<|start_filename|>lib/wasi.js<|end_filename|>
'use strict';
// Mapping of POSIX error-code names to WASI errno numbers, as produced by the
// WASM build of libext2fs. Values must match the numbering the C side uses.
exports.CODE_TO_ERRNO = {
E2BIG: 1,
EACCES: 2,
EADDRINUSE: 3,
EADDRNOTAVAIL: 4,
EAFNOSUPPORT: 5,
EAGAIN: 6,
EALREADY: 7,
EBADF: 8,
EBADMSG: 9,
EBUSY: 10,
ECANCELED: 11,
ECHILD: 12,
ECONNABORTED: 13,
ECONNREFUSED: 14,
ECONNRESET: 15,
EDEADLOCK: 16,
EDESTADDRREQ: 17,
EDOM: 18,
EDQUOT: 19,
EEXIST: 20,
EFAULT: 21,
EFBIG: 22,
EHOSTUNREACH: 23,
EIDRM: 24,
EILSEQ: 25,
EINPROGRESS: 26,
EINTR: 27,
EINVAL: 28,
EIO: 29,
EISCONN: 30,
EISDIR: 31,
ELOOP: 32,
EMFILE: 33,
EMLINK: 34,
EMSGSIZE: 35,
EMULTIHOP: 36,
ENAMETOOLONG: 37,
ENETDOWN: 38,
ENETRESET: 39,
ENETUNREACH: 40,
ENFILE: 41,
ENOBUFS: 42,
ENODEV: 43,
ENOENT: 44,
ENOEXEC: 45,
ENOLCK: 46,
ENOLINK: 47,
ENOMEM: 48,
ENOMSG: 49,
ENOPROTOOPT: 50,
ENOSPC: 51,
ENOSYS: 52,
ENOTCONN: 53,
ENOTDIR: 54,
ENOTEMPTY: 55,
ENOTRECOVERABLE: 56,
ENOTSOCK: 57,
// NOTE(review): 58 is skipped here (in WASI, 58 is ENOTSUP/EOPNOTSUPP);
// presumably omitted on purpose — confirm before relying on it.
ENOTTY: 59,
ENXIO: 60,
EOVERFLOW: 61,
EOWNERDEAD: 62,
EPERM: 63,
EPIPE: 64,
EPROTO: 65,
EPROTONOSUPPORT: 66,
EPROTOTYPE: 67,
ERANGE: 68,
EROFS: 69,
ESPIPE: 70,
ESRCH: 71,
ESTALE: 72,
ETIMEDOUT: 73,
ETXTBSY: 74,
EXDEV: 75,
};
// Reverse lookup: errno number -> code name (used to build error objects).
exports.ERRNO_TO_CODE = {};
for (const [key, value] of Object.entries(exports.CODE_TO_ERRNO)) {
exports.ERRNO_TO_CODE[value] = key;
}
<|start_filename|>lib/binding.js<|end_filename|>
'use strict';
const Module = require('./libext2fs');
const { CODE_TO_ERRNO } = require('./wasi');
const {
ErrnoException,
ccallThrow,
ccallThrowAsync,
promiseToCallback,
withHeapBuffer,
withPathAsHeapBuffer,
} = require('./util');
function getCallback(req) {
if (!req || (typeof req.oncomplete !== 'function')) {
throw new Error('A callback is required.');
}
return req.oncomplete.bind(req);
}
module.exports = function(constants, fsPointer) {
const exports = {};
const openFiles = new Map();
exports.openFiles = openFiles;
exports.FSReqWrap = function () {};
let Stats;
exports.FSInitialize = function (s) {
Stats = s;
};
async function access(path, mode) {
const X = (
constants.S_IXUSR |
constants.S_IXGRP |
constants.S_IXOTH
);
const stats = await stat(path);
if (((mode & constants.X_OK) !== 0) && ((stats.mode & X) === 0)) {
throw new ErrnoException(CODE_TO_ERRNO['EACCES'], 'access', [path, mode]);
}
}
exports.access = function(path, mode, req) {
promiseToCallback(
access(path, mode),
getCallback(req),
);
};
function checkFd(fd, syscall, args) {
if (!openFiles.has(fd)) {
throw new ErrnoException(CODE_TO_ERRNO['EBADF'], syscall, args);
}
}
async function chmod(path, mode) {
const fd = await open(path, 0, 0);
await fchmod(fd, mode);
}
exports.chmod = function(path, mode, req) {
promiseToCallback(
chmod(path, mode),
getCallback(req),
);
};
async function chown(path, uid, gid) {
const fd = await open(path, 0, 0);
await fchown(fd, uid, gid);
}
exports.chown = function(path, uid, gid, req) {
promiseToCallback(
chown(path, uid, gid),
getCallback(req),
);
};
async function close(fd) {
checkFd(fd, 'node_ext2fs_close', [fd]);
await ccallThrowAsync('node_ext2fs_close', 'number', ['number'], [fd]);
openFiles.delete(fd);
}
exports.close = function(fd, req) {
promiseToCallback(
close(fd),
getCallback(req),
);
};
async function fchmod(fd, mode) {
checkFd(fd, 'node_ext2fs_chmod', [fd, mode]);
await ccallThrowAsync('node_ext2fs_chmod', 'number', ['number', 'number'], [fd, mode]);
}
exports.fchmod = function(fd, mode, req) {
promiseToCallback(
fchmod(fd, mode),
getCallback(req),
);
};
async function fchown(fd, uid, gid) {
checkFd(fd, 'node_ext2fs_chown', [fd, uid, gid]);
await ccallThrowAsync('node_ext2fs_chown', 'number', ['number', 'number', 'number'], [fd, uid, gid]);
}
exports.fchown = function(fd, uid, gid, req) {
promiseToCallback(
fchown(fd, uid, gid),
getCallback(req),
);
};
exports.fdatasync = function() {
throw new Error('Unimplemented');
};
function fstat(fd) {
checkFd(fd, 'fstat', [fd]);
function getAttr(name) {
return ccallThrow(`node_ext2fs_stat_${name}`, 'number', ['number'], [fd]);
}
const ctime = getAttr('i_ctime') * 1000;
return new Stats(
0, // dev
getAttr('i_mode'),
getAttr('i_links_count'),
getAttr('i_uid'),
getAttr('i_gid'),
0, // rdev
getAttr('blocksize'),
getAttr('ino'),
getAttr('i_size'),
getAttr('i_blocks'),
getAttr('i_atime') * 1000,
getAttr('i_mtime') * 1000,
ctime,
ctime,
);
}
exports.fstat = function(fd, req) {
const callback = getCallback(req);
try {
callback(null, fstat(fd));
} catch (error) {
callback(error);
}
};
exports.fsync = function() {
throw new Error('Unimplemented');
};
exports.ftruncate = function() {
throw new Error('Unimplemented');
};
exports.futimes = function() {
throw new Error('Unimplemented');
};
exports.link = function() {
throw new Error('Unimplemented');
};
exports.lstat = function() {
throw new Error('Unimplemented');
};
async function mkdir(path, mode) {
return await withPathAsHeapBuffer(path, async (heapBufferPointer) => {
await ccallThrowAsync('node_ext2fs_mkdir', 'number', ['number', 'number', 'number'], [fsPointer, heapBufferPointer, mode]);
});
}
exports.mkdir = function(path, mode, req) {
promiseToCallback(
mkdir(path, mode),
getCallback(req),
);
};
exports.mkdtemp = function() {
throw new Error('Unimplemented');
};
async function open(path, flags, mode) {
return await withPathAsHeapBuffer(path, async (heapBufferPointer) => {
const fd = await ccallThrowAsync(
'node_ext2fs_open',
'number',
['number', 'number', 'number', 'number'],
[fsPointer, heapBufferPointer, flags, mode],
);
openFiles.set(fd, flags);
return fd;
});
}
exports.open = function(path, flags, mode, req) {
promiseToCallback(
open(path, flags, mode),
getCallback(req),
);
};
async function read(fd, buffer, offset, length, position) {
checkFd(fd, 'node_ext2fs_read', [fd, buffer, offset, length, position]);
return await withHeapBuffer(length, async (heapBuffer, heapBufferPointer) => {
const got = await ccallThrowAsync(
'node_ext2fs_read',
'number',
['number', 'number', 'number', 'number', 'number'],
[fd, openFiles.get(fd), heapBufferPointer, length, position],
);
heapBuffer.copy(buffer, offset);
return got;
});
}
exports.read = function(fd, buffer, offset, length, position, req) {
promiseToCallback(
read(
fd,
buffer,
offset,
length,
(typeof position !== 'number') ? -1 : position,
),
getCallback(req),
);
};
async function readdir(path, encoding) {
if (encoding === undefined) {
encoding = 'utf8';
}
const array = [];
await Module.withObjectId(array, async (arrayId) => {
await withPathAsHeapBuffer(path, async (heapBufferPointer) => {
await ccallThrowAsync('node_ext2fs_readdir', 'number', ['number', 'number', 'number'], [fsPointer, heapBufferPointer, arrayId]);
});
});
if (encoding === 'buffer') {
return array;
} else {
return array.map((b) => b.toString(encoding));
}
}
exports.readdir = function(path, encoding, req) {
promiseToCallback(
readdir(
path,
encoding,
),
getCallback(req),
);
};
exports.readlink = function() {
throw new Error('Unimplemented');
};
exports.rename = function() {
throw new Error('Unimplemented');
};
async function unlink(path, isdir) {
await withPathAsHeapBuffer(path, async (heapBufferPointer) => {
await ccallThrowAsync(
'node_ext2fs_unlink',
'number',
['number', 'number', 'number'],
[fsPointer, heapBufferPointer, isdir],
);
});
}
exports.rmdir = function(path, req) {
promiseToCallback(
unlink(path, 1),
getCallback(req),
);
};
async function stat(path) {
// TODO: noatime ?
const fd = await open(path, 'r', 0);
const stats = await fstat(fd);
await close(fd);
return stats;
}
exports.stat = function(path, req) {
promiseToCallback(
stat(path),
getCallback(req),
);
};
exports.StatWatcher = function() {
throw new Error('Unimplemented');
};
exports.symlink = function() {
throw new Error('Unimplemented');
};
exports.unlink = function(path, req) {
promiseToCallback(
unlink(path, 0),
getCallback(req),
);
};
exports.utimes = function() {
throw new Error('Unimplemented');
};
async function writeBuffer(fd, buffer, offset, length, position) {
checkFd(fd, 'node_ext2fs_write', [fd, buffer, offset, length, position]);
return await withHeapBuffer(length, async (heapBuffer, heapBufferPointer) => {
buffer.copy(heapBuffer, 0, offset, offset + length);
return await ccallThrowAsync(
'node_ext2fs_write',
'number',
['number', 'number', 'number', 'number', 'number'],
[fd, openFiles.get(fd), heapBufferPointer, length, position],
);
});
}
exports.writeBuffer = function(fd, buffer, offset, length, position, req) {
promiseToCallback(
writeBuffer(
fd,
buffer,
offset,
length,
(typeof position !== 'number') ? -1 : position,
),
getCallback(req),
);
};
exports.writeBuffers = function() {
throw new Error('Unimplemented');
};
exports.writeString = function(fd, string, position, enc, req) {
const buffer = Buffer.from(string, enc);
exports.writeBuffer(fd, buffer, 0, buffer.length, position, req);
};
exports.closeAllFileDescriptors = async function() {
for (const fd of openFiles.keys()) {
await close(fd);
}
};
return exports;
};
<|start_filename|>package.json<|end_filename|>
{
"name": "ext2fs",
"version": "3.0.5",
"description": "WASM bindings to libext2fs for cross-platform ext filesystem handling",
"author": "<NAME> <<EMAIL>>",
"contributors": [
"<NAME> <<EMAIL>>",
"<NAME> <<EMAIL>>"
],
"license": "Apache-2.0",
"main": "index.js",
"files": [
"index.js",
"lib/**/*.js",
"lib/**/*.wasm"
],
"scripts": {
"build": "make -j $(nproc)",
"prepare": "npm run build",
"pretest": "eslint lib test src/pre.js",
"test": "mocha"
},
"devDependencies": {
"bluebird": "^3.7.2",
"eslint": "^7.5.0",
"file-disk": "^8.0.0",
"mocha": "^8.2.1"
},
"homepage": "https://github.com/balena-io/node-ext2fs#readme",
"repository": {
"type": "git",
"url": "git+https://github.com/balena-io/node-ext2fs.git"
},
"keywords": [
"extfs",
"ext",
"ext2",
"ext3",
"ext4",
"filesystem",
"fs"
],
"bugs": {
"url": "https://github.com/balena-io/node-ext2fs/issues"
},
"dependencies": {}
}
<|start_filename|>src/pre.js<|end_filename|>
/*global Module*/
// Make js objects accessible from C
// Registry mapping small integer ids to arbitrary JS objects, so C code can
// refer to them (e.g. the array readdir fills) by id.
const objects = new Map();
let nextId = 0;
// Ids released by deleteObject, recycled before minting new ones.
const idPool = [];
// Returns a free id: reuses a released one when available, otherwise mints
// the next integer (ids start at 1).
function reserveId() {
if (idPool.length === 0) {
nextId += 1;
idPool.push(nextId);
}
return idPool.shift();
}
// Returns an id to the pool. Callers must not release the same id twice.
function releaseId(id) {
idPool.push(id);
}
// Registers `obj` and returns its id.
function setObject(obj) {
const id = reserveId();
objects.set(id, obj);
return id;
}
Module.setObject = setObject;
// Looks up a registered object; undefined if the id is unknown.
function getObject(id) {
return objects.get(id);
}
Module.getObject = getObject;
// Unregisters the object and recycles its id.
function deleteObject(id) {
objects.delete(id);
releaseId(id);
}
Module.deleteObject = deleteObject;
// Scoped variant: registers `obj` for the duration of `fn(id)` only.
async function withObjectId(obj, fn) {
const id = setObject(obj);
try {
return await fn(id);
} finally {
deleteObject(id);
}
}
Module.withObjectId = withObjectId;
// Returns a js Buffer of the memory at `pointer`.
// (A view into the WASM heap — invalidated if the heap grows.)
function getBuffer(pointer, length) {
return Buffer.from(Module.HEAP8.buffer, pointer, length);
}
Module.getBuffer = getBuffer;
// from lib/wasi.js
Module.EIO = 29;
<|start_filename|>lib/queue.js<|end_filename|>
'use strict';
let running = false;
const queue = [];
async function run() {
running = true;
if (queue.length === 0) {
running = false;
return;
}
const { fn, args, resolve, reject } = queue.shift();
try {
resolve(await fn(...args));
} catch (error) {
reject(error);
} finally {
await run();
}
}
exports.addOperation = function(fn, args) {
return new Promise((resolve, reject) => {
queue.push({ fn, args, resolve, reject });
if (!running) {
run();
}
});
};
<|start_filename|>lib/disk.js<|end_filename|>
'use strict';
class DiskWrapper {
constructor(disk, offset=0) {
this.disk = disk;
this.offset = offset;
}
async read(buffer, bufferOffset, length, fileOffset) {
return await this.disk.read(buffer, bufferOffset, length, fileOffset + this.offset);
}
async write(buffer, bufferOffset, length, fileOffset) {
return await this.disk.write(buffer, bufferOffset, length, fileOffset + this.offset);
}
async discard(offset, length) {
return await this.disk.discard(offset + this.offset, length);
}
async flush() {
return await this.disk.flush();
}
}
exports.DiskWrapper = DiskWrapper;
<|start_filename|>lib/util.js<|end_filename|>
'use strict';
const { ERRNO_TO_CODE } = require('./wasi');
const Module = require('./libext2fs');
const queue = require('./queue');
async function promiseToCallback(promise, callback) {
try {
const result = await promise;
callback(null, result);
} catch (error) {
callback(error);
}
}
exports.promiseToCallback = promiseToCallback;
/**
 * Error subtype carrying a numeric WASI errno plus the syscall that failed,
 * mirroring node's ErrnoException shape (`errno`, `code`, `syscall`).
 * Unknown errno values get code 'UNKNOWN'.
 */
class ErrnoException extends Error {
  constructor(errno, syscall, args) {
    const code = ERRNO_TO_CODE[errno] || 'UNKNOWN';
    super(`${syscall} ${code} (${errno}) args: ${JSON.stringify(args)}`);
    Object.assign(this, { name: 'ErrnoException', errno, syscall, code });
  }
}
exports.ErrnoException = ErrnoException;
/**
 * Synchronous Module.ccall wrapper: negative return values are WASI errnos
 * and are raised as ErrnoException; anything else is returned as-is
 * (including undefined for 'void' returns).
 */
function ccallThrow(name, returnType, argsType, args) {
  const rc = Module.ccall(name, returnType, argsType, args);
  if (rc < 0) {
    throw new ErrnoException(-rc, name, args);
  }
  return rc;
}
exports.ccallThrow = ccallThrow;
/**
 * Like ccallThrow, but routes the (ASYNCIFY-enabled) call through the
 * serialized operation queue and awaits its result. Negative results are
 * raised as ErrnoException.
 */
async function ccallThrowAsync(name, returnType, argsType, args) {
  const rc = await queue.addOperation(
    Module.ccall,
    [name, returnType, argsType, args, { async: true }],
  );
  if (rc < 0) {
    throw new ErrnoException(-rc, name, args);
  }
  return rc;
}
exports.ccallThrowAsync = ccallThrowAsync;
/**
 * Allocates `length` bytes on the WASM heap, exposes them to `fn` as a
 * Buffer view plus the raw pointer, and frees the allocation afterwards
 * (even if `fn` rejects).
 */
async function withHeapBuffer(length, fn) {
  const pointer = ccallThrow('malloc_from_js', 'number', ['number'], [length]);
  const view = Module.getBuffer(pointer, length);
  try {
    return await fn(view, pointer);
  } finally {
    ccallThrow('free_from_js', 'void', ['number'], [pointer]);
  }
}
exports.withHeapBuffer = withHeapBuffer;
function rstripSlashesBuffer(buf) {
while (buf[buf.length - 1] === 0x2f) {
buf = buf.slice(0, buf.length - 1);
}
return buf;
}
/**
 * Copies a path (string or Buffer) onto the WASM heap as a NUL-terminated
 * C string — with trailing slashes stripped first — and calls `fn` with the
 * pointer. The heap allocation is freed when `fn` settles.
 */
async function withPathAsHeapBuffer(path, fn) {
  const raw = Buffer.isBuffer(path) ? path : Buffer.from(path);
  const stripped = rstripSlashesBuffer(raw);
  const byteCount = stripped.length + 1; // +1 for the trailing NUL
  return await withHeapBuffer(byteCount, async (heapBuffer, heapBufferPointer) => {
    stripped.copy(heapBuffer);
    heapBuffer[byteCount - 1] = 0;
    return await fn(heapBufferPointer);
  });
}
exports.withPathAsHeapBuffer = withPathAsHeapBuffer;
| resin-io/node-ext2fs |
<|start_filename|>lib/src/animate_do_zooms.dart<|end_filename|>
import 'package:flutter/material.dart';
/// Class [ZoomIn]:
/// [key]: optional widget key reference
/// [child]: mandatory, widget to animate
/// [duration]: how much time the animation should take
/// [delay]: delay before the animation starts
/// [controller]: optional/mandatory, exposes the animation controller created by Animate_do
/// the controller can be use to repeat, reverse and anything you want, its just an animation controller
class ZoomIn extends StatefulWidget {
// Optional widget key, forwarded to the StatefulWidget superclass.
final Key? key;
// The widget to scale/fade in.
final Widget child;
// Total animation duration.
final Duration duration;
// Delay before the animation starts (ignored when manualTrigger is true).
final Duration delay;
// Callback exposing the internally created AnimationController.
final Function(AnimationController)? controller;
// When true, the animation only runs via the exposed controller.
final bool manualTrigger;
// When false, the widget renders without animating automatically.
final bool animate;
// Target scale factor the child zooms up to.
final double from;
ZoomIn(
{this.key,
required this.child,
this.duration = const Duration(milliseconds: 500),
this.delay = const Duration(milliseconds: 0),
this.controller,
this.manualTrigger = false,
this.animate = true,
this.from = 1.0})
: super(key: key) {
// manualTrigger without a controller callback would make the animation
// impossible to start, so fail fast with an explanatory error.
if (manualTrigger == true && controller == null) {
throw FlutterError('If you want to use manualTrigger:true, \n\n'
'Then you must provide the controller property, that is a callback like:\n\n'
' ( controller: AnimationController) => yourController = controller \n\n');
}
}
@override
_ZoomInState createState() => _ZoomInState();
}
/// State class, where the magic happens
class _ZoomInState extends State<ZoomIn> with SingleTickerProviderStateMixin {
AnimationController? controller;
// Guards against calling forward() on a disposed controller from the
// delayed Future below.
bool disposed = false;
// Scale animation: 0.0 -> widget.from over the whole duration.
late Animation<double> fade;
// Opacity animation: 0 -> 1 over the first 65% of the duration.
late Animation<double> opacity;
@override
void dispose() {
disposed = true;
controller!.dispose();
super.dispose();
}
@override
void initState() {
super.initState();
controller = AnimationController(duration: widget.duration, vsync: this);
fade = Tween(begin: 0.0, end: widget.from)
.animate(CurvedAnimation(curve: Curves.easeOut, parent: controller!));
opacity = Tween<double>(begin: 0.0, end: 1).animate(
CurvedAnimation(parent: controller!, curve: Interval(0, 0.65)));
if (!widget.manualTrigger && widget.animate) {
Future.delayed(widget.delay, () {
if (!disposed) {
controller?.forward();
}
});
}
// Hand the controller to the caller so it can repeat/reverse/etc.
if (widget.controller is Function) {
widget.controller!(controller!);
}
}
@override
Widget build(BuildContext context) {
// NOTE(review): forward() is also triggered here on every build when there
// is no delay; confirm this re-trigger-on-rebuild behavior is intended.
if (widget.animate && widget.delay.inMilliseconds == 0) {
controller?.forward();
}
return AnimatedBuilder(
animation: fade,
builder: (BuildContext context, Widget? child) {
return Transform.scale(
scale: fade.value,
child: Opacity(
opacity: opacity.value,
child: widget.child,
),
);
});
}
}
/// Class [ZoomOut]:
/// [key]: optional widget key reference
/// [child]: mandatory, widget to animate
/// [duration]: how much time the animation should take
/// [delay]: delay before the animation starts
/// [controller]: optional/mandatory, exposes the animation controller created by Animate_do
/// the controller can be used to repeat, reverse and do anything you want; it's just an animation controller
/// Exit animation: scales its [child] down from full size (to [from]) while
/// it fades out, over [duration], optionally waiting [delay] first.
class ZoomOut extends StatefulWidget {
  final Key? key;
  final Widget child;
  final Duration duration;
  final Duration delay;
  final Function(AnimationController)? controller;
  final bool manualTrigger;
  final bool animate;
  final double from;

  ZoomOut({
    this.key,
    required this.child,
    this.duration = const Duration(milliseconds: 500),
    this.delay = const Duration(milliseconds: 0),
    this.controller,
    this.manualTrigger = false,
    this.animate = true,
    this.from = 0.0,
  }) : super(key: key) {
    // A manual trigger is unusable unless the caller receives the controller.
    if (manualTrigger && controller == null) {
      throw FlutterError('If you want to use manualTrigger:true, \n\n'
          'Then you must provide the controller property, that is a callback like:\n\n'
          ' ( controller: AnimationController) => yourController = controller \n\n');
    }
  }

  @override
  _ZoomOutState createState() => _ZoomOutState();
}
/// State class, where the magic happens
/// Drives the ZoomOut effect: a scale tween (1.0 -> widget.from) combined
/// with an opacity tween that fades out during the first 65% of the animation.
class _ZoomOutState extends State<ZoomOut> with SingleTickerProviderStateMixin {
  AnimationController? controller;
  // Set in dispose() so the delayed start callback never touches a dead controller.
  bool disposed = false;
  late Animation<double> zoom;
  late Animation<double> opacity;
  @override
  void dispose() {
    disposed = true;
    controller!.dispose();
    super.dispose();
  }
  @override
  void initState() {
    super.initState();
    controller = AnimationController(duration: widget.duration, vsync: this);
    // Scale factor: shrinks from full size toward the configured target.
    zoom = Tween(begin: 1.0, end: widget.from)
        .animate(CurvedAnimation(curve: Curves.easeOut, parent: controller!));
    // Opacity reaches 0.0 at 65% of the controller's progress.
    opacity = Tween<double>(begin: 1.0, end: 0.0).animate(
        CurvedAnimation(parent: controller!, curve: Interval(0, 0.65)));
    if (!widget.manualTrigger && widget.animate) {
      // Honor the configured delay before auto-starting.
      Future.delayed(widget.delay, () {
        if (!disposed) {
          controller?.forward();
        }
      });
    }
    // Hand the controller back to the caller when a callback was supplied.
    if (widget.controller is Function) {
      widget.controller!(controller!);
    }
  }
  @override
  Widget build(BuildContext context) {
    // With no delay, (re)start immediately on build.
    if (widget.animate && widget.delay.inMilliseconds == 0) {
      controller?.forward();
    }
    return AnimatedBuilder(
        animation: controller!,
        builder: (BuildContext context, Widget? child) {
          return Transform.scale(
            scale: zoom.value,
            child: Opacity(
              opacity: opacity.value,
              child: widget.child,
            ),
          );
        });
  }
}
<|start_filename|>lib/src/animate_do_slides.dart<|end_filename|>
import 'package:flutter/material.dart';
/// Class [SlideInUp]:
/// [key]: optional widget key reference
/// [child]: mandatory, widget to animate
/// [duration]: how much time the animation should take
/// [delay]: delay before the animation starts
/// [controller]: optional/mandatory, exposes the animation controller created by Animate_do
/// the controller can be used to repeat, reverse and do anything you want; it's just an animation controller
/// Slides its [child] in from [from] logical pixels below its resting
/// position, over [duration], optionally waiting [delay] first.
class SlideInUp extends StatefulWidget {
  final Key? key;
  final Widget child;
  final Duration duration;
  final Duration delay;
  final Function(AnimationController)? controller;
  final bool manualTrigger;
  final bool animate;
  final double from;

  SlideInUp({
    this.key,
    required this.child,
    this.duration = const Duration(milliseconds: 600),
    this.delay = const Duration(milliseconds: 0),
    this.controller,
    this.manualTrigger = false,
    this.animate = true,
    this.from = 100,
  }) : super(key: key) {
    // A manual trigger is unusable unless the caller receives the controller.
    if (manualTrigger && controller == null) {
      throw FlutterError('If you want to use manualTrigger:true, \n\n'
          'Then you must provide the controller property, that is a callback like:\n\n'
          ' ( controller: AnimationController) => yourController = controller \n\n');
    }
  }

  @override
  _SlideInUpState createState() => _SlideInUpState();
}
/// State class, where the magic happens
/// Drives the SlideInUp effect: a single vertical-offset tween that eases
/// from widget.from down to zero.
class _SlideInUpState extends State<SlideInUp>
    with SingleTickerProviderStateMixin {
  AnimationController? controller;
  bool disposed = false;
  late Animation<double> offsetY;

  @override
  void initState() {
    super.initState();
    controller = AnimationController(duration: widget.duration, vsync: this);
    offsetY = Tween<double>(begin: widget.from, end: 0)
        .animate(CurvedAnimation(parent: controller!, curve: Curves.easeOut));

    if (!widget.manualTrigger && widget.animate) {
      // Honor the configured delay before auto-starting.
      Future.delayed(widget.delay, () {
        if (disposed) return;
        controller?.forward();
      });
    }

    // Hand the controller back to the caller when a callback was supplied.
    if (widget.controller != null) {
      widget.controller!(controller!);
    }
  }

  @override
  void dispose() {
    // Flag first so the pending delayed callback becomes a no-op.
    disposed = true;
    controller!.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    // With no delay, (re)start immediately on build.
    if (widget.animate && widget.delay.inMilliseconds == 0) {
      controller?.forward();
    }
    return AnimatedBuilder(
        animation: controller!,
        builder: (BuildContext context, Widget? child) {
          return Transform.translate(
              offset: Offset(0, offsetY.value), child: widget.child);
        });
  }
}
/// Class [SlideInDown]:
/// [key]: optional widget key reference
/// [child]: mandatory, widget to animate
/// [duration]: how much time the animation should take
/// [delay]: delay before the animation starts
/// [controller]: optional/mandatory, exposes the animation controller created by Animate_do
/// the controller can be used to repeat, reverse and do anything you want; it's just an animation controller
/// Slides its [child] in from above: a thin wrapper that delegates to
/// [SlideInUp] with the vertical offset negated.
class SlideInDown extends StatelessWidget {
  final Key? key;
  final Widget child;
  final Duration duration;
  final Duration delay;
  final Function(AnimationController)? controller;
  final bool manualTrigger;
  final bool animate;
  final double from;

  SlideInDown({
    this.key,
    required this.child,
    this.duration = const Duration(milliseconds: 600),
    this.delay = const Duration(milliseconds: 0),
    this.controller,
    this.manualTrigger = false,
    this.animate = true,
    this.from = 100,
  }) : super(key: key) {
    // A manual trigger is unusable unless the caller receives the controller.
    if (manualTrigger && controller == null) {
      throw FlutterError('If you want to use manualTrigger:true, \n\n'
          'Then you must provide the controller property, that is a callback like:\n\n'
          ' ( controller: AnimationController) => yourController = controller \n\n');
    }
  }

  @override
  Widget build(BuildContext context) {
    return SlideInUp(
      child: child,
      duration: duration,
      delay: delay,
      controller: controller,
      manualTrigger: manualTrigger,
      animate: animate,
      from: -from,
    );
  }
}
/// Class [SlideInLeft]:
/// [key]: optional widget key reference
/// [child]: mandatory, widget to animate
/// [duration]: how much time the animation should take
/// [delay]: delay before the animation starts
/// [controller]: optional/mandatory, exposes the animation controller created by Animate_do
/// the controller can be used to repeat, reverse and do anything you want; it's just an animation controller
/// Slides its [child] in from [from] logical pixels to the left of its
/// resting position, over [duration], optionally waiting [delay] first.
class SlideInLeft extends StatefulWidget {
  final Key? key;
  final Widget child;
  final Duration duration;
  final Duration delay;
  final Function(AnimationController)? controller;
  final bool manualTrigger;
  final bool animate;
  final double from;

  SlideInLeft({
    this.key,
    required this.child,
    this.duration = const Duration(milliseconds: 600),
    this.delay = const Duration(milliseconds: 0),
    this.controller,
    this.manualTrigger = false,
    this.animate = true,
    this.from = 100,
  }) : super(key: key) {
    // A manual trigger is unusable unless the caller receives the controller.
    if (manualTrigger && controller == null) {
      throw FlutterError('If you want to use manualTrigger:true, \n\n'
          'Then you must provide the controller property, that is a callback like:\n\n'
          ' ( controller: AnimationController) => yourController = controller \n\n');
    }
  }

  @override
  _SlideInLeftState createState() => _SlideInLeftState();
}
/// State class, where the magic happens
/// Drives the SlideInLeft effect: a horizontal-offset tween that eases from
/// -widget.from (off to the left) up to zero.
class _SlideInLeftState extends State<SlideInLeft>
    with SingleTickerProviderStateMixin {
  AnimationController? controller;
  // Set in dispose() so the delayed start callback never touches a dead controller.
  bool disposed = false;
  late Animation<double> animation;
  @override
  void dispose() {
    disposed = true;
    controller!.dispose();
    super.dispose();
  }
  @override
  void initState() {
    super.initState();
    controller = AnimationController(duration: widget.duration, vsync: this);
    // Negated so a positive `from` starts the child to the LEFT of its spot.
    animation = Tween<double>(begin: widget.from * -1, end: 0)
        .animate(CurvedAnimation(parent: controller!, curve: Curves.easeOut));
    if (!widget.manualTrigger && widget.animate) {
      // Honor the configured delay before auto-starting.
      Future.delayed(widget.delay, () {
        if (!disposed) {
          controller?.forward();
        }
      });
    }
    // Hand the controller back to the caller when a callback was supplied.
    if (widget.controller is Function) {
      widget.controller!(controller!);
    }
  }
  @override
  Widget build(BuildContext context) {
    // With no delay, (re)start immediately on build.
    if (widget.animate && widget.delay.inMilliseconds == 0) {
      controller?.forward();
    }
    return AnimatedBuilder(
        animation: controller!,
        builder: (BuildContext context, Widget? child) {
          return Transform.translate(
              offset: Offset(animation.value, 0), child: widget.child);
        });
  }
}
/// Class [SlideInRight]:
/// [key]: optional widget key reference
/// [child]: mandatory, widget to animate
/// [duration]: how much time the animation should take
/// [delay]: delay before the animation starts
/// [controller]: optional/mandatory, exposes the animation controller created by Animate_do
/// the controller can be used to repeat, reverse and do anything you want; it's just an animation controller
/// Slides its [child] in from the right: a thin wrapper that delegates to
/// [SlideInLeft] with the horizontal offset negated.
class SlideInRight extends StatelessWidget {
  final Key? key;
  final Widget child;
  final Duration duration;
  final Duration delay;
  final Function(AnimationController)? controller;
  final bool manualTrigger;
  final bool animate;
  final double from;

  SlideInRight({
    this.key,
    required this.child,
    this.duration = const Duration(milliseconds: 600),
    this.delay = const Duration(milliseconds: 0),
    this.controller,
    this.manualTrigger = false,
    this.animate = true,
    this.from = 100,
  }) : super(key: key) {
    // A manual trigger is unusable unless the caller receives the controller.
    if (manualTrigger && controller == null) {
      throw FlutterError('If you want to use manualTrigger:true, \n\n'
          'Then you must provide the controller property, that is a callback like:\n\n'
          ' ( controller: AnimationController) => yourController = controller \n\n');
    }
  }

  @override
  Widget build(BuildContext context) {
    return SlideInLeft(
      child: child,
      duration: duration,
      delay: delay,
      controller: controller,
      manualTrigger: manualTrigger,
      animate: animate,
      from: -from,
    );
  }
}
<|start_filename|>example/main.dart<|end_filename|>
import 'package:flutter/material.dart';
import 'package:animate_do/animate_do.dart';
/// Boots the example application.
void main() {
  runApp(MyApp());
}
/// Example app: a single centered square dropped in with [BounceInDown].
class MyApp extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    final demo = Center(child: BounceInDown(child: Square()));
    return MaterialApp(
      title: 'Material App',
      home: Scaffold(body: demo),
    );
  }
}
/// A plain 50x50 blue box used as the animation subject.
class Square extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    const double side = 50;
    return Container(
      width: side,
      height: side,
      decoration: BoxDecoration(color: Colors.blueAccent),
    );
  }
}
<|start_filename|>test/animate_do_test.dart<|end_filename|>
// import 'package:flutter_test/flutter_test.dart';
// import 'package:animate_do/animate_do.dart';
/// Entry point for the (currently empty) test suite; kept as a placeholder
/// so `flutter test` succeeds until real widget tests are added.
void main() {
  // Testing in the future
}
| Arkangel12/animate_do_package |
<|start_filename|>cmd/full.go<|end_filename|>
// Copyright © 2018 <NAME> <EMAIL>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package cmd
import (
"os"
"fmt"
"strings"
"github.com/fubarhouse/ansible-role-tester/util"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
// newFullCmd builds the "full" cobra command, which drives the complete
// container-based test cycle: create a container, install requirements,
// syntax-check the role, run it, verify idempotence, then destroy the
// container. PostRun translates the collected report into an exit code.
func newFullCmd() *cobra.Command {
	// Shared state between Run and PostRun
	var config util.AnsibleConfig
	var report util.AnsibleReport
	return &cobra.Command{
		Use:   "full",
		Short: "Complete end-to-end test process.",
		Long: `Runs a complete end-to-end process which performs the following:
 - creates a container
 - installs a requirements file
 - test the role syntax
 - runs the role
 - tests for idempotence
 - removes the container
You should be able to dockerRun all of this from the role folder on
the local file system. If you encounter errors, there's a lot
of flexibility in configuration, just change the defaults as
required.
`,
		Run: func(cmd *cobra.Command, args []string) {
			// Snapshot every flag value into a single config struct.
			config = util.AnsibleConfig{
				HostPath:         source,
				Inventory:        inventory,
				RemotePath:       destination,
				ExtraRolesPath:   extraRoles,
				LibraryPath:      libraryPath,
				RequirementsFile: requirements,
				PlaybookFile:     playbook,
				Verbose:          verbose,
				Remote:           remote,
				Quiet:            quiet,
			}
			var dist util.Distribution
			if !custom {
				var e error
				dist, e = util.GetDistribution(image, image, "/sbin/init", "/sys/fs/cgroup:/sys/fs/cgroup:ro", user, distro)
				if e != nil && !quiet {
					log.Fatalln("Incompatible distribution was inputted.")
				}
			} else {
				// Custom image: split "user/container:tag" into its parts.
				// NOTE(review): these index the Split results directly, so an
				// image reference lacking "/" or ":" would panic here — confirm
				// the input is validated before this point.
				dist = *util.NewCustomDistribution()
				user := strings.Split(image, "/")[0]
				container := strings.Split(image, ":")[0]
				container = strings.Split(container, "/")[1]
				tag := strings.Split(image, ":")[1]
				dist.Privileged = true
				util.CustomDistributionValueSet(&dist, "Name", containerID)
				//util.CustomValueSet(&dist, "Privileged", "true")
				util.CustomDistributionValueSet(&dist, "Container", fmt.Sprintf("%s/%s:%s", user, container, tag))
				util.CustomDistributionValueSet(&dist, "User", user)
				util.CustomDistributionValueSet(&dist, "Distro", image)
				util.CustomFamilyValueSet(&dist.Family, "Initialise", initialise)
				util.CustomFamilyValueSet(&dist.Family, "Volume", volume)
			}
			dist.CID = containerID
			if !config.IsAnsibleRole() {
				if !quiet {
					log.Fatalf("Path %v is not recognized as an Ansible role.", config.HostPath)
				}
				os.Exit(util.NotARoleCode)
			}
			// Resolve inventory/requirements/playbook paths for the container.
			util.MapInventory(dist.CID, &config)
			util.MapRequirements(&config)
			util.MapPlaybook(&config)
			report = util.NewReport(&config)
			report.Meta.ReportFile = reportFilename
			report.Ansible.Distribution = dist
			// Start the container only if one is not already running.
			if !dist.DockerCheck() {
				dist.DockerRun(&config, &report)
				report.Docker.Run = dist.DockerCheck()
			}
			hosts, _ := dist.AnsibleHosts(&config, &report)
			report.Ansible.Hosts = hosts
			if remote {
				for _, host := range hosts {
					if host == "localhost" {
						log.Errorln("remote runs should be run directly, not through this tool")
						dist.DockerKill(quiet)
					}
				}
			}
			report.Ansible.Requirements = dist.RoleInstall(&config)
			// Syntax check, then run, then idempotence — each stage gated on
			// the previous one succeeding. Remote mode uses the *Remote variants.
			if !remote {
				report.Ansible.Syntax = dist.RoleSyntaxCheck(&config)
				if report.Ansible.Syntax {
					report.Ansible.Run.Result, report.Ansible.Run.Time = dist.RoleTest(&config)
				}
				if report.Ansible.Run.Result {
					report.Ansible.Idempotence.Result, report.Ansible.Idempotence.Time = dist.IdempotenceTest(&config)
				}
			} else {
				report.Ansible.Syntax = dist.RoleSyntaxCheckRemote(&config)
				if report.Ansible.Syntax {
					report.Ansible.Run.Result, report.Ansible.Run.Time = dist.RoleTestRemote(&config)
				}
				if report.Ansible.Run.Result {
					report.Ansible.Idempotence.Result, report.Ansible.Idempotence.Time = dist.IdempotenceTestRemote(&config)
				}
			}
			// Always tear the container down, then record whether it is gone.
			dist.DockerKill(quiet)
			if !dist.DockerCheck() {
				report.Docker.Kill = true
			}
			if reportProvided {
				report.Ansible.Config = config
				report.Printf()
			}
		},
		// Analyze report and return the proper exit code.
		PostRun: func(cmd *cobra.Command, args []string) {
			// fmt.Println("PostRun called")
			if !report.Docker.Run {
				os.Exit(util.DockerRunCode)
			} else if !report.Ansible.Syntax {
				os.Exit(util.AnsibleSyntaxCode)
			} else if !report.Ansible.Run.Result {
				os.Exit(util.AnsibleRunCode)
			} else if !report.Ansible.Idempotence.Result {
				os.Exit(util.AnsibleIdempotenceCode)
			} else {
				os.Exit(util.OKCode)
			}
		},
	}
}
// addFullFlags registers every flag the "full" command accepts, binding each
// to its package-level variable. dir seeds the default for --source
// (normally the current working directory).
func addFullFlags(fullCmd *cobra.Command, dir string) {
	fullCmd.Flags().StringVarP(&containerID, "name", "n", containerID, "Name of the container")
	fullCmd.Flags().StringVarP(&source, "source", "s", dir, "Location of the role to test")
	fullCmd.Flags().StringVarP(&destination, "destination", "d", "", "Location which the role will be mounted to")
	fullCmd.Flags().StringVarP(&requirements, "requirements", "r", "", "Path to requirements file.")
	fullCmd.Flags().StringVarP(&extraRoles, "extra-roles", "x", "", "Path to roles folder with dependencies.")
	fullCmd.Flags().StringVarP(&playbook, "playbook", "p", "playbook.yml", "The filename of the playbook")
	fullCmd.Flags().BoolVarP(&noOutput, "no-output", "o", false, "Hide output from all Docker commands")
	fullCmd.Flags().BoolVarP(&quiet, "quiet", "q", false, "Enable quiet mode")
	fullCmd.Flags().BoolVarP(&verbose, "verbose", "v", false, "Enable verbose mode for Ansible commands.")
	fullCmd.Flags().BoolVarP(&custom, "custom", "c", false, "Provide my own custom distribution.")
	fullCmd.Flags().StringVarP(&inventory, "inventory", "e", "", "Inventory file")
	fullCmd.Flags().BoolVarP(&remote, "remote", "m", false, "Run the test remotely to the container")
	fullCmd.Flags().BoolVarP(&reportProvided, "report", "f", false, "Provide a report after completion")
	fullCmd.Flags().StringVarP(&reportFilename, "report-output", "b", "report.yml", "Filename in current working directory to write a report to")
	fullCmd.Flags().StringVarP(&libraryPath, "library", "", "", "Path to library folder with modules.")
	fullCmd.Flags().StringVarP(&initialise, "initialise", "a", "/bin/systemd", "The initialise command for the image")
	fullCmd.Flags().StringVarP(&volume, "volume", "l", "/sys/fs/cgroup:/sys/fs/cgroup:ro", "The volume argument for the image")
	fullCmd.Flags().StringVarP(&image, "image", "i", "", "The image reference to use.")
	fullCmd.Flags().StringVarP(&user, "user", "u", "fubarhouse", "Selectively choose a compatible docker image from a specified user.")
	fullCmd.Flags().StringVarP(&distro, "distribution", "t", "ubuntu1804", "Selectively choose a compatible docker image of a specified distribution.")
}
// init wires the fully-flagged "full" command into the root command,
// defaulting the role source to the current working directory.
func init() {
	workingDir, _ := os.Getwd()
	cmd := newFullCmd()
	addFullFlags(cmd, workingDir)
	rootCmd.AddCommand(cmd)
}
// InitFullCmdForTest builds a fully-flagged "full" command whose --source
// defaults to dir; intended for use by tests.
func InitFullCmdForTest(dir string) *cobra.Command {
	cmd := newFullCmd()
	addFullFlags(cmd, dir)
	return cmd
}
<|start_filename|>util/distributions.go<|end_filename|>
package util
import (
"errors"
"strings"
"fmt"
"reflect"
log "github.com/sirupsen/logrus"
)
// A Distribution declares the options to
// pass to Docker to dockerRun and test the container.
type Distribution struct {
	// CID is the name/id of the container.
	// Left empty in the package-level declarations; assigned at run time.
	CID string

	// Name is the identifying name of the distribution
	Name string

	// Privileged is a boolean to indicate to use privileged
	Privileged bool

	// The fully qualified container name in the format:
	// name/image:version - ie fubarhouse/docker-ansible:bionic
	Container string

	// User is the user associated to the image file, used
	// when searching for a user from the command line tool.
	User string

	// Distro is the distro associated to the image file, used
	// when searching for a distro from the command line tool.
	Distro string

	// Family associated to this distribution.
	Family Family
}
// Family is a set of characteristics describing a family of linux distributions.
// For example, ubuntu, centos, debian or fedora.
type Family struct {
	// Name of the family, e.g. "Debian".
	Name string
	// Initialise is the init command for the image (see the --initialise flag).
	Initialise string
	// Volume is the volume argument for the image (see the --volume flag).
	Volume string
}
// CentOS Family Distribution Identifier
var CentOS = Family{
	Name:       "CentOS",
	Initialise: "/sbin/init",
	Volume:     "/sys/fs/cgroup:/sys/fs/cgroup:ro",
}

// Debian Family Distribution Identifier
var Debian = Family{
	Name:       "Debian",
	Initialise: "/bin/systemd",
	Volume:     "/sys/fs/cgroup:/sys/fs/cgroup:ro",
}

// Fedora Family Distribution Identifier
var Fedora = Family{
	Name:       "Fedora",
	Initialise: "/usr/lib/systemd/systemd",
	Volume:     "/sys/fs/cgroup:/sys/fs/cgroup:ro",
}

// Ubuntu Family Distribution Identifier
var Ubuntu = Family{
	Name:       "Ubuntu",
	Initialise: "/sbin/init",
	Volume:     "/sys/fs/cgroup:/sys/fs/cgroup:ro",
}
// CentOS6 Distribution declaration
var CentOS6 = Distribution{Name: "centos6", Privileged: true, Container: "fubarhouse/docker-ansible:centos-6", User: "fubarhouse", Distro: "centos6", Family: CentOS}

// CentOS7 Distribution declaration
var CentOS7 = Distribution{Name: "centos7", Privileged: true, Container: "fubarhouse/docker-ansible:centos-7", User: "fubarhouse", Distro: "centos7", Family: CentOS}

// DebianWheezy Distribution declaration
var DebianWheezy = Distribution{Name: "wheezy", Privileged: true, Container: "fubarhouse/docker-ansible:wheezy", User: "fubarhouse", Distro: "debian7", Family: Debian}

// DebianJessie Distribution declaration
var DebianJessie = Distribution{Name: "jessie", Privileged: true, Container: "fubarhouse/docker-ansible:jessie", User: "fubarhouse", Distro: "debian8", Family: Debian}

// DebianStretch Distribution declaration
var DebianStretch = Distribution{Name: "stretch", Privileged: true, Container: "fubarhouse/docker-ansible:stretch", User: "fubarhouse", Distro: "debian9", Family: Debian}

// DebianBuster Distribution declaration
var DebianBuster = Distribution{Name: "buster", Privileged: true, Container: "fubarhouse/docker-ansible:buster", User: "fubarhouse", Distro: "debian10", Family: Debian}
// Fedora24 Distribution declaration
var Fedora24 = Distribution{Name: "fedora24", Privileged: true, Container: "fubarhouse/docker-ansible:fedora-24", User: "fubarhouse", Distro: "fedora24", Family: Fedora}

// Fedora25 Distribution declaration
var Fedora25 = Distribution{Name: "fedora25", Privileged: true, Container: "fubarhouse/docker-ansible:fedora-25", User: "fubarhouse", Distro: "fedora25", Family: Fedora}

// Fedora26 Distribution declaration
var Fedora26 = Distribution{Name: "fedora26", Privileged: true, Container: "fubarhouse/docker-ansible:fedora-26", User: "fubarhouse", Distro: "fedora26", Family: Fedora}

// Fedora27 Distribution declaration
var Fedora27 = Distribution{Name: "fedora27", Privileged: true, Container: "fubarhouse/docker-ansible:fedora-27", User: "fubarhouse", Distro: "fedora27", Family: Fedora}

// Fedora28 Distribution declaration
var Fedora28 = Distribution{Name: "fedora28", Privileged: true, Container: "fubarhouse/docker-ansible:fedora-28", User: "fubarhouse", Distro: "fedora28", Family: Fedora}

// Fedora29 Distribution declaration
var Fedora29 = Distribution{Name: "fedora29", Privileged: true, Container: "fubarhouse/docker-ansible:fedora-29", User: "fubarhouse", Distro: "fedora29", Family: Fedora}

// Fedora30 Distribution declaration
var Fedora30 = Distribution{Name: "fedora30", Privileged: true, Container: "fubarhouse/docker-ansible:fedora-30", User: "fubarhouse", Distro: "fedora30", Family: Fedora}

// Fedora31 Distribution declaration
var Fedora31 = Distribution{Name: "fedora31", Privileged: true, Container: "fubarhouse/docker-ansible:fedora-31", User: "fubarhouse", Distro: "fedora31", Family: Fedora}
// Ubuntu1204 Distribution declaration
var Ubuntu1204 = Distribution{Name: "ubuntu1204", Privileged: true, Container: "fubarhouse/docker-ansible:precise", User: "fubarhouse", Distro: "ubuntu1204", Family: Ubuntu}

// Ubuntu1210 Distribution declaration
var Ubuntu1210 = Distribution{Name: "ubuntu1210", Privileged: true, Container: "fubarhouse/docker-ansible:quantal", User: "fubarhouse", Distro: "ubuntu1210", Family: Ubuntu}

// Ubuntu1304 Distribution declaration
var Ubuntu1304 = Distribution{Name: "ubuntu1304", Privileged: true, Container: "fubarhouse/docker-ansible:raring", User: "fubarhouse", Distro: "ubuntu1304", Family: Ubuntu}

// Ubuntu1310 Distribution declaration
var Ubuntu1310 = Distribution{Name: "ubuntu1310", Privileged: true, Container: "fubarhouse/docker-ansible:saucy", User: "fubarhouse", Distro: "ubuntu1310", Family: Ubuntu}

// Ubuntu1404 Distribution declaration
var Ubuntu1404 = Distribution{Name: "ubuntu1404", Privileged: true, Container: "fubarhouse/docker-ansible:trusty", User: "fubarhouse", Distro: "ubuntu1404", Family: Ubuntu}

// Ubuntu1410 Distribution declaration
var Ubuntu1410 = Distribution{Name: "ubuntu1410", Privileged: true, Container: "fubarhouse/docker-ansible:utopic", User: "fubarhouse", Distro: "ubuntu1410", Family: Ubuntu}

// Ubuntu1504 Distribution declaration
var Ubuntu1504 = Distribution{Name: "ubuntu1504", Privileged: true, Container: "fubarhouse/docker-ansible:vivid", User: "fubarhouse", Distro: "ubuntu1504", Family: Ubuntu}

// Ubuntu1510 Distribution declaration
var Ubuntu1510 = Distribution{Name: "ubuntu1510", Privileged: true, Container: "fubarhouse/docker-ansible:wily", User: "fubarhouse", Distro: "ubuntu1510", Family: Ubuntu}

// Ubuntu1604 Distribution declaration
var Ubuntu1604 = Distribution{Name: "ubuntu1604", Privileged: true, Container: "fubarhouse/docker-ansible:xenial", User: "fubarhouse", Distro: "ubuntu1604", Family: Ubuntu}

// Ubuntu1610 Distribution declaration
var Ubuntu1610 = Distribution{Name: "ubuntu1610", Privileged: true, Container: "fubarhouse/docker-ansible:yakkety", User: "fubarhouse", Distro: "ubuntu1610", Family: Ubuntu}

// Ubuntu1704 Distribution declaration
var Ubuntu1704 = Distribution{Name: "ubuntu1704", Privileged: true, Container: "fubarhouse/docker-ansible:zesty", User: "fubarhouse", Distro: "ubuntu1704", Family: Ubuntu}

// Ubuntu1710 Distribution declaration
var Ubuntu1710 = Distribution{Name: "ubuntu1710", Privileged: true, Container: "fubarhouse/docker-ansible:artful", User: "fubarhouse", Distro: "ubuntu1710", Family: Ubuntu}

// Ubuntu1804 Distribution declaration
var Ubuntu1804 = Distribution{Name: "ubuntu1804", Privileged: true, Container: "fubarhouse/docker-ansible:bionic", User: "fubarhouse", Distro: "ubuntu1804", Family: Ubuntu}

// Ubuntu1810 Distribution declaration
var Ubuntu1810 = Distribution{Name: "ubuntu1810", Privileged: true, Container: "fubarhouse/docker-ansible:cosmic", User: "fubarhouse", Distro: "ubuntu1810", Family: Ubuntu}

// Ubuntu1904 Distribution declaration
var Ubuntu1904 = Distribution{Name: "ubuntu1904", Privileged: true, Container: "fubarhouse/docker-ansible:disco", User: "fubarhouse", Distro: "ubuntu1904", Family: Ubuntu}

// Ubuntu2004 Distribution declaration
var Ubuntu2004 = Distribution{Name: "ubuntu2004", Privileged: true, Container: "fubarhouse/docker-ansible:focal", User: "fubarhouse", Distro: "ubuntu2004", Family: Ubuntu}
// JeffCentOS6 Distribution declaration
var JeffCentOS6 = Distribution{Name: "centos6", Privileged: true, Container: "geerlingguy/docker-centos6-ansible:latest", User: "geerlingguy", Distro: "centos6", Family: CentOS}

// JeffCentOS7 Distribution declaration
var JeffCentOS7 = Distribution{Name: "centos7", Privileged: true, Container: "geerlingguy/docker-centos7-ansible:latest", User: "geerlingguy", Distro: "centos7", Family: CentOS}

// JeffUbuntu1204 Distribution declaration
var JeffUbuntu1204 = Distribution{Name: "ubuntu1204", Privileged: true, Container: "geerlingguy/docker-ubuntu1204-ansible:latest", User: "geerlingguy", Distro: "ubuntu1204", Family: Ubuntu}

// JeffUbuntu1404 Distribution declaration
var JeffUbuntu1404 = Distribution{Name: "ubuntu1404", Privileged: true, Container: "geerlingguy/docker-ubuntu1404-ansible:latest", User: "geerlingguy", Distro: "ubuntu1404", Family: Ubuntu}

// JeffUbuntu1604 Distribution declaration
var JeffUbuntu1604 = Distribution{Name: "ubuntu1604", Privileged: true, Container: "geerlingguy/docker-ubuntu1604-ansible:latest", User: "geerlingguy", Distro: "ubuntu1604", Family: Ubuntu}

// JeffUbuntu1804 Distribution declaration
var JeffUbuntu1804 = Distribution{Name: "ubuntu1804", Privileged: true, Container: "geerlingguy/docker-ubuntu1804-ansible:latest", User: "geerlingguy", Distro: "ubuntu1804", Family: Ubuntu}

// JeffDebian8 Distribution declaration
var JeffDebian8 = Distribution{Name: "debian8", Privileged: true, Container: "geerlingguy/docker-debian8-ansible:latest", User: "geerlingguy", Distro: "debian8", Family: Debian}

// JeffDebian9 Distribution declaration
var JeffDebian9 = Distribution{Name: "debian9", Privileged: true, Container: "geerlingguy/docker-debian9-ansible:latest", User: "geerlingguy", Distro: "debian9", Family: Debian}

// JeffFedora24 Distribution declaration
var JeffFedora24 = Distribution{Name: "fedora24", Privileged: true, Container: "geerlingguy/docker-fedora24-ansible:latest", User: "geerlingguy", Distro: "fedora24", Family: Fedora}

// JeffFedora27 Distribution declaration
var JeffFedora27 = Distribution{Name: "fedora27", Privileged: true, Container: "geerlingguy/docker-fedora27-ansible:latest", User: "geerlingguy", Distro: "fedora27", Family: Fedora}
// Distributions is a slice of all distributions listed above,
// grouped by family for readability.
var Distributions = []Distribution{
	CentOS6, CentOS7,
	DebianWheezy, DebianJessie, DebianStretch, DebianBuster,
	Fedora24, Fedora25, Fedora26, Fedora27, Fedora28, Fedora29, Fedora30, Fedora31,
	Ubuntu1204, Ubuntu1210, Ubuntu1304, Ubuntu1310, Ubuntu1404, Ubuntu1410,
	Ubuntu1504, Ubuntu1510, Ubuntu1604, Ubuntu1610, Ubuntu1704, Ubuntu1710,
	Ubuntu1804, Ubuntu1810, Ubuntu1904, Ubuntu2004,
	JeffCentOS6, JeffCentOS7,
	JeffUbuntu1204, JeffUbuntu1404, JeffUbuntu1604, JeffUbuntu1804,
	JeffDebian8, JeffDebian9,
	JeffFedora24, JeffFedora27,
}
// NewCustomDistribution will return an empty distribution, ready to be
// populated via the CustomDistributionValueSet helpers.
func NewCustomDistribution() *Distribution {
	return &Distribution{}
}
// CustomDistributionValueSet will set a field to a given value from a Distribution.
//
// Only exported string fields can be set through this helper. Previously,
// naming a valid but non-string field (e.g. "Privileged") caused
// reflect.Value.SetString to panic; such requests now return the error
// instead.
func CustomDistributionValueSet(dist *Distribution, key, value string) error {
	v := reflect.ValueOf(dist).Elem().FieldByName(key)
	if !v.IsValid() || v.Kind() != reflect.String || !v.CanSet() {
		return errors.New("invalid key/value pair was specified")
	}
	v.SetString(value)
	return nil
}
// CustomFamilyValueSet will set a field to a given value from a Family.
//
// Only exported string fields can be set through this helper; naming a
// missing or non-string field returns an error instead of letting
// reflect.Value.SetString panic.
func CustomFamilyValueSet(family *Family, key, value string) error {
	v := reflect.ValueOf(family).Elem().FieldByName(key)
	if !v.IsValid() || v.Kind() != reflect.String || !v.CanSet() {
		return errors.New("invalid key/value pair was specified")
	}
	v.SetString(value)
	return nil
}
// CustomDistributionValueGet will get a field value from a Distribution.
//
// The value is rendered with %v so non-string fields (e.g. the bool
// Privileged) format as "true"/"false" rather than the %s error text
// "%!s(bool=true)". String fields are returned unchanged.
func CustomDistributionValueGet(dist *Distribution, key string) (string, error) {
	v := reflect.ValueOf(dist).Elem().FieldByName(key)
	if !v.IsValid() {
		return "", errors.New("could not find the specified field")
	}
	return fmt.Sprintf("%v", v.Interface()), nil
}
// CustomFamilyValueGet will get a field value from a Family.
//
// The value is rendered with %v so any non-string field would format
// sensibly; for Family's current all-string fields this matches the old
// behavior exactly.
func CustomFamilyValueGet(family *Family, key string) (string, error) {
	v := reflect.ValueOf(family).Elem().FieldByName(key)
	if !v.IsValid() {
		return "", errors.New("could not find the specified field")
	}
	return fmt.Sprintf("%v", v.Interface()), nil
}
// GetDistribution will get the distribution object to allow dynamic
// loading of different distributions. A suitable struct will be compiled
// from the inputs and returned with an error if the specified container
// cannot be found.
//
// NOTE(review): target, init and volume are currently unreferenced in this
// function body; confirm whether they are reserved for future use.
func GetDistribution(container, target, init, volume, user, distro string) (Distribution, error) {
	// We will search for the exact container.
	for _, dist := range Distributions {
		// Check for explicit matches using image.
		if dist.Container == container {
			return dist, nil
		}
		// Check for explicit matches for user and distro.
		if dist.User == user && dist.Distro == distro {
			return dist, nil
		}
	}
	// No known distribution matched: ask Docker whether the image exists
	// locally, purely to log a more helpful message before failing.
	c, _ := DockerExec([]string{
		"images",
		container,
	}, false)
	if !strings.Contains(c, container) {
		log.Errorf("no valid image was found for '%v'\n", container)
	}
	// NOTE(review): an error is returned even when the image does exist
	// locally — confirm callers rely on this to fall back to custom mode.
	return Distribution{},
		errors.New("could not find matching distribution")
}
| fubarhouse/ansible-role-tester |
<|start_filename|>gen/tests/unioncmds/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package unioncmds
import (
"encoding/binary"
"errors"
)
// UnionCmds is the parsed form of the trunnel "UnionCmds" structure.
// (Generated code — regenerate with trunnel rather than editing by hand.)
type UnionCmds struct {
	Tag uint8     // selects the union arm: 1 = empty, 2 = disallowed, anything else = X
	X   [2]uint32 // populated only for the default (unrecognised-tag) arm
	Y   uint32    // trailing field, parsed after the union regardless of Tag
}
// Parse decodes data into u and returns the unconsumed remainder of the
// buffer, or an error if the input is truncated or uses the disallowed tag.
func (u *UnionCmds) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		// Tag: one byte selecting the union arm.
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		u.Tag = cur[0]
		cur = cur[1:]
	}
	{
		switch {
		case u.Tag == 1:
			// Arm 1 carries no payload.
		case u.Tag == 2:
			{
				// Arm 2 is explicitly forbidden by the trunnel definition.
				return nil, errors.New("disallowed case")
			}
		default:
			{
				// Any other tag: two big-endian uint32 values fill X.
				for idx := 0; idx < 2; idx++ {
					if len(cur) < 4 {
						return nil, errors.New("data too short")
					}
					u.X[idx] = binary.BigEndian.Uint32(cur)
					cur = cur[4:]
				}
			}
		}
	}
	{
		// Y: big-endian uint32 following the union.
		if len(cur) < 4 {
			return nil, errors.New("data too short")
		}
		u.Y = binary.BigEndian.Uint32(cur)
		cur = cur[4:]
	}
	return cur, nil
}
// ParseUnionCmds decodes data into a freshly allocated UnionCmds,
// discarding any trailing bytes.
func ParseUnionCmds(data []byte) (*UnionCmds, error) {
	var u UnionCmds
	if _, err := u.Parse(data); err != nil {
		return nil, err
	}
	return &u, nil
}
<|start_filename|>gen/tests/unionlen/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package unionlen
import (
"encoding/binary"
"errors"
)
// UnionWithLen is the parsed form of the trunnel "UnionWithLen" structure.
// (Generated code — regenerate with trunnel rather than editing by hand.)
type UnionWithLen struct {
	Tag      uint16
	UnionLen uint16 // number of bytes occupied by the union body
	// Tag == 1: an RGB triple.
	R uint8
	G uint8
	B uint8
	// Tag == 2: a date.
	Year  uint16
	Month uint8
	Day   uint8
	// Any other tag: the raw union bytes, kept verbatim.
	Unparseable []uint8
	// Field following the union; always parsed.
	RightAfterTheUnion uint16
}
// Parse decodes data into u and returns the unconsumed remainder of the
// buffer, or an error on truncated input or a union arm that does not
// consume exactly UnionLen bytes.
func (u *UnionWithLen) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		if len(cur) < 2 {
			return nil, errors.New("data too short")
		}
		u.Tag = binary.BigEndian.Uint16(cur)
		cur = cur[2:]
	}
	{
		if len(cur) < 2 {
			return nil, errors.New("data too short")
		}
		u.UnionLen = binary.BigEndian.Uint16(cur)
		cur = cur[2:]
	}
	{
		// The union occupies exactly UnionLen bytes: carve that window out
		// of cur and stash everything after it in restore for later.
		if len(cur) < int(u.UnionLen) {
			return nil, errors.New("data too short")
		}
		restore := cur[int(u.UnionLen):]
		cur = cur[:int(u.UnionLen)]
		switch {
		case u.Tag == 1:
			// Tag 1: three single-byte colour components.
			{
				if len(cur) < 1 {
					return nil, errors.New("data too short")
				}
				u.R = cur[0]
				cur = cur[1:]
			}
			{
				if len(cur) < 1 {
					return nil, errors.New("data too short")
				}
				u.G = cur[0]
				cur = cur[1:]
			}
			{
				if len(cur) < 1 {
					return nil, errors.New("data too short")
				}
				u.B = cur[0]
				cur = cur[1:]
			}
		case u.Tag == 2:
			// Tag 2: a year/month/day date.
			{
				if len(cur) < 2 {
					return nil, errors.New("data too short")
				}
				u.Year = binary.BigEndian.Uint16(cur)
				cur = cur[2:]
			}
			{
				if len(cur) < 1 {
					return nil, errors.New("data too short")
				}
				u.Month = cur[0]
				cur = cur[1:]
			}
			{
				if len(cur) < 1 {
					return nil, errors.New("data too short")
				}
				u.Day = cur[0]
				cur = cur[1:]
			}
			{
				// Discard any remaining bytes of the union window.
				cur = []byte{}
			}
		default:
			// Unknown tag: consume every remaining union byte verbatim.
			{
				u.Unparseable = make([]uint8, 0)
				for len(cur) > 0 {
					var tmp uint8
					if len(cur) < 1 {
						return nil, errors.New("data too short")
					}
					tmp = cur[0]
					cur = cur[1:]
					u.Unparseable = append(u.Unparseable, tmp)
				}
			}
		}
		// Every arm must have consumed the union window exactly.
		if len(cur) > 0 {
			return nil, errors.New("trailing data disallowed")
		}
		cur = restore
	}
	{
		if len(cur) < 2 {
			return nil, errors.New("data too short")
		}
		u.RightAfterTheUnion = binary.BigEndian.Uint16(cur)
		cur = cur[2:]
	}
	return cur, nil
}
// ParseUnionWithLen parses a UnionWithLen from the front of data.
// It returns an error if data does not contain a valid encoding.
func ParseUnionWithLen(data []byte) (*UnionWithLen, error) {
	v := new(UnionWithLen)
	if _, err := v.Parse(data); err != nil {
		return nil, err
	}
	return v, nil
}
<|start_filename|>gen/tests/unioncmds/gen-marshallers_test.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package unioncmds
import "testing"
// TestUnionCmdsCorpus checks that known-good corpus inputs parse
// without error.
func TestUnionCmdsCorpus(t *testing.T) {
	corpus := [][]byte{
		{0x1, 0x1, 0x94, 0xfd, 0xc2},
		{0x87, 0x4e, 0xa3, 0xae, 0x5e, 0xfa, 0x2f, 0xfc, 0xcc, 0x1, 0x94, 0xfd, 0xc2},
	}
	for _, data := range corpus {
		if _, err := ParseUnionCmds(data); err != nil {
			t.Fatal(err)
		}
	}
}
<|start_filename|>tv/selector.go<|end_filename|>
package tv
import "math/rand"
// Selector selects which vectors to keep from a given list.
type Selector interface {
	// SelectVectors returns the subset of the given vectors to keep.
	SelectVectors([]Vector) []Vector
}
// SelectorFunc implements Selector interface with a plain function,
// in the style of http.HandlerFunc.
type SelectorFunc func([]Vector) []Vector
// SelectVectors calls f, satisfying the Selector interface.
func (f SelectorFunc) SelectVectors(vs []Vector) []Vector {
	return f(vs)
}
// Exhaustive selects all vectors: the input slice is returned unchanged.
var Exhaustive Selector = SelectorFunc(func(vs []Vector) []Vector { return vs })
// RandomSampleSelector selects a random sample of up to n vectors.
// If the input has n or fewer vectors it is returned unchanged;
// otherwise n vectors are chosen uniformly at random without
// replacement (using the package-level math/rand source).
func RandomSampleSelector(n int) Selector {
	return SelectorFunc(func(vectors []Vector) []Vector {
		total := len(vectors)
		if total <= n {
			return vectors
		}
		perm := rand.Perm(total)
		chosen := make([]Vector, n)
		for i, j := range perm[:n] {
			chosen[i] = vectors[j]
		}
		return chosen
	})
}
<|start_filename|>doc.go<|end_filename|>
// Package trunnel is a code generator for binary parsing.
package trunnel
<|start_filename|>gen/tests/vararray/gen-fuzz.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
// +build gofuzz
package vararray
// FuzzVarArray is a go-fuzz entry point: it reports 1 when data
// parses as a VarArray and 0 otherwise.
func FuzzVarArray(data []byte) int {
	if _, err := ParseVarArray(data); err != nil {
		return 0
	}
	return 1
}
<|start_filename|>gen/tests/rem/gen-marshallers_test.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package rem
import "testing"
// TestRemCorpus checks that known-good corpus inputs parse without
// error.
func TestRemCorpus(t *testing.T) {
	corpus := [][]byte{
		{0xa5, 0xee, 0xe8, 0x2a, 0x62, 0xf6, 0x5f, 0xf9, 0x4f, 0x6e, 0xc8, 0x73, 0x5b, 0x4, 0x12, 0xff, 0xd3, 0x41, 0xc0},
	}
	for _, data := range corpus {
		if _, err := ParseRem(data); err != nil {
			t.Fatal(err)
		}
	}
}
<|start_filename|>docs/manual.html<|end_filename|>
<p><head>
<title>Trunnel manual</title>
<style>
<!--
body { padding: 0 10% 0 15%; }
h1 { text-indent: -10%; }
h2 { text-indent: -7%; }
h3,h4,h5,h6 {text-indent: -3%; }
-->
</style>
</head></p>
<h1>Trunnel: a simple binary-format parser/encoder.</h1>
<p>Trunnel is a tool that takes descriptions of binary formats and
generates C code to parse and encode them. It's designed for
simplicity rather than maximum generality; if you need a tool that can
parse nearly anything at the cost of a bit more complexity, have a
look at "nail" instead.</p>
<p>Here are the goals for Trunnel:</p>
<ul>
<li>Support all the binary formats used by Tor.</li>
<li>Generate human-readable, obviously correct code.</li>
<li>Generate secure code.</li>
<li>Generate code that compiles without warnings on a wide variety of
compilers and platforms.</li>
<li>Provide a somewhat idiot-proof API.</li>
<li>Very high test coverage (currently, at 99% for code generator, 99% for
generated code, and 100% for support library).</li>
<li>Be efficient enough for its performance not to matter for most
applications.</li>
<li>Have a specification format that's easy to read and write.</li>
</ul>
<p>Here are some non-goals for Trunnel:</p>
<ul>
<li>Support every possible format.</li>
<li>Parse formats that aren't byte-based.</li>
<li>Parse formats that require backtracking.</li>
<li>Run as fast as possible.</li>
<li>Support very old versions of Python with the code generator.</li>
<li>Support pre-ANSI C with the code generator.</li>
<li>Generate optimal code</li>
<li>Generate code with no redundant checks</li>
<li>Handle input incrementally</li>
</ul>
<h2>1. About this document</h2>
<p>I'll start with a quick example of the Trunnel workflow, and then move on to
document the format of the files that Trunnel uses to define binary formats.
After that, I'll briefly discuss the C functions that Trunnel exposes to the
outside world.</p>
<h2>2. Working with Trunnel</h2>
<p>Here's a quick overview of what Trunnel can do for you.</p>
<p>First, you write a simple format description of your binary formats in a
trunnel file. It can look something like:</p>
<pre><code>const SHA256_LEN = 32;
struct sha256_digest {
u8 digest[SHA256_LEN];
}
struct message {
u8 version;
u8 command;
u16 length IN [0..4096];
u8 body[length]
u8 digest[SHA256_LEN];
}
</code></pre>
<p>Then you save that file with a name like <code>myformat.trunnel</code> and run trunnel
on it. (Right now, that's <code>python -m trunnel myformat.trunnel</code> .) If the
input file is well-formatted, Trunnel will generate a header file
(<code>myformat.h</code>) and an implementation file (<code>myformat.c</code>).</p>
<p>To use this code in your program, include the header file and build and link
with the C file. You'll also need to distribute both generated code files,
along with trunnel-impl.h, trunnel.h, and trunnel.c. (Trunnel will
emit those files for you when you run <code>python -m trunnel
--write-c-files</code>.)</p>
<p>Then you can write code that uses the generated functions documented in
myformat.h.</p>
<h2>3. Writing trunnel definitions</h2>
<p>A trunnel definition file can contain any number of three types of
definitions: constants, structure declarations, and extern declarations.</p>
<p>Both kinds of C comments are allowed: C99 comments that start with a
<code>//</code>, and the C comments that start with a <code>/*</code>. Additionally, you
can insert doxygen-style comments that start with <code>/**</code> before any
structure, constant, or structure member. These will be included
verbatim in the output file.</p>
<p>Constants are declared with:</p>
<pre><code>const <CONST_NAME> = <VAL> ;
</code></pre>
<p>As in:</p>
<pre><code>const N_ELEMENTS = 100;
const U8_MAX = 0xff;
</code></pre>
<p>Constants can be used in the file anywhere that a number can be used. The
name of a constant must be a C identifier in all-caps.</p>
<p>Structure declarations define a format that trunnel can parse. They take
the form of:</p>
<pre><code>struct <ID> {
<member>
<member>
...
}
</code></pre>
<p>As in:</p>
<pre><code>struct rgb {
u8 r;
u8 g;
u8 b;
}
</code></pre>
<p>The names of structures and their members may be any valid C
identifier containing at least one lowercase letter. Structures can
contain 0, 1, or more members. We define the possible member types
below.</p>
<p>An extern structure definition takes the form of:</p>
<pre><code>extern struct <ID>;
</code></pre>
<p>As in:</p>
<pre><code>extern struct message;
</code></pre>
<p>An extern struct definition declares that a structure will be defined in
another trunnel file, and that it's okay to use it in this trunnel file.</p>
<p>Finally, an options definition takes the form of:</p>
<pre><code>trunnel options <ID_LIST> ;
</code></pre>
<p>As in:</p>
<pre><code>trunnel options foo, bar, baz;
</code></pre>
<p>These options are used to control code generation.</p>
<h3>Structure members: integers</h3>
<p>All integers are given as 8, 16, 32, or 64-bit values:</p>
<pre><code>u8 value_a;
u16 value_b;
u32 value_c;
u64 value_d;
</code></pre>
<p>These values are encoded and parsed in network (big-endian) order. The
corresponding values in C are generated as <code>uint8_t</code>, <code>uint16_t</code>, <code>uint32_t</code>,
and <code>uint64_t</code>.</p>
<p>(Signed values and little-endian values aren't supported.)</p>
<p>You can specify constraints for an integer value by providing a list of
one or more values and ranges.</p>
<pre><code>u8 version_num IN [ 4, 5, 6 ];
u16 length IN [ 0..16384 ];
u16 length2 IN [ 0..MAX_LEN ];
u8 version_num2 IN [ 1, 2, 4..6, 9..128 ];
</code></pre>
<p>In a newly constructed structure, all integer fields are initialized to their
lowest constrained value (or to 0 if no constraint is given).</p>
<h3>Structure members: Nested structures</h3>
<p>You can specify that one structure contains another, as in:</p>
<pre><code>struct inner inner_val;
</code></pre>
<p>You can also define the structure itself inline, as in:</p>
<pre><code>struct inner {
u16 a;
u16 b;
} inner_val;
</code></pre>
<p>It's okay to use a structure before it's defined, but Trunnel does require
that structure definitions be non-circular.</p>
<p>In a newly constructed structure, all structure fields are initialized to
NULL.</p>
<h3>Structure members: NUL-terminated strings</h3>
<p>You can specify a string whose length is determined by a terminating 0 (NUL)
byte, with:</p>
<pre><code>nulterm <ID>;
</code></pre>
<p>As in:</p>
<pre><code>nulterm string;
</code></pre>
<p>In a newly constructed structure, all nul-terminated string fields are
initialized to NULL.</p>
<h3>Structure members: fixed-length arrays</h3>
<p>A structure can contain fixed-length arrays of integers, structures, or
(8-bit) characters. The lengths of the arrays can be expressed as
decimal literals, hexadecimal literals, or constants:</p>
<pre><code>u8 ipv6_addr[16];
u32 elements[N_ELEMENTS];
struct rgb colors[2];
char hostname[0x40];
</code></pre>
<p>Each of these types is parsed and encoded by parsing or encoding its
members the specified number of times. Strings are not expected to be
NUL-terminated in the binary format.</p>
<p>Fixed-length arrays of integers are represented as arrays of the appropriate
uint*_t type. Fixed-length arrays of structures are represented as arrays of
pointers to that structure type. Fixed-length arrays of char are represented
as having one extra byte at the end, so that we can ensure that the C
representation of the array always ends with NUL -- internal NULs are
permitted, however.</p>
<p>In newly constructed structures, as before, integers are initialized to 0 and
structures are initialized to <code>NULL</code>. Character arrays are initialized to be
filled with 0-valued bytes.</p>
<h3>Structure members: variable-length arrays</h3>
<p>A structure can contain arrays of integers, structures, or characters whose
lengths depend on an earlier integer-valued field:</p>
<pre><code>u16 length;
u8 bytes[length];
u64 bignums[length];
struct rgb colors[length];
char string[length];
</code></pre>
<p>Each of these types is parsed and encoded by parsing or encoding its
members the specified number of times. Strings are not expected to be
NUL-terminated in the binary format.</p>
<p>You can also specify that a variable-length array continues to the end of the
containing structure or union by leaving its length field empty:</p>
<pre><code>u8 remaining_bytes[];
u32 remaining_words[];
struct rgb remaining_colors[];
char remaining_text[];
</code></pre>
<p>Of course, you couldn't end a structure with all four of those: they can't
<em>all</em> extend to the end of a structure. We also require that these "greedy"
arrays consume their input completely: If you specify <code>u32
remaining_words[];</code>, then the input must contain a multiple of 4 bytes, or it
will be invalid.</p>
<p>Variable-length arrays are represented internally with a dynamic array type
that expands as needed to hold all its elements. You can inspect and modify
them through a set of accessor functions documented later on.</p>
<p>In newly constructed structures, all variable-length arrays are empty.</p>
<p>It's an error to try to encode a variable-length array with a length field if
that array's length field doesn't match its actual length.</p>
<h3>Structure members: zero-length indices into the input</h3>
<p>Sometimes you need to record the position in the input that corresponds to
a position in the structure. You can use an <code>@ptr</code> field to record
a position within a structure when parsing it:</p>
<pre><code>struct s {
nulterm unsigned_header;
@ptr start_of_signed_material;
u32 bodylen;
u8 body[bodylen];
u64 flags;
@ptr end_of_signed_material;
u16 signature_len;
u8 signature[signature_len];
}
</code></pre>
<p>When an object of this type is parsed, then <code>start_of_signed_material</code>
and <code>end_of_signed_material</code> will get set to pointers into the input.
These pointers are only set when the input is parsed; you don't need
to set them to encode the object.</p>
<h3>Structure members: unions</h3>
<p>You can specify that different elements should be parsed based on some
earlier integer field:</p>
<pre><code> u8 tag;
union addr[tag] {
4 : u32 ipv4_addr;
5 : ; // Nothing to parse here.
6 : u8 ipv6_addr[16];
0xf0,0xf1 : u8 hostname_len;
char hostname[hostname_len];
0xF2 .. 0xFF : struct extension ext;
default : fail;
};
</code></pre>
<p>Only one variant of the union, depending on the given tag value, is parsed
or encoded.</p>
<p>You can specify the behavior of the union when no tag value is matched using
the <code>default:</code> label. The <code>fail</code> production is a special value that causes
parsing and encoding to always fail for a given tag value. The <code>default: fail;</code>
case is understood unless some other behavior for default is given.</p>
<p>The fields in a union are represented by storing them in the generated
structure. (To avoid user errors, no C union is generated.) Their names are
prefixed with the name of the union, so <code>ipv4_addr</code> would be stored as
<code>addr_ipv4_addr</code>, and so on.</p>
<p>When encoding a union, only the fields referenced by the actual tag value are
inspected: it's okay to encode if the other fields are invalid.</p>
<h3>Structure members: unions with length constraints</h3>
<p>Tagged unions are pretty useful for describing typed fields. But many users
of typed fields need to support unknown types in order to future-proof
themselves against later extensions. You can do this as:</p>
<pre><code>u8 tag;
u16 length;
union addr[tag] with length length {
4 : u32 ipv4_addr;
6 : u8 ipv6_addr[16];
7 : ignore;
0xEE : u32 ipv4_addr;
...;
0xEF : u32 ipv4_addr;
u8 remainder[];
0xF0 : char hostname[];
default: u8 unrecognized[];
};
</code></pre>
<p>Here, the union is required to take up a number of bytes dependent on the
value of <code>length</code>. The <code>hostname</code> and <code>unrecognized</code> cases extend to the end
of the union. The <code>...</code> in the <code>0xEE</code> case indicates that extra bytes are
accepted and ignored, whereas in the <code>0xEF</code> case, extra bytes are accepted and
stored. Unless otherwise specified, the length field must match the length
of the fields in the union exactly.</p>
<p>When encoding a union of this kind, you do <em>not</em> need to set the 'length'
field; trunnel will fill it in for you in the output automatically based on
the actual length.</p>
<p>(<em>In a future version of Trunnel</em>, length constraints might be supported
independently of unions; the code is orthogonal internally.)</p>
<h3>Structure variants: end-of-string constraints</h3>
<p>By default, trunnel allows extra data to appear after the end of a
structure when parsing it from the input. To suppress this behavior
for a given structure, you can give an end-of-string constraint:</p>
<pre><code>struct fourbytes {
u16 x;
u16 y;
eos;
}
</code></pre>
<p>(<em>This feature might go away</em> in a future version if it doesn't turn
out to be useful.)</p>
<h3>Fields that extend up to a certain point before the end of the structure</h3>
<p>Some data formats have fixed-width fields at the end, and
indeterminate-extent fields in the middle. For example, you might
have an "encrypted message" format where the first 16 bytes are a
salt, the last 32 bytes are a message authentication code, and
everything in the middle is an encrypted message. You can express
this in Trunnel with:</p>
<pre><code>struct encrypted {
u8 salt[16];
u8 message[..-32];
u8 mac[32];
}
</code></pre>
<p>The "..-32" notation means that the array should try to consume
everything up to but not including the last 32 bytes of the message.</p>
<p>You can also use this notation to indicate the extent of a union:</p>
<pre><code>struct encrypted {
u8 type;
union u[type] with length ..-32 {
1: u8 bytes[];
2: u8 salt[16];
u8 other_bytes[];
}
u64 data[4];
}
</code></pre>
<h3>Parameterizing structures</h3>
<p>Some protocols have the type or length of some structure fields depend
on settings elsewhere in the protocol. For example, you might have a
TLS-like protocol where each encrypted records's format depends on
some session parameters.</p>
<p>To support this, trunnel provides context-dependent objects:</p>
<pre><code>context stream_settings {
u8 block_mode;
u8 iv_len;
u8 block_len;
u8 mac_len;
}
struct encrypted_record with context stream_settings {
u8 iv[stream_settings.iv_len];
union msg[stream_settings.block_mode] {
0: u16 n_bytes; u8 bytes[n_bytes];
1: u16 n_blocks; struct block[n_blocks];
};
u8 mac[stream_settings.mac_len];
}
struct block with context stream_settings {
u8 body[stream_settings.block_len]
}
</code></pre>
<p>In the example above, the lengths of the <code>mac</code>, <code>iv</code>, and <code>body</code>
fields do not depend on values within the structures themselves;
instead, they depend on the values set within the <code>stream_settings</code>
context. It's similar for the tag of the <code>msg</code> union: it depends on a
value in the stream_settings context.</p>
<p>Note also that the <code>stream_settings</code> context can propagate from the
<code>encrypted_record</code> structure to the <code>block</code> structure it contains. It
is an error to include a context-dependent structure in an environment
that doesn't declare the same context dependency.</p>
<p>Contexts may only include integer types, and may not declare integer
restrictions.</p>
<h2>4. Controlling code generation with options</h2>
<p>Two options are supported in Trunnel right now:</p>
<pre><code>trunnel option opaque;
trunnel option very_opaque;
</code></pre>
<p>The <code>opaque</code> option makes the generated structures not get exposed in the
generated header files by default. You can override this and expose a single
structure name by defining <code>TRUNNEL_EXPOSE_<STRUCTNAME>_</code> in your C before
including the generated header.</p>
<p>The <code>very_opaque</code> option prevents the generated structures from being put
into the generated header files at all: you will only be able to access their
fields with the generated accessor functions.</p>
<h2>5. Using Trunnel's generated code</h2>
<p>When you run Trunnel on <code>module.trunnel</code>, it generates <code>module.c</code> and
<code>module.h</code>. Your program should include <code>module.h</code>, and compile and link
<code>module.c</code>.</p>
<p>For each structure you define in your trunnel file, Trunnel will generate a
structure with an <code>_st</code> suffix and a typedef with a <code>_t</code> suffix. For
example, <code>struct rgb</code> in your definition file will generate <code>struct rgb_st;</code>
and <code>typedef struct rgb_st rgb_t;</code> in C.</p>
<p>In addition to consulting the documentation below, you can also read the
comments in the generated header file to learn how to use the generated
functions.</p>
<p>In the examples below, I'll be assuming a structure called <code>example</code>, defined
with something like:</p>
<pre><code>struct example {
u16 shortword;
/* Contents go here... */
}
</code></pre>
<h3>Generated code: creating and destroying objects</h3>
<p>Every object gets a <code>new</code> and a <code>free</code> function:</p>
<pre><code> example_t *example_new(void);
void example_free(example_t *obj);
</code></pre>
<p>The <code>example_new()</code> function creates a new <code>example_t</code>, with its fields
initialized to 0, NULL, or to their lowest legal value (in the cases of
constrained integers).</p>
<p>The <code>example_free()</code> function frees the provided object, along with all the
objects inside it. It's okay to call it with NULL.</p>
<h3>Generated code: encoding an object</h3>
<p>If you have a filled-in object, you can encode it into a buffer:</p>
<pre><code> ssize_t example_encode(uint8_t *buf, size_t buf_len, const example_t *obj);
</code></pre>
<p>The <code>buf_len</code> parameter describes the number of available bytes in <code>buf</code> to
use for encoding <code>obj</code>. On success, this function will return the number of
bytes that it used. On failure, the function will return -2 on a truncated
result, where providing a longer <code>buf_len</code> might make it succeed, and will
return -1 if there is an error that prevents encoding the object entirely.</p>
<p>You can find out the required buffer length before the encoding, if you like:</p>
<pre><code> ssize_t example_encoded_len(const example_t *obj);
</code></pre>
<p>This function returns a negative value on an error. On success, it
returns the suggested length of the buffer to allocate for encoding
'obj'. Note that this number may be an underestimate or an
overestimate: you still need to check for truncation when encoding.</p>
<h3>Generated code: checking an object for correctness</h3>
<p>If you want to find out whether you can encode an object, or find out why an
encode operation has just failed, you can call:</p>
<pre><code> const char *example_check(const example_t *obj);
</code></pre>
<p>This function returns <code>NULL</code> if the object is correct and encodeable, and
returns a string explaining what has gone wrong otherwise.</p>
<h3>Generated code: parsing an object</h3>
<p>Here's the big one: parsing an object from a binary string.</p>
<pre><code>ssize_t example_parse(example_t **out, const uint8_t *inp, size_t inp_len);
</code></pre>
<p>Here we take up to <code>inp_len</code> bytes from the buffer <code>inp</code>. On success, this
function returns the number of bytes actually consumed, and sets <code>*out</code> to a
newly allocated <code>example_t</code> holding the parsed object. On failure, it returns
-1 if the input was completely invalid, and -2 if it was possibly truncated.</p>
<p>Note that truncation detection depends on the actual layout of your
objects. Some valid encoded objects are prefixes of other encoded
objects. In these cases, there's no way to tell that truncation has
occurred.</p>
<h3>Generated code: accessor functions</h3>
<p>For each struct member, Trunnel creates a set of set and get functions to
inspect and change its value. If you've specified the <code>opaque</code> or <code>very_opaque</code>
option, these are the only (recommended) way to view or modify a structure.</p>
<p>Each type has its own set of accessors.</p>
<p>By convention, the set accessors (the ones that modify the objects) return <code>0</code>
on success and <code>-1</code> on failure. Additionally on failure, they set an error
code on the object that prevents the object from being encoded unless the
error code is cleared.</p>
<p><strong>Integers</strong> and <strong>nul-terminated strings</strong> have a <code>get</code> and <code>set</code> function:</p>
<pre><code> struct example {
u8 a;
u16 b in [ 5..5000 ];
nulterm s;
}
</code></pre>
<p>will produce these self-explanatory accessor functions:</p>
<pre><code> uint8_t example_get_a(const example_t *ex);
int example_set_a(const example_t *ex, uint8_t val);
uint16_t example_get_b(const example_t *ex);
int example_set_b(const example_t *ex, uint16_t val);
const char *example_get_s(const example_t *ex);
int example_set_s(const example_t *ex, const char *val);
</code></pre>
<p>Note that the string set function makes a copy of its input string.</p>
<p><strong>Structures</strong> have a get, set, and set0 function:</p>
<pre><code> struct example {
struct rgb xyz;
}
</code></pre>
<p>becomes:</p>
<pre><code> rgb_t *example_get_xyz(example_t *ex);
int example_set_xyz(example_t *ex, rgb_t *val);
int example_set0_xyz(example_t *ex, rgb_t *val);
</code></pre>
<p>The <code>set</code> and <code>set0</code> functions behave identically, except that the set function
frees the previous value of the xyz field (if any), whereas the set0 function
will overwrite it.</p>
<p><strong>All arrays</strong> have functions to inspect them and change their members, so
that:</p>
<pre><code>struct example {
struct rgb colors[16];
}
// OR
struct example {
u8 n;
struct rgb colors[n];
}
</code></pre>
<p>will both produce:</p>
<pre><code>size_t example_getlen_colors(const example_t *example);
rgb_t **example_getarray_colors(const example_t *example);
rgb_t *example_get_colors(const example_t *example, size_t idx);
int example_set_colors(example_t *example, size_t idx, rgb_t *val);
int example_set0_colors(example_t *example, size_t idx, rgb_t *val);
</code></pre>
<p>In this case, the getlen function returns the length of the array, the
getarray function returns a pointer to the array itself, and the <code>get</code> and
<code>set</code> and <code>set0</code> functions access or replace the value of the array at a
given index. The set0 function is only generated in the case of an array of
structures: when it is generated, <code>set</code> frees the old value of the array at
that index (if any), and <code>set0</code> does not.</p>
<p><strong>Variable-length arrays</strong> additionally have functions that adjust their
lengths, so that :</p>
<pre><code> struct example {
u8 n;
struct rgb colors[n];
}
</code></pre>
<p>will also produce:</p>
<pre><code> int example_add_colors(example_t *example, rgb_t *val);
int example_setlen_colors(example_t *example, size_t newlen);
</code></pre>
<p>The <code>add</code> function appends a new item to the end of the array. The <code>setlen</code>
function changes the current length of the array. (If the length increases,
the new fields are padded with <code>0</code> or <code>NULL</code> as appropriate. If the length
decreases, the removed members are freed if necessary.)</p>
<p>Note that the length field <code>n</code> is not automatically kept in sync with the
length of the dynamic array <code>colors</code>.</p>
<p>Finally, <strong>variable-length arrays of char</strong> have extra functions to help you
access them as variable-length strings:</p>
<pre><code>struct example {
u8 n;
char value[n];
}
</code></pre>
<p>produces:</p>
<pre><code>const char *example_getstr_value(example_t *obj);
int example_setstr_value(example_t *obj, const char *val);
int example_setstr0_value(example_t *obj, const char *val, size_t len);
</code></pre>
<p>The <code>getstr</code> function is identical to <code>getarray</code>, except that it guarantees a
NUL-terminated result. (It can return <code>NULL</code> if it fails to NUL-terminate the
answer.) This time the <code>setstr0</code> function takes a new value and its length;
the <code>setstr</code> function just takes a value and assumes it is NUL-terminated.</p>
<h3>Generated code: the impact of contexts</h3>
<p>If you declare context-dependent structures, Trunnel will add extra
context arguments to the generated <code>encode</code>, <code>parse</code>, and <code>check</code>
functions. For example, if you say:</p>
<pre><code>context len {
u16 len;
}
struct msg with context len {
u8 tag;
u16 items[len.len];
}
</code></pre>
<p>Then trunnel will generate those functions with the prototypes:</p>
<pre><code>ssize_t msg_encode(uint8_t *buf, size_t buf_len, msg_t *obj,
const len_t *len_ctx);
ssize_t msg_encoded_len(msg_t *obj, const len_t *len_ctx);
const char *msg_check(const msg_t *obj, const len_t *len_ctx);
ssize_t msg_parse(msg_t **out, const uint8_t *inp, size_t inp_len,
const len_t *len_ctx);
</code></pre>
<p>Trunnel will also generate a declaration for the context type, along
with <code>new</code>, <code>free</code>, and accessor functions for it:</p>
<pre><code>struct len_t {
uint16_t len;
};
len_t *len_new(void);
void len_free(len_t *len);
int len_set_len(len_t *len, uint16_t newval);
uint16_t len_get_len(const len_t *len);
</code></pre>
<h3>Extending trunnel</h3>
<p>You can extend Trunnel using the 'extern struct' mechanism described above.
All you need to do is provide your own structure definition, along with
<code>parse</code>, <code>encode</code>, <code>free</code>, and <code>check</code> functions. The generated trunnel code
will use those functions as appropriate to access your extended type.</p>
<h3>Overriding allocators and other internal functions</h3>
<p>By default, trunnel uses the libc malloc implementation for its
allocation. You can override this by defining a "trunnel-local.h"
file, and defining the <code>TRUNNEL_LOCAL_H</code> macro when compiling any
trunnel C files. When you do this, your "trunnel-local.h" will get
included before any generated trunnel code.</p>
<p>To replace the allocator, you must use #define to declare replacements
for the following functions:</p>
<ul>
<li><code>trunnel_malloc</code></li>
<li><code>trunnel_calloc</code></li>
<li><code>trunnel_realloc</code> OR <code>trunnel_reallocarray</code></li>
<li><code>trunnel_free_</code> (note trailing underscore)</li>
<li><code>trunnel_strdup</code></li>
</ul>
<p>These functions work equivalently to their libc counterparts, and take
arguments in the same order.</p>
<p>You can also replace trunnel's internal-error behavior by defining one
or more of these:</p>
<ul>
<li><code>trunnel_abort</code></li>
<li><code>trunnel_assert</code></li>
</ul>
<p>These macros are also expected to behave equivalently to their libc
counterparts. They are only invoked in the case of an internal
programming error in Trunnel -- if Trunnel is implemented correctly,
they should be unreachable.</p>
<p>Finally, if you want to ensure that all objects freed by trunnel are
wiped from memory before they're freed, you can define a
<code>trunnel_memwipe</code> function. For example:</p>
<pre><code>#define trunnel_memwipe(mem, len) memset_s((mem), 0, (len));
</code></pre>
<p>Note that the standard caveats about wiping memory apply: if this
matters to you, you should know why you should be using <code>memset_s</code> or
<code>explicit_bzero</code> instead of memset here.</p>
<h3>Notes on thread-safety</h3>
<p>There are no global structures and there are no locks. It's up to you to
avoid calling multiple functions at once on the same structure. If you
manage to avoid that, Trunnel should be thread-safe.</p>
<|start_filename|>gen/tests/nulterm/nulterm_test.go<|end_filename|>
package nulterm
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestNultermParseLengthErrors confirms that every input shorter than
// the minimum NulTerm encoding is rejected.
func TestNultermParseLengthErrors(t *testing.T) {
	nt := new(NulTerm)
	for size := 0; size < 6; size++ {
		_, err := nt.Parse(make([]byte, size))
		require.Error(t, err)
	}
}
// TestNultermMissingNul confirms that a string field without a
// terminating NUL byte fails to parse.
func TestNultermMissingNul(t *testing.T) {
	data := []byte{1, 2, 3, 4, 'n', 'o', 'n', 'u', 'l'}
	_, err := new(NulTerm).Parse(data)
	assert.Error(t, err)
}
// TestNultermStandard parses a well-formed encoding and verifies the
// decoded fields and the unconsumed remainder.
func TestNultermStandard(t *testing.T) {
	input := []byte{
		1, 2, 3, 4, // X
		'h', 'e', 'l', 'l', 'o', 0, // S (NUL-terminated)
		5,                  // Y
		'r', 'e', 's', 't', // trailing data
	}
	want := &NulTerm{
		X: 0x01020304,
		S: "hello",
		Y: 5,
	}
	got := new(NulTerm)
	rest, err := got.Parse(input)
	require.NoError(t, err)
	assert.Equal(t, want, got)
	assert.Equal(t, []byte("rest"), rest)
}
<|start_filename|>gen/gen_test.go<|end_filename|>
package gen
import (
"flag"
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/require"
"github.com/mmcloughlin/trunnel/ast"
"github.com/mmcloughlin/trunnel/internal/test"
"github.com/mmcloughlin/trunnel/parse"
)
var update = flag.Bool("update", false, "update golden files")
// TestCase describes one golden-file test: a trunnel input file, the
// directory holding its expected generated output, and a short name.
type TestCase struct {
	TrunnelFile string // path to the .trunnel input file
	Dir string // directory containing the golden output files
	Name string // base name of the trunnel file, without extension
}
// NewTestCaseFromTrunnel builds a TestCase from the path to a .trunnel
// file; the case name is the file's base name without its extension.
func NewTestCaseFromTrunnel(path string) TestCase {
	dir, base := filepath.Split(path)
	return TestCase{
		TrunnelFile: path,
		Dir:         dir,
		Name:        strings.TrimSuffix(base, filepath.Ext(base)),
	}
}
// LoadTestCasesGlob expands the glob pattern and returns a TestCase for
// every matching trunnel file.
func LoadTestCasesGlob(pattern string) ([]TestCase, error) {
	matches, err := filepath.Glob(pattern)
	if err != nil {
		return nil, err
	}
	cases := make([]TestCase, 0, len(matches))
	for _, match := range matches {
		cases = append(cases, NewTestCaseFromTrunnel(match))
	}
	return cases, nil
}
// Config returns the generator configuration for this test case: the
// generated package is named after the trunnel file and output is
// written to the case's directory.
func (t TestCase) Config() Config {
	return Config{
		Package: t.Name,
		Dir:     t.Dir,
	}
}
// TestGeneratedFiles is a golden-file test: for every tests/*/*.trunnel
// file it runs the code generator and compares the produced files
// against checked-in expected output. Run with -update to regenerate
// the golden files in place instead of comparing.
func TestGeneratedFiles(t *testing.T) {
	cases, err := LoadTestCasesGlob("tests/*/*.trunnel")
	require.NoError(t, err)
	for _, c := range cases {
		t.Run(c.Name, func(t *testing.T) {
			tmp, clean := test.TempDir(t)
			defer clean()
			f, err := parse.File(c.TrunnelFile)
			require.NoError(t, err)
			cfg := c.Config()
			// In normal (non-update) runs, redirect generator output to a
			// temp dir so the golden files are left untouched.
			if !*update {
				cfg.Dir = tmp
			}
			err = Package(cfg, []*ast.File{f})
			require.NoError(t, err)
			// Files to diff against golden copies, each as its own subtest.
			cmp := []string{
				"gen-marshallers.go",
				"gen-marshallers_test.go",
				"gen-fuzz.go",
			}
			for _, path := range cmp {
				t.Run(path, func(t *testing.T) {
					got := filepath.Join(cfg.Dir, path)
					expect := filepath.Join(c.Dir, path)
					// A missing golden file means this output is not
					// checked for this case; skip rather than fail.
					if !test.FileExists(expect) {
						t.SkipNow()
					}
					test.AssertFileContentsEqual(t, expect, got)
				})
			}
		})
	}
}
<|start_filename|>tv/doc.go<|end_filename|>
// Package tv generates test vectors for trunnel types.
package tv
<|start_filename|>parse/internal/parser/gen-parser.go<|end_filename|>
// Code generated by pigeon; DO NOT EDIT.
package parser
import (
"bytes"
"errors"
"fmt"
"io"
"io/ioutil"
"math"
"os"
"sort"
"strconv"
"strings"
"sync"
"unicode"
"unicode/utf8"
"github.com/mmcloughlin/trunnel/ast"
)
const lingeringDeclarationsKey = "lingering_declarations"
// addLingeringDeclaration appends a declaration to the slice stored in
// the shared parser state so it can be collected after the parse.
func (c *current) addLingeringDeclaration(d interface{}) {
	existing := []interface{}{}
	if v, ok := c.state[lingeringDeclarationsKey]; ok {
		existing = v.([]interface{})
	}
	c.state[lingeringDeclarationsKey] = append(existing, d)
}
// getLingeringDeclarations returns the declarations accumulated in the
// parser state, or an empty slice when none have been recorded.
func (c *current) getLingeringDeclarations() []interface{} {
	v, ok := c.state[lingeringDeclarationsKey]
	if !ok {
		return []interface{}{}
	}
	return v.([]interface{})
}
var g = &grammar{
rules: []*rule{
{
name: "File",
pos: position{line: 28, col: 1, offset: 587},
expr: &actionExpr{
pos: position{line: 28, col: 9, offset: 595},
run: (*parser).callonFile1,
expr: &seqExpr{
pos: position{line: 28, col: 9, offset: 595},
exprs: []interface{}{
&ruleRefExpr{
pos: position{line: 28, col: 9, offset: 595},
name: "_",
},
&labeledExpr{
pos: position{line: 28, col: 11, offset: 597},
label: "ds",
expr: &zeroOrMoreExpr{
pos: position{line: 28, col: 14, offset: 600},
expr: &ruleRefExpr{
pos: position{line: 28, col: 14, offset: 600},
name: "Declaration",
},
},
},
&ruleRefExpr{
pos: position{line: 28, col: 27, offset: 613},
name: "_",
},
&ruleRefExpr{
pos: position{line: 28, col: 29, offset: 615},
name: "EOF",
},
},
},
},
},
{
name: "Declaration",
pos: position{line: 55, col: 1, offset: 1412},
expr: &actionExpr{
pos: position{line: 55, col: 16, offset: 1427},
run: (*parser).callonDeclaration1,
expr: &seqExpr{
pos: position{line: 55, col: 16, offset: 1427},
exprs: []interface{}{
&ruleRefExpr{
pos: position{line: 55, col: 16, offset: 1427},
name: "_",
},
&labeledExpr{
pos: position{line: 55, col: 18, offset: 1429},
label: "d",
expr: &choiceExpr{
pos: position{line: 55, col: 21, offset: 1432},
alternatives: []interface{}{
&ruleRefExpr{
pos: position{line: 55, col: 21, offset: 1432},
name: "ConstDeclaration",
},
&ruleRefExpr{
pos: position{line: 55, col: 40, offset: 1451},
name: "ContextDeclaration",
},
&ruleRefExpr{
pos: position{line: 55, col: 61, offset: 1472},
name: "StructDeclaration",
},
&ruleRefExpr{
pos: position{line: 55, col: 81, offset: 1492},
name: "ExternDeclaration",
},
&ruleRefExpr{
pos: position{line: 55, col: 101, offset: 1512},
name: "PragmaDeclaration",
},
},
},
},
&ruleRefExpr{
pos: position{line: 55, col: 120, offset: 1531},
name: "_",
},
},
},
},
},
{
name: "ConstDeclaration",
pos: position{line: 61, col: 1, offset: 1596},
expr: &actionExpr{
pos: position{line: 61, col: 21, offset: 1616},
run: (*parser).callonConstDeclaration1,
expr: &seqExpr{
pos: position{line: 61, col: 21, offset: 1616},
exprs: []interface{}{
&litMatcher{
pos: position{line: 61, col: 21, offset: 1616},
val: "const",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 61, col: 29, offset: 1624},
name: "__",
},
&labeledExpr{
pos: position{line: 61, col: 32, offset: 1627},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 61, col: 34, offset: 1629},
name: "ConstIdentifier",
},
},
&ruleRefExpr{
pos: position{line: 61, col: 50, offset: 1645},
name: "_",
},
&litMatcher{
pos: position{line: 61, col: 52, offset: 1647},
val: "=",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 61, col: 56, offset: 1651},
name: "_",
},
&labeledExpr{
pos: position{line: 61, col: 58, offset: 1653},
label: "v",
expr: &ruleRefExpr{
pos: position{line: 61, col: 60, offset: 1655},
name: "IntLiteral",
},
},
&ruleRefExpr{
pos: position{line: 61, col: 71, offset: 1666},
name: "_",
},
&litMatcher{
pos: position{line: 61, col: 73, offset: 1668},
val: ";",
ignoreCase: false,
},
},
},
},
},
{
name: "ContextDeclaration",
pos: position{line: 73, col: 1, offset: 1929},
expr: &actionExpr{
pos: position{line: 73, col: 23, offset: 1951},
run: (*parser).callonContextDeclaration1,
expr: &seqExpr{
pos: position{line: 73, col: 23, offset: 1951},
exprs: []interface{}{
&litMatcher{
pos: position{line: 73, col: 23, offset: 1951},
val: "context",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 73, col: 33, offset: 1961},
name: "__",
},
&labeledExpr{
pos: position{line: 73, col: 36, offset: 1964},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 73, col: 38, offset: 1966},
name: "Identifier",
},
},
&ruleRefExpr{
pos: position{line: 73, col: 49, offset: 1977},
name: "__",
},
&litMatcher{
pos: position{line: 73, col: 52, offset: 1980},
val: "{",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 73, col: 56, offset: 1984},
name: "_",
},
&labeledExpr{
pos: position{line: 73, col: 58, offset: 1986},
label: "fs",
expr: &zeroOrMoreExpr{
pos: position{line: 73, col: 61, offset: 1989},
expr: &ruleRefExpr{
pos: position{line: 73, col: 61, offset: 1989},
name: "ContextMember",
},
},
},
&ruleRefExpr{
pos: position{line: 73, col: 76, offset: 2004},
name: "_",
},
&litMatcher{
pos: position{line: 73, col: 78, offset: 2006},
val: "}",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 73, col: 82, offset: 2010},
name: "_",
},
&zeroOrOneExpr{
pos: position{line: 73, col: 84, offset: 2012},
expr: &litMatcher{
pos: position{line: 73, col: 84, offset: 2012},
val: ";",
ignoreCase: false,
},
},
},
},
},
},
{
name: "ContextMember",
pos: position{line: 84, col: 1, offset: 2200},
expr: &actionExpr{
pos: position{line: 84, col: 18, offset: 2217},
run: (*parser).callonContextMember1,
expr: &seqExpr{
pos: position{line: 84, col: 18, offset: 2217},
exprs: []interface{}{
&ruleRefExpr{
pos: position{line: 84, col: 18, offset: 2217},
name: "_",
},
&labeledExpr{
pos: position{line: 84, col: 20, offset: 2219},
label: "t",
expr: &ruleRefExpr{
pos: position{line: 84, col: 22, offset: 2221},
name: "IntType",
},
},
&ruleRefExpr{
pos: position{line: 84, col: 30, offset: 2229},
name: "__",
},
&labeledExpr{
pos: position{line: 84, col: 33, offset: 2232},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 84, col: 35, offset: 2234},
name: "Identifier",
},
},
&ruleRefExpr{
pos: position{line: 84, col: 46, offset: 2245},
name: "_",
},
&litMatcher{
pos: position{line: 84, col: 48, offset: 2247},
val: ";",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 84, col: 52, offset: 2251},
name: "_",
},
},
},
},
},
{
name: "StructDeclaration",
pos: position{line: 94, col: 1, offset: 2437},
expr: &actionExpr{
pos: position{line: 94, col: 22, offset: 2458},
run: (*parser).callonStructDeclaration1,
expr: &seqExpr{
pos: position{line: 94, col: 22, offset: 2458},
exprs: []interface{}{
&labeledExpr{
pos: position{line: 94, col: 22, offset: 2458},
label: "s",
expr: &ruleRefExpr{
pos: position{line: 94, col: 24, offset: 2460},
name: "StructDecl",
},
},
&ruleRefExpr{
pos: position{line: 94, col: 35, offset: 2471},
name: "_",
},
&zeroOrOneExpr{
pos: position{line: 94, col: 37, offset: 2473},
expr: &litMatcher{
pos: position{line: 94, col: 37, offset: 2473},
val: ";",
ignoreCase: false,
},
},
},
},
},
},
{
name: "StructDecl",
pos: position{line: 100, col: 1, offset: 2591},
expr: &actionExpr{
pos: position{line: 100, col: 15, offset: 2605},
run: (*parser).callonStructDecl1,
expr: &seqExpr{
pos: position{line: 100, col: 15, offset: 2605},
exprs: []interface{}{
&labeledExpr{
pos: position{line: 100, col: 15, offset: 2605},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 100, col: 17, offset: 2607},
name: "StructIdentifier",
},
},
&labeledExpr{
pos: position{line: 100, col: 34, offset: 2624},
label: "ctx",
expr: &zeroOrOneExpr{
pos: position{line: 100, col: 38, offset: 2628},
expr: &seqExpr{
pos: position{line: 100, col: 39, offset: 2629},
exprs: []interface{}{
&ruleRefExpr{
pos: position{line: 100, col: 39, offset: 2629},
name: "__",
},
&ruleRefExpr{
pos: position{line: 100, col: 42, offset: 2632},
name: "ContextRefs",
},
},
},
},
},
&ruleRefExpr{
pos: position{line: 100, col: 56, offset: 2646},
name: "_",
},
&litMatcher{
pos: position{line: 100, col: 58, offset: 2648},
val: "{",
ignoreCase: false,
},
&labeledExpr{
pos: position{line: 100, col: 62, offset: 2652},
label: "ms",
expr: &zeroOrMoreExpr{
pos: position{line: 100, col: 65, offset: 2655},
expr: &ruleRefExpr{
pos: position{line: 100, col: 65, offset: 2655},
name: "StructMember",
},
},
},
&ruleRefExpr{
pos: position{line: 100, col: 79, offset: 2669},
name: "_",
},
&labeledExpr{
pos: position{line: 100, col: 81, offset: 2671},
label: "e",
expr: &zeroOrOneExpr{
pos: position{line: 100, col: 83, offset: 2673},
expr: &ruleRefExpr{
pos: position{line: 100, col: 83, offset: 2673},
name: "StructEnding",
},
},
},
&ruleRefExpr{
pos: position{line: 100, col: 97, offset: 2687},
name: "_",
},
&litMatcher{
pos: position{line: 100, col: 99, offset: 2689},
val: "}",
ignoreCase: false,
},
},
},
},
},
{
name: "StructIdentifier",
pos: position{line: 121, col: 1, offset: 3014},
expr: &actionExpr{
pos: position{line: 121, col: 21, offset: 3034},
run: (*parser).callonStructIdentifier1,
expr: &seqExpr{
pos: position{line: 121, col: 21, offset: 3034},
exprs: []interface{}{
&litMatcher{
pos: position{line: 121, col: 21, offset: 3034},
val: "struct",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 121, col: 30, offset: 3043},
name: "__",
},
&labeledExpr{
pos: position{line: 121, col: 33, offset: 3046},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 121, col: 35, offset: 3048},
name: "Identifier",
},
},
},
},
},
},
{
name: "ExternDeclaration",
pos: position{line: 125, col: 1, offset: 3080},
expr: &actionExpr{
pos: position{line: 125, col: 22, offset: 3101},
run: (*parser).callonExternDeclaration1,
expr: &seqExpr{
pos: position{line: 125, col: 22, offset: 3101},
exprs: []interface{}{
&litMatcher{
pos: position{line: 125, col: 22, offset: 3101},
val: "extern",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 125, col: 31, offset: 3110},
name: "__",
},
&labeledExpr{
pos: position{line: 125, col: 34, offset: 3113},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 125, col: 36, offset: 3115},
name: "StructIdentifier",
},
},
&labeledExpr{
pos: position{line: 125, col: 53, offset: 3132},
label: "ctx",
expr: &zeroOrOneExpr{
pos: position{line: 125, col: 57, offset: 3136},
expr: &seqExpr{
pos: position{line: 125, col: 58, offset: 3137},
exprs: []interface{}{
&ruleRefExpr{
pos: position{line: 125, col: 58, offset: 3137},
name: "__",
},
&ruleRefExpr{
pos: position{line: 125, col: 61, offset: 3140},
name: "ContextRefs",
},
},
},
},
},
&ruleRefExpr{
pos: position{line: 125, col: 74, offset: 3153},
name: "_",
},
&litMatcher{
pos: position{line: 125, col: 76, offset: 3155},
val: ";",
ignoreCase: false,
},
},
},
},
},
{
name: "PragmaDeclaration",
pos: position{line: 135, col: 1, offset: 3299},
expr: &actionExpr{
pos: position{line: 135, col: 22, offset: 3320},
run: (*parser).callonPragmaDeclaration1,
expr: &seqExpr{
pos: position{line: 135, col: 22, offset: 3320},
exprs: []interface{}{
&litMatcher{
pos: position{line: 135, col: 22, offset: 3320},
val: "trunnel",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 135, col: 32, offset: 3330},
name: "__",
},
&labeledExpr{
pos: position{line: 135, col: 35, offset: 3333},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 135, col: 37, offset: 3335},
name: "Identifier",
},
},
&ruleRefExpr{
pos: position{line: 135, col: 48, offset: 3346},
name: "__",
},
&labeledExpr{
pos: position{line: 135, col: 51, offset: 3349},
label: "opts",
expr: &ruleRefExpr{
pos: position{line: 135, col: 56, offset: 3354},
name: "IdentifierList",
},
},
&ruleRefExpr{
pos: position{line: 135, col: 71, offset: 3369},
name: "_",
},
&litMatcher{
pos: position{line: 135, col: 73, offset: 3371},
val: ";",
ignoreCase: false,
},
},
},
},
},
{
name: "ContextRefs",
pos: position{line: 145, col: 1, offset: 3534},
expr: &actionExpr{
pos: position{line: 145, col: 16, offset: 3549},
run: (*parser).callonContextRefs1,
expr: &seqExpr{
pos: position{line: 145, col: 16, offset: 3549},
exprs: []interface{}{
&litMatcher{
pos: position{line: 145, col: 16, offset: 3549},
val: "with",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 145, col: 23, offset: 3556},
name: "__",
},
&litMatcher{
pos: position{line: 145, col: 26, offset: 3559},
val: "context",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 145, col: 36, offset: 3569},
name: "__",
},
&labeledExpr{
pos: position{line: 145, col: 39, offset: 3572},
label: "ns",
expr: &ruleRefExpr{
pos: position{line: 145, col: 42, offset: 3575},
name: "IdentifierList",
},
},
},
},
},
},
{
name: "StructMember",
pos: position{line: 159, col: 1, offset: 3894},
expr: &actionExpr{
pos: position{line: 159, col: 17, offset: 3910},
run: (*parser).callonStructMember1,
expr: &seqExpr{
pos: position{line: 159, col: 17, offset: 3910},
exprs: []interface{}{
&ruleRefExpr{
pos: position{line: 159, col: 17, offset: 3910},
name: "_",
},
&labeledExpr{
pos: position{line: 159, col: 19, offset: 3912},
label: "m",
expr: &choiceExpr{
pos: position{line: 159, col: 22, offset: 3915},
alternatives: []interface{}{
&ruleRefExpr{
pos: position{line: 159, col: 22, offset: 3915},
name: "SMArray",
},
&ruleRefExpr{
pos: position{line: 159, col: 32, offset: 3925},
name: "SMInteger",
},
&ruleRefExpr{
pos: position{line: 159, col: 44, offset: 3937},
name: "SMPosition",
},
&ruleRefExpr{
pos: position{line: 159, col: 57, offset: 3950},
name: "SMString",
},
&ruleRefExpr{
pos: position{line: 159, col: 68, offset: 3961},
name: "SMStruct",
},
&ruleRefExpr{
pos: position{line: 159, col: 79, offset: 3972},
name: "SMUnion",
},
},
},
},
&ruleRefExpr{
pos: position{line: 159, col: 88, offset: 3981},
name: "_",
},
&litMatcher{
pos: position{line: 159, col: 90, offset: 3983},
val: ";",
ignoreCase: false,
},
},
},
},
},
{
name: "StructEnding",
pos: position{line: 167, col: 1, offset: 4092},
expr: &actionExpr{
pos: position{line: 167, col: 17, offset: 4108},
run: (*parser).callonStructEnding1,
expr: &seqExpr{
pos: position{line: 167, col: 17, offset: 4108},
exprs: []interface{}{
&ruleRefExpr{
pos: position{line: 167, col: 17, offset: 4108},
name: "_",
},
&labeledExpr{
pos: position{line: 167, col: 19, offset: 4110},
label: "e",
expr: &choiceExpr{
pos: position{line: 167, col: 22, offset: 4113},
alternatives: []interface{}{
&ruleRefExpr{
pos: position{line: 167, col: 22, offset: 4113},
name: "SMRemainder",
},
&ruleRefExpr{
pos: position{line: 167, col: 36, offset: 4127},
name: "StructEOS",
},
},
},
},
&ruleRefExpr{
pos: position{line: 167, col: 47, offset: 4138},
name: "_",
},
&litMatcher{
pos: position{line: 167, col: 49, offset: 4140},
val: ";",
ignoreCase: false,
},
},
},
},
},
{
name: "StructEOS",
pos: position{line: 171, col: 1, offset: 4165},
expr: &actionExpr{
pos: position{line: 171, col: 14, offset: 4178},
run: (*parser).callonStructEOS1,
expr: &litMatcher{
pos: position{line: 171, col: 14, offset: 4178},
val: "eos",
ignoreCase: false,
},
},
},
{
name: "SMArray",
pos: position{line: 178, col: 1, offset: 4271},
expr: &actionExpr{
pos: position{line: 178, col: 12, offset: 4282},
run: (*parser).callonSMArray1,
expr: &labeledExpr{
pos: position{line: 178, col: 12, offset: 4282},
label: "a",
expr: &choiceExpr{
pos: position{line: 178, col: 15, offset: 4285},
alternatives: []interface{}{
&ruleRefExpr{
pos: position{line: 178, col: 15, offset: 4285},
name: "SMFixedArray",
},
&ruleRefExpr{
pos: position{line: 178, col: 30, offset: 4300},
name: "SMVarArray",
},
},
},
},
},
},
{
name: "SMFixedArray",
pos: position{line: 184, col: 1, offset: 4380},
expr: &actionExpr{
pos: position{line: 184, col: 17, offset: 4396},
run: (*parser).callonSMFixedArray1,
expr: &seqExpr{
pos: position{line: 184, col: 17, offset: 4396},
exprs: []interface{}{
&labeledExpr{
pos: position{line: 184, col: 17, offset: 4396},
label: "b",
expr: &ruleRefExpr{
pos: position{line: 184, col: 19, offset: 4398},
name: "ArrayBase",
},
},
&ruleRefExpr{
pos: position{line: 184, col: 29, offset: 4408},
name: "__",
},
&labeledExpr{
pos: position{line: 184, col: 32, offset: 4411},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 184, col: 34, offset: 4413},
name: "Identifier",
},
},
&ruleRefExpr{
pos: position{line: 184, col: 45, offset: 4424},
name: "_",
},
&litMatcher{
pos: position{line: 184, col: 47, offset: 4426},
val: "[",
ignoreCase: false,
},
&labeledExpr{
pos: position{line: 184, col: 51, offset: 4430},
label: "s",
expr: &ruleRefExpr{
pos: position{line: 184, col: 53, offset: 4432},
name: "Integer",
},
},
&litMatcher{
pos: position{line: 184, col: 61, offset: 4440},
val: "]",
ignoreCase: false,
},
},
},
},
},
{
name: "SMVarArray",
pos: position{line: 197, col: 1, offset: 4693},
expr: &actionExpr{
pos: position{line: 197, col: 15, offset: 4707},
run: (*parser).callonSMVarArray1,
expr: &seqExpr{
pos: position{line: 197, col: 15, offset: 4707},
exprs: []interface{}{
&labeledExpr{
pos: position{line: 197, col: 15, offset: 4707},
label: "b",
expr: &ruleRefExpr{
pos: position{line: 197, col: 17, offset: 4709},
name: "ArrayBase",
},
},
&ruleRefExpr{
pos: position{line: 197, col: 27, offset: 4719},
name: "__",
},
&labeledExpr{
pos: position{line: 197, col: 30, offset: 4722},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 197, col: 32, offset: 4724},
name: "Identifier",
},
},
&ruleRefExpr{
pos: position{line: 197, col: 43, offset: 4735},
name: "_",
},
&litMatcher{
pos: position{line: 197, col: 45, offset: 4737},
val: "[",
ignoreCase: false,
},
&labeledExpr{
pos: position{line: 197, col: 49, offset: 4741},
label: "l",
expr: &ruleRefExpr{
pos: position{line: 197, col: 51, offset: 4743},
name: "LengthConstraint",
},
},
&litMatcher{
pos: position{line: 197, col: 68, offset: 4760},
val: "]",
ignoreCase: false,
},
},
},
},
},
{
name: "LengthConstraint",
pos: position{line: 207, col: 1, offset: 4934},
expr: &actionExpr{
pos: position{line: 207, col: 21, offset: 4954},
run: (*parser).callonLengthConstraint1,
expr: &labeledExpr{
pos: position{line: 207, col: 21, offset: 4954},
label: "l",
expr: &choiceExpr{
pos: position{line: 207, col: 24, offset: 4957},
alternatives: []interface{}{
&ruleRefExpr{
pos: position{line: 207, col: 24, offset: 4957},
name: "Leftover",
},
&ruleRefExpr{
pos: position{line: 207, col: 35, offset: 4968},
name: "IDRef",
},
},
},
},
},
},
{
name: "Leftover",
pos: position{line: 211, col: 1, offset: 4996},
expr: &actionExpr{
pos: position{line: 211, col: 13, offset: 5008},
run: (*parser).callonLeftover1,
expr: &seqExpr{
pos: position{line: 211, col: 13, offset: 5008},
exprs: []interface{}{
&litMatcher{
pos: position{line: 211, col: 13, offset: 5008},
val: "..-",
ignoreCase: false,
},
&labeledExpr{
pos: position{line: 211, col: 19, offset: 5014},
label: "i",
expr: &ruleRefExpr{
pos: position{line: 211, col: 21, offset: 5016},
name: "Integer",
},
},
},
},
},
},
{
name: "SMRemainder",
pos: position{line: 217, col: 1, offset: 5146},
expr: &actionExpr{
pos: position{line: 217, col: 16, offset: 5161},
run: (*parser).callonSMRemainder1,
expr: &seqExpr{
pos: position{line: 217, col: 16, offset: 5161},
exprs: []interface{}{
&labeledExpr{
pos: position{line: 217, col: 16, offset: 5161},
label: "b",
expr: &ruleRefExpr{
pos: position{line: 217, col: 18, offset: 5163},
name: "ArrayBase",
},
},
&ruleRefExpr{
pos: position{line: 217, col: 28, offset: 5173},
name: "__",
},
&labeledExpr{
pos: position{line: 217, col: 31, offset: 5176},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 217, col: 33, offset: 5178},
name: "Identifier",
},
},
&ruleRefExpr{
pos: position{line: 217, col: 44, offset: 5189},
name: "_",
},
&litMatcher{
pos: position{line: 217, col: 46, offset: 5191},
val: "[",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 217, col: 50, offset: 5195},
name: "_",
},
&litMatcher{
pos: position{line: 217, col: 52, offset: 5197},
val: "]",
ignoreCase: false,
},
},
},
},
},
{
name: "ArrayBase",
pos: position{line: 232, col: 1, offset: 5461},
expr: &actionExpr{
pos: position{line: 232, col: 14, offset: 5474},
run: (*parser).callonArrayBase1,
expr: &labeledExpr{
pos: position{line: 232, col: 14, offset: 5474},
label: "t",
expr: &choiceExpr{
pos: position{line: 232, col: 17, offset: 5477},
alternatives: []interface{}{
&ruleRefExpr{
pos: position{line: 232, col: 17, offset: 5477},
name: "IntType",
},
&ruleRefExpr{
pos: position{line: 232, col: 27, offset: 5487},
name: "CharType",
},
&ruleRefExpr{
pos: position{line: 232, col: 38, offset: 5498},
name: "StructRef",
},
},
},
},
},
},
{
name: "SMInteger",
pos: position{line: 239, col: 1, offset: 5578},
expr: &actionExpr{
pos: position{line: 239, col: 14, offset: 5591},
run: (*parser).callonSMInteger1,
expr: &seqExpr{
pos: position{line: 239, col: 14, offset: 5591},
exprs: []interface{}{
&labeledExpr{
pos: position{line: 239, col: 14, offset: 5591},
label: "t",
expr: &ruleRefExpr{
pos: position{line: 239, col: 16, offset: 5593},
name: "IntType",
},
},
&ruleRefExpr{
pos: position{line: 239, col: 24, offset: 5601},
name: "_",
},
&labeledExpr{
pos: position{line: 239, col: 26, offset: 5603},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 239, col: 28, offset: 5605},
name: "Identifier",
},
},
&ruleRefExpr{
pos: position{line: 239, col: 39, offset: 5616},
name: "_",
},
&labeledExpr{
pos: position{line: 239, col: 41, offset: 5618},
label: "cst",
expr: &zeroOrOneExpr{
pos: position{line: 239, col: 45, offset: 5622},
expr: &ruleRefExpr{
pos: position{line: 239, col: 45, offset: 5622},
name: "IntConstraint",
},
},
},
},
},
},
},
{
name: "SMPosition",
pos: position{line: 252, col: 1, offset: 5825},
expr: &actionExpr{
pos: position{line: 252, col: 15, offset: 5839},
run: (*parser).callonSMPosition1,
expr: &seqExpr{
pos: position{line: 252, col: 15, offset: 5839},
exprs: []interface{}{
&litMatcher{
pos: position{line: 252, col: 15, offset: 5839},
val: "@ptr",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 252, col: 22, offset: 5846},
name: "__",
},
&labeledExpr{
pos: position{line: 252, col: 25, offset: 5849},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 252, col: 27, offset: 5851},
name: "Identifier",
},
},
},
},
},
},
{
name: "SMString",
pos: position{line: 261, col: 1, offset: 5970},
expr: &actionExpr{
pos: position{line: 261, col: 13, offset: 5982},
run: (*parser).callonSMString1,
expr: &seqExpr{
pos: position{line: 261, col: 13, offset: 5982},
exprs: []interface{}{
&litMatcher{
pos: position{line: 261, col: 13, offset: 5982},
val: "nulterm",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 261, col: 23, offset: 5992},
name: "__",
},
&labeledExpr{
pos: position{line: 261, col: 26, offset: 5995},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 261, col: 28, offset: 5997},
name: "Identifier",
},
},
},
},
},
},
{
name: "SMStruct",
pos: position{line: 271, col: 1, offset: 6159},
expr: &actionExpr{
pos: position{line: 271, col: 13, offset: 6171},
run: (*parser).callonSMStruct1,
expr: &seqExpr{
pos: position{line: 271, col: 13, offset: 6171},
exprs: []interface{}{
&labeledExpr{
pos: position{line: 271, col: 13, offset: 6171},
label: "s",
expr: &ruleRefExpr{
pos: position{line: 271, col: 15, offset: 6173},
name: "StructRef",
},
},
&ruleRefExpr{
pos: position{line: 271, col: 25, offset: 6183},
name: "__",
},
&labeledExpr{
pos: position{line: 271, col: 28, offset: 6186},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 271, col: 30, offset: 6188},
name: "Identifier",
},
},
},
},
},
},
{
name: "SMUnion",
pos: position{line: 280, col: 1, offset: 6354},
expr: &actionExpr{
pos: position{line: 280, col: 12, offset: 6365},
run: (*parser).callonSMUnion1,
expr: &seqExpr{
pos: position{line: 280, col: 12, offset: 6365},
exprs: []interface{}{
&litMatcher{
pos: position{line: 280, col: 12, offset: 6365},
val: "union",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 280, col: 20, offset: 6373},
name: "__",
},
&labeledExpr{
pos: position{line: 280, col: 23, offset: 6376},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 280, col: 25, offset: 6378},
name: "Identifier",
},
},
&ruleRefExpr{
pos: position{line: 280, col: 36, offset: 6389},
name: "_",
},
&litMatcher{
pos: position{line: 280, col: 38, offset: 6391},
val: "[",
ignoreCase: false,
},
&labeledExpr{
pos: position{line: 280, col: 42, offset: 6395},
label: "t",
expr: &ruleRefExpr{
pos: position{line: 280, col: 44, offset: 6397},
name: "IDRef",
},
},
&litMatcher{
pos: position{line: 280, col: 50, offset: 6403},
val: "]",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 280, col: 54, offset: 6407},
name: "_",
},
&labeledExpr{
pos: position{line: 280, col: 56, offset: 6409},
label: "l",
expr: &zeroOrOneExpr{
pos: position{line: 280, col: 58, offset: 6411},
expr: &ruleRefExpr{
pos: position{line: 280, col: 58, offset: 6411},
name: "UnionLength",
},
},
},
&ruleRefExpr{
pos: position{line: 280, col: 71, offset: 6424},
name: "_",
},
&litMatcher{
pos: position{line: 280, col: 73, offset: 6426},
val: "{",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 280, col: 77, offset: 6430},
name: "_",
},
&labeledExpr{
pos: position{line: 280, col: 79, offset: 6432},
label: "cs",
expr: &zeroOrMoreExpr{
pos: position{line: 280, col: 82, offset: 6435},
expr: &ruleRefExpr{
pos: position{line: 280, col: 82, offset: 6435},
name: "UnionMember",
},
},
},
&ruleRefExpr{
pos: position{line: 280, col: 95, offset: 6448},
name: "_",
},
&litMatcher{
pos: position{line: 280, col: 97, offset: 6450},
val: "}",
ignoreCase: false,
},
},
},
},
},
{
name: "UnionLength",
pos: position{line: 296, col: 1, offset: 6757},
expr: &actionExpr{
pos: position{line: 296, col: 16, offset: 6772},
run: (*parser).callonUnionLength1,
expr: &seqExpr{
pos: position{line: 296, col: 16, offset: 6772},
exprs: []interface{}{
&litMatcher{
pos: position{line: 296, col: 16, offset: 6772},
val: "with",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 296, col: 23, offset: 6779},
name: "__",
},
&litMatcher{
pos: position{line: 296, col: 26, offset: 6782},
val: "length",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 296, col: 35, offset: 6791},
name: "__",
},
&labeledExpr{
pos: position{line: 296, col: 38, offset: 6794},
label: "l",
expr: &ruleRefExpr{
pos: position{line: 296, col: 40, offset: 6796},
name: "LengthConstraint",
},
},
},
},
},
},
{
name: "UnionMember",
pos: position{line: 302, col: 1, offset: 6909},
expr: &actionExpr{
pos: position{line: 302, col: 16, offset: 6924},
run: (*parser).callonUnionMember1,
expr: &seqExpr{
pos: position{line: 302, col: 16, offset: 6924},
exprs: []interface{}{
&ruleRefExpr{
pos: position{line: 302, col: 16, offset: 6924},
name: "_",
},
&labeledExpr{
pos: position{line: 302, col: 18, offset: 6926},
label: "cse",
expr: &ruleRefExpr{
pos: position{line: 302, col: 22, offset: 6930},
name: "UnionCase",
},
},
&ruleRefExpr{
pos: position{line: 302, col: 32, offset: 6940},
name: "_",
},
&litMatcher{
pos: position{line: 302, col: 34, offset: 6942},
val: ":",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 302, col: 38, offset: 6946},
name: "_",
},
&labeledExpr{
pos: position{line: 302, col: 40, offset: 6948},
label: "fs",
expr: &ruleRefExpr{
pos: position{line: 302, col: 43, offset: 6951},
name: "UnionBody",
},
},
&ruleRefExpr{
pos: position{line: 302, col: 53, offset: 6961},
name: "_",
},
},
},
},
},
{
name: "UnionCase",
pos: position{line: 316, col: 1, offset: 7178},
expr: &choiceExpr{
pos: position{line: 316, col: 14, offset: 7191},
alternatives: []interface{}{
&actionExpr{
pos: position{line: 316, col: 14, offset: 7191},
run: (*parser).callonUnionCase2,
expr: &labeledExpr{
pos: position{line: 316, col: 14, offset: 7191},
label: "l",
expr: &ruleRefExpr{
pos: position{line: 316, col: 16, offset: 7193},
name: "IntList",
},
},
},
&actionExpr{
pos: position{line: 318, col: 5, offset: 7223},
run: (*parser).callonUnionCase5,
expr: &litMatcher{
pos: position{line: 318, col: 5, offset: 7223},
val: "default",
ignoreCase: false,
},
},
},
},
},
{
name: "UnionBody",
pos: position{line: 329, col: 1, offset: 7471},
expr: &choiceExpr{
pos: position{line: 329, col: 14, offset: 7484},
alternatives: []interface{}{
&actionExpr{
pos: position{line: 329, col: 14, offset: 7484},
run: (*parser).callonUnionBody2,
expr: &seqExpr{
pos: position{line: 329, col: 14, offset: 7484},
exprs: []interface{}{
&ruleRefExpr{
pos: position{line: 329, col: 14, offset: 7484},
name: "_",
},
&litMatcher{
pos: position{line: 329, col: 16, offset: 7486},
val: ";",
ignoreCase: false,
},
},
},
},
&actionExpr{
pos: position{line: 331, col: 5, offset: 7514},
run: (*parser).callonUnionBody6,
expr: &seqExpr{
pos: position{line: 331, col: 5, offset: 7514},
exprs: []interface{}{
&litMatcher{
pos: position{line: 331, col: 5, offset: 7514},
val: "fail",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 331, col: 12, offset: 7521},
name: "_",
},
&litMatcher{
pos: position{line: 331, col: 14, offset: 7523},
val: ";",
ignoreCase: false,
},
},
},
},
&actionExpr{
pos: position{line: 333, col: 5, offset: 7573},
run: (*parser).callonUnionBody11,
expr: &seqExpr{
pos: position{line: 333, col: 5, offset: 7573},
exprs: []interface{}{
&litMatcher{
pos: position{line: 333, col: 5, offset: 7573},
val: "ignore",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 333, col: 14, offset: 7582},
name: "_",
},
&litMatcher{
pos: position{line: 333, col: 16, offset: 7584},
val: ";",
ignoreCase: false,
},
},
},
},
&actionExpr{
pos: position{line: 335, col: 5, offset: 7636},
run: (*parser).callonUnionBody16,
expr: &labeledExpr{
pos: position{line: 335, col: 5, offset: 7636},
label: "fs",
expr: &ruleRefExpr{
pos: position{line: 335, col: 8, offset: 7639},
name: "UnionFields",
},
},
},
},
},
},
{
name: "UnionFields",
pos: position{line: 339, col: 1, offset: 7673},
expr: &actionExpr{
pos: position{line: 339, col: 16, offset: 7688},
run: (*parser).callonUnionFields1,
expr: &seqExpr{
pos: position{line: 339, col: 16, offset: 7688},
exprs: []interface{}{
&labeledExpr{
pos: position{line: 339, col: 16, offset: 7688},
label: "ms",
expr: &zeroOrMoreExpr{
pos: position{line: 339, col: 19, offset: 7691},
expr: &ruleRefExpr{
pos: position{line: 339, col: 19, offset: 7691},
name: "UnionField",
},
},
},
&ruleRefExpr{
pos: position{line: 339, col: 31, offset: 7703},
name: "_",
},
&labeledExpr{
pos: position{line: 339, col: 33, offset: 7705},
label: "e",
expr: &zeroOrOneExpr{
pos: position{line: 339, col: 35, offset: 7707},
expr: &ruleRefExpr{
pos: position{line: 339, col: 35, offset: 7707},
name: "ExtentSpec",
},
},
},
},
},
},
},
{
name: "UnionField",
pos: position{line: 356, col: 1, offset: 8036},
expr: &actionExpr{
pos: position{line: 356, col: 15, offset: 8050},
run: (*parser).callonUnionField1,
expr: &seqExpr{
pos: position{line: 356, col: 15, offset: 8050},
exprs: []interface{}{
&ruleRefExpr{
pos: position{line: 356, col: 15, offset: 8050},
name: "_",
},
&labeledExpr{
pos: position{line: 356, col: 17, offset: 8052},
label: "m",
expr: &choiceExpr{
pos: position{line: 356, col: 20, offset: 8055},
alternatives: []interface{}{
&ruleRefExpr{
pos: position{line: 356, col: 20, offset: 8055},
name: "SMArray",
},
&ruleRefExpr{
pos: position{line: 356, col: 30, offset: 8065},
name: "SMInteger",
},
&ruleRefExpr{
pos: position{line: 356, col: 42, offset: 8077},
name: "SMString",
},
&ruleRefExpr{
pos: position{line: 356, col: 53, offset: 8088},
name: "SMStruct",
},
},
},
},
&ruleRefExpr{
pos: position{line: 356, col: 63, offset: 8098},
name: "_",
},
&litMatcher{
pos: position{line: 356, col: 65, offset: 8100},
val: ";",
ignoreCase: false,
},
},
},
},
},
{
name: "ExtentSpec",
pos: position{line: 364, col: 1, offset: 8212},
expr: &choiceExpr{
pos: position{line: 364, col: 15, offset: 8226},
alternatives: []interface{}{
&actionExpr{
pos: position{line: 364, col: 15, offset: 8226},
run: (*parser).callonExtentSpec2,
expr: &seqExpr{
pos: position{line: 364, col: 15, offset: 8226},
exprs: []interface{}{
&litMatcher{
pos: position{line: 364, col: 15, offset: 8226},
val: "...",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 364, col: 21, offset: 8232},
name: "_",
},
&litMatcher{
pos: position{line: 364, col: 23, offset: 8234},
val: ";",
ignoreCase: false,
},
},
},
},
&actionExpr{
pos: position{line: 366, col: 5, offset: 8272},
run: (*parser).callonExtentSpec7,
expr: &seqExpr{
pos: position{line: 366, col: 5, offset: 8272},
exprs: []interface{}{
&labeledExpr{
pos: position{line: 366, col: 5, offset: 8272},
label: "r",
expr: &ruleRefExpr{
pos: position{line: 366, col: 7, offset: 8274},
name: "SMRemainder",
},
},
&ruleRefExpr{
pos: position{line: 366, col: 19, offset: 8286},
name: "_",
},
&litMatcher{
pos: position{line: 366, col: 21, offset: 8288},
val: ";",
ignoreCase: false,
},
},
},
},
},
},
},
{
name: "StructRef",
pos: position{line: 370, col: 1, offset: 8313},
expr: &choiceExpr{
pos: position{line: 370, col: 14, offset: 8326},
alternatives: []interface{}{
&actionExpr{
pos: position{line: 370, col: 14, offset: 8326},
run: (*parser).callonStructRef2,
expr: &seqExpr{
pos: position{line: 370, col: 14, offset: 8326},
exprs: []interface{}{
&labeledExpr{
pos: position{line: 370, col: 14, offset: 8326},
label: "s",
expr: &ruleRefExpr{
pos: position{line: 370, col: 16, offset: 8328},
name: "StructDecl",
},
},
&stateCodeExpr{
pos: position{line: 370, col: 27, offset: 8339},
run: (*parser).callonStructRef6,
},
},
},
},
&actionExpr{
pos: position{line: 372, col: 5, offset: 8449},
run: (*parser).callonStructRef7,
expr: &labeledExpr{
pos: position{line: 372, col: 5, offset: 8449},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 372, col: 7, offset: 8451},
name: "StructIdentifier",
},
},
},
},
},
},
{
name: "CharType",
pos: position{line: 376, col: 1, offset: 8520},
expr: &actionExpr{
pos: position{line: 376, col: 13, offset: 8532},
run: (*parser).callonCharType1,
expr: &litMatcher{
pos: position{line: 376, col: 13, offset: 8532},
val: "char",
ignoreCase: false,
},
},
},
{
name: "IntType",
pos: position{line: 385, col: 1, offset: 8654},
expr: &actionExpr{
pos: position{line: 385, col: 12, offset: 8665},
run: (*parser).callonIntType1,
expr: &seqExpr{
pos: position{line: 385, col: 12, offset: 8665},
exprs: []interface{}{
&litMatcher{
pos: position{line: 385, col: 12, offset: 8665},
val: "u",
ignoreCase: false,
},
&labeledExpr{
pos: position{line: 385, col: 16, offset: 8669},
label: "b",
expr: &choiceExpr{
pos: position{line: 385, col: 19, offset: 8672},
alternatives: []interface{}{
&litMatcher{
pos: position{line: 385, col: 19, offset: 8672},
val: "8",
ignoreCase: false,
},
&litMatcher{
pos: position{line: 385, col: 25, offset: 8678},
val: "16",
ignoreCase: false,
},
&litMatcher{
pos: position{line: 385, col: 32, offset: 8685},
val: "32",
ignoreCase: false,
},
&litMatcher{
pos: position{line: 385, col: 39, offset: 8692},
val: "64",
ignoreCase: false,
},
},
},
},
},
},
},
},
{
name: "IntConstraint",
pos: position{line: 393, col: 1, offset: 8856},
expr: &actionExpr{
pos: position{line: 393, col: 18, offset: 8873},
run: (*parser).callonIntConstraint1,
expr: &seqExpr{
pos: position{line: 393, col: 18, offset: 8873},
exprs: []interface{}{
&litMatcher{
pos: position{line: 393, col: 18, offset: 8873},
val: "IN",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 393, col: 23, offset: 8878},
name: "__",
},
&litMatcher{
pos: position{line: 393, col: 26, offset: 8881},
val: "[",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 393, col: 30, offset: 8885},
name: "_",
},
&labeledExpr{
pos: position{line: 393, col: 32, offset: 8887},
label: "l",
expr: &ruleRefExpr{
pos: position{line: 393, col: 34, offset: 8889},
name: "IntList",
},
},
&ruleRefExpr{
pos: position{line: 393, col: 42, offset: 8897},
name: "_",
},
&litMatcher{
pos: position{line: 393, col: 44, offset: 8899},
val: "]",
ignoreCase: false,
},
},
},
},
},
{
name: "IntList",
pos: position{line: 400, col: 1, offset: 8995},
expr: &actionExpr{
pos: position{line: 400, col: 12, offset: 9006},
run: (*parser).callonIntList1,
expr: &seqExpr{
pos: position{line: 400, col: 12, offset: 9006},
exprs: []interface{}{
&labeledExpr{
pos: position{line: 400, col: 12, offset: 9006},
label: "m",
expr: &ruleRefExpr{
pos: position{line: 400, col: 14, offset: 9008},
name: "IntListMember",
},
},
&labeledExpr{
pos: position{line: 400, col: 28, offset: 9022},
label: "ms",
expr: &zeroOrMoreExpr{
pos: position{line: 400, col: 31, offset: 9025},
expr: &seqExpr{
pos: position{line: 400, col: 32, offset: 9026},
exprs: []interface{}{
&ruleRefExpr{
pos: position{line: 400, col: 32, offset: 9026},
name: "_",
},
&litMatcher{
pos: position{line: 400, col: 34, offset: 9028},
val: ",",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 400, col: 38, offset: 9032},
name: "_",
},
&ruleRefExpr{
pos: position{line: 400, col: 40, offset: 9034},
name: "IntListMember",
},
},
},
},
},
},
},
},
},
{
name: "IntListMember",
pos: position{line: 411, col: 1, offset: 9324},
expr: &actionExpr{
pos: position{line: 411, col: 18, offset: 9341},
run: (*parser).callonIntListMember1,
expr: &seqExpr{
pos: position{line: 411, col: 18, offset: 9341},
exprs: []interface{}{
&labeledExpr{
pos: position{line: 411, col: 18, offset: 9341},
label: "lo",
expr: &ruleRefExpr{
pos: position{line: 411, col: 21, offset: 9344},
name: "Integer",
},
},
&labeledExpr{
pos: position{line: 411, col: 29, offset: 9352},
label: "hi",
expr: &zeroOrOneExpr{
pos: position{line: 411, col: 32, offset: 9355},
expr: &seqExpr{
pos: position{line: 411, col: 34, offset: 9357},
exprs: []interface{}{
&ruleRefExpr{
pos: position{line: 411, col: 34, offset: 9357},
name: "_",
},
&litMatcher{
pos: position{line: 411, col: 36, offset: 9359},
val: "..",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 411, col: 41, offset: 9364},
name: "_",
},
&ruleRefExpr{
pos: position{line: 411, col: 43, offset: 9366},
name: "Integer",
},
},
},
},
},
},
},
},
},
{
name: "Integer",
pos: position{line: 424, col: 1, offset: 9571},
expr: &actionExpr{
pos: position{line: 424, col: 12, offset: 9582},
run: (*parser).callonInteger1,
expr: &labeledExpr{
pos: position{line: 424, col: 12, offset: 9582},
label: "i",
expr: &choiceExpr{
pos: position{line: 424, col: 15, offset: 9585},
alternatives: []interface{}{
&ruleRefExpr{
pos: position{line: 424, col: 15, offset: 9585},
name: "IntegerConstRef",
},
&ruleRefExpr{
pos: position{line: 424, col: 33, offset: 9603},
name: "IntegerLiteral",
},
},
},
},
},
},
{
name: "IntegerConstRef",
pos: position{line: 428, col: 1, offset: 9640},
expr: &actionExpr{
pos: position{line: 428, col: 20, offset: 9659},
run: (*parser).callonIntegerConstRef1,
expr: &labeledExpr{
pos: position{line: 428, col: 20, offset: 9659},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 428, col: 22, offset: 9661},
name: "ConstIdentifier",
},
},
},
},
{
name: "IntegerLiteral",
pos: position{line: 432, col: 1, offset: 9735},
expr: &actionExpr{
pos: position{line: 432, col: 19, offset: 9753},
run: (*parser).callonIntegerLiteral1,
expr: &labeledExpr{
pos: position{line: 432, col: 19, offset: 9753},
label: "v",
expr: &ruleRefExpr{
pos: position{line: 432, col: 21, offset: 9755},
name: "IntLiteral",
},
},
},
},
{
name: "IDRef",
pos: position{line: 441, col: 1, offset: 9879},
expr: &actionExpr{
pos: position{line: 441, col: 10, offset: 9888},
run: (*parser).callonIDRef1,
expr: &seqExpr{
pos: position{line: 441, col: 10, offset: 9888},
exprs: []interface{}{
&labeledExpr{
pos: position{line: 441, col: 10, offset: 9888},
label: "s",
expr: &zeroOrOneExpr{
pos: position{line: 441, col: 12, offset: 9890},
expr: &seqExpr{
pos: position{line: 441, col: 13, offset: 9891},
exprs: []interface{}{
&ruleRefExpr{
pos: position{line: 441, col: 13, offset: 9891},
name: "Identifier",
},
&litMatcher{
pos: position{line: 441, col: 24, offset: 9902},
val: ".",
ignoreCase: false,
},
},
},
},
},
&labeledExpr{
pos: position{line: 441, col: 30, offset: 9908},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 441, col: 32, offset: 9910},
name: "Identifier",
},
},
},
},
},
},
{
name: "IdentifierList",
pos: position{line: 451, col: 1, offset: 10051},
expr: &actionExpr{
pos: position{line: 451, col: 19, offset: 10069},
run: (*parser).callonIdentifierList1,
expr: &seqExpr{
pos: position{line: 451, col: 19, offset: 10069},
exprs: []interface{}{
&labeledExpr{
pos: position{line: 451, col: 19, offset: 10069},
label: "n",
expr: &ruleRefExpr{
pos: position{line: 451, col: 21, offset: 10071},
name: "Identifier",
},
},
&labeledExpr{
pos: position{line: 451, col: 32, offset: 10082},
label: "ns",
expr: &zeroOrMoreExpr{
pos: position{line: 451, col: 35, offset: 10085},
expr: &seqExpr{
pos: position{line: 451, col: 36, offset: 10086},
exprs: []interface{}{
&ruleRefExpr{
pos: position{line: 451, col: 36, offset: 10086},
name: "_",
},
&litMatcher{
pos: position{line: 451, col: 38, offset: 10088},
val: ",",
ignoreCase: false,
},
&ruleRefExpr{
pos: position{line: 451, col: 42, offset: 10092},
name: "_",
},
&ruleRefExpr{
pos: position{line: 451, col: 44, offset: 10094},
name: "Identifier",
},
},
},
},
},
},
},
},
},
{
name: "Identifier",
pos: position{line: 459, col: 1, offset: 10258},
expr: &actionExpr{
pos: position{line: 459, col: 15, offset: 10272},
run: (*parser).callonIdentifier1,
expr: &seqExpr{
pos: position{line: 459, col: 15, offset: 10272},
exprs: []interface{}{
&charClassMatcher{
pos: position{line: 459, col: 15, offset: 10272},
val: "[a-zA-Z_]",
chars: []rune{'_'},
ranges: []rune{'a', 'z', 'A', 'Z'},
ignoreCase: false,
inverted: false,
},
&zeroOrMoreExpr{
pos: position{line: 459, col: 25, offset: 10282},
expr: &charClassMatcher{
pos: position{line: 459, col: 25, offset: 10282},
val: "[a-zA-Z0-9_]",
chars: []rune{'_'},
ranges: []rune{'a', 'z', 'A', 'Z', '0', '9'},
ignoreCase: false,
inverted: false,
},
},
},
},
},
},
{
name: "ConstIdentifier",
pos: position{line: 463, col: 1, offset: 10330},
expr: &actionExpr{
pos: position{line: 463, col: 20, offset: 10349},
run: (*parser).callonConstIdentifier1,
expr: &seqExpr{
pos: position{line: 463, col: 20, offset: 10349},
exprs: []interface{}{
&charClassMatcher{
pos: position{line: 463, col: 20, offset: 10349},
val: "[A-Z_]",
chars: []rune{'_'},
ranges: []rune{'A', 'Z'},
ignoreCase: false,
inverted: false,
},
&zeroOrMoreExpr{
pos: position{line: 463, col: 27, offset: 10356},
expr: &charClassMatcher{
pos: position{line: 463, col: 27, offset: 10356},
val: "[A-Z0-9_]",
chars: []rune{'_'},
ranges: []rune{'A', 'Z', '0', '9'},
ignoreCase: false,
inverted: false,
},
},
},
},
},
},
{
name: "IntLiteral",
pos: position{line: 469, col: 1, offset: 10416},
expr: &actionExpr{
pos: position{line: 469, col: 15, offset: 10430},
run: (*parser).callonIntLiteral1,
expr: &choiceExpr{
pos: position{line: 469, col: 16, offset: 10431},
alternatives: []interface{}{
&ruleRefExpr{
pos: position{line: 469, col: 16, offset: 10431},
name: "HexLiteral",
},
&ruleRefExpr{
pos: position{line: 469, col: 29, offset: 10444},
name: "OctalLiteral",
},
&ruleRefExpr{
pos: position{line: 469, col: 44, offset: 10459},
name: "DecimalLiteral",
},
},
},
},
},
{
name: "DecimalLiteral",
pos: position{line: 473, col: 1, offset: 10529},
expr: &oneOrMoreExpr{
pos: position{line: 473, col: 19, offset: 10547},
expr: &charClassMatcher{
pos: position{line: 473, col: 19, offset: 10547},
val: "[0-9]",
ranges: []rune{'0', '9'},
ignoreCase: false,
inverted: false,
},
},
},
{
name: "HexLiteral",
pos: position{line: 475, col: 1, offset: 10555},
expr: &seqExpr{
pos: position{line: 475, col: 15, offset: 10569},
exprs: []interface{}{
&litMatcher{
pos: position{line: 475, col: 15, offset: 10569},
val: "0x",
ignoreCase: false,
},
&oneOrMoreExpr{
pos: position{line: 475, col: 20, offset: 10574},
expr: &charClassMatcher{
pos: position{line: 475, col: 20, offset: 10574},
val: "[0-9a-fA-F]",
ranges: []rune{'0', '9', 'a', 'f', 'A', 'F'},
ignoreCase: false,
inverted: false,
},
},
},
},
},
{
name: "OctalLiteral",
pos: position{line: 477, col: 1, offset: 10588},
expr: &seqExpr{
pos: position{line: 477, col: 17, offset: 10604},
exprs: []interface{}{
&litMatcher{
pos: position{line: 477, col: 17, offset: 10604},
val: "0",
ignoreCase: false,
},
&oneOrMoreExpr{
pos: position{line: 477, col: 21, offset: 10608},
expr: &charClassMatcher{
pos: position{line: 477, col: 21, offset: 10608},
val: "[0-7]",
ranges: []rune{'0', '7'},
ignoreCase: false,
inverted: false,
},
},
},
},
},
{
name: "SourceChar",
pos: position{line: 481, col: 1, offset: 10629},
expr: &anyMatcher{
line: 481, col: 15, offset: 10643,
},
},
{
name: "Comment",
pos: position{line: 482, col: 1, offset: 10645},
expr: &choiceExpr{
pos: position{line: 482, col: 12, offset: 10656},
alternatives: []interface{}{
&ruleRefExpr{
pos: position{line: 482, col: 12, offset: 10656},
name: "MultiLineComment",
},
&ruleRefExpr{
pos: position{line: 482, col: 31, offset: 10675},
name: "SingleLineComment",
},
},
},
},
{
name: "MultiLineComment",
pos: position{line: 483, col: 1, offset: 10693},
expr: &seqExpr{
pos: position{line: 483, col: 21, offset: 10713},
exprs: []interface{}{
&litMatcher{
pos: position{line: 483, col: 21, offset: 10713},
val: "/*",
ignoreCase: false,
},
&zeroOrMoreExpr{
pos: position{line: 483, col: 26, offset: 10718},
expr: &seqExpr{
pos: position{line: 483, col: 28, offset: 10720},
exprs: []interface{}{
¬Expr{
pos: position{line: 483, col: 28, offset: 10720},
expr: &litMatcher{
pos: position{line: 483, col: 29, offset: 10721},
val: "*/",
ignoreCase: false,
},
},
&ruleRefExpr{
pos: position{line: 483, col: 34, offset: 10726},
name: "SourceChar",
},
},
},
},
&litMatcher{
pos: position{line: 483, col: 48, offset: 10740},
val: "*/",
ignoreCase: false,
},
},
},
},
{
name: "SingleLineComment",
pos: position{line: 484, col: 1, offset: 10745},
expr: &seqExpr{
pos: position{line: 484, col: 22, offset: 10766},
exprs: []interface{}{
&litMatcher{
pos: position{line: 484, col: 22, offset: 10766},
val: "//",
ignoreCase: false,
},
&zeroOrMoreExpr{
pos: position{line: 484, col: 27, offset: 10771},
expr: &seqExpr{
pos: position{line: 484, col: 29, offset: 10773},
exprs: []interface{}{
¬Expr{
pos: position{line: 484, col: 29, offset: 10773},
expr: &ruleRefExpr{
pos: position{line: 484, col: 30, offset: 10774},
name: "EOL",
},
},
&ruleRefExpr{
pos: position{line: 484, col: 34, offset: 10778},
name: "SourceChar",
},
},
},
},
},
},
},
{
name: "__",
pos: position{line: 488, col: 1, offset: 10815},
expr: &oneOrMoreExpr{
pos: position{line: 488, col: 7, offset: 10821},
expr: &ruleRefExpr{
pos: position{line: 488, col: 7, offset: 10821},
name: "Skip",
},
},
},
{
name: "_",
pos: position{line: 489, col: 1, offset: 10827},
expr: &zeroOrMoreExpr{
pos: position{line: 489, col: 6, offset: 10832},
expr: &ruleRefExpr{
pos: position{line: 489, col: 6, offset: 10832},
name: "Skip",
},
},
},
{
name: "Skip",
pos: position{line: 491, col: 1, offset: 10839},
expr: &choiceExpr{
pos: position{line: 491, col: 10, offset: 10848},
alternatives: []interface{}{
&ruleRefExpr{
pos: position{line: 491, col: 10, offset: 10848},
name: "Whitespace",
},
&ruleRefExpr{
pos: position{line: 491, col: 23, offset: 10861},
name: "EOL",
},
&ruleRefExpr{
pos: position{line: 491, col: 29, offset: 10867},
name: "Comment",
},
},
},
},
{
name: "Whitespace",
pos: position{line: 493, col: 1, offset: 10877},
expr: &charClassMatcher{
pos: position{line: 493, col: 15, offset: 10891},
val: "[ \\t\\r]",
chars: []rune{' ', '\t', '\r'},
ignoreCase: false,
inverted: false,
},
},
{
name: "EOL",
pos: position{line: 494, col: 1, offset: 10899},
expr: &litMatcher{
pos: position{line: 494, col: 8, offset: 10906},
val: "\n",
ignoreCase: false,
},
},
{
name: "EOF",
pos: position{line: 495, col: 1, offset: 10911},
expr: ¬Expr{
pos: position{line: 495, col: 8, offset: 10918},
expr: &anyMatcher{
line: 495, col: 9, offset: 10919,
},
},
},
},
}
// onFile1 builds the *ast.File root node from the parsed declarations.
// It merges in "lingering" declarations (registered during the parse —
// presumably inline struct declarations; see onStructRef6) and sorts each
// declaration into its typed slice on the file node.
func (c *current) onFile1(ds interface{}) (interface{}, error) {
	f := &ast.File{}
	lingering := c.getLingeringDeclarations()
	decls := append(ds.([]interface{}), lingering...)
	for _, i := range decls {
		switch d := i.(type) {
		case *ast.Constant:
			f.Constants = append(f.Constants, d)
		case *ast.Context:
			f.Contexts = append(f.Contexts, d)
		case *ast.Struct:
			f.Structs = append(f.Structs, d)
		case *ast.Pragma:
			f.Pragmas = append(f.Pragmas, d)
		default:
			// Any declaration type not handled above is a programming error
			// in the grammar actions.
			return nil, errors.New("unknown declaration")
		}
	}
	return f, nil
}

// callonFile1 adapts the parser's current variable stack to onFile1.
// (Generated bridge: looks up labeled values by name on the top stack frame.)
func (p *parser) callonFile1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onFile1(stack["ds"])
}

// onDeclaration1 passes the labeled declaration value through unchanged.
func (c *current) onDeclaration1(d interface{}) (interface{}, error) {
	return d, nil
}

// callonDeclaration1 bridges the generated rule to onDeclaration1.
func (p *parser) callonDeclaration1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onDeclaration1(stack["d"])
}
// onConstDeclaration1 builds an *ast.Constant from the parsed name (string)
// and value (int64).
func (c *current) onConstDeclaration1(n, v interface{}) (interface{}, error) {
	return &ast.Constant{
		Name: n.(string),
		Value: v.(int64),
	}, nil
}

// callonConstDeclaration1 bridges the generated rule to onConstDeclaration1.
func (p *parser) callonConstDeclaration1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onConstDeclaration1(stack["n"], stack["v"])
}

// onContextDeclaration1 builds an *ast.Context from the parsed name and the
// slice of member fields (each element is a *ast.Field).
func (c *current) onContextDeclaration1(n, fs interface{}) (interface{}, error) {
	f := make([]*ast.Field, 0)
	for _, i := range fs.([]interface{}) {
		f = append(f, i.(*ast.Field))
	}
	return &ast.Context{
		Name: n.(string),
		Members: f,
	}, nil
}

// callonContextDeclaration1 bridges the generated rule to onContextDeclaration1.
func (p *parser) callonContextDeclaration1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onContextDeclaration1(stack["n"], stack["fs"])
}

// onContextMember1 builds a context member field: an integer-typed field with
// the parsed name.
func (c *current) onContextMember1(t, n interface{}) (interface{}, error) {
	return &ast.Field{
		Type: t.(*ast.IntType),
		Name: n.(string),
	}, nil
}

// callonContextMember1 bridges the generated rule to onContextMember1.
func (p *parser) callonContextMember1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onContextMember1(stack["t"], stack["n"])
}

// onStructDeclaration1 passes the parsed struct value through unchanged.
func (c *current) onStructDeclaration1(s interface{}) (interface{}, error) {
	return s, nil
}

// callonStructDeclaration1 bridges the generated rule to onStructDeclaration1.
func (p *parser) callonStructDeclaration1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onStructDeclaration1(stack["s"])
}
// onStructDecl1 builds an *ast.Struct from its name, optional context refs,
// member list, and optional struct ending (which, when present, is appended
// as a final member).
func (c *current) onStructDecl1(n, ctx, ms, e interface{}) (interface{}, error) {
	m := make([]ast.Member, 0)
	for _, i := range ms.([]interface{}) {
		m = append(m, i.(ast.Member))
	}
	if e != nil {
		m = append(m, e.(ast.Member))
	}
	s := &ast.Struct{
		Name: n.(string),
		Members: m,
	}
	if ctx != nil {
		// ctx is the raw sequence result of the optional ContextRefs match;
		// index 1 is the []string of context names.
		s.Contexts = ctx.([]interface{})[1].([]string)
	}
	return s, nil
}

// callonStructDecl1 bridges the generated rule to onStructDecl1.
func (p *parser) callonStructDecl1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onStructDecl1(stack["n"], stack["ctx"], stack["ms"], stack["e"])
}

// onStructIdentifier1 passes the parsed identifier through unchanged.
func (c *current) onStructIdentifier1(n interface{}) (interface{}, error) {
	return n, nil
}

// callonStructIdentifier1 bridges the generated rule to onStructIdentifier1.
func (p *parser) callonStructIdentifier1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onStructIdentifier1(stack["n"])
}

// onExternDeclaration1 builds an *ast.Struct with a name but no members,
// representing an externally-defined struct.
func (c *current) onExternDeclaration1(n, ctx interface{}) (interface{}, error) {
	e := &ast.Struct{
		Name: n.(string),
	}
	if ctx != nil {
		// Same optional-sequence shape as in onStructDecl1: index 1 holds
		// the []string of context names.
		e.Contexts = ctx.([]interface{})[1].([]string)
	}
	return e, nil
}

// callonExternDeclaration1 bridges the generated rule to onExternDeclaration1.
func (p *parser) callonExternDeclaration1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onExternDeclaration1(stack["n"], stack["ctx"])
}

// onPragmaDeclaration1 builds an *ast.Pragma from its type name and options.
func (c *current) onPragmaDeclaration1(n, opts interface{}) (interface{}, error) {
	return &ast.Pragma{
		Type: n.(string),
		Options: opts.([]string),
	}, nil
}

// callonPragmaDeclaration1 bridges the generated rule to onPragmaDeclaration1.
func (p *parser) callonPragmaDeclaration1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onPragmaDeclaration1(stack["n"], stack["opts"])
}

// onContextRefs1 passes the parsed context-name list through unchanged.
func (c *current) onContextRefs1(ns interface{}) (interface{}, error) {
	return ns, nil
}

// callonContextRefs1 bridges the generated rule to onContextRefs1.
func (p *parser) callonContextRefs1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onContextRefs1(stack["ns"])
}
// onStructMember1 passes the parsed member through unchanged.
func (c *current) onStructMember1(m interface{}) (interface{}, error) {
	return m, nil
}

// callonStructMember1 bridges the generated rule to onStructMember1.
func (p *parser) callonStructMember1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onStructMember1(stack["m"])
}

// onStructEnding1 passes the parsed struct-ending member through unchanged.
func (c *current) onStructEnding1(e interface{}) (interface{}, error) {
	return e, nil
}

// callonStructEnding1 bridges the generated rule to onStructEnding1.
func (p *parser) callonStructEnding1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onStructEnding1(stack["e"])
}

// onStructEOS1 returns the end-of-struct marker node.
func (c *current) onStructEOS1() (interface{}, error) {
	return &ast.EOS{}, nil
}

// callonStructEOS1 bridges the generated rule to onStructEOS1.
func (p *parser) callonStructEOS1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onStructEOS1()
}

// onSMArray1 passes the parsed array member through unchanged.
func (c *current) onSMArray1(a interface{}) (interface{}, error) {
	return a, nil
}

// callonSMArray1 bridges the generated rule to onSMArray1.
func (p *parser) callonSMArray1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onSMArray1(stack["a"])
}

// onSMFixedArray1 builds a fixed-size array field from its base element
// type (b), field name (n), and constant size expression (s).
func (c *current) onSMFixedArray1(b, n, s interface{}) (interface{}, error) {
	return &ast.Field{
		Name: n.(string),
		Type: &ast.FixedArrayMember{
			Base: b.(ast.ArrayBase),
			Size: s.(ast.Integer),
		},
	}, nil
}

// callonSMFixedArray1 bridges the generated rule to onSMFixedArray1.
func (p *parser) callonSMFixedArray1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onSMFixedArray1(stack["b"], stack["n"], stack["s"])
}
// onSMVarArray1 builds a variable-length array field from its base element
// type (b), field name (n), and length constraint (l).
func (c *current) onSMVarArray1(b, n, l interface{}) (interface{}, error) {
	return &ast.Field{
		Name: n.(string),
		Type: &ast.VarArrayMember{
			Base: b.(ast.ArrayBase),
			Constraint: l.(ast.LengthConstraint),
		},
	}, nil
}

// callonSMVarArray1 bridges the generated rule to onSMVarArray1.
func (p *parser) callonSMVarArray1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onSMVarArray1(stack["b"], stack["n"], stack["l"])
}

// onLengthConstraint1 passes the parsed length constraint through unchanged.
func (c *current) onLengthConstraint1(l interface{}) (interface{}, error) {
	return l, nil
}

// callonLengthConstraint1 bridges the generated rule to onLengthConstraint1.
func (p *parser) callonLengthConstraint1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onLengthConstraint1(stack["l"])
}

// onLeftover1 wraps the parsed integer in an *ast.Leftover constraint.
func (c *current) onLeftover1(i interface{}) (interface{}, error) {
	return &ast.Leftover{Num: i.(ast.Integer)}, nil
}

// callonLeftover1 bridges the generated rule to onLeftover1.
func (p *parser) callonLeftover1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onLeftover1(stack["i"])
}

// onSMRemainder1 builds a var-array field with a nil constraint, meaning the
// field consumes the remainder of the input.
func (c *current) onSMRemainder1(b, n interface{}) (interface{}, error) {
	return &ast.Field{
		Name: n.(string),
		Type: &ast.VarArrayMember{
			Base: b.(ast.ArrayBase),
			Constraint: nil,
		},
	}, nil
}

// callonSMRemainder1 bridges the generated rule to onSMRemainder1.
func (p *parser) callonSMRemainder1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onSMRemainder1(stack["b"], stack["n"])
}

// onArrayBase1 passes the parsed array base type through unchanged.
func (c *current) onArrayBase1(t interface{}) (interface{}, error) {
	return t, nil
}

// callonArrayBase1 bridges the generated rule to onArrayBase1.
func (p *parser) callonArrayBase1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onArrayBase1(stack["t"])
}
// onSMInteger1 builds an integer-typed field, attaching the optional value
// constraint list (cst) to the IntType when present.
func (c *current) onSMInteger1(t, n, cst interface{}) (interface{}, error) {
	i := t.(*ast.IntType)
	if cst != nil {
		i.Constraint = cst.(*ast.IntegerList)
	}
	return &ast.Field{
		Name: n.(string),
		Type: i,
	}, nil
}

// callonSMInteger1 bridges the generated rule to onSMInteger1.
func (p *parser) callonSMInteger1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onSMInteger1(stack["t"], stack["n"], stack["cst"])
}

// onSMPosition1 builds a position-marker field (ast.Ptr type).
func (c *current) onSMPosition1(n interface{}) (interface{}, error) {
	return &ast.Field{
		Name: n.(string),
		Type: &ast.Ptr{},
	}, nil
}

// callonSMPosition1 bridges the generated rule to onSMPosition1.
func (p *parser) callonSMPosition1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onSMPosition1(stack["n"])
}

// onSMString1 builds a NUL-terminated string field.
func (c *current) onSMString1(n interface{}) (interface{}, error) {
	return &ast.Field{
		Name: n.(string),
		Type: &ast.NulTermString{},
	}, nil
}

// callonSMString1 bridges the generated rule to onSMString1.
func (p *parser) callonSMString1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onSMString1(stack["n"])
}

// onSMStruct1 builds a field whose type is a reference to another struct.
func (c *current) onSMStruct1(s, n interface{}) (interface{}, error) {
	return &ast.Field{
		Name: n.(string),
		Type: s.(*ast.StructRef),
	}, nil
}

// callonSMStruct1 bridges the generated rule to onSMStruct1.
func (p *parser) callonSMStruct1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onSMStruct1(stack["s"], stack["n"])
}
// onSMUnion1 builds a tagged-union member from its name (n), tag reference
// (t), optional length constraint (l), and list of union cases (cs).
func (c *current) onSMUnion1(n, t, l, cs interface{}) (interface{}, error) {
	cases := make([]*ast.UnionCase, 0)
	for _, i := range cs.([]interface{}) {
		cases = append(cases, i.(*ast.UnionCase))
	}
	u := &ast.UnionMember{
		Name: n.(string),
		Tag: t.(*ast.IDRef),
		Cases: cases,
	}
	if l != nil {
		u.Length = l.(ast.LengthConstraint)
	}
	return u, nil
}

// callonSMUnion1 bridges the generated rule to onSMUnion1.
func (p *parser) callonSMUnion1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onSMUnion1(stack["n"], stack["t"], stack["l"], stack["cs"])
}

// onUnionLength1 passes the parsed union length constraint through unchanged.
func (c *current) onUnionLength1(l interface{}) (interface{}, error) {
	return l, nil
}

// callonUnionLength1 bridges the generated rule to onUnionLength1.
func (p *parser) callonUnionLength1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onUnionLength1(stack["l"])
}

// onUnionMember1 builds a single union case; both the case value list (cse)
// and the member list (fs) are optional and left zero-valued when absent.
func (c *current) onUnionMember1(cse, fs interface{}) (interface{}, error) {
	uc := &ast.UnionCase{}
	if fs != nil {
		uc.Members = fs.([]ast.Member)
	}
	if cse != nil {
		uc.Case = cse.(*ast.IntegerList)
	}
	return uc, nil
}

// callonUnionMember1 bridges the generated rule to onUnionMember1.
func (p *parser) callonUnionMember1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onUnionMember1(stack["cse"], stack["fs"])
}
// onUnionCase2 passes the parsed case value list through unchanged.
func (c *current) onUnionCase2(l interface{}) (interface{}, error) {
	return l, nil
}

// callonUnionCase2 bridges the generated rule to onUnionCase2.
func (p *parser) callonUnionCase2() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onUnionCase2(stack["l"])
}

// onUnionCase5 returns nil for the default (valueless) union case.
func (c *current) onUnionCase5() (interface{}, error) {
	return nil, nil
}

// callonUnionCase5 bridges the generated rule to onUnionCase5.
func (p *parser) callonUnionCase5() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onUnionCase5()
}

// onUnionBody2 returns nil for an empty union body.
func (c *current) onUnionBody2() (interface{}, error) {
	return nil, nil
}

// callonUnionBody2 bridges the generated rule to onUnionBody2.
func (p *parser) callonUnionBody2() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onUnionBody2()
}

// onUnionBody6 returns a body holding the Fail marker ("fail;" branch).
func (c *current) onUnionBody6() (interface{}, error) {
	return []ast.Member{&ast.Fail{}}, nil
}

// callonUnionBody6 bridges the generated rule to onUnionBody6.
func (p *parser) callonUnionBody6() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onUnionBody6()
}

// onUnionBody11 returns a body holding the Ignore marker ("ignore;" branch).
func (c *current) onUnionBody11() (interface{}, error) {
	return []ast.Member{&ast.Ignore{}}, nil
}

// callonUnionBody11 bridges the generated rule to onUnionBody11.
func (p *parser) callonUnionBody11() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onUnionBody11()
}

// onUnionBody16 passes the parsed union field list through unchanged.
func (c *current) onUnionBody16(fs interface{}) (interface{}, error) {
	return fs, nil
}

// callonUnionBody16 bridges the generated rule to onUnionBody16.
func (p *parser) callonUnionBody16() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onUnionBody16(stack["fs"])
}
// onUnionFields1 collects the union's member fields and appends the optional
// extent spec (e) as a trailing member when present.
func (c *current) onUnionFields1(ms, e interface{}) (interface{}, error) {
	fs := []ast.Member{}
	for _, i := range ms.([]interface{}) {
		fs = append(fs, i.(ast.Member))
	}
	if e != nil {
		fs = append(fs, e)
	}
	return fs, nil
}

// callonUnionFields1 bridges the generated rule to onUnionFields1.
func (p *parser) callonUnionFields1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onUnionFields1(stack["ms"], stack["e"])
}

// onUnionField1 passes the parsed field through unchanged.
func (c *current) onUnionField1(m interface{}) (interface{}, error) {
	return m, nil
}

// callonUnionField1 bridges the generated rule to onUnionField1.
func (p *parser) callonUnionField1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onUnionField1(stack["m"])
}

// onExtentSpec2 maps the "...;" extent form to the Ignore marker.
func (c *current) onExtentSpec2() (interface{}, error) {
	return &ast.Ignore{}, nil
}

// callonExtentSpec2 bridges the generated rule to onExtentSpec2.
func (p *parser) callonExtentSpec2() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onExtentSpec2()
}

// onExtentSpec7 passes the parsed remainder member through unchanged.
func (c *current) onExtentSpec7(r interface{}) (interface{}, error) {
	return r, nil
}

// callonExtentSpec7 bridges the generated rule to onExtentSpec7.
func (p *parser) callonExtentSpec7() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onExtentSpec7(stack["r"])
}
// onStructRef6 is a state action: it registers an inline struct declaration
// as "lingering" — presumably so onFile1 can later hoist it to file level
// (see getLingeringDeclarations); confirm against the current's implementation.
func (c *current) onStructRef6(s interface{}) error {
	c.addLingeringDeclaration(s)
	return nil
}

// callonStructRef6 bridges the generated state-change expression to onStructRef6.
func (p *parser) callonStructRef6() error {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onStructRef6(stack["s"])
}

// onStructRef2 converts an inline struct declaration into a by-name reference.
func (c *current) onStructRef2(s interface{}) (interface{}, error) {
	return &ast.StructRef{Name: s.(*ast.Struct).Name}, nil
}

// callonStructRef2 bridges the generated rule to onStructRef2.
func (p *parser) callonStructRef2() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onStructRef2(stack["s"])
}

// onStructRef7 builds a struct reference from a bare identifier.
func (c *current) onStructRef7(n interface{}) (interface{}, error) {
	return &ast.StructRef{Name: n.(string)}, nil
}

// callonStructRef7 bridges the generated rule to onStructRef7.
func (p *parser) callonStructRef7() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onStructRef7(stack["n"])
}

// onCharType1 returns the char base type node.
func (c *current) onCharType1() (interface{}, error) {
	return &ast.CharType{}, nil
}

// callonCharType1 bridges the generated rule to onCharType1.
func (p *parser) callonCharType1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onCharType1()
}

// onIntType1 parses the matched bit-width digits ("8"/"16"/"32"/"64" per the
// grammar) into an *ast.IntType. Any Atoi error is propagated alongside the
// partially-built node.
func (c *current) onIntType1(b interface{}) (interface{}, error) {
	s, err := strconv.Atoi(string(b.([]byte)))
	return &ast.IntType{Size: uint(s)}, err
}

// callonIntType1 bridges the generated rule to onIntType1.
func (p *parser) callonIntType1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onIntType1(stack["b"])
}

// onIntConstraint1 passes the parsed integer list through unchanged.
func (c *current) onIntConstraint1(l interface{}) (interface{}, error) {
	return l, nil
}

// callonIntConstraint1 bridges the generated rule to onIntConstraint1.
func (p *parser) callonIntConstraint1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onIntConstraint1(stack["l"])
}
// onIntList1 builds an *ast.IntegerList from the first range (m) plus each
// trailing ", range" repetition in ms.
func (c *current) onIntList1(m, ms interface{}) (interface{}, error) {
	r := []*ast.IntegerRange{m.(*ast.IntegerRange)}
	for _, i := range ms.([]interface{}) {
		// Each repetition is the raw seq (_ "," _ IntListMember); index 3 is
		// the range value.
		r = append(r, i.([]interface{})[3].(*ast.IntegerRange))
	}
	return &ast.IntegerList{Ranges: r}, nil
}

// callonIntList1 bridges the generated rule to onIntList1.
func (p *parser) callonIntList1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onIntList1(stack["m"], stack["ms"])
}

// onIntListMember1 builds an *ast.IntegerRange from the low bound and the
// optional ".. high" suffix; High stays nil for a single-value range.
func (c *current) onIntListMember1(lo, hi interface{}) (interface{}, error) {
	r := &ast.IntegerRange{
		Low: lo.(ast.Integer),
	}
	if hi != nil {
		// hi is the raw seq (_ ".." _ Integer); index 3 is the upper bound.
		r.High = hi.([]interface{})[3].(ast.Integer)
	}
	return r, nil
}

// callonIntListMember1 bridges the generated rule to onIntListMember1.
func (p *parser) callonIntListMember1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onIntListMember1(stack["lo"], stack["hi"])
}

// onInteger1 passes the parsed integer node through unchanged.
func (c *current) onInteger1(i interface{}) (interface{}, error) {
	return i, nil
}

// callonInteger1 bridges the generated rule to onInteger1.
func (p *parser) callonInteger1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onInteger1(stack["i"])
}

// onIntegerConstRef1 builds a named constant reference.
func (c *current) onIntegerConstRef1(n interface{}) (interface{}, error) {
	return &ast.IntegerConstRef{Name: n.(string)}, nil
}

// callonIntegerConstRef1 bridges the generated rule to onIntegerConstRef1.
func (p *parser) callonIntegerConstRef1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onIntegerConstRef1(stack["n"])
}

// onIntegerLiteral1 wraps the parsed int64 in an *ast.IntegerLiteral.
func (c *current) onIntegerLiteral1(v interface{}) (interface{}, error) {
	return &ast.IntegerLiteral{Value: v.(int64)}, nil
}

// callonIntegerLiteral1 bridges the generated rule to onIntegerLiteral1.
func (p *parser) callonIntegerLiteral1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onIntegerLiteral1(stack["v"])
}
// onIDRef1 builds an *ast.IDRef with an optional scope qualifier taken from
// the "Identifier ." prefix when it matched.
func (c *current) onIDRef1(s, n interface{}) (interface{}, error) {
	r := &ast.IDRef{
		Name: n.(string),
	}
	if s != nil {
		// s is the raw seq (Identifier "."); index 0 is the scope name.
		r.Scope = s.([]interface{})[0].(string)
	}
	return r, nil
}

// callonIDRef1 bridges the generated rule to onIDRef1.
func (p *parser) callonIDRef1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onIDRef1(stack["s"], stack["n"])
}

// onIdentifierList1 flattens the first identifier plus each trailing
// ", identifier" repetition into a []string.
func (c *current) onIdentifierList1(n, ns interface{}) (interface{}, error) {
	ids := []string{n.(string)}
	for _, i := range ns.([]interface{}) {
		// Each repetition is the raw seq (_ "," _ Identifier); index 3 is
		// the identifier.
		ids = append(ids, i.([]interface{})[3].(string))
	}
	return ids, nil
}

// callonIdentifierList1 bridges the generated rule to onIdentifierList1.
func (p *parser) callonIdentifierList1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onIdentifierList1(stack["n"], stack["ns"])
}

// onIdentifier1 returns the matched source text as the identifier.
func (c *current) onIdentifier1() (interface{}, error) {
	return string(c.text), nil
}

// callonIdentifier1 bridges the generated rule to onIdentifier1.
func (p *parser) callonIdentifier1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onIdentifier1()
}

// onConstIdentifier1 returns the matched source text as the constant name.
func (c *current) onConstIdentifier1() (interface{}, error) {
	return string(c.text), nil
}

// callonConstIdentifier1 bridges the generated rule to onConstIdentifier1.
func (p *parser) callonConstIdentifier1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onConstIdentifier1()
}

// onIntLiteral1 parses the matched text as an int64; base 0 auto-detects the
// 0x hex and leading-0 octal forms matched by the grammar.
func (c *current) onIntLiteral1() (interface{}, error) {
	return strconv.ParseInt(string(c.text), 0, 64)
}

// callonIntLiteral1 bridges the generated rule to onIntLiteral1.
func (p *parser) callonIntLiteral1() (interface{}, error) {
	stack := p.vstack[len(p.vstack)-1]
	_ = stack
	return p.cur.onIntLiteral1()
}
var (
	// errNoRule is returned when the grammar to parse has no rule.
	errNoRule = errors.New("grammar has no rule")
	// errInvalidEntrypoint is returned when the specified entrypoint rule
	// does not exist.
	errInvalidEntrypoint = errors.New("invalid entrypoint")
	// errInvalidEncoding is returned when the source is not properly
	// utf8-encoded.
	errInvalidEncoding = errors.New("invalid encoding")
	// errMaxExprCnt is used to signal that the maximum number of
	// expressions have been parsed.
	errMaxExprCnt = errors.New("max number of expressions parsed")
)
// Option is a function that can set an option on the parser. It returns
// the previous setting as an Option.
type Option func(*parser) Option
// MaxExpressions creates an Option that aborts parsing once the given
// number of expressions has been evaluated. A value of 0 (the default)
// means no limit, letting the parser run for as many steps as needed.
func MaxExpressions(maxExprCnt uint64) Option {
	return func(p *parser) Option {
		prev := p.maxExprCnt
		p.maxExprCnt = maxExprCnt
		return MaxExpressions(prev)
	}
}
// Entrypoint creates an Option that selects the rule used to start
// parsing. The rule name must have been listed in -alternate-entrypoints
// when generating with -optimize-grammar, or it may have been optimized
// away. An empty string selects the grammar's first rule, which is also
// the default behavior.
func Entrypoint(ruleName string) Option {
	return func(p *parser) Option {
		prev := p.entrypoint
		if ruleName == "" {
			p.entrypoint = g.rules[0].name
		} else {
			p.entrypoint = ruleName
		}
		return Entrypoint(prev)
	}
}
// Statistics creates an Option that installs a user-provided Stats
// struct on the parser so the caller can inspect counters after parsing.
// choiceNoMatch is the key used for the "no match" counter.
//
// Example usage:
//
//     input := "input"
//     stats := Stats{}
//     _, err := Parse("input-file", []byte(input), Statistics(&stats, "no match"))
//     if err != nil {
//         log.Panicln(err)
//     }
//     b, err := json.MarshalIndent(stats.ChoiceAltCnt, "", "  ")
//     if err != nil {
//         log.Panicln(err)
//     }
//     fmt.Println(string(b))
//
func Statistics(stats *Stats, choiceNoMatch string) Option {
	return func(p *parser) Option {
		prevStats, prevNoMatch := p.Stats, p.choiceNoMatch
		p.Stats = stats
		p.choiceNoMatch = choiceNoMatch
		if p.Stats.ChoiceAltCnt == nil {
			// lazily allocate the per-choice counter map
			p.Stats.ChoiceAltCnt = make(map[string]map[string]int)
		}
		return Statistics(prevStats, prevNoMatch)
	}
}
// Debug creates an Option that toggles debug tracing. When enabled,
// the parser prints tracing information to stdout while it runs.
//
// Disabled by default.
func Debug(b bool) Option {
	return func(p *parser) Option {
		prev := p.debug
		p.debug = b
		return Debug(prev)
	}
}
// Memoize creates an Option that toggles packrat memoization. When
// enabled, every expression result is cached so each expression is
// evaluated at most once per input position, guaranteeing linear parse
// time even for pathological inputs — at the cost of extra memory and
// slower typical-case performance.
//
// Disabled by default.
func Memoize(b bool) Option {
	return func(p *parser) Option {
		prev := p.memoize
		p.memoize = b
		return Memoize(prev)
	}
}
// AllowInvalidUTF8 creates an Option that tolerates invalid UTF-8
// input. Each invalid byte is treated as a utf8.RuneError (U+FFFD) by
// character-class matchers and matches the any-matcher. The returned
// match value, c.text and c.offset are NOT affected.
//
// Disabled by default.
func AllowInvalidUTF8(b bool) Option {
	return func(p *parser) Option {
		prev := p.allowInvalidUTF8
		p.allowInvalidUTF8 = b
		return AllowInvalidUTF8(prev)
	}
}
// Recover creates an Option that toggles panic recovery. When enabled,
// panics raised while parsing are caught and converted into errors.
// Disabling it can help debugging, since the full stack trace is then
// visible.
//
// Enabled by default.
func Recover(b bool) Option {
	return func(p *parser) Option {
		prev := p.recover
		p.recover = b
		return Recover(prev)
	}
}
// GlobalStore creates an Option that seeds the parser's globalStore
// with the given key/value pair. The globalStore is never rolled back
// by backtracking.
func GlobalStore(key string, value interface{}) Option {
	return func(p *parser) Option {
		prev := p.cur.globalStore[key]
		p.cur.globalStore[key] = value
		return GlobalStore(key, prev)
	}
}
// InitState creates an Option that seeds the parser's backtracked
// "state" store with the given key/value pair.
func InitState(key string, value interface{}) Option {
	return func(p *parser) Option {
		prev := p.cur.state[key]
		p.cur.state[key] = value
		return InitState(key, prev)
	}
}
// ParseFile parses the file identified by filename.
//
// If both parsing and closing the file fail, the parse error is
// returned; previously the deferred Close error silently replaced it.
// A Close failure after a successful parse is still reported.
func ParseFile(filename string, opts ...Option) (i interface{}, err error) {
	f, err := os.Open(filename)
	if err != nil {
		return nil, err
	}
	defer func() {
		// do not let a close error clobber an earlier parse error
		if closeErr := f.Close(); closeErr != nil && err == nil {
			err = closeErr
		}
	}()
	return ParseReader(filename, f, opts...)
}
// ParseReader parses the data from r using filename as information in the
// error messages.
func ParseReader(filename string, r io.Reader, opts ...Option) (interface{}, error) {
b, err := ioutil.ReadAll(r)
if err != nil {
return nil, err
}
return Parse(filename, b, opts...)
}
// Parse parses the data from b using filename as information in the
// error messages.
func Parse(filename string, b []byte, opts ...Option) (interface{}, error) {
return newParser(filename, b, opts...).parse(g)
}
// position records a position in the text.
type position struct {
line, col, offset int
}
func (p position) String() string {
return strconv.Itoa(p.line) + ":" + strconv.Itoa(p.col) + " [" + strconv.Itoa(p.offset) + "]"
}
// savepoint stores all state required to go back to this point in the
// parser.
type savepoint struct {
position
rn rune
w int
}
type current struct {
pos position // start position of the match
text []byte // raw text of the match
// state is a store for arbitrary key,value pairs that the user wants to be
// tied to the backtracking of the parser.
// This is always rolled back if a parsing rule fails.
state storeDict
// globalStore is a general store for the user to store arbitrary key-value
// pairs that they need to manage and that they do not want tied to the
// backtracking of the parser. This is only modified by the user and never
// rolled back by the parser. It is always up to the user to keep this in a
// consistent state.
globalStore storeDict
}
type storeDict map[string]interface{}
// the AST types...
type grammar struct {
pos position
rules []*rule
}
type rule struct {
pos position
name string
displayName string
expr interface{}
}
type choiceExpr struct {
pos position
alternatives []interface{}
}
type actionExpr struct {
pos position
expr interface{}
run func(*parser) (interface{}, error)
}
type recoveryExpr struct {
pos position
expr interface{}
recoverExpr interface{}
failureLabel []string
}
type seqExpr struct {
pos position
exprs []interface{}
}
type throwExpr struct {
pos position
label string
}
type labeledExpr struct {
pos position
label string
expr interface{}
}
type expr struct {
pos position
expr interface{}
}
type andExpr expr
type notExpr expr
type zeroOrOneExpr expr
type zeroOrMoreExpr expr
type oneOrMoreExpr expr
type ruleRefExpr struct {
pos position
name string
}
type stateCodeExpr struct {
pos position
run func(*parser) error
}
type andCodeExpr struct {
pos position
run func(*parser) (bool, error)
}
type notCodeExpr struct {
pos position
run func(*parser) (bool, error)
}
type litMatcher struct {
pos position
val string
ignoreCase bool
}
type charClassMatcher struct {
pos position
val string
basicLatinChars [128]bool
chars []rune
ranges []rune
classes []*unicode.RangeTable
ignoreCase bool
inverted bool
}
type anyMatcher position
// errList cumulates the errors found by the parser.
type errList []error
func (e *errList) add(err error) {
*e = append(*e, err)
}
func (e errList) err() error {
if len(e) == 0 {
return nil
}
e.dedupe()
return e
}
func (e *errList) dedupe() {
var cleaned []error
set := make(map[string]bool)
for _, err := range *e {
if msg := err.Error(); !set[msg] {
set[msg] = true
cleaned = append(cleaned, err)
}
}
*e = cleaned
}
func (e errList) Error() string {
switch len(e) {
case 0:
return ""
case 1:
return e[0].Error()
default:
var buf bytes.Buffer
for i, err := range e {
if i > 0 {
buf.WriteRune('\n')
}
buf.WriteString(err.Error())
}
return buf.String()
}
}
// parserError wraps an error with a prefix indicating the rule in which
// the error occurred. The original error is stored in the Inner field.
type parserError struct {
Inner error
pos position
prefix string
expected []string
}
// Error returns the error message.
func (p *parserError) Error() string {
return p.prefix + ": " + p.Inner.Error()
}
// newParser creates a parser with the specified input source and options.
func newParser(filename string, b []byte, opts ...Option) *parser {
stats := Stats{
ChoiceAltCnt: make(map[string]map[string]int),
}
p := &parser{
filename: filename,
errs: new(errList),
data: b,
pt: savepoint{position: position{line: 1}},
recover: true,
cur: current{
state: make(storeDict),
globalStore: make(storeDict),
},
maxFailPos: position{col: 1, line: 1},
maxFailExpected: make([]string, 0, 20),
Stats: &stats,
// start rule is rule [0] unless an alternate entrypoint is specified
entrypoint: g.rules[0].name,
}
p.setOptions(opts)
if p.maxExprCnt == 0 {
p.maxExprCnt = math.MaxUint64
}
return p
}
// setOptions applies each supplied Option to the parser in order.
func (p *parser) setOptions(opts []Option) {
	for _, apply := range opts {
		apply(p)
	}
}
type resultTuple struct {
v interface{}
b bool
end savepoint
}
const choiceNoMatch = -1
// Stats stores some statistics, gathered during parsing
type Stats struct {
	// ExprCnt counts the number of expressions processed during parsing
	// This value is compared to the maximum number of expressions allowed
	// (set by the MaxExpressions option).
	ExprCnt uint64
	// ChoiceAltCnt is used to count, for each ordered choice expression,
	// how many times each alternative is used.
	// These numbers make it possible to reorder the alternatives of an
	// ordered choice to increase the performance of the parser
	//
	// The outer key of ChoiceAltCnt is composed of the name of the rule as well
	// as the line and the column of the ordered choice.
	// The inner key of ChoiceAltCnt is the number (one-based) of the matching alternative.
	// For each alternative the number of matches are counted. If an ordered choice does not
	// match, a special counter is incremented. The name of this counter is set with
	// the parser option Statistics.
	// For an alternative to be included in ChoiceAltCnt, it has to match at least once.
	ChoiceAltCnt map[string]map[string]int
}
type parser struct {
filename string
pt savepoint
cur current
data []byte
errs *errList
depth int
recover bool
debug bool
memoize bool
// memoization table for the packrat algorithm:
// map[offset in source] map[expression or rule] {value, match}
memo map[int]map[interface{}]resultTuple
// rules table, maps the rule identifier to the rule node
rules map[string]*rule
// variables stack, map of label to value
vstack []map[string]interface{}
// rule stack, allows identification of the current rule in errors
rstack []*rule
// parse fail
maxFailPos position
maxFailExpected []string
maxFailInvertExpected bool
// max number of expressions to be parsed
maxExprCnt uint64
// entrypoint for the parser
entrypoint string
allowInvalidUTF8 bool
*Stats
choiceNoMatch string
// recovery expression stack, keeps track of the currently available recovery expression, these are traversed in reverse
recoveryStack []map[string]interface{}
}
// push a variable set on the vstack.
// The slice's capacity is reused across pushes: a recycled slot that
// already holds an empty map is kept as-is, otherwise a fresh map is
// installed in the slot.
func (p *parser) pushV() {
	if cap(p.vstack) == len(p.vstack) {
		// create new empty slot in the stack
		p.vstack = append(p.vstack, nil)
	} else {
		// slice to 1 more
		p.vstack = p.vstack[:len(p.vstack)+1]
	}
	// get the last args set
	m := p.vstack[len(p.vstack)-1]
	if m != nil && len(m) == 0 {
		// empty map, all good
		return
	}
	// slot was nil or held a used map: replace with a fresh one
	m = make(map[string]interface{})
	p.vstack[len(p.vstack)-1] = m
}
// pop a variable set from the vstack.
// A non-empty map is dropped for the garbage collector; an empty map is
// left in its slot so pushV can reuse it without reallocating.
func (p *parser) popV() {
	// if the map is not empty, clear it
	m := p.vstack[len(p.vstack)-1]
	if len(m) > 0 {
		// GC that map
		p.vstack[len(p.vstack)-1] = nil
	}
	p.vstack = p.vstack[:len(p.vstack)-1]
}
// push a recovery expression with its labels to the recoveryStack
func (p *parser) pushRecovery(labels []string, expr interface{}) {
if cap(p.recoveryStack) == len(p.recoveryStack) {
// create new empty slot in the stack
p.recoveryStack = append(p.recoveryStack, nil)
} else {
// slice to 1 more
p.recoveryStack = p.recoveryStack[:len(p.recoveryStack)+1]
}
m := make(map[string]interface{}, len(labels))
for _, fl := range labels {
m[fl] = expr
}
p.recoveryStack[len(p.recoveryStack)-1] = m
}
// pop a recovery expression from the recoveryStack
func (p *parser) popRecovery() {
// GC that map
p.recoveryStack[len(p.recoveryStack)-1] = nil
p.recoveryStack = p.recoveryStack[:len(p.recoveryStack)-1]
}
func (p *parser) print(prefix, s string) string {
if !p.debug {
return s
}
fmt.Printf("%s %d:%d:%d: %s [%#U]\n",
prefix, p.pt.line, p.pt.col, p.pt.offset, s, p.pt.rn)
return s
}
func (p *parser) in(s string) string {
p.depth++
return p.print(strings.Repeat(" ", p.depth)+">", s)
}
func (p *parser) out(s string) string {
p.depth--
return p.print(strings.Repeat(" ", p.depth)+"<", s)
}
// addErr records err at the parser's current input position, with no
// expected-token hints.
func (p *parser) addErr(err error) {
	p.addErrAt(err, p.pt.position, []string{})
}
func (p *parser) addErrAt(err error, pos position, expected []string) {
var buf bytes.Buffer
if p.filename != "" {
buf.WriteString(p.filename)
}
if buf.Len() > 0 {
buf.WriteString(":")
}
buf.WriteString(fmt.Sprintf("%d:%d (%d)", pos.line, pos.col, pos.offset))
if len(p.rstack) > 0 {
if buf.Len() > 0 {
buf.WriteString(": ")
}
rule := p.rstack[len(p.rstack)-1]
if rule.displayName != "" {
buf.WriteString("rule " + rule.displayName)
} else {
buf.WriteString("rule " + rule.name)
}
}
pe := &parserError{Inner: err, pos: pos, prefix: buf.String(), expected: expected}
p.errs.add(pe)
}
// failAt records an expected-match outcome at pos for "no match found"
// error reporting. Only the farthest failure position in the input is
// tracked, since that is where the most useful "expected ..." hints
// live. Inside a not-expression maxFailInvertExpected is set, which
// swaps the success/failure roles and negates the expectation string.
func (p *parser) failAt(fail bool, pos position, want string) {
	// process fail if parsing fails and not inverted or parsing succeeds and invert is set
	if fail == p.maxFailInvertExpected {
		if pos.offset < p.maxFailPos.offset {
			// earlier than the farthest recorded failure: ignore
			return
		}
		if pos.offset > p.maxFailPos.offset {
			// a new farthest failure: reset the expected set
			p.maxFailPos = pos
			p.maxFailExpected = p.maxFailExpected[:0]
		}
		if p.maxFailInvertExpected {
			want = "!" + want
		}
		p.maxFailExpected = append(p.maxFailExpected, want)
	}
}
// read advances the parser to the next rune.
func (p *parser) read() {
p.pt.offset += p.pt.w
rn, n := utf8.DecodeRune(p.data[p.pt.offset:])
p.pt.rn = rn
p.pt.w = n
p.pt.col++
if rn == '\n' {
p.pt.line++
p.pt.col = 0
}
if rn == utf8.RuneError && n == 1 { // see utf8.DecodeRune
if !p.allowInvalidUTF8 {
p.addErr(errInvalidEncoding)
}
}
}
// restore rewinds the parser to the savepoint pt; it is a no-op when
// the parser is already at that offset.
func (p *parser) restore(pt savepoint) {
	if p.debug {
		defer p.out(p.in("restore"))
	}
	if pt.offset != p.pt.offset {
		p.pt = pt
	}
}
// Cloner is implemented by any value that has a Clone method, which returns a
// copy of the value. This is mainly used for types which are not passed by
// value (e.g map, slice, chan) or structs that contain such types.
//
// This is used in conjunction with the global state feature to create proper
// copies of the state to allow the parser to properly restore the state in
// the case of backtracking.
type Cloner interface {
Clone() interface{}
}
var statePool = &sync.Pool{
New: func() interface{} { return make(storeDict) },
}
func (sd storeDict) Discard() {
for k := range sd {
delete(sd, k)
}
statePool.Put(sd)
}
// clone and return parser current state.
func (p *parser) cloneState() storeDict {
if p.debug {
defer p.out(p.in("cloneState"))
}
state := statePool.Get().(storeDict)
for k, v := range p.cur.state {
if c, ok := v.(Cloner); ok {
state[k] = c.Clone()
} else {
state[k] = v
}
}
return state
}
// restoreState replaces the parser's current state with the given
// storeDict. Each cloned state should be restored at most once: the
// previous state map is returned to the shared pool via Discard, so
// restoring the same clone twice would reuse a recycled map.
func (p *parser) restoreState(state storeDict) {
	if p.debug {
		defer p.out(p.in("restoreState"))
	}
	p.cur.state.Discard()
	p.cur.state = state
}
// sliceFrom returns the input bytes between the savepoint start and the
// parser's current position.
func (p *parser) sliceFrom(start savepoint) []byte {
	// position is embedded in savepoint, so its fields are promoted
	return p.data[start.offset:p.pt.offset]
}
func (p *parser) getMemoized(node interface{}) (resultTuple, bool) {
if len(p.memo) == 0 {
return resultTuple{}, false
}
m := p.memo[p.pt.offset]
if len(m) == 0 {
return resultTuple{}, false
}
res, ok := m[node]
return res, ok
}
func (p *parser) setMemoized(pt savepoint, node interface{}, tuple resultTuple) {
if p.memo == nil {
p.memo = make(map[int]map[interface{}]resultTuple)
}
m := p.memo[pt.offset]
if m == nil {
m = make(map[interface{}]resultTuple)
p.memo[pt.offset] = m
}
m[node] = tuple
}
// buildRulesTable indexes the grammar's rules by name for fast lookup
// during rule references.
func (p *parser) buildRulesTable(g *grammar) {
	table := make(map[string]*rule, len(g.rules))
	for _, r := range g.rules {
		table[r.name] = r
	}
	p.rules = table
}
func (p *parser) parse(g *grammar) (val interface{}, err error) {
if len(g.rules) == 0 {
p.addErr(errNoRule)
return nil, p.errs.err()
}
// TODO : not super critical but this could be generated
p.buildRulesTable(g)
if p.recover {
// panic can be used in action code to stop parsing immediately
// and return the panic as an error.
defer func() {
if e := recover(); e != nil {
if p.debug {
defer p.out(p.in("panic handler"))
}
val = nil
switch e := e.(type) {
case error:
p.addErr(e)
default:
p.addErr(fmt.Errorf("%v", e))
}
err = p.errs.err()
}
}()
}
startRule, ok := p.rules[p.entrypoint]
if !ok {
p.addErr(errInvalidEntrypoint)
return nil, p.errs.err()
}
p.read() // advance to first rune
val, ok = p.parseRule(startRule)
if !ok {
if len(*p.errs) == 0 {
// If parsing fails, but no errors have been recorded, the expected values
// for the farthest parser position are returned as error.
maxFailExpectedMap := make(map[string]struct{}, len(p.maxFailExpected))
for _, v := range p.maxFailExpected {
maxFailExpectedMap[v] = struct{}{}
}
expected := make([]string, 0, len(maxFailExpectedMap))
eof := false
if _, ok := maxFailExpectedMap["!."]; ok {
delete(maxFailExpectedMap, "!.")
eof = true
}
for k := range maxFailExpectedMap {
expected = append(expected, k)
}
sort.Strings(expected)
if eof {
expected = append(expected, "EOF")
}
p.addErrAt(errors.New("no match found, expected: "+listJoin(expected, ", ", "or")), p.maxFailPos, expected)
}
return nil, p.errs.err()
}
return val, p.errs.err()
}
// listJoin joins list with sep, using lastSep before the final element
// (e.g. ["a","b","c"] with ", " and "or" yields "a, b or c").
func listJoin(list []string, sep string, lastSep string) string {
	if len(list) == 0 {
		return ""
	}
	if len(list) == 1 {
		return list[0]
	}
	head := strings.Join(list[:len(list)-1], sep)
	return head + " " + lastSep + " " + list[len(list)-1]
}
func (p *parser) parseRule(rule *rule) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseRule " + rule.name))
}
if p.memoize {
res, ok := p.getMemoized(rule)
if ok {
p.restore(res.end)
return res.v, res.b
}
}
start := p.pt
p.rstack = append(p.rstack, rule)
p.pushV()
val, ok := p.parseExpr(rule.expr)
p.popV()
p.rstack = p.rstack[:len(p.rstack)-1]
if ok && p.debug {
p.print(strings.Repeat(" ", p.depth)+"MATCH", string(p.sliceFrom(start)))
}
if p.memoize {
p.setMemoized(start, rule, resultTuple{val, ok, p.pt})
}
return val, ok
}
func (p *parser) parseExpr(expr interface{}) (interface{}, bool) {
var pt savepoint
if p.memoize {
res, ok := p.getMemoized(expr)
if ok {
p.restore(res.end)
return res.v, res.b
}
pt = p.pt
}
p.ExprCnt++
if p.ExprCnt > p.maxExprCnt {
panic(errMaxExprCnt)
}
var val interface{}
var ok bool
switch expr := expr.(type) {
case *actionExpr:
val, ok = p.parseActionExpr(expr)
case *andCodeExpr:
val, ok = p.parseAndCodeExpr(expr)
case *andExpr:
val, ok = p.parseAndExpr(expr)
case *anyMatcher:
val, ok = p.parseAnyMatcher(expr)
case *charClassMatcher:
val, ok = p.parseCharClassMatcher(expr)
case *choiceExpr:
val, ok = p.parseChoiceExpr(expr)
case *labeledExpr:
val, ok = p.parseLabeledExpr(expr)
case *litMatcher:
val, ok = p.parseLitMatcher(expr)
case *notCodeExpr:
val, ok = p.parseNotCodeExpr(expr)
case *notExpr:
val, ok = p.parseNotExpr(expr)
case *oneOrMoreExpr:
val, ok = p.parseOneOrMoreExpr(expr)
case *recoveryExpr:
val, ok = p.parseRecoveryExpr(expr)
case *ruleRefExpr:
val, ok = p.parseRuleRefExpr(expr)
case *seqExpr:
val, ok = p.parseSeqExpr(expr)
case *stateCodeExpr:
val, ok = p.parseStateCodeExpr(expr)
case *throwExpr:
val, ok = p.parseThrowExpr(expr)
case *zeroOrMoreExpr:
val, ok = p.parseZeroOrMoreExpr(expr)
case *zeroOrOneExpr:
val, ok = p.parseZeroOrOneExpr(expr)
default:
panic(fmt.Sprintf("unknown expression type %T", expr))
}
if p.memoize {
p.setMemoized(pt, expr, resultTuple{val, ok, p.pt})
}
return val, ok
}
func (p *parser) parseActionExpr(act *actionExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseActionExpr"))
}
start := p.pt
val, ok := p.parseExpr(act.expr)
if ok {
p.cur.pos = start.position
p.cur.text = p.sliceFrom(start)
state := p.cloneState()
actVal, err := act.run(p)
if err != nil {
p.addErrAt(err, start.position, []string{})
}
p.restoreState(state)
val = actVal
}
if ok && p.debug {
p.print(strings.Repeat(" ", p.depth)+"MATCH", string(p.sliceFrom(start)))
}
return val, ok
}
func (p *parser) parseAndCodeExpr(and *andCodeExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseAndCodeExpr"))
}
state := p.cloneState()
ok, err := and.run(p)
if err != nil {
p.addErr(err)
}
p.restoreState(state)
return nil, ok
}
func (p *parser) parseAndExpr(and *andExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseAndExpr"))
}
pt := p.pt
state := p.cloneState()
p.pushV()
_, ok := p.parseExpr(and.expr)
p.popV()
p.restoreState(state)
p.restore(pt)
return nil, ok
}
func (p *parser) parseAnyMatcher(any *anyMatcher) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseAnyMatcher"))
}
if p.pt.rn == utf8.RuneError && p.pt.w == 0 {
// EOF - see utf8.DecodeRune
p.failAt(false, p.pt.position, ".")
return nil, false
}
start := p.pt
p.read()
p.failAt(true, start.position, ".")
return p.sliceFrom(start), true
}
func (p *parser) parseCharClassMatcher(chr *charClassMatcher) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseCharClassMatcher"))
}
cur := p.pt.rn
start := p.pt
// can't match EOF
if cur == utf8.RuneError && p.pt.w == 0 { // see utf8.DecodeRune
p.failAt(false, start.position, chr.val)
return nil, false
}
if chr.ignoreCase {
cur = unicode.ToLower(cur)
}
// try to match in the list of available chars
for _, rn := range chr.chars {
if rn == cur {
if chr.inverted {
p.failAt(false, start.position, chr.val)
return nil, false
}
p.read()
p.failAt(true, start.position, chr.val)
return p.sliceFrom(start), true
}
}
// try to match in the list of ranges
for i := 0; i < len(chr.ranges); i += 2 {
if cur >= chr.ranges[i] && cur <= chr.ranges[i+1] {
if chr.inverted {
p.failAt(false, start.position, chr.val)
return nil, false
}
p.read()
p.failAt(true, start.position, chr.val)
return p.sliceFrom(start), true
}
}
// try to match in the list of Unicode classes
for _, cl := range chr.classes {
if unicode.Is(cl, cur) {
if chr.inverted {
p.failAt(false, start.position, chr.val)
return nil, false
}
p.read()
p.failAt(true, start.position, chr.val)
return p.sliceFrom(start), true
}
}
if chr.inverted {
p.read()
p.failAt(true, start.position, chr.val)
return p.sliceFrom(start), true
}
p.failAt(false, start.position, chr.val)
return nil, false
}
func (p *parser) incChoiceAltCnt(ch *choiceExpr, altI int) {
choiceIdent := fmt.Sprintf("%s %d:%d", p.rstack[len(p.rstack)-1].name, ch.pos.line, ch.pos.col)
m := p.ChoiceAltCnt[choiceIdent]
if m == nil {
m = make(map[string]int)
p.ChoiceAltCnt[choiceIdent] = m
}
// We increment altI by 1, so the keys do not start at 0
alt := strconv.Itoa(altI + 1)
if altI == choiceNoMatch {
alt = p.choiceNoMatch
}
m[alt]++
}
func (p *parser) parseChoiceExpr(ch *choiceExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseChoiceExpr"))
}
for altI, alt := range ch.alternatives {
// dummy assignment to prevent compile error if optimized
_ = altI
state := p.cloneState()
p.pushV()
val, ok := p.parseExpr(alt)
p.popV()
if ok {
p.incChoiceAltCnt(ch, altI)
return val, ok
}
p.restoreState(state)
}
p.incChoiceAltCnt(ch, choiceNoMatch)
return nil, false
}
func (p *parser) parseLabeledExpr(lab *labeledExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseLabeledExpr"))
}
p.pushV()
val, ok := p.parseExpr(lab.expr)
p.popV()
if ok && lab.label != "" {
m := p.vstack[len(p.vstack)-1]
m[lab.label] = val
}
return val, ok
}
func (p *parser) parseLitMatcher(lit *litMatcher) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseLitMatcher"))
}
ignoreCase := ""
if lit.ignoreCase {
ignoreCase = "i"
}
val := string(strconv.AppendQuote([]byte{}, lit.val)) + ignoreCase // wrap 'lit.val' with double quotes
start := p.pt
for _, want := range lit.val {
cur := p.pt.rn
if lit.ignoreCase {
cur = unicode.ToLower(cur)
}
if cur != want {
p.failAt(false, start.position, val)
p.restore(start)
return nil, false
}
p.read()
}
p.failAt(true, start.position, val)
return p.sliceFrom(start), true
}
func (p *parser) parseNotCodeExpr(not *notCodeExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseNotCodeExpr"))
}
state := p.cloneState()
ok, err := not.run(p)
if err != nil {
p.addErr(err)
}
p.restoreState(state)
return nil, !ok
}
func (p *parser) parseNotExpr(not *notExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseNotExpr"))
}
pt := p.pt
state := p.cloneState()
p.pushV()
p.maxFailInvertExpected = !p.maxFailInvertExpected
_, ok := p.parseExpr(not.expr)
p.maxFailInvertExpected = !p.maxFailInvertExpected
p.popV()
p.restoreState(state)
p.restore(pt)
return nil, !ok
}
func (p *parser) parseOneOrMoreExpr(expr *oneOrMoreExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseOneOrMoreExpr"))
}
var vals []interface{}
for {
p.pushV()
val, ok := p.parseExpr(expr.expr)
p.popV()
if !ok {
if len(vals) == 0 {
// did not match once, no match
return nil, false
}
return vals, true
}
vals = append(vals, val)
}
}
func (p *parser) parseRecoveryExpr(recover *recoveryExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseRecoveryExpr (" + strings.Join(recover.failureLabel, ",") + ")"))
}
p.pushRecovery(recover.failureLabel, recover.recoverExpr)
val, ok := p.parseExpr(recover.expr)
p.popRecovery()
return val, ok
}
func (p *parser) parseRuleRefExpr(ref *ruleRefExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseRuleRefExpr " + ref.name))
}
if ref.name == "" {
panic(fmt.Sprintf("%s: invalid rule: missing name", ref.pos))
}
rule := p.rules[ref.name]
if rule == nil {
p.addErr(fmt.Errorf("undefined rule: %s", ref.name))
return nil, false
}
return p.parseRule(rule)
}
func (p *parser) parseSeqExpr(seq *seqExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseSeqExpr"))
}
vals := make([]interface{}, 0, len(seq.exprs))
pt := p.pt
state := p.cloneState()
for _, expr := range seq.exprs {
val, ok := p.parseExpr(expr)
if !ok {
p.restoreState(state)
p.restore(pt)
return nil, false
}
vals = append(vals, val)
}
return vals, true
}
func (p *parser) parseStateCodeExpr(state *stateCodeExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseStateCodeExpr"))
}
err := state.run(p)
if err != nil {
p.addErr(err)
}
return nil, true
}
func (p *parser) parseThrowExpr(expr *throwExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseThrowExpr"))
}
for i := len(p.recoveryStack) - 1; i >= 0; i-- {
if recoverExpr, ok := p.recoveryStack[i][expr.label]; ok {
if val, ok := p.parseExpr(recoverExpr); ok {
return val, ok
}
}
}
return nil, false
}
func (p *parser) parseZeroOrMoreExpr(expr *zeroOrMoreExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseZeroOrMoreExpr"))
}
var vals []interface{}
for {
p.pushV()
val, ok := p.parseExpr(expr.expr)
p.popV()
if !ok {
return vals, true
}
vals = append(vals, val)
}
}
func (p *parser) parseZeroOrOneExpr(expr *zeroOrOneExpr) (interface{}, bool) {
if p.debug {
defer p.out(p.in("parseZeroOrOneExpr"))
}
p.pushV()
val, _ := p.parseExpr(expr.expr)
p.popV()
// whether it matched or not, consider it a match
return val, true
}
<|start_filename|>examples/color/gen-marshallers_test.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package color
import "testing"
func TestColorCorpus(t *testing.T) {
cases := []struct {
Data []byte
}{
{
Data: []byte{0xfd, 0x94, 0x1},
},
}
for _, c := range cases {
_, err := ParseColor(c.Data)
if err != nil {
t.Fatal(err)
}
}
}
<|start_filename|>gen/tests/contexts/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package contexts
import (
"encoding/binary"
"errors"
)
type Flag struct {
Flagval uint8
}
type Count struct {
Countval uint8
}
type Point struct {
	X uint8
	Y uint8
}

// Parse decodes a Point from the front of data and returns the
// unconsumed remainder. X must lie in [0, 254]; Y is unconstrained.
func (p *Point) Parse(data []byte) ([]byte, error) {
	rest := data
	if len(rest) == 0 {
		return nil, errors.New("data too short")
	}
	p.X = rest[0]
	// uint8 is always >= 0, so only the upper bound can fail
	if p.X > 254 {
		return nil, errors.New("integer constraint violated")
	}
	rest = rest[1:]
	if len(rest) == 0 {
		return nil, errors.New("data too short")
	}
	p.Y = rest[0]
	return rest[1:], nil
}

// ParsePoint decodes a Point from data; trailing bytes are ignored.
func ParsePoint(data []byte) (*Point, error) {
	var p Point
	if _, err := p.Parse(data); err != nil {
		return nil, err
	}
	return &p, nil
}
type Twosize struct {
X uint32
Y uint16
}
func (t *Twosize) Parse(data []byte, flag Flag) ([]byte, error) {
cur := data
{
switch {
case flag.Flagval == 0:
{
if len(cur) < 4 {
return nil, errors.New("data too short")
}
t.X = binary.BigEndian.Uint32(cur)
if !(0 <= t.X && t.X <= 2147483647) {
return nil, errors.New("integer constraint violated")
}
cur = cur[4:]
}
case flag.Flagval == 1 || flag.Flagval == 3:
{
if len(cur) < 2 {
return nil, errors.New("data too short")
}
t.Y = binary.BigEndian.Uint16(cur)
cur = cur[2:]
}
}
}
return cur, nil
}
func ParseTwosize(data []byte, flag Flag) (*Twosize, error) {
t := new(Twosize)
_, err := t.Parse(data, flag)
if err != nil {
return nil, err
}
return t, nil
}
type Varsize struct {
	A   uint32
	Msg []uint8
}

// Parse decodes a Varsize from the front of data: a big-endian uint32
// followed by count.Countval message bytes. It returns the unconsumed
// remainder.
func (v *Varsize) Parse(data []byte, count Count) ([]byte, error) {
	rest := data
	if len(rest) < 4 {
		return nil, errors.New("data too short")
	}
	v.A = binary.BigEndian.Uint32(rest)
	rest = rest[4:]
	n := int(count.Countval)
	v.Msg = make([]uint8, n)
	for i := 0; i < n; i++ {
		if len(rest) == 0 {
			return nil, errors.New("data too short")
		}
		v.Msg[i] = rest[0]
		rest = rest[1:]
	}
	return rest, nil
}

// ParseVarsize decodes a Varsize from data; trailing bytes are ignored.
func ParseVarsize(data []byte, count Count) (*Varsize, error) {
	var v Varsize
	if _, err := v.Parse(data, count); err != nil {
		return nil, err
	}
	return &v, nil
}
// Ccomplex exercises nested structs plus a union whose payload region is
// exactly count.Countval bytes and whose branch is selected by flag.
type Ccomplex struct {
P *Point
Tsz *Twosize
Vsz *Varsize
A []uint8
B []uint16
}
// Parse decodes a Ccomplex: a Point, a Twosize (flag-dependent), a Varsize
// (count-dependent), then a count.Countval-byte union region holding either
// bytes (A, flag 0) or big-endian u16s (B, flag 1). Returns the remainder.
func (c *Ccomplex) Parse(data []byte, flag Flag, count Count) ([]byte, error) {
cur := data
{
var err error
c.P = new(Point)
cur, err = c.P.Parse(cur)
if err != nil {
return nil, err
}
}
{
var err error
c.Tsz = new(Twosize)
cur, err = c.Tsz.Parse(cur, flag)
if err != nil {
return nil, err
}
}
{
var err error
c.Vsz = new(Varsize)
cur, err = c.Vsz.Parse(cur, count)
if err != nil {
return nil, err
}
}
{
// Carve off exactly Countval bytes for the union; parsing happens inside
// that window and resumes at the saved remainder afterwards.
if len(cur) < int(count.Countval) {
return nil, errors.New("data too short")
}
restore := cur[int(count.Countval):]
cur = cur[:int(count.Countval)]
switch {
case flag.Flagval == 0:
{
c.A = make([]uint8, 0)
for len(cur) > 0 {
var tmp uint8
if len(cur) < 1 {
return nil, errors.New("data too short")
}
tmp = cur[0]
cur = cur[1:]
c.A = append(c.A, tmp)
}
}
case flag.Flagval == 1:
{
c.B = make([]uint16, 0)
for len(cur) > 0 {
var tmp uint16
if len(cur) < 2 {
return nil, errors.New("data too short")
}
tmp = binary.BigEndian.Uint16(cur)
cur = cur[2:]
c.B = append(c.B, tmp)
}
}
}
// The union must consume its window exactly (e.g. an unmatched flag
// value with a non-zero window leaves bytes behind and fails here).
if len(cur) > 0 {
return nil, errors.New("trailing data disallowed")
}
cur = restore
}
return cur, nil
}
// ParseCcomplex allocates a Ccomplex and parses data into it, discarding any
// trailing bytes. Returns an error if decoding fails.
func ParseCcomplex(data []byte, flag Flag, count Count) (*Ccomplex, error) {
c := new(Ccomplex)
_, err := c.Parse(data, flag, count)
if err != nil {
return nil, err
}
return c, nil
}
<|start_filename|>gen/tests/rem/gen-fuzz.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
// +build gofuzz
package rem
// FuzzRem is a go-fuzz entry point: it returns 1 ("interesting, keep in the
// corpus") when data parses as a Rem, and 0 otherwise.
func FuzzRem(data []byte) int {
_, err := ParseRem(data)
if err != nil {
return 0
}
return 1
}
<|start_filename|>gen/tests/eos/gen-marshallers_test.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package eos
import "testing"
// TestFourbytesCorpus replays generated test vectors through ParseFourbytes
// and fails on any decode error.
func TestFourbytesCorpus(t *testing.T) {
cases := []struct {
Data []byte
}{
{
Data: []byte{0xfd, 0xc2, 0x1, 0x94},
},
}
for _, c := range cases {
_, err := ParseFourbytes(c.Data)
if err != nil {
t.Fatal(err)
}
}
}
<|start_filename|>gen/tests/constant/gen-marshallers_test.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package constant
import "testing"
// TestConstantsCorpus replays generated test vectors through ParseConstants
// and fails on any decode error.
func TestConstantsCorpus(t *testing.T) {
cases := []struct {
Data []byte
}{
{
Data: []byte{0x42},
},
}
for _, c := range cases {
_, err := ParseConstants(c.Data)
if err != nil {
t.Fatal(err)
}
}
}
<|start_filename|>gen/tests/nest/gen-marshallers_test.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package nest
import "testing"
// TestPointCorpus replays generated test vectors through ParsePoint and
// fails on any decode error.
func TestPointCorpus(t *testing.T) {
cases := []struct {
Data []byte
}{
{
Data: []byte{0x94, 0x1},
},
}
for _, c := range cases {
_, err := ParsePoint(c.Data)
if err != nil {
t.Fatal(err)
}
}
}
// TestRectCorpus replays generated test vectors through ParseRect and fails
// on any decode error.
func TestRectCorpus(t *testing.T) {
cases := []struct {
Data []byte
}{
{
Data: []byte{0x2f, 0xfa, 0xc2, 0xfd},
},
}
for _, c := range cases {
_, err := ParseRect(c.Data)
if err != nil {
t.Fatal(err)
}
}
}
<|start_filename|>testdata/trunnel/Makefile<|end_filename|>
# Path to the upstream trunnel reference checkout (git submodule).
REF=../../ref/trunnel
# Regenerate the README and re-import the upstream .trunnel test files.
all: README.md import
# README.md records which submodule revision the files came from, plus
# the original location of every imported file.
README.md: manifest.txt
echo 'Test files found in trunnel repository version:' > $@
echo '```' >> $@
git submodule status ${REF} >> $@
echo '```' >> $@
echo 'Original locations:' >> $@
echo '```' >> $@
cat $< >> $@
echo '```' >> $@
# Copy every file listed in the manifest into this directory.
import: manifest.txt
xargs -i cp -v {} . < $<
# Rebuild the manifest from the submodule on every run (FORCE).
manifest.txt: FORCE
# excluding tor.trunnel because it contains examples of unions without
# trailing semi-colons
find ${REF}/examples ${REF}/test/valid -name '*.trunnel' | grep -v 'tor\.trunnel' > $@
.PHONY: FORCE
<|start_filename|>gen/tests/nest/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package nest
import "errors"
// Point is a pair of unsigned byte coordinates.
type Point struct {
    X uint8
    Y uint8
}

// Parse decodes a Point from the front of data, one byte per coordinate,
// and returns the remaining bytes.
func (p *Point) Parse(data []byte) ([]byte, error) {
    if len(data) < 1 {
        return nil, errors.New("data too short")
    }
    p.X = data[0]
    rest := data[1:]
    if len(rest) < 1 {
        return nil, errors.New("data too short")
    }
    p.Y = rest[0]
    return rest[1:], nil
}

// ParsePoint allocates a Point and parses data into it, discarding any
// trailing bytes.
func ParsePoint(data []byte) (*Point, error) {
    p := new(Point)
    if _, err := p.Parse(data); err != nil {
        return nil, err
    }
    return p, nil
}

// Rect is an axis-aligned rectangle described by two corner Points.
type Rect struct {
    NorthEast *Point
    SouthWest *Point
}

// Parse decodes a Rect as two consecutive Points (north-east corner first)
// and returns the remaining bytes.
func (r *Rect) Parse(data []byte) ([]byte, error) {
    r.NorthEast = new(Point)
    rest, err := r.NorthEast.Parse(data)
    if err != nil {
        return nil, err
    }
    r.SouthWest = new(Point)
    rest, err = r.SouthWest.Parse(rest)
    if err != nil {
        return nil, err
    }
    return rest, nil
}

// ParseRect allocates a Rect and parses data into it, discarding any
// trailing bytes.
func ParseRect(data []byte) (*Rect, error) {
    r := new(Rect)
    if _, err := r.Parse(data); err != nil {
        return nil, err
    }
    return r, nil
}
<|start_filename|>tv/corpus_test.go<|end_filename|>
package tv
import (
"io/ioutil"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/mmcloughlin/trunnel/internal/test"
)
// TestCorpusVectors checks that Vectors returns nil for an unknown name and
// returns exactly what AddVectors stored for a known name.
func TestCorpusVectors(t *testing.T) {
c := &Corpus{}
assert.Nil(t, c.Vectors("a"))
vs := []Vector{NewVector([]byte("hello"))}
c.AddVectors("a", vs)
assert.Equal(t, vs, c.Vectors("a"))
}
// TestWriteCorpusReal writes a corpus to disk and verifies the on-disk
// layout: <dir>/<name>/corpus/<sha256-of-vector> containing the vector bytes.
func TestWriteCorpusReal(t *testing.T) {
c := &Corpus{}
c.AddVectors("a", []Vector{NewVector([]byte("hello"))})
// write corpus to a temp directory
dir, clean := test.TempDir(t)
defer clean()
t.Log(dir)
err := WriteCorpus(c, dir)
require.NoError(t, err)
// confirm the vector file exists
digest := "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824" // SHA256("hello")
path := filepath.Join(dir, "a", "corpus", digest)
b, err := ioutil.ReadFile(path)
require.NoError(t, err)
assert.Equal(t, []byte("hello"), b)
}
<|start_filename|>gen/tests/color/color_test.go<|end_filename|>
package color
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestColorParseLengthErrors verifies that every input shorter than the
// three bytes a Color requires is rejected.
func TestColorParseLengthErrors(t *testing.T) {
    for _, n := range []int{0, 1, 2} {
        _, err := new(Color).Parse(make([]byte, n))
        require.Error(t, err)
    }
}
// TestColorStandard parses a known string and checks that the first three
// bytes land in R, G, B and the remainder is returned untouched.
func TestColorStandard(t *testing.T) {
    input := []byte("Hello World!")
    c := new(Color)
    rest, err := c.Parse(input)
    require.NoError(t, err)
    assert.Equal(t, &Color{R: 'H', G: 'e', B: 'l'}, c)
    assert.Equal(t, []byte("lo World!"), rest)
}
<|start_filename|>fault/errors.go<|end_filename|>
// Package fault defines trunnel error types.
package fault
import "github.com/pkg/errors"
// ErrNotImplemented indicates a trunnel feature is not implemented.
var ErrNotImplemented = errors.New("not implemented")
// UnexpectedType is raised when a function receives an argument of an
// unexpected type. This could happen when visiting a malformed AST, for example.
// The embedded error provides the message, so UnexpectedType satisfies the
// error interface directly.
type UnexpectedType struct {
error
}
// NewUnexpectedType builds an UnexpectedType error describing the dynamic
// type of t.
func NewUnexpectedType(t interface{}) UnexpectedType {
    err := errors.Errorf("unexpected type %T", t)
    return UnexpectedType{error: err}
}
<|start_filename|>gen/tests/ints/ints_test.go<|end_filename|>
package ints
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestIntsParseLengthErrors verifies that every input shorter than the
// 1+2+4+8 = 15 bytes an Ints requires is rejected.
func TestIntsParseLengthErrors(t *testing.T) {
    for n := 0; n <= 14; n++ {
        _, err := new(Ints).Parse(make([]byte, n))
        require.Error(t, err)
    }
}
// TestIntsStandard parses one byte, one u16, one u32 and one u64 (all
// big-endian) from a fixture and checks both the decoded values and the
// unconsumed remainder.
func TestIntsStandard(t *testing.T) {
x := new(Ints)
b := []byte{
1,
2, 3,
4, 5, 6, 7,
8, 9, 10, 11, 12, 13, 14, 15,
'r', 'e', 's', 't',
}
expect := &Ints{
Byte: 0x01,
Word: 0x0203,
Dword: 0x04050607,
Qword: 0x08090a0b0c0d0e0f,
}
rest, err := x.Parse(b)
require.NoError(t, err)
assert.Equal(t, expect, x)
assert.Equal(t, []byte("rest"), rest)
}
<|start_filename|>gen/tests/nulterm/gen-marshallers_test.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package nulterm
import "testing"
// TestNulTermCorpus replays generated test vectors through ParseNulTerm and
// fails on any decode error.
func TestNulTermCorpus(t *testing.T) {
cases := []struct {
Data []byte
}{
{
Data: []byte{0x94, 0xfd, 0xc2, 0xfa, 0x62, 0x79, 0x68, 0x69, 0x7a, 0x7a, 0x0, 0x1},
},
}
for _, c := range cases {
_, err := ParseNulTerm(c.Data)
if err != nil {
t.Fatal(err)
}
}
}
<|start_filename|>examples/color/generate.go<|end_filename|>
// Package color defines an (R,G,B) type.
package color
//go:generate trunnel build -p color color.trunnel
<|start_filename|>internal/test/utils_test.go<|end_filename|>
package test
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestFileExists checks FileExists against a file known to be present (a
// source file in this package) and a path known to be absent.
func TestFileExists(t *testing.T) {
assert.True(t, FileExists("utils.go"))
assert.False(t, FileExists("doesnotexist"))
}
<|start_filename|>gen/tests/vararray/vararray_test.go<|end_filename|>
package vararray
import (
"encoding/binary"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestVarArrayParseNoLength confirms parsing fails when the input is too
// short to hold even the two-byte length prefix.
func TestVarArrayParseNoLength(t *testing.T) {
_, err := new(VarArray).Parse(make([]byte, 1))
require.Error(t, err)
}
// TestVarArrayParseEmpty confirms a zero length prefix yields an empty (but
// non-nil) Words slice.
func TestVarArrayParseEmpty(t *testing.T) {
v := new(VarArray)
_, err := v.Parse(make([]byte, 2))
require.NoError(t, err)
assert.Equal(t, &VarArray{
NWords: 0,
Words: []uint32{},
}, v)
}
// TestVarArrayTooShort declares n words in the length prefix but supplies
// one byte less than the 4n the body needs; parsing must fail for every n.
func TestVarArrayTooShort(t *testing.T) {
    for n := 1; n < 10; n++ {
        data := make([]byte, 2+4*n-1)
        binary.BigEndian.PutUint16(data, uint16(n))
        _, err := new(VarArray).Parse(data)
        require.Error(t, err)
    }
}
// TestVarArraySuccess parses a three-word array and checks the decoded
// big-endian words and the unconsumed remainder.
func TestVarArraySuccess(t *testing.T) {
v := new(VarArray)
b := []byte{
0, 3, // length
0, 1, 2, 3,
4, 5, 6, 7,
8, 9, 10, 11,
'r', 'e', 's', 't',
}
rest, err := v.Parse(b)
require.NoError(t, err)
assert.Equal(t, &VarArray{
NWords: 3,
Words: []uint32{
0x00010203,
0x04050607,
0x08090a0b,
},
}, v)
assert.Equal(t, []byte("rest"), rest)
}
<|start_filename|>examples/date/generate.go<|end_filename|>
// Package date defines a date type demonstrating trunnel integer constraints.
package date
//go:generate trunnel build -p date date.trunnel
<|start_filename|>gen/tests/unionlen/unionlen_test.go<|end_filename|>
package unionlen
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestParseErrors feeds truncated or otherwise malformed encodings to
// UnionWithLen.Parse and requires every one to be rejected.
func TestParseErrors(t *testing.T) {
cases := []struct {
Name string
Data []byte
}{
{Name: "short_tag", Data: []byte{0}},
{Name: "short_len", Data: []byte{0, 0, 0}},
{Name: "short_union", Data: []byte{0, 0, 42, 42}},
{Name: "short_color_r", Data: []byte{0, 1, 0, 0}},
{Name: "short_color_g", Data: []byte{0, 1, 0, 1, 'R'}},
{Name: "short_color_b", Data: []byte{0, 1, 0, 2, 'R', 'G'}},
{Name: "union_trailing", Data: []byte{0, 1, 0, 6, 'R', 'G', 'B', 'b', 'a', 'd'}},
{Name: "short_date_year", Data: []byte{0, 2, 0, 1, 0}},
{Name: "short_date_month", Data: []byte{0, 2, 0, 2, 7, 225}},
{Name: "short_date_day", Data: []byte{0, 2, 0, 3, 7, 225, 11}},
{Name: "short_after_union", Data: []byte{0, 1, 0, 3, 'R', 'G', 'B', 0}},
}
for _, c := range cases {
t.Run(c.Name, func(t *testing.T) {
_, err := new(UnionWithLen).Parse(c.Data)
assert.Error(t, err)
})
}
}
// TestParseCases checks the three tag branches (color, date, default)
// against hand-built encodings, including a length field larger than the
// payload the date branch consumes (the excess is ignored).
func TestParseCases(t *testing.T) {
cases := []struct {
Name string
Data []byte
Expect *UnionWithLen
}{
{
Name: "color",
Data: []byte{
0, 1, // color case
0, 3, // len 3
'R', 'G', 'B',
0x13, 0x37,
'r', 'e', 's', 't',
},
Expect: &UnionWithLen{
Tag: 1,
UnionLen: 3,
R: 'R',
G: 'G',
B: 'B',
RightAfterTheUnion: 0x1337,
},
},
{
Name: "date",
Data: []byte{
0, 2, // date case
0, 7, // should ignore anything over 4 bytes
7, 225, 11, 16, // the date
42, 42, 42, // ignored
0x13, 0x37,
'r', 'e', 's', 't',
},
Expect: &UnionWithLen{
Tag: 2,
UnionLen: 7,
Year: 2017,
Month: 11,
Day: 16,
RightAfterTheUnion: 0x1337,
},
},
{
Name: "default",
Data: []byte{
0x13, 0x37, // should fall to default case
0, 12,
'H', 'e', 'l', 'l', 'o', ' ', 'W', 'o', 'r', 'l', 'd', '!',
0x13, 0x37,
'r', 'e', 's', 't',
},
Expect: &UnionWithLen{
Tag: 0x1337,
UnionLen: 12,
Unparseable: []byte("Hello World!"),
RightAfterTheUnion: 0x1337,
},
},
}
for _, c := range cases {
t.Run(c.Name, func(t *testing.T) {
u := new(UnionWithLen)
rest, err := u.Parse(c.Data)
require.NoError(t, err)
assert.Equal(t, c.Expect, u)
assert.Equal(t, []byte("rest"), rest)
})
}
}
<|start_filename|>parse/parse_test.go<|end_filename|>
package parse
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/mmcloughlin/trunnel/ast"
)
// TestEmpty verifies that inputs containing no declarations at all —
// whitespace and comments only — parse to an empty AST.
func TestEmpty(t *testing.T) {
    cases := map[string]string{
        "blank":               "",
        "spaces":              " ",
        "tab":                 "\t",
        "single_line_comment": "// nothing here",
        "multi_line_comment":  " /* or here*/\t",
    }
    for name, src := range cases {
        t.Run(name, func(t *testing.T) {
            f, err := String(src)
            require.NoError(t, err)
            assert.Equal(t, &ast.File{}, f)
        })
    }
}
// TestConstant checks const declarations in decimal, hex and octal notation
// all resolve to the expected integer value.
func TestConstant(t *testing.T) {
cases := []struct {
Name string
Code string
Expect int64
}{
{"decimal", `const CONST_ID = 123;`, 123},
{"hex", `const CONST_ID = 0x7b;`, 123},
{"octal", `const CONST_ID = 0173;`, 123},
{"zero_decimal", `const CONST_ID = 0;`, 0},
{"zero_hex", `const CONST_ID = 0x00;`, 0},
}
for _, c := range cases {
t.Run(c.Name, func(t *testing.T) {
f, err := String(c.Code)
require.NoError(t, err)
expect := &ast.File{
Constants: []*ast.Constant{
{
Name: "CONST_ID",
Value: c.Expect,
},
},
}
assert.Equal(t, expect, f)
})
}
}
// TestStructBasic parses a minimal three-field struct declaration.
func TestStructBasic(t *testing.T) {
src := `struct rgb { u8 r; u8 g; u8 b; }`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "rgb",
Members: []ast.Member{
&ast.Field{Type: ast.U8, Name: "r"},
&ast.Field{Type: ast.U8, Name: "g"},
&ast.Field{Type: ast.U8, Name: "b"},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestExtern checks an extern struct declaration yields a struct with no
// members.
func TestExtern(t *testing.T) {
src := "extern struct rgb;"
expect := &ast.File{
Structs: []*ast.Struct{
{Name: "rgb"},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestExternContexts checks the "with context" clause on an extern struct.
func TestExternContexts(t *testing.T) {
src := "extern struct rgb with context a,b,c;"
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "rgb",
Contexts: []string{"a", "b", "c"},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestIntTypes covers all four unsigned integer field widths.
func TestIntTypes(t *testing.T) {
src := `struct s { u8 a; u16 b; u32 c; u64 d; }`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "s",
Members: []ast.Member{
&ast.Field{Type: ast.U8, Name: "a"},
&ast.Field{Type: ast.U16, Name: "b"},
&ast.Field{Type: ast.U32, Name: "c"},
&ast.Field{Type: ast.U64, Name: "d"},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestField covers IN [...] integer constraints: single values, literal
// ranges, ranges bounded by a named constant, and mixed lists.
func TestField(t *testing.T) {
s := `
struct int_constraints {
u8 version_num IN [ 4, 5, 6 ];
u16 length IN [ 0..16384 ];
u16 length2 IN [ 0..MAX_LEN ];
u8 version_num2 IN [ 1, 2, 4..6, 9..128 ];
};
`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "int_constraints",
Members: []ast.Member{
&ast.Field{
Name: "version_num",
Type: &ast.IntType{
Size: 8,
Constraint: &ast.IntegerList{
Ranges: []*ast.IntegerRange{
{Low: &ast.IntegerLiteral{Value: 4}},
{Low: &ast.IntegerLiteral{Value: 5}},
{Low: &ast.IntegerLiteral{Value: 6}},
},
},
},
},
&ast.Field{
Name: "length",
Type: &ast.IntType{
Size: 16,
Constraint: &ast.IntegerList{
Ranges: []*ast.IntegerRange{
{
Low: &ast.IntegerLiteral{Value: 0},
High: &ast.IntegerLiteral{Value: 16384},
},
},
},
},
},
&ast.Field{
Name: "length2",
Type: &ast.IntType{
Size: 16,
Constraint: &ast.IntegerList{
Ranges: []*ast.IntegerRange{
{
Low: &ast.IntegerLiteral{},
High: &ast.IntegerConstRef{Name: "MAX_LEN"},
},
},
},
},
},
&ast.Field{
Name: "version_num2",
Type: &ast.IntType{
Size: 8,
Constraint: &ast.IntegerList{
Ranges: []*ast.IntegerRange{
{Low: &ast.IntegerLiteral{Value: 1}},
{Low: &ast.IntegerLiteral{Value: 2}},
{Low: &ast.IntegerLiteral{Value: 4}, High: &ast.IntegerLiteral{Value: 6}},
{Low: &ast.IntegerLiteral{Value: 9}, High: &ast.IntegerLiteral{Value: 128}},
},
},
},
},
},
},
},
}
f, err := String(s)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestNulTermString parses the nulterm (NUL-terminated string) field type.
func TestNulTermString(t *testing.T) {
src := `struct nul_term_string {
nulterm str;
};
`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "nul_term_string",
Members: []ast.Member{
&ast.Field{
Name: "str",
Type: &ast.NulTermString{},
},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestNestedStructs checks struct references and inline struct definitions;
// the inline "inner" struct is hoisted to a top-level declaration.
func TestNestedStructs(t *testing.T) {
src := `
struct rgb { u8 r; u8 g; u8 b; };
struct outer {
struct rgb color;
struct inner { u8 a; u64 b; } c;
};
`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "rgb",
Members: []ast.Member{
&ast.Field{Type: ast.U8, Name: "r"},
&ast.Field{Type: ast.U8, Name: "g"},
&ast.Field{Type: ast.U8, Name: "b"},
},
},
{
Name: "outer",
Members: []ast.Member{
&ast.Field{Name: "color", Type: &ast.StructRef{Name: "rgb"}},
&ast.Field{Name: "c", Type: &ast.StructRef{Name: "inner"}},
},
},
{
Name: "inner",
Members: []ast.Member{
&ast.Field{Type: ast.U8, Name: "a"},
&ast.Field{Type: ast.U64, Name: "b"},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestFixedArraySimple parses fixed-size arrays sized by a literal and by a
// named constant, over integer and char element types.
func TestFixedArraySimple(t *testing.T) {
src := `struct fixed_arrays {
u8 a[8];
u32 b[SIZE];
char s[13];
}`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "fixed_arrays",
Members: []ast.Member{
&ast.Field{
Name: "a",
Type: &ast.FixedArrayMember{
Base: ast.U8,
Size: &ast.IntegerLiteral{Value: 8},
},
},
&ast.Field{
Name: "b",
Type: &ast.FixedArrayMember{
Base: ast.U32,
Size: &ast.IntegerConstRef{Name: "SIZE"},
},
},
&ast.Field{
Name: "s",
Type: &ast.FixedArrayMember{
Base: &ast.CharType{},
Size: &ast.IntegerLiteral{Value: 13},
},
},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestFixedArrayStructs parses fixed-size arrays of struct elements,
// including an inline struct definition that is hoisted to top level.
func TestFixedArrayStructs(t *testing.T) {
src := `struct fixed_array_structs {
struct another x[3];
struct inner {
u8 a;
u32 b;
} y[7];
}`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "fixed_array_structs",
Members: []ast.Member{
&ast.Field{
Name: "x",
Type: &ast.FixedArrayMember{
Base: &ast.StructRef{Name: "another"},
Size: &ast.IntegerLiteral{Value: 3},
},
},
&ast.Field{
Name: "y",
Type: &ast.FixedArrayMember{
Base: &ast.StructRef{Name: "inner"},
Size: &ast.IntegerLiteral{Value: 7},
},
},
},
},
{
Name: "inner",
Members: []ast.Member{
&ast.Field{Type: ast.U8, Name: "a"},
&ast.Field{Type: ast.U32, Name: "b"},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestVarLengthArray parses an array whose length is another field,
// producing a VarArrayMember constrained by an IDRef.
func TestVarLengthArray(t *testing.T) {
src := `struct var_length_array {
u16 length;
u8 bytes[length];
}`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "var_length_array",
Members: []ast.Member{
&ast.Field{Type: ast.U16, Name: "length"},
&ast.Field{
Name: "bytes",
Type: &ast.VarArrayMember{
Base: ast.U8,
Constraint: &ast.IDRef{Name: "length"},
},
},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestRemainderArray parses the empty-bracket form, which consumes the rest
// of the input: a VarArrayMember with a nil constraint.
func TestRemainderArray(t *testing.T) {
src := `struct remainder_array {
u8 x;
u8 rest[];
}`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "remainder_array",
Members: []ast.Member{
&ast.Field{Type: ast.U8, Name: "x"},
&ast.Field{
Name: "rest",
Type: &ast.VarArrayMember{
Base: ast.U8,
Constraint: nil,
},
},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestEOS parses the eos marker, which asserts end-of-input after the
// preceding fields.
func TestEOS(t *testing.T) {
src := ` struct fourbytes {
u16 x;
u16 y;
eos;
}`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "fourbytes",
Members: []ast.Member{
&ast.Field{Type: ast.U16, Name: "x"},
&ast.Field{Type: ast.U16, Name: "y"},
&ast.EOS{},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestVarLengthString parses a Pascal-style string: a char array whose
// length is given by a preceding field.
func TestVarLengthString(t *testing.T) {
src := `struct pascal_string {
u8 hostname_len;
char hostname[hostname_len];
}`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "pascal_string",
Members: []ast.Member{
&ast.Field{Type: ast.U8, Name: "hostname_len"},
&ast.Field{
Name: "hostname",
Type: &ast.VarArrayMember{
Base: &ast.CharType{},
Constraint: &ast.IDRef{Name: "hostname_len"},
},
},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestLeftoverLengthArray parses the [..-N] form: an array extending to all
// but the last N bytes of the input, represented by a Leftover constraint.
func TestLeftoverLengthArray(t *testing.T) {
src := `struct encrypted {
u8 salt[16];
u8 message[..-32];
u8 mac[32];
}`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "encrypted",
Members: []ast.Member{
&ast.Field{
Name: "salt",
Type: &ast.FixedArrayMember{
Base: ast.U8,
Size: &ast.IntegerLiteral{Value: 16},
},
},
&ast.Field{
Name: "message",
Type: &ast.VarArrayMember{
Base: ast.U8,
Constraint: &ast.Leftover{
Num: &ast.IntegerLiteral{Value: 32},
},
},
},
&ast.Field{
Name: "mac",
Type: &ast.FixedArrayMember{
Base: ast.U8,
Size: &ast.IntegerLiteral{Value: 32},
},
},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestUnion covers tagged unions: single-value cases, an empty case, a
// multi-value case list, a range case, and a default that fails.
func TestUnion(t *testing.T) {
src := `struct has_union {
u8 tag;
union addr[tag] {
4 : u32 ipv4_addr;
5 : ;
6 : u8 ipv6_addr[16];
0xf0,0xf1 : u8 hostname_len;
char hostname[hostname_len];
0xF2 .. 0xFF : struct extension ext;
default : fail;
};
}`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "has_union",
Members: []ast.Member{
&ast.Field{Type: ast.U8, Name: "tag"},
&ast.UnionMember{
Name: "addr",
Tag: &ast.IDRef{Name: "tag"},
Cases: []*ast.UnionCase{
{
Case: ast.NewIntegerList(ast.NewIntegerRangeSingleLiteral(4)),
Members: []ast.Member{
&ast.Field{Type: ast.U32, Name: "ipv4_addr"},
},
},
{
Case: ast.NewIntegerList(ast.NewIntegerRangeSingleLiteral(5)),
},
{
Case: ast.NewIntegerList(ast.NewIntegerRangeSingleLiteral(6)),
Members: []ast.Member{
&ast.Field{
Name: "ipv6_addr",
Type: &ast.FixedArrayMember{
Base: ast.U8,
Size: &ast.IntegerLiteral{Value: 16},
},
},
},
},
{
Case: ast.NewIntegerList(
ast.NewIntegerRangeSingleLiteral(0xf0),
ast.NewIntegerRangeSingleLiteral(0xf1),
),
Members: []ast.Member{
&ast.Field{Type: ast.U8, Name: "hostname_len"},
&ast.Field{
Name: "hostname",
Type: &ast.VarArrayMember{
Base: &ast.CharType{},
Constraint: &ast.IDRef{Name: "hostname_len"},
},
},
},
},
{
Case: ast.NewIntegerList(
ast.NewIntegerRangeLiteral(0xf2, 0xff),
),
Members: []ast.Member{
&ast.Field{Name: "ext", Type: &ast.StructRef{Name: "extension"}},
},
},
{
Case: nil, // default
Members: []ast.Member{&ast.Fail{}},
},
},
},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestUnionExtentSpec covers a union with an explicit length field plus the
// ignore, "..." (ignore rest) and remainder-array member forms.
func TestUnionExtentSpec(t *testing.T) {
src := `struct union_extent {
u8 tag;
u16 length;
union addr[tag] with length length {
7 : ignore;
0xEE : u32 ipv4_addr;
...;
0xEF : u32 ipv4_addr;
u8 remainder[];
default: u8 unrecognized[];
};
};`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "union_extent",
Members: []ast.Member{
&ast.Field{Type: ast.U8, Name: "tag"},
&ast.Field{Type: ast.U16, Name: "length"},
&ast.UnionMember{
Name: "addr",
Tag: &ast.IDRef{Name: "tag"},
Length: &ast.IDRef{Name: "length"},
Cases: []*ast.UnionCase{
{
Case: ast.NewIntegerList(ast.NewIntegerRangeSingleLiteral(7)),
Members: []ast.Member{&ast.Ignore{}},
},
{
Case: ast.NewIntegerList(ast.NewIntegerRangeSingleLiteral(0xee)),
Members: []ast.Member{
&ast.Field{Type: ast.U32, Name: "ipv4_addr"},
&ast.Ignore{},
},
},
{
Case: ast.NewIntegerList(ast.NewIntegerRangeSingleLiteral(0xef)),
Members: []ast.Member{
&ast.Field{Type: ast.U32, Name: "ipv4_addr"},
&ast.Field{
Name: "remainder",
Type: &ast.VarArrayMember{
Base: ast.U8,
Constraint: nil,
},
},
},
},
{
Case: nil,
Members: []ast.Member{
&ast.Field{
Name: "unrecognized",
Type: &ast.VarArrayMember{
Base: ast.U8,
Constraint: nil,
},
},
},
},
},
},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestUnionMembersAfter covers a union with a leftover (..-N) length and a
// field following the union in the enclosing struct.
func TestUnionMembersAfter(t *testing.T) {
src := `struct encrypted {
u8 type;
union u[type] with length ..-32 {
1: u8 bytes[];
2: u8 salt[16];
u8 other_bytes[];
};
u64 data[4];
}`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "encrypted",
Members: []ast.Member{
&ast.Field{Type: ast.U8, Name: "type"},
&ast.UnionMember{
Name: "u",
Tag: &ast.IDRef{Name: "type"},
Length: &ast.Leftover{Num: &ast.IntegerLiteral{Value: 32}},
Cases: []*ast.UnionCase{
{
Case: ast.NewIntegerList(ast.NewIntegerRangeSingleLiteral(1)),
Members: []ast.Member{
&ast.Field{
Name: "bytes",
Type: &ast.VarArrayMember{
Base: ast.U8,
Constraint: nil,
},
},
},
},
{
Case: ast.NewIntegerList(ast.NewIntegerRangeSingleLiteral(2)),
Members: []ast.Member{
&ast.Field{
Name: "salt",
Type: &ast.FixedArrayMember{
Base: ast.U8,
Size: &ast.IntegerLiteral{Value: 16},
},
},
&ast.Field{
Name: "other_bytes",
Type: &ast.VarArrayMember{
Base: ast.U8,
Constraint: nil,
},
},
},
},
},
},
&ast.Field{
Name: "data",
Type: &ast.FixedArrayMember{
Base: ast.U64,
Size: &ast.IntegerLiteral{Value: 4},
},
},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestPragma parses "trunnel <type> <idents>;" pragma lines with one or
// several comma-separated identifiers (whitespace-tolerant).
func TestPragma(t *testing.T) {
cases := []struct {
Name string
Code string
Type string
Options []string
}{
{
"single",
"trunnel options ident1;",
"options",
[]string{"ident1"},
},
{
"multi",
"trunnel special ident1, ident2\t, ident3 ,ident4 ;",
"special",
[]string{"ident1", "ident2", "ident3", "ident4"},
},
}
for _, c := range cases {
t.Run(c.Name, func(t *testing.T) {
f, err := String(c.Code)
require.NoError(t, err)
expect := &ast.File{
Pragmas: []*ast.Pragma{
{Type: c.Type, Options: c.Options},
},
}
assert.Equal(t, expect, f)
})
}
}
// TestComments checks line and block comments are stripped everywhere,
// including a nested "/*" inside a block comment and a "}" inside a line
// comment.
func TestComments(t *testing.T) {
src := `struct /* comments can
be anywhere */ rgb {
u8 r; /* this is a multi
line comment that /*should exclude this:
u8 a;
(hopefully) */
u8 // end of line comment
g; //}
u8 b;
}`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "rgb",
Members: []ast.Member{
&ast.Field{Type: ast.U8, Name: "r"},
&ast.Field{Type: ast.U8, Name: "g"},
&ast.Field{Type: ast.U8, Name: "b"},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestContext parses a context declaration with plain integer fields.
func TestContext(t *testing.T) {
src := `context ctx { u8 a; u16 b; u32 c; u64 d; }`
expect := &ast.File{
Contexts: []*ast.Context{
{
Name: "ctx",
Members: []*ast.Field{
{Type: ast.U8, Name: "a"},
{Type: ast.U16, Name: "b"},
{Type: ast.U32, Name: "c"},
{Type: ast.U64, Name: "d"},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestContextStructMemberErrors confirms we get errors for member types that
// are valid in structs but not in contexts.
func TestContextStructMemberErrors(t *testing.T) {
members := map[string]string{
"fixed_array": "u8 fixed_array[2];",
"var_array": "u16 len; u8 var_array[len];",
"eos": "u8 a; eos;",
"remaining": "u32 a; u8 rest[];",
"union": `u8 tag; union u[tag] {
1 : ignore;
default: fail;
};`,
"int_constraint": "u8 x IN [ 42 ];",
}
for n, m := range members {
t.Run(n, func(t *testing.T) {
// Each member must parse inside a struct but be rejected in a context.
_, err := String("struct verify {" + m + "}")
require.NoError(t, err)
_, err = String("context ctx {" + m + "}")
assert.Error(t, err)
})
}
}
// TestStructWithContext checks a struct declared "with context": fields may
// reference context members via scoped identifiers (scope.name).
func TestStructWithContext(t *testing.T) {
src := `struct encrypted_record with context stream_settings {
u8 iv[stream_settings.iv_len];
};`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "encrypted_record",
Contexts: []string{"stream_settings"},
Members: []ast.Member{
&ast.Field{
Name: "iv",
Type: &ast.VarArrayMember{
Base: ast.U8,
Constraint: &ast.IDRef{
Scope: "stream_settings",
Name: "iv_len",
},
},
},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestStructWithMultipleContexts checks a comma-separated context list with
// arbitrary surrounding whitespace.
func TestStructWithMultipleContexts(t *testing.T) {
src := `struct multi with context ctx0,ctx1 , ctx2, ctx3 {
u8 x;
};`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "multi",
Contexts: []string{"ctx0", "ctx1", "ctx2", "ctx3"},
Members: []ast.Member{
&ast.Field{Type: ast.U8, Name: "x"},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
// TestStructPtr parses @ptr position-marker members interleaved with
// ordinary fields.
func TestStructPtr(t *testing.T) {
src := `struct haspos {
nulterm s1;
/** Position right after the first NUL. */
@ptr pos1;
nulterm s2;
@ptr pos2;
u32 x;
}`
expect := &ast.File{
Structs: []*ast.Struct{
{
Name: "haspos",
Members: []ast.Member{
&ast.Field{Name: "s1", Type: &ast.NulTermString{}},
&ast.Field{Name: "pos1", Type: &ast.Ptr{}},
&ast.Field{Name: "s2", Type: &ast.NulTermString{}},
&ast.Field{Name: "pos2", Type: &ast.Ptr{}},
&ast.Field{Type: ast.U32, Name: "x"},
},
},
},
}
f, err := String(src)
require.NoError(t, err)
assert.Equal(t, expect, f)
}
<|start_filename|>parse/internal/parser/doc.go<|end_filename|>
// Package parser contains a pigeon-generated parser for trunnel source.
package parser
<|start_filename|>gen/core.go<|end_filename|>
package gen
import (
"io/ioutil"
"path/filepath"
"strings"
"github.com/mmcloughlin/random"
"github.com/serenize/snaker"
"github.com/mmcloughlin/trunnel/ast"
"github.com/mmcloughlin/trunnel/fault"
"github.com/mmcloughlin/trunnel/tv"
)
// Config defines options for package generation.
type Config struct {
Package string // package name
Dir string // directory to output to
Seed int64 // seed for corpus generation
}
// PackageName returns the name of the generated package.
func (c Config) PackageName() string {
return c.Package
}
// OutputDirectory returns the configured output directory.
func (c Config) OutputDirectory() string {
return c.Dir
}
// Path returns a path to rel within the configured output directory.
func (c Config) Path(rel string) string {
return filepath.Join(c.OutputDirectory(), rel)
}
// write saves b to the named file inside the output directory, readable by
// owner and group only (0640).
func (c Config) write(name string, b []byte) error {
return ioutil.WriteFile(c.Path(name), b, 0640)
}
// Package generates a Go package for the given files: the marshaller
// source, a test-vector corpus under testdata/fuzz, corpus-driven tests and
// a go-fuzz entry point. Corpus/test/fuzzer generation is skipped entirely
// (without error) when vector generation hits an unimplemented feature.
func Package(cfg Config, fs []*ast.File) error {
// Marshaller file
b, err := Marshallers(cfg.PackageName(), fs)
if err != nil {
return err
}
if err = cfg.write("gen-marshallers.go", b); err != nil {
return err
}
// Test vector corpus (some features not implemented yet)
c, err := tv.GenerateFiles(fs, tv.WithRandom(random.NewWithSeed(cfg.Seed)))
if err == fault.ErrNotImplemented {
return nil
}
if err != nil {
return err
}
fuzzDir := filepath.Join(cfg.OutputDirectory(), "testdata/fuzz")
if err = tv.WriteCorpus(c, fuzzDir); err != nil {
return err
}
// Test file
b, err = CorpusTests(cfg.PackageName(), c)
if err != nil {
return err
}
if err = cfg.write("gen-marshallers_test.go", b); err != nil {
return err
}
// Fuzzer
b, err = Fuzzers(cfg.PackageName(), c)
if err != nil {
return err
}
if err = cfg.write("gen-fuzz.go", b); err != nil {
return err
}
return nil
}
// name converts a trunnel identifier (conventionally SNAKE_CASE or
// snake_case) into an exported Go CamelCase identifier.
func name(n string) string {
    lower := strings.ToLower(n)
    return snaker.SnakeToCamel(lower)
}
<|start_filename|>gen/tests/fixie/gen-marshallers_test.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package fixie
import "testing"
// TestColorCorpus replays generated test vectors through ParseColor and
// fails on any decode error.
func TestColorCorpus(t *testing.T) {
cases := []struct {
Data []byte
}{
{
Data: []byte{0xfd, 0x94, 0x1},
},
}
for _, c := range cases {
_, err := ParseColor(c.Data)
if err != nil {
t.Fatal(err)
}
}
}
// TestFixieDemoCorpus replays generated test vectors through ParseFixieDemo
// and fails on any decode error.
func TestFixieDemoCorpus(t *testing.T) {
cases := []struct {
Data []byte
}{
{
Data: []byte{0x51, 0xe1, 0xec, 0x85, 0xe2, 0x2e, 0x89, 0x78, 0xa1, 0xd4, 0xf, 0x85, 0x51, 0x46, 0x39, 0xd, 0xe0, 0xb1, 0xa7, 0x9e, 0xaf, 0x48, 0x18, 0xd, 0x2d, 0xb, 0x75, 0xfb, 0x2a, 0xbd, 0xf4, 0x4a, 0x4f, 0xf9, 0x5f, 0xf6, 0x62, 0xa5, 0xee, 0xe8, 0xd3, 0xff, 0x12, 0x4, 0x5b, 0x73, 0xc8, 0x6e, 0x41, 0xc0, 0xfc, 0x2f, 0xfa, 0xc2},
},
}
for _, c := range cases {
_, err := ParseFixieDemo(c.Data)
if err != nil {
t.Fatal(err)
}
}
}
<|start_filename|>cmd/trunnel/trunnel.go<|end_filename|>
package main
import (
"log"
"os"
"github.com/urfave/cli"
"github.com/mmcloughlin/trunnel/gen"
"github.com/mmcloughlin/trunnel/meta"
"github.com/mmcloughlin/trunnel/parse"
)
// main wires up the trunnel CLI (currently just the build command) and
// exits non-zero via log.Fatal on any error.
func main() {
app := cli.NewApp()
app.Name = "trunnel"
app.Usage = "Code generator for binary parsing"
// Version is stamped with the git SHA at build time.
app.Version = meta.GitSHA
app.Commands = []*cli.Command{
build,
}
if err := app.Run(os.Args); err != nil {
log.Fatal(err)
}
}
// build command
var (
// cfg collects the -pkg/-dir flag values consumed by gen.Package.
cfg gen.Config
// build parses the named trunnel files and generates a Go package from
// them in the configured output directory.
build = &cli.Command{
Name: "build",
Usage: "Generate go package from trunnel",
ArgsUsage: "<trunnelfile>...",
Flags: []cli.Flag{
&cli.StringFlag{
Name: "pkg",
Aliases: []string{"p"},
Usage: "package name",
Destination: &cfg.Package,
},
&cli.StringFlag{
Name: "dir",
Aliases: []string{"d"},
Usage: "output directory",
Value: ".",
Destination: &cfg.Dir,
},
},
Action: func(c *cli.Context) error {
filenames := c.Args().Slice()
if len(filenames) == 0 {
return cli.NewExitError("missing trunnel filenames", 1)
}
fs, err := parse.Files(filenames)
if err != nil {
return cli.NewExitError(err, 1)
}
if err = gen.Package(cfg, fs); err != nil {
return cli.NewExitError(err, 1)
}
return nil
},
}
)
<|start_filename|>gen/tests/unionlo/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package unionlo
import (
"encoding/binary"
"errors"
)
type Unlo struct {
Tag uint8
X uint8
Y []uint8
Byte uint8
Z []uint16
Leftoverlen uint8
Leftovers []uint8
}
// Parse decodes u from data and returns the unconsumed remainder.
// Wire layout: 1-byte tag; a tagged union occupying everything except
// the final 8 bytes of data; then, from those reserved 8 bytes, a
// 1-byte length and that many leftover bytes.
func (u *Unlo) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		// Tag: discriminant for the union below.
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		u.Tag = cur[0]
		cur = cur[1:]
	}
	{
		// Reserve the trailing 8 bytes for the fields that follow the
		// union; the union itself parses only the bytes before them.
		if len(cur) < 8 {
			return nil, errors.New("data too short")
		}
		restore := cur[len(cur)-8:]
		cur = cur[:len(cur)-8]
		switch {
		case u.Tag == 1:
			{
				if len(cur) < 1 {
					return nil, errors.New("data too short")
				}
				u.X = cur[0]
				cur = cur[1:]
			}
		case u.Tag == 2:
			{
				// Y consumes the remainder of the union region.
				u.Y = make([]uint8, 0)
				for len(cur) > 0 {
					var tmp uint8
					if len(cur) < 1 {
						return nil, errors.New("data too short")
					}
					tmp = cur[0]
					cur = cur[1:]
					u.Y = append(u.Y, tmp)
				}
			}
		case u.Tag == 4:
			{
				if len(cur) < 1 {
					return nil, errors.New("data too short")
				}
				u.Byte = cur[0]
				cur = cur[1:]
			}
			{
				// Z: big-endian uint16s until the union region is empty;
				// an odd number of remaining bytes is an error.
				u.Z = make([]uint16, 0)
				for len(cur) > 0 {
					var tmp uint16
					if len(cur) < 2 {
						return nil, errors.New("data too short")
					}
					tmp = binary.BigEndian.Uint16(cur)
					cur = cur[2:]
					u.Z = append(u.Z, tmp)
				}
			}
		}
		// The union must consume its region exactly; unrecognized tags
		// therefore require an empty union region.
		if len(cur) > 0 {
			return nil, errors.New("trailing data disallowed")
		}
		cur = restore
	}
	{
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		u.Leftoverlen = cur[0]
		cur = cur[1:]
	}
	{
		// Leftovers: exactly Leftoverlen bytes from the reserved region.
		u.Leftovers = make([]uint8, int(u.Leftoverlen))
		for idx := 0; idx < int(u.Leftoverlen); idx++ {
			if len(cur) < 1 {
				return nil, errors.New("data too short")
			}
			u.Leftovers[idx] = cur[0]
			cur = cur[1:]
		}
	}
	return cur, nil
}
// ParseUnlo parses data into a new Unlo, ignoring any trailing bytes.
// It returns an error if the data cannot be parsed.
func ParseUnlo(data []byte) (*Unlo, error) {
	var u Unlo
	if _, err := u.Parse(data); err != nil {
		return nil, err
	}
	return &u, nil
}
<|start_filename|>inspect/inspect.go<|end_filename|>
// Package inspect provides tools for extracting information from the AST.
package inspect
import (
"errors"
"github.com/mmcloughlin/trunnel/ast"
"github.com/mmcloughlin/trunnel/fault"
"github.com/mmcloughlin/trunnel/internal/intervals"
)
// Structs builds a name to struct mapping for all structs in the file.
// It errors if two structs share a name.
func Structs(f *ast.File) (map[string]*ast.Struct, error) {
	structs := make(map[string]*ast.Struct, len(f.Structs))
	for _, s := range f.Structs {
		if _, dup := structs[s.Name]; dup {
			return nil, errors.New("duplicate struct name")
		}
		structs[s.Name] = s
	}
	return structs, nil
}
// Contexts builds a name to context mapping for all contexts in the file.
// It errors if two contexts share a name.
func Contexts(f *ast.File) (map[string]*ast.Context, error) {
	ctxs := make(map[string]*ast.Context, len(f.Contexts))
	for _, ctx := range f.Contexts {
		if _, dup := ctxs[ctx.Name]; dup {
			return nil, errors.New("duplicate context name")
		}
		ctxs[ctx.Name] = ctx
	}
	return ctxs, nil
}
// Constants builds a map of constant name to value from the declarations in f.
// Errors on duplicate constant names.
func Constants(f *ast.File) (map[string]int64, error) {
	values := make(map[string]int64, len(f.Constants))
	for _, c := range f.Constants {
		if _, dup := values[c.Name]; dup {
			return nil, errors.New("duplicate constant name")
		}
		values[c.Name] = c.Value
	}
	return values, nil
}
// Resolver maintains indexes of various parts of a trunnel file.
type Resolver struct {
	structs   map[string]*ast.Struct  // struct declarations by name (includes externs)
	contexts  map[string]*ast.Context // context declarations by name
	constants map[string]int64        // constant values by name
}
// NewResolverEmpty returns a new empty resolver.
func NewResolverEmpty() *Resolver {
	r := new(Resolver)
	r.structs = make(map[string]*ast.Struct)
	r.contexts = make(map[string]*ast.Context)
	r.constants = make(map[string]int64)
	return r
}
// NewResolver builds a resolver from the given file. It is a
// convenience wrapper around NewResolverFiles for the single-file case.
func NewResolver(f *ast.File) (*Resolver, error) {
	return NewResolverFiles([]*ast.File{f})
}
// NewResolverFiles builds a resolver from the given files. Files are
// added in order; the first conflicting declaration aborts with an error.
func NewResolverFiles(fs []*ast.File) (*Resolver, error) {
	r := NewResolverEmpty()
	for _, file := range fs {
		err := r.AddFile(file)
		if err != nil {
			return nil, err
		}
	}
	return r, nil
}
// AddFile adds declarations from the file to the Resolver: structs,
// then contexts, then constants. Duplicate declarations (per the rules
// of AddStruct/AddContext/SetConstant) abort with an error.
func (r *Resolver) AddFile(f *ast.File) error {
	structs, err := Structs(f)
	if err != nil {
		return err
	}
	for _, s := range structs {
		if err = r.AddStruct(s); err != nil {
			return err
		}
	}
	contexts, err := Contexts(f)
	if err != nil {
		return err
	}
	for _, ctx := range contexts {
		if err = r.AddContext(ctx); err != nil {
			return err
		}
	}
	constants, err := Constants(f)
	if err != nil {
		return err
	}
	for n, v := range constants {
		if err := r.SetConstant(n, v); err != nil {
			return err
		}
	}
	return nil
}
// AddStruct adds a struct to the Resolver. An existing extern declaration
// can be overridden by an actual declaration; a non-extern declaration
// cannot be replaced.
func (r *Resolver) AddStruct(s *ast.Struct) error {
	existing, found := r.Struct(s.Name)
	if found && !existing.Extern() {
		return errors.New("cannot override non-extern struct")
	}
	r.structs[s.Name] = s
	return nil
}
// Struct returns the struct with the given name, and whether it was
// found. Includes extern struct declarations.
func (r *Resolver) Struct(n string) (*ast.Struct, bool) {
	s, ok := r.structs[n]
	return s, ok
}
// Structs returns all the structs in the resolver, in unspecified
// (map-iteration) order.
func (r *Resolver) Structs() []*ast.Struct {
	out := make([]*ast.Struct, 0, len(r.structs))
	for _, s := range r.structs {
		out = append(out, s)
	}
	return out
}
// StructNonExtern returns the non-extern struct with the given name.
// It errors if the struct is unknown or only declared extern.
func (r *Resolver) StructNonExtern(n string) (*ast.Struct, error) {
	s, ok := r.Struct(n)
	switch {
	case !ok:
		return nil, errors.New("struct not found")
	case s.Extern():
		return nil, errors.New("struct is external")
	}
	return s, nil
}
// AddContext adds a context to the Resolver. Unlike structs, contexts
// can never be redeclared.
func (r *Resolver) AddContext(ctx *ast.Context) error {
	if _, found := r.contexts[ctx.Name]; found {
		return errors.New("cannot override context")
	}
	r.contexts[ctx.Name] = ctx
	return nil
}
// Context returns the context with the given name, and whether it was
// found.
func (r *Resolver) Context(n string) (*ast.Context, bool) {
	ctx, ok := r.contexts[n]
	return ctx, ok
}
// AddConstant adds a constant declaration. It delegates to SetConstant,
// so a redeclaration with the same value is allowed.
func (r *Resolver) AddConstant(c *ast.Constant) error {
	return r.SetConstant(c.Name, c.Value)
}
// SetConstant sets a constant value. Errors if the value conflicts with
// an existing setting; re-setting the same value is a no-op.
func (r *Resolver) SetConstant(n string, v int64) error {
	existing, ok := r.constants[n]
	if ok && existing != v {
		return errors.New("cannot override constant")
	}
	r.constants[n] = v
	return nil
}
// Constant looks up the value of constant n, and reports whether it is
// defined.
func (r *Resolver) Constant(n string) (int64, bool) {
	v, ok := r.constants[n]
	return v, ok
}
// Integer resolves i to an integer value: literals evaluate to
// themselves, constant references are looked up in the resolver.
func (r *Resolver) Integer(i ast.Integer) (int64, error) {
	switch x := i.(type) {
	case *ast.IntegerConstRef:
		if v, ok := r.Constant(x.Name); ok {
			return v, nil
		}
		return 0, errors.New("constant undefined")
	case *ast.IntegerLiteral:
		return x.Value, nil
	}
	return 0, fault.NewUnexpectedType(i)
}
// Intervals builds an intervals object from an integer list. Each range
// becomes a single value or a [low, high] interval; overlapping ranges
// are rejected.
func (r *Resolver) Intervals(l *ast.IntegerList) (*intervals.Set, error) {
	is := make([]intervals.Interval, len(l.Ranges))
	for i, rng := range l.Ranges {
		lo, err := r.Integer(rng.Low)
		if err != nil {
			return nil, err
		}
		// A range without a High bound is a single value.
		if rng.High == nil {
			is[i] = intervals.Single(uint64(lo)) // XXX cast
			continue
		}
		hi, err := r.Integer(rng.High)
		if err != nil {
			return nil, err
		}
		is[i] = intervals.Range(uint64(lo), uint64(hi)) // XXX cast
	}
	if intervals.Overlaps(is) {
		return nil, errors.New("overlapping intervals")
	}
	return intervals.NewSet(is...), nil
}
// IntType looks up the integer type referred to by ref. The local struct is
// required to resolve references to fields within the struct; a non-empty
// ref.Scope instead resolves against the members of that named context.
func (r *Resolver) IntType(ref *ast.IDRef, local *ast.Struct) (*ast.IntType, error) {
	var fs []*ast.Field
	if ref.Scope == "" {
		// Unscoped reference: search the enclosing struct's own fields.
		fs = structFields(local)
	} else {
		ctx, ok := r.Context(ref.Scope)
		if !ok {
			return nil, errors.New("could not find context")
		}
		fs = ctx.Members
	}
	for _, f := range fs {
		if f.Name != ref.Name {
			continue
		}
		// The referenced field must itself be an integer type.
		i, ok := f.Type.(*ast.IntType)
		if !ok {
			return nil, errors.New("referenced field does not have integer type")
		}
		return i, nil
	}
	return nil, errors.New("could not resolve reference")
}
// structFields extracts the top-level field members from s, skipping
// non-field members such as unions and markers.
func structFields(s *ast.Struct) []*ast.Field {
	fields := []*ast.Field{}
	for _, member := range s.Members {
		f, ok := member.(*ast.Field)
		if ok {
			fields = append(fields, f)
		}
	}
	return fields
}
<|start_filename|>gen/tests/eos/eos_test.go<|end_filename|>
package eos
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestEOSParseTooShort checks that parsing fails for inputs shorter
// than the fixed 4-byte structure size.
func TestEOSParseTooShort(t *testing.T) {
	f := new(Fourbytes)
	for _, n := range []int{0, 1, 2, 3} {
		_, err := f.Parse(make([]byte, n))
		require.Error(t, err)
	}
}
// TestEOSParseTooLong checks that trailing bytes are rejected, since
// the structure ends with an eos marker.
func TestEOSParseTooLong(t *testing.T) {
	f := new(Fourbytes)
	for _, n := range []int{5, 6, 7, 8, 9} {
		_, err := f.Parse(make([]byte, n))
		require.Error(t, err)
	}
}
// TestEOSStandard parses an exactly-4-byte input and checks both
// big-endian fields and the empty remainder.
func TestEOSStandard(t *testing.T) {
	f := new(Fourbytes)
	b := []byte{1, 2, 3, 4}
	rest, err := f.Parse(b)
	require.NoError(t, err)
	assert.Equal(t, &Fourbytes{
		X: 0x0102,
		Y: 0x0304,
	}, f)
	assert.Equal(t, []byte{}, rest)
}
<|start_filename|>gen/files_test.go<|end_filename|>
package gen
import (
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/require"
"github.com/mmcloughlin/trunnel/ast"
"github.com/mmcloughlin/trunnel/internal/test"
"github.com/mmcloughlin/trunnel/parse"
)
// TestFilesBuild compiles generated marshallers for every group of
// trunnel files in the test corpora, verifying the generated Go builds.
func TestFilesBuild(t *testing.T) {
	dirs := []string{
		"testdata/valid",
		"../testdata/tor",
		"../testdata/trunnel",
	}
	for _, dir := range dirs {
		t.Run(filepath.Base(dir), func(t *testing.T) {
			// Groups bundle files that must be compiled together
			// (e.g. extern struct dependencies).
			groups, err := test.LoadFileGroups(dir)
			require.NoError(t, err)
			for _, group := range groups {
				t.Run(strings.Join(group, ","), func(t *testing.T) {
					Build(t, group)
				})
			}
		})
	}
}
// Build generates marshaller source for each trunnel file and asserts
// that the combined sources compile; compiler output is reported on
// failure.
func Build(t *testing.T, filenames []string) {
	srcs := [][]byte{}
	for _, filename := range filenames {
		f, err := parse.File(filename)
		require.NoError(t, err)
		src, err := Marshallers("pkg", []*ast.File{f})
		require.NoError(t, err)
		srcs = append(srcs, src)
	}
	output, err := test.Build(srcs)
	if err != nil {
		t.Fatal(string(output))
	}
}
<|start_filename|>gen/tests/constant/gen-fuzz.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
// +build gofuzz
package constant
// FuzzConstants is a go-fuzz entry point: returns 1 for inputs that
// parse successfully (to prioritize them) and 0 otherwise.
func FuzzConstants(data []byte) int {
	if _, err := ParseConstants(data); err != nil {
		return 0
	}
	return 1
}
<|start_filename|>gen/tests/fixie/fixie_test.go<|end_filename|>
package fixie
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestFixieParseTooShort checks that every input shorter than the
// 54-byte fixed-size structure is rejected.
func TestFixieParseTooShort(t *testing.T) {
	f := new(FixieDemo)
	for n := 0; n <= 53; n++ {
		_, err := f.Parse(make([]byte, n))
		require.Error(t, err)
	}
}
// TestFixieStandard parses a fully-populated fixed-size structure and
// verifies every field plus the unconsumed trailing bytes.
func TestFixieStandard(t *testing.T) {
	f := new(FixieDemo)
	b := []byte{
		0, 1, 2, 3, 4, 5, 6, 7, // bytes
		'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', // letters
		0, 1, 2, 3, 4, 5, 6, 7, // shortwords
		0, 1, 2, 3, 4, 5, 6, 7, // words
		0, 1, 2, 3, 4, 5, 6, 7, // big_words[0]
		0, 1, 2, 3, 4, 5, 6, 7, // big_words[1]
		'r', 'g', 'b', // colors[0]
		'R', 'G', 'B', // colors[1]
		'r', 'e', 's', 't',
	}
	rest, err := f.Parse(b)
	require.NoError(t, err)
	assert.Equal(t, &FixieDemo{
		Bytes: [8]uint8{
			0, 1, 2, 3, 4, 5, 6, 7,
		},
		Letters: [8]byte{
			'a', 'b', 'c', 'd',
			'e', 'f', 'g', 'h',
		},
		Shortwords: [4]uint16{
			0x0001, 0x0203,
			0x0405, 0x0607,
		},
		Words: [2]uint32{
			0x00010203,
			0x04050607,
		},
		BigWords: [2]uint64{
			0x0001020304050607,
			0x0001020304050607,
		},
		Colors: [2]*Color{
			{R: 'r', G: 'g', B: 'b'},
			{R: 'R', G: 'G', B: 'B'},
		},
	}, f)
	assert.Equal(t, []byte("rest"), rest)
}
<|start_filename|>meta/meta.go<|end_filename|>
// Package meta provides versioning information.
package meta
// GitSHA provides the git hash of the build. Populated at build time
// via the linker (-ldflags "-X .../meta.GitSHA=<sha>"); empty when the
// build does not set it.
var GitSHA string
<|start_filename|>ast/types_test.go<|end_filename|>
package ast
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestStructExtern verifies that a struct is "extern" exactly when its
// Members slice is nil (declared but not defined).
func TestStructExtern(t *testing.T) {
	assert.True(t, Struct{}.Extern())
	assert.False(t, Struct{Members: []Member{}}.Extern())
	assert.False(t, Struct{Members: []Member{Field{}}}.Extern())
}
<|start_filename|>gen/tests/Makefile<|end_filename|>
# All trunnel definition files in the per-test directories.
trunnels=$(wildcard */*.trunnel)
# Corresponding generated C sources, mirrored under c/.
srcs=$(addprefix c/,$(subst .trunnel,.c,$(trunnels)))
all: $(srcs)
# Generate C parser/serializer with the reference Python implementation
# of trunnel, for cross-checking against the Go generator.
c/%.c c/%.h: %.trunnel
	mkdir -p $(dir $@)
	python -m trunnel --target-dir $(dir $@) $<
<|start_filename|>inspect/branches_test.go<|end_filename|>
package inspect
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/mmcloughlin/trunnel/ast"
"github.com/mmcloughlin/trunnel/internal/intervals"
"github.com/mmcloughlin/trunnel/parse"
)
// TestNewBranchesNoDefault checks branch interval sets for a union
// without a default case: one branch per case, in declaration order.
func TestNewBranchesNoDefault(t *testing.T) {
	b := BuildBranches(t, `struct basic {
	u8 tag;
	union u[tag] {
	1,100: u8 r; u8 g; u8 b;
	2..17, 42: u16 y; u8 m; u8 d;
	};
	u16 right_after_the_union;
	};`)
	require.Len(t, b.branches, 2)
	assert.Equal(t, "1,100", b.branches[0].Set.String())
	assert.Equal(t, "2-17,42", b.branches[1].Set.String())
}
// TestNewBranchesDefault checks that a default case yields a final
// branch covering the complement of the explicit cases (16-243 here).
func TestNewBranchesDefault(t *testing.T) {
	b := BuildBranches(t, `struct basic {
	u8 tag;
	union u[tag] {
	0..0xf: u32 a;
	0xf4..0xff: u32 b;
	default: u32 c;
	};
	u16 right_after_the_union;
	};`)
	require.Len(t, b.branches, 3)
	assert.Equal(t, "0-15", b.branches[0].Set.String())
	assert.Equal(t, "244-255", b.branches[1].Set.String())
	assert.Equal(t, "16-243", b.branches[2].Set.String())
}
// TestNewBranchesBadTagRef checks the error when the union tag
// references a field that does not exist.
func TestNewBranchesBadTagRef(t *testing.T) {
	_, err := BuildBranchesWithError(t, `struct basic {
	union u[tagdoesnotexist] {
	0..0xf: u32 a;
	0xf4..0xff: u32 b;
	default: u32 c;
	};
	};`)
	assert.EqualError(t, err, "could not resolve reference")
}
// TestNewBranchesBadIntervals checks the error when a case range uses
// an undefined constant.
func TestNewBranchesBadIntervals(t *testing.T) {
	_, err := BuildBranchesWithError(t, `struct basic {
	u8 tag;
	union u[tag] {
	0..IDK: u32 a;
	};
	};`)
	assert.EqualError(t, err, "constant undefined")
}
// TestBranchesLookupAndAll exercises Branches.All and table-tests
// Branches.Lookup on values inside, on the boundary of, and outside
// the branch interval sets.
func TestBranchesLookupAndAll(t *testing.T) {
	branches := []Branch{
		{Set: intervals.NewSet(intervals.Range(30, 45), intervals.Range(100, 300))},
		{Set: intervals.NewSet(intervals.Range(10, 20), intervals.Single(1001))},
	}
	b := &Branches{branches: branches}
	assert.Equal(t, branches, b.All())
	cases := []struct {
		X      int64
		Branch Branch
		OK     bool
	}{
		{40, branches[0], true},
		{45, branches[0], true},
		{200, branches[0], true},
		{10, branches[1], true},
		{15, branches[1], true},
		{1001, branches[1], true},
		{10000, Branch{}, false},
		{46, Branch{}, false},
	}
	for _, c := range cases {
		branch, ok := b.Lookup(c.X)
		assert.Equal(t, c.Branch, branch)
		assert.Equal(t, c.OK, ok)
	}
}
// BuildBranches builds Branches for the union "u" in struct "basic"
// from trunnel source, failing the test on any error.
func BuildBranches(t *testing.T, code string) *Branches {
	b, err := BuildBranchesWithError(t, code)
	require.NoError(t, err)
	return b
}
// BuildBranchesWithError parses trunnel source expected to contain a
// struct "basic" with a union "u", and returns NewBranches' result so
// callers can assert on the error.
func BuildBranchesWithError(t *testing.T, code string) (*Branches, error) {
	f, err := parse.String(code)
	require.NoError(t, err)
	r, err := NewResolver(f)
	require.NoError(t, err)
	s, ok := r.Struct("basic")
	require.True(t, ok)
	u := lookupUnion(s, "u")
	require.NotNil(t, u)
	return NewBranches(r, s, u)
}
// lookupUnion returns the union member of s with the given name, or nil
// if s has no union member named n.
//
// Fix: the previous implementation ignored the n parameter entirely and
// returned the first union member found, which silently returns the
// wrong union for structs with more than one.
func lookupUnion(s *ast.Struct, n string) *ast.UnionMember {
	for _, m := range s.Members {
		if u, ok := m.(*ast.UnionMember); ok && u.Name == n {
			return u
		}
	}
	return nil
}
<|start_filename|>gen/tests/pos/pos_test.go<|end_filename|>
package pos
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestParseErrors table-tests inputs that must fail to parse: missing
// nul terminators for the two strings and a truncated trailing u32.
func TestParseErrors(t *testing.T) {
	cases := []struct {
		Name string
		Data []byte
	}{
		{Name: "s1_missing_nil", Data: []byte{'a'}},
		{Name: "s2_missing_nil", Data: []byte{'a', 0, 'b'}},
		{Name: "x_short", Data: []byte{'a', 0, 'b', 0, 1}},
	}
	for _, c := range cases {
		t.Run(c.Name, func(t *testing.T) {
			_, err := new(Haspos).Parse(c.Data)
			assert.Error(t, err)
		})
	}
}
// TestParseCases checks a successful parse, including that the position
// fields (Pos1/Pos2) record byte offsets after each nul-terminated
// string.
func TestParseCases(t *testing.T) {
	cases := []struct {
		Name   string
		Data   []byte
		Expect *Haspos
	}{
		{
			Name: "hello_world",
			Data: []byte{
				'H', 'e', 'l', 'l', 'o', 0,
				'W', 'o', 'r', 'l', 'd', '!', 0,
				0, 1, 2, 3,
				'r', 'e', 's', 't',
			},
			Expect: &Haspos{
				S1:   "Hello",
				Pos1: len("Hello") + 1,
				S2:   "World!",
				Pos2: len("Hello World!") + 1,
				X:    0x00010203,
			},
		},
	}
	for _, c := range cases {
		t.Run(c.Name, func(t *testing.T) {
			p := new(Haspos)
			rest, err := p.Parse(c.Data)
			require.NoError(t, err)
			assert.Equal(t, c.Expect, p)
			assert.Equal(t, []byte("rest"), rest)
		})
	}
}
<|start_filename|>examples/date/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package date
import (
"encoding/binary"
"errors"
)
// Date is a calendar date with range-constrained fields (see Parse).
type Date struct {
	Year  uint16 // big-endian on the wire; constrained to >= 1970
	Month uint8  // 1..12
	Day   uint8  // 1..31
}
// Parse decodes d from data and returns the unconsumed remainder.
// Each field is checked against its trunnel range constraint.
func (d *Date) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		if len(cur) < 2 {
			return nil, errors.New("data too short")
		}
		d.Year = binary.BigEndian.Uint16(cur)
		// Year constraint: 1970..65535.
		if !(1970 <= d.Year && d.Year <= 65535) {
			return nil, errors.New("integer constraint violated")
		}
		cur = cur[2:]
	}
	{
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		d.Month = cur[0]
		// Month constraint: the enumerated values 1..12.
		if !(d.Month == 1 || d.Month == 2 || d.Month == 3 || d.Month == 4 || d.Month == 5 || d.Month == 6 || d.Month == 7 || d.Month == 8 || d.Month == 9 || d.Month == 10 || d.Month == 11 || d.Month == 12) {
			return nil, errors.New("integer constraint violated")
		}
		cur = cur[1:]
	}
	{
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		d.Day = cur[0]
		// Day constraint: 1..31 (written as 1, 2, 3..31 in the schema).
		if !(d.Day == 1 || d.Day == 2 || (3 <= d.Day && d.Day <= 31)) {
			return nil, errors.New("integer constraint violated")
		}
		cur = cur[1:]
	}
	return cur, nil
}
// ParseDate parses data into a new Date, ignoring any trailing bytes.
// It returns an error if the data cannot be parsed.
func ParseDate(data []byte) (*Date, error) {
	var d Date
	if _, err := d.Parse(data); err != nil {
		return nil, err
	}
	return &d, nil
}
<|start_filename|>gen/fuzz.go<|end_filename|>
package gen
import "github.com/mmcloughlin/trunnel/tv"
// Fuzzers generates fuzzing functions for the types in the corpus.
// Types whose vectors carry constraints are skipped, since raw fuzz
// input cannot satisfy cross-field constraints.
func Fuzzers(pkg string, c *tv.Corpus) ([]byte, error) {
	p := &printer{}
	p.markgenerated()
	// Build tag so fuzz entry points only compile under go-fuzz.
	p.printf("// +build gofuzz\n\n")
	p.pkg(pkg)
	for _, s := range c.Suites {
		if constrained(s.Vectors) {
			continue
		}
		// Standard go-fuzz signature: 1 = interesting input, 0 = not.
		p.printf("func Fuzz%s(data []byte) int {\n", name(s.Type))
		p.printf("_, err := Parse%s(data)\n", name(s.Type))
		p.printf("if err != nil { return 0 }\n")
		p.printf("return 1")
		p.printf("}\n\n")
	}
	// imported() finalizes the source (imports/formatting) and returns it.
	return p.imported()
}
<|start_filename|>gen/tests/fixie/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package fixie
import (
"encoding/binary"
"errors"
)
// Color is an RGB triple, one byte per channel.
type Color struct {
	R uint8
	G uint8
	B uint8
}
// Parse decodes c from data (three consecutive bytes: R, G, B) and
// returns the unconsumed remainder.
func (c *Color) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		c.R = cur[0]
		cur = cur[1:]
	}
	{
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		c.G = cur[0]
		cur = cur[1:]
	}
	{
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		c.B = cur[0]
		cur = cur[1:]
	}
	return cur, nil
}
// ParseColor parses data into a new Color, ignoring any trailing bytes.
// It returns an error if the data cannot be parsed.
func ParseColor(data []byte) (*Color, error) {
	var c Color
	if _, err := c.Parse(data); err != nil {
		return nil, err
	}
	return &c, nil
}
// FixieDemo exercises fixed-length arrays of every element kind:
// integers of each width, chars, and nested structs.
type FixieDemo struct {
	Bytes      [8]uint8
	Letters    [8]byte
	Shortwords [4]uint16 // big-endian on the wire
	Words      [2]uint32 // big-endian on the wire
	BigWords   [2]uint64 // big-endian on the wire
	Colors     [2]*Color // nested fixed-size structs
}
// Parse decodes f from data, field by field in declaration order, and
// returns the unconsumed remainder. Multi-byte integers are big-endian.
func (f *FixieDemo) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		for idx := 0; idx < 8; idx++ {
			if len(cur) < 1 {
				return nil, errors.New("data too short")
			}
			f.Bytes[idx] = cur[0]
			cur = cur[1:]
		}
	}
	{
		for idx := 0; idx < 8; idx++ {
			if len(cur) < 1 {
				return nil, errors.New("data too short")
			}
			f.Letters[idx] = cur[0]
			cur = cur[1:]
		}
	}
	{
		for idx := 0; idx < 4; idx++ {
			if len(cur) < 2 {
				return nil, errors.New("data too short")
			}
			f.Shortwords[idx] = binary.BigEndian.Uint16(cur)
			cur = cur[2:]
		}
	}
	{
		for idx := 0; idx < 2; idx++ {
			if len(cur) < 4 {
				return nil, errors.New("data too short")
			}
			f.Words[idx] = binary.BigEndian.Uint32(cur)
			cur = cur[4:]
		}
	}
	{
		for idx := 0; idx < 2; idx++ {
			if len(cur) < 8 {
				return nil, errors.New("data too short")
			}
			f.BigWords[idx] = binary.BigEndian.Uint64(cur)
			cur = cur[8:]
		}
	}
	{
		// Nested structs: each Color consumes 3 bytes via its own Parse.
		for idx := 0; idx < 2; idx++ {
			var err error
			f.Colors[idx] = new(Color)
			cur, err = f.Colors[idx].Parse(cur)
			if err != nil {
				return nil, err
			}
		}
	}
	return cur, nil
}
// ParseFixieDemo parses data into a new FixieDemo, ignoring any
// trailing bytes. It returns an error if the data cannot be parsed.
func ParseFixieDemo(data []byte) (*FixieDemo, error) {
	var f FixieDemo
	if _, err := f.Parse(data); err != nil {
		return nil, err
	}
	return &f, nil
}
<|start_filename|>gen/tests/unionlo/unionlo_test.go<|end_filename|>
package unionlo
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestParseErrors table-tests malformed inputs: truncated union
// regions, an overlong leftovers length, and disallowed trailing data
// inside the union region.
func TestParseErrors(t *testing.T) {
	cases := []struct {
		Name string
		Data []byte
	}{
		{Name: "empty", Data: []byte{}},
		{Name: "short_union", Data: []byte{1}},
		{Name: "short_1", Data: []byte{1, 0, 0, 0, 0, 0, 0, 0, 0}},
		{Name: "short_4_byte", Data: []byte{4, 0, 0, 0, 0, 0, 0, 0, 0}},
		{Name: "short_4_z", Data: []byte{4, 42, 1, 0, 0, 0, 0, 0, 0, 0, 0}},
		{Name: "leftoverlen", Data: []byte{4, 42, 1, 2, 255, 0, 0, 0, 0, 0, 0, 0}},
		{Name: "trailing_data", Data: []byte{1, 2, 3, 0, 0, 0, 0, 0, 0, 0, 0}},
	}
	for _, c := range cases {
		t.Run(c.Name, func(t *testing.T) {
			_, err := new(Unlo).Parse(c.Data)
			assert.Error(t, err)
		})
	}
}
// TestParseCases checks a successful parse for each union tag (1, 2, 4),
// including the length-prefixed leftovers region and the remainder.
func TestParseCases(t *testing.T) {
	cases := []struct {
		Name   string
		Data   []byte
		Expect *Unlo
	}{
		{
			Name: "x",
			Data: []byte{
				1,
				42,
				3,
				'a', 'b', 'c',
				'r', 'e', 's', 't',
			},
			Expect: &Unlo{
				Tag:         1,
				X:           42,
				Leftoverlen: 3,
				Leftovers:   []byte("abc"),
			},
		},
		{
			Name: "y",
			Data: []byte{
				2,
				'H', 'e', 'l', 'l', 'o', ' ', 'W', 'o', 'r', 'l', 'd', '!',
				3,
				'a', 'b', 'c',
				'r', 'e', 's', 't',
			},
			Expect: &Unlo{
				Tag:         2,
				Y:           []byte("Hello World!"),
				Leftoverlen: 3,
				Leftovers:   []byte("abc"),
			},
		},
		{
			Name: "z",
			Data: []byte{
				4,
				42, 0, 1, 2, 3, 4, 5,
				3,
				'a', 'b', 'c',
				'r', 'e', 's', 't',
			},
			Expect: &Unlo{
				Tag:         4,
				Byte:        42,
				Z:           []uint16{0x0001, 0x0203, 0x0405},
				Leftoverlen: 3,
				Leftovers:   []byte("abc"),
			},
		},
	}
	for _, c := range cases {
		t.Run(c.Name, func(t *testing.T) {
			u := new(Unlo)
			rest, err := u.Parse(c.Data)
			require.NoError(t, err)
			assert.Equal(t, c.Expect, u)
			assert.Equal(t, []byte("rest"), rest)
		})
	}
}
<|start_filename|>tv/generator.go<|end_filename|>
package tv
import (
"github.com/mmcloughlin/random"
"github.com/mmcloughlin/trunnel/ast"
"github.com/mmcloughlin/trunnel/fault"
"github.com/mmcloughlin/trunnel/inspect"
)
// Vector is a test vector.
type Vector struct {
	Data        []byte      // encoded bytes of the vector
	Constraints Constraints // field-value constraints the data satisfies
}
// NewVector builds a test vector with empty constraints.
func NewVector(b []byte) Vector {
	var v Vector
	v.Data = b
	v.Constraints = NewConstraints()
	return v
}
// cross returns the pairwise combination of two vector lists: for every
// (u, w) in a x b, a vector whose data is u.Data followed by w.Data and
// whose constraints are the merge of both. It errors if any pair of
// constraint sets conflict.
func cross(a, b []Vector) ([]Vector, error) {
	p := []Vector{}
	for _, u := range a {
		for _, w := range b {
			m, err := u.Constraints.Merge(w.Constraints)
			if err != nil {
				return nil, err
			}
			// Fix: build the concatenation in a fresh slice. The previous
			// append(u.Data, w.Data...) could write into u.Data's spare
			// capacity, so vectors produced in earlier iterations of the
			// inner loop shared (and clobbered) one backing array.
			data := make([]byte, 0, len(u.Data)+len(w.Data))
			data = append(data, u.Data...)
			data = append(data, w.Data...)
			p = append(p, Vector{
				Data:        data,
				Constraints: m,
			})
		}
	}
	return p, nil
}
// generator holds the mutable state threaded through test vector
// generation for a set of files.
type generator struct {
	resolver    *inspect.Resolver // indexes structs/contexts/constants
	constraints Constraints       // constraints accumulated for the vector being built
	strct       *ast.Struct       // struct currently being generated
	selector    Selector          // strategy for pruning the vector set
	rnd         random.Interface  // randomness source (injectable for determinism)
}
// Generate generates a set of test vectors for the types defined in f.
// It is a convenience wrapper around GenerateFiles for one file.
func Generate(f *ast.File, opts ...Option) (*Corpus, error) {
	return GenerateFiles([]*ast.File{f}, opts...)
}
// GenerateFiles generates test vectors for the types in the given files.
// Defaults (exhaustive selection, fresh randomness) may be overridden
// with options.
func GenerateFiles(fs []*ast.File, opts ...Option) (*Corpus, error) {
	g := &generator{
		selector: Exhaustive,
		rnd:      random.New(),
	}
	for _, opt := range opts {
		opt(g)
	}
	return g.files(fs)
}
// Option is an option to control test vector generation; it mutates the
// generator before generation begins.
type Option func(*generator)
// WithSelector sets the method for cutting down the number of vectors we have.
func WithSelector(s Selector) Option {
	return func(g *generator) {
		g.selector = s
	}
}
// WithRandom sets the random source for test vector generation,
// allowing deterministic generation from a seeded source.
func WithRandom(r random.Interface) Option {
	return func(g *generator) {
		g.rnd = r
	}
}
// init builds the resolver over all input files so cross-file
// references resolve during generation.
func (g *generator) init(fs []*ast.File) (err error) {
	g.resolver, err = inspect.NewResolverFiles(fs)
	return
}
// files generates one vector suite per non-extern struct across the
// given files, resetting constraints between structs.
func (g *generator) files(fs []*ast.File) (*Corpus, error) {
	err := g.init(fs)
	if err != nil {
		return nil, err
	}
	c := &Corpus{}
	for _, f := range fs {
		for _, s := range f.Structs {
			// Extern structs are declared elsewhere; nothing to generate.
			if s.Extern() {
				continue
			}
			g.constraints = NewConstraints()
			vs, err := g.structure(s)
			if err != nil {
				return nil, err
			}
			c.Suites = append(c.Suites, Suite{
				Type:    s.Name,
				Vectors: vs,
			})
		}
	}
	return c, nil
}
// structure generates vectors for struct s, starting from a single
// empty vector carrying only the global constraints. The previously
// current struct is restored on return so nested struct references
// (via field) resume correctly.
func (g *generator) structure(s *ast.Struct) ([]Vector, error) {
	restore := g.strct
	g.strct = s
	vs := []Vector{
		{
			Data:        []byte{},
			Constraints: g.constraints.CloneGlobal(),
		},
	}
	vs, err := g.members(vs, s.Members)
	if err != nil {
		return nil, err
	}
	// Local (per-struct) constraints must not leak out of this struct.
	// NOTE(review): clearing v.Constraints on the range copy only works
	// if Constraints has reference semantics internally — confirm.
	g.constraints.ClearLocal()
	for _, v := range vs {
		v.Constraints.ClearLocal()
	}
	g.strct = restore
	return vs, nil
}
// members extends each vector in vs with data for every member in ms.
// Members are processed in reverse so that later fields (e.g. a union)
// are generated first and earlier fields (e.g. its tag) can then be
// filled from the constraints the union recorded; member data is
// prepended accordingly.
func (g *generator) members(vs []Vector, ms []ast.Member) ([]Vector, error) {
	n := len(ms)
	for i := n - 1; i >= 0; i-- {
		extended := []Vector{}
		for _, v := range vs {
			g.constraints = v.Constraints
			mvs, err := g.member(ms[i])
			if err != nil {
				return nil, err
			}
			for _, mv := range mvs {
				// Prepend: member i's bytes come before the bytes built
				// for members i+1..n-1.
				// NOTE(review): this append assumes each mv owns its
				// backing array; if g.member ever returns vectors that
				// share storage, siblings could be clobbered — confirm.
				mv.Data = append(mv.Data, v.Data...)
				extended = append(extended, mv)
			}
		}
		// Prune the combinatorial growth per the configured selector.
		vs = g.selector.SelectVectors(extended)
	}
	return vs, nil
}
// member generates the candidate vectors for a single struct member,
// dispatching on its AST type.
func (g *generator) member(m ast.Member) ([]Vector, error) {
	switch m := m.(type) {
	case *ast.Field:
		return g.field(m)
	case *ast.UnionMember:
		return g.union(m)
	case *ast.Ignore:
		// Ignored regions: try both nothing and some random bytes.
		return []Vector{
			g.empty(),
			g.vector(g.randbytes(1, 7)),
		}, nil
	case *ast.Fail:
		// A fail member can never parse: no vectors.
		return []Vector{}, nil
	case *ast.EOS:
		// End-of-struct marker contributes no bytes.
		return []Vector{g.empty()}, nil
	default:
		return nil, fault.NewUnexpectedType(m)
	}
}
// field generates candidate vectors for a single field, dispatching on
// its type.
func (g *generator) field(f *ast.Field) ([]Vector, error) {
	switch t := f.Type.(type) {
	case *ast.IntType:
		return g.intType(f.Name, t)
	case *ast.CharType:
		// Chars are generated as unconstrained 8-bit integers.
		return g.intType(f.Name, ast.U8)
	case *ast.Ptr:
		// Position markers contribute no bytes.
		return []Vector{g.empty()}, nil
	case *ast.NulTermString:
		return []Vector{g.vector(g.randnulterm(2, 20))}, nil
	case *ast.StructRef:
		s, err := g.resolver.StructNonExtern(t.Name)
		if err != nil {
			return nil, err
		}
		return g.structure(s)
	case *ast.FixedArrayMember:
		return g.array(t.Base, t.Size)
	case *ast.VarArrayMember:
		return g.array(t.Base, t.Constraint)
	default:
		return nil, fault.NewUnexpectedType(t)
	}
}
// intType generates the bytes for an integer field. Priority: a value
// already pinned by local constraints; otherwise a random value from
// the field's declared range constraint; otherwise fully random bytes.
func (g *generator) intType(name string, t *ast.IntType) ([]Vector, error) {
	var b []byte
	switch x, ok := g.constraints.LookupLocal(name); {
	case ok:
		b = intbytes(x, t.Size)
	case t.Constraint != nil:
		s, err := g.resolver.Intervals(t.Constraint)
		if err != nil {
			return nil, err
		}
		r := s.RandomWithGenerator(g.rnd)
		b = intbytes(int64(r), t.Size) // XXX cast
	default:
		b = g.randint(t.Size)
	}
	return []Vector{g.vector(b)}, nil
}
// array generates vectors for an array of base elements. The length is
// taken from the constraint: a constant, a referenced field (pinned via
// constraints so the tag field agrees), or random when unconstrained.
// Leftover-length arrays are not implemented.
func (g *generator) array(base ast.Type, s ast.LengthConstraint) ([]Vector, error) {
	iv := g.empty()
	v := []Vector{iv}
	var n int64
	switch s := s.(type) {
	case *ast.IntegerConstRef, *ast.IntegerLiteral:
		i, err := g.resolver.Integer(s)
		if err != nil {
			return nil, err
		}
		n = i
	case *ast.IDRef:
		// Record (or reuse) the length in constraints so the length
		// field itself is generated with a matching value.
		n = iv.Constraints.LookupOrCreateRef(s, int64(g.randbtw(1, 20)))
	case nil:
		n = int64(g.randbtw(1, 20))
	case *ast.Leftover:
		return nil, fault.ErrNotImplemented
	default:
		return nil, fault.NewUnexpectedType(s)
	}
	// Cross each element's candidate vectors into the accumulated set,
	// pruning after every element to bound growth.
	for i := int64(0); i < n; i++ {
		w, err := g.field(&ast.Field{Type: base})
		if err != nil {
			return nil, err
		}
		v, err = cross(w, v)
		if err != nil {
			return nil, err
		}
		v = g.selector.SelectVectors(v)
	}
	return v, nil
}
// union generates vectors for a tagged union. If the tag value is
// already constrained, only the matching branch is generated (none if
// no branch matches); otherwise one set of vectors per branch is
// produced, each pinning the tag to a random value from the branch's
// interval set.
func (g *generator) union(u *ast.UnionMember) ([]Vector, error) {
	branches, err := inspect.NewBranches(g.resolver, g.strct, u)
	if err != nil {
		return nil, err
	}
	// has the tag already been set?
	options := branches.All()
	t, ok := g.constraints.LookupRef(u.Tag)
	if ok {
		branch, ok := branches.Lookup(t)
		if !ok {
			return []Vector{}, nil
		}
		options = []inspect.Branch{branch}
	}
	// Each branch starts from a clone of the incoming constraints so
	// branches do not contaminate each other.
	base := g.constraints.Clone()
	results := []Vector{}
	for _, b := range options {
		g.constraints = base.Clone()
		g.constraints.LookupOrCreateRef(u.Tag, int64(b.Set.RandomWithGenerator(g.rnd))) // XXX cast
		vs, err := g.members([]Vector{g.empty()}, b.Case.Members)
		if err != nil {
			return nil, err
		}
		results = append(results, vs...)
	}
	// set length constraint
	if u.Length != nil {
		return g.lenconstrain(u.Length, results)
	}
	return results, nil
}
// lenconstrain filters vectors against a union length constraint: the
// referenced length field is pinned to each vector's data length, and
// vectors whose constraints already pin a conflicting length are
// dropped. Only field-reference constraints are supported.
func (g *generator) lenconstrain(c ast.LengthConstraint, vs []Vector) ([]Vector, error) {
	r, ok := c.(*ast.IDRef)
	if !ok {
		return nil, fault.ErrNotImplemented
	}
	results := []Vector{}
	for _, v := range vs {
		n := int64(len(v.Data))
		cst := v.Constraints.Clone()
		m := cst.LookupOrCreateRef(r, n)
		// m != n means the length was already pinned to another value:
		// this vector cannot satisfy the constraint.
		if m != n {
			continue
		}
		results = append(results, Vector{
			Data:        v.Data,
			Constraints: cst,
		})
	}
	return results, nil
}
// empty returns an empty Vector with current constraints.
func (g *generator) empty() Vector {
	return g.vector([]byte{})
}
// vector builds a Vector over b carrying the generator's current
// constraints (shared, not cloned).
func (g *generator) vector(b []byte) Vector {
	return Vector{
		Data:        b,
		Constraints: g.constraints,
	}
}
// randint returns bits/8 random bytes, i.e. a uniformly random integer
// of the given bit width in raw byte form.
func (g *generator) randint(bits uint) []byte {
	b := make([]byte, bits/8)
	g.randread(b)
	return b
}
// randbtw generates an integer between a and b, inclusive.
func (g *generator) randbtw(a, b int) int {
	return a + g.rnd.Intn(b-a+1)
}
// randbytes returns a random byte array of length between a and b,
// inclusive.
func (g *generator) randbytes(a, b int) []byte {
	d := make([]byte, g.randbtw(a, b))
	g.randread(d)
	return d
}
// randread reads random bytes into b from the configured random source.
// A read error is a programming/environment fault, hence panic.
func (g *generator) randread(b []byte) {
	if _, err := g.rnd.Read(b); err != nil {
		panic(err) // should never happen
	}
}
// randnulterm generates a random nul-terminated string of length in [a,b]
// inclusive of a and b, not including the nul byte. Characters are
// lowercase ASCII letters; the returned slice ends with the 0 byte.
func (g *generator) randnulterm(a, b int) []byte {
	const alpha = "abcdefghijklmnopqrstuvwxyz"
	n := g.randbtw(a, b)
	s := make([]byte, n+1) // +1 for the trailing nul, left as zero
	for i := 0; i < n; i++ {
		s[i] = alpha[g.rnd.Intn(len(alpha))]
	}
	return s
}
// intbytes encodes the low `bits` bits of x as a big-endian byte slice
// of length bits/8.
func intbytes(x int64, bits uint) []byte {
	size := bits / 8
	out := make([]byte, size)
	// Fill from the least significant byte backwards.
	for i := size; i > 0; i-- {
		out[i-1] = byte(x)
		x >>= 8
	}
	return out
}
<|start_filename|>gen/tests/unionbasic/unionbasic_test.go<|end_filename|>
package unionbasic
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestParseTagConstraint checks that a tag value outside the declared
// union cases violates the tag field's integer constraint.
func TestParseTagConstraint(t *testing.T) {
	b := []byte{42, 0, 0, 0, 0}
	_, err := new(Basic).Parse(b)
	assert.EqualError(t, err, "integer constraint violated")
}
// TestParseEmpty checks that an empty input fails to parse.
func TestParseEmpty(t *testing.T) {
	_, err := new(Basic).Parse([]byte{})
	assert.Error(t, err)
}
// TestParseShortCases checks that each valid tag fails when its case
// body is truncated.
func TestParseShortCases(t *testing.T) {
	tags := []byte{2, 3, 4, 6}
	for _, tag := range tags {
		_, err := new(Basic).Parse([]byte{tag, 1})
		assert.Error(t, err)
	}
}
// TestParseCases checks a successful parse for each union case: a
// nested struct, an integer, a fixed array, and a nul-terminated
// string, each followed by the unconsumed remainder.
func TestParseCases(t *testing.T) {
	cases := []struct {
		Name   string
		Data   []byte
		Expect *Basic
	}{
		{
			Name: "date",
			Data: []byte{2, 7, 225, 11, 15, 'r', 'e', 's', 't'},
			Expect: &Basic{
				Tag: 2,
				D:   &Date{Year: 2017, Month: 11, Day: 15},
			},
		},
		{
			Name: "integer",
			Data: []byte{3, 0, 1, 2, 3, 'r', 'e', 's', 't'},
			Expect: &Basic{
				Tag: 3,
				Num: 0x00010203,
			},
		},
		{
			Name: "int_array",
			Data: []byte{4, 0, 1, 2, 3, 4, 5, 6, 7, 'r', 'e', 's', 't'},
			Expect: &Basic{
				Tag:        4,
				Eightbytes: [8]byte{0, 1, 2, 3, 4, 5, 6, 7},
			},
		},
		{
			Name: "nulterm",
			Data: []byte{
				6,
				'H', 'e', 'l', 'l', 'o', ' ', 'W', 'o', 'r', 'l', 'd', '!', 0,
				'r', 'e', 's', 't',
			},
			Expect: &Basic{
				Tag:    6,
				String: "Hello World!",
			},
		},
	}
	for _, c := range cases {
		t.Run(c.Name, func(t *testing.T) {
			u := new(Basic)
			rest, err := u.Parse(c.Data)
			require.NoError(t, err)
			assert.Equal(t, c.Expect, u)
			assert.Equal(t, []byte("rest"), rest)
		})
	}
}
// TestParseDateErrors checks that the nested Date case (tag 2) fails
// for every truncated length.
func TestParseDateErrors(t *testing.T) {
	for n := 1; n < 5; n++ {
		b := make([]byte, n)
		b[0] = 2 // select the Date union case
		_, err := new(Basic).Parse(b)
		assert.Error(t, err)
	}
}
<|start_filename|>gen/tests/vararray/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package vararray
import (
"encoding/binary"
"errors"
)
// VarArray is a variable-length array of words prefixed by its length.
type VarArray struct {
	NWords uint16   // number of elements in Words
	Words  []uint32 // big-endian on the wire, NWords long
}
// Parse decodes v from data (a big-endian uint16 count followed by that
// many big-endian uint32s) and returns the unconsumed remainder.
func (v *VarArray) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		if len(cur) < 2 {
			return nil, errors.New("data too short")
		}
		v.NWords = binary.BigEndian.Uint16(cur)
		cur = cur[2:]
	}
	{
		v.Words = make([]uint32, int(v.NWords))
		for idx := 0; idx < int(v.NWords); idx++ {
			if len(cur) < 4 {
				return nil, errors.New("data too short")
			}
			v.Words[idx] = binary.BigEndian.Uint32(cur)
			cur = cur[4:]
		}
	}
	return cur, nil
}
// ParseVarArray parses data into a new VarArray, ignoring any trailing
// bytes. It returns an error if the data cannot be parsed.
func ParseVarArray(data []byte) (*VarArray, error) {
	var v VarArray
	if _, err := v.Parse(data); err != nil {
		return nil, err
	}
	return &v, nil
}
<|start_filename|>Makefile<|end_filename|>
PKG = github.com/mmcloughlin/trunnel
CMD = $(PKG)/cmd/trunnel
# Short git hash, linked into meta.GitSHA for `trunnel --version`.
GITSHA = `git rev-parse --short HEAD`
LDFLAGS = "-X $(PKG)/meta.GitSHA=$(GITSHA)"
# All Go sources; the second list excludes generated gen-*.go files.
SRC = $(shell find . -type f -name '*.go')
SRC_EXCL_GEN = $(shell find . -type f -name '*.go' -not -name 'gen-*.go')
.PHONY: install
install:
	go install -a -ldflags $(LDFLAGS) $(CMD)
.PHONY: generate
generate: tools
	go generate -x ./...
.PHONY: readme
readme:
	embedmd -w README.md
.PHONY: lint
lint:
	golangci-lint run
.PHONY: imports
imports:
	gofumports -w -local $(PKG) $(SRC)
.PHONY: fmt
fmt:
	gofmt -w -s $(SRC)
# Count lines of hand-written code (generated files excluded).
.PHONY: cloc
cloc:
	cloc $(SRC_EXCL_GEN)
docs/manual.html: ref/trunnel/doc/trunnel.md
	mkdir -p docs
	markdown $^ > $@
# Install code-generation and lint tooling.
.PHONY: tools
tools:
	go get -u \
	github.com/mna/pigeon \
	github.com/campoy/embedmd \
	mvdan.cc/gofumpt/gofumports
	curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b $(GOPATH)/bin v1.21.0
<|start_filename|>gen/tests/constraint/constraint_test.go<|end_filename|>
package constraint
import (
"encoding/binary"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// pack encodes a year/month/day triple in the Date wire format: a
// big-endian uint16 year followed by one byte each for month and day.
func pack(y uint16, m, d uint8) []byte {
	buf := make([]byte, 4)
	binary.BigEndian.PutUint16(buf, y)
	buf[2] = m
	buf[3] = d
	return buf
}
// TestDateParseLengthErrors checks that every truncation of a valid
// 4-byte date fails to parse.
func TestDateParseLengthErrors(t *testing.T) {
	d := new(Date)
	b := pack(2017, 11, 15)
	for n := 0; n < 4; n++ {
		_, err := d.Parse(b[:n])
		require.Error(t, err)
	}
}
// TestDateParseSuccess checks a valid parse and that extra bytes are
// returned unconsumed.
func TestDateParseSuccess(t *testing.T) {
	date := new(Date)
	b := pack(2017, 11, 15)
	extra := []byte("blah")
	rest, err := date.Parse(append(b, extra...))
	require.NoError(t, err)
	assert.Equal(t, &Date{Year: 2017, Month: 11, Day: 15}, date)
	assert.Equal(t, extra, rest)
}
func TestDateParseYearConstraint(t *testing.T) {
d := new(Date)
for y := 1700; y < 4000; y++ {
b := pack(uint16(y), 11, 15)
_, err := d.Parse(b)
if y >= 1970 {
assert.NoError(t, err)
} else {
assert.EqualError(t, err, "integer constraint violated")
}
}
}
func TestDateParseMonthConstraint(t *testing.T) {
d := new(Date)
for m := 0; m < 256; m++ {
b := pack(2017, byte(m), 15)
_, err := d.Parse(b)
if 1 <= m && m <= 12 {
assert.NoError(t, err)
} else {
assert.EqualError(t, err, "integer constraint violated")
}
}
}
func TestDateParseDayConstraint(t *testing.T) {
date := new(Date)
for d := 0; d < 256; d++ {
b := pack(2017, 11, byte(d))
_, err := date.Parse(b)
if 1 <= d && d <= 31 {
assert.NoError(t, err)
} else {
assert.EqualError(t, err, "integer constraint violated")
}
}
}
<|start_filename|>internal/test/utils.go<|end_filename|>
// Package test provides utilities for trunnel testing.
package test
import (
"bytes"
"fmt"
"io/ioutil"
"os"
"os/exec"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
yaml "gopkg.in/yaml.v2"
)
// Glob expands pattern and runs f as a named subtest for every file that
// matches. Failure to expand the pattern aborts the test immediately.
func Glob(t *testing.T, pattern string, f func(*testing.T, string)) {
	matches, err := filepath.Glob(pattern)
	require.NoError(t, err)
	for _, match := range matches {
		match := match // pin loop variable for the closure
		t.Run(match, func(st *testing.T) {
			f(st, match)
		})
	}
}
// TrunnelFiles returns the paths of all *.trunnel files directly inside dir.
// A directory with no matches yields an empty result and no error.
func TrunnelFiles(dir string) ([]string, error) {
	return filepath.Glob(filepath.Join(dir, "*.trunnel"))
}
// LoadFileGroups looks for trunnel files in a directory and returns groups of
// files that can be "compiled" together (accounting for extern struct
// declarations). Dependencies can be recorded in a deps.yml file in the
// directory (see LoadDependenciesDir; the old comment said "deps.yaml" but
// the code reads "deps.yml").
func LoadFileGroups(dir string) ([][]string, error) {
	deps, err := LoadDependenciesDir(dir)
	if err != nil {
		return nil, err
	}
	filenames, err := TrunnelFiles(dir)
	if err != nil {
		return nil, err
	}
	groups := [][]string{}
	for _, filename := range filenames {
		// Each group must contain the file itself; the previous line was
		// corrupted ("[]string(unknown)" does not compile) and dropped it,
		// leaving only the dependencies in the group.
		// NOTE(review): ordering is file-first then its deps — confirm this
		// matches what parse.Files/VerifyGroup expect.
		group := []string{filename}
		base := filepath.Base(filename)
		if ds, ok := deps.Dependencies[base]; ok {
			for _, d := range ds {
				group = append(group, filepath.Join(dir, d))
			}
		}
		groups = append(groups, group)
	}
	return groups, nil
}
// Dependencies records dependencies between trunnel files.
type Dependencies struct {
	// Dependencies maps a trunnel file's base name to the base names of the
	// files it must be compiled alongside.
	Dependencies map[string][]string
}

// LoadDependenciesFile loads Dependencies from a YAML file.
func LoadDependenciesFile(filename string) (*Dependencies, error) {
	b, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil, err
	}
	deps := &Dependencies{}
	if err := yaml.Unmarshal(b, deps); err != nil {
		return nil, err
	}
	return deps, nil
}

// LoadDependenciesDir looks for "deps.yml" in the directory and loads it if
// it exists. If the file is not found, it loads an empty set of dependencies.
func LoadDependenciesDir(dir string) (*Dependencies, error) {
	filename := filepath.Join(dir, "deps.yml")
	deps, err := LoadDependenciesFile(filename)
	if os.IsNotExist(err) {
		// A missing file is not an error: behave as "no dependencies".
		return &Dependencies{
			Dependencies: map[string][]string{},
		}, nil
	}
	return deps, err
}

// TempDir creates a temp directory. Returns the path to the directory and a
// cleanup function that removes it (and fails the test if removal fails).
func TempDir(t *testing.T) (string, func()) {
	dir, err := ioutil.TempDir("", "trunnel")
	require.NoError(t, err)
	return dir, func() {
		require.NoError(t, os.RemoveAll(dir))
	}
}
// Build checks whether Go source code src builds correctly. Returns the output
// of "go build" and an error, if any. Sources are written into a throwaway
// temp directory that is removed before returning.
func Build(srcs [][]byte) ([]byte, error) {
	dir, err := ioutil.TempDir("", "trunnel")
	if err != nil {
		return nil, err
	}
	defer func() {
		_ = os.RemoveAll(dir) // best-effort cleanup
	}()
	args := []string{"build"}
	for i, src := range srcs {
		path := filepath.Join(dir, fmt.Sprintf("src%03d.go", i))
		if werr := ioutil.WriteFile(path, src, 0600); werr != nil {
			return nil, werr
		}
		args = append(args, path)
	}
	return exec.Command("go", args...).CombinedOutput()
}
// FileExists tests whether filename exists. Note that a Stat failure other
// than "not exist" (e.g. permission error) still reports true, matching the
// original behavior.
func FileExists(filename string) bool {
	if _, err := os.Stat(filename); os.IsNotExist(err) {
		return false
	}
	return true
}
// FileContentsEqual determines whether a and b have the same contents.
// An error reading either file (a is read first) is returned as-is.
func FileContentsEqual(a, b string) (bool, error) {
	contents := make([][]byte, 2)
	for i, path := range []string{a, b} {
		data, err := ioutil.ReadFile(path)
		if err != nil {
			return false, err
		}
		contents[i] = data
	}
	return bytes.Equal(contents[0], contents[1]), nil
}
// AssertFileContentsEqual asserts that files a and b have the same contents.
// A read error is fatal; unequal contents is a non-fatal assertion failure.
func AssertFileContentsEqual(t *testing.T, a, b string) {
	eq, err := FileContentsEqual(a, b)
	require.NoError(t, err)
	assert.True(t, eq)
}
<|start_filename|>tv/corpus.go<|end_filename|>
package tv
import (
"crypto/sha256"
"encoding/hex"
"path/filepath"
"github.com/pkg/errors"
"github.com/spf13/afero"
)
// Corpus is a collection of test vectors for multiple types.
type Corpus struct {
	Suites []Suite
}

// Suite contains test vectors for one type.
type Suite struct {
	// Type is the trunnel struct name the vectors target.
	Type    string
	Vectors []Vector
}

// AddSuite appends a suite to the corpus.
func (c *Corpus) AddSuite(s Suite) {
	c.Suites = append(c.Suites, s)
}

// AddVectors adds vectors to the corpus for type n. Convenience wrapper around
// AddSuite.
func (c *Corpus) AddVectors(n string, vs []Vector) {
	c.AddSuite(Suite{
		Type:    n,
		Vectors: vs,
	})
}
// Vectors looks up test vectors for the given type. Returns nil if none
// found; if multiple suites share the type, the first match wins.
func (c Corpus) Vectors(n string) []Vector {
	for i := range c.Suites {
		if c.Suites[i].Type == n {
			return c.Suites[i].Vectors
		}
	}
	return nil
}
// WriteCorpus writes the corpus of vectors in a standard structure under dir.
func WriteCorpus(c *Corpus, dir string) error {
	// Scope all writes beneath dir via a base-path filesystem.
	fs := afero.NewBasePathFs(afero.NewOsFs(), dir)
	return writecorpus(c, fs, sha256namer)
}

// writecorpus writes vectors to the filesystem fs, one file per vector under
// "<type>/corpus/", naming each file with namer applied to the vector bytes.
func writecorpus(c *Corpus, fs afero.Fs, namer func([]byte) string) error {
	a := afero.Afero{Fs: fs}
	for _, s := range c.Suites {
		dir := filepath.Join(s.Type, "corpus")
		if err := a.MkdirAll(dir, 0750); err != nil {
			return errors.Wrap(err, "could not create directory")
		}
		for _, v := range s.Vectors {
			filename := namer(v.Data)
			path := filepath.Join(dir, filename)
			if err := a.WriteFile(path, v.Data, 0640); err != nil {
				return errors.Wrap(err, "failed to write file")
			}
		}
	}
	return nil
}
// sha256namer returns the hex-encoded sha256 hash of b, used as a
// content-addressed filename for corpus vectors.
func sha256namer(b []byte) string {
	digest := sha256.Sum256(b)
	return hex.EncodeToString(digest[:])
}
<|start_filename|>gen/tests.go<|end_filename|>
package gen
import "github.com/mmcloughlin/trunnel/tv"
// CorpusTests generates a test file based on a corpus of test vectors.
// Suites whose vectors carry field constraints are skipped, since replaying
// the raw bytes alone cannot satisfy them.
func CorpusTests(pkg string, c *tv.Corpus) ([]byte, error) {
	p := &printer{}
	p.header(pkg)
	for _, s := range c.Suites {
		if constrained(s.Vectors) {
			continue
		}
		p.printf("func Test%sCorpus(t *testing.T) {\n", name(s.Type))
		// cases: a table of raw byte vectors
		p.printf("cases := []struct{\nData []byte\n}{\n")
		for _, v := range s.Vectors {
			p.printf("{\nData: %#v,\n},\n", v.Data)
		}
		p.printf("}\n")
		// test each one: parse must succeed
		p.printf("for _, c := range cases {\n")
		p.printf("_, err := Parse%s(c.Data)\n", name(s.Type))
		p.printf("if err != nil { t.Fatal(err) }\n")
		p.printf("}\n")
		p.printf("}\n\n")
	}
	// imported() runs goimports over the buffer, fixing formatting/imports.
	return p.imported()
}
// constrained reports whether any vector in vs carries constraints on
// struct/context fields.
func constrained(vs []tv.Vector) bool {
	for i := range vs {
		if len(vs[i].Constraints) != 0 {
			return true
		}
	}
	return false
}
<|start_filename|>gen/tests/contexts/gen-marshallers_test.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package contexts
import "testing"
// TestPointCorpus replays recorded wire bytes through ParsePoint and fails
// on any parse error. (Generated corpus test; regenerate rather than edit.)
func TestPointCorpus(t *testing.T) {
	cases := []struct {
		Data []byte
	}{
		{
			Data: []byte{0x25, 0x1},
		},
	}
	for _, c := range cases {
		_, err := ParsePoint(c.Data)
		if err != nil {
			t.Fatal(err)
		}
	}
}
<|start_filename|>gen/doc.go<|end_filename|>
// Package gen generates Go parser code from a trunnel AST.
package gen
<|start_filename|>gen/tests/constant/constant_test.go<|end_filename|>
package constant
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestParse exhaustively checks single-byte inputs against the allowed
// constants: 42 (decimal), 0x42 (= 66) and 042 (octal, = 34).
func TestParse(t *testing.T) {
	allowed := map[byte]bool{42: true, 0x42: true, 042: true}
	for v := 0; v < 256; v++ {
		x := byte(v)
		parsed := new(Constants)
		_, err := parsed.Parse([]byte{x})
		if allowed[x] {
			require.NoError(t, err)
			assert.Equal(t, &Constants{X: x}, parsed)
		} else {
			assert.Error(t, err)
		}
	}
}

// TestParseEmpty verifies parsing fails on empty input.
func TestParseEmpty(t *testing.T) {
	var c Constants
	_, err := c.Parse([]byte{})
	assert.Error(t, err)
}
<|start_filename|>gen/tests/rem/rem_test.go<|end_filename|>
package rem
import (
"math/rand"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestRemParseLengthErrors verifies inputs shorter than the fixed 4-byte
// head fail to parse.
func TestRemParseLengthErrors(t *testing.T) {
	for n := 0; n < 4; n++ {
		_, err := new(Rem).Parse(make([]byte, n))
		require.Error(t, err)
	}
}

// TestRemParseAnyLength confirms that we never get any "rest" bytes returned
// from parsing. These should be taken by the "remaining" array.
func TestRemParseAnyLength(t *testing.T) {
	r := new(Rem)
	for trial := 0; trial < 100; trial++ {
		n := 4 + rand.Intn(1000) // always at least the 4-byte head
		rest, err := r.Parse(make([]byte, n))
		require.NoError(t, err)
		assert.Equal(t, []byte{}, rest)
		assert.Equal(t, n-4, len(r.Tail))
	}
}

// TestRemParseEmptyTail checks an input of exactly the head yields an empty
// (non-nil) tail.
func TestRemParseEmptyTail(t *testing.T) {
	b := []byte{0, 1, 2, 3}
	r := new(Rem)
	rest, err := r.Parse(b)
	require.NoError(t, err)
	assert.Equal(t, []byte{}, rest)
	assert.Equal(t, &Rem{
		Head: 0x00010203,
		Tail: []byte{},
	}, r)
}

// TestRemParseSuccess checks the head/tail split on a non-trivial input.
func TestRemParseSuccess(t *testing.T) {
	b := []byte{
		0, 1, 2, 3,
		't', 'h', 'e', 't', 'a', 'i', 'l',
	}
	r := new(Rem)
	rest, err := r.Parse(b)
	require.NoError(t, err)
	assert.Equal(t, []byte{}, rest)
	assert.Equal(t, &Rem{
		Head: 0x00010203,
		Tail: []byte("thetail"),
	}, r)
}
<|start_filename|>gen/tests/unioncmds/unioncmds_test.go<|end_filename|>
package unioncmds
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestParseErrors exercises inputs that must fail: empty data, a tag hitting
// the fail case, and payloads too short for the selected branch.
func TestParseErrors(t *testing.T) {
	cases := []struct {
		Name string
		Data []byte
	}{
		{Name: "empty", Data: []byte{}},
		{Name: "fail_case", Data: []byte{2}},
		{Name: "default_short", Data: []byte{123, 0, 1, 2}},
		{Name: "y_short", Data: []byte{1, 0, 1, 2}},
	}
	for _, c := range cases {
		t.Run(c.Name, func(t *testing.T) {
			_, err := new(UnionCmds).Parse(c.Data)
			assert.Error(t, err)
		})
	}
}

// TestParseCases checks the ignore case and the default case both decode
// correctly and leave the trailing "rest" bytes unconsumed.
func TestParseCases(t *testing.T) {
	cases := []struct {
		Name   string
		Data   []byte
		Expect *UnionCmds
	}{
		{
			Name: "ignore",
			Data: []byte{
				1, // ignore case
				0, 1, 2, 3,
				'r', 'e', 's', 't',
			},
			Expect: &UnionCmds{
				Tag: 1,
				Y:   0x00010203,
			},
		},
		{
			Name: "default",
			Data: []byte{
				42,
				0, 1, 2, 3,
				4, 5, 6, 7,
				8, 9, 10, 11,
				'r', 'e', 's', 't',
			},
			Expect: &UnionCmds{
				Tag: 42,
				X: [2]uint32{
					0x00010203,
					0x04050607,
				},
				Y: 0x08090a0b,
			},
		},
	}
	for _, c := range cases {
		t.Run(c.Name, func(t *testing.T) {
			u := new(UnionCmds)
			rest, err := u.Parse(c.Data)
			require.NoError(t, err)
			assert.Equal(t, c.Expect, u)
			assert.Equal(t, []byte("rest"), rest)
		})
	}
}
<|start_filename|>gen/printer.go<|end_filename|>
package gen
import (
"bytes"
"fmt"
"golang.org/x/tools/imports"
)
// printer accumulates generated Go source in an in-memory buffer.
type printer struct {
	buf bytes.Buffer
}

// output returns the raw bytes written so far.
func (p *printer) output() []byte {
	return p.buf.Bytes()
}

// imported runs goimports over the buffer, returning gofmt-ed source with
// import statements fixed up.
func (p *printer) imported() ([]byte, error) {
	return imports.Process("", p.output(), nil)
}

// header emits the standard file preamble: generated-code marker plus the
// package clause.
func (p *printer) header(pkg string) {
	p.markgenerated()
	p.pkg(pkg)
}

// markgenerated emits the conventional "generated code" comment.
func (p *printer) markgenerated() {
	p.printf("// Code generated by trunnel. DO NOT EDIT.\n\n")
}

// pkg emits the package clause.
func (p *printer) pkg(pkg string) {
	p.printf("package %s\n\n", pkg)
}

// printf appends formatted text to the buffer.
func (p *printer) printf(format string, a ...interface{}) {
	if _, err := fmt.Fprintf(&p.buf, format, a...); err != nil {
		panic(err) // printing to a bytes.Buffer should never fail
	}
}
<|start_filename|>gen/tests/ints/gen-marshallers_test.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package ints
import "testing"
// TestIntsCorpus replays recorded wire bytes through ParseInts and fails on
// any parse error. (Generated corpus test; regenerate rather than edit.)
func TestIntsCorpus(t *testing.T) {
	cases := []struct {
		Data []byte
	}{
		{
			Data: []byte{0x73, 0x4, 0x5b, 0x41, 0xd3, 0xff, 0x12, 0x1, 0x94, 0xfd, 0xc2, 0xfa, 0x2f, 0xfc, 0xc0},
		},
	}
	for _, c := range cases {
		_, err := ParseInts(c.Data)
		if err != nil {
			t.Fatal(err)
		}
	}
}
<|start_filename|>tv/files_test.go<|end_filename|>
package tv
import (
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/mmcloughlin/trunnel/fault"
"github.com/mmcloughlin/trunnel/inspect"
"github.com/mmcloughlin/trunnel/internal/test"
"github.com/mmcloughlin/trunnel/parse"
)
// TestFiles generates and verifies test vectors for each compilable group of
// trunnel files in the tor and trunnel testdata directories.
func TestFiles(t *testing.T) {
	dirs := []string{
		"../testdata/tor",
		"../testdata/trunnel",
	}
	for _, dir := range dirs {
		t.Run(filepath.Base(dir), func(t *testing.T) {
			groups, err := test.LoadFileGroups(dir)
			require.NoError(t, err)
			for _, group := range groups {
				t.Run(strings.Join(group, ","), func(t *testing.T) {
					VerifyGroup(t, group)
				})
			}
		})
	}
}

// VerifyGroup parses filenames together, generates vectors for them, and
// asserts every non-extern struct receives at least one vector. The group is
// skipped if generation hits an unimplemented feature.
func VerifyGroup(t *testing.T, filenames []string) {
	fs, err := parse.Files(filenames)
	require.NoError(t, err)
	c, err := GenerateFiles(fs, WithSelector(RandomSampleSelector(16)))
	if err == fault.ErrNotImplemented {
		t.Log(err)
		t.SkipNow()
	}
	require.NoError(t, err)
	r, err := inspect.NewResolverFiles(fs)
	require.NoError(t, err)
	for _, s := range r.Structs() {
		if s.Extern() {
			// extern structs are declared elsewhere; no vectors expected
			continue
		}
		t.Run(s.Name, func(t *testing.T) {
			num := len(c.Vectors(s.Name))
			t.Logf("%d test vectors for %s", num, s.Name)
			assert.True(t, num > 0)
		})
	}
}
<|start_filename|>parse/parse.go<|end_filename|>
// Package parse implements a parser for trunnel source files.
package parse
import (
"io"
"strings"
"github.com/mmcloughlin/trunnel/ast"
"github.com/mmcloughlin/trunnel/parse/internal/parser"
)
//go:generate pigeon -o internal/parser/gen-parser.go trunnel.pigeon
// File parses the trunnel source at filename into an AST.
func File(filename string) (*ast.File, error) {
	return cast(parser.ParseFile(filename))
}

// Files is a convenience for parsing multiple files, in order. The first
// error aborts parsing.
func Files(filenames []string) ([]*ast.File, error) {
	fs := make([]*ast.File, len(filenames))
	for i, filename := range filenames {
		f, err := File(filename)
		if err != nil {
			return nil, err
		}
		fs[i] = f
	}
	return fs, nil
}

// Reader parses the data from r using filename as information in
// error messages.
func Reader(filename string, r io.Reader) (*ast.File, error) {
	return cast(parser.ParseReader(filename, r))
}

// String parses s.
func String(s string) (*ast.File, error) {
	return Reader("string", strings.NewReader(s))
}

// cast narrows the untyped result of the generated parser to *ast.File,
// passing through any parse error untouched.
func cast(i interface{}, err error) (*ast.File, error) {
	if err != nil {
		return nil, err
	}
	return i.(*ast.File), nil
}
<|start_filename|>inspect/branches.go<|end_filename|>
package inspect
import (
"github.com/mmcloughlin/trunnel/ast"
"github.com/mmcloughlin/trunnel/internal/intervals"
)
// Branch is a case of a union.
type Branch struct {
	// Set holds the tag values that select this branch.
	Set *intervals.Set
	// Case is the corresponding AST union case.
	Case *ast.UnionCase
}

// Branches represents the branches of a union.
type Branches struct {
	branches []Branch
}

// NewBranches builds a Branches object from a union.
func NewBranches(r *Resolver, s *ast.Struct, u *ast.UnionMember) (*Branches, error) {
	// Resolve the tag field's integer type; its bit width bounds the value
	// space of the default branch.
	t, err := r.IntType(u.Tag, s)
	if err != nil {
		return nil, err
	}
	// The default branch starts as the full range of the tag type; every
	// explicit case's intervals are subtracted from it in the loop below.
	dflt := Branch{
		Set: intervals.IntType(t.Size),
	}
	b := &Branches{}
	for _, c := range u.Cases {
		if c.Case == nil {
			// A nil Case marks the "default:" arm.
			dflt.Case = c
			continue
		}
		s, err := r.Intervals(c.Case)
		if err != nil {
			return nil, err
		}
		b.branches = append(b.branches, Branch{
			Set:  s,
			Case: c,
		})
		dflt.Set.Subtract(s)
	}
	// Only include the default branch if the union actually declared one.
	if dflt.Case != nil {
		b.branches = append(b.branches, dflt)
	}
	return b, nil
}

// Lookup fetches the branch x falls into.
func (b *Branches) Lookup(x int64) (Branch, bool) {
	for _, branch := range b.branches {
		if branch.Set.Contains(uint64(x)) { // XXX cast
			return branch, true
		}
	}
	return Branch{}, false
}

// All returns all branches.
func (b *Branches) All() []Branch {
	return b.branches
}
<|start_filename|>gen/tests/nulterm/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package nulterm
import (
"bytes"
"encoding/binary"
"errors"
)
// NulTerm is the parsed form of the nulterm struct: a big-endian uint32, a
// NUL-terminated string, then a single byte.
type NulTerm struct {
	X uint32
	S string
	Y uint8
}

// Parse decodes n from the front of data and returns the unconsumed
// remainder. On error n may be partially filled.
func (n *NulTerm) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		if len(cur) < 4 {
			return nil, errors.New("data too short")
		}
		n.X = binary.BigEndian.Uint32(cur)
		cur = cur[4:]
	}
	{
		// The NUL terminator is consumed but not included in S.
		i := bytes.IndexByte(cur, 0)
		if i < 0 {
			return nil, errors.New("could not parse nul-term string")
		}
		n.S, cur = string(cur[:i]), cur[i+1:]
	}
	{
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		n.Y = cur[0]
		cur = cur[1:]
	}
	return cur, nil
}

// ParseNulTerm parses a NulTerm from data, discarding trailing bytes.
func ParseNulTerm(data []byte) (*NulTerm, error) {
	n := new(NulTerm)
	_, err := n.Parse(data)
	if err != nil {
		return nil, err
	}
	return n, nil
}
<|start_filename|>gen/tests/pos/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package pos
import (
"bytes"
"encoding/binary"
"errors"
)
// Haspos holds two NUL-terminated strings followed by a big-endian uint32.
// Pos1/Pos2 record the byte offsets consumed after each string.
type Haspos struct {
	S1   string
	Pos1 int
	S2   string
	Pos2 int
	X    uint32
}

// Parse decodes h from the front of data and returns the remainder.
func (h *Haspos) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		i := bytes.IndexByte(cur, 0)
		if i < 0 {
			return nil, errors.New("could not parse nul-term string")
		}
		h.S1, cur = string(cur[:i]), cur[i+1:]
	}
	{
		// Offset into data after S1 (including its NUL terminator).
		h.Pos1 = len(data) - len(cur)
	}
	{
		i := bytes.IndexByte(cur, 0)
		if i < 0 {
			return nil, errors.New("could not parse nul-term string")
		}
		h.S2, cur = string(cur[:i]), cur[i+1:]
	}
	{
		// Offset into data after S2.
		h.Pos2 = len(data) - len(cur)
	}
	{
		if len(cur) < 4 {
			return nil, errors.New("data too short")
		}
		h.X = binary.BigEndian.Uint32(cur)
		cur = cur[4:]
	}
	return cur, nil
}

// ParseHaspos parses a Haspos from data, discarding trailing bytes.
func ParseHaspos(data []byte) (*Haspos, error) {
	h := new(Haspos)
	_, err := h.Parse(data)
	if err != nil {
		return nil, err
	}
	return h, nil
}
<|start_filename|>gen/tests/color/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package color
import "errors"
// Color is three consecutive bytes: red, green, blue.
type Color struct {
	R uint8
	G uint8
	B uint8
}

// Parse decodes c from the front of data and returns the remainder.
func (c *Color) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		c.R = cur[0]
		cur = cur[1:]
	}
	{
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		c.G = cur[0]
		cur = cur[1:]
	}
	{
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		c.B = cur[0]
		cur = cur[1:]
	}
	return cur, nil
}

// ParseColor parses a Color from data, discarding trailing bytes.
func ParseColor(data []byte) (*Color, error) {
	c := new(Color)
	_, err := c.Parse(data)
	if err != nil {
		return nil, err
	}
	return c, nil
}
<|start_filename|>gen/tests/unionbasic/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package unionbasic
import (
"bytes"
"encoding/binary"
"errors"
)
// Date is a (year, month, day) triple: big-endian uint16 year plus two bytes.
type Date struct {
	Year  uint16
	Month uint8
	Day   uint8
}

// Parse decodes d from the front of data and returns the remainder.
func (d *Date) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		if len(cur) < 2 {
			return nil, errors.New("data too short")
		}
		d.Year = binary.BigEndian.Uint16(cur)
		cur = cur[2:]
	}
	{
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		d.Month = cur[0]
		cur = cur[1:]
	}
	{
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		d.Day = cur[0]
		cur = cur[1:]
	}
	return cur, nil
}

// ParseDate parses a Date from data, discarding trailing bytes.
func ParseDate(data []byte) (*Date, error) {
	d := new(Date)
	_, err := d.Parse(data)
	if err != nil {
		return nil, err
	}
	return d, nil
}

// Basic is a tagged union: Tag selects which one of the remaining fields is
// populated by Parse.
type Basic struct {
	Tag        uint8
	D          *Date
	Num        uint32
	Eightbytes [8]uint8
	String     string
}

// Parse decodes b from data. The tag byte must lie in 2..6; note tag 5 is
// accepted by the constraint but matches no case, so no payload follows it.
func (b *Basic) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		b.Tag = cur[0]
		if !(b.Tag == 2 || b.Tag == 3 || b.Tag == 4 || b.Tag == 5 || b.Tag == 6) {
			return nil, errors.New("integer constraint violated")
		}
		cur = cur[1:]
	}
	{
		switch {
		case b.Tag == 2:
			// tag 2: nested Date struct
			{
				var err error
				b.D = new(Date)
				cur, err = b.D.Parse(cur)
				if err != nil {
					return nil, err
				}
			}
		case b.Tag == 3:
			// tag 3: big-endian uint32
			{
				if len(cur) < 4 {
					return nil, errors.New("data too short")
				}
				b.Num = binary.BigEndian.Uint32(cur)
				cur = cur[4:]
			}
		case b.Tag == 4:
			// tag 4: fixed array of 8 bytes
			{
				for idx := 0; idx < 8; idx++ {
					if len(cur) < 1 {
						return nil, errors.New("data too short")
					}
					b.Eightbytes[idx] = cur[0]
					cur = cur[1:]
				}
			}
		case b.Tag == 6:
			// tag 6: NUL-terminated string (terminator consumed, excluded)
			{
				i := bytes.IndexByte(cur, 0)
				if i < 0 {
					return nil, errors.New("could not parse nul-term string")
				}
				b.String, cur = string(cur[:i]), cur[i+1:]
			}
		}
	}
	return cur, nil
}

// ParseBasic parses a Basic from data, discarding trailing bytes.
func ParseBasic(data []byte) (*Basic, error) {
	b := new(Basic)
	_, err := b.Parse(data)
	if err != nil {
		return nil, err
	}
	return b, nil
}
<|start_filename|>gen/tests/constant/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package constant
import "errors"
// Constants holds a single byte constrained to one of three literal values.
type Constants struct {
	X uint8
}

// Parse decodes c from the front of data and returns the remainder. The byte
// must equal 42, 66 (0x42) or 34 (042 octal).
func (c *Constants) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		c.X = cur[0]
		if !(c.X == 42 || c.X == 66 || c.X == 34) {
			return nil, errors.New("integer constraint violated")
		}
		cur = cur[1:]
	}
	return cur, nil
}

// ParseConstants parses a Constants from data, discarding trailing bytes.
func ParseConstants(data []byte) (*Constants, error) {
	c := new(Constants)
	_, err := c.Parse(data)
	if err != nil {
		return nil, err
	}
	return c, nil
}
<|start_filename|>gen/tests/unionlen/gen-marshallers_test.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package unionlen
import "testing"
// TestUnionWithLenCorpus replays recorded wire bytes through
// ParseUnionWithLen, failing on any parse error. (Generated corpus test;
// regenerate rather than edit.)
func TestUnionWithLenCorpus(t *testing.T) {
	cases := []struct {
		Data []byte
	}{
		{
			Data: []byte{0x0, 0x1, 0x0, 0x3, 0xfa, 0xc2, 0xfd, 0x1, 0x94},
		},
		{
			Data: []byte{0x0, 0x2, 0x0, 0x4, 0xe2, 0xd6, 0xf3, 0xc3, 0x1, 0x94},
		},
		{
			Data: []byte{0x0, 0x2, 0x0, 0x6, 0xbe, 0xcc, 0xed, 0x54, 0x2f, 0xfc, 0x1, 0x94},
		},
		{
			Data: []byte{0x97, 0x18, 0x0, 0x7, 0x5a, 0x52, 0x88, 0x5e, 0xae, 0xa3, 0x4e, 0x1, 0x94},
		},
	}
	for _, c := range cases {
		_, err := ParseUnionWithLen(c.Data)
		if err != nil {
			t.Fatal(err)
		}
	}
}
<|start_filename|>gen/tests/eos/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package eos
import (
"encoding/binary"
"errors"
)
// Fourbytes is two big-endian uint16 values that must consume the entire
// input: trailing data is an error (end-of-stream marker in the schema).
type Fourbytes struct {
	X uint16
	Y uint16
}

// Parse decodes f from data; data must be exactly 4 bytes.
func (f *Fourbytes) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		if len(cur) < 2 {
			return nil, errors.New("data too short")
		}
		f.X = binary.BigEndian.Uint16(cur)
		cur = cur[2:]
	}
	{
		if len(cur) < 2 {
			return nil, errors.New("data too short")
		}
		f.Y = binary.BigEndian.Uint16(cur)
		cur = cur[2:]
	}
	{
		// eos: any leftover bytes are rejected.
		if len(cur) > 0 {
			return nil, errors.New("trailing data disallowed")
		}
	}
	return cur, nil
}

// ParseFourbytes parses a Fourbytes from data.
func ParseFourbytes(data []byte) (*Fourbytes, error) {
	f := new(Fourbytes)
	_, err := f.Parse(data)
	if err != nil {
		return nil, err
	}
	return f, nil
}
<|start_filename|>gen/tests/ints/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package ints
import (
"encoding/binary"
"errors"
)
// Ints packs one integer of each trunnel width: u8, u16, u32, u64, all
// big-endian, 15 bytes total.
type Ints struct {
	Byte  uint8
	Word  uint16
	Dword uint32
	Qword uint64
}

// Parse decodes i from the front of data and returns the remainder.
func (i *Ints) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		if len(cur) < 1 {
			return nil, errors.New("data too short")
		}
		i.Byte = cur[0]
		cur = cur[1:]
	}
	{
		if len(cur) < 2 {
			return nil, errors.New("data too short")
		}
		i.Word = binary.BigEndian.Uint16(cur)
		cur = cur[2:]
	}
	{
		if len(cur) < 4 {
			return nil, errors.New("data too short")
		}
		i.Dword = binary.BigEndian.Uint32(cur)
		cur = cur[4:]
	}
	{
		if len(cur) < 8 {
			return nil, errors.New("data too short")
		}
		i.Qword = binary.BigEndian.Uint64(cur)
		cur = cur[8:]
	}
	return cur, nil
}

// ParseInts parses an Ints from data, discarding trailing bytes.
func ParseInts(data []byte) (*Ints, error) {
	i := new(Ints)
	_, err := i.Parse(data)
	if err != nil {
		return nil, err
	}
	return i, nil
}
<|start_filename|>gen/tests/vararray/gen-marshallers_test.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package vararray
import "testing"
// TestVarArrayCorpus replays recorded wire bytes through ParseVarArray and
// fails on any parse error. (Generated corpus test; regenerate, don't edit.)
func TestVarArrayCorpus(t *testing.T) {
	cases := []struct {
		Data []byte
	}{
		{
			Data: []byte{0x0, 0xf, 0x4e, 0xe2, 0xd3, 0xd0, 0x78, 0x8, 0x75, 0xd6, 0xe1, 0x51, 0x14, 0x55, 0x2e, 0xe2, 0x85, 0xec, 0xd4, 0xa1, 0x78, 0x89, 0x46, 0x51, 0x85, 0xf, 0xe0, 0xb1, 0xd, 0x39, 0xaf, 0x48, 0xa7, 0x9e, 0x75, 0xfb, 0x18, 0xd, 0xf4, 0x4a, 0x2d, 0xb, 0xee, 0xe8, 0x2a, 0xbd, 0x5f, 0xf6, 0x62, 0xa5, 0xc8, 0x6e, 0x4f, 0xf9, 0x12, 0x4, 0x5b, 0x73, 0xc0, 0x41, 0xd3, 0xff},
		},
	}
	for _, c := range cases {
		_, err := ParseVarArray(c.Data)
		if err != nil {
			t.Fatal(err)
		}
	}
}
<|start_filename|>tv/constraints.go<|end_filename|>
package tv
import (
"github.com/pkg/errors"
"github.com/mmcloughlin/trunnel/ast"
)
// Constraints records fixed values for struct/context fields. The outer key
// is a scope name ("" is the local struct scope); the inner map goes from
// field name to its pinned value.
type Constraints map[string]map[string]int64

// NewConstraints builds an empty set of constraints.
func NewConstraints() Constraints {
	return Constraints{}
}

// Lookup returns the value of the constraint on s.k and whether one is set.
func (c Constraints) Lookup(s, k string) (int64, bool) {
	values, ok := c[s]
	if !ok {
		return 0, false
	}
	v, ok := values[k]
	return v, ok
}

// LookupLocal is a convenience for looking up in the local scope "".
func (c Constraints) LookupLocal(k string) (int64, bool) {
	return c.Lookup("", k)
}

// LookupRef is a convenience for looking up an AST IDRef.
func (c Constraints) LookupRef(r *ast.IDRef) (int64, bool) {
	return c.Lookup(r.Scope, r.Name)
}
// Set sets the value of s.k, creating the scope on first use. Setting a key
// that already holds a different value is an error; re-setting the same
// value is a no-op.
func (c Constraints) Set(s, k string, v int64) error {
	scope, ok := c[s]
	if !ok {
		scope = map[string]int64{}
		c[s] = scope
	}
	if prev, exists := scope[k]; exists && prev != v {
		return errors.New("conflicting constraint")
	}
	scope[k] = v
	return nil
}
// SetRef is a convenience for setting the value of an AST IDRef.
func (c Constraints) SetRef(r *ast.IDRef, v int64) error {
	return c.Set(r.Scope, r.Name, v)
}

// LookupOrCreate looks up s.k and returns the value if it exists. Otherwise the
// constraint is set to v and returned.
func (c Constraints) LookupOrCreate(s, k string, v int64) int64 {
	if u, ok := c.Lookup(s, k); ok {
		return u
	}
	if err := c.Set(s, k, v); err != nil {
		panic(err) // should not happen, we already checked if it exists
	}
	return v
}

// LookupOrCreateRef is a convenience for LookupOrCreate with an AST IDRef.
func (c Constraints) LookupOrCreateRef(r *ast.IDRef, v int64) int64 {
	return c.LookupOrCreate(r.Scope, r.Name, v)
}

// ClearScope deletes all constraints in scope s.
func (c Constraints) ClearScope(s string) {
	delete(c, s)
}

// ClearLocal deletes all constraints in the local scope.
func (c Constraints) ClearLocal() {
	c.ClearScope("")
}
// Update applies every constraint in d to c, stopping at the first conflict.
// On error, constraints applied before the conflict remain in c.
func (c Constraints) Update(d Constraints) error {
	for scope, kv := range d {
		for key, val := range kv {
			if err := c.Set(scope, key, val); err != nil {
				return err
			}
		}
	}
	return nil
}
// Clone returns a deep copy of c.
func (c Constraints) Clone() Constraints {
	clone := NewConstraints()
	if err := clone.Update(c); err != nil {
		panic(err) // theoretically impossible: c cannot conflict with itself
	}
	return clone
}

// CloneGlobal clones all constraints apart from the local ones. It is a
// convenience for Clone followed by ClearLocal.
func (c Constraints) CloneGlobal() Constraints {
	g := c.Clone()
	g.ClearLocal()
	return g
}

// Merge builds a new set of constraints by merging c and other, leaving both
// inputs untouched. Errors on conflict.
func (c Constraints) Merge(other Constraints) (Constraints, error) {
	m := c.Clone()
	if err := m.Update(other); err != nil {
		return nil, err
	}
	return m, nil
}
<|start_filename|>inspect/inspect_test.go<|end_filename|>
package inspect
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/mmcloughlin/trunnel/ast"
"github.com/mmcloughlin/trunnel/internal/intervals"
)
// TestStructs checks Structs indexes a file's structs by name.
func TestStructs(t *testing.T) {
	f := &ast.File{
		Structs: []*ast.Struct{
			{Name: "a"},
			{Name: "b"},
			{Name: "c"},
		},
	}
	s, err := Structs(f)
	require.NoError(t, err)
	assert.Equal(t, map[string]*ast.Struct{
		"a": {Name: "a"},
		"b": {Name: "b"},
		"c": {Name: "c"},
	}, s)
}

// TestStructsDupe checks duplicate struct names are rejected.
func TestStructsDupe(t *testing.T) {
	f := &ast.File{
		Structs: []*ast.Struct{
			{Name: "a"},
			{Name: "b"},
			{Name: "a"},
		},
	}
	_, err := Structs(f)
	assert.EqualError(t, err, "duplicate struct name")
}

// TestConstants checks Constants maps names to their declared values.
func TestConstants(t *testing.T) {
	f := &ast.File{
		Constants: []*ast.Constant{
			{Name: "a", Value: 1},
			{Name: "b", Value: 2},
			{Name: "c", Value: 3},
		},
	}
	v, err := Constants(f)
	require.NoError(t, err)
	assert.Equal(t, map[string]int64{
		"a": 1,
		"b": 2,
		"c": 3,
	}, v)
}

// TestConstantsDupe checks duplicate constant names are rejected.
func TestConstantsDupe(t *testing.T) {
	f := &ast.File{
		Constants: []*ast.Constant{
			{Name: "a"},
			{Name: "b"},
			{Name: "a"},
		},
	}
	_, err := Constants(f)
	assert.EqualError(t, err, "duplicate constant name")
}
// TestNewResolverErrors checks NewResolver rejects files with duplicate
// struct, context, or constant names.
func TestNewResolverErrors(t *testing.T) {
	files := []*ast.File{
		{
			Structs: []*ast.Struct{
				{Name: "a"},
				{Name: "a"},
			},
		},
		{
			Contexts: []*ast.Context{
				{Name: "a"},
				{Name: "a"},
			},
		},
		{
			Constants: []*ast.Constant{
				{Name: "a"},
				{Name: "a"},
			},
		},
	}
	for _, f := range files {
		_, err := NewResolver(f)
		assert.Error(t, err)
	}
}

// TestResolverAddFileErrors checks AddFile fails when re-adding a file that
// now conflicts with already-registered declarations.
func TestResolverAddFileErrors(t *testing.T) {
	files := []*ast.File{
		{
			Structs: []*ast.Struct{
				{Name: "a", Members: []ast.Member{
					&ast.Field{Name: "x", Type: ast.U8},
				}},
			},
		},
		{Contexts: []*ast.Context{{Name: "a"}}},
		{Constants: []*ast.Constant{{Name: "a"}}},
	}
	for _, f := range files {
		r := NewResolverEmpty()
		err := r.AddFile(f)
		require.NoError(t, err)
		f.Constants = []*ast.Constant{{Name: "a", Value: 1}} // hack to force constant conflict
		err = r.AddFile(f)
		require.Error(t, err)
	}
}

// TestResolverAddStructOverrideError checks a non-extern struct cannot be
// registered twice.
func TestResolverAddStructOverrideError(t *testing.T) {
	r := NewResolverEmpty()
	s := &ast.Struct{Name: "a", Members: []ast.Member{
		&ast.Field{Name: "x", Type: ast.U16},
	}}
	err := r.AddStruct(s)
	require.NoError(t, err)
	err = r.AddStruct(s)
	require.EqualError(t, err, "cannot override non-extern struct")
}

// TestResolverAddStructOverrideExtern checks an extern (member-less) struct
// may be overridden by a concrete definition.
func TestResolverAddStructOverrideExtern(t *testing.T) {
	r := NewResolverEmpty()
	err := r.AddStruct(&ast.Struct{Name: "a"})
	require.NoError(t, err)
	err = r.AddStruct(&ast.Struct{Name: "a", Members: []ast.Member{
		&ast.Field{Name: "x", Type: ast.U16},
	}})
	require.NoError(t, err)
}
// TestResolverStruct checks struct lookup by name, hit and miss.
func TestResolverStruct(t *testing.T) {
	f := &ast.File{
		Structs: []*ast.Struct{
			{Name: "a"},
			{Name: "b"},
		},
	}
	r, err := NewResolver(f)
	require.NoError(t, err)
	s, ok := r.Struct("b")
	assert.True(t, ok)
	assert.Equal(t, &ast.Struct{Name: "b"}, s)
	_, ok = r.Struct("idk")
	assert.False(t, ok)
}

// TestResolverStructNonExtern checks StructNonExtern distinguishes concrete,
// extern, and unknown structs with distinct errors.
func TestResolverStructNonExtern(t *testing.T) {
	f := &ast.File{
		Structs: []*ast.Struct{
			{
				Name: "a",
				Members: []ast.Member{
					&ast.Field{Name: "x", Type: ast.U16},
				},
			},
			{
				Name:    "b",
				Members: nil, // extern
			},
		},
	}
	r, err := NewResolver(f)
	require.NoError(t, err)
	_, err = r.StructNonExtern("a")
	assert.NoError(t, err)
	_, err = r.StructNonExtern("b")
	assert.EqualError(t, err, "struct is external")
	_, err = r.StructNonExtern("c")
	assert.EqualError(t, err, "struct not found")
}

// TestResolverAddContext checks contexts register once and cannot be
// overridden.
func TestResolverAddContext(t *testing.T) {
	r := NewResolverEmpty()
	ctx := &ast.Context{Name: "a"}
	err := r.AddContext(ctx)
	require.NoError(t, err)
	err = r.AddContext(ctx)
	require.EqualError(t, err, "cannot override context")
	c, ok := r.Context("a")
	assert.True(t, ok)
	assert.Equal(t, &ast.Context{Name: "a"}, c)
}

// TestResolverContext checks context lookup by name, hit and miss.
func TestResolverContext(t *testing.T) {
	f := &ast.File{
		Contexts: []*ast.Context{
			{Name: "a"},
			{Name: "b"},
		},
	}
	r, err := NewResolver(f)
	require.NoError(t, err)
	c, ok := r.Context("b")
	assert.True(t, ok)
	assert.Equal(t, &ast.Context{Name: "b"}, c)
	_, ok = r.Context("idk")
	assert.False(t, ok)
}
// TestResolverAddConstant checks a constant registers and resolves.
func TestResolverAddConstant(t *testing.T) {
	r := NewResolverEmpty()
	c := &ast.Constant{
		Name:  "X",
		Value: 42,
	}
	err := r.AddConstant(c)
	require.NoError(t, err)
	v, ok := r.Constant("X")
	require.True(t, ok)
	assert.Equal(t, int64(42), v)
}

// TestResolverSetConstantTwice checks re-setting the same value is allowed.
func TestResolverSetConstantTwice(t *testing.T) {
	r := NewResolverEmpty()
	err := r.SetConstant("X", 42)
	require.NoError(t, err)
	err = r.SetConstant("X", 42)
	require.NoError(t, err)
	v, ok := r.Constant("X")
	require.True(t, ok)
	assert.Equal(t, int64(42), v)
}

// TestResolverSetConstantOverride checks setting a different value fails.
func TestResolverSetConstantOverride(t *testing.T) {
	r := NewResolverEmpty()
	err := r.SetConstant("X", 42)
	require.NoError(t, err)
	err = r.SetConstant("X", 43)
	assert.Error(t, err)
}
// TestResolverInteger table-tests Integer resolution: constant references,
// literals, undefined constants, and an unexpected AST node type.
func TestResolverInteger(t *testing.T) {
	f := &ast.File{
		Constants: []*ast.Constant{
			{Name: "a", Value: 1},
			{Name: "b", Value: 2},
			{Name: "c", Value: 3},
		},
	}
	r, err := NewResolver(f)
	require.NoError(t, err)
	cases := []struct {
		Name     string
		Int      ast.Integer
		Value    int64
		HasError bool
	}{
		{
			Name:  "constref",
			Int:   &ast.IntegerConstRef{Name: "b"},
			Value: 2,
		},
		{
			Name:     "undefconst",
			Int:      &ast.IntegerConstRef{Name: "no"},
			HasError: true,
		},
		{
			Name:  "literal",
			Int:   &ast.IntegerLiteral{Value: 42},
			Value: 42,
		},
		{
			// *ast.File satisfies ast.Integer structurally but is not a
			// valid integer node; resolution must error.
			Name:     "unexpectedtype",
			Int:      &ast.File{},
			HasError: true,
		},
	}
	for _, c := range cases {
		t.Run(c.Name, func(t *testing.T) {
			v, err := r.Integer(c.Int)
			assert.Equal(t, c.Value, v)
			assert.Equal(t, c.HasError, err != nil)
		})
	}
}
// TestResolverIntervals exercises Resolver.Intervals: converting AST integer
// lists into interval sets, including error cases for overlapping ranges,
// nil bounds, and bounds of an unexpected AST type.
func TestResolverIntervals(t *testing.T) {
	r, err := NewResolver(&ast.File{})
	require.NoError(t, err)
	cases := []struct {
		Name     string
		List     *ast.IntegerList
		Set      *intervals.Set
		HasError bool
	}{
		{
			Name: "basic",
			List: ast.NewIntegerList(ast.NewIntegerRangeLiteral(4, 5)),
			Set:  intervals.NewSet(intervals.Range(4, 5)),
		},
		{
			Name: "single",
			List: ast.NewIntegerList(ast.NewIntegerRangeSingleLiteral(42)),
			Set:  intervals.NewSet(intervals.Single(42)),
		},
		{
			Name: "multi",
			List: ast.NewIntegerList(
				ast.NewIntegerRangeLiteral(1, 10),
				ast.NewIntegerRangeLiteral(100, 1000),
			),
			Set: intervals.NewSet(
				intervals.Range(1, 10),
				intervals.Range(100, 1000),
			),
		},
		{
			// Overlapping ranges in a constraint list are rejected.
			Name: "overlaps",
			List: ast.NewIntegerList(
				ast.NewIntegerRangeLiteral(1, 10),
				ast.NewIntegerRangeLiteral(5, 14),
			),
			HasError: true,
		},
		{
			// A range must have a resolvable low bound.
			Name: "lownil",
			List: &ast.IntegerList{
				Ranges: []*ast.IntegerRange{
					{Low: nil},
				},
			},
			HasError: true,
		},
		{
			// High bound of a non-integer AST type cannot be resolved.
			Name: "highbadtype",
			List: &ast.IntegerList{
				Ranges: []*ast.IntegerRange{
					{
						Low:  &ast.IntegerLiteral{Value: 3},
						High: &ast.File{},
					},
				},
			},
			HasError: true,
		},
	}
	for _, c := range cases {
		t.Run(c.Name, func(t *testing.T) {
			s, err := r.Intervals(c.List)
			assert.Equal(t, c.Set, s)
			assert.Equal(t, c.HasError, err != nil)
		})
	}
}
// TestResolverIntType exercises Resolver.IntType: resolving an IDRef to the
// integer type of a struct field (local scope) or a context field (named
// scope), with error cases for unknown scopes, unknown fields, and fields
// that are not integers.
func TestResolverIntType(t *testing.T) {
	s := &ast.Struct{
		Name: "name",
		Members: []ast.Member{
			&ast.Field{Name: "a", Type: ast.U8},
			&ast.Field{Name: "b", Type: ast.U16},
			&ast.Field{Name: "c", Type: ast.U32},
			// Non-integer field used by the "notint" case below.
			&ast.Field{Name: "s", Type: &ast.NulTermString{}},
		},
	}
	f := &ast.File{
		Structs: []*ast.Struct{s},
		Contexts: []*ast.Context{
			{
				Name: "ctx",
				Members: []*ast.Field{
					{Name: "a", Type: ast.U8},
					{Name: "b", Type: ast.U16},
					{Name: "c", Type: ast.U32},
				},
			},
		},
	}
	r, err := NewResolver(f)
	require.NoError(t, err)
	cases := []struct {
		Name     string
		Ref      *ast.IDRef
		IntType  *ast.IntType
		HasError bool
	}{
		{
			// Unscoped ref resolves against the struct's own fields.
			Name:    "local",
			Ref:     &ast.IDRef{Name: "b"},
			IntType: ast.U16,
		},
		{
			// Scoped ref resolves against the named context.
			Name:    "ctx",
			Ref:     &ast.IDRef{Scope: "ctx", Name: "c"},
			IntType: ast.U32,
		},
		{
			Name:     "undefctx",
			Ref:      &ast.IDRef{Scope: "what", Name: "c"},
			HasError: true,
		},
		{
			Name:     "undeffield",
			Ref:      &ast.IDRef{Scope: "ctx", Name: "missing"},
			HasError: true,
		},
		{
			Name:     "notint",
			Ref:      &ast.IDRef{Name: "s"},
			HasError: true,
		},
	}
	for _, c := range cases {
		t.Run(c.Name, func(t *testing.T) {
			i, err := r.IntType(c.Ref, s)
			assert.Equal(t, c.IntType, i)
			assert.Equal(t, c.HasError, err != nil)
		})
	}
}
<|start_filename|>internal/intervals/intervals.go<|end_filename|>
// Package intervals provides tools for manipulating collections of integer intervals.
package intervals
import (
"fmt"
"math"
"sort"
"strconv"
"strings"
"github.com/mmcloughlin/random"
)
// Interval represents the inclusive range of integers [lo, hi].
// Both bounds are part of the interval; an Interval is never empty
// (see Range, which panics when hi < lo).
type Interval struct {
	lo uint64 // inclusive lower bound
	hi uint64 // inclusive upper bound
}
// Range builds the inclusive interval [l, h].
// It panics if h < l, since such an interval would be empty and is almost
// certainly a caller bug; the panic message includes the offending bounds
// to make the failure diagnosable.
func Range(l, h uint64) Interval {
	if h < l {
		panic(fmt.Sprintf("bad range [%d, %d]: high bound is less than low bound", l, h))
	}
	return Interval{lo: l, hi: h}
}
// Single builds the degenerate interval [x, x] containing only x.
func Single(x uint64) Interval {
	// x <= x always holds, so the Range validation is unnecessary here.
	return Interval{lo: x, hi: x}
}
// Bits returns the interval [0, 2^n-1], i.e. the value range of an
// unsigned n-bit integer. For n == 64 the shift of a uint64 by 64 yields 0,
// so (1<<64)-1 wraps to MaxUint64 and the full 64-bit range is returned.
func Bits(n uint) Interval {
	return Range(0, (1<<n)-1)
}
// OpenLeft returns the interval [0, h].
func OpenLeft(h uint64) Interval {
	return Range(0, h)
}
// OpenRight returns the interval [l, 2^64-1].
func OpenRight(l uint64) Interval {
	return Range(l, math.MaxUint64)
}
// Size returns the number of integers in the interval (hi - lo + 1).
// NOTE(review): for the full 64-bit interval [0, MaxUint64] the true size
// 2^64 does not fit in a uint64 and this wraps to 0.
func (i Interval) Size() uint64 {
	return i.hi - i.lo + 1
}
// Single returns true if the interval contains exactly one integer.
func (i Interval) Single() bool {
	return i.Size() == 1
}
// Contains returns whether x is contained in the interval (both bounds
// inclusive).
func (i Interval) Contains(x uint64) bool {
	return i.lo <= x && x <= i.hi
}
// String renders the interval: a lone number for a one-element interval,
// otherwise "lo-hi".
func (i Interval) String() string {
	if i.Single() {
		return strconv.FormatUint(i.lo, 10)
	}
	return fmt.Sprintf("%d-%d", i.lo, i.hi)
}
// Overlaps returns true if any intervals overlap.
// It reports whether any point is covered by at least two of the given
// intervals (shared endpoints count as overlap).
func Overlaps(is []Interval) bool {
	intersections := thresholds(2, is)
	return len(intersections) > 0
}
// Simplify simplifies a set of intervals such that they cover the same set
// of integers in a minimal way.
// Overlapping intervals are merged; the result is sorted and disjoint.
func Simplify(is []Interval) []Interval {
	return thresholds(1, is)
}
// Set is a collection of intervals.
// The intervals slice is kept simplified: sorted, non-overlapping intervals
// (see NewSet and Simplify).
type Set struct {
	intervals []Interval
}
// NewSet builds a set from the union of given intervals. The intervals will be
// passed through simplify.
func NewSet(is ...Interval) *Set {
	return &Set{intervals: Simplify(is)}
}
// IntType returns the set of possible values of an n-bit unsigned integer,
// i.e. the single interval [0, 2^n-1].
func IntType(n uint) *Set {
	return NewSet(Bits(n))
}
// String renders the set as a comma-separated list of its intervals,
// e.g. "2,4-5".
func (s Set) String() string {
	parts := make([]string, 0, len(s.intervals))
	for _, iv := range s.intervals {
		parts = append(parts, iv.String())
	}
	return strings.Join(parts, ",")
}
// Contains returns whether x is contained in the set, i.e. whether any
// member interval contains it.
func (s Set) Contains(x uint64) bool {
	for idx := range s.intervals {
		if s.intervals[idx].Contains(x) {
			return true
		}
	}
	return false
}
// Subtract subtracts other from s, in place.
// Works by intersecting s with the complement of other: a point survives
// only if it is covered by both s and the complement, i.e. reaches the
// threshold of 2 in the combined sweep.
func (s *Set) Subtract(other *Set) {
	s.intervals = thresholds(2, s.intervals, complement(other.intervals))
}
// complement returns the "complement" of the intervals. In our case this is the
// result of subtracting from the full 64-bit interval.
// Assumes the input is sorted and disjoint (as produced by Simplify/NewSet).
func complement(is []Interval) []Interval {
	s := uint64(0) // start of the next gap to emit
	var c []Interval
	for _, i := range is {
		if i.lo > s {
			c = append(c, Range(s, i.lo-1))
		}
		// If i.hi == MaxUint64 this wraps s to 0, which also suppresses the
		// trailing OpenRight below — correct, since nothing lies above.
		s = i.hi + 1
	}
	if s != 0 {
		c = append(c, OpenRight(s))
	}
	return c
}
// intervaledges converts intervals into sweep-line edges: an opening edge
// (+1) at each low bound and a closing edge (-1) at each high bound.
func intervaledges(is []Interval) []edge {
	es := make(edges, 0, 2*len(is))
	for _, iv := range is {
		es = append(es, edge{x: iv.lo, d: 1}, edge{x: iv.hi, d: -1})
	}
	return es
}
// thresholds sweeps over all edges of the given interval sets and returns the
// maximal intervals on which at least thresh of the input intervals overlap.
// thresh=1 yields the simplified union; thresh=2 yields pairwise
// intersections. Relies on the edges sort order placing opening (+1) edges
// before closing (-1) edges at equal x, so intervals that merely touch at a
// point still count as overlapping there.
func thresholds(thresh int, intervalsets ...[]Interval) []Interval {
	es := []edge{}
	for _, is := range intervalsets {
		es = append(es, intervaledges(is)...)
	}
	sort.Sort(edges(es))
	n := 0          // number of intervals currently open
	inside := false // true while n >= thresh
	result := []Interval{}
	var start uint64
	for _, e := range es {
		n += e.d
		if !inside && n >= thresh {
			// Crossed the threshold: a result interval starts here.
			start = e.x
			inside = true
		} else if inside && n < thresh {
			// Dropped below the threshold: close the interval at this edge.
			result = append(result, Range(start, e.x))
			inside = false
		}
	}
	return result
}
// edge is a sweep-line event at position x: d is +1 for the start of an
// interval and -1 for its (inclusive) end.
type edge struct {
	x uint64
	d int
}
// edges implements sort.Interface over a slice of edge values.
type edges []edge
func (e edges) Len() int { return len(e) }
func (e edges) Swap(i, j int) { e[i], e[j] = e[j], e[i] }
// Less orders edges by position; at equal positions, opening edges (d=+1)
// sort before closing edges (d=-1) so touching intervals register as
// overlapping in thresholds.
func (e edges) Less(i, j int) bool {
	a, b := e[i], e[j]
	return a.x < b.x || (a.x == b.x && b.d < a.d)
}
// Random returns a random element of the collection. Assumes the collection
// contains non-overlapping intervals. Panics if s is empty.
func (s Set) Random() uint64 {
	return s.RandomWithGenerator(random.New())
}
// RandomWithGenerator is like Random() but allows you to control the random
// generator.
// It draws a uniform index r in [0, total size) and maps it onto the
// concatenation of the intervals via precomputed offsets.
// NOTE(review): cumulative sizes are tracked in uint64, so a set covering
// (nearly) the full 64-bit range would overflow cuml — confirm callers never
// build such sets.
func (s Set) RandomWithGenerator(rnd random.Interface) uint64 {
	if len(s.intervals) == 0 {
		panic("empty set")
	}
	// step maps the global index range [prev upper, upper) onto an interval:
	// adding delta translates a global index into that interval's values.
	type step struct {
		upper uint64
		delta uint64
	}
	steps := []step{}
	var cuml uint64
	for _, i := range s.intervals {
		cuml += i.Size()
		// For r in this step's range: r + delta == i.lo + (r - previous cuml).
		steps = append(steps, step{
			upper: cuml,
			delta: i.hi - cuml + 1,
		})
	}
	r := randuint64n(rnd, cuml)
	for _, step := range steps {
		if r < step.upper {
			return r + step.delta
		}
	}
	panic("unreachable")
}
// randuint64n returns a uniformly distributed random uint64 in [0, n),
// using rejection sampling against a power-of-two mask.
// Panics if n == 0: no value exists in an empty range, and without the guard
// the rejection loop below would never terminate.
func randuint64n(rnd random.Interface, n uint64) uint64 {
	if n == 0 {
		panic("randuint64n: n must be positive")
	}
	// Build a mask of the form 2^k - 1 with mask >= n, so masked draws land
	// in [0, mask] and are accepted when < n.
	mask := ^uint64(0)
	for mask > n {
		mask >>= 1
	}
	mask = (mask << 1) | uint64(1)
	for {
		r := randuint64(rnd) & mask
		if r < n {
			return r
		}
	}
}
// randuint64 returns a random uint64 assembled from two 63-bit draws:
// 32 bits of the first draw (shifted down) fill the low half and the low
// 32 bits of the second draw (shifted up) fill the high half.
func randuint64(rnd random.Interface) uint64 {
	return uint64(rnd.Int63())>>31 | uint64(rnd.Int63())<<32
}
<|start_filename|>examples/date/gen-marshallers_test.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package date
import "testing"
// TestDateCorpus feeds each generated corpus vector to ParseDate and fails
// on any parse error. (File is trunnel-generated; vectors are fixed.)
func TestDateCorpus(t *testing.T) {
	cases := []struct {
		Data []byte
	}{
		{
			Data: []byte{0x3b, 0x8, 0x1, 0x11},
		},
	}
	for _, c := range cases {
		_, err := ParseDate(c.Data)
		if err != nil {
			t.Fatal(err)
		}
	}
}
<|start_filename|>gen/tests/contexts/contexts_test.go<|end_filename|>
package contexts
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestParseErrors feeds Ccomplex.Parse truncated or constraint-violating
// inputs (with varying Flag/Count contexts) and requires each to fail.
// Case names indicate the field at which parsing is expected to stop.
func TestParseErrors(t *testing.T) {
	cases := []struct {
		Name  string
		Flag  Flag
		Count Count
		Data  []byte
	}{
		{Name: "empty", Data: []byte{}},
		{Name: "point_x_constraint", Data: []byte{255}},
		{Name: "point_y_short", Data: []byte{254}},
		{Name: "tsz_0_x_short", Data: []byte{254, 13, 0}},
		{Name: "tsz_0_x_constraint", Data: []byte{254, 13, 0x80, 0, 0, 0}},
		{Name: "tsz_1_y_short", Flag: Flag{1}, Data: []byte{254, 13, 0}},
		{Name: "vsz_a_short", Count: Count{3}, Data: []byte{254, 13, 0, 1, 2, 3}},
		{Name: "vsz_msg_short", Count: Count{2}, Data: []byte{254, 13, 0, 1, 2, 3, 0, 1, 2, 3, 0}},
		{Name: "union_short", Count: Count{2}, Data: []byte{254, 13, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 0}},
	}
	for _, c := range cases {
		t.Run(c.Name, func(t *testing.T) {
			_, err := new(Ccomplex).Parse(c.Data, c.Flag, c.Count)
			assert.Error(t, err)
		})
	}
}
// TestParseCases parses complete, valid Ccomplex encodings under both union
// tag values (Flag 0 and 1), checking the decoded struct field-by-field and
// that the trailing bytes ("rest") are returned unconsumed.
func TestParseCases(t *testing.T) {
	cases := []struct {
		Name   string
		Data   []byte
		Flag   Flag
		Count  Count
		Expect *Ccomplex
	}{
		{
			Name:  "flag_0",
			Flag:  Flag{0},
			Count: Count{5},
			Data: []byte{
				42, 13, // point
				0, 1, 2, 3, // x
				4, 5, 6, 7, // a
				'h', 'e', 'l', 'l', 'o', // msg
				'w', 'o', 'r', 'l', 'd', // a
				'r', 'e', 's', 't',
			},
			Expect: &Ccomplex{
				P:   &Point{X: 42, Y: 13},
				Tsz: &Twosize{X: 0x00010203},
				Vsz: &Varsize{A: 0x04050607, Msg: []byte("hello")},
				A:   []byte("world"),
			},
		},
		{
			Name:  "flag_1",
			Flag:  Flag{1},
			Count: Count{6},
			Data: []byte{
				42, 13, // point
				0, 1, // y
				4, 5, 6, 7, // a
				'h', 'e', 'l', 'l', 'o', '!', // msg
				0, 1, 2, 3, 4, 5, // b
				'r', 'e', 's', 't',
			},
			Expect: &Ccomplex{
				P:   &Point{X: 42, Y: 13},
				Tsz: &Twosize{Y: 0x0001},
				Vsz: &Varsize{A: 0x04050607, Msg: []byte("hello!")},
				B:   []uint16{0x0001, 0x0203, 0x0405},
			},
		},
	}
	for _, c := range cases {
		t.Run(c.Name, func(t *testing.T) {
			x := new(Ccomplex)
			rest, err := x.Parse(c.Data, c.Flag, c.Count)
			require.NoError(t, err)
			assert.Equal(t, c.Expect, x)
			assert.Equal(t, []byte("rest"), rest)
		})
	}
}
<|start_filename|>tv/generator_test.go<|end_filename|>
package tv
import (
"encoding/binary"
"testing"
"github.com/mmcloughlin/random"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/mmcloughlin/trunnel/parse"
)
// String parses trunnel source code and generates a test-vector corpus from
// it using a fixed random seed (42), so expected vectors in the tests below
// are deterministic.
func String(code string) (*Corpus, error) {
	f, err := parse.String(code)
	if err != nil {
		return nil, err
	}
	return Generate(f, WithRandom(random.NewWithSeed(42)))
}
// TestIntType checks the exact seeded vector generated for a struct of
// plain u8 fields.
func TestIntType(t *testing.T) {
	v, err := String(`struct color { u8 r; u8 g; u8 b; }`)
	require.NoError(t, err)
	expect := &Corpus{}
	expect.AddVectors("color", []Vector{
		NewVector([]byte{0x7f, 0x8c, 0x53}),
	})
	assert.Equal(t, expect, v)
}
// TestIntConstraint generates many unseeded corpora and checks every field
// of the resulting vector respects its declared IN [...] constraint.
func TestIntConstraint(t *testing.T) {
	f, err := parse.String(`struct date {
	u16 year IN [ 1970..2017 ];
	u8 month IN [ 1, 2..6, 7..12 ];
	u8 day IN [ 1..31 ];
	}`)
	require.NoError(t, err)
	for i := 0; i < 10000; i++ {
		c, err := Generate(f)
		require.NoError(t, err)
		b := c.Vectors("date")[0].Data
		require.Len(t, b, 4)
		y := binary.BigEndian.Uint16(b)
		m := b[2]
		d := b[3]
		require.True(t, 1970 <= y && y <= 2017)
		require.True(t, 1 <= m && m <= 12)
		require.True(t, 1 <= d && d <= 31)
	}
}
// TestNestedStruct checks seeded vectors for a struct embedding another
// struct twice.
func TestNestedStruct(t *testing.T) {
	c, err := String(`
	struct color { u8 r; u8 g; u8 b; };
	struct gradient {
	struct color from;
	struct color to;
	};`)
	require.NoError(t, err)
	expect := &Corpus{}
	expect.AddVectors("color", []Vector{
		NewVector([]byte{0x7f, 0x8c, 0x53}),
	})
	expect.AddVectors("gradient", []Vector{
		NewVector([]byte{0x97, 0x1b, 0xbf, 0x64, 0xb1, 0x96}),
	})
	assert.Equal(t, expect, c)
}
// TestNulTerm checks the seeded vector for a struct containing a
// NUL-terminated string between fixed-size fields.
func TestNulTerm(t *testing.T) {
	v, err := String(`
	struct nul_term {
	u16 pre;
	nulterm s;
	u8 post;
	};`)
	require.NoError(t, err)
	expect := &Corpus{}
	expect.AddVectors("nul_term", []Vector{
		NewVector([]byte{
			0x8c, 0x7f, // pre
			'u', 'k', 'p', 't', 't', 0, // s
			0x53, // post
		}),
	})
	assert.Equal(t, expect, v)
}
// TestFixedArray checks generated vectors for fixed-size arrays have the
// exact total byte length implied by the declared element types and counts.
func TestFixedArray(t *testing.T) {
	f, err := parse.String(`
	const NUM_BYTES = 8;
	struct color { u8 r; u8 g; u8 b; }
	struct fixie {
	u8 bytes[NUM_BYTES];
	char letters[NUM_BYTES];
	u16 shortwords[4];
	u32 words[2];
	u64 big_words[2];
	struct color colors[2];
	}`)
	require.NoError(t, err)
	for i := 0; i < 1000; i++ {
		c, err := Generate(f)
		require.NoError(t, err)
		d := c.Vectors("fixie")[0].Data
		// 8 bytes + 8 chars + 4*u16 + 2*u32 + 2*u64 + 2*3-byte colors.
		require.Len(t, d, 8+8+2*4+4*2+8*2+3*2)
	}
}
// TestVarArray checks a variable-length array's data length matches its
// u16 length prefix.
func TestVarArray(t *testing.T) {
	f, err := parse.String(`struct var { u16 n; u32 words[n]; };`)
	require.NoError(t, err)
	for i := 0; i < 1000; i++ {
		c, err := Generate(f)
		require.NoError(t, err)
		v := c.Vectors("var")[0]
		n := binary.BigEndian.Uint16(v.Data)
		require.Len(t, v.Data, 2+4*int(n))
	}
}
// TestNestedVar walks nested length prefixes (array of var-length structs)
// and requires them to account for exactly the whole vector.
func TestNestedVar(t *testing.T) {
	f, err := parse.String(`
	struct var { u16 n; u32 w[n]; };
	struct nest { u16 n; struct var v[n]; };
	`)
	require.NoError(t, err)
	for i := 0; i < 100; i++ {
		c, err := Generate(f)
		require.NoError(t, err)
		b := c.Vectors("nest")[0].Data
		// should be able to follow the length fields to the end
		n, b := binary.BigEndian.Uint16(b), b[2:]
		for j := 0; j < int(n); j++ {
			l := binary.BigEndian.Uint16(b)
			skip := 2 + 4*int(l)
			require.True(t, len(b) >= skip)
			b = b[skip:]
		}
		require.Len(t, b, 0)
	}
}
// TestLengthDoubleUse checks a single length field sizing two arrays
// (u32 a[n] and u64 b[n]) produces 1 + 12*n bytes.
func TestLengthDoubleUse(t *testing.T) {
	f, err := parse.String(`struct dbl {
	u8 n;
	u32 a[n];
	u64 b[n];
	};`)
	require.NoError(t, err)
	for i := 0; i < 1000; i++ {
		c, err := Generate(f)
		require.NoError(t, err)
		b := c.Vectors("dbl")[0].Data
		n := int(b[0])
		require.Len(t, b, 1+12*n)
	}
}
// TestRemaining checks the seeded vector for a trailing "rest of input"
// array (u8 tail[]).
func TestRemaining(t *testing.T) {
	vs, err := String(`struct rem {
	u32 head;
	u8 tail[];
	};`)
	require.NoError(t, err)
	expect := &Corpus{}
	expect.AddVectors("rem", []Vector{
		NewVector([]byte{
			0x72, 0xe8, 0x9f, 0x5b, 0xb4, 0x4b, 0x9f, 0xbb, 0x97, 0x1b,
		}),
	})
	assert.Equal(t, expect, vs)
}
// TestLeftover documents that leftover-length arrays ([..-8]) are not yet
// supported by the generator and must report "not implemented".
func TestLeftover(t *testing.T) {
	_, err := String(`struct leftover {
	u32 head[2];
	u32 mid[..-8];
	u32 tail[2];
	};`)
	assert.EqualError(t, err, "not implemented")
}
// TestUnionBasic checks the seeded vectors for a tagged union: one vector
// per union case, each beginning with its tag byte.
func TestUnionBasic(t *testing.T) {
	vs, err := String(`struct basic {
	u8 tag;
	union u[tag] {
	1: u8 r; u8 g; u8 b;
	2: u16 y; u8 m; u8 d;
	};
	u16 right_after_the_union;
	};`)
	require.NoError(t, err)
	expect := &Corpus{}
	expect.AddVectors("basic", []Vector{
		NewVector([]byte{0x01, 0xb1, 0x96, 0x7f, 0x53, 0x8c}),
		NewVector([]byte{0x02, 0x58, 0x08, 0xbf, 0x64, 0x53, 0x8c}),
	})
	assert.Equal(t, expect, vs)
}
// TestTagDoubleUse checks that one tag field can drive two unions: for each
// generated vector the tag byte determines both unions' sizes (1+2*tag).
func TestTagDoubleUse(t *testing.T) {
	f, err := parse.String(`struct dbltag {
	u8 tag;
	union u[tag] {
	1: u8 a;
	2: u16 b;
	};
	union w[tag] {
	1: u8 c;
	2: u16 d;
	};
	};`)
	require.NoError(t, err)
	for i := 0; i < 1000; i++ {
		c, err := Generate(f)
		require.NoError(t, err)
		vs := c.Vectors("dbltag")
		require.Len(t, vs, 2)
		for j, v := range vs {
			b := v.Data
			tag := j + 1
			require.Equal(t, byte(tag), b[0])
			require.Len(t, b, 1+2*tag)
		}
	}
}
// TestUnionDefault checks seeded vectors for a union with a default case
// holding a variable-length tail.
func TestUnionDefault(t *testing.T) {
	vs, err := String(`struct basic {
	u8 tag;
	union u[tag] {
	1:
	u32 a;
	default:
	u8 ukn[];
	};
	};`)
	require.NoError(t, err)
	expect := &Corpus{}
	expect.AddVectors("basic", []Vector{
		NewVector([]byte{0x01, 0x09, 0xdd, 0x9d, 0x52}),
		NewVector([]byte{
			0xc4, 0xbc, 0x75, 0xd3, 0x61, 0x3f, 0x08, 0x58, 0x07, 0x9b,
			0x70, 0xe0, 0xcb, 0x1a, 0x84, 0x9b, 0xd7, 0xdf,
		}),
	})
	assert.Equal(t, expect, vs)
}
// TestUnionDefaultRange checks that the generator emits one vector with a
// tag inside the 0..0x2ff case range and one with a tag outside it
// (the default case).
func TestUnionDefaultRange(t *testing.T) {
	f, err := parse.String(`struct basic {
	u16 tag;
	union u[tag] {
	0..0x2ff: u8 a;
	default: u8 ukn[];
	};
	};`)
	require.NoError(t, err)
	for i := 0; i < 1000; i++ {
		c, err := Generate(f)
		require.NoError(t, err)
		vs := c.Vectors("basic")
		require.True(t, binary.BigEndian.Uint16(vs[0].Data) <= uint16(0x2ff))
		require.True(t, binary.BigEndian.Uint16(vs[1].Data) > uint16(0x2ff))
	}
}
// TestUnionCommands checks seeded vectors for union case commands: an empty
// case, "ignore" (arbitrary trailing bytes allowed), and that the "fail"
// default produces no vector of its own.
func TestUnionCommands(t *testing.T) {
	vs, err := String(`struct basic {
	u8 tag;
	union u[tag] {
	1: u32 a;
	2..4: ; // empty
	5: ignore;
	default: fail;
	};
	};`)
	require.NoError(t, err)
	expect := &Corpus{}
	expect.AddVectors("basic", []Vector{
		NewVector([]byte{0x01, 0x09, 0xdd, 0x9d, 0x52}),
		NewVector([]byte{0x02}),
		NewVector([]byte{0x05}),
		NewVector([]byte{0x05, 0xdf, 0xd7, 0x9b, 0x13, 0xdd, 0x1a, 0xac}),
	})
	assert.Equal(t, expect, vs)
}
// TestPtr checks that a @ptr position marker contributes no bytes to the
// generated vector.
func TestPtr(t *testing.T) {
	vs, err := String(`struct haspos {
	nulterm s;
	@ptr pos1;
	u32 after;
	};`)
	require.NoError(t, err)
	expect := &Corpus{}
	expect.AddVectors("haspos", []Vector{
		NewVector([]byte{
			'u', 'k', 'p', 't', 't', 0, // s
			// pos1 occupies no space
			0x53, 0x8c, 0x7f, 0x96, // after
		}),
	})
	assert.Equal(t, expect, vs)
}
// TestEOS checks that an eos marker adds no bytes to the vector.
func TestEOS(t *testing.T) {
	vs, err := String(`struct haseos {
	u8 r;
	u8 g;
	u8 b;
	eos;
	};`)
	require.NoError(t, err)
	expect := &Corpus{}
	expect.AddVectors("haseos", []Vector{NewVector([]byte{0x7f, 0x8c, 0x53})})
	assert.Equal(t, expect, vs)
}
// TestExternStruct checks that an extern struct generates no vectors.
func TestExternStruct(t *testing.T) {
	vs, err := String(`extern struct ext;`)
	require.NoError(t, err)
	assert.Equal(t, &Corpus{}, vs)
}
// TestVarArrayContext checks that an array sized by a context field matches
// the length recorded in the vector's context constraints.
func TestVarArrayContext(t *testing.T) {
	f, err := parse.String(`
	context ctx { u16 n; }
	struct var with context ctx { u32 words[ctx.n]; };
	`)
	require.NoError(t, err)
	for i := 0; i < 1000; i++ {
		c, err := Generate(f)
		require.NoError(t, err)
		v := c.Vectors("var")[0]
		n, ok := v.Constraints.Lookup("ctx", "n")
		require.True(t, ok)
		require.Len(t, v.Data, 4*int(n))
	}
}
// TestUnionContext checks a union discriminated by a context tag: each
// vector's length equals its constrained tag value (cases 1, 2, 4 hold
// u8/u16/u32 respectively).
func TestUnionContext(t *testing.T) {
	f, err := parse.String(`
	context ctx { u16 tag; }
	struct basic with context ctx {
	union u[ctx.tag] {
	1: u8 a;
	2: u16 b;
	4: u32 c;
	};
	};`)
	require.NoError(t, err)
	for i := 0; i < 1000; i++ {
		c, err := Generate(f)
		require.NoError(t, err)
		for _, v := range c.Vectors("basic") {
			tag, ok := v.Constraints.Lookup("ctx", "tag")
			require.True(t, ok)
			require.Len(t, v.Data, int(tag))
		}
	}
}
// TestUnionLength checks a union with an explicit length field: every
// generated vector's total size is the fixed prefix/suffix (6 bytes) plus
// the union_len value at offset 2.
func TestUnionLength(t *testing.T) {
	f, err := parse.String(`struct union_with_len {
	u16 tag;
	u16 union_len;
	union u[tag] with length union_len {
	1: u8 r; u8 g; u8 b;
	2: u16 year; u8 month; u8 day; ...;
	default: u8 unparseable[];
	};
	u16 right_after_the_union;
	};`)
	require.NoError(t, err)
	for i := 0; i < 1000; i++ {
		c, err := Generate(f)
		require.NoError(t, err)
		vs := c.Vectors("union_with_len")
		require.Len(t, vs, 4)
		for _, v := range vs {
			n := binary.BigEndian.Uint16(v.Data[2:])
			require.Len(t, v.Data, 6+int(n))
		}
	}
}
<|start_filename|>gen/tests/unioncmds/gen-fuzz.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
// +build gofuzz
package unioncmds
// FuzzUnionCmds is a go-fuzz entry point: returns 1 when data parses as a
// UnionCmds (interesting input), 0 otherwise.
func FuzzUnionCmds(data []byte) int {
	_, err := ParseUnionCmds(data)
	if err != nil {
		return 0
	}
	return 1
}
<|start_filename|>parse/files_test.go<|end_filename|>
package parse
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/mmcloughlin/trunnel/internal/test"
)
// TestValidFiles asserts every fixture under testdata/valid parses.
func TestValidFiles(t *testing.T) {
	test.Glob(t, "./testdata/valid/*.trunnel", valid)
}
// TestFailingFiles asserts every fixture under testdata/failing is rejected.
func TestFailingFiles(t *testing.T) {
	test.Glob(t, "./testdata/failing/*.trunnel", invalid)
}
// TestTorFiles asserts the real-world Tor trunnel files parse.
func TestTorFiles(t *testing.T) {
	test.Glob(t, "../testdata/tor/*.trunnel", valid)
}
// TestTrunnelFiles asserts the upstream trunnel example files parse.
func TestTrunnelFiles(t *testing.T) {
	test.Glob(t, "../testdata/trunnel/*.trunnel", valid)
}
// valid is a Glob callback requiring the file to parse without error.
func valid(t *testing.T, filename string) {
	_, err := File(filename)
	assert.NoError(t, err)
}
// invalid is a Glob callback requiring the file to fail parsing.
func invalid(t *testing.T, filename string) {
	_, err := File(filename)
	assert.Error(t, err)
}
<|start_filename|>internal/intervals/intervals_test.go<|end_filename|>
package intervals
import (
"fmt"
"math"
"math/rand"
"testing"
"github.com/mmcloughlin/random"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestRange checks Range stores its bounds verbatim.
func TestRange(t *testing.T) {
	assert.Equal(t, Interval{lo: 10, hi: 20}, Range(10, 20))
}
// TestBadRange checks Range panics when high < low.
func TestBadRange(t *testing.T) {
	assert.Panics(t, func() { Range(4, 3) })
}
// TestSingle checks Single builds the degenerate interval [x, x].
func TestSingle(t *testing.T) {
	assert.Equal(t, Interval{lo: 13, hi: 13}, Single(13))
}
// TestBits checks Bits(n) covers [0, 2^n-1] for every n from 0 to 64,
// including the full-width wraparound case n == 64.
func TestBits(t *testing.T) {
	hi := ^uint64(0)
	for n := uint(0); n <= 64; n++ {
		assert.Equal(t, Interval{lo: 0, hi: hi >> n}, Bits(64-n))
	}
}
// TestIntervalSize checks Size counts both endpoints inclusively.
func TestIntervalSize(t *testing.T) {
	assert.Equal(t, uint64(1), Single(13).Size())
	assert.Equal(t, uint64(5), Range(13, 17).Size())
}
// TestIntervalString checks the single-value and "lo-hi" renderings.
func TestIntervalString(t *testing.T) {
	assert.Equal(t, "13", Single(13).String())
	assert.Equal(t, "13-17", Range(13, 17).String())
}
// TestIntervalContains checks inclusive membership at and around bounds.
func TestIntervalContains(t *testing.T) {
	cases := []struct {
		Interval Interval
		X        uint64
		Expect   bool
	}{
		{Single(42), 42, true},
		{Single(42), 100, false},
		{Range(100, 200), 150, true},
		{Range(100, 200), 100, true},
		{Range(100, 200), 200, true},
		{Range(100, 200), 99, false},
		{Range(100, 200), 201, false},
	}
	for _, c := range cases {
		assert.Equal(t, c.Expect, c.Interval.Contains(c.X))
	}
}
// TestIntType checks IntType(n) equals the set {0 .. 2^n-1}.
func TestIntType(t *testing.T) {
	assert.Equal(t, NewSet(Range(0, 127)), IntType(7))
}
// TestOverlaps checks overlap detection, including the shared-endpoint case
// (Range(5,10) and Range(10,15) overlap at 10).
func TestOverlaps(t *testing.T) {
	cases := []struct {
		Intervals []Interval
		Expect    bool
	}{
		{[]Interval{}, false},
		{[]Interval{Single(1)}, false},
		{[]Interval{Range(10, 100)}, false},
		{[]Interval{Range(10, 100), Range(50, 60)}, true},
		{[]Interval{Range(5, 10), Range(10, 15)}, true},
		{[]Interval{Range(5, 10), Range(11, 15)}, false},
	}
	for _, c := range cases {
		s := NewSet(c.Intervals...)
		t.Run(s.String(), func(t *testing.T) {
			assert.Equal(t, c.Expect, Overlaps(c.Intervals))
		})
	}
}
// TestSetContains checks membership across an empty set, a singleton, and a
// multi-interval set.
func TestSetContains(t *testing.T) {
	cases := []struct {
		Intervals []Interval
		X         uint64
		Expect    bool
	}{
		{[]Interval{}, 10, false},
		{[]Interval{Single(1)}, 1, true},
		{[]Interval{Single(1), Range(4, 5)}, 4, true},
	}
	for _, c := range cases {
		s := NewSet(c.Intervals...)
		t.Run(s.String(), func(t *testing.T) {
			assert.Equal(t, c.Expect, s.Contains(c.X))
		})
	}
}
// TestSetString checks comma-joined rendering, including that NewSet merges
// overlapping input intervals before rendering.
func TestSetString(t *testing.T) {
	cases := []struct {
		Intervals []Interval
		Expect    string
	}{
		{
			Intervals: []Interval{},
			Expect:    "",
		},
		{
			Intervals: []Interval{Single(2), Range(4, 5)},
			Expect:    "2,4-5",
		},
		{
			// Overlapping 4-50 and 30-300 merge to 4-300.
			Intervals: []Interval{Single(2), Range(4, 50), Range(30, 300)},
			Expect:    "2,4-300",
		},
	}
	for _, c := range cases {
		assert.Equal(t, c.Expect, NewSet(c.Intervals...).String())
	}
}
// TestSubtract checks in-place set difference A - B across partial overlaps,
// multi-interval sets, single-point removal, and full 64-bit ranges.
func TestSubtract(t *testing.T) {
	cases := []struct {
		A      *Set
		B      *Set
		Expect *Set
	}{
		{
			NewSet(Range(1, 10)),
			NewSet(Range(5, 15)),
			NewSet(Range(1, 4)),
		},
		{
			NewSet(Range(50, 100)),
			NewSet(Range(25, 75)),
			NewSet(Range(76, 100)),
		},
		{
			NewSet(Range(0, 4), Range(8, 12)),
			NewSet(Range(2, 10)),
			NewSet(Range(0, 1), Range(11, 12)),
		},
		{
			// Removing a single interior point splits the interval.
			NewSet(Range(50, 100)),
			NewSet(Single(75)),
			NewSet(Range(50, 74), Range(76, 100)),
		},
		{
			IntType(8),
			IntType(4),
			NewSet(Range(16, 255)),
		},
		{
			// Full 64-bit range minus the 32-bit range.
			IntType(64),
			IntType(32),
			NewSet(Range(1<<32, math.MaxUint64)),
		},
	}
	for _, c := range cases {
		t.Run(fmt.Sprintf("(%s)-(%s)", c.A, c.B), func(t *testing.T) {
			c.A.Subtract(c.B)
			assert.Equal(t, c.Expect, c.A)
		})
	}
}
// TestComplement checks the internal complement helper, including the
// open-ended boundary cases at 0 and MaxUint64.
func TestComplement(t *testing.T) {
	cases := []struct {
		Intervals []Interval
		Expect    []Interval
	}{
		{},
		{
			Intervals: []Interval{Range(10, 20)},
			Expect:    []Interval{OpenLeft(9), OpenRight(21)},
		},
		{
			Intervals: []Interval{OpenLeft(42)},
			Expect:    []Interval{OpenRight(43)},
		},
		{
			Intervals: []Interval{OpenRight(42)},
			Expect:    []Interval{OpenLeft(41)},
		},
	}
	for _, c := range cases {
		assert.Equal(t, c.Expect, complement(c.Intervals))
	}
}
// TestSetRandomContains checks every sampled value lies within the set.
func TestSetRandomContains(t *testing.T) {
	s := NewSet(Single(1), Range(42, 53), Range(100, 1000))
	for i := 0; i < NumTrials(); i++ {
		require.True(t, s.Contains(s.Random()))
	}
}
// TestSetRandomObserveAll samples repeatedly from a small set and checks
// every member value is eventually observed (coverage, not uniformity).
func TestSetRandomObserveAll(t *testing.T) {
	s := NewSet(Single(1), Range(4200, 4201), Range(7, 10))
	counts := map[uint64]int{}
	for i := 0; i < NumTrials(); i++ {
		r := s.Random()
		if _, ok := counts[r]; !ok {
			counts[r] = 0
		}
		counts[r]++
	}
	t.Log(counts)
	expect := []uint64{1, 7, 8, 9, 10, 4200, 4201}
	for _, e := range expect {
		assert.Contains(t, counts, e)
	}
}
// TestSetRandomEmpty checks Random panics on an empty set.
func TestSetRandomEmpty(t *testing.T) {
	assert.Panics(t, func() { Set{}.Random() })
}
// TestRandUint64n checks randuint64n(rnd, n) always returns values below n
// for assorted small n.
func TestRandUint64n(t *testing.T) {
	rnd, err := random.NewFromSeeder(random.CryptoSeeder)
	require.NoError(t, err)
	for i := 0; i < NumTrials(); i++ {
		n := uint64(2 + rand.Intn(42))
		require.True(t, randuint64n(rnd, n) < n)
	}
}
// NumTrials returns the randomized-test iteration count, reduced under
// "go test -short".
func NumTrials() int {
	if testing.Short() {
		return 1000
	}
	return 100000
}
<|start_filename|>gen/tests/unionbasic/gen-marshallers_test.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package unionbasic
import "testing"
// TestDateCorpus feeds each generated corpus vector to ParseDate and fails
// on any parse error. (File is trunnel-generated; vectors are fixed.)
func TestDateCorpus(t *testing.T) {
	cases := []struct {
		Data []byte
	}{
		{
			Data: []byte{0xfd, 0xc2, 0x94, 0x1},
		},
	}
	for _, c := range cases {
		_, err := ParseDate(c.Data)
		if err != nil {
			t.Fatal(err)
		}
	}
}
// TestBasicCorpus feeds each generated corpus vector to ParseBasic and fails
// on any parse error.
func TestBasicCorpus(t *testing.T) {
	cases := []struct {
		Data []byte
	}{
		{
			Data: []byte{0x3, 0xfc, 0x59, 0x2f, 0xfa},
		},
		{
			Data: []byte{0x3, 0xc1, 0x8f, 0xd, 0xce},
		},
		{
			Data: []byte{0x4, 0x8a, 0xf, 0x0, 0x3f, 0x5c, 0x4, 0x77, 0xcc},
		},
		{
			Data: []byte{0x6, 0x76, 0x77, 0x73, 0x68, 0x6f, 0x69, 0x70, 0x67, 0x7a, 0x6d, 0x67, 0x61, 0x78, 0x0},
		},
	}
	for _, c := range cases {
		_, err := ParseBasic(c.Data)
		if err != nil {
			t.Fatal(err)
		}
	}
}
<|start_filename|>gen/tests/nest/nest_test.go<|end_filename|>
package nest
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestNestParseLengthErrors checks Rect.Parse rejects all inputs shorter
// than the 4 bytes needed for two 2-byte points.
func TestNestParseLengthErrors(t *testing.T) {
	r := new(Rect)
	for n := 0; n < 4; n++ {
		_, err := r.Parse(make([]byte, n))
		require.Error(t, err)
	}
}
// TestNestStandard checks a valid Rect parse fills both nested points and
// returns the unconsumed trailing bytes.
func TestNestStandard(t *testing.T) {
	r := new(Rect)
	b := []byte{
		1, 2,
		3, 4,
		'r', 'e', 's', 't',
	}
	expect := &Rect{
		NorthEast: &Point{X: 1, Y: 2},
		SouthWest: &Point{X: 3, Y: 4},
	}
	rest, err := r.Parse(b)
	require.NoError(t, err)
	assert.Equal(t, expect, r)
	assert.Equal(t, []byte("rest"), rest)
}
<|start_filename|>gen/tests/pos/gen-marshallers_test.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package pos
import "testing"
// TestHasposCorpus feeds each generated corpus vector to ParseHaspos and
// fails on any parse error. (File is trunnel-generated; vectors are fixed.)
func TestHasposCorpus(t *testing.T) {
	cases := []struct {
		Data []byte
	}{
		{
			Data: []byte{0x61, 0x6b, 0x62, 0x6c, 0x65, 0x0, 0x62, 0x79, 0x68, 0x69, 0x7a, 0x7a, 0x0, 0x1, 0x94, 0xfd, 0xc2},
		},
	}
	for _, c := range cases {
		_, err := ParseHaspos(c.Data)
		if err != nil {
			t.Fatal(err)
		}
	}
}
<|start_filename|>gen/decl.go<|end_filename|>
package gen
import (
"fmt"
"strconv"
"strings"
"github.com/mmcloughlin/trunnel/ast"
"github.com/mmcloughlin/trunnel/inspect"
)
// Marshallers builds data marshallers for types in the given files.
// It emits Go source for package pkg and returns it gofmt-processed via
// imported() on the embedded printer.
func Marshallers(pkg string, fs []*ast.File) ([]byte, error) {
	g := &generator{}
	if err := g.files(pkg, fs); err != nil {
		return nil, err
	}
	return g.imported()
}
// generator emits Go parsing code for trunnel AST nodes. It embeds a printer
// for output accumulation and tracks per-struct state while generating.
type generator struct {
	printer
	resolver *inspect.Resolver
	receiver string // method receiver variable
	data     string // data variable
}
// files generates the package header and marshaller code for every file.
func (g *generator) files(pkg string, fs []*ast.File) error {
	g.header(pkg)
	if err := g.init(fs); err != nil {
		return err
	}
	for _, f := range fs {
		if err := g.file(f); err != nil {
			return err
		}
	}
	return nil
}
// init builds the cross-file resolver used for constant/struct lookups.
func (g *generator) init(fs []*ast.File) (err error) {
	g.resolver, err = inspect.NewResolverFiles(fs)
	return
}
// file generates declarations for all contexts, then all structs, in f.
func (g *generator) file(f *ast.File) error {
	for _, c := range f.Contexts {
		g.context(c)
	}
	for _, s := range f.Structs {
		g.structure(s)
	}
	return nil
}
// context emits a plain struct type declaration for a trunnel context.
func (g *generator) context(c *ast.Context) {
	g.printf("type %s struct {\n", name(c.Name))
	for _, m := range c.Members {
		g.structMemberDecl(m)
	}
	g.printf("}\n\n")
}
// structure emits the type declaration, Parse method, and Parse constructor
// for a struct. Extern structs are declared elsewhere and skipped.
// The receiver variable (first letter of the struct name, lowered) is set
// for the duration of the struct's generation.
func (g *generator) structure(s *ast.Struct) {
	if s.Extern() {
		return
	}
	g.receiver = strings.ToLower(s.Name[:1])
	g.structDecl(s)
	g.parse(s)
	g.parseConstructor(s)
	g.receiver = ""
}
// structDecl emits the Go struct type declaration for s.
func (g *generator) structDecl(s *ast.Struct) {
	g.printf("type %s struct {\n", name(s.Name))
	for _, m := range s.Members {
		g.structMemberDecl(m)
	}
	g.printf("}\n\n")
}
// structMemberDecl emits the field declaration(s) for one struct member.
// EOS markers occupy no storage; any other unknown member type is a
// programming error.
func (g *generator) structMemberDecl(m ast.Member) {
	switch m := m.(type) {
	case *ast.Field:
		g.printf("\t%s %s\n", name(m.Name), g.tipe(m.Type))
	case *ast.UnionMember:
		g.structUnionMemberDecl(m)
	case *ast.EOS:
		// ignore
	default:
		panic(unexpected(m))
	}
}
// structUnionMemberDecl flattens a union into the enclosing struct: every
// case's fields become struct fields. Fail/Ignore commands declare nothing.
func (g *generator) structUnionMemberDecl(m *ast.UnionMember) {
	for _, c := range m.Cases {
		for _, f := range c.Members {
			switch f := f.(type) {
			case *ast.Fail, *ast.Ignore:
				// nothing
			default:
				g.structMemberDecl(f)
			}
		}
	}
}
// parseConstructor emits the package-level ParseX convenience function that
// allocates a value and delegates to its Parse method.
func (g *generator) parseConstructor(s *ast.Struct) {
	n := name(s.Name)
	g.printf("func Parse%s(data []byte%s) (*%s, error) {\n", n, contextSignature(s.Contexts), n)
	g.printf("%s := new(%s)\n", g.receiver, n)
	g.printf("_, err := %s.Parse(data%s)\n", g.receiver, contextArgs(s.Contexts))
	g.printf("if err != nil { return nil, err }\n")
	g.printf("return %s, nil\n", g.receiver)
	g.printf("}\n\n")
}
// parse generates a parse function for the type.
// The generated method consumes from a local "cur" slice (tracked in g.data)
// and returns the unconsumed remainder.
func (g *generator) parse(s *ast.Struct) {
	g.printf("func (%s *%s) Parse(data []byte%s) ([]byte, error) {\n",
		g.receiver, name(s.Name), contextSignature(s.Contexts))
	g.printf("cur := data\n")
	g.data = "cur"
	for _, m := range s.Members {
		g.parseMember(m)
	}
	g.printf("return %s, nil\n}\n\n", g.data)
	g.data = ""
}
// parseMember emits the parsing code for one struct member, wrapped in a
// block so generated temporaries (e.g. "i", "err") do not collide between
// members.
func (g *generator) parseMember(m ast.Member) {
	g.printf("{\n")
	switch m := m.(type) {
	case *ast.Field:
		lhs := g.receiver + "." + name(m.Name)
		g.parseType(lhs, m.Type)
	case *ast.UnionMember:
		g.parseUnionMember(m)
	case *ast.EOS:
		g.assertEnd()
	case *ast.Ignore:
		// Ignore command: discard all remaining data.
		g.printf("%s = []byte{}\n", g.data)
	case *ast.Fail:
		// Fail command: this union case is invalid input by definition.
		g.printf("return nil, errors.New(\"disallowed case\")")
	default:
		panic(unexpected(m))
	}
	g.printf("}\n")
}
// parseType emits code that parses a value of type t from the data variable
// into lhs, advancing the data slice past the consumed bytes.
func (g *generator) parseType(lhs string, t ast.Type) {
	switch t := t.(type) {
	case *ast.NulTermString:
		// Scan for the NUL terminator; the terminator itself is consumed
		// but excluded from the string value.
		g.printf("i := bytes.IndexByte(%s, 0)\n", g.data)
		g.printf("if i < 0 { return nil, errors.New(\"could not parse nul-term string\") }\n")
		g.printf("%s, %s = string(%s[:i]), %s[i+1:]\n", lhs, g.data, g.data, g.data)
	case *ast.IntType:
		g.parseIntType(lhs, t)
	case *ast.CharType:
		// char is represented as a u8.
		g.parseType(lhs, ast.U8)
	case *ast.Ptr:
		// @ptr captures the current offset; it consumes no bytes.
		g.printf("%s = len(data) - len(%s)\n", lhs, g.data)
	case *ast.StructRef:
		g.printf("var err error\n")
		g.printf("%s = new(%s)\n", lhs, name(t.Name))
		s, ok := g.resolver.Struct(t.Name)
		if !ok {
			panic("struct not found") // XXX return err
		}
		g.printf("%s, err = %s.Parse(%s%s)\n", g.data, lhs, g.data, contextArgs(s.Contexts))
		g.printf("if err != nil { return nil, err }\n")
	case *ast.FixedArrayMember:
		g.parseArray(lhs, t.Base, t.Size)
	case *ast.VarArrayMember:
		g.parseArray(lhs, t.Base, t.Constraint)
	default:
		panic(unexpected(t))
	}
}
// parseIntType emits code that parses a big-endian fixed-width integer into
// lhs, enforcing any declared IN [...] constraint before advancing the data.
func (g *generator) parseIntType(lhs string, t *ast.IntType) {
	n := t.Size / 8 // width in bytes
	g.lengthCheck(strconv.Itoa(int(n)))
	if n == 1 {
		// Single byte: direct index, no binary package needed.
		g.printf("%s = %s[0]\n", lhs, g.data)
	} else {
		g.printf("%s = binary.BigEndian.Uint%d(%s)\n", lhs, t.Size, g.data)
	}
	if t.Constraint != nil {
		g.printf("if !(%s) {\n", g.conditional(lhs, t.Constraint))
		g.printf("return nil, errors.New(\"integer constraint violated\")\n")
		g.printf("}\n")
	}
	g.printf("%s = %s[%d:]\n", g.data, g.data, n)
}
// parseArray emits code parsing an array of base elements into lhs. The
// length constraint s may be a compile-time constant, a reference to a
// previously parsed field, a leftover-length marker, or nil (consume all
// remaining data).
func (g *generator) parseArray(lhs string, base ast.Type, s ast.LengthConstraint) {
	switch s := s.(type) {
	case *ast.IntegerConstRef, *ast.IntegerLiteral:
		// Fixed-size array: lhs already has array type, no make needed.
		g.printf("for idx := 0; idx < %s; idx++ {\n", g.integer(s))
		g.parseType(lhs+"[idx]", base)
		g.printf("}\n")
	case *ast.IDRef:
		// Length comes from an already-parsed field (or context field).
		size := fmt.Sprintf("int(%s)", g.ref(s))
		g.printf("%s = make([]%s, %s)\n", lhs, g.tipe(base), size)
		g.printf("for idx := 0; idx < %s; idx++ {\n", size)
		g.parseType(lhs+"[idx]", base)
		g.printf("}\n")
	case *ast.Leftover:
		// Parse all but the trailing Num bytes.
		g.constrained(s, func() {
			g.parseArray(lhs, base, nil)
		})
	case nil:
		// No constraint: consume elements until the data runs out.
		g.printf("%s = make([]%s, 0)\n", lhs, g.tipe(base))
		g.printf("for len(%s) > 0 {\n", g.data)
		g.printf("var tmp %s\n", g.tipe(base))
		g.parseType("tmp", base)
		g.printf("%s = append(%s, tmp)\n", lhs, lhs)
		g.printf("}\n")
	default:
		panic(unexpected(s))
	}
}
// parseUnionMember emits a switch on the union's tag, one case per union
// arm. A union with an explicit length is first wrapped in a constrained
// region, then re-emitted without the length to generate the switch body.
func (g *generator) parseUnionMember(u *ast.UnionMember) {
	if u.Length != nil {
		g.constrained(u.Length, func() {
			// Recurse with the Length stripped to emit the plain switch.
			g.parseUnionMember(&ast.UnionMember{
				Name:  u.Name,
				Tag:   u.Tag,
				Cases: u.Cases,
			})
		})
		return
	}
	tag := g.ref(u.Tag)
	g.printf("switch {\n")
	for _, c := range u.Cases {
		if c.Case == nil {
			// nil Case marks the default arm.
			g.printf("default:\n")
		} else {
			g.printf("case %s:\n", g.conditional(tag, c.Case))
		}
		for _, m := range c.Members {
			g.parseMember(m)
		}
	}
	g.printf("}\n")
}
// constrained emits code that restricts the data variable to a fixed-length
// window, runs f's generated code against that window, asserts the window
// was fully consumed, then restores the remaining data.
func (g *generator) constrained(c ast.LengthConstraint, f func()) {
	var n string // expression for the window length, in generated code
	switch c := c.(type) {
	case *ast.Leftover:
		// Window is everything except the trailing Num bytes.
		g.lengthCheck(g.integer(c.Num))
		n = fmt.Sprintf("len(%s)-%s", g.data, g.integer(c.Num))
	case *ast.IDRef:
		// Window length read from a previously parsed field.
		n = fmt.Sprintf("int(%s)", g.ref(c))
		g.lengthCheck(n)
	default:
		panic(unexpected(c))
	}
	g.printf("restore := %s[%s:]\n", g.data, n)
	g.printf("%s = %s[:%s]\n", g.data, g.data, n)
	f()
	g.assertEnd()
	g.printf("%s = restore\n", g.data)
}
// ref builds a variable reference that resolves to the given trunnel IDRef.
// An empty scope refers to a field on the receiver; a named scope refers to
// a context parameter.
func (g *generator) ref(r *ast.IDRef) string {
	if r.Scope == "" {
		return g.receiver + "." + name(r.Name)
	}
	return r.Scope + "." + name(r.Name)
}
// lengthCheck emits a guard that the data variable holds at least min bytes.
func (g *generator) lengthCheck(min string) {
	g.printf("if len(%s) < %s { return nil, errors.New(\"data too short\") }\n", g.data, min)
}
// assertEnd emits a guard that the data variable is fully consumed.
func (g *generator) assertEnd() {
	g.printf("if len(%s) > 0 { return nil, errors.New(\"trailing data disallowed\") }\n", g.data)
}
func (g *generator) integer(i ast.Integer) string {
x, err := g.resolver.Integer(i)
if err != nil {
panic(err) // XXX panic
}
return strconv.FormatInt(x, 10)
}
// conditional renders a boolean expression that is true when the value v
// falls inside the given integer list. Single values compare with ==;
// ranges expand to an inclusive low/high bound check. Clauses are joined
// with ||.
func (g *generator) conditional(v string, c *ast.IntegerList) string {
	var clauses []string
	for _, rng := range c.Ranges {
		var clause string
		if rng.High == nil {
			// Single case
			clause = fmt.Sprintf("%s == %s", v, g.integer(rng.Low))
		} else {
			clause = fmt.Sprintf("(%s <= %s && %s <= %s)", g.integer(rng.Low), v, v, g.integer(rng.High))
		}
		clauses = append(clauses, clause)
	}
	return strings.Join(clauses, " || ")
}
// tipe maps a trunnel type to the corresponding Go type expression.
func (g *generator) tipe(t interface{}) string {
	switch typ := t.(type) {
	case *ast.CharType:
		return "byte"
	case *ast.NulTermString:
		return "string"
	case *ast.Ptr:
		// Positions within the input are represented as integer offsets.
		return "int"
	case *ast.IntType:
		return fmt.Sprintf("uint%d", typ.Size)
	case *ast.StructRef:
		return "*" + name(typ.Name)
	case *ast.FixedArrayMember:
		return fmt.Sprintf("[%s]%s", g.integer(typ.Size), g.tipe(typ.Base))
	case *ast.VarArrayMember:
		return fmt.Sprintf("[]%s", g.tipe(typ.Base))
	default:
		panic(unexpected(t))
	}
}
// contextSignature renders the trailing parameter list for context
// arguments: each context name becomes a parameter whose type is the
// exported Go struct generated for that context.
//
// Uses strings.Builder instead of repeated string concatenation, which
// allocated a new string per iteration.
func contextSignature(names []string) string {
	var b strings.Builder
	for _, n := range names {
		b.WriteString(", ")
		b.WriteString(n)
		b.WriteByte(' ')
		b.WriteString(name(n))
	}
	return b.String()
}
// contextArgs renders the trailing argument list used when calling a
// function that takes context parameters.
//
// Uses strings.Builder instead of repeated string concatenation, which
// allocated a new string per iteration.
func contextArgs(names []string) string {
	var b strings.Builder
	for _, n := range names {
		b.WriteString(", ")
		b.WriteString(n)
	}
	return b.String()
}
// unexpected formats a panic message for a value whose dynamic type the
// generator does not know how to handle.
func unexpected(t interface{}) string {
	msg := fmt.Sprintf("unexpected type %T", t)
	return msg
}
<|start_filename|>parse/general_test.go<|end_filename|>
package parse
import (
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestFileNotExists verifies that File reports an error for a path that
// does not exist on disk.
func TestFileNotExists(t *testing.T) {
	_, err := File("doesnotexist")
	assert.Error(t, err)
}
// TestFilesNotExists verifies that Files reports an error when any listed
// path does not exist.
func TestFilesNotExists(t *testing.T) {
	_, err := Files([]string{"doesnotexist"})
	assert.Error(t, err)
}
// TestFiles parses every valid testdata trunnel file and checks that all
// three parse successfully.
func TestFiles(t *testing.T) {
	paths, globErr := filepath.Glob("testdata/valid/*.trunnel")
	require.NoError(t, globErr)
	parsed, parseErr := Files(paths)
	require.NoError(t, parseErr)
	assert.Len(t, parsed, 3)
}
// TestErrorReader verifies that Reader propagates failures from the
// underlying io.Reader.
func TestErrorReader(t *testing.T) {
	_, err := Reader("", errorReader{})
	assert.Error(t, err)
}
// errorReader is an io.Reader stub whose Read always fails.
type errorReader struct{}

// Read returns assert.AnError without consuming any input.
func (r errorReader) Read(_ []byte) (int, error) {
	return 0, assert.AnError
}
<|start_filename|>parse/internal/parser/parser_test.go<|end_filename|>
package parser
import (
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
// TestOptions exercises every parser option constructor by parsing a
// trivial source with all of them applied at once.
func TestOptions(t *testing.T) {
	allOpts := []Option{
		AllowInvalidUTF8(false),
		Debug(true),
		Entrypoint(""),
		MaxExpressions(0),
		Memoize(false),
		Recover(true),
		GlobalStore("foo", "baz"),
		InitState("blah", 42),
		Statistics(&Stats{}, "hmm"),
	}
	input := strings.NewReader("const A = 1337;")
	_, err := ParseReader("", input, allOpts...)
	assert.NoError(t, err)
}
<|start_filename|>gen/tests/rem/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package rem
import (
"encoding/binary"
"errors"
)
// Rem is a struct with a fixed header followed by all remaining input bytes.
type Rem struct {
	Head uint32  // big-endian u32 header
	Tail []uint8 // every byte remaining after Head
}
// Parse fills r from data, returning the unconsumed remainder (always empty
// here, since Tail consumes everything) or an error when the input is
// shorter than the fixed header.
func (r *Rem) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		// Head: one big-endian uint32.
		if len(cur) < 4 {
			return nil, errors.New("data too short")
		}
		r.Head = binary.BigEndian.Uint32(cur)
		cur = cur[4:]
	}
	{
		// Tail: consume every remaining byte, one u8 at a time.
		r.Tail = make([]uint8, 0)
		for len(cur) > 0 {
			var tmp uint8
			if len(cur) < 1 {
				return nil, errors.New("data too short")
			}
			tmp = cur[0]
			cur = cur[1:]
			r.Tail = append(r.Tail, tmp)
		}
	}
	return cur, nil
}
// ParseRem parses a Rem from data, discarding any unconsumed remainder.
func ParseRem(data []byte) (*Rem, error) {
	r := new(Rem)
	_, err := r.Parse(data)
	if err != nil {
		return nil, err
	}
	return r, nil
}
<|start_filename|>ast/types.go<|end_filename|>
// Package ast defines types used to represent syntax trees for trunnel files.
package ast
// File represents a complete trunnel file.
type File struct {
	Constants []*Constant // top-level constant declarations
	Contexts  []*Context  // context declarations
	Structs   []*Struct   // struct declarations
	Pragmas   []*Pragma   // trunnel pragma directives
}
// Declarations
// -----------------------------------------------------------------------------
// Constant is a constant declaration.
type Constant struct {
	Name  string // identifier
	Value int64  // integer value
}

// Context is a context declaration.
type Context struct {
	Name    string
	Members []*Field
}

// Struct is a struct declaration.
type Struct struct {
	Name     string
	Contexts []string // names of the contexts this struct uses
	Members  []Member // nil for extern struct
}

// Extern returns whether the struct declaration is external, that is,
// declared without a member list.
func (s Struct) Extern() bool {
	return s.Members == nil
}

// Pragma represents a directive to trunnel.
type Pragma struct {
	Type    string   // pragma kind
	Options []string // pragma arguments
}
// Struct Members
// -----------------------------------------------------------------------------
// Member is a field in a struct definition.
type Member interface{}

// Field is a data field in a struct.
type Field struct {
	Name string
	Type Type
}

// UnionMember is a union member of a struct.
type UnionMember struct {
	Name   string
	Tag    *IDRef           // field whose value selects the case
	Length LengthConstraint // optional byte-length bound on the union
	Cases  []*UnionCase
}

// EOS signals "end of struct".
type EOS struct{}
// Types
// -----------------------------------------------------------------------------
// Type is a type.
type Type interface{}

// IntType represents an integer type (u8, u16, u32 and u64).
type IntType struct {
	Size       uint         // width in bits
	Constraint *IntegerList // optional set of permitted values
}

// Possible IntTypes.
var (
	U8  = &IntType{Size: 8}
	U16 = &IntType{Size: 16}
	U32 = &IntType{Size: 32}
	U64 = &IntType{Size: 64}
)

// StructRef represents a reference to a struct type.
type StructRef struct {
	Name string
}

// Ptr signals a request to store a pointer to a location within a struct.
type Ptr struct{}

// NulTermString is a NUL-terminated string type.
type NulTermString struct{}

// CharType represents the character type.
type CharType struct{}

// ArrayBase is a type that can be stored in an array.
type ArrayBase interface{}

// FixedArrayMember is a fixed-length array.
type FixedArrayMember struct {
	Base ArrayBase
	Size Integer // length known at generation time
}

// VarArrayMember is a variable-length array.
type VarArrayMember struct {
	Base       ArrayBase
	Constraint LengthConstraint // nil means remainder
}

// Unions
// -----------------------------------------------------------------------------

// UnionCase is a case in a union.
type UnionCase struct {
	Case    *IntegerList // nil is the default case
	Members []Member
}

// Fail directive for a union case.
type Fail struct{}

// Ignore directive in a union case.
type Ignore struct{}
// Other
// -----------------------------------------------------------------------------
// Integer specifies an integer (either directly or via a constant reference).
type Integer interface{}

// IntegerConstRef specifies an integer via a reference to a constant.
type IntegerConstRef struct {
	Name string // name of the referenced constant
}

// IntegerLiteral specifies an integer directly.
type IntegerLiteral struct {
	Value int64
}

// IntegerRange represents a range of integers.
type IntegerRange struct {
	Low  Integer
	High Integer // nil for a single-value range
}
// NewIntegerRange constructs an IntegerRange from lo to hi.
func NewIntegerRange(lo, hi Integer) *IntegerRange {
	return &IntegerRange{Low: lo, High: hi}
}
// NewIntegerRangeLiteral constructs an IntegerRange with literal bounds.
func NewIntegerRangeLiteral(lo, hi int64) *IntegerRange {
	low := &IntegerLiteral{Value: lo}
	high := &IntegerLiteral{Value: hi}
	return NewIntegerRange(low, high)
}
// NewIntegerRangeSingle constructs an IntegerRange containing just one integer.
func NewIntegerRangeSingle(i Integer) *IntegerRange {
	// A single-value range is represented with a nil upper bound.
	return NewIntegerRange(i, nil)
}

// NewIntegerRangeSingleLiteral constructs an IntegerRange containing a single
// integer specified with a literal.
func NewIntegerRangeSingleLiteral(v int64) *IntegerRange {
	return NewIntegerRangeSingle(&IntegerLiteral{Value: v})
}
// IntegerList specifies a set of integers.
type IntegerList struct {
	Ranges []*IntegerRange // the set is the union of the listed ranges
}

// NewIntegerList constructs an integer list from the given ranges.
func NewIntegerList(ranges ...*IntegerRange) *IntegerList {
	return &IntegerList{
		Ranges: ranges,
	}
}
// LengthConstraint specifies a constraint on the length of a struct member.
type LengthConstraint interface{}

// Leftover is a LengthConstraint which specifies the member occupies all but
// the last Num bytes.
type Leftover struct {
	Num Integer
}

// IDRef is a reference to an identifier, possibly within a scope.
type IDRef struct {
	Scope string // empty when the reference is unscoped
	Name  string
}
<|start_filename|>gen/tests/leftover/leftover_test.go<|end_filename|>
package leftover
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestLeftoverParseTooShort feeds Parse inputs shorter than the minimum of
// 16 bytes (two u32 head words plus two u32 tail words) and expects an
// error for every length.
func TestLeftoverParseTooShort(t *testing.T) {
	target := new(Leftover)
	for size := 0; size < 16; size++ {
		_, err := target.Parse(make([]byte, size))
		require.Error(t, err)
	}
}
// TestLeftoverParseNonMultiplesOf4 checks that any length that is not a
// multiple of four fails to parse: all sections decode whole u32 words, so
// a ragged remainder cannot be consumed.
func TestLeftoverParseNonMultiplesOf4(t *testing.T) {
	for size := 1; size < 1000; size++ {
		if size%4 == 0 {
			continue
		}
		_, err := new(Leftover).Parse(make([]byte, size))
		require.Error(t, err)
	}
}
// TestLeftoverParseSuccess parses a well-formed buffer and checks that the
// head, mid and tail sections land in the right fields with big-endian
// decoding, and that no input remains.
func TestLeftoverParseSuccess(t *testing.T) {
	b := []byte{
		0, 0, 0, 0, // head
		1, 1, 1, 1,
		2, 2, 2, 2, // mid
		3, 3, 3, 3,
		4, 4, 4, 4,
		5, 5, 5, 5, // tail
		6, 6, 6, 6,
	}
	l := new(Leftover)
	rest, err := l.Parse(b)
	require.NoError(t, err)
	assert.Equal(t, []byte{}, rest)
	assert.Equal(t, &Leftover{
		Head: [2]uint32{0x00000000, 0x01010101},
		Mid:  []uint32{0x02020202, 0x03030303, 0x04040404},
		Tail: [2]uint32{0x05050505, 0x06060606},
	}, l)
}
<|start_filename|>gen/tests/leftover/gen-marshallers.go<|end_filename|>
// Code generated by trunnel. DO NOT EDIT.
package leftover
import (
"encoding/binary"
"errors"
)
// Leftover has a fixed-size head and tail with a variable-length middle
// section occupying all bytes between them.
type Leftover struct {
	Head [2]uint32
	Mid  []uint32
	Tail [2]uint32
}
// Parse fills l from data: a fixed two-word head, a variable-length mid
// section occupying all but the final eight bytes, and a fixed two-word
// tail. Returns the unconsumed remainder or an error on malformed input.
func (l *Leftover) Parse(data []byte) ([]byte, error) {
	cur := data
	{
		// Head: two big-endian uint32 values.
		for idx := 0; idx < 2; idx++ {
			if len(cur) < 4 {
				return nil, errors.New("data too short")
			}
			l.Head[idx] = binary.BigEndian.Uint32(cur)
			cur = cur[4:]
		}
	}
	{
		// Mid: restrict the working slice to everything except the last 8
		// bytes, which are set aside for Tail and restored afterwards.
		if len(cur) < 8 {
			return nil, errors.New("data too short")
		}
		restore := cur[len(cur)-8:]
		cur = cur[:len(cur)-8]
		l.Mid = make([]uint32, 0)
		for len(cur) > 0 {
			var tmp uint32
			if len(cur) < 4 {
				return nil, errors.New("data too short")
			}
			tmp = binary.BigEndian.Uint32(cur)
			cur = cur[4:]
			l.Mid = append(l.Mid, tmp)
		}
		if len(cur) > 0 {
			return nil, errors.New("trailing data disallowed")
		}
		cur = restore
	}
	{
		// Tail: two big-endian uint32 values.
		for idx := 0; idx < 2; idx++ {
			if len(cur) < 4 {
				return nil, errors.New("data too short")
			}
			l.Tail[idx] = binary.BigEndian.Uint32(cur)
			cur = cur[4:]
		}
	}
	return cur, nil
}
// ParseLeftover parses a Leftover from data, discarding any unconsumed
// remainder.
func ParseLeftover(data []byte) (*Leftover, error) {
	l := new(Leftover)
	_, err := l.Parse(data)
	if err != nil {
		return nil, err
	}
	return l, nil
}
<|start_filename|>testdata/tor/Makefile<|end_filename|>
# Reference checkout of the tor repository (git submodule).
REF=../../ref/tor

# Regenerate the README and re-import all trunnel files from tor.
all: README.md import

# README.md records the submodule version and the original file locations.
README.md: manifest.txt
	echo 'Test files found in tor repository version:' > $@
	echo '```' >> $@
	git submodule status ${REF} >> $@
	echo '```' >> $@
	echo 'Original locations:' >> $@
	echo '```' >> $@
	cat $< >> $@
	echo '```' >> $@

# Copy every file listed in the manifest into this directory.
import: manifest.txt
	xargs -i cp -v {} . < $<

# manifest.txt lists all trunnel files in the reference checkout.
# Depending on FORCE makes the manifest rebuild on every run.
manifest.txt: FORCE
	find ${REF} -name '*.trunnel' > $@

.PHONY: FORCE
| mmcloughlin/trunnel |
<|start_filename|>SongBrowserPlugin/Configuration/SongFilterMode.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SongBrowser.Configuration
{
    /// <summary>
    /// The set of filter modes selectable in the song browser UI.
    /// </summary>
    public enum SongFilterMode
    {
        None,
        Favorites,
        Playlist,
        Search,
        Ranked,
        Unranked,
        Played,
        Unplayed,
        Requirements,

        // For other mods that extend SongBrowser
        Custom
    }
}
<|start_filename|>SongBrowserPlugin/UI/Base64Sprites.cs<|end_filename|>
using System;
using System.Linq;
using System.Reflection;
using System.Text.RegularExpressions;
using UnityEngine;
namespace SongBrowser.UI
{
    /// <summary>
    /// Loads the plugin's sprite assets from embedded resources and converts
    /// between sprites, textures and base64-encoded PNG data.
    /// </summary>
    class Base64Sprites
    {
        public static Sprite StarFullIcon;
        public static Sprite SpeedIcon;
        public static Sprite GraphIcon;
        public static Sprite DeleteIcon;
        public static Sprite XIcon;
        public static Sprite DoubleArrow;
        public static Sprite RandomIcon;
        public static Sprite NoteStartOffsetIcon;
        public static Sprite PlaylistIcon;

        /// <summary>
        /// Loads every icon sprite from the assembly's embedded resources.
        /// Must be called before any of the static sprite fields are used.
        /// </summary>
        public static void Init()
        {
            SpeedIcon = Base64Sprites.LoadSpriteFromResources("SongBrowser.Assets.Speed.png");
            GraphIcon = Base64Sprites.LoadSpriteFromResources("SongBrowser.Assets.Graph.png");
            XIcon = Base64Sprites.LoadSpriteFromResources("SongBrowser.Assets.X.png");
            StarFullIcon = Base64Sprites.LoadSpriteFromResources("SongBrowser.Assets.StarFull.png");
            DeleteIcon = Base64Sprites.LoadSpriteFromResources("SongBrowser.Assets.DeleteIcon.png");
            DoubleArrow = Base64Sprites.LoadSpriteFromResources("SongBrowser.Assets.DoubleArrow.png");
            RandomIcon = Base64Sprites.LoadSpriteFromResources("SongBrowser.Assets.RandomIcon.png");
            NoteStartOffsetIcon = Base64Sprites.LoadSpriteFromResources("SongBrowser.Assets.NoteStartOffset.png");
            PlaylistIcon = Base64Sprites.LoadSpriteFromResources("SongBrowser.Assets.PlaylistIcon.png");
        }

        /// <summary>
        /// Encodes a sprite's texture as a base64 PNG string.
        /// </summary>
        public static string SpriteToBase64(Sprite input)
        {
            return Convert.ToBase64String(input.texture.EncodeToPNG());
        }

        /// <summary>
        /// Decodes a base64 image string (optionally prefixed with a data-URI
        /// header) into a sprite. Returns null if decoding fails.
        /// </summary>
        public static Sprite Base64ToSprite(string base64)
        {
            // prune base64 encoded image header
            Regex r = new Regex(@"data:image.*base64,");
            base64 = r.Replace(base64, "");

            Sprite s;
            try
            {
                Texture2D tex = Base64ToTexture2D(base64);
                s = Sprite.Create(tex, new Rect(0, 0, tex.width, tex.height), (Vector2.one / 2f));
            }
            catch (Exception)
            {
                Plugin.Log.Critical("Exception loading texture from base64 data.");
                s = null;
            }

            return s;
        }

        /// <summary>
        /// Decodes base64 PNG data into a texture sized from the image header.
        /// </summary>
        public static Texture2D Base64ToTexture2D(string encodedData)
        {
            byte[] imageData = Convert.FromBase64String(encodedData);

            int width, height;
            GetImageSize(imageData, out width, out height);

            Texture2D texture = new Texture2D(width, height, TextureFormat.ARGB32, false, true)
            {
                hideFlags = HideFlags.HideAndDontSave,
                filterMode = FilterMode.Trilinear
            };
            texture.LoadImage(imageData);
            return texture;
        }

        // Reads the image dimensions from the PNG header. NOTE(review): the
        // offsets read only the low 16 bits of the 32-bit big-endian IHDR
        // width/height fields, so this assumes images smaller than 65536
        // pixels per side — confirm against the bundled assets.
        private static void GetImageSize(byte[] imageData, out int width, out int height)
        {
            width = ReadInt(imageData, 3 + 15);
            height = ReadInt(imageData, 3 + 15 + 2 + 2);
        }

        // Reads a big-endian 16-bit integer at the given offset.
        private static int ReadInt(byte[] imageData, int offset)
        {
            return (imageData[offset] << 8) | imageData[offset + 1];
        }

        /// <summary>
        /// Creates a texture from raw image bytes; returns null for empty or
        /// undecodable input.
        /// </summary>
        public static Texture2D LoadTextureRaw(byte[] file)
        {
            if (file.Count() > 0)
            {
                Texture2D Tex2D = new Texture2D(2, 2, TextureFormat.RGBA32, false, false);
                if (Tex2D.LoadImage(file))
                    return Tex2D;
            }
            return null;
        }

        /// <summary>
        /// Loads a texture from an embedded resource of the calling assembly.
        /// </summary>
        public static Texture2D LoadTextureFromResources(string resourcePath)
        {
            return LoadTextureRaw(GetResource(Assembly.GetCallingAssembly(), resourcePath));
        }

        /// <summary>
        /// Creates a sprite from raw image bytes.
        /// </summary>
        public static Sprite LoadSpriteRaw(byte[] image, float PixelsPerUnit = 100.0f)
        {
            return LoadSpriteFromTexture(LoadTextureRaw(image), PixelsPerUnit);
        }

        /// <summary>
        /// Wraps a texture in a sprite covering the full texture area.
        /// </summary>
        public static Sprite LoadSpriteFromTexture(Texture2D SpriteTexture, float PixelsPerUnit = 100.0f)
        {
            if (SpriteTexture)
                return Sprite.Create(SpriteTexture, new Rect(0, 0, SpriteTexture.width, SpriteTexture.height), new Vector2(0, 0), PixelsPerUnit);
            return null;
        }

        /// <summary>
        /// Loads a sprite from an embedded resource of the calling assembly.
        /// </summary>
        public static Sprite LoadSpriteFromResources(string resourcePath, float PixelsPerUnit = 100.0f)
        {
            return LoadSpriteRaw(GetResource(Assembly.GetCallingAssembly(), resourcePath), PixelsPerUnit);
        }

        /// <summary>
        /// Reads an embedded resource fully into a byte array.
        /// Fix: the previous implementation ignored the return value of
        /// Stream.Read (which may legally return fewer bytes than requested)
        /// and never disposed the stream. This version loops until the buffer
        /// is full and releases the stream deterministically.
        /// </summary>
        public static byte[] GetResource(Assembly asm, string ResourceName)
        {
            using (System.IO.Stream stream = asm.GetManifestResourceStream(ResourceName))
            {
                byte[] data = new byte[stream.Length];
                int offset = 0;
                while (offset < data.Length)
                {
                    int read = stream.Read(data, offset, data.Length - offset);
                    if (read <= 0)
                    {
                        throw new System.IO.EndOfStreamException("Unexpected end of stream reading resource: " + ResourceName);
                    }
                    offset += read;
                }
                return data;
            }
        }
    }
}
<|start_filename|>SongBrowserPlugin/Installers/SongBrowserMenuInstallers.cs<|end_filename|>
using SongBrowser.UI.ViewControllers;
using Zenject;
namespace SongBrowser.Installers
{
    /// <summary>
    /// Zenject installer for the menu scene: binds the settings view
    /// controller as a singleton for all of its implemented interfaces.
    /// </summary>
    class SongBrowserMenuInstaller : Installer
    {
        public override void InstallBindings()
        {
            Container.BindInterfacesTo<SettingsViewController>().AsSingle();
        }
    }
}
<|start_filename|>SongBrowserPlugin/manifest.json<|end_filename|>
{
"$schema": "https://raw.githubusercontent.com/bsmg/BSIPA-MetadataFileSchema/master/Schema.json",
"author": "Halsafar",
"description": "Adds sort and filter features to the level selection UI.",
"gameVersion": "1.18.0",
"id": "SongBrowser",
"name": "Song Browser",
"version": "6.3.2",
"dependsOn": {
"SongCore": "^3.7.0",
"SongDataCore": "^1.3.8",
"BSIPA": "^4.1.3",
"BS Utils": "^1.4.9",
"BeatSaberMarkupLanguage": "^1.5.1",
"BeatSaberPlaylistsLib": "^1.3.0",
"SiraUtil": "^2.5.5"
},
"misc": {
"plugin-hint": "SongBrowser.Plugin"
}
}
<|start_filename|>SongBrowserPlugin/UI/Browser/SongFilterButton.cs<|end_filename|>
using SongBrowser.Configuration;
using UnityEngine.UI;
namespace SongBrowser.UI
{
    /// <summary>
    /// Pairs a UI button with the filter mode it represents.
    /// </summary>
    class SongFilterButton
    {
        // The filter mode associated with this button.
        public SongFilterMode FilterMode;

        // The Unity UI button for this filter.
        public Button Button;
    }
}
| lhvy/BeatSaberSongBrowser |
<|start_filename|>Dockerfile<|end_filename|>
# Build stage: compile cosmogony with the GEOS geometry library available.
FROM rust:1-slim-buster as builder
WORKDIR /srv/cosmogony
ENV DEBIAN_FRONTEND noninteractive
# GEOS is needed at build time; clean apt caches to keep the layer small.
RUN apt-get update && apt-get install -y libgeos-c1v5 libgeos-dev && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
COPY . ./
RUN cargo build --release

# Runtime stage: slim image with only the compiled binary and its GEOS dependency.
FROM debian:buster-slim
WORKDIR /srv
ENV DEBIAN_FRONTEND noninteractive
RUN apt-get update && apt-get install -y libgeos-c1v5 libgeos-dev && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
COPY --from=builder /srv/cosmogony/target/release/cosmogony /usr/bin/cosmogony
ENTRYPOINT ["cosmogony"]
| skurfuerst/cosmogony |
<|start_filename|>libwasm/ExportSection.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using Wasm.Binary;
namespace Wasm
{
    /// <summary>
    /// A type of section that exports values.
    /// </summary>
    public sealed class ExportSection : Section
    {
        /// <summary>
        /// Creates an empty export section.
        /// </summary>
        public ExportSection()
        {
            this.Exports = new List<ExportedValue>();
            this.ExtraPayload = new byte[0];
        }

        /// <summary>
        /// Creates an export section from a sequence of exports.
        /// </summary>
        /// <param name="exports">The exports to put in the export section.</param>
        public ExportSection(IEnumerable<ExportedValue> exports)
            : this(exports, new byte[0])
        {
        }

        /// <summary>
        /// Creates an export section from a sequence of exports and a trailing payload.
        /// </summary>
        /// <param name="exports">The exports to put in the export section.</param>
        /// <param name="extraPayload">
        /// A sequence of bytes that have no intrinsic meaning; they are part
        /// of the export section but are placed after the export section's actual contents.
        /// </param>
        public ExportSection(IEnumerable<ExportedValue> exports, byte[] extraPayload)
        {
            this.Exports = new List<ExportedValue>(exports);
            this.ExtraPayload = extraPayload;
        }

        /// <inheritdoc/>
        public override SectionName Name => new SectionName(SectionCode.Export);

        /// <summary>
        /// Gets the list of all values that are exported by this section.
        /// </summary>
        /// <returns>A list of all values exported by this section.</returns>
        public List<ExportedValue> Exports { get; private set; }

        /// <summary>
        /// Gets this export section's additional payload.
        /// </summary>
        /// <returns>The additional payload, as an array of bytes.</returns>
        public byte[] ExtraPayload { get; set; }

        /// <inheritdoc/>
        public override void WritePayloadTo(BinaryWasmWriter writer)
        {
            // The payload is a LEB128 entry count followed by the entries.
            writer.WriteVarUInt32((uint)Exports.Count);
            foreach (var export in Exports)
            {
                export.WriteTo(writer);
            }
            writer.Writer.Write(ExtraPayload);
        }

        /// <summary>
        /// Reads the export section with the given header.
        /// </summary>
        /// <param name="header">The section header.</param>
        /// <param name="reader">A reader for a binary WebAssembly file.</param>
        /// <returns>The parsed section.</returns>
        public static ExportSection ReadSectionPayload(
            SectionHeader header, BinaryWasmReader reader)
        {
            long startPos = reader.Position;
            // Read the exported value entries.
            uint count = reader.ReadVarUInt32();
            var exportedVals = new List<ExportedValue>();
            for (uint i = 0; i < count; i++)
            {
                exportedVals.Add(
                    new ExportedValue(
                        reader.ReadString(),
                        (ExternalKind)reader.ReadByte(),
                        reader.ReadVarUInt32()));
            }
            // Skip any remaining bytes.
            var extraPayload = reader.ReadRemainingPayload(startPos, header);
            return new ExportSection(exportedVals, extraPayload);
        }

        /// <inheritdoc/>
        public override void Dump(TextWriter writer)
        {
            writer.Write(Name.ToString());
            writer.Write("; number of entries: ");
            writer.Write(Exports.Count);
            writer.WriteLine();
            for (int i = 0; i < Exports.Count; i++)
            {
                writer.Write("#");
                writer.Write(i);
                writer.Write(" -> ");
                Exports[i].Dump(writer);
                writer.WriteLine();
            }
            if (ExtraPayload.Length > 0)
            {
                writer.Write("Extra payload size: ");
                writer.Write(ExtraPayload.Length);
                writer.WriteLine();
                DumpHelpers.DumpBytes(ExtraPayload, writer);
                writer.WriteLine();
            }
        }
    }
    /// <summary>
    /// An entry in an export section.
    /// </summary>
    public struct ExportedValue
    {
        /// <summary>
        /// Creates an exported value from the given name, kind and index.
        /// </summary>
        /// <param name="name">The name of the exported value.</param>
        /// <param name="kind">The kind of value that is exported.</param>
        /// <param name="index">The index into the index space for the value's kind.</param>
        public ExportedValue(string name, ExternalKind kind, uint index)
        {
            this.Name = name;
            this.Kind = kind;
            this.Index = index;
        }

        /// <summary>
        /// Gets the name of the exported value.
        /// </summary>
        /// <returns>The name of the exported value.</returns>
        public string Name { get; private set; }

        /// <summary>
        /// Gets the kind of value that is exported.
        /// </summary>
        /// <returns>The kind of value that is exported.</returns>
        public ExternalKind Kind { get; private set; }

        /// <summary>
        /// Gets the index into the index space for this value's kind.
        /// </summary>
        /// <returns>The index into the appropriate index space.</returns>
        public uint Index { get; private set; }

        /// <summary>
        /// Writes this exported value to the given WebAssembly file writer.
        /// </summary>
        /// <param name="writer">The WebAssembly file writer.</param>
        public void WriteTo(BinaryWasmWriter writer)
        {
            writer.WriteString(Name);
            // The kind is encoded as a single byte, followed by the index.
            writer.Writer.Write((byte)Kind);
            writer.WriteVarUInt32(Index);
        }

        /// <summary>
        /// Writes a textual representation of this exported value to the given writer.
        /// </summary>
        /// <param name="writer">The writer to which text is written.</param>
        public void Dump(TextWriter writer)
        {
            writer.Write("\"");
            writer.Write(Name);
            writer.Write("\", ");
            // Render the kind in lowercase, e.g. "function #3".
            writer.Write(((object)Kind).ToString().ToLower());
            writer.Write(" #");
            writer.Write(Index);
        }
    }
}
<|start_filename|>libwasm/Instructions/IfElseOperator.cs<|end_filename|>
using System.Collections.Generic;
using Wasm.Binary;
namespace Wasm.Instructions
{
    /// <summary>
    /// Describes an operator that runs one of two blocks
    /// </summary>
    public sealed class IfElseOperator : Operator
    {
        internal IfElseOperator(byte opCode, WasmType declaringType, string mnemonic)
            : base(opCode, declaringType, mnemonic)
        { }

        /// <summary>
        /// Reads the immediates (not the opcode) of a WebAssembly instruction
        /// for this operator from the given reader and returns the result as an
        /// instruction.
        /// </summary>
        /// <param name="reader">The WebAssembly file reader to read immediates from.</param>
        /// <returns>A WebAssembly instruction.</returns>
        public override Instruction ReadImmediates(BinaryWasmReader reader)
        {
            // The block's result type precedes its body.
            var type = reader.ReadWasmType();
            return ReadBlockContents(type, reader);
        }

        /// <summary>
        /// Reads the child instructions of a WebAssembly block from the given reader.
        /// </summary>
        /// <param name="blockType">The type of value returned by the resulting block.</param>
        /// <param name="reader">The WebAssembly file reader.</param>
        /// <returns>A WebAssembly block instruction.</returns>
        public static IfElseInstruction ReadBlockContents(WasmType blockType, BinaryWasmReader reader)
        {
            var ifBranch = new List<Instruction>();
            List<Instruction> elseBranch = null;
            while (true)
            {
                byte opCode = reader.ReadByte();
                if (opCode == Operators.EndOpCode)
                {
                    // 'end' terminates the whole if/else construct.
                    return new IfElseInstruction(blockType, ifBranch, elseBranch);
                }
                else if (opCode == Operators.ElseOpCode)
                {
                    // 'else' switches decoding to the else-branch; at most one
                    // is permitted per 'if'.
                    if (elseBranch != null)
                    {
                        throw new WasmException("More than one 'else' opcode in an 'if' instruction");
                    }

                    elseBranch = new List<Instruction>();
                }
                else
                {
                    // Any other opcode is a child instruction; it belongs to the
                    // else-branch once one has been opened, otherwise to the if-branch.
                    var op = Operators.GetOperatorByOpCode(opCode);
                    (elseBranch == null ? ifBranch : elseBranch).Add(op.ReadImmediates(reader));
                }
            }
        }

        /// <summary>
        /// Creates an if-else instruction from the given type, if-branch and
        /// else-branch.
        /// </summary>
        /// <param name="type">The type of value returned by the if-else instruction.</param>
        /// <param name="ifBranch">The if-else instruction's 'if' branch.</param>
        /// <param name="elseBranch">The if-else instruction's 'else' branch.</param>
        public IfElseInstruction Create(
            WasmType type,
            IEnumerable<Instruction> ifBranch,
            IEnumerable<Instruction> elseBranch)
        {
            return new IfElseInstruction(type, ifBranch, elseBranch);
        }

        /// <summary>
        /// Casts the given instruction to this operator's instruction type.
        /// </summary>
        /// <param name="value">The instruction to cast.</param>
        /// <returns>The given instruction as this operator's instruction type.</returns>
        public IfElseInstruction CastInstruction(Instruction value)
        {
            return (IfElseInstruction)value;
        }
    }
}
<|start_filename|>libwasm-text/ScriptRunner.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Pixie;
using Pixie.Markup;
using Wasm.Interpret;
namespace Wasm.Text
{
/// <summary>
/// Maintains state for and runs a single WebAssembly test script.
/// </summary>
public sealed class ScriptRunner
{
/// <summary>
/// Creates a new script runner.
/// </summary>
/// <param name="log">A log to send diagnostics to.</param>
/// <param name="compiler">A compiler to use for compiling modules.</param>
public ScriptRunner(ILog log, Func<ModuleCompiler> compiler = null)
{
this.Log = log;
this.Assembler = new Assembler(log);
this.Compiler = compiler;
this.moduleInstances = new List<ModuleInstance>();
this.moduleInstancesByName = new Dictionary<string, ModuleInstance>();
this.importer = new NamespacedImporter();
this.importer.RegisterImporter("spectest", new SpecTestImporter(new StringWriter()));
}
/// <summary>
/// Gets a log to which this script runner sends diagnostics.
/// </summary>
/// <value>A log.</value>
public ILog Log { get; private set; }
/// <summary>
/// Gets the assembler that assembles modules for this script runner.
/// </summary>
/// <value>A WebAssembly text format assembler.</value>
public Assembler Assembler { get; private set; }
/// <summary>
/// Gets the type of compiler to use.
/// </summary>
/// <value>A function that produces a module compiler.</value>
public Func<ModuleCompiler> Compiler { get; private set; }
private NamespacedImporter importer;
private List<ModuleInstance> moduleInstances;
private Dictionary<string, ModuleInstance> moduleInstancesByName;
/// <summary>
/// A data structure that tallies the number of tests that were run.
/// </summary>
public struct TestStatistics
{
/// <summary>
/// Initializes an instance of the <see cref="TestStatistics"/> type.
/// </summary>
/// <param name="successfulCommandCount">
/// The number of commands that were executed successfully.
/// </param>
/// <param name="failedCommandCount">
/// The number of commands that were executed unsuccessfully.
/// </param>
/// <param name="unknownCommandCount">
/// The number of command expressions that were skipped because they were not recognized as a known command.
/// </param>
public TestStatistics(int successfulCommandCount, int failedCommandCount, int unknownCommandCount)
{
this.SuccessfulCommandCount = successfulCommandCount;
this.FailedCommandCount = failedCommandCount;
this.UnknownCommandCount = unknownCommandCount;
}
/// <summary>
/// Gets the number of commands that were recognized and successfully executed.
/// </summary>
/// <value>The number of successfully executed commands.</value>
public int SuccessfulCommandCount { get; private set; }
/// <summary>
/// Gets the number of commands that were recognized and executed, but then errored in some way.
/// </summary>
/// <value>The number of commands that failed.</value>
public int FailedCommandCount { get; private set; }
/// <summary>
/// Gets the number of command expressions that could not be recognized as a
/// known command and were hence skipped.
/// </summary>
/// <value>The number of unrecognized commands.</value>
public int UnknownCommandCount { get; private set; }
/// <summary>
/// Gets the total number of commands expressions that were encountered.
/// </summary>
public int TotalCommandCount => SuccessfulCommandCount + FailedCommandCount + UnknownCommandCount;
/// <summary>
/// Computes the elementwise sum of two test statistics.
/// </summary>
/// <param name="left">A first set of test statistics.</param>
/// <param name="right">A second set of test statistics.</param>
/// <returns>The elementwise sum of <paramref name="left"/> and <paramref name="right"/>.</returns>
public static TestStatistics operator+(TestStatistics left, TestStatistics right)
{
return new TestStatistics(
left.SuccessfulCommandCount + right.SuccessfulCommandCount,
left.FailedCommandCount + right.FailedCommandCount,
left.UnknownCommandCount + right.UnknownCommandCount);
}
/// <summary>
/// An empty set of test statistics: the test statistics for a file that
/// does not execute any commands.
/// </summary>
public static readonly TestStatistics Empty = new TestStatistics(0, 0, 0);
/// <summary>
/// A set of test statistics that represent the successful execution of a single command.
/// </summary>
public static readonly TestStatistics SingleSuccess = new TestStatistics(1, 0, 0);
/// <summary>
/// A set of test statistics that represent a single failed command.
/// </summary>
public static readonly TestStatistics SingleFailure = new TestStatistics(0, 1, 0);
/// <summary>
/// A set of test statistics that represent a single unknown command.
/// </summary>
public static readonly TestStatistics SingleUnknown = new TestStatistics(0, 0, 1);
/// <inheritdoc/>
public override string ToString()
{
return $"total: {TotalCommandCount}, successes: {SuccessfulCommandCount}, " +
$"failures: {FailedCommandCount}, unknown: {UnknownCommandCount}";
}
}
/// <summary>
/// Runs a script, encoded as a sequence of expressions.
/// </summary>
/// <param name="expressions">The script, parsed as a sequence of expressions.</param>
public TestStatistics Run(IEnumerable<SExpression> expressions)
{
var results = TestStatistics.Empty;
foreach (var item in expressions)
{
results += Run(item);
}
return results;
}
/// <summary>
/// Runs a script, encoded as a sequence of tokens.
/// </summary>
/// <param name="tokens">The script, parsed as a sequence of tokens.</param>
public TestStatistics Run(IEnumerable<Lexer.Token> tokens)
{
return Run(Parser.ParseAsSExpressions(tokens, Log));
}
/// <summary>
/// Runs a script, encoded as a string.
/// </summary>
/// <param name="script">The text of the script to run.</param>
/// <param name="scriptName">The file name of the script to run.</param>
public TestStatistics Run(string script, string scriptName = "<string>")
{
return Run(Lexer.Tokenize(script, scriptName));
}
        /// <summary>
        /// Runs a single expression in the script.
        /// </summary>
        /// <param name="expression">The expression to run.</param>
        /// <returns>Statistics that describe the outcome of the command.</returns>
        public TestStatistics Run(SExpression expression)
        {
            if (expression.IsCallTo("module"))
            {
                // Assemble and instantiate the module. Instances are remembered so
                // later commands can refer to them, positionally (most recent) or,
                // if the module has an identifier, by name.
                var module = Assembler.AssembleModule(expression, out string moduleId);
                var instance = Wasm.Interpret.ModuleInstance.Instantiate(
                    module,
                    importer,
                    policy: ExecutionPolicy.Create(maxMemorySize: 0x1000),
                    compiler: Compiler);

                moduleInstances.Add(instance);
                if (moduleId != null)
                {
                    moduleInstancesByName[moduleId] = instance;
                }
                // Run the module's start function, if it declares one.
                if (module.StartFunctionIndex.HasValue)
                {
                    instance.Functions[(int)module.StartFunctionIndex.Value].Invoke(Array.Empty<object>());
                }
                return TestStatistics.Empty;
            }
            else if (expression.IsCallTo("register"))
            {
                // Expose a previously instantiated module's exports under 'name':
                // either the most recently instantiated module (no identifier given)
                // or the module with the given identifier.
                var tail = expression.Tail;
                var name = Assembler.AssembleString(tail[0], Log);
                tail = tail.Skip(1).ToArray();
                var moduleId = Assembler.AssembleLabelOrNull(ref tail);
                if (moduleId == null)
                {
                    importer.RegisterImporter(name, new ModuleExportsImporter(moduleInstances[moduleInstances.Count - 1]));
                }
                else
                {
                    importer.RegisterImporter(name, new ModuleExportsImporter(moduleInstancesByName[moduleId]));
                }
                return TestStatistics.Empty;
            }
            else if (expression.IsCallTo("invoke") || expression.IsCallTo("get"))
            {
                // Bare action: run it for its side effects only.
                RunAction(expression);
                return TestStatistics.SingleSuccess;
            }
            else if (expression.IsCallTo("assert_return"))
            {
                var results = RunAction(expression.Tail[0]);
                // Evaluate the expected-value expressions, typed by the actual results.
                // NOTE(review): Zip truncates to the shorter sequence, so if the script
                // specifies MORE expected values than the action produced, the length
                // check below cannot detect the mismatch -- confirm this is intended.
                var expected = expression.Tail
                    .Skip(1)
                    .Zip(results, (expr, val) => EvaluateConstExpr(expr, val.GetType()))
                    .ToArray();
                if (expected.Length != results.Count)
                {
                    Log.Log(
                        new LogEntry(
                            Severity.Error,
                            "assertion failed",
                            "action produced result ",
                            string.Join(", ", results),
                            "; expected ",
                            string.Join(", ", expected),
                            ".",
                            Assembler.Highlight(expression)));
                    return TestStatistics.SingleFailure;
                }

                bool failures = false;
                for (int i = 0; i < expected.Length; i++)
                {
                    if (!object.Equals(results[i], expected[i]))
                    {
                        // Values that differ by at most one floating-point
                        // representation step are warnings, not failures.
                        if (AlmostEquals(results[i], expected[i]))
                        {
                            Log.Log(
                                new LogEntry(
                                    Severity.Warning,
                                    "rounding error",
                                    "action produced result ",
                                    results[i].ToString(),
                                    "; expected ",
                                    expected[i].ToString(),
                                    ".",
                                    Assembler.Highlight(expression)));
                        }
                        else
                        {
                            Log.Log(
                                new LogEntry(
                                    Severity.Error,
                                    "assertion failed",
                                    "action produced result ",
                                    results[i].ToString(),
                                    "; expected ",
                                    expected[i].ToString(),
                                    ".",
                                    Assembler.Highlight(expression)));
                            failures = true;
                        }
                    }
                }
                return failures ? TestStatistics.SingleFailure : TestStatistics.SingleSuccess;
            }
            else if (expression.IsCallTo("assert_trap") || expression.IsCallTo("assert_exhaustion"))
            {
                var expected = Assembler.AssembleString(expression.Tail[1], Log);
                bool caught = false;
                Exception exception = null;
                try
                {
                    // The asserted expression may be either a module definition
                    // (instantiation must trap) or an action.
                    if (expression.Tail[0].IsCallTo("module"))
                    {
                        Run(expression.Tail[0]);
                    }
                    else
                    {
                        RunAction(expression.Tail[0], false);
                    }
                }
                catch (TrapException ex)
                {
                    // Only a trap whose spec message matches the expected one counts.
                    caught = ex.SpecMessage == expected;
                    exception = ex;
                }
                catch (PixieException)
                {
                    // Diagnostics infrastructure errors are not traps; let them escape.
                    throw;
                }
                catch (Exception ex)
                {
                    caught = false;
                    exception = ex;
                }

                if (caught)
                {
                    return TestStatistics.SingleSuccess;
                }
                else
                {
                    if (exception == null)
                    {
                        Log.Log(
                            new LogEntry(
                                Severity.Error,
                                "assertion failed",
                                "action should have trapped, but didn't.",
                                Assembler.Highlight(expression)));
                    }
                    else
                    {
                        Log.Log(
                            new LogEntry(
                                Severity.Error,
                                "assertion failed",
                                "action trapped as expected, but with an unexpected exception. ",
                                exception.ToString(),
                                Assembler.Highlight(expression)));
                    }
                    return TestStatistics.SingleFailure;
                }
            }
            else if (expression.IsCallTo("assert_return_canonical_nan"))
            {
                var results = RunAction(expression.Tail[0]);
                bool isCanonicalNaN;
                if (results.Count != 1)
                {
                    Log.Log(
                        new LogEntry(
                            Severity.Error,
                            "assertion failed",
                            "action produced ",
                            results.Count.ToString(),
                            " results (",
                            string.Join(", ", results),
                            "); expected a single canonical NaN.",
                            Assembler.Highlight(expression)));
                    return TestStatistics.SingleFailure;
                }
                else if (results[0] is double)
                {
                    // Compare bit patterns against the canonical NaN with either sign bit.
                    var val = Interpret.ValueHelpers.ReinterpretAsInt64((double)results[0]);
                    isCanonicalNaN = val == Interpret.ValueHelpers.ReinterpretAsInt64((double)FloatLiteral.NaN(false))
                        || val == Interpret.ValueHelpers.ReinterpretAsInt64((double)FloatLiteral.NaN(true));
                }
                else if (results[0] is float)
                {
                    var val = Interpret.ValueHelpers.ReinterpretAsInt32((float)results[0]);
                    isCanonicalNaN = val == Interpret.ValueHelpers.ReinterpretAsInt32((float)FloatLiteral.NaN(false))
                        || val == Interpret.ValueHelpers.ReinterpretAsInt32((float)FloatLiteral.NaN(true));
                }
                else
                {
                    // Non-floating-point results can never be NaNs.
                    isCanonicalNaN = false;
                }
                if (isCanonicalNaN)
                {
                    return TestStatistics.SingleSuccess;
                }
                else
                {
                    Log.Log(
                        new LogEntry(
                            Severity.Error,
                            "assertion failed",
                            "action produced ",
                            results[0].ToString(),
                            "; expected a single canonical NaN.",
                            Assembler.Highlight(expression)));
                    return TestStatistics.SingleFailure;
                }
            }
            else if (expression.IsCallTo("assert_return_arithmetic_nan"))
            {
                var results = RunAction(expression.Tail[0]);
                bool isNaN;
                if (results.Count != 1)
                {
                    Log.Log(
                        new LogEntry(
                            Severity.Error,
                            "assertion failed",
                            "action produced ",
                            results.Count.ToString(),
                            " results (",
                            string.Join(", ", results),
                            "); expected a single NaN.",
                            Assembler.Highlight(expression)));
                    return TestStatistics.SingleFailure;
                }
                else if (results[0] is double)
                {
                    // Any NaN bit pattern qualifies as an arithmetic NaN here.
                    isNaN = double.IsNaN((double)results[0]);
                }
                else if (results[0] is float)
                {
                    isNaN = float.IsNaN((float)results[0]);
                }
                else
                {
                    isNaN = false;
                }
                if (isNaN)
                {
                    return TestStatistics.SingleSuccess;
                }
                else
                {
                    Log.Log(
                        new LogEntry(
                            Severity.Error,
                            "assertion failed",
                            "action produced ",
                            results[0].ToString(),
                            "; expected a single NaN.",
                            Assembler.Highlight(expression)));
                    return TestStatistics.SingleFailure;
                }
            }
            else
            {
                // Unrecognized commands are reported as warnings, not failures.
                Log.Log(
                    new LogEntry(
                        Severity.Warning,
                        "unknown script command",
                        Quotation.QuoteEvenInBold(
                            "expression ",
                            expression.Head.Span.Text,
                            " was not recognized as a known script command."),
                        Assembler.Highlight(expression)));
                return TestStatistics.SingleUnknown;
            }
        }
private static bool AlmostEquals(object value, object expected)
{
if (value is float && expected is float)
{
return AlmostEquals((float)value, (float)expected, 1);
}
else if (value is double && expected is double)
{
return AlmostEquals((double)value, (double)expected, 1);
}
else
{
return false;
}
}
private static bool AlmostEquals(double left, double right, long representationTolerance)
{
// Approximate comparison code suggested by <NAME> on StackOverflow
// (https://stackoverflow.com/questions/10419771/comparing-doubles-with-adaptive-approximately-equal).
long leftAsBits = ToBitsTwosComplement(left);
long rightAsBits = ToBitsTwosComplement(right);
long floatingPointRepresentationsDiff = Math.Abs(leftAsBits - rightAsBits);
return (floatingPointRepresentationsDiff <= representationTolerance);
}
private static long ToBitsTwosComplement(double value)
{
// Approximate comparison code suggested by <NAME> on StackOverflow
// (https://stackoverflow.com/questions/10419771/comparing-doubles-with-adaptive-approximately-equal).
long valueAsLong = Interpret.ValueHelpers.ReinterpretAsInt64(value);
return valueAsLong < 0
? (long)(0x8000000000000000 - (ulong)valueAsLong)
: valueAsLong;
}
private static bool AlmostEquals(float left, float right, int representationTolerance)
{
// Approximate comparison code suggested by <NAME> on StackOverflow
// (https://stackoverflow.com/questions/10419771/comparing-doubles-with-adaptive-approximately-equal).
long leftAsBits = ToBitsTwosComplement(left);
long rightAsBits = ToBitsTwosComplement(right);
long floatingPointRepresentationsDiff = Math.Abs(leftAsBits - rightAsBits);
return (floatingPointRepresentationsDiff <= representationTolerance);
}
private static int ToBitsTwosComplement(float value)
{
// Approximate comparison code suggested by <NAME> on StackOverflow
// (https://stackoverflow.com/questions/10419771/comparing-doubles-with-adaptive-approximately-equal).
int valueAsInt = Interpret.ValueHelpers.ReinterpretAsInt32(value);
return valueAsInt < 0
? (int)(0x80000000 - (int)valueAsInt)
: valueAsInt;
}
private object EvaluateConstExpr(SExpression expression, WasmValueType resultType)
{
var anonModule = new WasmFile();
var instructions = Assembler.AssembleInstructionExpression(expression, anonModule);
var inst = ModuleInstance.Instantiate(anonModule, new SpecTestImporter());
return inst.Evaluate(new InitializerExpression(instructions), resultType);
}
private object EvaluateConstExpr(SExpression expression, Type resultType)
{
return EvaluateConstExpr(expression, ValueHelpers.ToWasmValueType(resultType));
}
        /// <summary>
        /// Runs an action expression: an 'invoke' that calls an exported function
        /// or a 'get' that reads an exported global.
        /// </summary>
        /// <param name="expression">The action expression to run.</param>
        /// <param name="reportExceptions">
        /// Tells if exceptions thrown by invoked functions should be logged;
        /// they are rethrown regardless.
        /// </param>
        /// <returns>The values produced by the action; empty on error.</returns>
        private IReadOnlyList<object> RunAction(SExpression expression, bool reportExceptions = true)
        {
            if (expression.IsCallTo("invoke"))
            {
                var tail = expression.Tail;
                var moduleId = Assembler.AssembleLabelOrNull(ref tail);
                var name = Assembler.AssembleString(tail[0], Log);
                var args = tail.Skip(1);
                if (moduleId == null)
                {
                    // No module identifier: search instantiated modules,
                    // most recent first.
                    foreach (var inst in Enumerable.Reverse(moduleInstances))
                    {
                        if (TryInvokeNamedFunction(inst, name, args, expression, out IReadOnlyList<object> results, reportExceptions))
                        {
                            return results;
                        }
                    }

                    Log.Log(
                        new LogEntry(
                            Severity.Error,
                            "undefined function",
                            Quotation.QuoteEvenInBold(
                                "no function named ",
                                name,
                                " is defined here."),
                            Assembler.Highlight(expression)));
                    return Array.Empty<object>();
                }
                else
                {
                    // Module identifier given: only that module is searched.
                    if (moduleInstancesByName.TryGetValue(moduleId, out ModuleInstance inst))
                    {
                        if (TryInvokeNamedFunction(inst, name, args, expression, out IReadOnlyList<object> results, reportExceptions))
                        {
                            return results;
                        }
                        else
                        {
                            Log.Log(
                                new LogEntry(
                                    Severity.Error,
                                    "undefined function",
                                    Quotation.QuoteEvenInBold(
                                        "no function named ",
                                        name,
                                        " is defined in module ",
                                        moduleId,
                                        "."),
                                    Assembler.Highlight(expression)));
                            return Array.Empty<object>();
                        }
                    }
                    else
                    {
                        Log.Log(
                            new LogEntry(
                                Severity.Error,
                                "undefined module",
                                Quotation.QuoteEvenInBold(
                                    "no module named ",
                                    moduleId,
                                    " is defined here."),
                                Assembler.Highlight(expression)));
                        return Array.Empty<object>();
                    }
                }
            }
            else if (expression.IsCallTo("get"))
            {
                var tail = expression.Tail;
                var moduleId = Assembler.AssembleLabelOrNull(ref tail);
                var name = Assembler.AssembleString(tail[0], Log);
                if (moduleId == null)
                {
                    // NOTE(review): unlike 'invoke' above, this searches modules in
                    // instantiation order rather than most-recent-first -- confirm
                    // the asymmetry is intended.
                    foreach (var inst in moduleInstances)
                    {
                        if (inst.ExportedGlobals.TryGetValue(name, out Variable def))
                        {
                            return new[] { def.Get<object>() };
                        }
                    }

                    Log.Log(
                        new LogEntry(
                            Severity.Error,
                            "undefined global",
                            Quotation.QuoteEvenInBold(
                                "no global named ",
                                name,
                                " is defined here."),
                            Assembler.Highlight(expression)));
                    return Array.Empty<object>();
                }
                else
                {
                    if (moduleInstancesByName.TryGetValue(moduleId, out ModuleInstance inst))
                    {
                        if (inst.ExportedGlobals.TryGetValue(name, out Variable def))
                        {
                            return new[] { def.Get<object>() };
                        }
                        else
                        {
                            Log.Log(
                                new LogEntry(
                                    Severity.Error,
                                    "undefined global",
                                    Quotation.QuoteEvenInBold(
                                        "no global named ",
                                        name,
                                        " is defined in module ",
                                        moduleId,
                                        "."),
                                    Assembler.Highlight(expression)));
                            return Array.Empty<object>();
                        }
                    }
                    else
                    {
                        Log.Log(
                            new LogEntry(
                                Severity.Error,
                                "undefined module",
                                Quotation.QuoteEvenInBold(
                                    "no module named ",
                                    moduleId,
                                    " is defined here."),
                                Assembler.Highlight(expression)));
                        return Array.Empty<object>();
                    }
                }
            }
            else
            {
                // Neither 'invoke' nor 'get': not a recognized action.
                Log.Log(
                    new LogEntry(
                        Severity.Error,
                        "unknown action",
                        Quotation.QuoteEvenInBold(
                            "expression ",
                            expression.Head.Span.Text,
                            " was not recognized as a known script action."),
                        Assembler.Highlight(expression)));
                return Array.Empty<object>();
            }
        }
private bool TryInvokeNamedFunction(
ModuleInstance instance,
string name,
IEnumerable<SExpression> argumentExpressions,
SExpression expression,
out IReadOnlyList<object> results,
bool reportExceptions = true)
{
if (instance.ExportedFunctions.TryGetValue(name, out FunctionDefinition def))
{
var args = argumentExpressions
.Zip(def.ParameterTypes, (expr, type) => EvaluateConstExpr(expr, type))
.ToArray();
try
{
results = def.Invoke(args);
return true;
}
catch (Exception ex)
{
if (reportExceptions)
{
Log.Log(
new LogEntry(
Severity.Error,
"unhandled exception",
$"function invocation threw {ex.GetType().Name}",
new Paragraph(ex.ToString()),
Assembler.Highlight(expression)));
}
throw;
}
}
else
{
results = null;
return false;
}
}
}
}
<|start_filename|>libwasm/Interpret/ModuleExportsImporter.cs<|end_filename|>
using Wasm.Optimize;
namespace Wasm.Interpret
{
/// <summary>
/// An importer that imports a module instance's exported values.
/// </summary>
public sealed class ModuleExportsImporter : IImporter
{
/// <summary>
/// Creates an importer for a module's exports.
/// </summary>
/// <param name="module">A module whose exports are imported by the resulting importer.</param>
public ModuleExportsImporter(ModuleInstance module)
{
this.Module = module;
}
/// <summary>
/// Gets the module instance whose exported values are imported by this importer.
/// </summary>
/// <value>A module instance.</value>
public ModuleInstance Module { get; private set; }
/// <inheritdoc/>
public FunctionDefinition ImportFunction(ImportedFunction description, FunctionType signature)
{
if (Module.ExportedFunctions.TryGetValue(description.FieldName, out FunctionDefinition result)
&& ConstFunctionTypeComparer.Instance.Equals(signature, new FunctionType(result.ParameterTypes, result.ReturnTypes)))
{
return result;
}
else
{
return null;
}
}
/// <inheritdoc/>
public Variable ImportGlobal(ImportedGlobal description)
{
if (Module.ExportedGlobals.TryGetValue(description.FieldName, out Variable result)
&& description.Global.ContentType == result.Type
&& description.Global.IsMutable == result.IsMutable)
{
return result;
}
else
{
return null;
}
}
/// <inheritdoc/>
public LinearMemory ImportMemory(ImportedMemory description)
{
if (Module.ExportedMemories.TryGetValue(description.FieldName, out LinearMemory result)
&& result.Limits.Initial >= description.Memory.Limits.Initial)
{
return result;
}
else
{
return null;
}
}
/// <inheritdoc/>
public FunctionTable ImportTable(ImportedTable description)
{
if (Module.ExportedTables.TryGetValue(description.FieldName, out FunctionTable result)
&& result.Limits.Initial >= description.Table.Limits.Initial)
{
return result;
}
else
{
return null;
}
}
}
}
<|start_filename|>unit-tests/Text/ParserTests.cs<|end_filename|>
using System.Linq;
using Loyc.MiniTest;
using Pixie;
namespace Wasm.Text
{
[TestFixture]
public class ParserTests
{
[Test]
public void ParseSExpressions()
{
var exprWithoutTail = ParseSingleSExpression("module");
Assert.IsFalse(exprWithoutTail.IsCall);
Assert.AreEqual(Lexer.TokenKind.Keyword, exprWithoutTail.Head.Kind);
Assert.AreEqual(0, exprWithoutTail.Tail.Count);
var exprWithEmptyTail = ParseSingleSExpression("(module)");
Assert.IsTrue(exprWithEmptyTail.IsCall);
Assert.AreEqual(Lexer.TokenKind.Keyword, exprWithoutTail.Head.Kind);
Assert.AreEqual(0, exprWithEmptyTail.Tail.Count);
var exprWithNonEmptyTail = ParseSingleSExpression("(module 10 2e4)");
Assert.IsTrue(exprWithNonEmptyTail.IsCall);
Assert.AreEqual(Lexer.TokenKind.Keyword, exprWithNonEmptyTail.Head.Kind);
Assert.AreEqual(2, exprWithNonEmptyTail.Tail.Count);
Assert.AreEqual(Lexer.TokenKind.UnsignedInteger, exprWithNonEmptyTail.Tail[0].Head.Kind);
Assert.AreEqual(Lexer.TokenKind.Float, exprWithNonEmptyTail.Tail[1].Head.Kind);
var nestedExpr = ParseSingleSExpression("(module (limits 10 20))");
Assert.IsTrue(nestedExpr.IsCall);
Assert.AreEqual(Lexer.TokenKind.Keyword, nestedExpr.Head.Kind);
Assert.AreEqual(1, nestedExpr.Tail.Count);
Assert.IsTrue(nestedExpr.Tail[0].IsCall);
Assert.AreEqual(Lexer.TokenKind.Keyword, nestedExpr.Tail[0].Head.Kind);
Assert.IsTrue(nestedExpr.Tail[0].IsCall);
Assert.AreEqual(Lexer.TokenKind.UnsignedInteger, nestedExpr.Tail[0].Tail[0].Head.Kind);
Assert.AreEqual(Lexer.TokenKind.UnsignedInteger, nestedExpr.Tail[0].Tail[1].Head.Kind);
}
private SExpression ParseSingleSExpression(string text)
{
var tokens = Lexer.Tokenize(text).ToArray();
var log = new TestLog(new[] { Severity.Error }, NullLog.Instance);
return Parser.ParseAsSExpressions(tokens, log).Single();
}
}
}
<|start_filename|>unit-tests/Interpret/LinearMemoryTests.cs<|end_filename|>
using System;
using Loyc.MiniTest;
namespace Wasm.Interpret
{
[TestFixture]
public class LinearMemoryTests
{
[Test]
public void GrowMemory()
{
var limits = new ResizableLimits(1, 2);
var memory = new LinearMemory(limits);
Assert.AreEqual(1, memory.Size);
Assert.AreEqual(1, memory.Grow(1));
Assert.AreEqual(2, memory.Size);
Assert.AreEqual(-1, memory.Grow(1));
Assert.AreEqual(2, memory.Size);
}
[Test]
public void RoundTripInt8()
{
var limits = new ResizableLimits(1, 2);
var memory = new LinearMemory(limits);
uint offset = MemoryType.PageSize / 2;
sbyte data = 0x7F;
var int8Mem = memory.Int8;
int8Mem[offset] = data;
Assert.AreEqual((int)data, (int)int8Mem[offset]);
}
[Test]
public void RoundTripInt16()
{
var limits = new ResizableLimits(1, 2);
var memory = new LinearMemory(limits);
uint offset = MemoryType.PageSize / 2;
short data = 0x1F2E;
var int16Mem = memory.Int16;
int16Mem[offset] = data;
Assert.AreEqual((int)data, (int)int16Mem[offset]);
}
[Test]
public void RoundTripInt32()
{
var limits = new ResizableLimits(1, 2);
var memory = new LinearMemory(limits);
uint offset = MemoryType.PageSize / 2;
int data = 0x1F2E3D4C;
var int32Mem = memory.Int32;
int32Mem[offset] = data;
Assert.AreEqual((int)data, (int)int32Mem[offset]);
}
[Test]
public void RoundTripInt64()
{
var limits = new ResizableLimits(1, 2);
var memory = new LinearMemory(limits);
uint offset = MemoryType.PageSize / 2;
long data = 0x1F2E3D4C5B6A7988;
var int64Mem = memory.Int64;
int64Mem[offset] = data;
Assert.AreEqual((long)data, (long)int64Mem[offset]);
}
}
}
<|start_filename|>libwasm/Instructions/BrTableOperator.cs<|end_filename|>
using System.Collections.Generic;
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes an operator that begins a break table.
/// </summary>
public sealed class BrTableOperator : Operator
{
/// <summary>
/// Creates a break table operator.
/// </summary>
/// <param name="opCode">The operator's opcode.</param>
/// <param name="declaringType">A type that defines the operator, if any.</param>
/// <param name="mnemonic">The operator's mnemonic.</param>
public BrTableOperator(byte opCode, WasmType declaringType, string mnemonic)
: base(opCode, declaringType, mnemonic)
{ }
/// <summary>
/// Reads the immediates (not the opcode) of a WebAssembly instruction
/// for this operator from the given reader and returns the result as an
/// instruction.
/// </summary>
/// <param name="reader">The WebAssembly file reader to read immediates from.</param>
/// <returns>A WebAssembly instruction.</returns>
public override Instruction ReadImmediates(BinaryWasmReader reader)
{
uint tableSize = reader.ReadVarUInt32();
var tableEntries = new List<uint>((int)tableSize);
for (uint i = 0; i < tableSize; i++)
{
tableEntries.Add(reader.ReadVarUInt32());
}
uint defaultEntry = reader.ReadVarUInt32();
return Create(tableEntries, defaultEntry);
}
/// <summary>
/// Creates a break table instruction from this operator, a table of
/// break targets and a default target.
/// </summary>
/// <param name="targetTable">
/// A table of target entries that indicate an outer block or loop to which to break.
/// </param>
/// <param name="defaultTarget">
/// The default target: an outer block or loop to which to break in the default case.
/// </param>
public BrTableInstruction Create(IEnumerable<uint> targetTable, uint defaultTarget)
{
return new BrTableInstruction(this, targetTable, defaultTarget);
}
/// <summary>
/// Casts the given instruction to this operator's instruction type.
/// </summary>
/// <param name="value">The instruction to cast.</param>
/// <returns>The given instruction as this operator's instruction type.</returns>
public BrTableInstruction CastInstruction(Instruction value)
{
return (BrTableInstruction)value;
}
}
}
<|start_filename|>libwasm-text/Assembler.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Numerics;
using System.Text;
using Pixie;
using Pixie.Code;
using Pixie.Markup;
using Wasm.Instructions;
using Wasm.Optimize;
namespace Wasm.Text
{
/// <summary>
/// An assembler for the WebAssembly text format. Converts parsed WebAssembly text format
/// modules to in-memory WebAssembly binary format modules.
/// </summary>
public sealed class Assembler
{
        /// <summary>
        /// Creates a WebAssembly assembler.
        /// </summary>
        /// <param name="log">A log to send diagnostics to.</param>
        /// <remarks>
        /// The assembler is configured with the default module field and plain
        /// instruction assembler tables.
        /// </remarks>
        public Assembler(ILog log)
            : this(log, DefaultModuleFieldAssemblers, DefaultPlainInstructionAssemblers)
        { }
/// <summary>
/// Creates a WebAssembly assembler.
/// </summary>
/// <param name="log">A log to send diagnostics to.</param>
/// <param name="moduleFieldAssemblers">
/// A mapping of module field keywords to module field assemblers.
/// </param>
/// <param name="plainInstructionAssemblers">
/// A mapping of instruction keywords to instruction assemblers.
/// </param>
public Assembler(
ILog log,
IReadOnlyDictionary<string, ModuleFieldAssembler> moduleFieldAssemblers,
IReadOnlyDictionary<string, PlainInstructionAssembler> plainInstructionAssemblers)
{
this.Log = log;
this.ModuleFieldAssemblers = moduleFieldAssemblers;
this.PlainInstructionAssemblers = plainInstructionAssemblers;
}
        /// <summary>
        /// Gets the log that is used for reporting diagnostics.
        /// </summary>
        /// <value>A log.</value>
        public ILog Log { get; private set; }

        /// <summary>
        /// Gets the module field assemblers this assembler uses to process
        /// module fields.
        /// </summary>
        /// <value>A mapping of module field keywords to module field assemblers.</value>
        public IReadOnlyDictionary<string, ModuleFieldAssembler> ModuleFieldAssemblers { get; private set; }

        /// <summary>
        /// Gets the plain instruction assemblers this assembler uses to
        /// assemble instructions.
        /// </summary>
        /// <value>A mapping of instruction keywords to plain instruction assemblers.</value>
        public IReadOnlyDictionary<string, PlainInstructionAssembler> PlainInstructionAssemblers { get; private set; }
/// <summary>
/// Assembles an S-expression representing a module into a WebAssembly module.
/// </summary>
/// <param name="expression">The expression to assemble.</param>
/// <returns>An assembled module.</returns>
public WasmFile AssembleModule(SExpression expression)
{
return AssembleModule(expression, out string moduleId);
}
        /// <summary>
        /// Assembles an S-expression representing a module into a WebAssembly module.
        /// </summary>
        /// <param name="expression">The expression to assemble.</param>
        /// <param name="moduleIdOrNull">
        /// The module's identifier if one is assigned to the module; otherwise, <c>null</c>.
        /// </param>
        /// <returns>An assembled module.</returns>
        public WasmFile AssembleModule(SExpression expression, out string moduleIdOrNull)
        {
            if (!expression.IsCallTo("module"))
            {
                // Log the error but keep going; assembly proceeds on a best-effort basis.
                Log.Log(
                    new LogEntry(
                        Severity.Error,
                        "syntax error",
                        Quotation.QuoteEvenInBold(
                            "top-level modules must be encoded as S-expressions that call ",
                            "module",
                            "."),
                        Highlight(expression)));
            }

            // Parse the module's label, if it has one.
            var fields = expression.Tail;
            moduleIdOrNull = AssembleLabelOrNull(ref fields);

            if (fields.Count > 0 && fields[0].IsSpecificKeyword("binary"))
            {
                // We encountered a binary module: the remaining fields are data
                // strings that together form a binary-encoded module.
                fields = fields.Skip(1).ToArray();
                var data = AssembleDataString(fields, Log);
                using (var stream = new MemoryStream(data))
                {
                    return WasmFile.ReadBinary(stream);
                }
            }

            var file = new WasmFile();
            if (moduleIdOrNull != null)
            {
                // We encountered a module name. Turn it into a name entry and then skip it
                // for the purpose of module field analysis.
                file.ModuleName = moduleIdOrNull;
            }

            // First scan ahead for types in the module, so fields that reference
            // types can resolve them regardless of declaration order.
            var context = new ModuleContext(this);
            var nonTypeFields = new List<SExpression>();
            foreach (var field in fields)
            {
                if (field.IsCallTo("type"))
                {
                    ModuleFieldAssemblers["type"](field, file, context);
                }
                else
                {
                    nonTypeFields.Add(field);
                }
            }

            // Now assemble the module's other fields.
            foreach (var field in nonTypeFields)
            {
                ModuleFieldAssembler fieldAssembler;
                if (!field.IsCall)
                {
                    Log.Log(
                        new LogEntry(
                            Severity.Error,
                            "syntax error",
                            "unexpected token; expected a module field.",
                            Highlight(expression)));
                }
                else if (ModuleFieldAssemblers.TryGetValue((string)field.Head.Value, out fieldAssembler))
                {
                    fieldAssembler(field, file, context);
                }
                else
                {
                    Log.Log(
                        new LogEntry(
                            Severity.Error,
                            "syntax error",
                            Quotation.QuoteEvenInBold(
                                "unexpected module field type ",
                                (string)field.Head.Value,
                                "."),
                            Highlight(expression)));
                }
            }
            // Turn deferred identifier references into concrete indices.
            context.ResolveIdentifiers(file);
            return file;
        }
/// <summary>
/// Assembles an S-expression representing a module into a WebAssembly module.
/// </summary>
/// <param name="tokens">A stream of tokens to parse and assemble.</param>
/// <returns>An assembled module.</returns>
public WasmFile AssembleModule(IEnumerable<Lexer.Token> tokens)
{
var exprs = Parser.ParseAsSExpressions(tokens, Log);
if (exprs.Count == 0)
{
Log.Log(
new LogEntry(
Severity.Error,
"nothing to assemble",
"input stream contains no S-expression that can be assembled into a module."));
return new WasmFile();
}
else if (exprs.Count != 1)
{
Log.Log(
new LogEntry(
Severity.Error,
"multiple modules",
"input stream contains more than one S-expression to assemble into a module; expected just one.",
Highlight(exprs[1])));
}
return AssembleModule(exprs[0]);
}
/// <summary>
/// Assembles an S-expression representing a module into a WebAssembly module.
/// </summary>
/// <param name="document">A document to parse and assemble.</param>
/// <param name="fileName">The name of the file in which <paramref name="document"/> is saved.</param>
/// <returns>An assembled module.</returns>
public WasmFile AssembleModule(string document, string fileName = "<string>")
{
return AssembleModule(Lexer.Tokenize(document, fileName));
}
/// <summary>
/// Assembles a top-level expression-style instruction.
/// </summary>
/// <param name="expression">The expression-style instruction to assemble.</param>
/// <param name="module">The module to which the expression-style instruction is scoped.</param>
/// <returns>A list of assembled instructions.</returns>
public IReadOnlyList<Instruction> AssembleInstructionExpression(SExpression expression, WasmFile module)
{
var context = new InstructionContext(
new Dictionary<string, uint>(),
new ModuleContext(this),
module);
return AssembleExpressionInstruction(expression, context);
}
internal static HighlightedSource Highlight(Lexer.Token expression)
{
return new HighlightedSource(new SourceRegion(expression.Span));
}
internal static HighlightedSource Highlight(SExpression expression)
{
return Highlight(expression.Head);
}
        /// <summary>
        /// A type for module field assemblers.
        /// </summary>
        /// <param name="moduleField">A module field to assemble.</param>
        /// <param name="module">The module that is being assembled.</param>
        /// <param name="context">The module's assembly context.</param>
        public delegate void ModuleFieldAssembler(
            SExpression moduleField,
            WasmFile module,
            ModuleContext context);

        /// <summary>
        /// A type for plain instruction assemblers.
        /// </summary>
        /// <param name="keyword">The keyword expression that names the instruction.</param>
        /// <param name="operands">
        /// A nonempty list of S-expressions that represent instruction operands to assemble.
        /// Passed by reference so the assembler can consume the operands it uses.
        /// </param>
        /// <param name="context">The module's assembly context.</param>
        /// <returns>An assembled instruction.</returns>
        public delegate Instruction PlainInstructionAssembler(
            SExpression keyword,
            ref IReadOnlyList<SExpression> operands,
            InstructionContext context);
/// <summary>
/// Context that is used when assembling a module.
/// </summary>
public sealed class ModuleContext
{
/// <summary>
/// Creates a module context.
/// </summary>
/// <param name="assembler">The assembler that gives rise to this conetxt.</param>
public ModuleContext(Assembler assembler)
{
this.Assembler = assembler;
this.MemoryContext = IdentifierContext<MemoryType>.Create();
this.FunctionContext = IdentifierContext<LocalOrImportRef>.Create();
this.GlobalContext = IdentifierContext<LocalOrImportRef>.Create();
this.TableContext = IdentifierContext<LocalOrImportRef>.Create();
this.TypeContext = IdentifierContext<uint>.Create();
}
/// <summary>
/// Gets the identifier context for the module's memories.
/// </summary>
/// <value>An identifier context.</value>
public IdentifierContext<MemoryType> MemoryContext { get; private set; }
/// <summary>
/// Gets the identifier context for the module's functions.
/// </summary>
/// <value>An identifier context.</value>
public IdentifierContext<LocalOrImportRef> FunctionContext { get; private set; }
/// <summary>
/// Gets the identifier context for the module's globals.
/// </summary>
/// <value>An identifier context.</value>
public IdentifierContext<LocalOrImportRef> GlobalContext { get; private set; }
/// <summary>
/// Gets the identifier context for the module's tables.
/// </summary>
/// <value>An identifier context.</value>
public IdentifierContext<LocalOrImportRef> TableContext { get; private set; }
/// <summary>
/// Gets the identifier context for the module's types.
/// </summary>
/// <value>An identifier context.</value>
public IdentifierContext<uint> TypeContext { get; private set;}
/// <summary>
/// Gets the assembler that gives rise to this context.
/// </summary>
/// <value>An assembler.</value>
public Assembler Assembler { get; private set; }
/// <summary>
/// Gets the log used by the assembler and, by extension, this context.
/// </summary>
public ILog Log => Assembler.Log;
/// <summary>
/// Resolves any pending references in the module.
/// </summary>
/// <param name="module">The module for which this context was created.</param>
public void ResolveIdentifiers(WasmFile module)
{
var importSection = module.GetFirstSectionOrNull<ImportSection>() ?? new ImportSection();
var memorySection = module.GetFirstSectionOrNull<MemorySection>() ?? new MemorySection();
var functionSection = module.GetFirstSectionOrNull<FunctionSection>() ?? new FunctionSection();
var globalSection = module.GetFirstSectionOrNull<GlobalSection>() ?? new GlobalSection();
var tableSection = module.GetFirstSectionOrNull<TableSection>() ?? new TableSection();
var memoryIndices = new Dictionary<MemoryType, uint>();
var functionIndices = new Dictionary<LocalOrImportRef, uint>();
var globalIndices = new Dictionary<LocalOrImportRef, uint>();
var tableIndices = new Dictionary<LocalOrImportRef, uint>();
for (int i = 0; i < importSection.Imports.Count; i++)
{
var import = importSection.Imports[i];
if (import is ImportedMemory importedMemory)
{
memoryIndices[importedMemory.Memory] = (uint)memoryIndices.Count;
}
else if (import is ImportedFunction importedFunction)
{
functionIndices[new LocalOrImportRef(true, (uint)i)] = (uint)functionIndices.Count;
}
else if (import is ImportedGlobal importedGlobal)
{
globalIndices[new LocalOrImportRef(true, (uint)i)] = (uint)globalIndices.Count;
}
else if (import is ImportedTable importedTable)
{
tableIndices[new LocalOrImportRef(true, (uint)i)] = (uint)tableIndices.Count;
}
}
foreach (var memory in memorySection.Memories)
{
memoryIndices[memory] = (uint)memoryIndices.Count;
}
for (int i = 0; i < functionSection.FunctionTypes.Count; i++)
{
functionIndices[new LocalOrImportRef(false, (uint)i)] = (uint)functionIndices.Count;
}
for (int i = 0; i < globalSection.GlobalVariables.Count; i++)
{
globalIndices[new LocalOrImportRef(false, (uint)i)] = (uint)globalIndices.Count;
}
for (int i = 0; i < tableSection.Tables.Count; i++)
{
tableIndices[new LocalOrImportRef(false, (uint)i)] = (uint)tableIndices.Count;
}
// Resolve memory identifiers.
MemoryContext.ResolveAll(
Assembler.Log,
mem => memoryIndices[mem]);
// Resolve function identifiers.
FunctionContext.ResolveAll(
Assembler.Log,
func => functionIndices[func]);
// Resolve global identifiers.
GlobalContext.ResolveAll(
Assembler.Log,
global => globalIndices[global]);
// Resolve table identifiers.
TableContext.ResolveAll(
Assembler.Log,
table => tableIndices[table]);
// Resolve type identifiers.
TypeContext.ResolveAll(
Assembler.Log,
index => index);
}
}
        /// <summary>
        /// Context that is used when assembling an instruction.
        /// </summary>
        public sealed class InstructionContext
        {
            // Shared backing constructor: both public constructors funnel into this.
            private InstructionContext(
                IReadOnlyDictionary<string, uint> namedLocalIndices,
                string labelOrNull,
                ModuleContext moduleContext,
                WasmFile module,
                InstructionContext parent)
            {
                this.NamedLocalIndices = namedLocalIndices;
                this.LabelOrNull = labelOrNull;
                this.ModuleContext = moduleContext;
                this.Module = module;
                this.ParentOrNull = parent;
            }

            /// <summary>
            /// Creates a top-level instruction context.
            /// </summary>
            /// <param name="namedLocalIndices">The instruction context's named local indices.</param>
            /// <param name="moduleContext">A context for the module that analyzes the instruction.</param>
            /// <param name="module">The module that analyzes the instruction.</param>
            public InstructionContext(
                IReadOnlyDictionary<string, uint> namedLocalIndices,
                ModuleContext moduleContext,
                WasmFile module)
                : this(namedLocalIndices, null, moduleContext, module, null)
            { }

            /// <summary>
            /// Creates a child instruction context with a particular label.
            /// </summary>
            /// <param name="labelOrNull">A label that a break table can branch to.</param>
            /// <param name="parent">A parent instruction context.</param>
            public InstructionContext(
                string labelOrNull,
                InstructionContext parent)
                : this(parent.NamedLocalIndices, labelOrNull, parent.ModuleContext, parent.Module, parent)
            { }

            /// <summary>
            /// Gets a mapping of local variable names to their indices.
            /// </summary>
            /// <value>A mapping of names to indices.</value>
            public IReadOnlyDictionary<string, uint> NamedLocalIndices { get; private set; }

            /// <summary>
            /// Gets the enclosing module context.
            /// </summary>
            /// <value>A module context.</value>
            public ModuleContext ModuleContext { get; private set; }

            /// <summary>
            /// Gets the module associated with the enclosing module context.
            /// </summary>
            /// <value>A module.</value>
            public WasmFile Module { get; private set; }

            /// <summary>
            /// Gets this instruction context's label if it has one
            /// and <c>null</c> otherwise.
            /// </summary>
            /// <value>A label or <c>null</c>.</value>
            public string LabelOrNull { get; private set; }

            /// <summary>
            /// Tells if this instruction context has a label.
            /// </summary>
            public bool HasLabel => LabelOrNull != null;

            /// <summary>
            /// Gets this instruction context's parent context if it has one
            /// and <c>null</c> otherwise.
            /// </summary>
            /// <value>An instruction context or <c>null</c>.</value>
            public InstructionContext ParentOrNull { get; private set; }

            /// <summary>
            /// Tells if this instruction context has a parent context.
            /// </summary>
            public bool HasParent => ParentOrNull != null;

            /// <summary>
            /// Gets the log used by the assembler and, by extension, this context.
            /// </summary>
            public ILog Log => ModuleContext.Log;
        }
/// <summary>
/// A reference to a function, global, table or memory that is either defined
/// locally or imported.
/// </summary>
public struct LocalOrImportRef
{
    /// <summary>
    /// Tells if the value referred to by this reference is an import.
    /// </summary>
    /// <value><c>true</c> if the value is an import; otherwise, <c>false</c>.</value>
    public bool IsImport { get; private set; }

    /// <summary>
    /// Gets the intra-section index of the value being referred to.
    /// </summary>
    /// <value>An intra-section index.</value>
    public uint IndexInSection { get; private set; }

    /// <summary>
    /// Creates a reference to a function, global, table or memory that is
    /// either defined locally or imported.
    /// </summary>
    /// <param name="isImport">
    /// Tells if the value referred to by this reference is an import.
    /// </param>
    /// <param name="indexInSection">
    /// The intra-section index of the value being referred to.
    /// </param>
    public LocalOrImportRef(bool isImport, uint indexInSection)
    {
        IsImport = isImport;
        IndexInSection = indexInSection;
    }
}
/// <summary>
/// An identifier context, which maps identifiers to indices. Definitions and
/// uses may arrive in any order: uses are recorded as pending patches and
/// resolved in one pass by <see cref="ResolveAll"/> once the module is
/// fully assembled.
/// </summary>
public struct IdentifierContext<T>
{
    /// <summary>
    /// Creates an empty identifier context.
    /// </summary>
    /// <returns>An identifier context.</returns>
    public static IdentifierContext<T> Create()
    {
        return new IdentifierContext<T>()
        {
            identifierDefinitions = new Dictionary<string, T>(),
            pendingIdentifierReferences = new List<KeyValuePair<Lexer.Token, Action<uint>>>()
        };
    }

    // Maps defined identifiers to the values they identify.
    private Dictionary<string, T> identifierDefinitions;
    // Uses that still need to be patched with a resolved index.
    private List<KeyValuePair<Lexer.Token, Action<uint>>> pendingIdentifierReferences;

    /// <summary>
    /// Defines a new identifier.
    /// </summary>
    /// <param name="identifier">The identifier to define.</param>
    /// <param name="value">The value identified by the identifier.</param>
    /// <returns>
    /// <c>true</c> if <paramref name="identifier"/> is non-null and there is no
    /// previous definition of the identifier; otherwise, <c>false</c>.
    /// </returns>
    public bool Define(string identifier, T value)
    {
        if (identifier == null || identifierDefinitions.ContainsKey(identifier))
        {
            return false;
        }
        else
        {
            identifierDefinitions[identifier] = value;
            return true;
        }
    }

    /// <summary>
    /// Introduces a new identifier use.
    /// </summary>
    /// <param name="token">A token that refers to an identifier or an index.</param>
    /// <param name="patch">
    /// An action that patches a user based on the index assigned to the token.
    /// Will be executed once the module is fully assembled.
    /// </param>
    public void Use(Lexer.Token token, Action<uint> patch)
    {
        pendingIdentifierReferences.Add(
            new KeyValuePair<Lexer.Token, Action<uint>>(token, patch));
    }

    /// <summary>
    /// Introduces a new identifier use.
    /// </summary>
    /// <param name="value">A value that will eventually be assigned an index.</param>
    /// <param name="patch">
    /// An action that patches a user based on the index assigned to the value.
    /// Will be executed once the module is fully assembled.
    /// </param>
    public void Use(T value, Action<uint> patch)
    {
        // Wrap the value in a synthetic token so it travels through the same
        // pending-reference machinery as textual identifiers.
        Use(Lexer.Token.Synthesize(value), patch);
    }

    /// <summary>
    /// Tries to map an identifier back to its definition.
    /// </summary>
    /// <param name="identifier">An identifier to inspect.</param>
    /// <param name="definition">A definition for <paramref name="identifier"/>, if one exists already.</param>
    /// <returns>
    /// <c>true</c> <paramref name="identifier"/> is defined; otherwise, <c>false</c>.
    /// </returns>
    public bool TryGetDefinition(string identifier, out T definition)
    {
        return identifierDefinitions.TryGetValue(identifier, out definition);
    }

    /// <summary>
    /// Resolves all pending references. Unresolvable identifiers produce an
    /// error diagnostic with a "did you mean" suggestion when one is available.
    /// </summary>
    /// <param name="log">A log to send diagnostics to.</param>
    /// <param name="getIndex">A function that maps defined values to indices.</param>
    public void ResolveAll(ILog log, Func<T, uint> getIndex)
    {
        foreach (var pair in pendingIdentifierReferences)
        {
            uint index;
            if (TryResolve(pair.Key, getIndex, out index))
            {
                pair.Value(index);
            }
            else
            {
                var id = (string)pair.Key.Value;
                var suggested = NameSuggestion.SuggestName(id, identifierDefinitions.Keys);
                log.Log(
                    new LogEntry(
                        Severity.Error,
                        "syntax error",
                        // Fixed garbled diagnostic: used to read "does is undefined".
                        Quotation.QuoteEvenInBold("identifier ", id, " is undefined"),
                        suggested == null
                            ? (MarkupNode)"."
                            : Quotation.QuoteEvenInBold("; did you mean ", suggested, "?"),
                        Highlight(pair.Key)));
            }
        }
        pendingIdentifierReferences.Clear();
    }

    /// <summary>
    /// Tries to map an identifier to its associated index.
    /// </summary>
    /// <param name="identifier">An identifier to inspect.</param>
    /// <param name="getIndex">A function that maps defined values to indices.</param>
    /// <param name="index">The associated index.</param>
    /// <returns>
    /// <c>true</c> if an index was found for <paramref name="identifier"/>; otherwise, <c>false</c>.
    /// </returns>
    private bool TryResolve(string identifier, Func<T, uint> getIndex, out uint index)
    {
        T val;
        if (identifierDefinitions.TryGetValue(identifier, out val))
        {
            index = getIndex(val);
            return true;
        }
        else
        {
            index = 0;
            return false;
        }
    }

    /// <summary>
    /// Tries to map an identifier or index to its associated index.
    /// </summary>
    /// <param name="identifierOrIndex">An identifier or index to inspect.</param>
    /// <param name="getIndex">A function that maps defined values to indices.</param>
    /// <param name="index">The associated index.</param>
    /// <returns>
    /// <c>true</c> if an index was found for <paramref name="identifierOrIndex"/>; otherwise, <c>false</c>.
    /// </returns>
    private bool TryResolve(Lexer.Token identifierOrIndex, Func<T, uint> getIndex, out uint index)
    {
        if (identifierOrIndex.Kind == Lexer.TokenKind.UnsignedInteger)
        {
            // A literal index: use it directly.
            index = (uint)(BigInteger)identifierOrIndex.Value;
            return true;
        }
        else if (identifierOrIndex.Kind == Lexer.TokenKind.Identifier)
        {
            return TryResolve((string)identifierOrIndex.Value, getIndex, out index);
        }
        else if (identifierOrIndex.Kind == Lexer.TokenKind.Synthetic
            && identifierOrIndex.Value is T)
        {
            // A synthetic token produced by Use(T, ...): resolve the wrapped value.
            index = getIndex((T)identifierOrIndex.Value);
            return true;
        }
        else
        {
            index = 0;
            return false;
        }
    }
}
/// <summary>
/// The default set of module field assemblers. Maps a module field's keyword
/// (the head of its s-expression) to the method that assembles that field.
/// </summary>
public static readonly IReadOnlyDictionary<string, ModuleFieldAssembler> DefaultModuleFieldAssemblers =
    new Dictionary<string, ModuleFieldAssembler>()
    {
        ["data"] = AssembleDataSegment,
        ["elem"] = AssembleElementSegment,
        ["export"] = AssembleExport,
        ["func"] = AssembleFunction,
        ["global"] = AssembleGlobal,
        ["import"] = AssembleImport,
        ["memory"] = AssembleMemory,
        ["start"] = AssembleStart,
        ["table"] = AssembleTable,
        ["type"] = AssembleType
    };

/// <summary>
/// The default set of instruction assemblers, keyed by instruction mnemonic.
/// Populated by the static constructor: a seed table of special-case
/// instructions plus entries generated from the operator catalog.
/// </summary>
public static readonly IReadOnlyDictionary<string, PlainInstructionAssembler> DefaultPlainInstructionAssemblers;

// The natural (default) byte alignment of each memory access operator, used
// when a memory instruction carries no explicit 'align=' immediate.
private static readonly Dictionary<MemoryOperator, uint> naturalAlignments = new Dictionary<MemoryOperator, uint>()
{
    [Operators.Int32Load] = 4,
    [Operators.Int64Load] = 8,
    [Operators.Float32Load] = 4,
    [Operators.Float64Load] = 8,
    [Operators.Int32Load8U] = 1,
    [Operators.Int32Load8S] = 1,
    [Operators.Int32Load16U] = 2,
    [Operators.Int32Load16S] = 2,
    [Operators.Int64Load8U] = 1,
    [Operators.Int64Load8S] = 1,
    [Operators.Int64Load16U] = 2,
    [Operators.Int64Load16S] = 2,
    [Operators.Int64Load32U] = 4,
    [Operators.Int64Load32S] = 4,
    [Operators.Int32Store] = 4,
    [Operators.Int64Store] = 8,
    [Operators.Float32Store] = 4,
    [Operators.Float64Store] = 8,
    [Operators.Int32Store8] = 1,
    [Operators.Int32Store16] = 2,
    [Operators.Int64Store8] = 1,
    [Operators.Int64Store16] = 2,
    [Operators.Int64Store32] = 4
};
static Assembler()
{
    // Seed the instruction assembler table with the instructions that take
    // immediates or have structured (block-like) syntax; plain operators are
    // added programmatically from the operator catalog below.
    var insnAssemblers = new Dictionary<string, PlainInstructionAssembler>()
    {
        ["i32.const"] = AssembleConstInt32Instruction,
        ["i64.const"] = AssembleConstInt64Instruction,
        ["f32.const"] = AssembleConstFloat32Instruction,
        ["f64.const"] = AssembleConstFloat64Instruction,
        ["block"] = (SExpression keyword, ref IReadOnlyList<SExpression> operands, InstructionContext context) =>
            AssembleBlockOrLoop(Operators.Block, keyword, ref operands, context, true),
        ["loop"] = (SExpression keyword, ref IReadOnlyList<SExpression> operands, InstructionContext context) =>
            AssembleBlockOrLoop(Operators.Loop, keyword, ref operands, context, true),
        ["if"] = AssembleIfInstruction,
        ["local.get"] = (SExpression keyword, ref IReadOnlyList<SExpression> operands, InstructionContext context) =>
            AssembleLocalInstruction(Operators.GetLocal, keyword, ref operands, context),
        ["local.set"] = (SExpression keyword, ref IReadOnlyList<SExpression> operands, InstructionContext context) =>
            AssembleLocalInstruction(Operators.SetLocal, keyword, ref operands, context),
        ["local.tee"] = (SExpression keyword, ref IReadOnlyList<SExpression> operands, InstructionContext context) =>
            AssembleLocalInstruction(Operators.TeeLocal, keyword, ref operands, context),
        ["global.get"] = (SExpression keyword, ref IReadOnlyList<SExpression> operands, InstructionContext context) =>
            AssembleGlobalInstruction(Operators.GetGlobal, keyword, ref operands, context),
        ["global.set"] = (SExpression keyword, ref IReadOnlyList<SExpression> operands, InstructionContext context) =>
            AssembleGlobalInstruction(Operators.SetGlobal, keyword, ref operands, context),
        ["call"] = AssembleCallInstruction,
        ["call_indirect"] = AssembleCallIndirectInstruction,
        // memory.size/memory.grow take no operands; the '0' immediate is the
        // (single) memory index.
        ["memory.size"] = (SExpression keyword, ref IReadOnlyList<SExpression> operands, InstructionContext context) =>
            Operators.CurrentMemory.Create(0),
        ["memory.grow"] = (SExpression keyword, ref IReadOnlyList<SExpression> operands, InstructionContext context) =>
            Operators.GrowMemory.Create(0),
        ["br"] = (SExpression keyword, ref IReadOnlyList<SExpression> operands, InstructionContext context) =>
            AssembleBrInstruction(Operators.Br, keyword, ref operands, context),
        ["br_if"] = (SExpression keyword, ref IReadOnlyList<SExpression> operands, InstructionContext context) =>
            AssembleBrInstruction(Operators.BrIf, keyword, ref operands, context),
        ["br_table"] = AssembleBrTableInstruction
    };
    DefaultPlainInstructionAssemblers = insnAssemblers;
    foreach (var op in Operators.AllOperators)
    {
        if (op is NullaryOperator nullary)
        {
            // Nullary operators have a fairly regular structure that is almost identical
            // to their mnemonics as specified for the binary encoding.
            // The only way in which they are different is that they do not include slashes.
            // To accommodate this, we map binary encoding mnemonics to text format mnemonics like
            // so:
            //
            //   i32.add           -> i32.add
            //   f32.convert_u/i64 -> f32.convert_i64_u
            //   f32.demote/f64    -> f32.demote_f64
            //
            var mnemonic = nullary.Mnemonic;
            var mnemonicAndType = mnemonic.Split(new[] { '/' }, 2);
            if (mnemonicAndType.Length == 2)
            {
                var mnemonicAndSuffix = mnemonicAndType[0].Split(new[] { '_' }, 2);
                if (mnemonicAndSuffix.Length == 2)
                {
                    // e.g., convert_u/i64 -> convert_i64_u.
                    mnemonic = $"{mnemonicAndSuffix[0]}_{mnemonicAndType[1]}_{mnemonicAndSuffix[1]}";
                }
                else
                {
                    // e.g., demote/f64 -> demote_f64.
                    mnemonic = $"{mnemonicAndType[0]}_{mnemonicAndType[1]}";
                }
            }
            if (nullary.DeclaringType != WasmType.Empty)
            {
                // Prefix the mnemonic with its declaring type, e.g., 'add' -> 'i32.add'.
                mnemonic = $"{DumpHelpers.WasmTypeToString(nullary.DeclaringType)}.{mnemonic}";
            }
            insnAssemblers[mnemonic] = (SExpression keyword, ref IReadOnlyList<SExpression> operands, InstructionContext context) =>
                nullary.Create();
        }
        else if (op is MemoryOperator memOp)
        {
            // Memory operators all share the same immediate syntax
            // (optional 'offset=' and 'align=' keywords).
            var mnemonic = $"{DumpHelpers.WasmTypeToString(memOp.DeclaringType)}.{memOp.Mnemonic}";
            insnAssemblers[mnemonic] = (SExpression keyword, ref IReadOnlyList<SExpression> operands, InstructionContext context) =>
                AssembleMemoryInstruction(memOp, keyword, ref operands, context);
        }
    }
}
/// <summary>
/// Assembles a memory access instruction, parsing the optional 'offset='
/// and 'align=' immediates from the operand list.
/// </summary>
/// <param name="memoryOperator">The memory operator to create an instruction for.</param>
/// <param name="keyword">The instruction's keyword expression.</param>
/// <param name="operands">The instruction's remaining operands; consumed immediates are removed.</param>
/// <param name="context">The instruction context, used for diagnostics.</param>
/// <returns>A memory instruction whose alignment immediate is log2 of the byte alignment.</returns>
private static Instruction AssembleMemoryInstruction(
    MemoryOperator memoryOperator,
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context)
{
    var offset = AssembleOptionalNamedUInt32(ref operands, "offset", 0, context);
    uint alignVal;
    if (operands.Count > 0)
    {
        var alignExpr = operands[0];
        var align = AssembleOptionalNamedUInt32(ref operands, "align", naturalAlignments[memoryOperator], context);
        // A valid alignment is a nonzero power of two. The bit test also
        // rejects zero, which the previous Math.Log-based check silently
        // accepted (log2(0) is -infinity, whose floor equals itself) and then
        // fed through an unspecified double-to-uint cast.
        if (align == 0 || (align & (align - 1)) != 0)
        {
            context.Log.Log(
                new LogEntry(
                    Severity.Error,
                    "syntax error",
                    "alignment ", align.ToString(), " is not a power of two.",
                    Highlight(alignExpr)));
            // Recover with the operator's natural alignment so assembly can continue.
            align = naturalAlignments[memoryOperator];
        }
        alignVal = (uint)Math.Log(align, 2);
    }
    else
    {
        // No explicit alignment: use the operator's natural alignment.
        alignVal = (uint)Math.Log(naturalAlignments[memoryOperator], 2);
    }
    return memoryOperator.Create(alignVal, offset);
}
/// <summary>
/// Parses an optional '<c>keyword=value</c>' immediate at the front of the
/// operand list, e.g., 'offset=16'. If present, the operand is consumed.
/// </summary>
/// <param name="operands">The operand list; the first operand is removed when it matches.</param>
/// <param name="keyword">The immediate's keyword, without the '=' sign.</param>
/// <param name="defaultValue">The value to return when the immediate is absent or malformed.</param>
/// <param name="context">The instruction context, used for diagnostics.</param>
/// <returns>The parsed value, or <paramref name="defaultValue"/>.</returns>
private static uint AssembleOptionalNamedUInt32(
    ref IReadOnlyList<SExpression> operands,
    string keyword,
    uint defaultValue,
    InstructionContext context)
{
    if (operands.Count == 0 || !operands[0].IsKeyword)
    {
        return defaultValue;
    }

    var headText = (string)operands[0].Head.Value;
    var prefix = keyword + "=";
    if (!headText.StartsWith(prefix, StringComparison.Ordinal))
    {
        return defaultValue;
    }

    // The keyword matched; everything after 'keyword=' must be a single
    // unsigned integer token.
    var valueText = headText.Substring(prefix.Length);
    var valueTokens = Lexer.Tokenize(valueText).ToArray();
    var result = defaultValue;
    if (valueTokens.Length == 1 && valueTokens[0].Kind == Lexer.TokenKind.UnsignedInteger)
    {
        result = AssembleUInt32(SExpression.Create(valueTokens[0]), context.ModuleContext);
    }
    else
    {
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                "text ", valueText, " after keyword ", prefix, " is not an unsigned integer.",
                Highlight(operands[0])));
    }
    // Consume the matched operand whether or not its value parsed cleanly.
    operands = operands.Skip(1).ToArray();
    return result;
}
/// <summary>
/// Assembles an 'i32.const' instruction from its single integer immediate.
/// Falls back to a zero constant when the immediate is missing.
/// </summary>
private static Instruction AssembleConstInt32Instruction(
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context)
{
    return AssertPopImmediate(keyword, ref operands, context, out SExpression immediate)
        ? Operators.Int32Const.Create(AssembleSignlessInt32(immediate, context.ModuleContext))
        : Operators.Int32Const.Create(0);
}
/// <summary>
/// Assembles an 'i64.const' instruction from its single integer immediate.
/// Falls back to a zero constant when the immediate is missing.
/// </summary>
private static Instruction AssembleConstInt64Instruction(
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context)
{
    return AssertPopImmediate(keyword, ref operands, context, out SExpression immediate)
        ? Operators.Int64Const.Create(AssembleSignlessInt64(immediate, context.ModuleContext))
        : Operators.Int64Const.Create(0);
}
/// <summary>
/// Assembles an 'f32.const' instruction from its single float immediate.
/// Falls back to a NaN constant when the immediate is missing.
/// </summary>
private static Instruction AssembleConstFloat32Instruction(
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context)
{
    return AssertPopImmediate(keyword, ref operands, context, out SExpression immediate)
        ? Operators.Float32Const.Create(AssembleFloat32(immediate, context.ModuleContext))
        : Operators.Float32Const.Create(float.NaN);
}
/// <summary>
/// Assembles an 'f64.const' instruction from its single float immediate.
/// Falls back to a NaN constant when the immediate is missing.
/// </summary>
private static Instruction AssembleConstFloat64Instruction(
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context)
{
    return AssertPopImmediate(keyword, ref operands, context, out SExpression immediate)
        ? Operators.Float64Const.Create(AssembleFloat64(immediate, context.ModuleContext))
        : Operators.Float64Const.Create(double.NaN);
}
/// <summary>
/// Pops the next immediate from the operand list, logging an error via the
/// instruction context's log when none is left.
/// </summary>
private static bool AssertPopImmediate(
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context,
    out SExpression immediate) =>
    AssertPopImmediate(keyword, ref operands, context.Log, out immediate);
/// <summary>
/// Pops the next immediate from the operand list, logging an error via the
/// module context's log when none is left.
/// </summary>
private static bool AssertPopImmediate(
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    ModuleContext context,
    out SExpression immediate) =>
    AssertPopImmediate(keyword, ref operands, context.Log, out immediate);
/// <summary>
/// Pops the next immediate from the operand list. When the list is empty,
/// logs an "expected another immediate" error at the keyword's location and
/// produces a default immediate.
/// </summary>
/// <returns><c>true</c> if an immediate was popped; otherwise, <c>false</c>.</returns>
private static bool AssertPopImmediate(
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    ILog log,
    out SExpression immediate)
{
    if (operands.Count > 0)
    {
        // Pop the first operand and hand it back to the caller.
        immediate = operands[0];
        operands = operands.Skip(1).ToArray();
        return true;
    }

    log.Log(
        new LogEntry(
            Severity.Error,
            "syntax error",
            "expected another immediate.",
            Highlight(keyword)));
    immediate = default(SExpression);
    return false;
}
/// <summary>
/// Assembles a 'memory' module field: an optional identifier, optional inline
/// exports, and then either an inline 'data' segment, an inline 'import', or
/// explicit limits.
/// </summary>
/// <param name="moduleField">The 'memory' field expression.</param>
/// <param name="module">The module being assembled.</param>
/// <param name="context">The module context, used for diagnostics and identifier tracking.</param>
private static void AssembleMemory(
    SExpression moduleField,
    WasmFile module,
    ModuleContext context)
{
    const string kind = "memory definition";
    // Process the optional memory identifier.
    var memory = new MemoryType(new ResizableLimits(0));
    var tail = moduleField.Tail;
    var memoryId = AssembleLabelOrNull(ref tail);
    if (memoryId != null)
    {
        context.MemoryContext.Define(memoryId, memory);
    }
    if (!AssertNonEmpty(moduleField, tail, kind, context))
    {
        return;
    }
    // Parse inline exports.
    var exportNames = AssembleInlineExports(moduleField, ref tail, context);
    if (!AssertNonEmpty(moduleField, tail, kind, context))
    {
        return;
    }
    foreach (var exportName in exportNames)
    {
        AddExport(module, context.MemoryContext, memory, ExternalKind.Memory, exportName);
    }
    if (tail[0].IsCallTo("data"))
    {
        // Inline data: size the memory to exactly fit the data, then emit a
        // data segment at offset 0 whose memory index is patched in later.
        var data = AssembleDataString(tail[0].Tail, context);
        var pageCount = (uint)Math.Ceiling((double)data.Length / MemoryType.PageSize);
        memory.Limits = new ResizableLimits(pageCount, pageCount);
        module.AddMemory(memory);
        var dataSegment = new DataSegment(0, new InitializerExpression(Operators.Int32Const.Create(0)), data);
        context.MemoryContext.Use(memory, index => { dataSegment.MemoryIndex = index; });
        module.AddDataSegment(dataSegment);
        AssertEmpty(context, kind, tail.Skip(1));
    }
    else if (tail[0].IsCallTo("import"))
    {
        // Inline import: '(memory (import "mod" "name") <limits>)'.
        var (moduleName, memoryName) = AssembleInlineImport(tail[0], context);
        tail = tail.Skip(1).ToArray();
        memory.Limits = AssembleLimits(moduleField, tail, context);
        var import = new ImportedMemory(moduleName, memoryName, memory);
        module.AddImport(import);
    }
    else
    {
        // Plain definition: '(memory <min> <max>?)'.
        memory.Limits = AssembleLimits(moduleField, tail, context);
        module.AddMemory(memory);
    }
}
/// <summary>
/// Parses zero or more inline '(export "name")' expressions at the front of
/// a module field's tail, consuming them from the tail.
/// </summary>
/// <param name="moduleField">The enclosing module field, used for diagnostics.</param>
/// <param name="tail">The field's remaining expressions; parsed exports are removed.</param>
/// <param name="context">The module context, used for diagnostics.</param>
/// <returns>The list of export names found.</returns>
private static IReadOnlyList<string> AssembleInlineExports(
    SExpression moduleField,
    ref IReadOnlyList<SExpression> tail,
    ModuleContext context)
{
    var exportNames = new List<string>();
    while (tail.Count > 0)
    {
        var head = tail[0];
        if (!head.IsCallTo("export"))
        {
            break;
        }
        tail = tail.Skip(1).ToArray();
        // Each inline export must carry exactly one operand: the export name.
        if (AssertElementCount(head, head.Tail, 1, context))
        {
            exportNames.Add(AssembleString(head.Tail[0], context));
        }
    }
    return exportNames;
}
/// <summary>
/// Assembles a 'data' module field: an optional memory identifier or index
/// (defaulting to memory 0), an offset expression, and a data string.
/// </summary>
/// <param name="moduleField">The 'data' field expression.</param>
/// <param name="module">The module being assembled.</param>
/// <param name="context">The module context, used for diagnostics and identifier tracking.</param>
private static void AssembleDataSegment(
    SExpression moduleField,
    WasmFile module,
    ModuleContext context)
{
    const string kind = "data segment";
    var tail = moduleField.Tail;
    if (!AssertNonEmpty(moduleField, tail, kind, context))
    {
        return;
    }
    Lexer.Token memoryId;
    if (tail[0].IsIdentifier || tail[0].Head.Kind == Lexer.TokenKind.UnsignedInteger)
    {
        // An explicit memory identifier or index precedes the offset.
        memoryId = AssembleIdentifierOrIndex(tail[0], context);
        tail = tail.Skip(1).ToArray();
    }
    else
    {
        // No memory reference: default to memory index 0.
        memoryId = Lexer.Token.Synthesize(new BigInteger(0), Lexer.TokenKind.UnsignedInteger);
    }
    var offset = AssembleOffset(moduleField, ref tail, context, module);
    var data = AssembleDataString(tail, context);
    // The memory index starts at 0 and is patched once indices are resolved.
    var segment = new DataSegment(0u, offset, data);
    context.MemoryContext.Use(memoryId, index => segment.MemoryIndex = index);
    module.AddDataSegment(segment);
}
/// <summary>
/// Assembles an 'elem' module field: an optional table identifier or index
/// (defaulting to table 0), an offset expression, and a list of function
/// identifiers or indices.
/// </summary>
/// <param name="moduleField">The 'elem' field expression.</param>
/// <param name="module">The module being assembled.</param>
/// <param name="context">The module context, used for diagnostics and identifier tracking.</param>
private static void AssembleElementSegment(
    SExpression moduleField,
    WasmFile module,
    ModuleContext context)
{
    const string kind = "element segment";
    var tail = moduleField.Tail;
    if (!AssertNonEmpty(moduleField, tail, kind, context))
    {
        return;
    }
    Lexer.Token tableId;
    if (tail[0].IsIdentifier || tail[0].Head.Kind == Lexer.TokenKind.UnsignedInteger)
    {
        // An explicit table identifier or index precedes the offset.
        tableId = AssembleIdentifierOrIndex(tail[0], context);
        tail = tail.Skip(1).ToArray();
    }
    else
    {
        // No table reference: default to table index 0.
        tableId = Lexer.Token.Synthesize(new BigInteger(0), Lexer.TokenKind.UnsignedInteger);
    }
    var offset = AssembleOffset(moduleField, ref tail, context, module);
    var segment = new ElementSegment(0u, offset, Enumerable.Empty<uint>());
    for (int i = 0; i < tail.Count; i++)
    {
        // Copy the loop variable so each patch closure captures its own slot
        // rather than the shared loop counter.
        int listIndex = i;
        segment.Elements.Add(0);
        context.FunctionContext.Use(
            AssembleIdentifierOrIndex(tail[i], context),
            funcIndex => segment.Elements[listIndex] = funcIndex);
    }
    context.TableContext.Use(tableId, index => segment.TableIndex = index);
    module.AddElementSegment(segment);
}
/// <summary>
/// Assembles the offset of a data or element segment: either an explicit
/// '(offset ...)' expression or a single bare instruction. When no offset is
/// present, logs an error and recovers with a zero offset.
/// </summary>
/// <param name="moduleField">The enclosing module field, used for diagnostics.</param>
/// <param name="tail">The field's remaining expressions; the offset is consumed.</param>
/// <param name="context">The module context, used for diagnostics.</param>
/// <param name="module">The module being assembled.</param>
/// <returns>The offset as an initializer expression.</returns>
private static InitializerExpression AssembleOffset(
    SExpression moduleField,
    ref IReadOnlyList<SExpression> tail,
    ModuleContext context,
    WasmFile module)
{
    if (tail.Count == 0)
    {
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                "expected a memory offset, specified as an instruction.",
                Highlight(moduleField)));
        // Recover with a zero offset so assembly can continue.
        return new InitializerExpression(Operators.Int32Const.Create(0));
    }

    var head = tail[0];
    if (head.IsCallTo("offset"))
    {
        // Explicit '(offset <instr>...)' form.
        tail = tail.Skip(1).ToArray();
        return AssembleInitializerExpression(head.Tail, context, module);
    }

    // Abbreviated form: a single bare instruction acts as the offset.
    var insnContext = new InstructionContext(new Dictionary<string, uint>(), context, module);
    return new InitializerExpression(AssembleInstruction(ref tail, insnContext));
}
/// <summary>
/// Assembles an 'export' module field of the form
/// '(export "name" (func|table|memory|global <id-or-index>))'.
/// </summary>
/// <param name="moduleField">The 'export' field expression.</param>
/// <param name="module">The module being assembled.</param>
/// <param name="context">The module context, used for diagnostics and identifier tracking.</param>
private static void AssembleExport(
    SExpression moduleField,
    WasmFile module,
    ModuleContext context)
{
    var tail = moduleField.Tail;
    // Expect exactly two elements: the export name and the export description,
    // where the description holds exactly one identifier or index.
    if (!AssertElementCount(moduleField, tail, 2, context)
        || !AssertElementCount(tail[1], tail[1].Tail, 1, context))
    {
        return;
    }
    var exportName = AssembleString(tail[0], context);
    var index = AssembleIdentifierOrIndex(tail[1].Tail[0], context);
    // Dispatch on the export description's keyword; each branch registers the
    // export in the matching identifier context.
    if (tail[1].IsCallTo("memory"))
    {
        AddExport(module, context.MemoryContext, index, ExternalKind.Memory, exportName);
    }
    else if (tail[1].IsCallTo("func"))
    {
        AddExport(module, context.FunctionContext, index, ExternalKind.Function, exportName);
    }
    else if (tail[1].IsCallTo("table"))
    {
        AddExport(module, context.TableContext, index, ExternalKind.Table, exportName);
    }
    else if (tail[1].IsCallTo("global"))
    {
        AddExport(module, context.GlobalContext, index, ExternalKind.Global, exportName);
    }
    else
    {
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                Quotation.QuoteEvenInBold(
                    "unexpected expression in export definition; expected ",
                    "func", ",", "table", ",", "memory", " or ", "global", "."),
                Highlight(tail[1])));
    }
}
/// <summary>
/// Assembles an 'import' module field of the form
/// '(import "module" "name" (func|table|memory|global ...))'.
/// </summary>
/// <param name="moduleField">The 'import' field expression.</param>
/// <param name="module">The module being assembled.</param>
/// <param name="context">The module context, used for diagnostics and identifier tracking.</param>
private static void AssembleImport(SExpression moduleField, WasmFile module, ModuleContext context)
{
    if (!AssertElementCount(moduleField, moduleField.Tail, 3, context))
    {
        return;
    }
    var moduleName = AssembleString(moduleField.Tail[0], context);
    var importName = AssembleString(moduleField.Tail[1], context);
    var importDesc = moduleField.Tail[2];
    // The import description may start with an optional identifier that names
    // the imported value, e.g., '(func $f ...)'.
    string importId = null;
    var importTail = importDesc.Tail;
    if (importDesc.Tail.Count > 0 && importDesc.Tail[0].IsIdentifier)
    {
        importId = (string)importDesc.Tail[0].Head.Value;
        importTail = importTail.Skip(1).ToArray();
    }
    if (importDesc.IsCallTo("memory"))
    {
        if (!AssertNonEmpty(importDesc, importTail, "import", context))
        {
            return;
        }
        var memory = new MemoryType(AssembleLimits(importDesc, importTail, context));
        module.AddImport(new ImportedMemory(moduleName, importName, memory));
        context.MemoryContext.Define(importId, memory);
    }
    else if (importDesc.IsCallTo("func"))
    {
        var type = AssembleTypeUse(importDesc, ref importTail, context, module, true);
        var typeIndex = AddOrReuseFunctionType(type, module);
        var importIndex = module.AddImport(new ImportedFunction(moduleName, importName, typeIndex));
        context.FunctionContext.Define(importId, new LocalOrImportRef(true, importIndex));
        AssertEmpty(context, "import", importTail);
    }
    else if (importDesc.IsCallTo("global"))
    {
        // Guard against an empty global description; previously this branch
        // indexed importTail[0] unconditionally, which throws on input such
        // as '(import "m" "n" (global))'.
        if (!AssertNonEmpty(importDesc, importTail, "import", context))
        {
            return;
        }
        var type = AssembleGlobalType(importTail[0], context);
        var importIndex = module.AddImport(new ImportedGlobal(moduleName, importName, type));
        context.GlobalContext.Define(importId, new LocalOrImportRef(true, importIndex));
        AssertEmpty(context, "global", importTail.Skip(1).ToArray());
    }
    else if (importDesc.IsCallTo("table"))
    {
        var type = AssembleTableType(importDesc, importTail, context);
        var importIndex = module.AddImport(new ImportedTable(moduleName, importName, type));
        context.TableContext.Define(importId, new LocalOrImportRef(true, importIndex));
    }
    else
    {
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                Quotation.QuoteEvenInBold(
                    "unexpected expression in import; expected ",
                    "func", ",", "table", ",", "memory", " or ", "global", "."),
                Highlight(importDesc)));
    }
}
/// <summary>
/// Assembles a 'table' module field: an optional identifier, optional inline
/// exports, and then either an inline 'import', an element-type plus inline
/// '(elem ...)' initializer, or an explicit table type.
/// </summary>
/// <param name="moduleField">The 'table' field expression.</param>
/// <param name="module">The module being assembled.</param>
/// <param name="context">The module context, used for diagnostics and identifier tracking.</param>
private static void AssembleTable(
    SExpression moduleField,
    WasmFile module,
    ModuleContext context)
{
    var tail = moduleField.Tail;
    var tableId = AssembleLabelOrNull(ref tail);
    var exportNames = AssembleInlineExports(moduleField, ref tail, context);
    if (!AssertNonEmpty(moduleField, tail, "table definition", context))
    {
        return;
    }
    LocalOrImportRef tableRef;
    if (tail[0].IsCallTo("import"))
    {
        // Inline import: '(table (import "mod" "name") <table type>)'.
        var (moduleName, importName) = AssembleInlineImport(tail[0], context);
        var table = AssembleTableType(moduleField, tail.Skip(1).ToArray(), context);
        var tableIndex = module.AddImport(new ImportedTable(moduleName, importName, table));
        tableRef = new LocalOrImportRef(true, tableIndex);
        context.TableContext.Define(tableId, tableRef);
    }
    else if (tail[0].Head.Kind == Lexer.TokenKind.Keyword)
    {
        // Abbreviated form: '(table <elemtype> (elem <funcs>...))'. The table's
        // limits are derived from the element count.
        var elemType = AssembleElemType(tail[0], context);
        if (!AssertElementCount(moduleField, tail, 2, context))
        {
            return;
        }
        var elems = tail[1];
        if (!elems.IsCallTo("elem"))
        {
            context.Log.Log(
                new LogEntry(
                    Severity.Error,
                    "syntax error",
                    Quotation.QuoteEvenInBold(
                        "unexpected expression in initialized table; expected an ",
                        "elem", " expression."),
                    Highlight(elems)));
        }
        // Element segment at offset 0; function and table indices are patched
        // once all identifiers are resolved.
        var elemSegment = new ElementSegment(
            0,
            new InitializerExpression(Operators.Int32Const.Create(0)),
            Enumerable.Empty<uint>());
        module.AddElementSegment(elemSegment);
        for (int i = 0; i < elems.Tail.Count; i++)
        {
            elemSegment.Elements.Add(0);
            var functionId = AssembleIdentifierOrIndex(elems.Tail[i], context);
            // Copy the loop variable so each patch closure gets its own slot.
            var j = i;
            context.FunctionContext.Use(functionId, index => { elemSegment.Elements[j] = index; });
        }
        var table = new TableType(elemType, new ResizableLimits((uint)elemSegment.Elements.Count));
        var tableIndex = module.AddTable(table);
        tableRef = new LocalOrImportRef(false, tableIndex);
        context.TableContext.Define(tableId, tableRef);
        context.TableContext.Use(tableRef, index => { elemSegment.TableIndex = index; });
    }
    else
    {
        // Plain definition: '(table <limits> <elemtype>)'.
        var table = AssembleTableType(moduleField, tail, context);
        var tableIndex = module.AddTable(table);
        tableRef = new LocalOrImportRef(false, tableIndex);
        context.TableContext.Define(tableId, tableRef);
    }
    foreach (var exportName in exportNames)
    {
        AddExport(module, context.TableContext, tableRef, ExternalKind.Table, exportName);
    }
}
/// <summary>
/// Assembles a 'type' module field: an optional identifier followed by a
/// '(func ...)' type expression, which is added to the module's type section.
/// </summary>
/// <param name="moduleField">The 'type' field expression.</param>
/// <param name="module">The module being assembled.</param>
/// <param name="context">The module context, used for diagnostics and identifier tracking.</param>
private static void AssembleType(
    SExpression moduleField,
    WasmFile module,
    ModuleContext context)
{
    var fields = moduleField.Tail;
    var typeId = AssembleLabelOrNull(ref fields);
    SExpression funcTypeExpr;
    if (!AssertPopImmediate(moduleField, ref fields, context, out funcTypeExpr))
    {
        return;
    }
    var funcTypeBody = funcTypeExpr.Tail;
    var funcType = AssembleTypeUse(funcTypeExpr, ref funcTypeBody, context, module);
    // Anything left after the type expression is a syntax error.
    AssertEmpty(context, "type definition", fields);
    context.TypeContext.Define(typeId, module.AddFunctionType(funcType));
}
/// <summary>
/// Assembles a 'start' module field, which names the module's start function
/// by identifier or index. The index is patched once identifiers resolve.
/// </summary>
/// <param name="moduleField">The 'start' field expression.</param>
/// <param name="module">The module being assembled.</param>
/// <param name="context">The module context, used for diagnostics and identifier tracking.</param>
private static void AssembleStart(
    SExpression moduleField,
    WasmFile module,
    ModuleContext context)
{
    if (!AssertElementCount(moduleField, moduleField.Tail, 1, context))
    {
        return;
    }
    var startId = AssembleIdentifierOrIndex(moduleField.Tail[0], context);
    context.FunctionContext.Use(startId, index => { module.StartFunctionIndex = index; });
}
/// <summary>
/// Assembles a 'func' module field: an optional identifier, optional inline
/// exports, and then either an inline 'import' or a full definition with a
/// type use, locals and a body.
/// </summary>
/// <param name="moduleField">The 'func' field expression.</param>
/// <param name="module">The module being assembled.</param>
/// <param name="context">The module context, used for diagnostics and identifier tracking.</param>
private static void AssembleFunction(
    SExpression moduleField,
    WasmFile module,
    ModuleContext context)
{
    var tail = moduleField.Tail;
    var functionId = AssembleLabelOrNull(ref tail);
    // Parse export names.
    var exportNames = AssembleInlineExports(moduleField, ref tail, context);
    LocalOrImportRef funcRef;
    if (tail.Count > 0 && tail[0].IsCallTo("import"))
    {
        // We encountered an inline function import.
        var (moduleName, functionName) = AssembleInlineImport(tail[0], context);
        tail = tail.Skip(1).ToArray();
        var funType = AssembleTypeUse(moduleField, ref tail, context, module, true);
        AssertEmpty(context, "function import", tail);
        var index = module.AddImport(
            new ImportedFunction(moduleName, functionName, AddOrReuseFunctionType(funType, module)));
        funcRef = new LocalOrImportRef(true, index);
    }
    else
    {
        // We're dealing with a regular function definition.
        // localIdentifiers is populated by the type use (parameters) and by
        // AssembleLocals; local indices therefore start after the parameters.
        var localIdentifiers = new Dictionary<string, uint>();
        var funType = AssembleTypeUse(moduleField, ref tail, context, module, true, localIdentifiers);
        var locals = AssembleLocals(ref tail, localIdentifiers, context, "local", funType.ParameterTypes.Count);
        var insnContext = new InstructionContext(localIdentifiers, context, module);
        var insns = new List<Instruction>();
        while (tail.Count > 0)
        {
            insns.AddRange(AssembleInstruction(ref tail, insnContext));
        }
        var index = module.AddFunction(
            AddOrReuseFunctionType(funType, module),
            new FunctionBody(locals.Select(x => new LocalEntry(x, 1)), insns));
        funcRef = new LocalOrImportRef(false, index);
    }
    context.FunctionContext.Define(functionId, funcRef);
    // Add entries to the export section if necessary.
    foreach (var name in exportNames)
    {
        AddExport(module, context.FunctionContext, funcRef, ExternalKind.Function, name);
    }
}
/// <summary>
/// Assembles a 'global' module field: an optional identifier, optional inline
/// exports, and then either an inline 'import' or a global type plus an
/// initializer expression.
/// </summary>
/// <param name="moduleField">The 'global' field expression.</param>
/// <param name="module">The module being assembled.</param>
/// <param name="context">The module context, used for diagnostics and identifier tracking.</param>
private static void AssembleGlobal(
    SExpression moduleField,
    WasmFile module,
    ModuleContext context)
{
    var tail = moduleField.Tail;
    var globalId = AssembleLabelOrNull(ref tail);
    // Parse export names.
    var exportNames = AssembleInlineExports(moduleField, ref tail, context);
    LocalOrImportRef globalRef;
    if (tail.Count > 0 && tail[0].IsCallTo("import"))
    {
        // We encountered an inline global import.
        var (moduleName, globalName) = AssembleInlineImport(tail[0], context);
        tail = tail.Skip(1).ToArray();
        var globalType = AssembleGlobalType(moduleField, ref tail, context);
        // Fixed diagnostic kind: this used to say "function import", which
        // misreported trailing-token errors in global imports.
        AssertEmpty(context, "global import", tail);
        var index = module.AddImport(
            new ImportedGlobal(moduleName, globalName, globalType));
        globalRef = new LocalOrImportRef(true, index);
    }
    else
    {
        // We're dealing with a regular global definition.
        var globalType = AssembleGlobalType(moduleField, ref tail, context);
        var init = AssembleInitializerExpression(tail, context, module);
        var index = module.AddGlobal(
            new GlobalVariable(globalType, init));
        globalRef = new LocalOrImportRef(false, index);
    }
    context.GlobalContext.Define(globalId, globalRef);
    // Add entries to the export section if necessary.
    foreach (var name in exportNames)
    {
        AddExport(module, context.GlobalContext, globalRef, ExternalKind.Global, name);
    }
}
/// <summary>
/// Assembles a sequence of expressions into an initializer expression,
/// using a fresh instruction context with no named locals.
/// </summary>
/// <param name="expressions">The expressions to assemble.</param>
/// <param name="context">The module context, used for diagnostics.</param>
/// <param name="module">The module being assembled.</param>
/// <returns>An initializer expression.</returns>
private static InitializerExpression AssembleInitializerExpression(
    IReadOnlyList<SExpression> expressions,
    ModuleContext context,
    WasmFile module)
{
    var insnContext = new InstructionContext(new Dictionary<string, uint>(), context, module);
    var instructions = new List<Instruction>();
    foreach (var expression in expressions)
    {
        instructions.AddRange(AssembleExpressionInstruction(expression, insnContext));
    }
    return new InitializerExpression(instructions);
}
/// <summary>
/// Pops an optional leading identifier (label) off an expression list.
/// </summary>
/// <param name="tail">The expression list; the identifier, if any, is consumed.</param>
/// <returns>The label's name, or <c>null</c> when the list does not start with an identifier.</returns>
internal static string AssembleLabelOrNull(ref IReadOnlyList<SExpression> tail)
{
    if (tail.Count == 0 || !tail[0].IsIdentifier)
    {
        return null;
    }
    var label = (string)tail[0].Head.Value;
    tail = tail.Skip(1).ToArray();
    return label;
}
/// <summary>
/// Assembles the next instruction from an expression list. A plain keyword is
/// dispatched through the plain-instruction assembler table; a parenthesized
/// expression is handed to the folded-expression assembler. On error, the
/// remaining expressions are discarded and no instructions are produced.
/// </summary>
/// <param name="instruction">The expression list; consumed expressions are removed.</param>
/// <param name="context">The instruction context, used for dispatch and diagnostics.</param>
/// <returns>The assembled instructions.</returns>
private static IReadOnlyList<Instruction> AssembleInstruction(
    ref IReadOnlyList<SExpression> instruction,
    InstructionContext context)
{
    var first = instruction[0];
    if (first.IsKeyword)
    {
        PlainInstructionAssembler assembler;
        if (context.ModuleContext.Assembler.PlainInstructionAssemblers.TryGetValue(
            (string)first.Head.Value,
            out assembler))
        {
            // Consume the keyword; the assembler consumes its own immediates
            // from the remaining list via the ref parameter.
            instruction = instruction.Skip(1).ToArray();
            return new[] { assembler(first, ref instruction, context) };
        }
        else
        {
            context.ModuleContext.Log.Log(
                new LogEntry(
                    Severity.Error,
                    "syntax error",
                    Quotation.QuoteEvenInBold(
                        "unknown instruction keyword ",
                        first.Head.Span.Text,
                        "."),
                    Highlight(first)));
            // Discard the rest of the list: without knowing the keyword's
            // arity we cannot tell where its immediates end.
            instruction = Array.Empty<SExpression>();
            return Array.Empty<Instruction>();
        }
    }
    else if (first.IsCall)
    {
        // A parenthesized (folded) instruction.
        instruction = instruction.Skip(1).ToArray();
        return AssembleExpressionInstruction(first, context);
    }
    else
    {
        context.ModuleContext.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                Quotation.QuoteEvenInBold(
                    "expected an instruction; got ",
                    first.Head.Span.Text,
                    " instead."),
                Highlight(first)));
        instruction = Array.Empty<SExpression>();
        return Array.Empty<Instruction>();
    }
}
/// <summary>
/// Assembles a single parenthesized (folded) expression into a sequence of
/// instructions. 'block', 'loop' and 'if' receive special handling; any
/// other call is unfolded so that its operand expressions are emitted
/// before the instruction itself, per the WebAssembly text format.
/// </summary>
/// <param name="first">The expression to assemble.</param>
/// <param name="context">The instruction context.</param>
/// <returns>The assembled instructions, or an empty list on error.</returns>
private static IReadOnlyList<Instruction> AssembleExpressionInstruction(
    SExpression first,
    InstructionContext context)
{
    if (!first.IsCall)
    {
        context.ModuleContext.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                Quotation.QuoteEvenInBold(
                    "expected an expression, that is, a parenthesized instruction; got token ",
                    first.Head.Span.Text,
                    " instead."),
                Highlight(first)));
        return Array.Empty<Instruction>();
    }
    // Calls can be 'block' or 'loop' instructions, which are
    // superficial syntactic sugar. They can also be 'if' instructions
    // or folded instructions, which require a tiny bit of additional processing.
    var blockTail = first.Tail;
    if (first.IsCallTo("block") || first.IsCallTo("loop"))
    {
        return new[]
        {
            AssembleBlockOrLoop(
                first.IsCallTo("block") ? Operators.Block : Operators.Loop,
                first,
                ref blockTail,
                context,
                false)
        };
    }
    else if (first.IsCallTo("if"))
    {
        return AssembleIfExpression(first, ref blockTail, context);
    }
    else
    {
        // Folded form '(op operand...)': re-parse as if 'op' were a plain
        // keyword, then emit the remaining (folded) operand expressions
        // *before* the instruction they feed.
        IReadOnlyList<SExpression> childTail = new[] { SExpression.Create(first.Head) }
            .Concat(blockTail)
            .ToArray();
        var lastInstruction = AssembleInstruction(ref childTail, context);
        return childTail
            .SelectMany(x => AssembleExpressionInstruction(x, context))
            .Concat(lastInstruction)
            .ToArray();
    }
}
/// <summary>
/// Assembles a folded '(if ...)' expression: optional label and result type,
/// zero or more folded condition expressions, a mandatory '(then ...)' clause
/// and an optional '(else ...)' clause.
/// </summary>
/// <param name="first">The 'if' expression, used for diagnostics.</param>
/// <param name="blockTail">The 'if' expression's operands; consumed as parsed.</param>
/// <param name="context">The instruction context.</param>
/// <returns>
/// The condition instructions followed by the assembled 'if' instruction,
/// or an empty list on error.
/// </returns>
private static IReadOnlyList<Instruction> AssembleIfExpression(
    SExpression first,
    ref IReadOnlyList<SExpression> blockTail,
    InstructionContext context)
{
    var label = AssembleLabelOrNull(ref blockTail);
    var resultType = AssembleBlockResultType(ref blockTail, context);
    var childContext = new InstructionContext(label, context);
    // Everything before the 'then' clause is a folded condition expression.
    var foldedInsns = new List<Instruction>();
    while (blockTail.Count > 0 && !blockTail[0].IsCallTo("then"))
    {
        foldedInsns.AddRange(AssembleInstruction(ref blockTail, context));
    }
    if (blockTail.Count == 0)
    {
        context.ModuleContext.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                Quotation.QuoteEvenInBold(
                    // Fixed message: previously the quoted "then" and "clause."
                    // were rendered without a separating space.
                    "if-then-else instruction does not have a ", "then", " clause."),
                Highlight(first)));
        return Array.Empty<Instruction>();
    }
    var thenTail = blockTail[0].Tail;
    string endKw;
    var thenBody = AssembleBlockContents(first, ref thenTail, childContext, out endKw);
    if (blockTail.Count > 1)
    {
        if (blockTail[1].IsCallTo("else"))
        {
            var elseTail = blockTail[1].Tail;
            var elseBody = AssembleBlockContents(first, ref elseTail, childContext, out endKw);
            return foldedInsns.Concat(new[] { Operators.If.Create(resultType, thenBody, elseBody) }).ToArray();
        }
        else
        {
            context.ModuleContext.Log.Log(
                new LogEntry(
                    Severity.Error,
                    "syntax error",
                    Quotation.QuoteEvenInBold(
                        "unexpected expression; expected either nothing or an ", "else", " clause."),
                    Highlight(blockTail[1])));
        }
    }
    // No (valid) 'else' clause: emit an 'if' with an empty else body.
    return foldedInsns.Concat(new[] { Operators.If.Create(resultType, thenBody, Array.Empty<Instruction>()) }).ToArray();
}
/// <summary>
/// Assembles a plain (unfolded) 'if' instruction: optional label and result
/// type, a body terminated by 'else' or 'end', and an optional else body
/// terminated by 'end'.
/// </summary>
/// <param name="keyword">The 'if' keyword expression, for diagnostics.</param>
/// <param name="operands">The remaining operands; consumed as parsed.</param>
/// <param name="context">The instruction context.</param>
/// <returns>The assembled 'if' instruction.</returns>
private static Instruction AssembleIfInstruction(
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context)
{
    var label = AssembleLabelOrNull(ref operands);
    var resultType = AssembleBlockResultType(ref operands, context);
    var childContext = new InstructionContext(label, context);
    string endKw;
    // The then-body ends at the first 'else' or 'end' keyword.
    var thenBody = AssembleBlockContents(keyword, ref operands, childContext, out endKw, "else", "end");
    if (endKw == "else")
    {
        // Both 'else' and 'end' may be followed by a repetition of the label.
        ExpectOptionalLabel(ref operands, context, label);
        var elseBody = AssembleBlockContents(keyword, ref operands, childContext, out endKw, "end");
        ExpectOptionalLabel(ref operands, context, label);
        return Operators.If.Create(resultType, thenBody, elseBody);
    }
    else
    {
        ExpectOptionalLabel(ref operands, context, label);
        return Operators.If.Create(resultType, thenBody, Array.Empty<Instruction>());
    }
}
/// <summary>
/// Consumes an optional label after a block terminator and reports an error
/// when the label does not match the block's own label.
/// </summary>
/// <param name="operands">The operand list; a leading label is removed from it.</param>
/// <param name="context">The instruction context, used for logging.</param>
/// <param name="expectedLabel">The label the block was declared with, if any.</param>
private static void ExpectOptionalLabel(
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context,
    string expectedLabel)
{
    if (operands.Count == 0)
    {
        return;
    }
    var labelExpr = operands[0];
    var label = AssembleLabelOrNull(ref operands);
    if (label == null || label == expectedLabel)
    {
        // Either no label was given or it matches; both are fine.
        return;
    }
    context.ModuleContext.Log.Log(
        new LogEntry(
            Severity.Error,
            "syntax error",
            Quotation.QuoteEvenInBold(
                "unexpected label ",
                label,
                "; expected either no label or label ",
                expectedLabel,
                "."),
            Highlight(labelExpr)));
}
/// <summary>
/// Assembles a 'block' or 'loop' instruction: optional label, optional
/// result type and a body. When <paramref name="requireEnd"/> is set (the
/// plain, non-folded form), the body must be terminated by an 'end'
/// keyword, optionally followed by a repetition of the label.
/// </summary>
/// <param name="blockOperator">Either <c>Operators.Block</c> or <c>Operators.Loop</c>.</param>
/// <param name="parent">The parent expression, for diagnostics.</param>
/// <param name="operands">The operand list; consumed as parsed.</param>
/// <param name="context">The instruction context.</param>
/// <param name="requireEnd">Tells if an 'end' terminator is mandatory.</param>
/// <returns>The assembled block or loop instruction.</returns>
private static Instruction AssembleBlockOrLoop(
    BlockOperator blockOperator,
    SExpression parent,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context,
    bool requireEnd)
{
    var label = AssembleLabelOrNull(ref operands);
    var resultType = AssembleBlockResultType(ref operands, context);
    var childContext = new InstructionContext(label, context);
    var terminators = requireEnd ? new[] { "end" } : Array.Empty<string>();
    string endKw;
    var body = AssembleBlockContents(parent, ref operands, childContext, out endKw, terminators);
    if (requireEnd)
    {
        ExpectOptionalLabel(ref operands, context, label);
    }
    return blockOperator.Create(resultType, body);
}
/// <summary>
/// Assembles instructions until one of the given end keywords is reached or
/// the operand list is exhausted. If end keywords are specified but none is
/// found, a syntax error is reported.
/// </summary>
/// <param name="parent">The enclosing expression, for diagnostics.</param>
/// <param name="operands">The operand list; consumed as parsed, including the terminator.</param>
/// <param name="context">The instruction context.</param>
/// <param name="endKeywordFound">The terminator that ended the block, or <c>null</c>.</param>
/// <param name="endKeywords">The keywords that may terminate the block; may be empty.</param>
/// <returns>The assembled body instructions.</returns>
private static List<Instruction> AssembleBlockContents(
    SExpression parent,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context,
    out string endKeywordFound,
    params string[] endKeywords)
{
    var insns = new List<Instruction>();
    endKeywordFound = null;
    while (operands.Count > 0)
    {
        var first = operands[0];
        if (first.IsKeyword && endKeywords.Contains((string)first.Head.Value))
        {
            // Consume the terminator keyword and stop.
            operands = operands.Skip(1).ToArray();
            endKeywordFound = (string)first.Head.Value;
            break;
        }
        else
        {
            insns.AddRange(AssembleInstruction(ref operands, context));
        }
    }
    // A block that must be terminated ('end'/'else') but ran off the end of
    // the operand list is a syntax error.
    if (endKeywords.Length > 0 && endKeywordFound == null)
    {
        context.ModuleContext.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                Quotation.QuoteEvenInBold(
                    "expected instruction to be terminated by an ",
                    "end",
                    " keyword."),
                Highlight(parent)));
    }
    return insns;
}
/// <summary>
/// Parses an optional '(result valtype)' annotation at the head of a block's
/// operand list.
/// </summary>
/// <param name="operands">The operand list; the annotation is removed when present.</param>
/// <param name="context">The instruction context.</param>
/// <returns>The annotated result type, or <c>WasmType.Empty</c> when absent.</returns>
private static WasmType AssembleBlockResultType(ref IReadOnlyList<SExpression> operands, InstructionContext context)
{
    if (operands.Count == 0 || !operands[0].IsCallTo("result"))
    {
        return WasmType.Empty;
    }
    var resultSpec = operands[0];
    if (!AssertElementCount(resultSpec, resultSpec.Tail, 1, context.ModuleContext))
    {
        // Malformed annotation: leave it in place and fall back to 'empty'.
        return WasmType.Empty;
    }
    var blockType = (WasmType)AssembleValueType(resultSpec.Tail[0], context.ModuleContext);
    operands = operands.Skip(1).ToArray();
    return blockType;
}
/// <summary>
/// Assembles an instruction that takes a local-variable immediate
/// (e.g., 'local.get', 'local.set'). The immediate may be either an
/// unsigned index or a named local defined in the current function.
/// </summary>
/// <param name="localOperator">The operator to instantiate.</param>
/// <param name="keyword">The instruction keyword, for diagnostics.</param>
/// <param name="operands">The operand list; the immediate is popped from it.</param>
/// <param name="context">The instruction context.</param>
/// <returns>
/// The assembled instruction, or a 'nop' placeholder when the immediate is
/// malformed (an error is logged in that case).
/// </returns>
private static Instruction AssembleLocalInstruction(
    VarUInt32Operator localOperator,
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context)
{
    SExpression idOrIndex;
    if (AssertPopImmediate(keyword, ref operands, context, out idOrIndex))
    {
        if (idOrIndex.IsCall)
        {
            context.Log.Log(
                new LogEntry(
                    Severity.Error,
                    "syntax error",
                    Quotation.QuoteEvenInBold(
                        "expected a local identifier or index; got ",
                        idOrIndex.Head.Span.Text,
                        " expression instead."),
                    Highlight(idOrIndex)));
        }
        else if (idOrIndex.Head.Kind == Lexer.TokenKind.UnsignedInteger)
        {
            // A numeric index: use it directly.
            return localOperator.Create(AssembleUInt32(idOrIndex, context.ModuleContext));
        }
        else if (idOrIndex.Head.Kind == Lexer.TokenKind.Identifier)
        {
            // A named local: resolve it via the function's local-name map.
            var id = (string)idOrIndex.Head.Value;
            if (context.NamedLocalIndices.TryGetValue(id, out uint index))
            {
                return localOperator.Create(index);
            }
            else
            {
                // TODO: suggest a name? Pixie can do that for us.
                context.Log.Log(
                    new LogEntry(
                        Severity.Error,
                        "syntax error",
                        Quotation.QuoteEvenInBold(
                            "local variable identifier ",
                            id,
                            " is not defined in this scope."),
                        Highlight(idOrIndex)));
            }
        }
        else
        {
            context.Log.Log(
                new LogEntry(
                    Severity.Error,
                    "syntax error",
                    Quotation.QuoteEvenInBold(
                        "expected a local identifier or index; got token ",
                        idOrIndex.Head.Span.Text,
                        " instead."),
                    Highlight(idOrIndex)));
        }
    }
    // Error recovery: emit a harmless 'nop' so assembly can continue.
    return Operators.Nop.Create();
}
/// <summary>
/// Assembles an instruction whose immediate refers to an entity in an index
/// space (globals, functions, ...). The immediate may be an index or an
/// identifier; identifiers are resolved lazily via the identifier context,
/// which patches the instruction's immediate once the index is known.
/// </summary>
/// <param name="tableRefOperator">The operator to instantiate.</param>
/// <param name="refKind">A human-readable name of the index space, for diagnostics.</param>
/// <param name="getIdentifierContext">Selects the identifier context to resolve names in.</param>
/// <param name="keyword">The instruction keyword, for diagnostics.</param>
/// <param name="operands">The operand list; the immediate is popped from it.</param>
/// <param name="context">The instruction context.</param>
/// <returns>The assembled instruction; its immediate may be patched later.</returns>
private static Instruction AssembleTableRefInstruction<T>(
    VarUInt32Operator tableRefOperator,
    string refKind,
    Func<ModuleContext, IdentifierContext<T>> getIdentifierContext,
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context)
{
    SExpression idOrIndex;
    // Start with a zero immediate; it is overwritten once the reference resolves.
    var result = tableRefOperator.Create(0);
    if (AssertPopImmediate(keyword, ref operands, context, out idOrIndex))
    {
        var token = AssembleIdentifierOrIndex(idOrIndex, context.ModuleContext);
        getIdentifierContext(context.ModuleContext).Use(token, index => {
            result.Immediate = index;
        });
    }
    else
    {
        // NOTE(review): on this path AssertPopImmediate failed, so idOrIndex
        // holds the out-parameter default — confirm that accessing
        // idOrIndex.Head.Span.Text and Highlight(idOrIndex) is safe here.
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                Quotation.QuoteEvenInBold(
                    $"expected a {refKind} identifier or index; got ",
                    idOrIndex.Head.Span.Text,
                    " instead."),
                Highlight(idOrIndex)));
    }
    return result;
}
/// <summary>
/// Assembles an instruction that takes a global-variable immediate
/// (e.g., 'global.get', 'global.set') by delegating to the generic
/// index-space reference assembler with the global identifier context.
/// </summary>
/// <param name="globalOperator">The operator to instantiate.</param>
/// <param name="keyword">The instruction keyword, for diagnostics.</param>
/// <param name="operands">The operand list; the immediate is popped from it.</param>
/// <param name="context">The instruction context.</param>
/// <returns>The assembled instruction.</returns>
private static Instruction AssembleGlobalInstruction(
    VarUInt32Operator globalOperator,
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context)
{
    return AssembleTableRefInstruction(globalOperator, "global", c => c.GlobalContext, keyword, ref operands, context);
}
/// <summary>
/// Assembles a direct 'call' instruction by delegating to the generic
/// index-space reference assembler with the function identifier context.
/// </summary>
/// <param name="keyword">The instruction keyword, for diagnostics.</param>
/// <param name="operands">The operand list; the callee immediate is popped from it.</param>
/// <param name="context">The instruction context.</param>
/// <returns>The assembled 'call' instruction.</returns>
private static Instruction AssembleCallInstruction(
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context)
{
    return AssembleTableRefInstruction(Operators.Call, "function", c => c.FunctionContext, keyword, ref operands, context);
}
/// <summary>
/// Assembles a 'call_indirect' instruction. The instruction's type use is
/// parsed and deduplicated into the module's type section; named parameters
/// are rejected because an indirect call cannot bind them.
/// </summary>
/// <param name="keyword">The instruction keyword, for diagnostics.</param>
/// <param name="operands">The operand list; the type use is popped from it.</param>
/// <param name="context">The instruction context.</param>
/// <returns>The assembled 'call_indirect' instruction.</returns>
private static Instruction AssembleCallIndirectInstruction(
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context)
{
    // Collect any named parameters so they can be reported as errors.
    var identifiers = new Dictionary<string, uint>();
    var typeUse = AssembleTypeUse(keyword, ref operands, context.ModuleContext, context.Module, true, identifiers);
    if (identifiers.Count > 0)
    {
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                Quotation.QuoteEvenInBold(
                    "indirect calls cannot bind names to their parameter declarations; offending parameter name: ",
                    identifiers.Keys.First(),
                    "."),
                Highlight(keyword)));
    }
    return Operators.CallIndirect.Create(AddOrReuseFunctionType(typeUse, context.Module));
}
/// <summary>
/// Assembles a branch instruction ('br' or 'br_if') whose immediate is a
/// break depth, given either as an integer or as an enclosing block's label.
/// </summary>
/// <param name="brOperator">The operator to instantiate.</param>
/// <param name="keyword">The instruction keyword, for diagnostics.</param>
/// <param name="operands">The operand list; the immediate is popped from it.</param>
/// <param name="context">The instruction context.</param>
/// <returns>The assembled branch instruction.</returns>
private static Instruction AssembleBrInstruction(
    VarUInt32Operator brOperator,
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context)
{
    SExpression idOrIndex;
    if (AssertPopImmediate(keyword, ref operands, context, out idOrIndex))
    {
        // Labels are translated to break depths by unwinding the scope chain.
        var depth = AssembleLabelOrDepth(idOrIndex, context);
        return brOperator.Create(depth);
    }
    else
    {
        // NOTE(review): on this path AssertPopImmediate failed, so idOrIndex
        // holds the out-parameter default — confirm that accessing
        // idOrIndex.Head.Span.Text and Highlight(idOrIndex) is safe here.
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                Quotation.QuoteEvenInBold(
                    "expected a label or break depth; got ",
                    idOrIndex.Head.Span.Text,
                    " instead."),
                Highlight(idOrIndex)));
        return brOperator.Create(0);
    }
}
/// <summary>
/// Assembles a 'br_table' instruction from a sequence of break targets, each
/// given as a label or an integer depth. The final target is the default.
/// </summary>
/// <param name="keyword">The instruction keyword, for diagnostics.</param>
/// <param name="operands">The operand list; target immediates are popped from it.</param>
/// <param name="context">The instruction context.</param>
/// <returns>
/// The assembled 'br_table' instruction, or a 'nop' placeholder when no
/// break target could be parsed (an error is logged in that case).
/// </returns>
private static Instruction AssembleBrTableInstruction(
    SExpression keyword,
    ref IReadOnlyList<SExpression> operands,
    InstructionContext context)
{
    var depths = new List<uint>();
    if (operands.Count == 0)
    {
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                Quotation.QuoteEvenInBold(
                    "expected a branch target identifier or integer as argument to ",
                    "br_table",
                    "; got nothing."),
                Highlight(keyword)));
        return Operators.Nop.Create();
    }
    int i = 0;
    do
    {
        var immediate = operands[i];
        if (immediate.IsKeyword || immediate.IsCall)
        {
            break;
        }
        depths.Add(AssembleLabelOrDepth(immediate, context));
        i++;
    } while (operands.Count > i);
    operands = operands.Skip(i).ToArray();
    // Bug fix: if the first operand was a keyword or call, no target was
    // parsed and indexing 'depths' below would throw. Report an error and
    // recover with a 'nop' instead.
    if (depths.Count == 0)
    {
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                Quotation.QuoteEvenInBold(
                    "expected a branch target identifier or integer as argument to ",
                    "br_table",
                    "; got nothing."),
                Highlight(keyword)));
        return Operators.Nop.Create();
    }
    // The last target is the default; all others form the jump table.
    return Operators.BrTable.Create(depths.Take(depths.Count - 1), depths[depths.Count - 1]);
}
/// <summary>
/// Turns a break-target expression into a break depth. An unsigned integer
/// is used as-is; a label is resolved by counting how many enclosing
/// instruction contexts must be unwound to reach a block with that label.
/// </summary>
/// <param name="labelOrDepth">The break-target expression.</param>
/// <param name="context">The instruction context whose scope chain is searched.</param>
/// <returns>The break depth, or 0 when the label is undefined (an error is logged).</returns>
private static uint AssembleLabelOrDepth(
    SExpression labelOrDepth,
    InstructionContext context)
{
    var token = AssembleIdentifierOrIndex(labelOrDepth, context.ModuleContext);
    if (token.Kind == Lexer.TokenKind.UnsignedInteger)
    {
        return AssembleUInt32(labelOrDepth, context.ModuleContext);
    }
    else
    {
        var label = (string)token.Value;
        // We can turn a label into a break depth by iteratively unwinding the chain
        // of scopes until we find a scope with a label that matches the label we're
        // looking for. The number of scopes we had to unwind then corresponds to the
        // break depth.
        uint depth = 0;
        bool found = false;
        var depthContext = context;
        while (depthContext != null)
        {
            if (depthContext.LabelOrNull == label)
            {
                found = true;
                break;
            }
            else
            {
                // Pop a context and see if the next context is the one we're looking for.
                depth++;
                depthContext = depthContext.ParentOrNull;
            }
        }
        if (found)
        {
            return depth;
        }
        else
        {
            context.Log.Log(
                new LogEntry(
                    Severity.Error,
                    "syntax error",
                    Quotation.QuoteEvenInBold(
                        "label ",
                        label,
                        " is not defined here."),
                    Highlight(labelOrDepth)));
            // Error recovery: fall back to depth 0.
            return 0;
        }
    }
}
/// <summary>
/// Parses a run of '(local ...)' or '(param ...)' declarations at the head
/// of an expression list. A declaration either names a single value
/// ('(local $x i32)') or lists several anonymous values ('(local i32 i64)').
/// </summary>
/// <param name="tail">The expression list; parsed declarations are removed from it.</param>
/// <param name="localIdentifiers">
/// An optional map to populate with named locals; indices are offset by
/// <paramref name="parameterCount"/>. May be <c>null</c>.
/// </param>
/// <param name="context">The module context.</param>
/// <param name="localKeyword">The declaration keyword to match ('local' or 'param').</param>
/// <param name="parameterCount">The index offset for named locals.</param>
/// <returns>The declared value types, in declaration order.</returns>
private static List<WasmValueType> AssembleLocals(
    ref IReadOnlyList<SExpression> tail,
    Dictionary<string, uint> localIdentifiers,
    ModuleContext context,
    string localKeyword,
    int parameterCount)
{
    var locals = new List<WasmValueType>();
    // Parse locals.
    while (tail.Count > 0 && tail[0].IsCallTo(localKeyword))
    {
        var paramSpec = tail[0];
        var paramTail = paramSpec.Tail;
        if (paramTail.Count > 0 && paramTail[0].IsIdentifier)
        {
            // Named form: exactly one identifier followed by one value type.
            var id = (string)paramTail[0].Head.Value;
            paramTail = paramTail.Skip(1).ToArray();
            if (!AssertNonEmpty(paramSpec, paramTail, localKeyword, context))
            {
                continue;
            }
            var valType = AssembleValueType(paramTail[0], context);
            locals.Add(valType);
            paramTail = paramTail.Skip(1).ToArray();
            AssertEmpty(context, localKeyword, paramTail);
            if (localIdentifiers != null)
            {
                localIdentifiers[id] = (uint)(parameterCount + locals.Count - 1);
            }
        }
        else
        {
            // Anonymous form: zero or more value types.
            locals.AddRange(paramTail.Select(x => AssembleValueType(x, context)));
        }
        tail = tail.Skip(1).ToArray();
    }
    return locals;
}
/// <summary>
/// Assembles a table type: resizable limits followed by the element type
/// keyword 'funcref'.
/// </summary>
/// <param name="parent">The parent expression, for diagnostics.</param>
/// <param name="tail">The table-type expressions.</param>
/// <param name="context">The module context.</param>
/// <returns>
/// The assembled table type; a zero-sized 'anyfunc' table on error.
/// </returns>
private static TableType AssembleTableType(SExpression parent, IReadOnlyList<SExpression> tail, ModuleContext context)
{
    if (tail.Count < 2 || tail.Count > 3)
    {
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                Quotation.QuoteEvenInBold(
                    "expected a table type, that is, resizable limits followed by ",
                    "funcref",
                    ", the table element type."),
                Highlight(parent)));
        return new TableType(WasmType.AnyFunc, new ResizableLimits(0));
    }
    var limits = AssembleLimits(parent, tail.Take(tail.Count - 1).ToArray(), context);
    // Validate the trailing element type; 'funcref' is the only legal
    // element type, so the result is always 'anyfunc' and the previously
    // unused local has been removed.
    AssembleElemType(tail[tail.Count - 1], context);
    return new TableType(WasmType.AnyFunc, limits);
}
/// <summary>
/// Assembles a table element type expression. Only the 'funcref' keyword is
/// accepted; anything else is reported as a syntax error.
/// </summary>
/// <param name="expression">The element type expression.</param>
/// <param name="context">The module context.</param>
/// <returns>Always <c>WasmType.AnyFunc</c>, the sole legal element type.</returns>
private static WasmType AssembleElemType(SExpression expression, ModuleContext context)
{
    if (expression.IsSpecificKeyword("funcref"))
    {
        return WasmType.AnyFunc;
    }
    context.Log.Log(
        new LogEntry(
            Severity.Error,
            "syntax error",
            Quotation.QuoteEvenInBold(
                "unexpected table type expression; expected ",
                "funcref", "."),
            Highlight(expression)));
    // Error recovery: pretend the element type was 'funcref'.
    return WasmType.AnyFunc;
}
/// <summary>
/// Assembles a global type expression: either a bare value type (immutable
/// global) or a '(mut valtype)' call (mutable global).
/// </summary>
/// <param name="expression">The global type expression.</param>
/// <param name="context">The module context.</param>
/// <returns>The assembled global type.</returns>
private static GlobalType AssembleGlobalType(SExpression expression, ModuleContext context)
{
    if (!expression.IsCallTo("mut"))
    {
        // A plain value type describes an immutable global.
        return new GlobalType(AssembleValueType(expression, context), false);
    }
    // '(mut valtype)': a mutable global. Fall back to i32 when malformed.
    return AssertElementCount(expression, expression.Tail, 1, context)
        ? new GlobalType(AssembleValueType(expression.Tail[0], context), true)
        : new GlobalType(WasmValueType.Int32, true);
}
/// <summary>
/// Pops and assembles a global type from the head of an expression list.
/// </summary>
/// <param name="parent">The parent expression, for diagnostics.</param>
/// <param name="tail">The expression list; the global type is removed from it.</param>
/// <param name="context">The module context.</param>
/// <returns>
/// The assembled global type; a mutable i32 when the list is empty.
/// </returns>
private static GlobalType AssembleGlobalType(SExpression parent, ref IReadOnlyList<SExpression> tail, ModuleContext context)
{
    if (tail.Count > 0)
    {
        var typeExpr = tail[0];
        tail = tail.Skip(1).ToArray();
        return AssembleGlobalType(typeExpr, context);
    }
    context.Log.Log(
        new LogEntry(
            Severity.Error,
            "syntax error",
            "expected a global type.",
            Highlight(parent)));
    // Error recovery: a mutable i32 placeholder.
    return new GlobalType(WasmValueType.Int32, true);
}
/// <summary>
/// Assembles a type use: an optional '(type ...)' reference followed by
/// inline '(param ...)' and '(result ...)' declarations. When both a
/// reference and inline declarations are present, they must agree.
/// </summary>
/// <param name="parent">The enclosing expression, for diagnostics.</param>
/// <param name="tail">The expression list; consumed expressions are removed from it.</param>
/// <param name="context">The module context.</param>
/// <param name="module">The module being assembled.</param>
/// <param name="allowTypeRef">Tells if a '(type ...)' reference is allowed.</param>
/// <param name="parameterIdentifiers">An optional map to populate with named parameters.</param>
/// <returns>The assembled function type.</returns>
private static FunctionType AssembleTypeUse(
    SExpression parent,
    ref IReadOnlyList<SExpression> tail,
    ModuleContext context,
    WasmFile module,
    bool allowTypeRef = false,
    Dictionary<string, uint> parameterIdentifiers = null)
{
    var result = new FunctionType();
    FunctionType referenceType = null;
    if (allowTypeRef && tail.Count > 0 && tail[0].IsCallTo("type"))
    {
        var typeRef = tail[0];
        tail = tail.Skip(1).ToArray();
        if (AssertElementCount(typeRef, typeRef.Tail, 1, context))
        {
            uint referenceTypeIndex;
            if ((typeRef.Tail[0].IsIdentifier
                && context.TypeContext.TryGetDefinition((string)typeRef.Tail[0].Head.Value, out referenceTypeIndex)))
            {
                // A named type that resolved; the out parameter holds its index.
            }
            else if (!typeRef.Tail[0].IsCall
                && typeRef.Tail[0].Head.Kind == Lexer.TokenKind.UnsignedInteger)
            {
                referenceTypeIndex = AssembleUInt32(typeRef.Tail[0], context);
            }
            else
            {
                context.Log.Log(
                    new LogEntry(
                        Severity.Error,
                        "syntax error",
                        "expected an identifier or unsigned integer.",
                        Highlight(typeRef.Tail[0])));
                return result;
            }
            var funTypes = module.GetFirstSectionOrNull<TypeSection>();
            // Bug fix: 'funTypes' is null when the module has no type section
            // at all; previously that dereferenced null. Treat it like an
            // out-of-range index.
            if (funTypes == null || referenceTypeIndex >= funTypes.FunctionTypes.Count)
            {
                context.Log.Log(
                    new LogEntry(
                        Severity.Error,
                        "syntax error",
                        Quotation.QuoteEvenInBold(
                            "index ", referenceTypeIndex.ToString(), " does not correspond to a type."),
                        Highlight(typeRef.Tail[0])));
                return result;
            }
            referenceType = funTypes.FunctionTypes[(int)referenceTypeIndex];
            // With no inline declarations, the referenced type is used as-is.
            if (tail.Count == 0 || (!tail[0].IsCallTo("param") && !tail[0].IsCallTo("result")))
            {
                return referenceType;
            }
        }
    }
    // Parse parameters.
    result.ParameterTypes.AddRange(AssembleLocals(ref tail, parameterIdentifiers, context, "param", 0));
    // Parse results.
    while (tail.Count > 0 && tail[0].IsCallTo("result"))
    {
        var resultSpec = tail[0];
        var resultTail = resultSpec.Tail;
        result.ReturnTypes.AddRange(resultTail.Select(x => AssembleValueType(x, context)));
        tail = tail.Skip(1).ToArray();
    }
    if (referenceType != null)
    {
        // Inline declarations must match the referenced type exactly.
        if (ConstFunctionTypeComparer.Instance.Equals(referenceType, result))
        {
            return referenceType;
        }
        else
        {
            context.Log.Log(
                new LogEntry(
                    Severity.Error,
                    "syntax error",
                    Quotation.QuoteEvenInBold(
                        "expected locally-defined type ",
                        result.ToString(),
                        " to equal previously-defined type ",
                        referenceType.ToString(),
                        "."),
                    Highlight(parent)));
        }
    }
    return result;
}
/// <summary>
/// Finds the index of an existing function type in the module's type section
/// that is structurally equal to the given type, adding the type to the
/// module when no such entry exists yet.
/// </summary>
/// <param name="type">The function type to find or add.</param>
/// <param name="module">The module to search and, if needed, extend.</param>
/// <returns>The index of the (possibly new) function type.</returns>
private static uint AddOrReuseFunctionType(
    FunctionType type,
    WasmFile module)
{
    var typeSection = module.GetFirstSectionOrNull<TypeSection>();
    if (typeSection != null)
    {
        int existingIndex = typeSection.FunctionTypes.FindIndex(
            candidate => ConstFunctionTypeComparer.Instance.Equals(type, candidate));
        if (existingIndex >= 0)
        {
            // Reuse the structurally identical entry.
            return (uint)existingIndex;
        }
    }
    // No type section yet or no matching entry: append a fresh one.
    return module.AddFunctionType(type);
}
/// <summary>
/// Assembles a value type keyword: one of 'i32', 'i64', 'f32' or 'f64'.
/// </summary>
/// <param name="expression">The value type expression.</param>
/// <param name="context">The module context.</param>
/// <returns>
/// The matching value type; <c>WasmValueType.Int32</c> on error.
/// </returns>
private static WasmValueType AssembleValueType(SExpression expression, ModuleContext context)
{
    if (expression.IsSpecificKeyword("i32"))
    {
        return WasmValueType.Int32;
    }
    else if (expression.IsSpecificKeyword("i64"))
    {
        return WasmValueType.Int64;
    }
    else if (expression.IsSpecificKeyword("f32"))
    {
        return WasmValueType.Float32;
    }
    else if (expression.IsSpecificKeyword("f64"))
    {
        return WasmValueType.Float64;
    }
    else
    {
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "unexpected token",
                Quotation.QuoteEvenInBold(
                    // Fixed message: was "unexpected a value type"; also added
                    // spaces after the comma separators.
                    "expected a value type, that is, ",
                    "i32", ", ", "i64", ", ", "f32", " or ", "f64", "."),
                Highlight(expression)));
        // Error recovery: default to i32.
        return WasmValueType.Int32;
    }
}
/// <summary>
/// Adds an entry to the module's export section for a directly-known entity.
/// A placeholder export with index 0 is appended immediately; the identifier
/// context later patches the real index in place once it is resolved.
/// </summary>
/// <param name="module">The module to add the export to.</param>
/// <param name="context">The identifier context used to resolve the entity's index.</param>
/// <param name="value">The entity being exported.</param>
/// <param name="kind">The kind of entity being exported.</param>
/// <param name="exportName">The name under which the entity is exported.</param>
private static void AddExport<T>(
    WasmFile module,
    IdentifierContext<T> context,
    T value,
    ExternalKind kind,
    string exportName)
{
    var export = new ExportedValue(exportName, kind, 0);
    module.AddExport(export);
    var exportSection = module.GetFirstSectionOrNull<ExportSection>();
    // Capture the slot we just appended so the callback can overwrite it.
    int index = exportSection.Exports.Count - 1;
    context.Use(
        value,
        i => { exportSection.Exports[index] = new ExportedValue(exportName, kind, i); });
}
/// <summary>
/// Adds an entry to the module's export section for an entity referred to by
/// an identifier or index token. A placeholder export with index 0 is
/// appended immediately; the identifier context later patches the real index
/// in place once the token is resolved.
/// </summary>
/// <param name="module">The module to add the export to.</param>
/// <param name="context">The identifier context used to resolve the token.</param>
/// <param name="identifier">The identifier or index token naming the entity.</param>
/// <param name="kind">The kind of entity being exported.</param>
/// <param name="exportName">The name under which the entity is exported.</param>
private static void AddExport<T>(
    WasmFile module,
    IdentifierContext<T> context,
    Lexer.Token identifier,
    ExternalKind kind,
    string exportName)
{
    var export = new ExportedValue(exportName, kind, 0);
    module.AddExport(export);
    var exportSection = module.GetFirstSectionOrNull<ExportSection>();
    // Capture the slot we just appended so the callback can overwrite it.
    int index = exportSection.Exports.Count - 1;
    context.Use(
        identifier,
        i => { exportSection.Exports[index] = new ExportedValue(exportName, kind, i); });
}
/// <summary>
/// Extracts an identifier or unsigned-integer token from an expression,
/// reporting a syntax error for any other token kind.
/// </summary>
/// <param name="expression">The expression to extract the token from.</param>
/// <param name="context">The module context, used for logging.</param>
/// <returns>The expression's head token, valid or not.</returns>
private static Lexer.Token AssembleIdentifierOrIndex(SExpression expression, ModuleContext context)
{
    var head = expression.Head;
    bool isAcceptable = head.Kind == Lexer.TokenKind.UnsignedInteger
        || head.Kind == Lexer.TokenKind.Identifier;
    if (!isAcceptable)
    {
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                "expected an identifier or unsigned integer.",
                Highlight(expression)));
    }
    // The token is returned even on error so callers can keep going.
    return head;
}
/// <summary>
/// Assembles an inline '(import "module" "name")' expression into a pair of
/// module and entity names.
/// </summary>
/// <param name="import">The import expression.</param>
/// <param name="context">The module context.</param>
/// <returns>
/// A (module name, entity name) pair; two empty strings on error.
/// </returns>
private static (string, string) AssembleInlineImport(SExpression import, ModuleContext context)
{
    if (import.Tail.Count == 2)
    {
        return (AssembleString(import.Tail[0], context), AssembleString(import.Tail[1], context));
    }
    context.Log.Log(
        new LogEntry(
            Severity.Error,
            "syntax error",
            Quotation.QuoteEvenInBold(
                "encountered ",
                import.Tail.Count.ToString(),
                " elements; expected exactly two names."),
            Highlight(import)));
    return ("", "");
}
/// <summary>
/// Assembles a string literal expression, logging errors to the module
/// context's log.
/// </summary>
/// <param name="expression">The string literal expression.</param>
/// <param name="context">The module context whose log receives errors.</param>
/// <returns>The decoded string; empty on error.</returns>
private static string AssembleString(SExpression expression, ModuleContext context)
{
    return AssembleString(expression, context.Log);
}
/// <summary>
/// Assembles a string literal expression by decoding its byte payload as
/// UTF-8.
/// </summary>
/// <param name="expression">The string literal expression.</param>
/// <param name="log">The log to send errors to.</param>
/// <returns>The decoded string; empty on error.</returns>
internal static string AssembleString(SExpression expression, ILog log)
{
    return Encoding.UTF8.GetString(AssembleByteString(expression, log));
}
/// <summary>
/// Extracts the raw byte payload of a string literal expression.
/// </summary>
/// <param name="expression">The string literal expression.</param>
/// <param name="log">The log to send errors to.</param>
/// <returns>The literal's bytes; an empty array on error.</returns>
internal static byte[] AssembleByteString(SExpression expression, ILog log)
{
    bool isStringToken = !expression.IsCall
        && expression.Head.Kind == Lexer.TokenKind.String;
    if (isStringToken)
    {
        // The lexer already decoded the literal into a byte array.
        return (byte[])expression.Head.Value;
    }
    log.Log(
        new LogEntry(
            Severity.Error,
            "syntax error",
            Quotation.QuoteEvenInBold(
                "expected a string literal."),
            Highlight(expression)));
    return Array.Empty<byte>();
}
/// <summary>
/// Checks that an expression's tail contains exactly the expected number of
/// elements, logging a syntax error otherwise.
/// </summary>
/// <param name="expression">The expression to highlight in diagnostics.</param>
/// <param name="tail">The element list to check.</param>
/// <param name="count">The expected element count.</param>
/// <param name="context">The module context, used for logging.</param>
/// <returns><c>true</c> if the count matches; otherwise, <c>false</c>.</returns>
private static bool AssertElementCount(
    SExpression expression,
    IReadOnlyList<SExpression> tail,
    int count,
    ModuleContext context)
{
    if (tail.Count != count)
    {
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                Quotation.QuoteEvenInBold(
                    "encountered ",
                    tail.Count.ToString(),
                    " elements; expected exactly ", count.ToString(), "."),
                Highlight(expression)));
        return false;
    }
    return true;
}
/// <summary>
/// Checks that an expression's tail is non-empty, logging a syntax error
/// when it is empty.
/// </summary>
/// <param name="expression">The expression to highlight in diagnostics.</param>
/// <param name="tail">The element list to check.</param>
/// <param name="kind">A human-readable description of the construct, for diagnostics.</param>
/// <param name="context">The module context, used for logging.</param>
/// <returns><c>true</c> if the tail is non-empty; otherwise, <c>false</c>.</returns>
private static bool AssertNonEmpty(
    SExpression expression,
    IReadOnlyList<SExpression> tail,
    string kind,
    ModuleContext context)
{
    if (tail.Count > 0)
    {
        return true;
    }
    context.Log.Log(
        new LogEntry(
            Severity.Error,
            "syntax error",
            Quotation.QuoteEvenInBold(kind + " is unexpectedly empty."),
            Highlight(expression)));
    return false;
}
/// <summary>
/// Concatenates the byte payloads of a sequence of string literal
/// expressions into a single byte array.
/// </summary>
/// <param name="tail">The string literal expressions.</param>
/// <param name="log">The log to send errors to.</param>
/// <returns>The concatenated bytes, in order.</returns>
private static byte[] AssembleDataString(
    IReadOnlyList<SExpression> tail,
    ILog log)
{
    return tail.SelectMany(expression => AssembleByteString(expression, log)).ToArray();
}
/// <summary>
/// Concatenates the byte payloads of a sequence of string literal
/// expressions, logging errors to the module context's log.
/// </summary>
/// <param name="tail">The string literal expressions.</param>
/// <param name="context">The module context whose log receives errors.</param>
/// <returns>The concatenated bytes, in order.</returns>
private static byte[] AssembleDataString(
    IReadOnlyList<SExpression> tail,
    ModuleContext context)
{
    return AssembleDataString(tail, context.Log);
}
/// <summary>
/// Reports a syntax error if an expression list still contains elements,
/// highlighting the first leftover expression.
/// </summary>
/// <param name="context">The module context, used for logging.</param>
/// <param name="kind">A human-readable description of the construct, for diagnostics.</param>
/// <param name="tail">The expression list that should be empty.</param>
private static void AssertEmpty(
    ModuleContext context,
    string kind,
    IEnumerable<SExpression> tail)
{
    var remaining = tail.ToArray();
    if (remaining.Length == 0)
    {
        return;
    }
    context.Log.Log(
        new LogEntry(
            Severity.Error,
            "syntax error",
            Quotation.QuoteEvenInBold("", kind, " has an unexpected trailing expression."),
            Highlight(remaining[0])));
}
/// <summary>
/// Assembles resizable limits: an initial size optionally followed by a
/// maximum size.
/// </summary>
/// <param name="parent">The parent expression, for diagnostics.</param>
/// <param name="tail">The limit expressions (one or two unsigned integers).</param>
/// <param name="context">The module context.</param>
/// <returns>The assembled limits; zero-sized limits on error.</returns>
private static ResizableLimits AssembleLimits(SExpression parent, IReadOnlyList<SExpression> tail, ModuleContext context)
{
    if (tail.Count == 0)
    {
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                "limits expression is empty.",
                Highlight(parent)));
        return new ResizableLimits(0);
    }
    // The initial size is always parsed, even when the list is too long,
    // so malformed first elements still produce their own diagnostics.
    var initial = AssembleUInt32(tail[0], context);
    switch (tail.Count)
    {
        case 1:
            return new ResizableLimits(initial);
        case 2:
            return new ResizableLimits(initial, AssembleUInt32(tail[1], context));
        default:
            context.Log.Log(
                new LogEntry(
                    Severity.Error,
                    "syntax error",
                    "limits expression contains more than two elements.",
                    Highlight(tail[2])));
            return new ResizableLimits(0);
    }
}
/// <summary>
/// Assembles an expression into a 32-bit unsigned integer. Only unsigned
/// integer tokens that fit in 32 bits are accepted.
/// </summary>
/// <param name="expression">The expression to assemble.</param>
/// <param name="context">The module context.</param>
/// <returns>The parsed value; 0 on error.</returns>
private static uint AssembleUInt32(
    SExpression expression,
    ModuleContext context)
{
    return AssembleInt<uint>(
        expression,
        context,
        "32-bit unsigned integer",
        new[] { Lexer.TokenKind.UnsignedInteger },
        // Reject values that overflow 32 bits; null signals "out of range".
        (kind, data) => data <= uint.MaxValue ? (uint)data : (uint?)null);
}
/// <summary>
/// Assembles an expression into a 32-bit integer whose sign interpretation
/// is left to the consumer: unsigned tokens are accepted up to uint.MaxValue
/// and reinterpreted as their two's-complement bit pattern, while signed
/// tokens must fit the int range.
/// </summary>
/// <param name="expression">The expression to assemble.</param>
/// <param name="context">The module context.</param>
/// <returns>The parsed value; 0 on error.</returns>
private static int AssembleSignlessInt32(
    SExpression expression,
    ModuleContext context)
{
    return AssembleInt<int>(
        expression,
        context,
        "32-bit integer",
        new[] { Lexer.TokenKind.UnsignedInteger, Lexer.TokenKind.SignedInteger },
        (kind, data) => {
            if (expression.Head.Kind == Lexer.TokenKind.UnsignedInteger && data <= uint.MaxValue)
            {
                // Reinterpret the unsigned bit pattern as a signed value.
                return (int)(uint)data;
            }
            else if (data >= int.MinValue && data <= int.MaxValue)
            {
                return (int)data;
            }
            else
            {
                // Out of range for both interpretations.
                return null;
            }
        });
}
/// <summary>
/// Assembles an expression into a 64-bit integer whose sign interpretation
/// is left to the consumer: unsigned tokens are accepted up to
/// ulong.MaxValue and reinterpreted as their two's-complement bit pattern,
/// while signed tokens must fit the long range.
/// </summary>
/// <param name="expression">The expression to assemble.</param>
/// <param name="context">The module context.</param>
/// <returns>The parsed value; 0 on error.</returns>
private static long AssembleSignlessInt64(
    SExpression expression,
    ModuleContext context)
{
    return AssembleInt<long>(
        expression,
        context,
        "64-bit integer",
        new[] { Lexer.TokenKind.UnsignedInteger, Lexer.TokenKind.SignedInteger },
        (kind, data) => {
            // 'data' is a BigInteger, so the ulong.MaxValue bound is meaningful.
            if (expression.Head.Kind == Lexer.TokenKind.UnsignedInteger && data <= ulong.MaxValue)
            {
                // Reinterpret the unsigned bit pattern as a signed value.
                return (long)(ulong)data;
            }
            else if (data >= long.MinValue && data <= long.MaxValue)
            {
                return (long)data;
            }
            else
            {
                // Out of range for both interpretations.
                return null;
            }
        });
}
/// <summary>
/// Assembles an expression into a 32-bit float. Float literals are used
/// directly; integer literals are converted.
/// </summary>
/// <param name="expression">The expression to assemble.</param>
/// <param name="context">The module context.</param>
/// <returns>The parsed value; NaN on error.</returns>
private static float AssembleFloat32(SExpression expression, ModuleContext context)
{
    if (!expression.IsCall)
    {
        var kind = expression.Head.Kind;
        if (kind == Lexer.TokenKind.Float)
        {
            return (float)(FloatLiteral)expression.Head.Value;
        }
        bool isIntegerToken = kind == Lexer.TokenKind.UnsignedInteger
            || kind == Lexer.TokenKind.SignedInteger;
        if (isIntegerToken)
        {
            // Integer literals are accepted and converted to float.
            return (float)(BigInteger)expression.Head.Value;
        }
    }
    context.Log.Log(
        new LogEntry(
            Severity.Error,
            "syntax error",
            "expected a floating point number.",
            Highlight(expression)));
    return float.NaN;
}
/// <summary>
/// Assembles an expression into a 64-bit float. Float literals are used
/// directly; integer literals are converted.
/// </summary>
/// <param name="expression">The expression to assemble.</param>
/// <param name="context">The module context.</param>
/// <returns>The parsed value; NaN on error.</returns>
private static double AssembleFloat64(
    SExpression expression,
    ModuleContext context)
{
    if (!expression.IsCall)
    {
        var kind = expression.Head.Kind;
        if (kind == Lexer.TokenKind.Float)
        {
            return (double)(FloatLiteral)expression.Head.Value;
        }
        bool isIntegerToken = kind == Lexer.TokenKind.UnsignedInteger
            || kind == Lexer.TokenKind.SignedInteger;
        if (isIntegerToken)
        {
            // Integer literals are accepted and converted to double.
            return (double)(BigInteger)expression.Head.Value;
        }
    }
    context.Log.Log(
        new LogEntry(
            Severity.Error,
            "syntax error",
            "expected a floating point number.",
            Highlight(expression)));
    return double.NaN;
}
/// <summary>
/// Generic integer-assembly helper: validates that an expression is a token
/// of an acceptable kind and coerces its BigInteger payload to the target
/// type via a caller-supplied conversion.
/// </summary>
/// <param name="expression">The expression to assemble.</param>
/// <param name="context">The module context, used for logging.</param>
/// <param name="description">A human-readable type description, for diagnostics.</param>
/// <param name="acceptableKinds">The token kinds that may encode the integer.</param>
/// <param name="tryCoerceInt">
/// Converts the token's value to the target type; returns <c>null</c> when
/// the value is out of range.
/// </param>
/// <returns>The coerced value; <c>default(T)</c> on error.</returns>
private static T AssembleInt<T>(
    SExpression expression,
    ModuleContext context,
    string description,
    Lexer.TokenKind[] acceptableKinds,
    Func<Lexer.TokenKind, BigInteger, T?> tryCoerceInt)
    where T : struct
{
    if (expression.IsCall)
    {
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                $"expected a {description}; got a call.",
                Highlight(expression)));
        return default(T);
    }
    else if (!acceptableKinds.Contains(expression.Head.Kind))
    {
        context.Log.Log(
            new LogEntry(
                Severity.Error,
                "syntax error",
                // Fixed message: was missing the word "got" before "token",
                // unlike the analogous diagnostics elsewhere in this file.
                Quotation.QuoteEvenInBold($"expected a {description}; got token ", expression.Head.Span.Text, "."),
                Highlight(expression)));
        return default(T);
    }
    else
    {
        var data = (BigInteger)expression.Head.Value;
        var coerced = tryCoerceInt(expression.Head.Kind, data);
        if (coerced.HasValue)
        {
            return coerced.Value;
        }
        else
        {
            context.Log.Log(
                new LogEntry(
                    Severity.Error,
                    "syntax error",
                    $"expected a {description}; got an integer that is out of range.",
                    Highlight(expression)));
            return default(T);
        }
    }
}
}
}
<|start_filename|>libwasm/Instructions/VarInt32Operator.cs<|end_filename|>
using System.IO;
using System.Text;
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes an operator that takes a single 32-bit signed integer immediate.
/// </summary>
/// <summary>
/// Describes an operator that takes a single 32-bit signed integer immediate.
/// </summary>
public sealed class VarInt32Operator : Operator
{
    /// <summary>
    /// Creates an operator that takes a single 32-bit signed integer immediate.
    /// </summary>
    /// <param name="opCode">The operator's opcode.</param>
    /// <param name="declaringType">A type that defines the operator, if any.</param>
    /// <param name="mnemonic">The operator's mnemonic.</param>
    public VarInt32Operator(byte opCode, WasmType declaringType, string mnemonic)
        : base(opCode, declaringType, mnemonic)
    { }

    /// <summary>
    /// Reads the immediates (not the opcode) of a WebAssembly instruction
    /// for this operator from the given reader and returns the result as an
    /// instruction.
    /// </summary>
    /// <param name="reader">The WebAssembly file reader to read immediates from.</param>
    /// <returns>A WebAssembly instruction.</returns>
    public override Instruction ReadImmediates(BinaryWasmReader reader) =>
        Create(reader.ReadVarInt32());

    /// <summary>
    /// Creates a new instruction from this operator and the given
    /// immediate.
    /// </summary>
    /// <param name="immediate">The immediate.</param>
    /// <returns>A new instruction.</returns>
    public VarInt32Instruction Create(int immediate) =>
        new VarInt32Instruction(this, immediate);

    /// <summary>
    /// Casts the given instruction to this operator's instruction type.
    /// </summary>
    /// <param name="value">The instruction to cast.</param>
    /// <returns>The given instruction as this operator's instruction type.</returns>
    public VarInt32Instruction CastInstruction(Instruction value) =>
        (VarInt32Instruction)value;
}
}
<|start_filename|>libwasm/Instructions/Instruction.cs<|end_filename|>
using System.IO;
using System.Text;
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes a WebAssembly stack machine instruction.
/// </summary>
/// <summary>
/// Describes a WebAssembly stack machine instruction.
/// </summary>
public abstract class Instruction
{
    /// <summary>
    /// Creates an instruction.
    /// </summary>
    public Instruction()
    { }

    /// <summary>
    /// Gets the operator for this instruction.
    /// </summary>
    /// <returns>The instruction's operator.</returns>
    public abstract Operator Op { get; }

    /// <summary>
    /// Writes this instruction's immediates (but not its opcode)
    /// to the given WebAssembly file writer.
    /// </summary>
    /// <param name="writer">The writer to write this instruction's immediates to.</param>
    public abstract void WriteImmediatesTo(BinaryWasmWriter writer);

    /// <summary>
    /// Writes this instruction's opcode and immediates to the given
    /// WebAssembly file writer.
    /// </summary>
    /// <param name="writer">The writer to write this instruction to.</param>
    public void WriteTo(BinaryWasmWriter writer)
    {
        // The opcode always precedes the immediates in the binary encoding.
        writer.Writer.Write(Op.OpCode);
        WriteImmediatesTo(writer);
    }

    /// <summary>
    /// Writes a string representation of this instruction to the given text writer.
    /// </summary>
    /// <param name="writer">
    /// The writer to which a representation of this instruction is written.
    /// </param>
    public virtual void Dump(TextWriter writer) => Op.Dump(writer);

    /// <summary>
    /// Creates a string representation of this instruction.
    /// </summary>
    /// <returns>The instruction's string representation.</returns>
    public override string ToString()
    {
        var text = new StringBuilder();
        Dump(new StringWriter(text));
        return text.ToString();
    }
}
}
<|start_filename|>libwasm/Interpret/ThrowFunctionDefinition.cs<|end_filename|>
using System;
using System.Collections.Generic;
namespace Wasm.Interpret
{
/// <summary>
/// Defines a type of function that throws an exception when invoked.
/// </summary>
/// <summary>
/// Defines a type of function that throws an exception when invoked.
/// </summary>
public sealed class ThrowFunctionDefinition : FunctionDefinition
{
    /// <summary>
    /// Creates a function definition from the given exception.
    /// </summary>
    /// <param name="parameterTypes">The list of parameter types.</param>
    /// <param name="returnTypes">The list of return types.</param>
    /// <param name="exceptionToThrow">The exception to throw.</param>
    public ThrowFunctionDefinition(
        IReadOnlyList<WasmValueType> parameterTypes,
        IReadOnlyList<WasmValueType> returnTypes,
        Exception exceptionToThrow)
    {
        this.parameterTypes = parameterTypes;
        this.returnTypes = returnTypes;
        this.ExceptionToThrow = exceptionToThrow;
    }

    private IReadOnlyList<WasmValueType> parameterTypes;
    private IReadOnlyList<WasmValueType> returnTypes;

    /// <inheritdoc/>
    public override IReadOnlyList<WasmValueType> ParameterTypes => parameterTypes;

    /// <inheritdoc/>
    public override IReadOnlyList<WasmValueType> ReturnTypes => returnTypes;

    /// <summary>
    /// Gets the exception to throw when this function is invoked.
    /// </summary>
    /// <returns>The exception to throw when this function is invoked.</returns>
    public Exception ExceptionToThrow { get; private set; }

    /// <inheritdoc/>
    public override IReadOnlyList<object> Invoke(IReadOnlyList<object> arguments, uint callStackDepth = 0)
    {
        // Invocation unconditionally fails with the configured exception.
        throw ExceptionToThrow;
    }
}
}
<|start_filename|>examples/readme-example/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace Wasm.ReadmeExample
{
public static class Program
{
    public static void Main(string[] args)
    {
        // Create an empty WebAssembly file.
        var file = new WasmFile();

        // Define a type section.
        var typeSection = new TypeSection();
        file.Sections.Add(typeSection);

        // Round-trip the file through a (memory) stream: write it out,
        // rewind, then read it back in.
        var stream = new MemoryStream();
        file.WriteBinaryTo(stream);
        stream.Seek(0, SeekOrigin.Begin);
        file = WasmFile.ReadBinary(stream);
        stream.Seek(0, SeekOrigin.Begin);

        // Define a memory section if it doesn't exist already.
        var memSection = file.GetFirstSectionOrNull<MemorySection>();
        if (memSection == null)
        {
            // The file doesn't specify a memory section, so we'll
            // have to create one and add it to the file.
            memSection = new MemorySection();
            file.Sections.Add(memSection);
        }
        memSection.Memories.Clear();

        // Memory sizes are specified in WebAssembly pages,
        // which are regions of storage with size 64KiB.
        // `new ResizableLimits(1, 1)` creates a memory description
        // that is initially one page (first argument) in size and
        // is capped at one page of memory (second argument), so
        // there will always be exactly one page of linear memory.
        memSection.Memories.Add(
            new MemoryType(new ResizableLimits(1, 1)));

        // Print the memory size.
        var memories = file.GetFirstSectionOrNull<MemorySection>().Memories;
        Console.WriteLine(
            "Memory size: {0}",
            memories.Single().Limits);

        // Save the file again.
        file.WriteBinaryTo(stream);
        stream.Seek(0, SeekOrigin.Begin);
    }
}
}
<|start_filename|>libwasm/Interpret/InterpreterContext.cs<|end_filename|>
using System.Collections.Generic;
using System.Linq;
namespace Wasm.Interpret
{
/// <summary>
/// Describes the context in which instructions are interpreted.
/// </summary>
    public sealed class InterpreterContext
    {
        /// <summary>
        /// Creates a new interpreter context from the given module and
        /// expected return types.
        /// </summary>
        /// <param name="module">The owning module.</param>
        /// <param name="returnTypes">The list of expected return types.</param>
        public InterpreterContext(ModuleInstance module, IReadOnlyList<WasmValueType> returnTypes)
            : this(module, returnTypes, new Variable[0])
        { }
        /// <summary>
        /// Creates a new interpreter context from the given module, return types
        /// and list of local variables.
        /// </summary>
        /// <param name="module">The owning module.</param>
        /// <param name="returnTypes">The list of expected return types.</param>
        /// <param name="locals">The list of local variables in this context.</param>
        public InterpreterContext(
            ModuleInstance module,
            IReadOnlyList<WasmValueType> returnTypes,
            IReadOnlyList<Variable> locals)
            : this(module, returnTypes, locals, ExecutionPolicy.Create())
        { }
        /// <summary>
        /// Creates a new interpreter context from the given module, return types,
        /// list of local variables, execution policy and call stack depth.
        /// </summary>
        /// <param name="module">The owning module.</param>
        /// <param name="returnTypes">The list of expected return types.</param>
        /// <param name="locals">The list of local variables in this context.</param>
        /// <param name="policy">The execution policy to use.</param>
        /// <param name="callStackDepth">
        /// The current depth of the call stack.
        /// </param>
        public InterpreterContext(
            ModuleInstance module,
            IReadOnlyList<WasmValueType> returnTypes,
            IReadOnlyList<Variable> locals,
            ExecutionPolicy policy,
            uint callStackDepth = 0)
        {
            this.Module = module;
            this.ReturnTypes = returnTypes;
            this.Locals = locals;
            this.Policy = policy;
            this.valStack = new Stack<object>();
            this.ReturnValues = null;
            // A negative break depth means "no break in progress".
            this.BreakDepth = -1;
            this.CallStackDepth = callStackDepth;
        }
        /// <summary>
        /// Gets the module instance that owns the instructions being interpreted.
        /// </summary>
        /// <returns>The module instance.</returns>
        public ModuleInstance Module { get; private set; }
        /// <summary>
        /// Gets the list of expected return types.
        /// </summary>
        /// <value>The expected return types.</value>
        public IReadOnlyList<WasmValueType> ReturnTypes { get; private set; }
        /// <summary>
        /// Gets a list of local variables for this interpreter context.
        /// </summary>
        /// <returns>A list of local variables.</returns>
        public IReadOnlyList<Variable> Locals { get; private set; }
        /// <summary>
        /// Gets the execution policy associated with this interpreter context.
        /// </summary>
        /// <value>An execution policy.</value>
        public ExecutionPolicy Policy { get; private set; }
        /// <summary>
        /// Gets the depth of the call stack at which the "frame" is placed for
        /// the instructions currently being executed.
        /// </summary>
        /// <value>A call stack depth.</value>
        public uint CallStackDepth { get; private set; }
        /// <summary>
        /// The evaluation stack. Kept private so callers can only exchange it
        /// wholesale through the <see cref="Stack"/> property.
        /// </summary>
        private Stack<object> valStack;
        /// <summary>
        /// Gets or sets the evaluation stack.
        /// </summary>
        /// <value>The evaluation stack.</value>
        public EvaluationStack Stack
        {
            get
            {
                return new EvaluationStack { stack = valStack };
            }
            set
            {
                this.valStack = value.stack;
            }
        }
        /// <summary>
        /// Gets the list of values that have been returned, or <c>null</c> if nothing
        /// has been returned yet.
        /// </summary>
        /// <returns>The list of values that have been returned, or <c>null</c> if nothing
        /// has been returned yet.</returns>
        public IReadOnlyList<object> ReturnValues { get; private set; }
        /// <summary>
        /// Gets the number of items that are currently on the evaluation stack.
        /// </summary>
        public int StackDepth => valStack.Count;
        /// <summary>
        /// Tests if this interpreter context has returned.
        /// </summary>
        public bool HasReturned => ReturnValues != null;
        /// <summary>
        /// Gets or sets the depth of the break that is currently being handled.
        /// </summary>
        /// <returns>The depth of the break that is currently being handled.
        /// A negative value means that no break is currently being handled.</returns>
        public int BreakDepth { get; set; }
        /// <summary>
        /// Gets a flag that tells if a break has been requested.
        /// </summary>
        /// <returns>A flag that tells if a break has been requested.</returns>
        public bool BreakRequested => BreakDepth >= 0;
        /// <summary>
        /// Pops a value of the given type from the value stack.
        /// </summary>
        /// <returns>The popped value.</returns>
        public T Pop<T>()
        {
            if (StackDepth == 0)
            {
                throw new WasmException("Cannot pop an element from an empty stack.");
            }
            return (T)valStack.Pop();
        }
        /// <summary>
        /// Pops an array of values of the given type from the value stack.
        /// </summary>
        /// <returns>The popped values.</returns>
        public T[] Pop<T>(int count)
        {
            var results = new T[count];
            // Fill the array back-to-front so that results[0] is the value
            // that was pushed first (i.e., the deepest of the popped values).
            for (int i = count - 1; i >= 0; i--)
            {
                results[i] = Pop<T>();
            }
            return results;
        }
        /// <summary>
        /// Sets the list of return values to the contents of the value stack,
        /// if nothing has been returned already.
        /// </summary>
        /// <returns>
        /// <c>true</c> if the contents of the value stack have been promoted
        /// to return values; otherwise, <c>false</c>.
        /// </returns>
        public bool Return()
        {
            if (HasReturned)
            {
                return false;
            }
            else
            {
                // NOTE(review): Stack<T>.ToArray() yields elements top-of-stack
                // first; callers of ReturnValues appear to rely on that order —
                // confirm before changing.
                ReturnValues = valStack.ToArray();
                return true;
            }
        }
        /// <summary>
        /// Peeks a value of the given type from the value stack.
        /// </summary>
        /// <returns>The peeked value.</returns>
        public T Peek<T>()
        {
            if (StackDepth == 0)
            {
                throw new WasmException("Cannot peek an element from an empty stack.");
            }
            return (T)valStack.Peek();
        }
        /// <summary>
        /// Pushes the given value onto the value stack.
        /// </summary>
        /// <param name="value">The value to push onto the stack.</param>
        public void Push<T>(T value)
        {
            valStack.Push(value);
        }
        /// <summary>
        /// Pushes the given sequence of values onto the value stack.
        /// </summary>
        /// <param name="values">The list of values to push onto the stack.</param>
        public void Push<T>(IEnumerable<T> values)
        {
            foreach (var item in values)
            {
                Push<T>(item);
            }
        }
        /// <summary>
        /// Pushes the contents of an evaluation stack onto this context's stack.
        /// </summary>
        /// <param name="stack">The stack to push onto this context's evaluation stack.</param>
        public void Push(EvaluationStack stack)
        {
            // Reverse so the source stack's bottom element is pushed first,
            // preserving its ordering on this context's stack.
            Push<object>(stack.stack.Reverse());
        }
        /// <summary>
        /// Pushes the topmost <paramref name="count"/> elements of <paramref name="stack"/> onto this context's
        /// evaluation stack.
        /// </summary>
        /// <param name="stack">The stack to push onto this context's evaluation stack.</param>
        /// <param name="count">
        /// The number of elements to take from <paramref name="stack"/> and push onto this
        /// context's evaluation stack.
        /// </param>
        public void Push(EvaluationStack stack, int count)
        {
            // Take enumerates top-down; Reverse restores the original order
            // of the topmost 'count' elements before they are re-pushed.
            Push<object>(stack.stack.Take(count).Reverse());
        }
        /// <summary>
        /// Creates an empty evaluation stack.
        /// </summary>
        /// <returns>An empty evaluation stack.</returns>
        public EvaluationStack CreateStack()
        {
            return new EvaluationStack { stack = new Stack<object>() };
        }
        /// <summary>
        /// A data structure that represents the interpreter's value stack.
        /// </summary>
        public struct EvaluationStack
        {
            // Internal on purpose so we can keep the 'Stack<object>' an
            // implementation detail.
            internal Stack<object> stack;
        }
    }
    /// <summary>
    /// A description of an execution policy for WebAssembly modules.
    /// </summary>
    public sealed class ExecutionPolicy
    {
        // Private constructor: instances are created exclusively through the
        // Create factory method below.
        private ExecutionPolicy()
        { }
        /// <summary>
        /// Creates a new execution policy.
        /// </summary>
        /// <param name="maxCallStackDepth">The maximal depth of the call stack.</param>
        /// <param name="maxMemorySize">
        /// The maximum size of any memory, in page units. A value of zero
        /// indicates that there is no maximum memory size.
        /// </param>
        /// <param name="enforceAlignment">
        /// Tells if memory access alignments should be taken to be normative instead
        /// of as hints.
        /// </param>
        /// <param name="translateExceptions">
        /// Tells if CLR exceptions should be translated to <see cref="TrapException"/> values.
        /// </param>
        public static ExecutionPolicy Create(
            uint maxCallStackDepth = 256,
            uint maxMemorySize = 0,
            bool enforceAlignment = false,
            bool translateExceptions = true)
        {
            return new ExecutionPolicy()
            {
                MaxCallStackDepth = maxCallStackDepth,
                EnforceAlignment = enforceAlignment,
                MaxMemorySize = maxMemorySize,
                TranslateExceptions = translateExceptions
            };
        }
        /// <summary>
        /// Tells if the alignment specified by memory instructions is to be taken as
        /// a mandatory alignment to which memory accesses must adhere instead of a mere
        /// hint.
        /// </summary>
        /// <value><c>true</c> if unaligned accesses must throw exceptions; otherwise, <c>false</c>.</value>
        /// <remarks>
        /// The WebAssembly specification states that memory instruction alignments do not
        /// affect execution semantics. In order to comply with the standard, this property
        /// should be set to <c>false</c> (the default).
        /// </remarks>
        public bool EnforceAlignment { get; private set; }
        /// <summary>
        /// Gets the maximal depth of the call stack.
        /// </summary>
        /// <value>A maximal call stack depth.</value>
        public uint MaxCallStackDepth { get; private set; }
        /// <summary>
        /// Gets the maximum size of any memory, in page units. A value of zero
        /// indicates that there is no maximum memory size.
        /// </summary>
        /// <value>The maximum memory size.</value>
        public uint MaxMemorySize { get; private set; }
        /// <summary>
        /// Tells if CLR exceptions should be translated to <see cref="TrapException"/> values.
        /// </summary>
        /// <value>
        /// <c>true</c> if WebAssembly execution should throw only <see cref="TrapException"/> values;
        /// <c>false</c> if it may also throw other types of exceptions.
        /// </value>
        public bool TranslateExceptions { get; private set; }
    }
}
<|start_filename|>libwasm/Interpret/Jit/JitOperatorImpls.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Reflection.Emit;
using Wasm.Instructions;
namespace Wasm.Interpret.Jit
{
using InstructionImpl = Action<CompilerContext, ILGenerator>;
/// <summary>
    /// A collection of methods that compile WebAssembly instructions to IL.
/// </summary>
public static class JitOperatorImpls
{
        /// <summary>
        /// Creates an instruction implementation that emits a single IL opcode.
        /// The compile-time type stack is checked against the opcode's expected
        /// parameter types and, if the opcode produces a result, that result's
        /// type is pushed.
        /// </summary>
        /// <param name="op">The IL opcode to emit.</param>
        /// <param name="resultType">The WebAssembly type of the opcode's result, if any.</param>
        /// <param name="parameterTypes">The WebAssembly types of the opcode's operands, bottom-up.</param>
        private static InstructionImpl ImplementAsOpCode(OpCode op, WasmValueType? resultType = null, params WasmValueType[] parameterTypes)
        {
            return (context, gen) =>
            {
                var paramTypesOnStack = context.Pop(parameterTypes.Length);
                for (int i = 0; i < parameterTypes.Length; i++)
                {
                    if (parameterTypes[i] != paramTypesOnStack[i])
                    {
                        throw new InvalidOperationException($"Expected type '{parameterTypes[i]}' on stack for argument {i} of opcode '{op}', but got type '{paramTypesOnStack[i]}' instead.");
                    }
                }
                gen.Emit(op);
                if (resultType.HasValue)
                {
                    context.Push(resultType.Value);
                }
            };
        }
        /// <summary>
        /// Creates an instruction implementation that emits a call to a method.
        /// The compile-time type stack is checked against the callee's parameter
        /// list and, for non-void callees, the return type is pushed.
        /// </summary>
        /// <param name="callee">The method to call.</param>
        private static InstructionImpl ImplementAsCall(MethodInfo callee)
        {
            return (context, gen) =>
            {
                var parameterTypes = callee.GetParameters().Select(p => ValueHelpers.ToWasmValueType(p.ParameterType)).ToArray();
                var paramTypesOnStack = context.Pop(parameterTypes.Length);
                for (int i = 0; i < parameterTypes.Length; i++)
                {
                    if (parameterTypes[i] != paramTypesOnStack[i])
                    {
                        throw new InvalidOperationException($"Expected type '{parameterTypes[i]}' on stack for argument {i} of method '{callee}', but got type '{paramTypesOnStack[i]}' instead.");
                    }
                }
                gen.Emit(OpCodes.Call, callee);
                if (callee.ReturnType != null && callee.ReturnType != typeof(void))
                {
                    context.Push(ValueHelpers.ToWasmValueType(callee.ReturnType));
                }
            };
        }
        /// <summary>
        /// Composes instruction implementations by running them in sequence.
        /// </summary>
        /// <param name="impls">The implementations to run, in order.</param>
        private static InstructionImpl Chain(params InstructionImpl[] impls)
        {
            return (context, gen) =>
            {
                foreach (var impl in impls)
                {
                    impl(context, gen);
                }
            };
        }
        /// <summary>
        /// Implements a binary WebAssembly operator as a single IL opcode:
        /// two operands of <paramref name="type"/>, result of the same type.
        /// </summary>
        /// <param name="op">The IL opcode to emit.</param>
        /// <param name="type">The operand and result type.</param>
        private static InstructionImpl ImplementAsBinaryOpCode(OpCode op, WasmValueType type)
        {
            return ImplementAsOpCode(op, type, type, type);
        }
        /// <summary>
        /// Implements a comparison operator as a single IL opcode: two operands
        /// of <paramref name="type"/>, Int32 (boolean) result.
        /// </summary>
        /// <param name="op">The IL opcode to emit.</param>
        /// <param name="type">The operand type.</param>
        private static InstructionImpl ImplementAsComparisonOpCode(OpCode op, WasmValueType type)
        {
            return ImplementAsOpCode(op, WasmValueType.Int32, type, type);
        }
        /// <summary>
        /// Implements a unary operator as a single IL opcode: one operand of
        /// <paramref name="type"/>, result of the same type.
        /// </summary>
        /// <param name="op">The IL opcode to emit.</param>
        /// <param name="type">The operand and result type.</param>
        private static InstructionImpl ImplementAsUnaryOpCode(OpCode op, WasmValueType type)
        {
            return ImplementAsOpCode(op, type, type);
        }
        /// <summary>
        /// Compiles an 'i32.add' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32Add(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Add, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.clz' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32Clz(Instruction instruction)
        {
            return ImplementAsCall(
                typeof(ValueHelpers).GetMethod(
                    nameof(ValueHelpers.CountLeadingZeros),
                    new[] { typeof(int) }));
        }
        /// <summary>
        /// Compiles an 'i32.ctz' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32Ctz(Instruction instruction)
        {
            return ImplementAsCall(
                typeof(ValueHelpers).GetMethod(
                    nameof(ValueHelpers.CountTrailingZeros),
                    new[] { typeof(int) }));
        }
        /// <summary>
        /// Compiles an 'i32.sub' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32Sub(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Sub, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.mul' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32Mul(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Mul, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.div_s' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32DivS(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Div, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.div_u' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32DivU(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Div_Un, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.eq' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32Eq(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Ceq, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.eqz' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32Eqz(Instruction instruction)
        {
            // 'eqz' is compiled as an equality comparison against zero.
            return Chain(Int32Const(Operators.Int32Const.Create(0)), Int32Eq(instruction))
;
        }
        /// <summary>
        /// Compiles an 'i32.ne' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32Ne(Instruction instruction)
        {
            // x != y  ==  !(x == y)
            return Chain(Int32Eq(instruction), Int32Eqz(instruction));
        }
        /// <summary>
        /// Compiles an 'i32.ge_s' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32GeS(Instruction instruction)
        {
            // x >= y  ==  !(x < y)
            return Chain(Int32LtS(instruction), Int32Eqz(instruction));
        }
        /// <summary>
        /// Compiles an 'i32.ge_u' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32GeU(Instruction instruction)
        {
            return Chain(Int32LtU(instruction), Int32Eqz(instruction));
        }
        /// <summary>
        /// Compiles an 'i32.le_s' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32LeS(Instruction instruction)
        {
            // x <= y  ==  !(x > y)
            return Chain(Int32GtS(instruction), Int32Eqz(instruction));
        }
        /// <summary>
        /// Compiles an 'i32.le_u' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32LeU(Instruction instruction)
        {
            return Chain(Int32GtU(instruction), Int32Eqz(instruction));
        }
        /// <summary>
        /// Compiles an 'i32.lt_s' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32LtS(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Clt, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.lt_u' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32LtU(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Clt_Un, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.gt_s' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32GtS(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Cgt, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.gt_u' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32GtU(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Cgt_Un, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.rem_s' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32RemS(Instruction instruction)
        {
            // Implemented via a helper rather than OpCodes.Rem; presumably to
            // match WebAssembly's rem_s edge-case semantics — see ValueHelpers.RemS.
            return ImplementAsCall(
                typeof(ValueHelpers).GetMethod(
                    nameof(ValueHelpers.RemS),
                    new[] { typeof(int), typeof(int) }));
        }
        /// <summary>
        /// Compiles an 'i32.rem_u' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32RemU(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Rem_Un, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.rotl' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32Rotl(Instruction instruction)
        {
            return ImplementAsCall(
                typeof(ValueHelpers).GetMethod(
                    nameof(ValueHelpers.RotateLeft),
                    new[] { typeof(int), typeof(int) }));
        }
        /// <summary>
        /// Compiles an 'i32.rotr' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32Rotr(Instruction instruction)
        {
            return ImplementAsCall(
                typeof(ValueHelpers).GetMethod(
                    nameof(ValueHelpers.RotateRight),
                    new[] { typeof(int), typeof(int) }));
        }
        /// <summary>
        /// Compiles an 'i32.and' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32And(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.And, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.or' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32Or(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Or, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.popcnt' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32Popcnt(Instruction instruction)
        {
            return ImplementAsCall(
                typeof(ValueHelpers).GetMethod(
                    nameof(ValueHelpers.PopCount),
                    new[] { typeof(int) }));
        }
        /// <summary>
        /// Compiles an 'i32.wrap/i64' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32WrapInt64(Instruction instruction)
        {
            return ImplementAsOpCode(OpCodes.Conv_I4, WasmValueType.Int32, WasmValueType.Int64);
        }
        /// <summary>
        /// Compiles an 'i32.xor' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32Xor(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Xor, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.shl' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32Shl(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Shl, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.shr_s' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32ShrS(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Shr, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i32.shr_u' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int32ShrU(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Shr_Un, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i64.add' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64Add(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Add, WasmValueType.Int64);
        }
        /// <summary>
        /// Compiles an 'i64.clz' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64Clz(Instruction instruction)
        {
            // The helper produces a 32-bit count, which is then widened to
            // match the i64 result type of the WebAssembly instruction.
            return Chain(
                ImplementAsCall(
                    typeof(ValueHelpers).GetMethod(
                        nameof(ValueHelpers.CountLeadingZeros),
                        new[] { typeof(long) })),
                Int64ExtendUInt32(instruction));
        }
        /// <summary>
        /// Compiles an 'i64.ctz' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64Ctz(Instruction instruction)
        {
            return Chain(
                ImplementAsCall(
                    typeof(ValueHelpers).GetMethod(
                        nameof(ValueHelpers.CountTrailingZeros),
                        new[] { typeof(long) })),
                Int64ExtendUInt32(instruction));
        }
        /// <summary>
        /// Compiles an 'i64.sub' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64Sub(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Sub, WasmValueType.Int64);
        }
        /// <summary>
        /// Compiles an 'i64.mul' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64Mul(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Mul, WasmValueType.Int64);
        }
        /// <summary>
        /// Compiles an 'i64.div_s' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64DivS(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Div, WasmValueType.Int64);
        }
        /// <summary>
        /// Compiles an 'i64.div_u' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64DivU(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Div_Un, WasmValueType.Int64);
        }
        /// <summary>
        /// Compiles an 'i64.eq' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64Eq(Instruction instruction)
        {
            return ImplementAsComparisonOpCode(OpCodes.Ceq, WasmValueType.Int64);
        }
        /// <summary>
        /// Compiles an 'i64.eqz' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64Eqz(Instruction instruction)
        {
            // 'eqz' is compiled as an equality comparison against zero.
            return Chain(Int64Const(Operators.Int64Const.Create(0)), Int64Eq(instruction));
        }
        /// <summary>
        /// Compiles an 'i64.extend_s/i32' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64ExtendSInt32(Instruction instruction)
        {
            return ImplementAsOpCode(OpCodes.Conv_I8, WasmValueType.Int64, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i64.extend_u/i32' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64ExtendUInt32(Instruction instruction)
        {
            return ImplementAsOpCode(OpCodes.Conv_U8, WasmValueType.Int64, WasmValueType.Int32);
        }
        /// <summary>
        /// Compiles an 'i64.ne' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64Ne(Instruction instruction)
        {
            // The comparison result is an i32, so the negation is Int32Eqz.
            return Chain(Int64Eq(instruction), Int32Eqz(instruction));
        }
        /// <summary>
        /// Compiles an 'i64.ge_s' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64GeS(Instruction instruction)
        {
            // x >= y  ==  !(x < y)
            return Chain(Int64LtS(instruction), Int32Eqz(instruction));
        }
        /// <summary>
        /// Compiles an 'i64.ge_u' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64GeU(Instruction instruction)
        {
            return Chain(Int64LtU(instruction), Int32Eqz(instruction));
        }
        /// <summary>
        /// Compiles an 'i64.le_s' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64LeS(Instruction instruction)
        {
            // x <= y  ==  !(x > y)
            return Chain(Int64GtS(instruction), Int32Eqz(instruction));
        }
        /// <summary>
        /// Compiles an 'i64.le_u' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64LeU(Instruction instruction)
        {
            return Chain(Int64GtU(instruction), Int32Eqz(instruction));
        }
        /// <summary>
        /// Compiles an 'i64.lt_s' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64LtS(Instruction instruction)
        {
            return ImplementAsComparisonOpCode(OpCodes.Clt, WasmValueType.Int64);
        }
        /// <summary>
        /// Compiles an 'i64.lt_u' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64LtU(Instruction instruction)
        {
            return ImplementAsComparisonOpCode(OpCodes.Clt_Un, WasmValueType.Int64);
        }
        /// <summary>
        /// Compiles an 'i64.gt_s' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64GtS(Instruction instruction)
        {
            return ImplementAsComparisonOpCode(OpCodes.Cgt, WasmValueType.Int64);
        }
        /// <summary>
        /// Compiles an 'i64.gt_u' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64GtU(Instruction instruction)
        {
            return ImplementAsComparisonOpCode(OpCodes.Cgt_Un, WasmValueType.Int64);
        }
        /// <summary>
        /// Compiles an 'i64.rem_s' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64RemS(Instruction instruction)
        {
            // Implemented via a helper rather than OpCodes.Rem; presumably to
            // match WebAssembly's rem_s edge-case semantics — see ValueHelpers.RemS.
            return ImplementAsCall(
                typeof(ValueHelpers).GetMethod(
                    nameof(ValueHelpers.RemS),
                    new[] { typeof(long), typeof(long) }));
        }
        /// <summary>
        /// Compiles an 'i64.rem_u' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64RemU(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Rem_Un, WasmValueType.Int64);
        }
        /// <summary>
        /// Compiles an 'i64.rotl' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64Rotl(Instruction instruction)
        {
            return ImplementAsCall(
                typeof(ValueHelpers).GetMethod(
                    nameof(ValueHelpers.RotateLeft),
                    new[] { typeof(long), typeof(long) }));
        }
        /// <summary>
        /// Compiles an 'i64.rotr' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64Rotr(Instruction instruction)
        {
            return ImplementAsCall(
                typeof(ValueHelpers).GetMethod(
                    nameof(ValueHelpers.RotateRight),
                    new[] { typeof(long), typeof(long) }));
        }
        /// <summary>
        /// Compiles an 'i64.and' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64And(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.And, WasmValueType.Int64);
        }
        /// <summary>
        /// Compiles an 'i64.or' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64Or(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Or, WasmValueType.Int64);
        }
        /// <summary>
        /// Compiles an 'i64.popcnt' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64Popcnt(Instruction instruction)
        {
            return Chain(
                ImplementAsCall(
                    typeof(ValueHelpers).GetMethod(
                        nameof(ValueHelpers.PopCount),
                        new[] { typeof(long) })),
                Int64ExtendUInt32(instruction));
        }
        /// <summary>
        /// Compiles an 'i64.xor' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64Xor(Instruction instruction)
        {
            return ImplementAsBinaryOpCode(OpCodes.Xor, WasmValueType.Int64);
        }
        /// <summary>
        /// Compiles an 'i64.shl' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64Shl(Instruction instruction)
        {
            // The IL shift opcodes take a 32-bit shift amount, so the i64
            // amount on top of the stack is narrowed first.
            return Chain(
                Int32WrapInt64(instruction),
                ImplementAsOpCode(OpCodes.Shl, WasmValueType.Int64, WasmValueType.Int64, WasmValueType.Int32));
        }
        /// <summary>
        /// Compiles an 'i64.shr_s' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64ShrS(Instruction instruction)
        {
            // The IL shift opcodes take a 32-bit shift amount, so the i64
            // amount on top of the stack is narrowed first.
            return Chain(
                Int32WrapInt64(instruction),
                ImplementAsOpCode(OpCodes.Shr, WasmValueType.Int64, WasmValueType.Int64, WasmValueType.Int32));
        }
        /// <summary>
        /// Compiles an 'i64.shr_u' instruction.
        /// </summary>
        /// <param name="instruction">The instruction to compile to an implementation.</param>
        public static InstructionImpl Int64ShrU(Instruction instruction)
        {
            // The IL shift opcodes take a 32-bit shift amount, so the i64
            // amount on top of the stack is narrowed first.
            return Chain(
                Int32WrapInt64(instruction),
                ImplementAsOpCode(OpCodes.Shr_Un, WasmValueType.Int64, WasmValueType.Int64, WasmValueType.Int32));
        }
/// <summary>
/// Compiles an 'i32.const' instruction.
/// </summary>
/// <param name="instruction">The instruction to compile to an implementation.</param>
public static InstructionImpl Int32Const(Instruction instruction)
{
var immediate = Operators.Int32Const.CastInstruction(instruction).Immediate;
return (context, gen) =>
{
gen.Emit(OpCodes.Ldc_I4, immediate);
context.Push(WasmValueType.Int32);
};
}
/// <summary>
/// Compiles an 'i64.const' instruction.
/// </summary>
/// <param name="instruction">The instruction to compile to an implementation.</param>
public static InstructionImpl Int64Const(Instruction instruction)
{
    // Decode the constant once at compile time; the generated implementation
    // simply loads it onto the evaluation stack.
    var value = Operators.Int64Const.CastInstruction(instruction).Immediate;
    return (context, gen) =>
    {
        context.Push(WasmValueType.Int64);
        gen.Emit(OpCodes.Ldc_I8, value);
    };
}
/// <summary>
/// Compiles an 'f32.const' instruction.
/// </summary>
/// <param name="instruction">The instruction to compile to an implementation.</param>
public static InstructionImpl Float32Const(Instruction instruction)
{
    // Decode the constant once at compile time; the generated implementation
    // simply loads it onto the evaluation stack.
    var value = Operators.Float32Const.CastInstruction(instruction).Immediate;
    return (context, gen) =>
    {
        context.Push(WasmValueType.Float32);
        gen.Emit(OpCodes.Ldc_R4, value);
    };
}
/// <summary>
/// Compiles an 'f64.const' instruction.
/// </summary>
/// <param name="instruction">The instruction to compile to an implementation.</param>
public static InstructionImpl Float64Const(Instruction instruction)
{
    // Decode the constant once at compile time; the generated implementation
    // simply loads it onto the evaluation stack.
    var value = Operators.Float64Const.CastInstruction(instruction).Immediate;
    return (context, gen) =>
    {
        context.Push(WasmValueType.Float64);
        gen.Emit(OpCodes.Ldc_R8, value);
    };
}
/// <summary>
/// Compiles a 'nop' instruction.
/// </summary>
/// <param name="instruction">The instruction to compile to an implementation.</param>
public static InstructionImpl Nop(Instruction instruction)
{
    // A no-op compiles down to zero CIL instructions.
    return (context, gen) =>
    {
        // Intentionally empty.
    };
}
/// <summary>
/// Compiles a 'drop' instruction.
/// </summary>
/// <param name="instruction">The instruction to compile to an implementation.</param>
public static InstructionImpl Drop(Instruction instruction)
{
    return (context, gen) =>
    {
        // Discard the CLR stack slot and keep the tracked wasm type
        // stack in sync with it.
        gen.Emit(OpCodes.Pop);
        context.Pop();
    };
}
/// <summary>
/// Compiles a 'select' instruction.
/// </summary>
/// <param name="instruction">The instruction to compile to an implementation.</param>
public static InstructionImpl Select(Instruction instruction)
{
    return (context, gen) =>
    {
        // Stack on entry (top first): condition (i32), rhs value, lhs value.
        context.Pop();
        var rhsType = context.Pop();
        var lhsType = context.Pop();
        var ifLabel = gen.DefineLabel();
        var endLabel = gen.DefineLabel();
        // A nonzero condition selects the lhs operand.
        gen.Emit(OpCodes.Brtrue, ifLabel);
        // Condition is zero: keep rhs. Stash it in a local so the lhs
        // underneath can be popped, then reload it as the result.
        var rhsLocal = gen.DeclareLocal(ValueHelpers.ToClrType(rhsType));
        gen.Emit(OpCodes.Stloc, rhsLocal);
        gen.Emit(OpCodes.Pop);
        gen.Emit(OpCodes.Ldloc, rhsLocal);
        gen.Emit(OpCodes.Br, endLabel);
        gen.MarkLabel(ifLabel);
        // Condition is nonzero: discard rhs, leaving lhs as the result.
        gen.Emit(OpCodes.Pop);
        gen.MarkLabel(endLabel);
        // NOTE(review): the result is recorded with the lhs operand's type;
        // the wasm spec requires both operands to share a type, which is
        // assumed (not checked) here.
        context.Push(lhsType);
    };
}
/// <summary>
/// Compiles a 'call' instruction.
/// </summary>
/// <param name="instruction">The instruction to compile to an implementation.</param>
public static InstructionImpl Call(Instruction instruction)
{
    var callInsn = Operators.Call.CastInstruction(instruction);
    var index = (int)callInsn.Immediate;
    return (context, gen) =>
    {
        // Check for stack overflow. The argument at slot 'ParameterCount'
        // carries the current call stack depth (see the else-branch below,
        // which forwards it to compiled callees).
        var successLabel = gen.DefineLabel();
        gen.Emit(OpCodes.Ldarg, context.ParameterCount);
        gen.Emit(OpCodes.Ldc_I4, (int)context.Compiler.module.Policy.MaxCallStackDepth);
        gen.Emit(OpCodes.Blt_Un, successLabel);
        // Depth limit reached: throw new TrapException(message, specMessage).
        gen.Emit(OpCodes.Ldstr, "A stack overflow occurred: the max call stack depth was exceeded");
        gen.Emit(OpCodes.Ldstr, TrapException.SpecMessages.CallStackExhausted);
        gen.Emit(OpCodes.Newobj, typeof(TrapException).GetConstructor(new[] { typeof(string), typeof(string) }));
        gen.Emit(OpCodes.Throw);
        gen.MarkLabel(successLabel);
        FunctionType signature;
        if (index < context.Compiler.offset)
        {
            // If we're calling an import, then things get interesting. Basically, we have to emit
            // a call to the import's Invoke method instead of calling the import directly as
            // we can do with compiled methods.
            var callee = context.Compiler.module.Functions[index];
            signature = new FunctionType(callee.ParameterTypes, callee.ReturnTypes);
            // Spill the arguments into locals (popped in reverse order) so the
            // external-call shim can reload them in declaration order.
            var args = new List<Func<ILGenerator, Type>>();
            foreach (var item in callee.ParameterTypes.Reverse())
            {
                var loc = gen.DeclareLocal(ValueHelpers.ToClrType(item));
                gen.Emit(OpCodes.Stloc, loc);
                args.Add(generator =>
                {
                    generator.Emit(OpCodes.Ldloc, loc);
                    return loc.LocalType;
                });
            }
            args.Reverse();
            context.Compiler.EmitExternalCall(gen, context.ParameterCount, callee, args);
        }
        else
        {
            // Push the call stack depth onto the evaluation stack.
            gen.Emit(OpCodes.Ldarg, context.ParameterCount);
            // Emit the call.
            signature = context.Compiler.types[index - context.Compiler.offset];
            var callee = context.Compiler.builders[index - context.Compiler.offset];
            gen.Emit(OpCodes.Call, callee);
        }
        // Pop and check argument types.
        var parameterTypes = signature.ParameterTypes;
        var paramTypesOnStack = context.Pop(parameterTypes.Count);
        for (int i = 0; i < parameterTypes.Count; i++)
        {
            if (parameterTypes[i] != paramTypesOnStack[i])
            {
                throw new InvalidOperationException($"Expected type '{parameterTypes[i]}' on stack for function call, but got type '{paramTypesOnStack[i]}' instead.");
            }
        }
        // Update the type stack.
        foreach (var item in signature.ReturnTypes)
        {
            context.Push(item);
        }
    };
}
/// <summary>
/// Compiles a 'get_local' instruction.
/// </summary>
/// <param name="instruction">The instruction to compile to an implementation.</param>
public static InstructionImpl GetLocal(Instruction instruction)
{
    var index = Operators.GetLocal.CastInstruction(instruction).Immediate;
    return (context, gen) =>
    {
        if (index < context.ParameterCount)
        {
            // Wasm locals below the parameter count map to CLR arguments.
            gen.Emit(OpCodes.Ldarg, (int)index);
        }
        else
        {
            // Remaining wasm locals map to CLR local slots tracked by the context.
            gen.Emit(OpCodes.Ldloc, context.Locals[index]);
        }
        context.Push(context.LocalTypes[(int)index]);
    };
}
/// <summary>
/// Compiles a 'set_local' instruction.
/// </summary>
/// <param name="instruction">The instruction to compile to an implementation.</param>
public static InstructionImpl SetLocal(Instruction instruction)
{
    var index = Operators.SetLocal.CastInstruction(instruction).Immediate;
    return (context, gen) =>
    {
        if (index < context.ParameterCount)
        {
            // Wasm locals below the parameter count map to CLR arguments.
            gen.Emit(OpCodes.Starg, (int)index);
        }
        else
        {
            // Remaining wasm locals map to CLR local slots tracked by the context.
            gen.Emit(OpCodes.Stloc, context.Locals[index]);
        }
        // 'set_local' consumes the value on top of the stack, so the tracked
        // type stack must be popped to stay in sync with the CLR evaluation
        // stack (mirroring 'Drop'; 'TeeLocal' relies on this balance after
        // its pop/push/push bookkeeping).
        context.Pop();
    };
}
/// <summary>
/// Compiles a 'tee_local' instruction.
/// </summary>
/// <param name="instruction">The instruction to compile to an implementation.</param>
public static InstructionImpl TeeLocal(Instruction instruction)
{
    return (context, gen) =>
    {
        // Duplicate the top of the stack: one copy is stored into the local
        // by the 'set_local' implementation below, the other remains on the
        // stack as this instruction's result.
        gen.Emit(OpCodes.Dup);
        var type = context.Pop();
        context.Push(type);
        context.Push(type);
        SetLocal(instruction)(context, gen);
    };
}
}
}
<|start_filename|>libwasm/Instructions/Float64Operator.cs<|end_filename|>
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes an operator that takes a single 64-bit floating-point number immediate.
/// </summary>
public sealed class Float64Operator : Operator
{
    /// <summary>
    /// Creates an operator that takes a 64-bit floating-point number immediate.
    /// </summary>
    /// <param name="opCode">The operator's opcode.</param>
    /// <param name="declaringType">A type that defines the operator, if any.</param>
    /// <param name="mnemonic">The operator's mnemonic.</param>
    public Float64Operator(byte opCode, WasmType declaringType, string mnemonic)
        : base(opCode, declaringType, mnemonic)
    {
    }

    /// <summary>
    /// Reads the immediates (not the opcode) of a WebAssembly instruction
    /// for this operator from the given reader and returns the result as an
    /// instruction.
    /// </summary>
    /// <param name="Reader">The WebAssembly file reader to read immediates from.</param>
    /// <returns>A WebAssembly instruction.</returns>
    public override Instruction ReadImmediates(BinaryWasmReader Reader) =>
        Create(Reader.ReadFloat64());

    /// <summary>
    /// Creates a new instruction from this operator and the given immediate.
    /// </summary>
    /// <param name="Immediate">The immediate.</param>
    /// <returns>A new instruction.</returns>
    public Float64Instruction Create(double Immediate) =>
        new Float64Instruction(this, Immediate);

    /// <summary>
    /// Casts the given instruction to this operator's instruction type.
    /// </summary>
    /// <param name="Value">The instruction to cast.</param>
    /// <returns>The given instruction as this operator's instruction type.</returns>
    public Float64Instruction CastInstruction(Instruction Value) =>
        (Float64Instruction)Value;
}
}
<|start_filename|>libwasm/Interpret/ModuleCompiler.cs<|end_filename|>
using System.Collections.Generic;
namespace Wasm.Interpret
{
/// <summary>
/// A base class for objects that turn module definition functions into module instance functions.
/// </summary>
/// <remarks>
/// Implementations such as <see cref="InterpreterCompiler"/> capture state in
/// <see cref="Initialize"/> that later calls to <see cref="Compile"/> rely on,
/// so callers are expected to call <see cref="Initialize"/> first and
/// <see cref="Finish"/> after the last <see cref="Compile"/> call.
/// </remarks>
public abstract class ModuleCompiler
{
    /// <summary>
    /// Declares all functions in a module.
    /// </summary>
    /// <param name="module">The module to declare functions for.</param>
    /// <param name="offset">The index of the first function to define.</param>
    /// <param name="types">A list of function types, one for each function declaration.</param>
    public abstract void Initialize(ModuleInstance module, int offset, IReadOnlyList<FunctionType> types);

    /// <summary>
    /// Compiles a single function by generating code that is equivalent to <paramref name="body"/>.
    /// </summary>
    /// <param name="index">The index of the function to compile.</param>
    /// <param name="body">The function body to compile.</param>
    /// <returns>A compiled function that runs <paramref name="body"/>.</returns>
    public abstract FunctionDefinition Compile(int index, FunctionBody body);

    /// <summary>
    /// Finalizes the module's code generation.
    /// </summary>
    public abstract void Finish();
}
/// <summary>
/// A module compiler that "compiles" WebAssembly function definitions by wrapping them in
/// interpreted function instances.
/// </summary>
public sealed class InterpreterCompiler : ModuleCompiler
{
    // State captured by Initialize and consumed by Compile.
    private ModuleInstance moduleInstance;
    private IReadOnlyList<FunctionType> functionTypes;

    /// <inheritdoc/>
    public override void Initialize(ModuleInstance module, int offset, IReadOnlyList<FunctionType> types)
    {
        moduleInstance = module;
        functionTypes = types;
    }

    /// <inheritdoc/>
    public override FunctionDefinition Compile(int index, FunctionBody body) =>
        new WasmFunctionDefinition(functionTypes[index], body, moduleInstance);

    /// <inheritdoc/>
    public override void Finish()
    {
        // Interpreted functions require no code generation, so there is
        // nothing to finalize.
    }
}
}
<|start_filename|>libwasm/Binary/BinaryWasmReader.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace Wasm.Binary
{
/// <summary>
/// A reader that reads the binary WebAssembly format.
/// </summary>
public class BinaryWasmReader
{
    /// <summary>
    /// Initializes a new instance of the <see cref="Wasm.Binary.BinaryWasmReader"/> class.
    /// </summary>
    /// <param name="reader">The binary reader for a WebAssembly file.</param>
    public BinaryWasmReader(BinaryReader reader)
        : this(reader, UTF8Encoding.UTF8)
    { }

    /// <summary>
    /// Initializes a new instance of the <see cref="Wasm.Binary.BinaryWasmReader"/> class.
    /// </summary>
    /// <param name="reader">The binary reader for a WebAssembly file.</param>
    /// <param name="stringEncoding">The encoding for strings in the WebAssembly file.</param>
    public BinaryWasmReader(
        BinaryReader reader,
        Encoding stringEncoding)
    {
        this.reader = reader;
        this.StringEncoding = stringEncoding;
        this.streamIsEmpty = defaultStreamIsEmptyImpl;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="Wasm.Binary.BinaryWasmReader"/> class.
    /// </summary>
    /// <param name="reader">The binary reader for a WebAssembly file.</param>
    /// <param name="stringEncoding">The encoding for strings in the WebAssembly file.</param>
    /// <param name="streamIsEmpty">Tests if the stream is empty.</param>
    public BinaryWasmReader(
        BinaryReader reader,
        Encoding stringEncoding,
        Func<bool> streamIsEmpty)
    {
        this.reader = reader;
        this.StringEncoding = stringEncoding;
        this.streamIsEmpty = streamIsEmpty;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="Wasm.Binary.BinaryWasmReader"/> class.
    /// </summary>
    /// <param name="reader">The binary reader for a WebAssembly file.</param>
    /// <param name="streamIsEmpty">Tests if the stream is empty.</param>
    public BinaryWasmReader(
        BinaryReader reader,
        Func<bool> streamIsEmpty)
    {
        this.reader = reader;
        this.StringEncoding = UTF8Encoding.UTF8;
        this.streamIsEmpty = streamIsEmpty;
    }

    /// <summary>
    /// The binary reader for a WebAssembly file.
    /// </summary>
    private BinaryReader reader;

    /// <summary>
    /// The encoding that is used to parse strings.
    /// </summary>
    /// <returns>The string encoding.</returns>
    public Encoding StringEncoding { get; private set; }

    /// <summary>
    /// Tests if the stream is empty.
    /// </summary>
    private Func<bool> streamIsEmpty;

    /// <summary>
    /// A default implementation that tests if the stream is empty.
    /// </summary>
    private bool defaultStreamIsEmptyImpl()
    {
        return Position >= reader.BaseStream.Length;
    }

    /// <summary>
    /// Gets the current position of the reader in the WebAssembly file.
    /// </summary>
    public long Position { get; private set; }

    /// <summary>
    /// Reads a single byte.
    /// </summary>
    /// <returns>The byte that was read.</returns>
    public byte ReadByte()
    {
        byte result = reader.ReadByte();
        Position++;
        return result;
    }

    /// <summary>
    /// Reads a range of bytes.
    /// </summary>
    /// <param name="count">The number of bytes to read.</param>
    /// <returns>The array of bytes that were read.</returns>
    public byte[] ReadBytes(int count)
    {
        byte[] results = reader.ReadBytes(count);
        Position += count;
        return results;
    }

    /// <summary>
    /// Parses an unsigned LEB128 variable-length integer, limited to 64 bits.
    /// </summary>
    /// <returns>The parsed unsigned 64-bit integer.</returns>
    public ulong ReadVarUInt64()
    {
        // C# translation of code borrowed from Wikipedia article:
        // https://en.wikipedia.org/wiki/LEB128
        ulong result = 0;
        int shift = 0;
        while (true)
        {
            byte b = ReadByte();
            result |= ((ulong)(b & 0x7F) << shift);
            if ((b & 0x80) == 0)
                break;
            shift += 7;
        }
        return result;
    }

    /// <summary>
    /// Parses an unsigned LEB128 variable-length integer, limited to one bit.
    /// </summary>
    /// <returns>The parsed unsigned 1-bit integer, as a Boolean.</returns>
    public bool ReadVarUInt1()
    {
        // Negate the integer twice to turn it into a Boolean.
        return !(ReadVarUInt64() == 0);
    }

    /// <summary>
    /// Parses an unsigned LEB128 variable-length integer, limited to 7 bits.
    /// </summary>
    /// <returns>The parsed unsigned 7-bit integer.</returns>
    public byte ReadVarUInt7()
    {
        return (byte)ReadVarUInt64();
    }

    /// <summary>
    /// Parses an unsigned LEB128 variable-length integer, limited to 32 bits.
    /// </summary>
    /// <returns>The parsed unsigned 32-bit integer.</returns>
    public uint ReadVarUInt32()
    {
        return (uint)ReadVarUInt64();
    }

    /// <summary>
    /// Parses a signed LEB128 variable-length integer, limited to 64 bits.
    /// </summary>
    /// <returns>The parsed signed 64-bit integer.</returns>
    public long ReadVarInt64()
    {
        // C# translation of code borrowed from Wikipedia article:
        // https://en.wikipedia.org/wiki/LEB128
        long result = 0;
        int shift = 0;
        byte b;
        do
        {
            b = ReadByte();
            result |= ((long)(b & 0x7F) << shift);
            shift += 7;
        } while ((b & 0x80) != 0);
        // Sign bit of byte is second high order bit. (0x40)
        if ((shift < 64) && ((b & 0x40) == 0x40))
        {
            // Sign extend.
            result |= -(1L << shift);
        }
        return result;
    }

    /// <summary>
    /// Parses a signed LEB128 variable-length integer, limited to 7 bits.
    /// </summary>
    /// <returns>The parsed signed 7-bit integer.</returns>
    public sbyte ReadVarInt7()
    {
        return (sbyte)ReadVarInt64();
    }

    /// <summary>
    /// Parses a signed LEB128 variable-length integer, limited to 32 bits.
    /// </summary>
    /// <returns>The parsed signed 32-bit integer.</returns>
    public int ReadVarInt32()
    {
        return (int)ReadVarInt64();
    }

    /// <summary>
    /// Parses a 32-bit floating-point number.
    /// </summary>
    /// <returns>The parsed 32-bit floating-point number.</returns>
    public float ReadFloat32()
    {
        var result = reader.ReadSingle();
        Position += sizeof(float);
        return result;
    }

    /// <summary>
    /// Parses a 64-bit floating-point number.
    /// </summary>
    /// <returns>The parsed 64-bit floating-point number.</returns>
    public double ReadFloat64()
    {
        var result = reader.ReadDouble();
        Position += sizeof(double);
        return result;
    }

    /// <summary>
    /// Reads a WebAssembly language type.
    /// </summary>
    /// <returns>The WebAssembly language type.</returns>
    public WasmType ReadWasmType()
    {
        return (WasmType)ReadVarInt7();
    }

    /// <summary>
    /// Reads a WebAssembly value type.
    /// </summary>
    /// <returns>The WebAssembly value type.</returns>
    public WasmValueType ReadWasmValueType()
    {
        return (WasmValueType)ReadVarInt7();
    }

    /// <summary>
    /// Parses a length-prefixed string.
    /// </summary>
    /// <returns>The parsed string.</returns>
    public string ReadString()
    {
        uint length = ReadVarUInt32();
        byte[] bytes = ReadBytes((int)length);
        return StringEncoding.GetString(bytes);
    }

    /// <summary>
    /// Reads resizable limits.
    /// </summary>
    /// <returns>The resizable limits.</returns>
    public ResizableLimits ReadResizableLimits()
    {
        bool hasMaximum = ReadVarUInt1();
        uint initial = ReadVarUInt32();
        Nullable<uint> max = hasMaximum
            ? new Nullable<uint>(ReadVarUInt32())
            : default(Nullable<uint>);
        return new ResizableLimits(initial, max);
    }

    /// <summary>
    /// Parses a version header.
    /// </summary>
    /// <returns>The parsed version header.</returns>
    public VersionHeader ReadVersionHeader()
    {
        var result = new VersionHeader(reader.ReadUInt32(), reader.ReadUInt32());
        Position += 2 * sizeof(uint);
        return result;
    }

    /// <summary>
    /// Parses a section header.
    /// </summary>
    /// <returns>The parsed section header.</returns>
    public SectionHeader ReadSectionHeader()
    {
        var code = (SectionCode)ReadVarUInt7();
        uint payloadLength = ReadVarUInt32();
        if (code == SectionCode.Custom)
        {
            // For custom sections, the name is part of the payload, so the
            // number of bytes it occupies is subtracted from the payload length.
            uint startPos = (uint)Position;
            var name = ReadString();
            uint nameLength = (uint)Position - startPos;
            return new SectionHeader(new SectionName(name), payloadLength - nameLength);
        }
        else
        {
            return new SectionHeader(new SectionName(code), payloadLength);
        }
    }

    /// <summary>
    /// Reads a section.
    /// </summary>
    /// <returns>The section.</returns>
    public Section ReadSection()
    {
        var header = ReadSectionHeader();
        return ReadSectionPayload(header);
    }

    /// <summary>
    /// Reads the section with the given header.
    /// </summary>
    /// <param name="header">The section header.</param>
    /// <returns>The parsed section.</returns>
    public Section ReadSectionPayload(SectionHeader header)
    {
        if (header.Name.IsCustom)
            return ReadCustomSectionPayload(header);
        else
            return ReadKnownSectionPayload(header);
    }

    /// <summary>
    /// Reads the remaining payload of the section whose payload starts at the given position.
    /// </summary>
    /// <param name="startPosition">The start of the section's payload.</param>
    /// <param name="payloadLength">The length of the section's payload, in bytes.</param>
    /// <returns>The remaining payload of the section whose payload starts at the given position.</returns>
    public byte[] ReadRemainingPayload(long startPosition, uint payloadLength)
    {
        // BUGFIX: the remaining byte count is the declared payload length minus
        // the number of bytes consumed since the payload started. The previous
        // expression, (Position - startPosition - payloadLength), computed
        // consumed-minus-total, which is negative (or zero) for any
        // partially-read payload and made ReadBytes fail.
        return ReadBytes((int)(payloadLength - (Position - startPosition)));
    }

    /// <summary>
    /// Reads the remaining payload of the section whose payload starts at the given position.
    /// </summary>
    /// <param name="startPosition">The start of the section's payload.</param>
    /// <param name="header">The section's header.</param>
    /// <returns>The remaining payload of the section whose payload starts at the given position.</returns>
    public byte[] ReadRemainingPayload(long startPosition, SectionHeader header)
    {
        return ReadRemainingPayload(startPosition, header.PayloadLength);
    }

    /// <summary>
    /// Reads the custom section with the given header.
    /// </summary>
    /// <param name="header">The section header.</param>
    /// <returns>The parsed section.</returns>
    protected virtual Section ReadCustomSectionPayload(SectionHeader header)
    {
        if (header.Name.CustomName == NameSection.CustomName)
        {
            return NameSection.ReadSectionPayload(header, this);
        }
        else
        {
            return new CustomSection(
                header.Name.CustomName,
                ReadBytes((int)header.PayloadLength));
        }
    }

    /// <summary>
    /// Reads the non-custom section with the given header.
    /// </summary>
    /// <param name="header">The section header.</param>
    /// <returns>The parsed section.</returns>
    protected Section ReadKnownSectionPayload(SectionHeader header)
    {
        switch (header.Name.Code)
        {
            case SectionCode.Type:
                return TypeSection.ReadSectionPayload(header, this);
            case SectionCode.Import:
                return ImportSection.ReadSectionPayload(header, this);
            case SectionCode.Function:
                return FunctionSection.ReadSectionPayload(header, this);
            case SectionCode.Table:
                return TableSection.ReadSectionPayload(header, this);
            case SectionCode.Memory:
                return MemorySection.ReadSectionPayload(header, this);
            case SectionCode.Global:
                return GlobalSection.ReadSectionPayload(header, this);
            case SectionCode.Export:
                return ExportSection.ReadSectionPayload(header, this);
            case SectionCode.Start:
                return StartSection.ReadSectionPayload(header, this);
            case SectionCode.Element:
                return ElementSection.ReadSectionPayload(header, this);
            case SectionCode.Code:
                return CodeSection.ReadSectionPayload(header, this);
            case SectionCode.Data:
                return DataSection.ReadSectionPayload(header, this);
            default:
                return ReadUnknownSectionPayload(header);
        }
    }

    /// <summary>
    /// Reads the unknown, non-custom section with the given header.
    /// </summary>
    /// <param name="header">The section header.</param>
    /// <returns>The parsed section.</returns>
    protected virtual Section ReadUnknownSectionPayload(SectionHeader header)
    {
        return new UnknownSection(
            header.Name.Code,
            ReadBytes((int)header.PayloadLength));
    }

    /// <summary>
    /// Reads an entire WebAssembly file.
    /// </summary>
    /// <returns>The WebAssembly file.</returns>
    public WasmFile ReadFile()
    {
        var version = ReadVersionHeader();
        version.Verify();
        var sections = new List<Section>();
        while (!streamIsEmpty())
        {
            sections.Add(ReadSection());
        }
        return new WasmFile(version, sections);
    }
}
}
<|start_filename|>libwasm/Interpret/TrapException.cs<|end_filename|>
using System;
using System.Runtime.Serialization;
namespace Wasm.Interpret
{
/// <summary>
/// A WebAssembly exception that is thrown when WebAssembly execution traps.
/// </summary>
[Serializable]
public class TrapException : WasmException
{
    /// <summary>
    /// Initializes a new instance of the <see cref="TrapException"/> class.
    /// </summary>
    /// <param name="message">A user-friendly error message.</param>
    /// <param name="specMessage">A spec-mandated generic error message.</param>
    public TrapException(string message, string specMessage) : base(message)
    {
        this.SpecMessage = specMessage;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="TrapException"/> class.
    /// </summary>
    /// <param name="info">Serialization info.</param>
    /// <param name="context">A streaming context.</param>
    protected TrapException(
        SerializationInfo info,
        StreamingContext context) : base(info, context) { }

    /// <summary>
    /// Gets the generic error message mandated by the spec, as opposed to the possibly
    /// more helpful message encapsulated in the exception itself.
    /// </summary>
    /// <value>A spec error message, typically one of the <see cref="SpecMessages"/> constants.</value>
    public string SpecMessage { get; private set; }

    /// <summary>
    /// A collection of generic spec error messages for traps.
    /// </summary>
    public static class SpecMessages
    {
        /// <summary>
        /// The error message for out of bounds memory accesses.
        /// </summary>
        public const string OutOfBoundsMemoryAccess = "out of bounds memory access";

        /// <summary>
        /// The error message for when an unreachable instruction is reached.
        /// </summary>
        public const string Unreachable = "unreachable";

        /// <summary>
        /// The error message for when the max execution stack depth is exceeded.
        /// </summary>
        public const string CallStackExhausted = "call stack exhausted";

        /// <summary>
        /// The error message for when integer overflow occurs.
        /// </summary>
        public const string IntegerOverflow = "integer overflow";

        /// <summary>
        /// The error message for when NaN is converted to an integer.
        /// </summary>
        public const string InvalidConversionToInteger = "invalid conversion to integer";

        /// <summary>
        /// The error message for misaligned memory accesses.
        /// </summary>
        public const string MisalignedMemoryAccess = "misaligned memory access";

        /// <summary>
        /// The error message for when an indirect call's expected type does not match
        /// the actual type of the function being called.
        /// </summary>
        public const string IndirectCallTypeMismatch = "indirect call type mismatch";

        /// <summary>
        /// The error message for when an integer is divided by zero.
        /// </summary>
        public const string IntegerDivideByZero = "integer divide by zero";

        /// <summary>
        /// The error message for when an undefined element of a table is accessed.
        /// </summary>
        public const string UndefinedElement = "undefined element";

        /// <summary>
        /// The error message for when an uninitialized element of a table is accessed.
        /// </summary>
        public const string UninitializedElement = "uninitialized element";
    }
}
}
<|start_filename|>libwasm-text/FloatLiteral.cs<|end_filename|>
using System;
using System.Numerics;
namespace Wasm.Text
{
/// <summary>
/// Represents a parsed floating-point number literal.
/// </summary>
public struct FloatLiteral
{
/// <summary>
/// Initializes all fields of a float literal.
/// </summary>
/// <param name="kind">The literal's kind.</param>
/// <param name="isNegative">Tells if the literal's sign is negative.</param>
/// <param name="significand">The literal's significand (or NaN payload), as a non-negative integer.</param>
/// <param name="base">The base for the literal's exponent.</param>
/// <param name="exponent">The exponent to which the base is raised.</param>
private FloatLiteral(
    FloatLiteralKind kind,
    bool isNegative,
    BigInteger significand,
    int @base,
    BigInteger exponent)
{
    // Private: instances are created through the NaN/Number/Zero factory
    // methods and the infinity constants.
    this.Kind = kind;
    this.IsNegative = isNegative;
    this.Significand = significand;
    this.Base = @base;
    this.Exponent = exponent;
}
/// <summary>
/// Gets the float literal's kind.
/// </summary>
/// <value>A float literal kind.</value>
public FloatLiteralKind Kind { get; private set; }

/// <summary>
/// Tells if the float literal's sign is negative.
/// </summary>
/// <value><c>true</c> if the float literal's sign is negative; otherwise, <c>false</c>.</value>
public bool IsNegative { get; private set; }

/// <summary>
/// Tells if the float literal's sign is positive.
/// </summary>
/// <value><c>true</c> if the float literal's sign is positive; otherwise, <c>false</c>.</value>
public bool IsPositive => !IsNegative;

/// <summary>
/// Gets the float literal's significand as a positive integer.
/// </summary>
/// <value>A significand.</value>
public BigInteger Significand { get; private set; }

/// <summary>
/// Gets the float literal's significand and sign as a single integer whose
/// absolute value equals the significand and whose sign equals the sign.
/// </summary>
/// <value>The signed significand.</value>
public BigInteger SignedSignificand => IsNegative ? -Significand : Significand;

/// <summary>
/// Gets the base for the float literal's exponent.
/// </summary>
/// <value>A base.</value>
public int Base { get; private set; }

/// <summary>
/// Gets the float literal's exponent as an integer.
/// </summary>
/// <value>The literal's exponent.</value>
public BigInteger Exponent { get; private set; }
/// <summary>
/// Creates a Not-a-Number float literal with a custom payload.
/// </summary>
/// <param name="isNegative">Tells if the Not-a-Number float is negative.</param>
/// <param name="payload">The NaN payload.</param>
/// <returns>A NaN float literal.</returns>
public static FloatLiteral NaN(bool isNegative, BigInteger payload) =>
    new FloatLiteral(FloatLiteralKind.NaNWithPayload, isNegative, payload, 2, 0);
/// <summary>
/// Creates a canonical Not-a-Number float literal.
/// </summary>
/// <param name="isNegative">Tells if the Not-a-Number float is negative.</param>
/// <returns>A NaN float literal.</returns>
public static FloatLiteral NaN(bool isNegative) =>
    new FloatLiteral(FloatLiteralKind.CanonicalNaN, isNegative, 0, 2, 0);
/// <summary>
/// Creates a numeric float literal.
/// </summary>
/// <param name="isNegative">Tells if the float is negative.</param>
/// <param name="significand">The float's significand.</param>
/// <param name="baseNum">The float's base.</param>
/// <param name="exponent">The exponent to which <paramref name="baseNum"/> is raised.</param>
/// <returns>A numeric float literal.</returns>
private static FloatLiteral Number(bool isNegative, BigInteger significand, int baseNum, BigInteger exponent) =>
    new FloatLiteral(FloatLiteralKind.Number, isNegative, significand, baseNum, exponent);
/// <summary>
/// Creates a numeric float literal that is equal to an integer multiplied by a base exponentiation.
/// </summary>
/// <param name="significand">The float's significand.</param>
/// <param name="baseNum">The float's base.</param>
/// <param name="exponent">The exponent to which <paramref name="baseNum"/> is raised.</param>
/// <returns>A numeric float literal.</returns>
public static FloatLiteral Number(BigInteger significand, int baseNum, BigInteger exponent)
{
    // Split the signed significand into a sign flag and a magnitude.
    if (significand < 0)
    {
        return Number(true, -significand, baseNum, exponent);
    }
    return Number(false, significand, baseNum, exponent);
}
/// <summary>
/// Creates a numeric float literal that is equal to an integer.
/// </summary>
/// <param name="significand">The float's significand.</param>
/// <param name="baseNum">The float's base.</param>
/// <returns>A numeric float literal.</returns>
public static FloatLiteral Number(BigInteger significand, int baseNum) =>
    Number(significand, baseNum, 0);
/// <summary>
/// Creates a zero float literal constant.
/// </summary>
/// <param name="baseNum">The base for the zero literal.</param>
/// <returns>A zero float literal.</returns>
public static FloatLiteral Zero(int baseNum) => Number(false, 0, baseNum, 0);

/// <summary>
/// A float literal representing positive infinity.
/// </summary>
public static readonly FloatLiteral PositiveInfinity = new FloatLiteral(FloatLiteralKind.Infinity, false, 0, 2, 0);

/// <summary>
/// A float literal representing negative infinity.
/// </summary>
// Infinity literals carry a dummy base-2 zero significand/exponent; only
// the kind and the sign are meaningful.
public static readonly FloatLiteral NegativeInfinity = new FloatLiteral(FloatLiteralKind.Infinity, true, 0, 2, 0);
/// <summary>
/// Adds a value to this float literal's exponent.
/// </summary>
/// <param name="exponentDelta">The value to add to the exponent.</param>
/// <returns>A new float literal.</returns>
public FloatLiteral AddToExponent(BigInteger exponentDelta)
{
    // Only numeric literals carry a meaningful exponent; NaN and infinity
    // literals are returned unchanged.
    return Kind == FloatLiteralKind.Number
        ? Number(IsNegative, Significand, Base, Exponent + exponentDelta)
        : this;
}
/// <inheritdoc/>
public override string ToString()
{
    var sign = IsNegative ? "-" : "";
    if (Kind == FloatLiteralKind.NaNWithPayload)
    {
        return $"{sign}nan:0x{Significand.ToString("x")}";
    }
    else if (Kind == FloatLiteralKind.CanonicalNaN)
    {
        return $"{sign}nan";
    }
    else if (Kind == FloatLiteralKind.Infinity)
    {
        return $"{sign}inf";
    }
    else
    {
        // Numeric literals (and any unrecognized kind) are rendered as
        // 'significand * base ^ exponent'.
        return $"{sign}{Significand} * {Base} ^ {Exponent}";
    }
}
/// <summary>
/// Adds an integer to a float literal.
/// </summary>
/// <param name="first">An integer.</param>
/// <param name="second">A float literal.</param>
/// <returns>A float literal that is the sum of <paramref name="first"/> and <paramref name="second"/>.</returns>
public static FloatLiteral operator+(BigInteger first, FloatLiteral second)
{
    // Lift the integer to a float literal in the same base, then reuse the
    // literal-plus-literal addition.
    var lifted = Number(first, second.Base, 0);
    return lifted + second;
}
/// <summary>
/// Computes the sum of two numeric float literals with equal bases.
/// </summary>
/// <param name="first">A first float literal.</param>
/// <param name="second">A second float literal.</param>
/// <returns>The sum of <paramref name="first"/> and <paramref name="second"/>.</returns>
/// <exception cref="WasmException">
/// Thrown when either operand is not a number or when the operands' bases differ.
/// </exception>
public static FloatLiteral operator+(FloatLiteral first, FloatLiteral second)
{
    if (first.Kind != FloatLiteralKind.Number || second.Kind != FloatLiteralKind.Number)
    {
        throw new WasmException("Cannot add non-number float literals.");
    }
    else if (first.Base != second.Base)
    {
        throw new WasmException("Cannot add float literals with incompatible bases.");
    }

    if (first.Exponent == second.Exponent)
    {
        // If both numbers have the same exponent, then adding them is easy. Just
        // compute the sum of their significands.
        return Number(first.SignedSignificand + second.SignedSignificand, first.Base, first.Exponent);
    }
    else if (first.Exponent < second.Exponent)
    {
        // If the first number's exponent is less than the second number's, then we
        // can multiply the second number's significand by its base until the
        // exponents become equal.
        // (Rewriting the larger-exponent operand at the smaller exponent is
        // exact, so no precision is lost.)
        var secondSignificand = second.SignedSignificand;
        var firstExponent = first.Exponent;
        while (firstExponent != second.Exponent)
        {
            secondSignificand *= first.Base;
            firstExponent++;
        }
        return Number(first.SignedSignificand + secondSignificand, first.Base, first.Exponent);
    }
    else
    {
        // Symmetric case: swap the operands so the branch above applies.
        return second + first;
    }
}
/// <summary>
/// Negates a float literal.
/// </summary>
/// <param name="value">The float literal to negate.</param>
/// <returns>The additive inverse of a float literal.</returns>
public static FloatLiteral operator-(FloatLiteral value)
{
    // Only the sign flips; kind, significand, base and exponent are reused.
    var flippedSign = !value.IsNegative;
    return new FloatLiteral(value.Kind, flippedSign, value.Significand, value.Base, value.Exponent);
}
/// <summary>
/// Transforms a float literal to a double-precision floating point number.
/// </summary>
/// <param name="value">A float literal.</param>
public static explicit operator double(FloatLiteral value)
{
    double result;
    switch (value.Kind)
    {
        case FloatLiteralKind.Infinity:
            result = double.PositiveInfinity;
            break;
        case FloatLiteralKind.NaNWithPayload:
            result = CreateFloat64NaNWithSignificand((long)value.Significand);
            break;
        case FloatLiteralKind.CanonicalNaN:
            result = double.NaN;
            break;
        case FloatLiteralKind.Number:
        default:
            // To convert a float literal to a float, we need to do the following:
            // 1. Convert the literal to base 2.
            // 2. Increment the exponent until the fractional part achieves the form
            // required by the IEEE 754 standard.
            var exp = value.Exponent;
            var frac = value.Significand;
            if (frac == 0)
            {
                // A zero significand is zero regardless of exponent; the
                // sign is applied by the Setsign call at the bottom.
                result = 0;
                break;
            }
            // Decompose the base into a binary base that is a factor of the base
            // and a remainder, such that `base = 2 ^ binBase * baseRemainder`, where
            // binBase is maximal.
            var binBase = 0;
            var baseRemainder = value.Base;
            while (baseRemainder % 2 == 0)
            {
                binBase++;
                baseRemainder /= 2;
            }
            // Now we can make the following observation:
            //
            // base ^ exp = (2 ^ binBase * baseRemainder) ^ exp
            // = 2 ^ (binBase * exp) * baseRemainder ^ exp
            //
            // We hence tentatively set our binary exponent to `binBase * exp`.
            // NOTE(review): the (int) cast assumes exponents fit in 32 bits — confirm.
            var binExp = binBase * (int)exp;
            // We will now fold `baseRemainder ^ exp` into our fractional part. This is
            // easy if `exp` is positive---just multiply the fractional part by `baseRemainder ^ exp`.
            bool negExp = exp < 0;
            if (negExp)
            {
                exp = -exp;
            }
            const int doubleBitLength = 52;
            bool nonzeroRemainder = false;
            int bitLength;
            if (negExp)
            {
                // If `exp` is negative then things are more complicated; we need to ensure that we do
                // not lose information due to integer division. For instance, if we were to naively
                // convert `1 * 3 ^ 1` to base 2 using the same method as above but with division instead
                // of multiplication, then we would get `1 / 3 = 0` as the fractional part of our resulting
                // float. That's not what we want.
                //
                // To avoid these types of mishaps, we will pad the fractional part with zeros and update
                // the exponent to compensate.
                //
                // To find out how many zeros we need to pad the fractional part with, we consider the following:
                // * We want to end up with at least `doubleBitLength + 2` bits of precision (the first bit
                //   is always '1' and is implied for normal floats and the last bit is used for rounding).
                // * Dividing by `baseRemainder` will reduce the number of bits in the final number.
                //
                // We will hence extend the fractional part to `doubleBitLength + 2 + log2(supBaseRemainder) * exp`,
                // where `supBaseRemainder` is the smallest power of two greater than `baseRemainder`.
                var supBaseRemainder = 1;
                var supBaseRemainderLog2 = 0;
                while (baseRemainder > supBaseRemainder)
                {
                    supBaseRemainder *= 2;
                    supBaseRemainderLog2++;
                }
                // Extend the fractional part to at least the desired bit length.
                var desiredBitLength = doubleBitLength + 2 + supBaseRemainderLog2 * exp;
                bitLength = GetBitLength(frac);
                while (bitLength < desiredBitLength)
                {
                    bitLength++;
                    frac <<= 1;
                    binExp--;
                }
                // Now repeatedly divide it by `baseRemainder`, remembering whether
                // any information was lost (it influences round-to-even below).
                BigInteger divisor = 1;
                for (BigInteger i = 0; i < exp; i++)
                {
                    divisor *= baseRemainder;
                }
                // (Removed an unused local, `oldFrac`, that saved the
                // pre-division value of `frac` but was never read.)
                frac = BigInteger.DivRem(frac, divisor, out BigInteger rem);
                nonzeroRemainder = rem > 0;
            }
            else
            {
                // Positive exponent: fold `baseRemainder ^ exp` in by plain multiplication.
                for (BigInteger i = 0; i < exp; i++)
                {
                    frac *= baseRemainder;
                }
            }
            // At this point, `frac * 2 ^ binExp` equals the absolute value of our float literal.
            // However, `frac` and `binExpr` are not yet normalized. To normalize them, we will
            // change `frac` until its bit length is exactly equal to the number of bits in the
            // fractional part of a double (52) plus one (=53). After that, we drop the first bit
            // and keep only the 52 bits that trail it. We increment the exponent by 52.
            // 2. Increment the exponent.
            binExp += doubleBitLength;
            // Make sure the bit length equals 53 exactly.
            var (finalFrac, finalBinExp) = Round(frac, binExp, nonzeroRemainder, doubleBitLength + 1);
            if (finalBinExp > 1023)
            {
                // If the exponent is greater than 1023, then we round toward infinity.
                result = double.PositiveInfinity;
                break;
            }
            else if (finalBinExp < -1022)
            {
                if (finalBinExp >= -1022 - doubleBitLength)
                {
                    // If the exponent is less than -1022 but greater than (-1022 - 52), then
                    // we'll try to create a subnormal number.
                    var precision = doubleBitLength;
                    while (precision > 0)
                    {
                        // TODO: get rounding right for subnormals.
                        (finalFrac, finalBinExp) = Round(frac, binExp, nonzeroRemainder, precision);
                        precision--;
                        if (finalBinExp >= -1022)
                        {
                            // Note: unlike the normal path below, the sign is baked
                            // in here and this returns directly, bypassing Setsign.
                            return CreateNormalFloat64(value.IsNegative, -1023, finalFrac);
                        }
                    }
                }
                // Otherwise, we'll just round toward zero.
                result = 0;
                break;
            }
            // Convert the fractional part to a 64-bit integer and drop the
            // leading one. Compose the double.
            result = CreateNormalFloat64(false, finalBinExp, finalFrac);
            break;
    }
    return Interpret.ValueHelpers.Setsign(result, value.IsNegative);
}
/// <summary>
/// Takes a positive significand and a binary exponent, sets the significand's
/// bit length to a particular precision (updating the exponent accordingly
/// such that approximately the same number is represented), and rounds
/// the significand to produce a number that is as close to the original
/// number as possible.
/// </summary>
/// <param name="significand">A positive significand.</param>
/// <param name="exponent">A binary exponent.</param>
/// <param name="nonzeroRemainder">
/// Tells if the significand has trailing ones not encoded in <paramref name="significand"/>.
/// </param>
/// <param name="precision">The bit precision of the resulting significand.</param>
/// <returns>A (significand, exponent) pair.</returns>
private static (BigInteger significand, int exponent) Round(
    BigInteger significand,
    int exponent,
    bool nonzeroRemainder,
    int precision)
{
    var bitLength = GetBitLength(significand);
    int delta = precision - bitLength;
    if (delta >= 0)
    {
        // If the significand is insufficiently precise, then we can
        // just add more bits of precision by appending zero bits,
        // i.e., shifting to the left.
        return (significand << delta, exponent - delta);
    }
    else
    {
        // If the significand is too precise, then we need to eliminate
        // bits of precision. We also need to round in this step.
        // Rounding implies that we find a minimal range `[lower, upper]` such that
        // `significand \in [lower, upper]`. Then, we pick either `lower` or `upper`
        // as our result, depending on which is closer or a tie-breaking round-to-even
        // rule.
        // Find `lower`, `upper`.
        delta = -delta;
        var lower = significand >> delta;
        var lowerExponent = exponent + delta;
        var upper = lower + 1;
        var upperExponent = lowerExponent;
        if (GetBitLength(upper) == precision + 1)
        {
            // The increment carried into a new bit; renormalize.
            upper >>= 1;
            upperExponent++;
        }
        // Now we just need to pick either `lower` or `upper`. The digits in the
        // significand that are not included in `lower` are decisive here.
        var lowerRoundingError = significand - (lower << delta);
        // BUGFIX: the midpoint must be computed as a BigInteger. The previous
        // `1 << (delta - 1)` was a 32-bit int shift, and C# masks int shift
        // counts to their low five bits, so any `delta` greater than 31
        // (i.e., a significand exceeding the target precision by 32+ bits)
        // produced a far-too-small midpoint and hence incorrect rounding.
        var midpoint = BigInteger.One << (delta - 1);
        if (lowerRoundingError < midpoint
            || (lowerRoundingError == midpoint && !nonzeroRemainder && lower % 2 == 0))
        {
            return (lower, lowerExponent);
        }
        else
        {
            return (upper, upperExponent);
        }
    }
}
/// <summary>
/// Computes the number of bits required to represent the given
/// non-negative integer, i.e., the index of its highest set bit plus one.
/// Zero has a bit length of zero.
/// </summary>
/// <param name="value">A non-negative integer.</param>
/// <returns>The integer's bit length.</returns>
private static int GetBitLength(BigInteger value)
{
    var bits = 0;
    for (; value > 0; value >>= 1)
    {
        bits++;
    }
    return bits;
}
/// <summary>
/// Creates a double from a sign, an exponent and a significand.
/// </summary>
/// <param name="isNegative">
/// <c>true</c> if the double-precision floating-point number is negated; otherwise, <c>false</c>.
/// </param>
/// <param name="exponent">
/// The exponent to which the number's base (2) is raised.
/// </param>
/// <param name="fraction">
/// The fractional part of the float.
/// </param>
/// <returns>
/// A floating-point number that is equal to (-1)^<paramref name="isNegative"/> * 2^(<paramref name="exponent"/> - 1023) * 1.<paramref name="fraction"/>.
/// </returns>
private static double CreateNormalFloat64(bool isNegative, int exponent, BigInteger fraction)
{
    // Keep only the low 52 bits of the fraction and delegate to the
    // 64-bit integer overload, which assembles the final bit pattern.
    var fractionBits = (long)fraction & 0x000fffffffffffffL;
    return CreateNormalFloat64(isNegative, exponent, fractionBits);
}
/// <summary>
/// Creates a double from a sign, an exponent and a significand.
/// </summary>
/// <param name="isNegative">
/// <c>true</c> if the double-precision floating-point number is negated; otherwise, <c>false</c>.
/// </param>
/// <param name="exponent">
/// The exponent to which the number's base (2) is raised.
/// </param>
/// <param name="fraction">
/// The fractional part of the float.
/// </param>
/// <returns>
/// A floating-point number that is equal to (-1)^<paramref name="isNegative"/> * 2^(<paramref name="exponent"/> - 1023) * 1.<paramref name="fraction"/>.
/// </returns>
private static double CreateNormalFloat64(bool isNegative, int exponent, long fraction)
{
    // Assemble the IEEE 754 binary64 fields: sign (bit 63),
    // biased exponent (bits 52-62, bias 1023) and fraction (bits 0-51).
    long signBit = isNegative ? 1L << 63 : 0L;
    long biasedExponent = (long)(exponent + 1023) << 52;
    return Wasm.Interpret.ValueHelpers.ReinterpretAsFloat64(signBit | biasedExponent | fraction);
}
/// <summary>
/// Transforms a float literal to a single-precision floating point number.
/// </summary>
/// <param name="value">A float literal.</param>
public static explicit operator float(FloatLiteral value)
{
    switch (value.Kind)
    {
        case FloatLiteralKind.Infinity:
            return Interpret.ValueHelpers.Setsign(float.PositiveInfinity, value.IsNegative);
        case FloatLiteralKind.NaNWithPayload:
            // Truncate the payload to the 32-bit significand field.
            return Interpret.ValueHelpers.Setsign(
                CreateFloat32NaNWithSignificand((int)value.Significand),
                value.IsNegative);
        case FloatLiteralKind.CanonicalNaN:
            return Interpret.ValueHelpers.Setsign(float.NaN, value.IsNegative);
        case FloatLiteralKind.Number:
        default:
            // Numbers reuse the double conversion and narrow the result;
            // that conversion already applies the sign.
            return (float)(double)value;
    }
}
/// <summary>
/// Losslessly changes a float literal's base. Base changes only work if old base
/// is a power of the new base.
/// </summary>
/// <param name="newBase">The new base.</param>
/// <returns>An equivalent float literal with base <paramref name="newBase"/>.</returns>
public FloatLiteral ChangeBase(int newBase)
{
    if (Kind != FloatLiteralKind.Number || Base == newBase)
    {
        // Non-numbers have no meaningful base; identical bases need no work.
        return this;
    }
    else if (Exponent == 0)
    {
        // With a zero exponent the base is irrelevant to the value.
        return FloatLiteral.Number(IsNegative, Significand, newBase, 0);
    }

    // Note: `x * (n ^ m) ^ k` equals `x * n ^ (m * k)`.
    // Determine m such that Base == newBase ^ m by repeated division.
    var exponentScale = 1;
    var remainingBase = Base;
    while (remainingBase != newBase)
    {
        if (remainingBase < newBase || remainingBase % newBase != 0)
        {
            throw new InvalidOperationException(
                $"Float literal '{this}' with base '{Base}' cannot be transformed losslessly to float with base '{newBase}'.");
        }
        remainingBase /= newBase;
        exponentScale++;
    }
    return FloatLiteral.Number(IsNegative, Significand, newBase, exponentScale * Exponent);
}
/// <summary>
/// Creates a double-precision NaN whose significand field holds the
/// given payload bits.
/// </summary>
/// <param name="significand">The payload to store in the NaN's significand field.</param>
/// <returns>A NaN carrying the given payload.</returns>
private static double CreateFloat64NaNWithSignificand(long significand)
{
    // Start from the canonical NaN bit pattern, clear its significand
    // field and substitute the requested payload bits.
    // NOTE(review): a zero payload would clear the entire significand and
    // yield an infinity instead of a NaN; callers presumably always pass
    // a nonzero payload — confirm.
    long bits = BitConverter.DoubleToInt64Bits(double.NaN);
    bits &= ~0xfffffffffffffL;
    bits |= significand;
    return BitConverter.Int64BitsToDouble(bits);
}
/// <summary>
/// Creates a single-precision NaN whose significand field holds the
/// given payload bits.
/// </summary>
/// <param name="significand">The payload to store in the NaN's significand field.</param>
/// <returns>A NaN carrying the given payload.</returns>
private static float CreateFloat32NaNWithSignificand(int significand)
{
    // Start from the canonical NaN bit pattern, clear its 23-bit
    // significand field and substitute the requested payload bits.
    int bits = Interpret.ValueHelpers.ReinterpretAsInt32(float.NaN);
    bits &= ~0x7fffff;
    bits |= significand;
    return Interpret.ValueHelpers.ReinterpretAsFloat32(bits);
}
}
/// <summary>
/// An enumeration of different kinds of float literals.
/// </summary>
public enum FloatLiteralKind
{
/// <summary>
/// Indicates that a float literal represents a concrete number,
/// encoded as a significand, a base and an exponent.
/// </summary>
Number,
/// <summary>
/// Indicates that a float literal represents a canonical Not-a-Number (NaN) value.
/// </summary>
CanonicalNaN,
/// <summary>
/// Indicates that a float literal represents a Not-a-Number (NaN) value
/// with a custom payload. The payload bits are taken from the literal's
/// significand when the literal is converted to a float or double.
/// </summary>
NaNWithPayload,
/// <summary>
/// Indicates that a float literal represents an infinite quantity.
/// </summary>
Infinity
}
}
<|start_filename|>libwasm/GlobalSection.cs<|end_filename|>
using System.Collections.Generic;
using System.IO;
using Wasm.Binary;
namespace Wasm
{
/// <summary>
/// Represents a global section.
/// </summary>
public sealed class GlobalSection : Section
{
    /// <summary>
    /// Creates an empty global section.
    /// </summary>
    public GlobalSection()
    {
        this.GlobalVariables = new List<GlobalVariable>();
        // BUGFIX: ExtraPayload was previously left null here, which made
        // WritePayloadTo and Dump throw a NullReferenceException on a
        // default-constructed section. Initialize it to an empty array,
        // consistent with the other constructors.
        this.ExtraPayload = new byte[0];
    }

    /// <summary>
    /// Creates a global section from the given list of global variables.
    /// </summary>
    /// <param name="globalVariables">The global section's list of global variables.</param>
    public GlobalSection(IEnumerable<GlobalVariable> globalVariables)
        : this(globalVariables, new byte[0])
    {
    }

    /// <summary>
    /// Creates a global section from the given list of global variables and additional payload.
    /// </summary>
    /// <param name="globalVariables">The global section's list of global variables.</param>
    /// <param name="extraPayload">The global section's additional payload.</param>
    public GlobalSection(IEnumerable<GlobalVariable> globalVariables, byte[] extraPayload)
    {
        this.GlobalVariables = new List<GlobalVariable>(globalVariables);
        this.ExtraPayload = extraPayload;
    }

    /// <inheritdoc/>
    public override SectionName Name => new SectionName(SectionCode.Global);

    /// <summary>
    /// Gets this global section's list of global variables.
    /// </summary>
    /// <returns>A list of global variable definitions.</returns>
    public List<GlobalVariable> GlobalVariables { get; private set; }

    /// <summary>
    /// This global section's additional payload.
    /// </summary>
    /// <returns>The additional payload, as an array of bytes.</returns>
    public byte[] ExtraPayload { get; set; }

    /// <summary>
    /// Writes this WebAssembly section's payload to the given binary WebAssembly writer.
    /// </summary>
    /// <param name="writer">The writer to which the payload is written.</param>
    public override void WritePayloadTo(BinaryWasmWriter writer)
    {
        // Binary layout: entry count, the entries themselves, then any
        // extra (unparsed) payload bytes.
        writer.WriteVarUInt32((uint)GlobalVariables.Count);
        foreach (var globalVariable in GlobalVariables)
        {
            globalVariable.WriteTo(writer);
        }
        writer.Writer.Write(ExtraPayload);
    }

    /// <summary>
    /// Reads the global section with the given header.
    /// </summary>
    /// <param name="header">The section header.</param>
    /// <param name="reader">The WebAssembly file reader.</param>
    /// <returns>The parsed section.</returns>
    public static GlobalSection ReadSectionPayload(SectionHeader header, BinaryWasmReader reader)
    {
        long startPos = reader.Position;
        // Read the global variable definitions.
        uint count = reader.ReadVarUInt32();
        var globalVars = new List<GlobalVariable>();
        for (uint i = 0; i < count; i++)
        {
            globalVars.Add(GlobalVariable.ReadFrom(reader));
        }
        // Skip any remaining bytes and retain them as extra payload.
        var extraPayload = reader.ReadRemainingPayload(startPos, header);
        return new GlobalSection(globalVars, extraPayload);
    }

    /// <inheritdoc/>
    public override void Dump(TextWriter writer)
    {
        writer.Write(Name.ToString());
        writer.Write("; number of entries: ");
        writer.Write(GlobalVariables.Count);
        writer.WriteLine();
        var indentedWriter = DumpHelpers.CreateIndentedTextWriter(writer);
        for (int i = 0; i < GlobalVariables.Count; i++)
        {
            writer.Write("#{0}:", i);
            indentedWriter.WriteLine();
            GlobalVariables[i].Dump(indentedWriter);
            writer.WriteLine();
        }
        if (ExtraPayload.Length > 0)
        {
            writer.Write("Extra payload size: ");
            writer.Write(ExtraPayload.Length);
            writer.WriteLine();
            DumpHelpers.DumpBytes(ExtraPayload, writer);
            writer.WriteLine();
        }
    }
}
/// <summary>
/// Describes a global variable's type and mutability.
/// </summary>
public sealed class GlobalType
{
    /// <summary>
    /// Creates a global type from the given content type and mutability.
    /// </summary>
    /// <param name="contentType">The type of content in the global type.</param>
    /// <param name="isMutable">The global type's mutability.</param>
    public GlobalType(WasmValueType contentType, bool isMutable)
    {
        ContentType = contentType;
        IsMutable = isMutable;
    }

    /// <summary>
    /// Gets or sets the type of content stored in globals of this type.
    /// </summary>
    /// <returns>The type of content stored in globals of this type.</returns>
    public WasmValueType ContentType { get; set; }

    /// <summary>
    /// Gets or sets the mutability of globals of this type.
    /// </summary>
    /// <returns>The mutability of globals of this type.</returns>
    public bool IsMutable { get; set; }

    /// <summary>
    /// Reads a global variable type from the given WebAssembly reader.
    /// </summary>
    /// <param name="reader">The WebAssembly reader to use.</param>
    /// <returns>The global variable type that was read.</returns>
    public static GlobalType ReadFrom(BinaryWasmReader reader)
    {
        // Binary layout: a value type followed by a one-bit mutability flag.
        var contentType = reader.ReadWasmValueType();
        var mutability = reader.ReadVarUInt1();
        return new GlobalType(contentType, mutability);
    }

    /// <summary>
    /// Writes this global variable type to the given WebAssembly writer.
    /// </summary>
    /// <param name="writer">The WebAssembly writer to use.</param>
    public void WriteTo(BinaryWasmWriter writer)
    {
        writer.WriteWasmValueType(ContentType);
        writer.WriteVarUInt1(IsMutable);
    }

    /// <summary>
    /// Writes a textual representation of this global variable type to the given writer.
    /// </summary>
    /// <param name="writer">The writer to which text is written.</param>
    public void Dump(TextWriter writer)
    {
        writer.Write("{type: ");
        DumpHelpers.DumpWasmType(ContentType, writer);
        writer.Write(", is_mutable: ");
        writer.Write(IsMutable);
        writer.Write("}");
    }
}
/// <summary>
/// Describes a global variable's type, mutability and initial value.
/// </summary>
public sealed class GlobalVariable
{
    /// <summary>
    /// Creates a global variable definition from the given type and initial value.
    /// </summary>
    /// <param name="type">The global variable definition's type.</param>
    /// <param name="initialValue">The global variable definition's initial value.</param>
    public GlobalVariable(GlobalType type, InitializerExpression initialValue)
    {
        Type = type;
        InitialValue = initialValue;
    }

    /// <summary>
    /// Gets or sets a description of this global variable.
    /// </summary>
    /// <returns>The global variable's description.</returns>
    public GlobalType Type { get; set; }

    /// <summary>
    /// Gets or sets this global variable's initial value.
    /// </summary>
    /// <returns>The initial value.</returns>
    public InitializerExpression InitialValue { get; set; }

    /// <summary>
    /// Reads a global variable definition from the given WebAssembly reader.
    /// </summary>
    /// <param name="reader">The WebAssembly reader to use.</param>
    /// <returns>The global variable definition that was read.</returns>
    public static GlobalVariable ReadFrom(BinaryWasmReader reader)
    {
        // The type descriptor precedes the initializer expression in the
        // binary encoding; read them in that order.
        var type = GlobalType.ReadFrom(reader);
        var initialValue = InitializerExpression.ReadFrom(reader);
        return new GlobalVariable(type, initialValue);
    }

    /// <summary>
    /// Writes this global variable definition to the given WebAssembly writer.
    /// </summary>
    /// <param name="writer">The WebAssembly writer to use.</param>
    public void WriteTo(BinaryWasmWriter writer)
    {
        Type.WriteTo(writer);
        InitialValue.WriteTo(writer);
    }

    /// <summary>
    /// Writes a textual representation of this global variable definition to the given writer.
    /// </summary>
    /// <param name="writer">The writer to which text is written.</param>
    public void Dump(TextWriter writer)
    {
        writer.Write("- Type: ");
        Type.Dump(writer);
        writer.WriteLine();
        writer.Write("- Initial value:");
        var indentedWriter = DumpHelpers.CreateIndentedTextWriter(writer);
        foreach (var instruction in InitialValue.BodyInstructions)
        {
            indentedWriter.WriteLine();
            instruction.Dump(indentedWriter);
        }
    }
}
}
<|start_filename|>libwasm/Instructions/NullaryInstruction.cs<|end_filename|>
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes a WebAssembly stack machine instruction that does not have any immediates.
/// </summary>
public sealed class NullaryInstruction : Instruction
{
    /// <summary>
    /// Creates a nullary instruction: an instruction that does not take any immediates.
    /// </summary>
    /// <param name="op">The nullary instruction's operator.</param>
    public NullaryInstruction(NullaryOperator op)
    {
        this.op = op;
    }

    // The operator backing the 'Op' property.
    private readonly NullaryOperator op;

    /// <summary>
    /// Gets the operator for this instruction.
    /// </summary>
    /// <returns>The instruction's operator.</returns>
    public override Operator Op => op;

    /// <summary>
    /// Writes this instruction's immediates (but not its opcode)
    /// to the given WebAssembly file writer.
    /// </summary>
    /// <param name="writer">The writer to write this instruction's immediates to.</param>
    public override void WriteImmediatesTo(BinaryWasmWriter writer)
    {
        // Nullary instructions carry no immediates, so nothing is written.
    }
}
}
<|start_filename|>unit-tests/Scripts/ScriptTests.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Loyc.MiniTest;
using Pixie;
using Wasm.Interpret;
using Wasm.Interpret.Jit;
using Wasm.Text;
namespace Wasm.Scripts
{
[TestFixture]
public class ScriptTests
{
    // Spec test scripts that are currently known to fail; they are skipped.
    private static readonly string[] blacklist = new[] {
        "const.wast",
        "float_exprs.wast",
        "float_literals.wast",
        "float_misc.wast",
        "linking.wast"
    };

    [Test]
    public void RunSpecScriptsWithInterpreter()
    {
        RunSpecScripts("interpreter", null);
    }

    [Test]
    public void RunSpecScriptsWithJit()
    {
        RunSpecScripts("jit", () => new JitCompiler());
    }

    /// <summary>
    /// Runs every non-blacklisted spec script with the given module compiler
    /// and fails the test if any script throws.
    /// </summary>
    /// <param name="compilerName">A human-readable compiler name, used for logging only.</param>
    /// <param name="compiler">
    /// A factory that creates a module compiler; <c>null</c> selects the default interpreter.
    /// </param>
    public void RunSpecScripts(string compilerName, Func<ModuleCompiler> compiler)
    {
        var failed = new SortedSet<string>();
        var total = ScriptRunner.TestStatistics.Empty;
        foreach (var name in Directory.EnumerateFiles(Path.Combine("spec", "test", "core")).OrderBy(x => x))
        {
            if (name.EndsWith(".wast") && !blacklist.Any(x => name.EndsWith(x)))
            {
                Console.WriteLine($" - {name} ({compilerName})");
                try
                {
                    var tally = RunSpecScript(name, compiler);
                    total += tally;
                    Console.WriteLine($" -> {tally}");
                }
                catch
                {
                    // BUGFIX: use Path.GetFileName instead of splitting on '/'.
                    // Path.Combine above produces platform-specific separators,
                    // so splitting on '/' failed to strip directories on
                    // platforms whose separator is '\'.
                    failed.Add(Path.GetFileName(name));
                }
            }
        }
        if (failed.Count > 0)
        {
            Console.WriteLine("Failed: " + string.Join(", ", failed.Select(x => $"\"{x}\"")));
            Assert.Fail();
        }
        Console.WriteLine($"Total: {total}");
    }

    // Parses and runs a single spec script, returning its pass/fail tally.
    private ScriptRunner.TestStatistics RunSpecScript(string scriptPath, Func<ModuleCompiler> compiler)
    {
        var log = new TestLog(new[] { Severity.Error }, NullLog.Instance);
        var runner = new ScriptRunner(log, compiler);
        var scriptText = File.ReadAllText(scriptPath);
        return runner.Run(scriptText, scriptPath);
    }
}
}
<|start_filename|>libwasm/Instructions/NullaryOperator.cs<|end_filename|>
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes an operator that does not have any immediates.
/// </summary>
public sealed class NullaryOperator : Operator
{
    /// <summary>
    /// Creates a nullary operator.
    /// </summary>
    /// <param name="opCode">The operator's opcode.</param>
    /// <param name="declaringType">A type that defines the operator, if any.</param>
    /// <param name="mnemonic">The operator's mnemonic.</param>
    public NullaryOperator(byte opCode, WasmType declaringType, string mnemonic)
        : base(opCode, declaringType, mnemonic)
    {
        sharedInstruction = new NullaryInstruction(this);
    }

    /// <summary>
    /// A nullary instruction for this operator. Since nullary operators don't take
    /// any values, their instruction instances can be shared.
    /// </summary>
    private readonly NullaryInstruction sharedInstruction;

    /// <summary>
    /// Gets an instruction that applies this operator.
    /// </summary>
    /// <returns>An instruction.</returns>
    public NullaryInstruction Create() => sharedInstruction;

    /// <summary>
    /// Reads the immediates (not the opcode) of a WebAssembly instruction
    /// for this operator from the given reader and returns the result as an
    /// instruction.
    /// </summary>
    /// <param name="reader">The WebAssembly file reader to read immediates from.</param>
    /// <returns>A WebAssembly instruction.</returns>
    public override Instruction ReadImmediates(BinaryWasmReader reader)
    {
        // There are no immediates to read; hand out the shared instance.
        return sharedInstruction;
    }
}
}
<|start_filename|>libwasm/Binary/SectionHeader.cs<|end_filename|>
namespace Wasm.Binary
{
/// <summary>
/// Represents a section's header.
/// </summary>
public struct SectionHeader
{
    /// <summary>
    /// Creates a section header for a non-custom section with the given section
    /// name and payload length.
    /// </summary>
    /// <param name="name">The section name.</param>
    /// <param name="payloadLength">The length of the payload.</param>
    public SectionHeader(SectionName name, uint payloadLength)
    {
        this.Name = name;
        this.PayloadLength = payloadLength;
    }

    /// <summary>
    /// Gets the section's name.
    /// </summary>
    /// <returns>The section's name.</returns>
    public SectionName Name { get; private set; }

    /// <summary>
    /// Gets the length of the payload, in bytes.
    /// </summary>
    /// <returns>The length of the payload, in bytes.</returns>
    public uint PayloadLength { get; private set; }

    /// <inheritdoc/>
    public override string ToString() => $"{Name}, payload size: {PayloadLength}";
}
}
<|start_filename|>libwasm/ResizableLimits.cs<|end_filename|>
using System;
using System.IO;
using System.Text;
using Wasm.Binary;
namespace Wasm
{
/// <summary>
/// A description of the limits of a table or memory.
/// </summary>
public struct ResizableLimits
{
    /// <summary>
    /// Creates resizable limits with the given initial size and no maximal
    /// size.
    /// </summary>
    /// <param name="initial">The initial size of the resizable limits.</param>
    public ResizableLimits(uint initial)
        : this(initial, null)
    {
    }

    /// <summary>
    /// Creates resizable limits with the given initial and maximal sizes.
    /// </summary>
    /// <param name="initial">The initial size of the resizable limits.</param>
    /// <param name="maximum">The maximal size of the resizable limits.</param>
    public ResizableLimits(uint initial, uint maximum)
        : this(initial, (uint?)maximum)
    {
    }

    /// <summary>
    /// Creates resizable limits with the given initial and maximal sizes.
    /// </summary>
    /// <param name="initial">The initial size of the resizable limits.</param>
    /// <param name="maximum">The optional maximal size of the resizable limits.</param>
    public ResizableLimits(uint initial, uint? maximum)
    {
        this.Initial = initial;
        this.Maximum = maximum;
    }

    /// <summary>
    /// Gets a Boolean that tells if these resizable limits have a maximum size.
    /// </summary>
    public bool HasMaximum => Maximum.HasValue;

    /// <summary>
    /// Gets the initial length (in units of table elements or wasm pages).
    /// </summary>
    /// <returns>The initial length of the resizable limits.</returns>
    public uint Initial { get; private set; }

    /// <summary>
    /// Gets the maximal length (in units of table elements or wasm pages).
    /// This value may be <c>null</c> to signify that no maximum is specified.
    /// </summary>
    /// <returns>The maximum length of the resizable limits, if any.</returns>
    public uint? Maximum { get; private set; }

    /// <summary>
    /// Writes these resizable limits to the given WebAssembly file writer.
    /// </summary>
    /// <param name="writer">The WebAssembly file writer.</param>
    public void WriteTo(BinaryWasmWriter writer)
    {
        // Binary layout: a one-bit 'has maximum' flag, the initial size,
        // and then the maximum size if and only if the flag is set.
        writer.WriteVarUInt1(HasMaximum);
        writer.WriteVarUInt32(Initial);
        if (HasMaximum)
        {
            writer.WriteVarUInt32(Maximum.Value);
        }
    }

    /// <summary>
    /// Writes a textual representation of these resizable limits to the given writer.
    /// </summary>
    /// <param name="writer">The writer to which text is written.</param>
    public void Dump(TextWriter writer)
    {
        writer.Write("{initial: ");
        writer.Write(Initial);
        if (HasMaximum)
        {
            writer.Write(", max: ");
            writer.Write(Maximum.Value);
        }
        writer.Write("}");
    }

    /// <inheritdoc/>
    public override string ToString()
    {
        var builder = new StringBuilder();
        Dump(new StringWriter(builder));
        return builder.ToString();
    }
}
}
<|start_filename|>libwasm/CustomSection.cs<|end_filename|>
using System.IO;
using Wasm.Binary;
namespace Wasm
{
/// <summary>
/// Represents a custom section.
/// </summary>
public sealed class CustomSection : Section
{
    /// <summary>
    /// Creates a custom section from the given section name and payload.
    /// </summary>
    /// <param name="customName">The custom section's name.</param>
    /// <param name="payload">The custom section's payload.</param>
    public CustomSection(string customName, byte[] payload)
    {
        CustomName = customName;
        Payload = payload;
    }

    /// <summary>
    /// Gets this custom section's custom name.
    /// </summary>
    /// <returns>The custom name of the custom section.</returns>
    public string CustomName { get; private set; }

    /// <inheritdoc/>
    public override SectionName Name => new SectionName(CustomName);

    /// <summary>
    /// Gets this custom section's payload, as an array of bytes.
    /// </summary>
    /// <returns>A byte array that defines the custom section's payload.</returns>
    public byte[] Payload { get; private set; }

    /// <summary>
    /// Writes this WebAssembly section's payload to the given binary writer.
    /// </summary>
    /// <param name="writer">The writer to which the payload is written.</param>
    public override void WritePayloadTo(BinaryWasmWriter writer)
    {
        // Custom section payloads are opaque: copy the bytes through unchanged.
        writer.Writer.Write(Payload);
    }
}
}
<|start_filename|>libwasm/Section.cs<|end_filename|>
using System.IO;
using System.Text;
using Wasm.Binary;
namespace Wasm
{
/// <summary>
/// A base class for WebAssembly module sections.
/// </summary>
public abstract class Section
{
    /// <summary>
    /// Creates a new WebAssembly module section.
    /// </summary>
    public Section()
    { }

    /// <summary>
    /// Gets this section's name.
    /// </summary>
    /// <returns>The section name.</returns>
    public abstract SectionName Name { get; }

    /// <summary>
    /// Writes this WebAssembly section's payload to the given binary WebAssembly writer.
    /// </summary>
    /// <param name="writer">The writer to which the payload is written.</param>
    public abstract void WritePayloadTo(BinaryWasmWriter writer);

    /// <summary>
    /// Writes this WebAssembly section's optional custom name and payload to the given
    /// WebAssembly writer.
    /// </summary>
    /// <param name="writer">The writer to which the custom name and payload are written.</param>
    internal void WriteCustomNameAndPayloadTo(BinaryWasmWriter writer)
    {
        // Only custom sections carry an explicit name in the binary encoding.
        if (Name.IsCustom)
        {
            writer.WriteString(Name.CustomName);
        }
        WritePayloadTo(writer);
    }

    /// <summary>
    /// Creates a memory stream and fills it with this WebAssembly section's payload.
    /// </summary>
    /// <returns>The memory stream.</returns>
    public MemoryStream PayloadAsMemoryStream()
    {
        var stream = new MemoryStream();
        var wasmWriter = new BinaryWasmWriter(new BinaryWriter(stream));
        WritePayloadTo(wasmWriter);
        // Rewind so callers can read the payload from the start.
        stream.Seek(0, SeekOrigin.Begin);
        return stream;
    }

    /// <summary>
    /// Writes a string representation of this section to the given text writer.
    /// </summary>
    /// <param name="writer">
    /// The writer to which a representation of this section is written.
    /// </param>
    public virtual void Dump(TextWriter writer) => DumpNameAndPayload(writer);

    /// <summary>
    /// Writes a string representation of this section and its payload to the given text writer.
    /// </summary>
    /// <param name="writer">
    /// The writer to which a representation of this section is written.
    /// </param>
    /// <remarks>This is the default 'Dump' implementation.</remarks>
    public void DumpNameAndPayload(TextWriter writer)
    {
        writer.Write(Name.ToString());
        writer.Write("; payload length: ");
        using (var payloadStream = PayloadAsMemoryStream())
        {
            writer.Write(payloadStream.Length);
            writer.WriteLine();
            DumpHelpers.DumpStream(payloadStream, writer);
            writer.WriteLine();
        }
    }

    /// <summary>
    /// Creates a string representation of this section.
    /// </summary>
    /// <returns>The section's string representation.</returns>
    public override string ToString()
    {
        var builder = new StringBuilder();
        Dump(new StringWriter(builder));
        return builder.ToString();
    }
}
}
<|start_filename|>libwasm/Interpret/OperatorImpls.cs<|end_filename|>
using System;
using Wasm.Instructions;
using Wasm.Optimize;
namespace Wasm.Interpret
{
/// <summary>
/// A class that defines operator implementations.
/// </summary>
public static class OperatorImpls
{
/// <summary>
/// Executes a 'unreachable' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Unreachable(Instruction value, InterpreterContext context)
{
    // 'unreachable' always traps: reaching it means control flow entered a
    // region the program marked as impossible.
    throw new TrapException("An 'unreachable' instruction was reached.", TrapException.SpecMessages.Unreachable);
}

/// <summary>
/// Executes a 'nop' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Nop(Instruction value, InterpreterContext context)
{
    // Deliberately empty: 'nop' has no observable effect.
}
/// <summary>
/// Executes a 'block' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Block(Instruction value, InterpreterContext context)
{
    var instruction = Operators.Block.CastInstruction(value);
    var contents = instruction.Contents;
    var interpreter = context.Module.Interpreter;
    // Run the block's body on a fresh evaluation stack so that, when a
    // branch targets this block, the values it produced can be trimmed
    // down to the block's declared arity.
    var outerStack = context.Stack;
    var innerStack = context.Stack = context.CreateStack();
    for (int i = 0; i < contents.Count; i++)
    {
        interpreter.Interpret(contents[i], context);
        if (context.BreakRequested)
        {
            // Restore the outer stack.
            context.Stack = outerStack;
            if (context.BreakDepth == 0)
            {
                // The buck stops here. Push the topmost n items of the
                // inner stack onto the outer stack, where n is the block
                // instruction's arity.
                context.Push(innerStack, instruction.Arity);
            }
            else
            {
                // Otherwise, push the entire inner stack onto the outer stack and
                // make the issue of figuring out how many elements to pop the next
                // block's problem.
                context.Push(innerStack);
            }
            // One nesting level of the pending branch has been consumed.
            context.BreakDepth--;
            return;
        }
    }
    // Normal fall-through exit: restore the outer stack.
    context.Stack = outerStack;
    // Push the inner stack onto the outer stack.
    context.Push(innerStack);
}
/// <summary>
/// Executes a 'loop' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Loop(Instruction value, InterpreterContext context)
{
    var contents = Operators.Loop.CastInstruction(value).Contents;
    var interpreter = context.Module.Interpreter;
    // Note: unlike 'block', a branch to a 'loop' label jumps back to the
    // loop's start, which is why a break with depth zero resets 'i' below.
    for (int i = 0; i < contents.Count;)
    {
        interpreter.Interpret(contents[i], context);
        if (context.BreakRequested)
        {
            if (context.BreakDepth == 0)
            {
                // This loop is the break's target. We should decrement the
                // break depth to terminate the break request and then re-start
                // the loop.
                context.BreakDepth--;
                i = 0;
            }
            else
            {
                // This loop is not the break's target. We should terminate the loop.
                context.BreakDepth--;
                return;
            }
        }
        else
        {
            i++;
        }
    }
}
/// <summary>
/// Executes an 'if' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void If(Instruction value, InterpreterContext context)
{
    // Determine which branch we should take.
    var instr = Operators.If.CastInstruction(value);
    var condVal = context.Pop<int>();
    // The else branch may be null when the 'if' has no 'else' clause;
    // in that case a false condition executes nothing.
    var bodyToRun = condVal != 0
        ? instr.IfBranch
        : instr.ElseBranch;
    if (bodyToRun != null)
    {
        // Create a block and run it, so branch/arity handling is shared
        // with the 'block' implementation.
        var block = Operators.Block.Create(instr.Type, bodyToRun);
        Block(block, context);
    }
}
/// <summary>
/// Executes a 'br' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Br(Instruction value, InterpreterContext context)
{
    var instr = Operators.Br.CastInstruction(value);
    // The immediate is the relative depth of the target label; assigning it
    // starts a break that enclosing block/loop handlers unwind.
    // NOTE(review): BreakRequested presumably derives from BreakDepth >= 0 —
    // confirm in InterpreterContext.
    context.BreakDepth = (int)instr.Immediate;
}

/// <summary>
/// Executes a 'br_if' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void BrIf(Instruction value, InterpreterContext context)
{
    var instr = Operators.BrIf.CastInstruction(value);
    // Conditional break: only branch when the popped i32 condition is nonzero.
    if (context.Pop<int>() != 0)
    {
        context.BreakDepth = (int)instr.Immediate;
    }
}
/// <summary>
/// Executes a 'br_table' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void BrTable(Instruction value, InterpreterContext context)
{
    var instr = Operators.BrTable.CastInstruction(value);
    // Pop the selector; out-of-range selectors (including negative ones)
    // fall back to the default target.
    int selector = context.Pop<int>();
    bool inRange = selector >= 0 && selector < instr.TargetTable.Count;
    context.BreakDepth = inRange
        ? (int)instr.TargetTable[selector]
        : (int)instr.DefaultTarget;
}
/// <summary>
/// Executes a 'return' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Return(Instruction value, InterpreterContext context)
{
    Return(context);
}

/// <summary>
/// Executes a 'return' instruction.
/// </summary>
/// <param name="context">The interpreter's context.</param>
public static void Return(InterpreterContext context)
{
    // Remove excess values from the evaluation stack: keep only the topmost
    // values that correspond to the function's declared return types.
    var oldStack = context.Stack;
    context.Stack = context.CreateStack()
    context.Push(oldStack, context.ReturnTypes.Count);
    context.Return();
}

/// <summary>
/// Executes a 'drop' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Drop(Instruction value, InterpreterContext context)
{
    // Discard the top of the stack regardless of its type.
    context.Pop<object>();
}
/// <summary>
/// Executes a 'select' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Select(Instruction value, InterpreterContext context)
{
    // Operands, from the top of the stack down:
    //   condition (i32), then the 'false' operand, then the 'true' operand.
    var condition = context.Pop<int>();
    var ifFalse = context.Pop<object>();
    var ifTrue = context.Pop<object>();
    // A nonzero condition selects the deeper ('true') operand.
    if (condition != 0)
    {
        context.Push<object>(ifTrue);
    }
    else
    {
        context.Push<object>(ifFalse);
    }
}
/// <summary>
/// Executes a 'call' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Call(Instruction value, InterpreterContext context)
{
    var instr = Operators.Call.CastInstruction(value);
    var funcDef = context.Module.Functions[(int)instr.Immediate];
    // Pop exactly as many values as the callee declares parameters.
    // NOTE(review): Pop<object>(n) presumably yields the arguments in
    // parameter order — confirm in InterpreterContext.
    var args = context.Pop<object>(funcDef.ParameterTypes.Count);
    CheckForStackOverflow(context);
    var results = funcDef.Invoke(args, context.CallStackDepth);
    // Push the callee's results back onto the caller's stack.
    context.Push<object>(results);
}

/// <summary>
/// Traps if invoking another function would exceed the interpreter
/// policy's maximum call stack depth.
/// </summary>
/// <param name="context">The interpreter's context.</param>
private static void CheckForStackOverflow(InterpreterContext context)
{
    if (context.CallStackDepth >= context.Policy.MaxCallStackDepth)
    {
        throw new TrapException(
            "A stack overflow occurred: the max call stack depth was exceeded.",
            TrapException.SpecMessages.CallStackExhausted);
    }
}
/// <summary>
/// Executes a 'call_indirect' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void CallIndirect(Instruction value, InterpreterContext context)
{
    // The callee is selected at run time by a table index popped from the stack.
    var funcDefIndex = context.Pop<int>();
    var funcDef = context.Module.Tables[0][(uint)funcDefIndex];
    // NOTE(review): ThrowFunctionDefinition appears to be a placeholder for
    // absent/invalid table entries; the signature check is skipped for it,
    // presumably because invoking it traps anyway — confirm.
    if (!(funcDef is ThrowFunctionDefinition))
    {
        var funcType = new FunctionType(funcDef.ParameterTypes, funcDef.ReturnTypes);
        var instruction = Operators.CallIndirect.CastInstruction(value);
        var expectedFuncType = context.Module.Types[(int)instruction.TypeIndex];
        // 'call_indirect' must trap when the callee's actual signature does
        // not match the instruction's declared type.
        if (!ConstFunctionTypeComparer.Instance.Equals(funcType, expectedFuncType))
        {
            throw new TrapException(
                $"Indirect function call expected to refer to a function with signature '{funcType}' but " +
                $"instead found a function with signature '{expectedFuncType}'",
                TrapException.SpecMessages.IndirectCallTypeMismatch);
        }
    }
    var args = context.Pop<object>(funcDef.ParameterTypes.Count);
    CheckForStackOverflow(context);
    var results = funcDef.Invoke(args, context.CallStackDepth);
    context.Push<object>(results);
}
/// <summary>
/// Executes a 'get_local' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void GetLocal(Instruction value, InterpreterContext context)
{
    var instr = Operators.GetLocal.CastInstruction(value);
    // Locals are stored as boxed values; read and push as 'object'.
    context.Push<object>(context.Locals[(int)instr.Immediate].Get<object>());
}

/// <summary>
/// Executes a 'set_local' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void SetLocal(Instruction value, InterpreterContext context)
{
    var instr = Operators.SetLocal.CastInstruction(value);
    context.Locals[(int)instr.Immediate].Set<object>(context.Pop<object>())
}

/// <summary>
/// Executes a 'tee_local' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void TeeLocal(Instruction value, InterpreterContext context)
{
    var instr = Operators.TeeLocal.CastInstruction(value);
    // 'tee_local' differs from 'set_local' in that it leaves the value on
    // the stack, hence Peek rather than Pop.
    context.Locals[(int)instr.Immediate].Set<object>(context.Peek<object>());
}

/// <summary>
/// Executes a 'get_global' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void GetGlobal(Instruction value, InterpreterContext context)
{
    var instr = Operators.GetGlobal.CastInstruction(value);
    context.Push<object>(context.Module.Globals[(int)instr.Immediate].Get<object>());
}

/// <summary>
/// Executes a 'set_global' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void SetGlobal(Instruction value, InterpreterContext context)
{
    var instr = Operators.SetGlobal.CastInstruction(value);
    context.Module.Globals[(int)instr.Immediate].Set<object>(context.Pop<object>());
}
/// <summary>
/// Executes an 'i32.load' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Load(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int32Load.CastInstruction(value);
    var pointer = PopAlignedPointer(instr, context);
    // All loads read from linear memory 0, the only memory in wasm MVP.
    var result = context.Module.Memories[0].Int32[pointer];
    context.Push<int>(result);
}

/// <summary>
/// Executes an 'i64.load' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int64Load(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int64Load.CastInstruction(value);
    var pointer = PopAlignedPointer(instr, context);
    var result = context.Module.Memories[0].Int64[pointer];
    context.Push<long>(result);
}

/// <summary>
/// Executes an 'i32.load8_s' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Load8S(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int32Load8S.CastInstruction(value);
    var pointer = PopAlignedPointer(instr, context);
    // The Int8 view yields a signed byte; widening to int sign-extends.
    var result = context.Module.Memories[0].Int8[pointer];
    context.Push<int>(result);
}

/// <summary>
/// Executes an 'i32.load8_u' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Load8U(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int32Load8U.CastInstruction(value);
    var pointer = PopAlignedPointer(instr, context);
    // Cast to byte first so the widening to int zero-extends.
    var result = (byte)context.Module.Memories[0].Int8[pointer];
    context.Push<int>(result);
}

/// <summary>
/// Executes an 'i32.load16_s' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Load16S(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int32Load16S.CastInstruction(value);
    var pointer = PopAlignedPointer(instr, context);
    // Signed 16-bit read; widening to int sign-extends.
    var result = context.Module.Memories[0].Int16[pointer];
    context.Push<int>(result);
}

/// <summary>
/// Executes an 'i32.load16_u' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Load16U(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int32Load16U.CastInstruction(value);
    var pointer = PopAlignedPointer(instr, context);
    // Cast to ushort first so the widening to int zero-extends.
    var result = (ushort)context.Module.Memories[0].Int16[pointer];
    context.Push<int>(result);
}

/// <summary>
/// Executes an 'i64.load8_s' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int64Load8S(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int64Load8S.CastInstruction(value);
    var pointer = PopAlignedPointer(instr, context);
    var result = context.Module.Memories[0].Int8[pointer];
    context.Push<long>(result);
}

/// <summary>
/// Executes an 'i64.load8_u' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int64Load8U(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int64Load8U.CastInstruction(value);
    var pointer = PopAlignedPointer(instr, context);
    // Zero-extend via byte before widening to long.
    var result = (byte)context.Module.Memories[0].Int8[pointer];
    context.Push<long>(result);
}

/// <summary>
/// Executes an 'i64.load16_s' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int64Load16S(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int64Load16S.CastInstruction(value);
    var pointer = PopAlignedPointer(instr, context);
    var result = context.Module.Memories[0].Int16[pointer];
    context.Push<long>(result);
}

/// <summary>
/// Executes an 'i64.load16_u' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int64Load16U(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int64Load16U.CastInstruction(value);
    var pointer = PopAlignedPointer(instr, context);
    // Zero-extend via ushort before widening to long.
    var result = (ushort)context.Module.Memories[0].Int16[pointer];
    context.Push<long>(result);
}

/// <summary>
/// Executes an 'i64.load32_s' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int64Load32S(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int64Load32S.CastInstruction(value);
    var pointer = PopAlignedPointer(instr, context);
    var result = context.Module.Memories[0].Int32[pointer];
    context.Push<long>(result);
}

/// <summary>
/// Executes an 'i64.load32_u' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int64Load32U(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int64Load32U.CastInstruction(value);
    var pointer = PopAlignedPointer(instr, context);
    // Zero-extend via uint before widening to long.
    var result = (uint)context.Module.Memories[0].Int32[pointer];
    context.Push<long>(result);
}

/// <summary>
/// Executes an 'f32.load' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Float32Load(Instruction value, InterpreterContext context)
{
    var instr = Operators.Float32Load.CastInstruction(value);
    var pointer = PopAlignedPointer(instr, context);
    var result = context.Module.Memories[0].Float32[pointer];
    context.Push<float>(result);
}

/// <summary>
/// Executes an 'f64.load' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Float64Load(Instruction value, InterpreterContext context)
{
    var instr = Operators.Float64Load.CastInstruction(value);
    var pointer = PopAlignedPointer(instr, context);
    var result = context.Module.Memories[0].Float64[pointer];
    context.Push<double>(result);
}
/// <summary>
/// Executes an 'i32.store8' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Store8(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int32Store8.CastInstruction(value);
    // The value to store is on top of the stack, above the address operand,
    // so it is popped first; the narrowing cast wraps (stores the low bits).
    var result = context.Pop<int>();
    var pointer = PopAlignedPointer(instr, context);
    var memView = context.Module.Memories[0].Int8;
    memView[pointer] = (sbyte)result;
}

/// <summary>
/// Executes an 'i32.store16' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Store16(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int32Store16.CastInstruction(value);
    var result = context.Pop<int>();
    var pointer = PopAlignedPointer(instr, context);
    var memView = context.Module.Memories[0].Int16;
    memView[pointer] = (short)result;
}

/// <summary>
/// Executes an 'i32.store' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Store(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int32Store.CastInstruction(value);
    var result = context.Pop<int>();
    var pointer = PopAlignedPointer(instr, context);
    var memView = context.Module.Memories[0].Int32;
    memView[pointer] = result;
}

/// <summary>
/// Executes an 'i64.store8' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int64Store8(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int64Store8.CastInstruction(value);
    var result = context.Pop<long>();
    var pointer = PopAlignedPointer(instr, context);
    var memView = context.Module.Memories[0].Int8;
    memView[pointer] = (sbyte)result;
}

/// <summary>
/// Executes an 'i64.store16' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int64Store16(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int64Store16.CastInstruction(value);
    var result = context.Pop<long>();
    var pointer = PopAlignedPointer(instr, context);
    var memView = context.Module.Memories[0].Int16;
    memView[pointer] = (short)result;
}

/// <summary>
/// Executes an 'i64.store32' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int64Store32(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int64Store32.CastInstruction(value);
    var result = context.Pop<long>();
    var pointer = PopAlignedPointer(instr, context);
    var memView = context.Module.Memories[0].Int32;
    memView[pointer] = (int)result;
}

/// <summary>
/// Executes an 'i64.store' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int64Store(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int64Store.CastInstruction(value);
    var result = context.Pop<long>();
    var pointer = PopAlignedPointer(instr, context);
    var memView = context.Module.Memories[0].Int64;
    memView[pointer] = result;
}

/// <summary>
/// Executes an 'f32.store' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Float32Store(Instruction value, InterpreterContext context)
{
    var instr = Operators.Float32Store.CastInstruction(value);
    var result = context.Pop<float>();
    var pointer = PopAlignedPointer(instr, context);
    var memView = context.Module.Memories[0].Float32;
    memView[pointer] = result;
}

/// <summary>
/// Executes an 'f64.store' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Float64Store(Instruction value, InterpreterContext context)
{
    var instr = Operators.Float64Store.CastInstruction(value);
    var result = context.Pop<double>();
    var pointer = PopAlignedPointer(instr, context);
    var memView = context.Module.Memories[0].Float64;
    memView[pointer] = result;
}
/// <summary>
/// Pops a linear-memory address from the stack, adds the instruction's
/// static offset, and checks the result for overflow and (when the policy
/// demands it) alignment.
/// </summary>
/// <param name="instruction">The memory instruction whose offset and alignment apply.</param>
/// <param name="context">The interpreter's context.</param>
/// <returns>The effective 32-bit memory address.</returns>
/// <exception cref="TrapException">The effective address overflows 32 bits.</exception>
private static uint PopAlignedPointer(MemoryInstruction instruction, InterpreterContext context)
{
    // FIX: renamed the 'Instruction'/'Pointer' parameters to camelCase; the
    // old names violated the file's naming convention and 'Instruction'
    // shadowed the 'Instruction' type inside this method.
    // The dynamic address is an i32 interpreted as unsigned; widen to 64 bits
    // so the addition itself cannot wrap before the overflow check.
    var longPtr = (ulong)(uint)context.Pop<int>() + instruction.Offset;
    if (longPtr > uint.MaxValue)
    {
        throw new TrapException(
            "Memory address overflow.",
            TrapException.SpecMessages.OutOfBoundsMemoryAccess);
    }
    var pointer = (uint)longPtr;
    if (context.Policy.EnforceAlignment)
    {
        CheckAlignment(pointer, instruction);
    }
    return pointer;
}
/// <summary>
/// Traps if a memory access at the given address does not satisfy the
/// alignment declared by the instruction.
/// </summary>
/// <param name="pointer">The effective memory address being accessed.</param>
/// <param name="instruction">The memory instruction that declares the alignment.</param>
/// <exception cref="TrapException">The address is misaligned.</exception>
/// <remarks>
/// NOTE(review): the wasm spec treats the alignment immediate as a hint, not
/// a validity requirement; enforcing it is an interpreter policy choice,
/// which is why callers gate this on Policy.EnforceAlignment.
/// </remarks>
private static void CheckAlignment(uint pointer, MemoryInstruction instruction)
{
    // FIX: renamed the PascalCase parameters ('Pointer', 'Instruction') to
    // camelCase for consistency; 'Instruction' also shadowed the type name.
    if (pointer % instruction.Alignment != 0)
    {
        throw new TrapException(
            string.Format(
                "Misaligned memory access at {0}. (alignment: {1})",
                DumpHelpers.FormatHex(pointer),
                instruction.Alignment),
            TrapException.SpecMessages.MisalignedMemoryAccess);
    }
}
/// <summary>
/// Executes a 'current_memory' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void CurrentMemory(Instruction value, InterpreterContext context)
{
    var instr = Operators.CurrentMemory.CastInstruction(value);
    // Pushes the memory's current size; per wasm, size is measured in
    // 64KiB pages — presumably what the Size property returns; confirm.
    var result = context.Module.Memories[(int)instr.Immediate].Size;
    context.Push<int>((int)result);
}
/// <summary>
/// Executes a 'grow_memory' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void GrowMemory(Instruction value, InterpreterContext context)
{
    // BUG FIX: this previously cast through Operators.CurrentMemory, which
    // mismatches (and, per CastInstruction's usual operator check, rejects)
    // 'grow_memory' instructions. Cast through the matching operator.
    var instr = Operators.GrowMemory.CastInstruction(value);
    // The page delta is popped as an i32 but interpreted as unsigned.
    var amount = (uint)context.Pop<int>();
    // Grow presumably returns the previous size in pages, or -1 on failure
    // (standard wasm 'grow_memory' semantics) — confirm against the
    // memory implementation.
    var result = context.Module.Memories[(int)instr.Immediate].Grow(amount);
    context.Push<int>(result);
}
/// <summary>
/// Executes an 'i32.const' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Const(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int32Const.CastInstruction(value);
    // Constant instructions simply push their immediate operand.
    context.Push<int>(instr.Immediate);
}

/// <summary>
/// Executes an 'i64.const' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int64Const(Instruction value, InterpreterContext context)
{
    var instr = Operators.Int64Const.CastInstruction(value);
    context.Push<long>(instr.Immediate);
}

/// <summary>
/// Executes an 'f32.const' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Float32Const(Instruction value, InterpreterContext context)
{
    var instr = Operators.Float32Const.CastInstruction(value);
    context.Push<float>(instr.Immediate);
}

/// <summary>
/// Executes an 'f64.const' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Float64Const(Instruction value, InterpreterContext context)
{
    var instr = Operators.Float64Const.CastInstruction(value);
    context.Push<double>(instr.Immediate);
}
#region Int32 nullaries
/// <summary>
/// Executes an 'i32.add' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Add(Instruction value, InterpreterContext context)
{
    // Binary operators pop the right operand first (it is on top).
    // C# int arithmetic wraps on overflow in an unchecked context,
    // matching wasm's modulo-2^32 semantics.
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    context.Push<int>(lhs + rhs);
}

/// <summary>
/// Executes an 'i32.sub' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Sub(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    context.Push<int>(lhs - rhs);
}

/// <summary>
/// Executes an 'i32.mul' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Mul(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    context.Push<int>(lhs * rhs);
}

/// <summary>
/// Executes an 'i32.div_s' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32DivS(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    // NOTE(review): division by zero raises System.DivideByZeroException and
    // int.MinValue / -1 raises OverflowException here, rather than a
    // TrapException — confirm a caller translates these to traps, otherwise
    // this deviates from the wasm spec.
    context.Push<int>(lhs / rhs);
}

/// <summary>
/// Executes an 'i32.div_u' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32DivU(Instruction value, InterpreterContext context)
{
    // Unsigned variant: reinterpret both operands as uint before dividing.
    var rhs = (uint)context.Pop<int>();
    var lhs = (uint)context.Pop<int>();
    // NOTE(review): division by zero raises DivideByZeroException rather
    // than a TrapException — see Int32DivS.
    context.Push<int>((int)(lhs / rhs));
}
/// <summary>
/// Executes an 'i32.rem_s' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32RemS(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    // Delegated to ValueHelpers.RemS — presumably to handle the
    // int.MinValue % -1 case that would overflow with the '%' operator;
    // confirm in ValueHelpers.
    context.Push<int>(ValueHelpers.RemS(lhs, rhs));
}

/// <summary>
/// Executes an 'i32.rem_u' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32RemU(Instruction value, InterpreterContext context)
{
    // Unsigned remainder: reinterpret operands as uint.
    var rhs = (uint)context.Pop<int>();
    var lhs = (uint)context.Pop<int>();
    // NOTE(review): a zero divisor raises DivideByZeroException rather than
    // a TrapException — see Int32DivS.
    context.Push<int>((int)(lhs % rhs));
}
/// <summary>
/// Executes an 'i32.and' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32And(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    context.Push<int>(lhs & rhs);
}

/// <summary>
/// Executes an 'i32.or' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Or(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    context.Push<int>(lhs | rhs);
}

/// <summary>
/// Executes an 'i32.xor' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Xor(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    context.Push<int>(lhs ^ rhs);
}

/// <summary>
/// Executes an 'i32.shr_s' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32ShrS(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    // C# masks the shift count to its low 5 bits for 32-bit operands,
    // which matches wasm's modulo-32 shift-count semantics. Signed lhs
    // makes '>>' an arithmetic (sign-propagating) shift.
    context.Push<int>(lhs >> rhs);
}

/// <summary>
/// Executes an 'i32.shr_u' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32ShrU(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    // Reinterpreting lhs as uint makes '>>' a logical (zero-filling) shift.
    var lhs = (uint)context.Pop<int>();
    context.Push<int>((int)(lhs >> rhs));
}

/// <summary>
/// Executes an 'i32.shl' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Shl(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    context.Push<int>(lhs << rhs);
}

/// <summary>
/// Executes an 'i32.rotl' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Rotl(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    // C# has no rotate operator; rotation lives in ValueHelpers.
    context.Push<int>(ValueHelpers.RotateLeft(lhs, rhs));
}

/// <summary>
/// Executes an 'i32.rotr' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Rotr(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    context.Push<int>(ValueHelpers.RotateRight(lhs, rhs));
}
/// <summary>
/// Executes an 'i32.clz' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Clz(Instruction value, InterpreterContext context)
{
    // Unary operators transform the top of the stack in place.
    context.Push<int>(ValueHelpers.CountLeadingZeros(context.Pop<int>()));
}

/// <summary>
/// Executes an 'i32.ctz' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Ctz(Instruction value, InterpreterContext context)
{
    context.Push<int>(ValueHelpers.CountTrailingZeros(context.Pop<int>()));
}

/// <summary>
/// Executes an 'i32.popcnt' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Popcnt(Instruction value, InterpreterContext context)
{
    context.Push<int>(ValueHelpers.PopCount(context.Pop<int>()));
}
/// <summary>
/// Executes an 'i32.eq' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Eq(Instruction value, InterpreterContext context)
{
    // Comparison operators produce an i32 boolean: 1 for true, 0 for false.
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    context.Push<int>(lhs == rhs ? 1 : 0);
}

/// <summary>
/// Executes an 'i32.ne' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Ne(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    context.Push<int>(lhs != rhs ? 1 : 0);
}

/// <summary>
/// Executes an 'i32.lt_s' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32LtS(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    context.Push<int>(lhs < rhs ? 1 : 0);
}

/// <summary>
/// Executes an 'i32.lt_u' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32LtU(Instruction value, InterpreterContext context)
{
    // '_u' variants reinterpret both operands as unsigned before comparing.
    var rhs = (uint)context.Pop<int>();
    var lhs = (uint)context.Pop<int>();
    context.Push<int>(lhs < rhs ? 1 : 0);
}

/// <summary>
/// Executes an 'i32.le_s' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32LeS(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    context.Push<int>(lhs <= rhs ? 1 : 0);
}

/// <summary>
/// Executes an 'i32.le_u' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32LeU(Instruction value, InterpreterContext context)
{
    var rhs = (uint)context.Pop<int>();
    var lhs = (uint)context.Pop<int>();
    context.Push<int>(lhs <= rhs ? 1 : 0);
}

/// <summary>
/// Executes an 'i32.gt_s' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32GtS(Instruction value, InterpreterContext context)
{
    var rhs = context.Pop<int>();
    var lhs = context.Pop<int>();
    context.Push<int>(lhs > rhs ? 1 : 0);
}

/// <summary>
/// Executes an 'i32.gt_u' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32GtU(Instruction value, InterpreterContext context)
{
    var rhs = (uint)context.Pop<int>();
    var lhs = (uint)context.Pop<int>();
    context.Push<int>(lhs > rhs ? 1 : 0);
}
/// <summary>
/// Executes an 'i32.ge_s' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32GeS(Instruction value, InterpreterContext context)
{
var rhs = context.Pop<int>();
var lhs = context.Pop<int>();
context.Push<int>(lhs >= rhs ? 1 : 0);
}
/// <summary>
/// Executes an 'i32.ge_u' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32GeU(Instruction value, InterpreterContext context)
{
var rhs = (uint)context.Pop<int>();
var lhs = (uint)context.Pop<int>();
context.Push<int>(lhs >= rhs ? 1 : 0);
}
/// <summary>
/// Executes an 'i32.eqz' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32Eqz(Instruction value, InterpreterContext context)
{
context.Push<int>(context.Pop<int>() == 0 ? 1 : 0);
}
/// <summary>
/// Executes an 'i32.trunc_s/f32' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32TruncSFloat32(Instruction value, InterpreterContext context)
{
context.Push<int>(ValueHelpers.TruncateToInt32(context.Pop<float>()));
}
/// <summary>
/// Executes an 'i32.trunc_u/f32' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32TruncUFloat32(Instruction value, InterpreterContext context)
{
context.Push<int>((int)ValueHelpers.TruncateToUInt32(context.Pop<float>()));
}
/// <summary>
/// Executes an 'i32.trunc_s/f64' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32TruncSFloat64(Instruction value, InterpreterContext context)
{
context.Push<int>(ValueHelpers.TruncateToInt32(context.Pop<double>()));
}
/// <summary>
/// Executes an 'i32.trunc_u/f64' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32TruncUFloat64(Instruction value, InterpreterContext context)
{
context.Push<int>((int)ValueHelpers.TruncateToUInt32(context.Pop<double>()));
}
/// <summary>
/// Executes an 'i32.wrap/i64' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32WrapInt64(Instruction value, InterpreterContext context)
{
context.Push<int>((int)context.Pop<long>());
}
/// <summary>
/// Executes an 'i32.reinterpret/f32' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Int32ReinterpretFloat32(Instruction value, InterpreterContext context)
{
context.Push<int>(ValueHelpers.ReinterpretAsInt32(context.Pop<float>()));
}
#endregion
#region Int64 nullaries
/// <summary>Interprets the 'i64.add' opcode: pops two i64 values and pushes their sum.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64Add(Instruction value, InterpreterContext context)
{
    // The top of the stack is the right-hand operand.
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<long>(first + second);
}

/// <summary>Interprets the 'i64.sub' opcode: pops two i64 values and pushes their difference.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64Sub(Instruction value, InterpreterContext context)
{
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<long>(first - second);
}

/// <summary>Interprets the 'i64.mul' opcode: pops two i64 values and pushes their product.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64Mul(Instruction value, InterpreterContext context)
{
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<long>(first * second);
}

/// <summary>Interprets the 'i64.div_s' opcode: signed i64 division.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64DivS(Instruction value, InterpreterContext context)
{
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    // NOTE(review): relies on the CLR to raise an exception for division by
    // zero / long.MinValue / -1 — confirm the interpreter maps these to wasm traps.
    context.Push<long>(first / second);
}

/// <summary>Interprets the 'i64.div_u' opcode: unsigned i64 division.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64DivU(Instruction value, InterpreterContext context)
{
    // Reinterpret both operands as unsigned before dividing.
    var second = (ulong)context.Pop<long>();
    var first = (ulong)context.Pop<long>();
    context.Push<long>((long)(first / second));
}

/// <summary>Interprets the 'i64.rem_s' opcode: signed i64 remainder.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64RemS(Instruction value, InterpreterContext context)
{
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<long>(ValueHelpers.RemS(first, second));
}

/// <summary>Interprets the 'i64.rem_u' opcode: unsigned i64 remainder.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64RemU(Instruction value, InterpreterContext context)
{
    var second = (ulong)context.Pop<long>();
    var first = (ulong)context.Pop<long>();
    context.Push<long>((long)(first % second));
}

/// <summary>Interprets the 'i64.and' opcode: bitwise AND of two i64 values.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64And(Instruction value, InterpreterContext context)
{
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<long>(first & second);
}

/// <summary>Interprets the 'i64.or' opcode: bitwise OR of two i64 values.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64Or(Instruction value, InterpreterContext context)
{
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<long>(first | second);
}

/// <summary>Interprets the 'i64.xor' opcode: bitwise XOR of two i64 values.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64Xor(Instruction value, InterpreterContext context)
{
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<long>(first ^ second);
}

/// <summary>Interprets the 'i64.shr_s' opcode: arithmetic right shift of an i64.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64ShrS(Instruction value, InterpreterContext context)
{
    // C#'s shift on long masks the count to its low 6 bits, matching wasm's mod-64 rule.
    var shift = (int)context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<long>(first >> shift);
}

/// <summary>Interprets the 'i64.shr_u' opcode: logical right shift of an i64.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64ShrU(Instruction value, InterpreterContext context)
{
    var shift = (int)context.Pop<long>();
    // An unsigned operand makes '>>' shift in zero bits.
    var first = (ulong)context.Pop<long>();
    context.Push<long>((long)(first >> shift));
}

/// <summary>Interprets the 'i64.shl' opcode: left shift of an i64.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64Shl(Instruction value, InterpreterContext context)
{
    var shift = (int)context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<long>(first << shift);
}

/// <summary>Interprets the 'i64.rotl' opcode: bitwise rotate-left of an i64.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64Rotl(Instruction value, InterpreterContext context)
{
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<long>(ValueHelpers.RotateLeft(first, second));
}

/// <summary>Interprets the 'i64.rotr' opcode: bitwise rotate-right of an i64.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64Rotr(Instruction value, InterpreterContext context)
{
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<long>(ValueHelpers.RotateRight(first, second));
}

/// <summary>Interprets the 'i64.clz' opcode: pops an i64 and pushes its leading-zero count.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64Clz(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<long>();
    context.Push<long>(ValueHelpers.CountLeadingZeros(operand));
}

/// <summary>Interprets the 'i64.ctz' opcode: pops an i64 and pushes its trailing-zero count.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64Ctz(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<long>();
    context.Push<long>(ValueHelpers.CountTrailingZeros(operand));
}

/// <summary>Interprets the 'i64.popcnt' opcode: pops an i64 and pushes the number of set bits.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64Popcnt(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<long>();
    context.Push<long>(ValueHelpers.PopCount(operand));
}

/// <summary>Interprets the 'i64.eq' opcode: pops two i64 values and pushes an i32: 1 if equal, else 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64Eq(Instruction value, InterpreterContext context)
{
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<int>(first == second ? 1 : 0);
}

/// <summary>Interprets the 'i64.ne' opcode: pops two i64 values and pushes an i32: 1 if they differ, else 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64Ne(Instruction value, InterpreterContext context)
{
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<int>(first != second ? 1 : 0);
}

/// <summary>Interprets the 'i64.lt_s' opcode: signed i64 less-than comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64LtS(Instruction value, InterpreterContext context)
{
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<int>(first < second ? 1 : 0);
}

/// <summary>Interprets the 'i64.lt_u' opcode: unsigned i64 less-than comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64LtU(Instruction value, InterpreterContext context)
{
    var second = (ulong)context.Pop<long>();
    var first = (ulong)context.Pop<long>();
    context.Push<int>(first < second ? 1 : 0);
}

/// <summary>Interprets the 'i64.le_s' opcode: signed i64 less-than-or-equal comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64LeS(Instruction value, InterpreterContext context)
{
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<int>(first <= second ? 1 : 0);
}

/// <summary>Interprets the 'i64.le_u' opcode: unsigned i64 less-than-or-equal comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64LeU(Instruction value, InterpreterContext context)
{
    var second = (ulong)context.Pop<long>();
    var first = (ulong)context.Pop<long>();
    context.Push<int>(first <= second ? 1 : 0);
}

/// <summary>Interprets the 'i64.gt_s' opcode: signed i64 greater-than comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64GtS(Instruction value, InterpreterContext context)
{
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<int>(first > second ? 1 : 0);
}

/// <summary>Interprets the 'i64.gt_u' opcode: unsigned i64 greater-than comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64GtU(Instruction value, InterpreterContext context)
{
    var second = (ulong)context.Pop<long>();
    var first = (ulong)context.Pop<long>();
    context.Push<int>(first > second ? 1 : 0);
}

/// <summary>Interprets the 'i64.ge_s' opcode: signed i64 greater-than-or-equal comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64GeS(Instruction value, InterpreterContext context)
{
    var second = context.Pop<long>();
    var first = context.Pop<long>();
    context.Push<int>(first >= second ? 1 : 0);
}

/// <summary>Interprets the 'i64.ge_u' opcode: unsigned i64 greater-than-or-equal comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64GeU(Instruction value, InterpreterContext context)
{
    var second = (ulong)context.Pop<long>();
    var first = (ulong)context.Pop<long>();
    context.Push<int>(first >= second ? 1 : 0);
}

/// <summary>Interprets the 'i64.eqz' opcode: pops an i64 and pushes an i32: 1 if it is zero, else 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64Eqz(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<long>();
    context.Push<int>(operand == 0 ? 1 : 0);
}

/// <summary>Interprets the 'i64.trunc_s/f32' opcode: pops an f32 and pushes its signed-i64 truncation.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64TruncSFloat32(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<float>();
    context.Push<long>(ValueHelpers.TruncateToInt64(operand));
}

/// <summary>Interprets the 'i64.trunc_u/f32' opcode: pops an f32 and pushes its unsigned-i64 truncation.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64TruncUFloat32(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<float>();
    // The stack stores i64 values as signed longs, so reinterpret the unsigned result.
    context.Push<long>((long)ValueHelpers.TruncateToUInt64(operand));
}

/// <summary>Interprets the 'i64.trunc_s/f64' opcode: pops an f64 and pushes its signed-i64 truncation.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64TruncSFloat64(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<double>();
    context.Push<long>(ValueHelpers.TruncateToInt64(operand));
}

/// <summary>Interprets the 'i64.trunc_u/f64' opcode: pops an f64 and pushes its unsigned-i64 truncation.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64TruncUFloat64(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<double>();
    context.Push<long>((long)ValueHelpers.TruncateToUInt64(operand));
}

/// <summary>Interprets the 'i64.reinterpret/f64' opcode: pops an f64 and pushes its raw bit pattern as an i64.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64ReinterpretFloat64(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<double>();
    context.Push<long>(ValueHelpers.ReinterpretAsInt64(operand));
}

/// <summary>Interprets the 'i64.extend_s/i32' opcode: sign-extends an i32 to an i64.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64ExtendSInt32(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<int>();
    context.Push<long>(operand);
}

/// <summary>Interprets the 'i64.extend_u/i32' opcode: zero-extends an i32 to an i64.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Int64ExtendUInt32(Instruction value, InterpreterContext context)
{
    // The uint cast makes the widening conversion zero-extend.
    var operand = (uint)context.Pop<int>();
    context.Push<long>(operand);
}
#endregion
#region Float32 nullaries
/// <summary>Interprets the 'f32.abs' opcode: pops an f32 and pushes its absolute value.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Abs(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<float>();
    context.Push<float>(Math.Abs(operand));
}

/// <summary>Interprets the 'f32.add' opcode: pops two f32 values and pushes their sum.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Add(Instruction value, InterpreterContext context)
{
    // The top of the stack is the right-hand operand.
    var second = context.Pop<float>();
    var first = context.Pop<float>();
    context.Push<float>(first + second);
}

/// <summary>Interprets the 'f32.ceil' opcode: pops an f32 and pushes the smallest integral value not less than it.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Ceil(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<float>();
    // Math.Ceiling works in double precision; narrow the result back to f32.
    context.Push<float>((float)Math.Ceiling(operand));
}

/// <summary>Interprets the 'f32.copysign' opcode: pushes the first operand with the sign of the second.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Copysign(Instruction value, InterpreterContext context)
{
    var second = context.Pop<float>();
    var first = context.Pop<float>();
    context.Push<float>(ValueHelpers.Copysign(first, second));
}

/// <summary>Interprets the 'f32.div' opcode: pops two f32 values and pushes their quotient.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Div(Instruction value, InterpreterContext context)
{
    var second = context.Pop<float>();
    var first = context.Pop<float>();
    context.Push<float>(first / second);
}

/// <summary>Interprets the 'f32.eq' opcode: pops two f32 values and pushes an i32: 1 if equal, else 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Eq(Instruction value, InterpreterContext context)
{
    var second = context.Pop<float>();
    var first = context.Pop<float>();
    context.Push<int>(first == second ? 1 : 0);
}

/// <summary>Interprets the 'f32.floor' opcode: pops an f32 and pushes the largest integral value not greater than it.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Floor(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<float>();
    context.Push<float>((float)Math.Floor(operand));
}

/// <summary>Interprets the 'f32.ge' opcode: f32 greater-than-or-equal comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Ge(Instruction value, InterpreterContext context)
{
    var second = context.Pop<float>();
    var first = context.Pop<float>();
    context.Push<int>(first >= second ? 1 : 0);
}

/// <summary>Interprets the 'f32.gt' opcode: f32 greater-than comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Gt(Instruction value, InterpreterContext context)
{
    var second = context.Pop<float>();
    var first = context.Pop<float>();
    context.Push<int>(first > second ? 1 : 0);
}

/// <summary>Interprets the 'f32.le' opcode: f32 less-than-or-equal comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Le(Instruction value, InterpreterContext context)
{
    var second = context.Pop<float>();
    var first = context.Pop<float>();
    context.Push<int>(first <= second ? 1 : 0);
}

/// <summary>Interprets the 'f32.lt' opcode: f32 less-than comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Lt(Instruction value, InterpreterContext context)
{
    var second = context.Pop<float>();
    var first = context.Pop<float>();
    context.Push<int>(first < second ? 1 : 0);
}

/// <summary>Interprets the 'f32.max' opcode: pops two f32 values and pushes the larger one.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Max(Instruction value, InterpreterContext context)
{
    var second = context.Pop<float>();
    var first = context.Pop<float>();
    // NOTE(review): Math.Max may not implement wasm's ±0 ordering
    // (wasm fmax(-0, +0) must be +0) — confirm against the spec test suite.
    context.Push<float>(Math.Max(first, second));
}

/// <summary>Interprets the 'f32.min' opcode: pops two f32 values and pushes the smaller one.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Min(Instruction value, InterpreterContext context)
{
    var second = context.Pop<float>();
    var first = context.Pop<float>();
    // NOTE(review): Math.Min may not implement wasm's ±0 ordering
    // (wasm fmin(-0, +0) must be -0) — confirm against the spec test suite.
    context.Push<float>(Math.Min(first, second));
}

/// <summary>Interprets the 'f32.mul' opcode: pops two f32 values and pushes their product.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Mul(Instruction value, InterpreterContext context)
{
    var second = context.Pop<float>();
    var first = context.Pop<float>();
    context.Push<float>(first * second);
}

/// <summary>Interprets the 'f32.ne' opcode: pops two f32 values and pushes an i32: 1 if they differ, else 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Ne(Instruction value, InterpreterContext context)
{
    var second = context.Pop<float>();
    var first = context.Pop<float>();
    context.Push<int>(first != second ? 1 : 0);
}

/// <summary>Interprets the 'f32.nearest' opcode: rounds an f32 to the nearest integer, ties to even.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Nearest(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<float>();
    context.Push<float>((float)Math.Round(operand, MidpointRounding.ToEven));
}

/// <summary>Interprets the 'f32.neg' opcode: pops an f32 and pushes its negation.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Neg(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<float>();
    context.Push<float>(-operand);
}

/// <summary>Interprets the 'f32.sub' opcode: pops two f32 values and pushes their difference.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Sub(Instruction value, InterpreterContext context)
{
    var second = context.Pop<float>();
    var first = context.Pop<float>();
    context.Push<float>(first - second);
}

/// <summary>Interprets the 'f32.sqrt' opcode: pops an f32 and pushes its square root.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Sqrt(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<float>();
    context.Push<float>((float)Math.Sqrt(operand));
}

/// <summary>Interprets the 'f32.trunc' opcode: pops an f32 and pushes its integral part (rounded toward zero).</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32Trunc(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<float>();
    context.Push<float>((float)Math.Truncate(operand));
}

/// <summary>Interprets the 'f32.convert_s/i32' opcode: converts a signed i32 to an f32.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32ConvertSInt32(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<int>();
    context.Push<float>(operand);
}

/// <summary>Interprets the 'f32.convert_u/i32' opcode: converts an unsigned i32 to an f32.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32ConvertUInt32(Instruction value, InterpreterContext context)
{
    var operand = (uint)context.Pop<int>();
    context.Push<float>(operand);
}

/// <summary>Interprets the 'f32.convert_s/i64' opcode: converts a signed i64 to an f32.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32ConvertSInt64(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<long>();
    context.Push<float>(operand);
}

/// <summary>Interprets the 'f32.convert_u/i64' opcode: converts an unsigned i64 to an f32.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32ConvertUInt64(Instruction value, InterpreterContext context)
{
    var operand = (ulong)context.Pop<long>();
    context.Push<float>(operand);
}

/// <summary>Interprets the 'f32.demote/f64' opcode: narrows an f64 to an f32.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32DemoteFloat64(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<double>();
    context.Push<float>((float)operand);
}

/// <summary>Interprets the 'f32.reinterpret/i32' opcode: pops an i32 and pushes its raw bit pattern as an f32.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float32ReinterpretInt32(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<int>();
    context.Push<float>(ValueHelpers.ReinterpretAsFloat32(operand));
}
#endregion
#region Float64 nullaries
/// <summary>Interprets the 'f64.abs' opcode: pops an f64 and pushes its absolute value.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Abs(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<double>();
    context.Push<double>(Math.Abs(operand));
}

/// <summary>Interprets the 'f64.add' opcode: pops two f64 values and pushes their sum.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Add(Instruction value, InterpreterContext context)
{
    // The top of the stack is the right-hand operand.
    var second = context.Pop<double>();
    var first = context.Pop<double>();
    context.Push<double>(first + second);
}

/// <summary>Interprets the 'f64.ceil' opcode: pops an f64 and pushes the smallest integral value not less than it.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Ceil(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<double>();
    context.Push<double>(Math.Ceiling(operand));
}

/// <summary>Interprets the 'f64.copysign' opcode: pushes the first operand with the sign of the second.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Copysign(Instruction value, InterpreterContext context)
{
    var second = context.Pop<double>();
    var first = context.Pop<double>();
    context.Push<double>(ValueHelpers.Copysign(first, second));
}

/// <summary>Interprets the 'f64.div' opcode: pops two f64 values and pushes their quotient.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Div(Instruction value, InterpreterContext context)
{
    var second = context.Pop<double>();
    var first = context.Pop<double>();
    context.Push<double>(first / second);
}

/// <summary>Interprets the 'f64.eq' opcode: pops two f64 values and pushes an i32: 1 if equal, else 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Eq(Instruction value, InterpreterContext context)
{
    var second = context.Pop<double>();
    var first = context.Pop<double>();
    context.Push<int>(first == second ? 1 : 0);
}

/// <summary>Interprets the 'f64.floor' opcode: pops an f64 and pushes the largest integral value not greater than it.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Floor(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<double>();
    context.Push<double>(Math.Floor(operand));
}

/// <summary>Interprets the 'f64.ge' opcode: f64 greater-than-or-equal comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Ge(Instruction value, InterpreterContext context)
{
    var second = context.Pop<double>();
    var first = context.Pop<double>();
    context.Push<int>(first >= second ? 1 : 0);
}

/// <summary>Interprets the 'f64.gt' opcode: f64 greater-than comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Gt(Instruction value, InterpreterContext context)
{
    var second = context.Pop<double>();
    var first = context.Pop<double>();
    context.Push<int>(first > second ? 1 : 0);
}

/// <summary>Interprets the 'f64.le' opcode: f64 less-than-or-equal comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Le(Instruction value, InterpreterContext context)
{
    var second = context.Pop<double>();
    var first = context.Pop<double>();
    context.Push<int>(first <= second ? 1 : 0);
}

/// <summary>Interprets the 'f64.lt' opcode: f64 less-than comparison, pushing an i32 1 or 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Lt(Instruction value, InterpreterContext context)
{
    var second = context.Pop<double>();
    var first = context.Pop<double>();
    context.Push<int>(first < second ? 1 : 0);
}

/// <summary>Interprets the 'f64.max' opcode: pops two f64 values and pushes the larger one.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Max(Instruction value, InterpreterContext context)
{
    var second = context.Pop<double>();
    var first = context.Pop<double>();
    // NOTE(review): Math.Max may not implement wasm's ±0 ordering
    // (wasm fmax(-0, +0) must be +0) — confirm against the spec test suite.
    context.Push<double>(Math.Max(first, second));
}

/// <summary>Interprets the 'f64.min' opcode: pops two f64 values and pushes the smaller one.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Min(Instruction value, InterpreterContext context)
{
    var second = context.Pop<double>();
    var first = context.Pop<double>();
    // NOTE(review): Math.Min may not implement wasm's ±0 ordering
    // (wasm fmin(-0, +0) must be -0) — confirm against the spec test suite.
    context.Push<double>(Math.Min(first, second));
}

/// <summary>Interprets the 'f64.mul' opcode: pops two f64 values and pushes their product.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Mul(Instruction value, InterpreterContext context)
{
    var second = context.Pop<double>();
    var first = context.Pop<double>();
    context.Push<double>(first * second);
}

/// <summary>Interprets the 'f64.ne' opcode: pops two f64 values and pushes an i32: 1 if they differ, else 0.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Ne(Instruction value, InterpreterContext context)
{
    var second = context.Pop<double>();
    var first = context.Pop<double>();
    context.Push<int>(first != second ? 1 : 0);
}

/// <summary>Interprets the 'f64.nearest' opcode: rounds an f64 to the nearest integer, ties to even.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Nearest(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<double>();
    context.Push<double>(Math.Round(operand, MidpointRounding.ToEven));
}

/// <summary>Interprets the 'f64.neg' opcode: pops an f64 and pushes its negation.</summary>
/// <param name="value">The instruction being interpreted.</param>
/// <param name="context">The interpreter state, including the operand stack.</param>
public static void Float64Neg(Instruction value, InterpreterContext context)
{
    var operand = context.Pop<double>();
    context.Push<double>(-operand);
}
/// <summary>
/// Executes an 'f64.sub' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Float64Sub(Instruction value, InterpreterContext context)
{
var rhs = context.Pop<double>();
var lhs = context.Pop<double>();
context.Push<double>(lhs - rhs);
}
/// <summary>
/// Executes an 'f64.sqrt' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Float64Sqrt(Instruction value, InterpreterContext context)
{
context.Push<double>(Math.Sqrt(context.Pop<double>()));
}
/// <summary>
/// Executes an 'f64.trunc' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Float64Trunc(Instruction value, InterpreterContext context)
{
context.Push<double>(Math.Truncate(context.Pop<double>()));
}
/// <summary>
/// Executes an 'f64.convert_s/i32' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Float64ConvertSInt32(Instruction value, InterpreterContext context)
{
context.Push<double>(context.Pop<int>());
}
/// <summary>
/// Executes an 'f64.convert_u/i32' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Float64ConvertUInt32(Instruction value, InterpreterContext context)
{
context.Push<double>((uint)context.Pop<int>());
}
/// <summary>
/// Executes an 'f64.convert_s/i64' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Float64ConvertSInt64(Instruction value, InterpreterContext context)
{
context.Push<double>(context.Pop<long>());
}
/// <summary>
/// Executes an 'f64.convert_u/i64' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Float64ConvertUInt64(Instruction value, InterpreterContext context)
{
context.Push<double>((ulong)context.Pop<long>());
}
/// <summary>
/// Executes an 'f64.promote/f32' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Float64PromoteFloat32(Instruction value, InterpreterContext context)
{
context.Push<double>(context.Pop<float>());
}
/// <summary>
/// Executes an 'f64.reinterpret/i64' instruction.
/// </summary>
/// <param name="value">The instruction to interpret.</param>
/// <param name="context">The interpreter's context.</param>
public static void Float64ReinterpretInt64(Instruction value, InterpreterContext context)
{
context.Push<double>(ValueHelpers.ReinterpretAsFloat64(context.Pop<long>()));
}
#endregion
}
}
<|start_filename|>libwasm/Interpret/ValueHelpers.cs<|end_filename|>
using System;
namespace Wasm.Interpret
{
/// <summary>
/// Defines helper functions that operate on WebAssembly values.
/// </summary>
public static class ValueHelpers
{
/// <summary>
/// Takes a WebAssembly value type and maps it to its corresponding CLR type.
/// </summary>
/// <param name="type">The type to map to a CLR type.</param>
/// <returns>A CLR type.</returns>
public static Type ToClrType(WasmValueType type)
{
switch (type)
{
case WasmValueType.Float32:
return typeof(float);
case WasmValueType.Float64:
return typeof(double);
case WasmValueType.Int32:
return typeof(int);
case WasmValueType.Int64:
return typeof(long);
default:
throw new WasmException($"Cannot convert unknown WebAssembly type '{type}' to a CLR type.");
}
}
/// <summary>
/// Takes a type and maps it to its corresponding WebAssembly value type.
/// </summary>
/// <param name="type">The type to map to a WebAssembly value type.</param>
/// <returns>A WebAssembly value type.</returns>
public static WasmValueType ToWasmValueType(Type type)
{
if (type == typeof(int))
{
return WasmValueType.Int32;
}
else if (type == typeof(long))
{
return WasmValueType.Int64;
}
else if (type == typeof(float))
{
return WasmValueType.Float32;
}
else if (type == typeof(double))
{
return WasmValueType.Float64;
}
else
{
throw new WasmException($"Type '{type}' does not map to a WebAssembly type.");
}
}
/// <summary>
/// Takes a type and maps it to its corresponding WebAssembly value type.
/// </summary>
/// <typeparam name="T">The type to map to a WebAssembly value type.</typeparam>
/// <returns>A WebAssembly value type.</returns>
public static WasmValueType ToWasmValueType<T>()
{
return ToWasmValueType(typeof(T));
}
/// <summary>
/// Reinterprets the given 32-bit integer's bits as a 32-bit floating-point
/// number.
/// </summary>
/// <param name="value">The value to reinterpret.</param>
/// <returns>A 32-bit floating-point number.</returns>
public static unsafe float ReinterpretAsFloat32(int value)
{
return *(float*)&value;
}
/// <summary>
/// Reinterprets the given 32-bit floating-point number's bits as a 32-bit
/// integer.
/// </summary>
/// <param name="value">The value to reinterpret.</param>
/// <returns>A 32-bit integer.</returns>
public static unsafe int ReinterpretAsInt32(float value)
{
return *(int*)&value;
}
/// <summary>
/// Reinterprets the given 64-bit integer's bits as a 64-bit floating-point
/// number.
/// </summary>
/// <param name="value">The value to reinterpret.</param>
/// <returns>A 64-bit floating-point number.</returns>
public static double ReinterpretAsFloat64(long value)
{
return BitConverter.Int64BitsToDouble(value);
}
/// <summary>
/// Reinterprets the given 64-bit floating-point number's bits as a 64-bit
/// integer.
/// </summary>
/// <param name="value">The value to reinterpret.</param>
/// <returns>A 64-bit integer.</returns>
public static long ReinterpretAsInt64(double value)
{
return BitConverter.DoubleToInt64Bits(value);
}
/// <summary>
/// Rotates the first operand to the left by the number of
/// bits given by the second operand.
/// </summary>
/// <param name="left">The first operand.</param>
/// <param name="right">The second operand.</param>
public static int RotateLeft(int left, int right)
{
var rhs = right;
var lhs = (uint)left;
uint result = (lhs << rhs) | (lhs >> (32 - rhs));
return (int)result;
}
/// <summary>
/// Rotates the first operand to the right by the number of
/// bits given by the second operand.
/// </summary>
/// <param name="left">The first operand.</param>
/// <param name="right">The second operand.</param>
public static int RotateRight(int left, int right)
{
var rhs = right;
var lhs = (uint)left;
uint result = (lhs >> rhs) | (lhs << (32 - rhs));
return (int)result;
}
/// <summary>
/// Counts the number of leading zero bits in the given integer.
/// </summary>
/// <param name="value">The operand.</param>
public static int CountLeadingZeros(int value)
{
var uintVal = (uint)value;
int numOfLeadingZeros = 32;
while (uintVal != 0)
{
numOfLeadingZeros--;
uintVal >>= 1;
}
return numOfLeadingZeros;
}
/// <summary>
/// Counts the number of trailing zero bits in the given integer.
/// </summary>
/// <param name="value">The operand.</param>
public static int CountTrailingZeros(int value)
{
var uintVal = (uint)value;
if (uintVal == 0u)
{
return 32;
}
int numOfTrailingZeros = 0;
while ((uintVal & 0x1u) == 0u)
{
numOfTrailingZeros++;
uintVal >>= 1;
}
return numOfTrailingZeros;
}
/// <summary>
/// Counts the number of one bits in the given integer.
/// </summary>
/// <param name="value">The operand.</param>
public static int PopCount(int value)
{
var uintVal = (uint)value;
int numOfOnes = 0;
while (uintVal != 0)
{
numOfOnes += (int)(uintVal & 0x1u);
uintVal >>= 1;
}
return numOfOnes;
}
/// <summary>
/// Rotates the first operand to the left by the number of
/// bits given by the second operand.
/// </summary>
/// <param name="left">The first operand.</param>
/// <param name="right">The second operand.</param>
public static long RotateLeft(long left, long right)
{
var rhs = (int)right;
var lhs = (ulong)left;
ulong result = (lhs << rhs) | (lhs >> (64 - rhs));
return (long)result;
}
/// <summary>
/// Rotates the first operand to the right by the number of
/// bits given by the second operand.
/// </summary>
/// <param name="left">The first operand.</param>
/// <param name="right">The second operand.</param>
public static long RotateRight(long left, long right)
{
var rhs = (int)right;
var lhs = (ulong)left;
ulong result = (lhs >> rhs) | (lhs << (64 - rhs));
return (long)result;
}
/// <summary>
/// Counts the number of leading zero bits in the given integer.
/// </summary>
/// <param name="value">The operand.</param>
public static int CountLeadingZeros(long value)
{
var uintVal = (ulong)value;
int numOfLeadingZeros = 64;
while (uintVal != 0)
{
numOfLeadingZeros--;
uintVal >>= 1;
}
return numOfLeadingZeros;
}
/// <summary>
/// Counts the number of trailing zero bits in the given integer.
/// </summary>
/// <param name="value">The operand.</param>
public static int CountTrailingZeros(long value)
{
var uintVal = (ulong)value;
if (uintVal == 0ul)
{
return 64;
}
int numOfTrailingZeros = 0;
while ((uintVal & 0x1u) == 0u)
{
numOfTrailingZeros++;
uintVal >>= 1;
}
return numOfTrailingZeros;
}
/// <summary>
/// Counts the number of one bits in the given integer.
/// </summary>
/// <param name="value">The operand.</param>
public static int PopCount(long value)
{
var uintVal = (ulong)value;
int numOfOnes = 0;
while (uintVal != 0)
{
numOfOnes += (int)(uintVal & 0x1u);
uintVal >>= 1;
}
return numOfOnes;
}
// Based on the StackOverflow answer by Deduplicator:
// https://stackoverflow.com/questions/26576285/how-can-i-get-the-sign-bit-of-a-double
private static readonly int float32SignMask = unchecked((int)0x80000000);
private static readonly long float64SignMask = unchecked((long)0x8000000000000000);
/// <summary>
/// Tests if the sign bit of the given 32-bit floating point value is set,
/// i.e., if the value is negative.
/// </summary>
/// <param name="value">The value to test.</param>
/// <returns><c>true</c> if the value's sign bit is set; otherwise, <c>false</c>.</returns>
public static bool Signbit(float value)
{
return (ReinterpretAsInt32(value) & float32SignMask) == float32SignMask;
}
/// <summary>
/// Composes a 32-bit floating point number with the magnitude of the first
/// argument and the sign of the second.
/// </summary>
/// <param name="left">The argument whose magnitude is used.</param>
/// <param name="right">The argument whose sign bit is used.</param>
public static float Copysign(float left, float right)
{
int leftBits = ReinterpretAsInt32(left);
int rightBits = ReinterpretAsInt32(right);
int resultBits = (leftBits & ~float32SignMask) | (rightBits & float32SignMask);
return ReinterpretAsFloat32(resultBits);
}
/// <summary>
/// Tests if the sign bit of the given 64-bit floating point value is set,
/// i.e., if the value is negative.
/// </summary>
/// <param name="value">The value to test.</param>
/// <returns><c>true</c> if the value's sign bit is set; otherwise, <c>false</c>.</returns>
public static bool Signbit(double value)
{
return (ReinterpretAsInt64(value) & float64SignMask) == float64SignMask;
}
/// <summary>
/// Composes a 64-bit floating point number with the magnitude of the first
/// argument and the sign of the second.
/// </summary>
/// <param name="left">The argument whose magnitude is used.</param>
/// <param name="right">The argument whose sign bit is used.</param>
public static double Copysign(double left, double right)
{
long leftBits = ReinterpretAsInt64(left);
long rightBits = ReinterpretAsInt64(right);
long resultBits = (leftBits & ~float64SignMask) | (rightBits & float64SignMask);
return ReinterpretAsFloat64(resultBits);
}
/// <summary>
/// Sets the sign of a 32-bit floating point number.
/// </summary>
/// <param name="value">A number whose magnitude is preserved and sign is rewritten.</param>
/// <param name="isNegative">The sign to assign to <paramref name="value"/>.</param>
/// <returns>A number that is equal to <paramref name="value"/> in magnitude and <paramref name="isNegative"/> in sign.</returns>
public static float Setsign(float value, bool isNegative)
{
return Copysign(value, isNegative ? -1.0f : 1.0f);
}
/// <summary>
/// Sets the sign of a 64-bit floating point number.
/// </summary>
/// <param name="value">A number whose magnitude is preserved and sign is rewritten.</param>
/// <param name="isNegative">The sign to assign to <paramref name="value"/>.</param>
/// <returns>A number that is equal to <paramref name="value"/> in magnitude and <paramref name="isNegative"/> in sign.</returns>
public static double Setsign(double value, bool isNegative)
{
return Copysign(value, isNegative ? -1.0 : 1.0);
}
/// <summary>
/// Takes a 32-bit floating point number and truncates it to a
/// 32-bit signed integer.
/// </summary>
/// <param name="value">A 32-bit floating point number to truncate.</param>
/// <returns>A 32-bit integer that is the truncated version of <paramref name="value"/>.</returns>
public static int TruncateToInt32(float value)
{
if (float.IsInfinity(value))
{
return ThrowInfinityToInt<int>();
}
else if (float.IsNaN(value))
{
return ThrowNaNToInt<int>();
}
else
{
return checked((int)value);
}
}
/// <summary>
/// Takes a 32-bit floating point number and truncates it to a
/// 32-bit unsigned integer.
/// </summary>
/// <param name="value">A 32-bit floating point number to truncate.</param>
/// <returns>A 32-bit integer that is the truncated version of <paramref name="value"/>.</returns>
public static uint TruncateToUInt32(float value)
{
if (float.IsInfinity(value))
{
return ThrowInfinityToInt<uint>();
}
else if (float.IsNaN(value))
{
return ThrowNaNToInt<uint>();
}
else
{
return checked((uint)value);
}
}
/// <summary>
/// Takes a 64-bit floating point number and truncates it to a
/// 32-bit signed integer.
/// </summary>
/// <param name="value">A 64-bit floating point number to truncate.</param>
/// <returns>A 32-bit integer that is the truncated version of <paramref name="value"/>.</returns>
public static int TruncateToInt32(double value)
{
if (double.IsInfinity(value))
{
return ThrowInfinityToInt<int>();
}
else if (double.IsNaN(value))
{
return ThrowNaNToInt<int>();
}
else
{
return checked((int)value);
}
}
/// <summary>
/// Takes a 64-bit floating point number and truncates it to a
/// 32-bit unsigned integer.
/// </summary>
/// <param name="value">A 64-bit floating point number to truncate.</param>
/// <returns>A 32-bit integer that is the truncated version of <paramref name="value"/>.</returns>
public static uint TruncateToUInt32(double value)
{
if (double.IsInfinity(value))
{
return ThrowInfinityToInt<uint>();
}
else if (double.IsNaN(value))
{
return ThrowNaNToInt<uint>();
}
else
{
return checked((uint)value);
}
}
/// <summary>
/// Takes a 32-bit floating point number and truncates it to a
/// 64-bit signed integer.
/// </summary>
/// <param name="value">A 32-bit floating point number to truncate.</param>
/// <returns>A 64-bit integer that is the truncated version of <paramref name="value"/>.</returns>
public static long TruncateToInt64(float value)
{
if (float.IsInfinity(value))
{
return ThrowInfinityToInt<long>();
}
else if (float.IsNaN(value))
{
return ThrowNaNToInt<long>();
}
else
{
return checked((long)value);
}
}
/// <summary>
/// Takes a 32-bit floating point number and truncates it to a
/// 64-bit unsigned integer.
/// </summary>
/// <param name="value">A 32-bit floating point number to truncate.</param>
/// <returns>A 64-bit integer that is the truncated version of <paramref name="value"/>.</returns>
public static ulong TruncateToUInt64(float value)
{
if (float.IsInfinity(value))
{
return ThrowInfinityToInt<ulong>();
}
else if (float.IsNaN(value))
{
return ThrowNaNToInt<ulong>();
}
else
{
return checked((ulong)value);
}
}
/// <summary>
/// Takes a 64-bit floating point number and truncates it to a
/// 64-bit signed integer.
/// </summary>
/// <param name="value">A 64-bit floating point number to truncate.</param>
/// <returns>A 64-bit integer that is the truncated version of <paramref name="value"/>.</returns>
public static long TruncateToInt64(double value)
{
if (double.IsInfinity(value))
{
return ThrowInfinityToInt<long>();
}
else if (double.IsNaN(value))
{
return ThrowNaNToInt<long>();
}
else
{
return checked((long)value);
}
}
/// <summary>
/// Takes a 64-bit floating point number and truncates it to a
/// 64-bit unsigned integer.
/// </summary>
/// <param name="value">A 64-bit floating point number to truncate.</param>
/// <returns>A 64-bit integer that is the truncated version of <paramref name="value"/>.</returns>
public static ulong TruncateToUInt64(double value)
{
if (double.IsInfinity(value))
{
return ThrowInfinityToInt<ulong>();
}
else if (double.IsNaN(value))
{
return ThrowNaNToInt<ulong>();
}
else
{
return checked((ulong)value);
}
}
/// <summary>
/// Computes the remainder of two signed 32-bit integers, as specified by
/// the WebAssembly spec.
/// </summary>
/// <param name="lhs">A first integer.</param>
/// <param name="rhs">A second integer.</param>
/// <returns>The remainder after division of <paramref name="lhs"/> and <paramref name="rhs"/>.</returns>
public static int RemS(int lhs, int rhs)
{
if (lhs == int.MinValue && rhs == -1)
{
// We need to check for this corner case. As per the OpCodes.Rem docs:
//
// Note that on the Intel-based platforms an OverflowException is thrown when computing (minint rem -1).
//
return 0;
}
else
{
return lhs % rhs;
}
}
/// <summary>
/// Computes the remainder of two signed 64-bit integers, as specified by
/// the WebAssembly spec.
/// </summary>
/// <param name="lhs">A first integer.</param>
/// <param name="rhs">A second integer.</param>
/// <returns>The remainder after division of <paramref name="lhs"/> and <paramref name="rhs"/>.</returns>
public static long RemS(long lhs, long rhs)
{
if (lhs == long.MinValue && rhs == -1)
{
// We need to check for this corner case. As per the OpCodes.Rem docs:
//
// Note that on the Intel-based platforms an OverflowException is thrown when computing (minint rem -1).
//
return 0;
}
else
{
return lhs % rhs;
}
}
private static T ThrowInfinityToInt<T>()
{
throw new TrapException(
"Cannot convert infinity to an integer.",
TrapException.SpecMessages.IntegerOverflow);
}
private static T ThrowNaNToInt<T>()
{
throw new TrapException(
"Cannot convert NaN to an integer.",
TrapException.SpecMessages.InvalidConversionToInteger);
}
}
}
<|start_filename|>wasm-opt/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using Wasm.Binary;
using Wasm.Optimize;
namespace Wasm.Opt
{
// wasm-opt takes WebAssembly takes a single WebAssembly file as input and optimizes it.
public struct OptArgs
{
public string Output { get; set; }
public string Input { get; set; }
public static bool TryParse(string[] Args, out OptArgs Result)
{
Result = default(OptArgs);
if (Args.Length == 0)
{
return false;
}
for (int i = 0; i < Args.Length; i++)
{
if (Args[i] == "-o")
{
if (i == Args.Length - 1)
{
return false;
}
i++;
Result.Output = Args[i];
}
else
{
if (Result.Input != null)
{
return false;
}
Result.Input = Args[i];
}
}
return true;
}
}
public static class Program
{
public static int Main(string[] args)
{
OptArgs parsedArgs;
if (!OptArgs.TryParse(args, out parsedArgs))
{
Console.Error.WriteLine("usage: wasm-opt file.wasm [-o output.wasm]");
return 1;
}
// Read the file.
var file = WasmFile.ReadBinary(parsedArgs.Input);
file.Optimize();
// Now write the file to standard output.
using (var outputStream = string.IsNullOrEmpty(parsedArgs.Output)
? Console.OpenStandardOutput()
: File.OpenWrite(parsedArgs.Output))
{
file.WriteBinaryTo(outputStream);
}
return 0;
}
}
}
<|start_filename|>libwasm/Instructions/BlockOperator.cs<|end_filename|>
using System.Collections.Generic;
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes an operator that begins a sequence of expressions, yielding 0 or 1 values.
/// </summary>
public sealed class BlockOperator : Operator
{
/// <summary>
/// Creates a block operator.
/// </summary>
/// <param name="opCode">The block operator's opcode.</param>
/// <param name="declaringType">The type that declares the operator, if any.</param>
/// <param name="mnemonic">The operator's mnemonic.</param>
public BlockOperator(byte opCode, WasmType declaringType, string mnemonic)
: base(opCode, declaringType, mnemonic)
{ }
/// <summary>
/// Creates a block instruction with this operator and the given operands.
/// </summary>
/// <param name="blockType">The resulting block instruction's type.</param>
/// <param name="contents">
/// The resulting block instruction's contents, as a sequence of instructions.
/// </param>
/// <returns>A block instruction.</returns>
public BlockInstruction Create(WasmType blockType, IEnumerable<Instruction> contents)
{
return new BlockInstruction(this, blockType, contents);
}
/// <summary>
/// Reads the immediates (not the opcode) of a WebAssembly instruction
/// for this operator from the given reader and returns the result as an
/// instruction.
/// </summary>
/// <param name="reader">The WebAssembly file reader to read immediates from.</param>
/// <returns>A WebAssembly instruction.</returns>
public override Instruction ReadImmediates(BinaryWasmReader reader)
{
var type = reader.ReadWasmType();
return ReadBlockContents(type, reader);
}
/// <summary>
/// Reads the child instructions of a WebAssembly block from the given reader.
/// </summary>
/// <param name="blockType">The type of value returned by the resulting block.</param>
/// <param name="reader">The WebAssembly file reader.</param>
/// <returns>A WebAssembly block instruction.</returns>
public BlockInstruction ReadBlockContents(WasmType blockType, BinaryWasmReader reader)
{
var contents = new List<Instruction>();
while (true)
{
byte opCode = reader.ReadByte();
if (opCode == Operators.EndOpCode)
{
return Create(blockType, contents);
}
else
{
var op = Operators.GetOperatorByOpCode(opCode);
contents.Add(op.ReadImmediates(reader));
}
}
}
/// <summary>
/// Casts the given instruction to this operator's instruction type.
/// </summary>
/// <param name="value">The instruction to cast.</param>
/// <returns>The given instruction as this operator's instruction type.</returns>
public BlockInstruction CastInstruction(Instruction value)
{
return (BlockInstruction)value;
}
}
}
<|start_filename|>Makefile<|end_filename|>
# Phony targets: none of these produce a file with the target's name.
# NOTE(review): 'clean' is declared phony but no clean rule is visible here —
# it is presumably defined in an included makefile; confirm.
.PHONY: all debug release clean test
# Runner used to launch built .NET executables (mono; override for Windows).
RUN_EXE ?= mono
# The default target builds the Release configuration.
all: release
# Build the solution in the Release configuration.
release:
	msbuild /p:Configuration=Release /verbosity:quiet /nologo cs-wasm.sln
# Build the solution in the Debug configuration.
debug:
	msbuild /p:Configuration=Debug /verbosity:quiet /nologo cs-wasm.sln
# Helper scripts that define the COMPARE_TEST runner used by 'test'.
include flame-make-scripts/use-compare-test.mk
include flame-make-scripts/use-ecsc.mk
# Run the unit tests (with a fixed seed, 1234) and the compare-test suite.
# 'compare-test' is an order-only prerequisite: it must exist but does not
# trigger a rebuild of 'test'.
test: debug | compare-test
	$(RUN_EXE) ./unit-tests/bin/Debug/net47/unit-tests.exe 1234
	$(COMPARE_TEST) run-tests.test
<|start_filename|>libwasm/Interpret/ModuleInstance.cs<|end_filename|>
using System;
using System.Collections.Generic;
namespace Wasm.Interpret
{
/// <summary>
/// Represents an instance of a WebAssembly module.
/// </summary>
public sealed class ModuleInstance
{
        /// <summary>
        /// Creates an empty module instance that uses the given interpreter and
        /// execution policy. All definition and export tables start out empty;
        /// they are populated by <see cref="Instantiate"/>.
        /// </summary>
        /// <param name="interpreter">The instruction interpreter to use.</param>
        /// <param name="policy">The execution policy to adhere to.</param>
        private ModuleInstance(InstructionInterpreter interpreter, ExecutionPolicy policy)
        {
            this.Interpreter = interpreter;
            this.Policy = policy;
            this.definedTypes = new List<FunctionType>();
            this.definedMemories = new List<LinearMemory>();
            this.definedGlobals = new List<Variable>();
            this.definedFuncs = new List<FunctionDefinition>();
            this.definedTables = new List<FunctionTable>();
            this.expMemories = new Dictionary<string, LinearMemory>();
            this.expGlobals = new Dictionary<string, Variable>();
            this.expFuncs = new Dictionary<string, FunctionDefinition>();
            this.expTables = new Dictionary<string, FunctionTable>();
        }
        /// <summary>
        /// Gets the interpreter for this module instance.
        /// </summary>
        public InstructionInterpreter Interpreter { get; private set; }
        // Entities belonging to this module instance. Imports are appended first
        // (see ResolveImports, which runs before the Instantiate* steps), so list
        // indices line up with the WebAssembly index spaces, where imported
        // definitions precede module-defined ones.
        private List<FunctionType> definedTypes;
        private List<LinearMemory> definedMemories;
        private List<Variable> definedGlobals;
        private List<FunctionDefinition> definedFuncs;
        private List<FunctionTable> definedTables;
        // Entities exported by this module, keyed by export name; populated during
        // instantiation (presumably by RegisterExports — its body is not visible here).
        private Dictionary<string, LinearMemory> expMemories;
        private Dictionary<string, Variable> expGlobals;
        private Dictionary<string, FunctionDefinition> expFuncs;
        private Dictionary<string, FunctionTable> expTables;
        /// <summary>
        /// Gets the execution policy for this module.
        /// </summary>
        /// <value>An execution policy.</value>
        public ExecutionPolicy Policy { get; private set; }
        /// <summary>
        /// Gets a read-only list of the function types defined in this module.
        /// </summary>
        public IReadOnlyList<FunctionType> Types => definedTypes;
        /// <summary>
        /// Gets a read-only list of the memories in this module.
        /// </summary>
        public IReadOnlyList<LinearMemory> Memories => definedMemories;
        /// <summary>
        /// Gets a read-only list of the functions in this module.
        /// </summary>
        public IReadOnlyList<FunctionDefinition> Functions => definedFuncs;
        /// <summary>
        /// Gets a read-only list of global variables in this module.
        /// </summary>
        public IReadOnlyList<Variable> Globals => definedGlobals;
        /// <summary>
        /// Gets a read-only list of tables defined in this module.
        /// </summary>
        public IReadOnlyList<FunctionTable> Tables => definedTables;
        /// <summary>
        /// Gets a read-only mapping of names to memories exported by this module.
        /// </summary>
        public IReadOnlyDictionary<string, LinearMemory> ExportedMemories => expMemories;
        /// <summary>
        /// Gets a read-only mapping of names to functions exported by this module.
        /// </summary>
        public IReadOnlyDictionary<string, FunctionDefinition> ExportedFunctions => expFuncs;
        /// <summary>
        /// Gets a read-only mapping of names to global variables exported by this module.
        /// </summary>
        public IReadOnlyDictionary<string, Variable> ExportedGlobals => expGlobals;
        /// <summary>
        /// Gets a read-only mapping of names to tables exported by this module.
        /// </summary>
        public IReadOnlyDictionary<string, FunctionTable> ExportedTables => expTables;
/// <summary>
/// Evaluates an initializer expression.
/// </summary>
/// <param name="expression">The expression to evaluate.</param>
/// <param name="resultType">The result type expected from the expression.</param>
/// <returns>The value obtained by evaluating the initializer expression.</returns>
public object Evaluate(InitializerExpression expression, WasmValueType resultType)
{
var context = new InterpreterContext(this, new[] { resultType });
foreach (var instruction in expression.BodyInstructions)
{
Interpreter.Interpret(instruction, context);
}
var result = context.Pop<object>();
if (context.StackDepth > 0)
{
throw new WasmException(
"The stack must contain exactly one value after " +
"evaluating an initializer expression. Actual stack depth: " +
context.StackDepth + ".");
}
return result;
}
/// <summary>
/// Evaluates an initializer expression.
/// </summary>
/// <param name="expression">The expression to evaluate.</param>
/// <param name="resultType">The result type expected from the expression.</param>
/// <returns>The value obtained by evaluating the initializer expression.</returns>
public object Evaluate(InitializerExpression expression, Type resultType)
{
return Evaluate(expression, ValueHelpers.ToWasmValueType(resultType));
}
/// <summary>
/// Evaluates an initializer expression.
/// </summary>
/// <param name="expression">The expression to evaluate.</param>
/// <returns>The value obtained by evaluating the initializer expression.</returns>
public T Evaluate<T>(InitializerExpression expression)
{
return (T)Evaluate(expression, ValueHelpers.ToWasmValueType<T>());
}
/// <summary>
/// Runs the function at the given index with the given sequence of arguments.
/// </summary>
/// <param name="index">The index of the function to run.</param>
/// <param name="arguments">The function's argument list.</param>
/// <returns>The function's return value.</returns>
public IReadOnlyList<object> RunFunction(uint index, IReadOnlyList<object> arguments)
{
return definedFuncs[(int)index].Invoke(arguments);
}
/// <summary>
/// Instantiates the given WebAssembly file.
/// </summary>
/// <param name="file">The file to instantiate.</param>
/// <param name="importer">The importer to use to resolve module imports.</param>
/// <param name="interpreter">
/// Interprets instructions. A <c>null</c> interpreter indicates that the default interpreter should be used.
/// </param>
/// <param name="policy">
/// The execution policy to adhere to for this module.
/// A <c>null</c> execution policy indicates that the default policy should be used.
/// </param>
/// <param name="compiler">
/// Creates a new instance of a module compiler to use.
/// </param>
/// <returns>A module instance.</returns>
public static ModuleInstance Instantiate(
    WasmFile file,
    IImporter importer,
    InstructionInterpreter interpreter = null,
    ExecutionPolicy policy = null,
    Func<ModuleCompiler> compiler = null)
{
    // Fall back to defaults for any parameter that was left unspecified.
    interpreter = interpreter ?? DefaultInstructionInterpreter.Default;
    policy = policy ?? ExecutionPolicy.Create();
    compiler = compiler ?? new Func<ModuleCompiler>(() => new InterpreterCompiler());

    var instance = new ModuleInstance(interpreter, policy);

    // Gather the function types declared by the file.
    var allFuncTypes = GetFunctionTypes(file);
    instance.definedTypes.AddRange(allFuncTypes);
    // Resolve imports first: imported definitions precede module-defined ones
    // in each index space.
    instance.ResolveImports(file, importer, allFuncTypes);
    // Instantiate the module's own definitions: globals, memories, functions
    // and tables, in that order.
    instance.InstantiateGlobals(file);
    instance.InstantiateMemories(file, policy.MaxMemorySize);
    instance.InstantiateFunctionDefs(file, compiler(), allFuncTypes);
    instance.InstantiateTables(file);
    // Finally, expose the values the module exports.
    instance.RegisterExports(file);
    return instance;
}
/// <summary>
/// Uses the given importer to resolve all imported values.
/// </summary>
/// <param name="file">A file whose imports are to be resolved.</param>
/// <param name="importer">The importer.</param>
/// <param name="functionTypes">A list of <paramref name="file"/>'s function types.</param>
private void ResolveImports(
    WasmFile file,
    IImporter importer,
    List<FunctionType> functionTypes)
{
    foreach (var importSection in file.GetSections<ImportSection>())
    {
        foreach (var import in importSection.Imports)
        {
            // Dispatch on the kind of import and append the resolved value
            // to the matching definition list.
            if (import is ImportedMemory)
            {
                var resolvedMemory = importer.ImportMemory((ImportedMemory)import);
                if (resolvedMemory == null)
                {
                    ThrowCannotResolveImport(import, "linear memory");
                }
                definedMemories.Add(resolvedMemory);
            }
            else if (import is ImportedGlobal)
            {
                var resolvedGlobal = importer.ImportGlobal((ImportedGlobal)import);
                if (resolvedGlobal == null)
                {
                    ThrowCannotResolveImport(import, "global variable");
                }
                definedGlobals.Add(resolvedGlobal);
            }
            else if (import is ImportedFunction)
            {
                var functionImport = (ImportedFunction)import;
                // Imported functions are matched against the declared type
                // at the import's type index.
                var resolvedFunction = importer.ImportFunction(
                    functionImport, functionTypes[(int)functionImport.TypeIndex]);
                if (resolvedFunction == null)
                {
                    ThrowCannotResolveImport(import, "function");
                }
                DefineFunction(resolvedFunction);
            }
            else if (import is ImportedTable)
            {
                var resolvedTable = importer.ImportTable((ImportedTable)import);
                if (resolvedTable == null)
                {
                    ThrowCannotResolveImport(import, "table");
                }
                definedTables.Add(resolvedTable);
            }
            else
            {
                throw new WasmException("Unknown import type: " + import.ToString());
            }
        }
    }
}
/// <summary>
/// Throws a <see cref="WasmException"/> reporting that an import could not be resolved.
/// </summary>
/// <param name="import">The import that could not be resolved.</param>
/// <param name="importType">A human-readable description of the import's kind.</param>
private static void ThrowCannotResolveImport(ImportedValue import, string importType)
{
    throw new WasmException(
        $"Importer cannot resolve {importType} definition '{import.ModuleName}.{import.FieldName}'.");
}
/// <summary>
/// Instantiates the linear memories defined by the given file and applies
/// its data segments.
/// </summary>
/// <param name="file">The file whose memories are to be instantiated.</param>
/// <param name="maxMemorySize">
/// An upper bound on memory size imposed by the execution policy; zero means
/// the memory specs are used unclamped.
/// </param>
private void InstantiateMemories(WasmFile file, uint maxMemorySize)
{
    // Create the memories that the module itself defines.
    foreach (var memorySection in file.GetSections<MemorySection>())
    {
        foreach (var memorySpec in memorySection.Memories)
        {
            if (maxMemorySize == 0)
            {
                // No policy limit: use the spec's limits as-is.
                definedMemories.Add(new LinearMemory(memorySpec.Limits));
            }
            else
            {
                // Clamp the memory's maximum size to the policy's limit.
                var limits = memorySpec.Limits;
                var clampedMax = limits.HasMaximum
                    ? Math.Min(limits.Maximum.Value, maxMemorySize)
                    : maxMemorySize;
                definedMemories.Add(
                    new LinearMemory(new ResizableLimits(limits.Initial, clampedMax)));
            }
        }
    }

    // Initialize memory contents by applying the data sections' segments.
    foreach (var dataSection in file.GetSections<DataSection>())
    {
        foreach (var segment in dataSection.Segments)
        {
            var memoryView = Memories[(int)segment.MemoryIndex].Int8;
            var baseOffset = Evaluate<int>(segment.Offset);
            for (int k = 0; k < segment.Data.Length; k++)
            {
                memoryView[(uint)(baseOffset + k)] = (sbyte)segment.Data[k];
            }
        }
    }
}
/// <summary>
/// Instantiates the global variables defined by the given file.
/// </summary>
/// <param name="file">The file whose globals are to be instantiated.</param>
private void InstantiateGlobals(WasmFile file)
{
    // Create the module's own globals, evaluating each initializer expression.
    foreach (var globalSection in file.GetSections<GlobalSection>())
    {
        foreach (var globalSpec in globalSection.GlobalVariables)
        {
            var initialValue = Evaluate(globalSpec.InitialValue, globalSpec.Type.ContentType);
            definedGlobals.Add(
                Variable.Create<object>(
                    globalSpec.Type.ContentType,
                    globalSpec.Type.IsMutable,
                    initialValue));
        }
    }
}
/// <summary>
/// Gets a list of all function types declared by the given WebAssembly file.
/// </summary>
/// <param name="file">The WebAssembly file to examine.</param>
/// <returns>The list of function types.</returns>
private static List<FunctionType> GetFunctionTypes(WasmFile file)
{
    // Concatenate the function types of every type section, in order.
    var results = new List<FunctionType>();
    foreach (var typeSection in file.GetSections<TypeSection>())
    {
        results.AddRange(typeSection.FunctionTypes);
    }
    return results;
}
/// <summary>
/// Instantiates all function definitions from the given WebAssembly file.
/// </summary>
/// <param name="file">A WebAssembly file.</param>
/// <param name="compiler">A compiler to use for instantiating function definitions.</param>
/// <param name="functionTypes">The list of all function types declared by the WebAssembly file.</param>
private void InstantiateFunctionDefs(WasmFile file, ModuleCompiler compiler, List<FunctionType> functionTypes)
{
    // Collect each declared function's signature...
    var signatures = new List<FunctionType>();
    foreach (var funcSection in file.GetSections<FunctionSection>())
    {
        foreach (var typeIndex in funcSection.FunctionTypes)
        {
            signatures.Add(functionTypes[(int)typeIndex]);
        }
    }

    // ...and each defined function's body.
    var bodies = new List<FunctionBody>();
    foreach (var codeSection in file.GetSections<CodeSection>())
    {
        bodies.AddRange(codeSection.Bodies);
    }

    // Declarations and definitions must pair up one-to-one.
    if (signatures.Count != bodies.Count)
    {
        throw new WasmException(
            "Function declaration/definition count mismatch: module declares " +
            signatures.Count + " functions and defines " + bodies.Count + ".");
    }

    // Compile every body against its signature and register the results.
    compiler.Initialize(this, definedFuncs.Count, signatures);
    for (int i = 0; i < signatures.Count; i++)
    {
        DefineFunction(compiler.Compile(i, bodies[i]));
    }
    compiler.Finish();
}
/// <summary>
/// Defines a function.
/// </summary>
/// <param name="definition">The function's definition.</param>
private void DefineFunction(FunctionDefinition definition) => definedFuncs.Add(definition);
/// <summary>
/// Instantiates the tables in the given WebAssembly file.
/// </summary>
/// <param name="file">The file whose tables are to be instantiated.</param>
private void InstantiateTables(WasmFile file)
{
    // Create the tables that the module itself defines.
    foreach (var tableSection in file.GetSections<TableSection>())
    {
        foreach (var tableSpec in tableSection.Tables)
        {
            definedTables.Add(new FunctionTable(tableSpec.Limits));
        }
    }

    // Populate the tables by applying the element sections' segments.
    foreach (var elementSection in file.GetSections<ElementSection>())
    {
        foreach (var segment in elementSection.Segments)
        {
            var table = Tables[(int)segment.TableIndex];
            var baseOffset = Evaluate<int>(segment.Offset);
            for (int k = 0; k < segment.Elements.Count; k++)
            {
                // Each element is an index into the function index space.
                table[(uint)(baseOffset + k)] = definedFuncs[(int)segment.Elements[k]];
            }
        }
    }
}
/// <summary>
/// Exports values specified by the given WebAssembly file.
/// </summary>
/// <param name="file">The file that specifies which values are to be exported and how.</param>
private void RegisterExports(WasmFile file)
{
    foreach (var exportSection in file.GetSections<ExportSection>())
    {
        foreach (var export in exportSection.Exports)
        {
            // Map the export's name to the value at its index in the
            // index space selected by the export's kind.
            var index = (int)export.Index;
            switch (export.Kind)
            {
                case ExternalKind.Memory:
                    expMemories[export.Name] = Memories[index];
                    break;
                case ExternalKind.Global:
                    expGlobals[export.Name] = Globals[index];
                    break;
                case ExternalKind.Function:
                    expFuncs[export.Name] = Functions[index];
                    break;
                case ExternalKind.Table:
                    expTables[export.Name] = Tables[index];
                    break;
                default:
                    throw new WasmException("Unknown export kind: " + export.Kind);
            }
        }
    }
}
}
}
<|start_filename|>libwasm/Interpret/Variable.cs<|end_filename|>
using System;
namespace Wasm.Interpret
{
/// <summary>
/// Describes a WebAssembly variable.
/// </summary>
public sealed class Variable
{
    /// <summary>
    /// Creates a variable with the given value, type and mutability.
    /// </summary>
    /// <param name="value">The variable's value.</param>
    /// <param name="type">The variable's type.</param>
    /// <param name="isMutable">The variable's mutability.</param>
    private Variable(object value, WasmValueType type, bool isMutable)
    {
        this.val = value;
        this.Type = type;
        this.IsMutable = isMutable;
    }

    /// <summary>
    /// The variable's value.
    /// </summary>
    private object val;

    /// <summary>
    /// Gets this variable's type.
    /// </summary>
    /// <returns>The variable's type.</returns>
    public WasmValueType Type { get; private set; }

    /// <summary>
    /// Gets this variable's mutability.
    /// </summary>
    /// <returns>The variable's mutability.</returns>
    public bool IsMutable { get; private set; }

    /// <summary>
    /// Gets this variable's value.
    /// </summary>
    /// <returns>The variable's value.</returns>
    public T Get<T>()
    {
        return (T)val;
    }

    /// <summary>
    /// Sets this variable's value.
    /// </summary>
    /// <param name="Value">The variable's new value.</param>
    public void Set<T>(T Value)
    {
        if (!IsMutable)
        {
            throw new WasmException("Cannot assign a value to an immutable variable.");
        }
        if (!IsInstanceOf<T>(Value, Type))
        {
            throw new WasmException(
                "Cannot assign a value of type '" + GetTypeName(Value) +
                "' to a variable of type '" + ((object)Type).ToString() + "'.");
        }
        val = Value;
    }

    /// <summary>
    /// Creates a new variable from the given value.
    /// </summary>
    /// <param name="type">The variable's type.</param>
    /// <param name="isMutable">The variable's mutability.</param>
    /// <param name="value">The variable's initial value.</param>
    /// <returns>The newly-created variable.</returns>
    public static Variable Create<T>(WasmValueType type, bool isMutable, T value)
    {
        if (!IsInstanceOf<T>(value, type))
        {
            throw new WasmException(
                "Cannot create a variable of type '" + ((object)type).ToString() +
                "' with an initial value of type '" + GetTypeName(value) + "'.");
        }
        return new Variable(value, type, isMutable);
    }

    /// <summary>
    /// Creates a new variable of the given type and mutability, and initializes
    /// it with the default value for the given type.
    /// </summary>
    /// <param name="type">The variable's type.</param>
    /// <param name="isMutable">The variable's mutability.</param>
    /// <returns>The newly-created variable.</returns>
    public static Variable CreateDefault(WasmValueType type, bool isMutable)
    {
        return Create<object>(type, isMutable, GetDefaultValue(type));
    }

    /// <summary>
    /// Gets the default value for the given WebAssembly value type.
    /// </summary>
    /// <param name="type">A WebAssembly value type.</param>
    /// <returns>The default value.</returns>
    public static object GetDefaultValue(WasmValueType type)
    {
        switch (type)
        {
            case WasmValueType.Int32:
                return default(int);
            case WasmValueType.Int64:
                return default(long);
            case WasmValueType.Float32:
                return default(float);
            case WasmValueType.Float64:
                return default(double);
            default:
                throw new WasmException("Unknown value type: " + type);
        }
    }

    /// <summary>
    /// Checks if the given value is an instance of the given WebAssembly value type.
    /// </summary>
    /// <param name="value">A value.</param>
    /// <param name="type">A WebAssembly value type.</param>
    /// <returns>
    /// <c>true</c> if the given value is an instance of the given WebAssembly value type;
    /// otherwise, <c>false</c>.
    /// </returns>
    public static bool IsInstanceOf<T>(T value, WasmValueType type)
    {
        switch (type)
        {
            case WasmValueType.Int32:
                return value is int;
            case WasmValueType.Int64:
                return value is long;
            case WasmValueType.Float32:
                return value is float;
            case WasmValueType.Float64:
                return value is double;
            default:
                throw new WasmException("Unknown value type: " + type);
        }
    }

    private static string GetTypeName(object value)
    {
        // Guard against null so a type mismatch involving a null value reports
        // "null" in the WasmException message instead of crashing with a
        // NullReferenceException while formatting it.
        return value == null ? "null" : value.GetType().Name;
    }
}
}
<|start_filename|>unit-tests/Interpret/DefaultInterpreterTests.cs<|end_filename|>
using System;
using Loyc.MiniTest;
using Wasm.Instructions;
namespace Wasm.Interpret
{
[TestFixture]
public class DefaultInterpreterTests
{
    [Test]
    public void ImplementationCompleteness()
    {
        // Every operator the library defines must have an implementation
        // registered in the default interpreter.
        var defaultInterpreter = DefaultInstructionInterpreter.Default;
        foreach (var op in Operators.AllOperators)
        {
            Assert.IsTrue(
                defaultInterpreter.IsImplemented(op),
                "Operator not implemented: " + op.ToString());
        }
    }

    // Produced through a method call rather than a literal — presumably to
    // obtain IEEE-754 negative zero without compile-time constant folding;
    // TODO confirm.
    private static readonly double float64NegativeZero = Negate(0.0);

    private static double Negate(double value)
    {
        return -value;
    }

    [Test]
    public void Signbit()
    {
        // Positive values (including +0.0) have a clear sign bit; negative
        // values (including -0.0) have it set.
        Assert.IsFalse(ValueHelpers.Signbit(1.0));
        Assert.IsTrue(ValueHelpers.Signbit(-1.0));
        Assert.IsTrue(ValueHelpers.Signbit(float64NegativeZero));
        Assert.IsFalse(ValueHelpers.Signbit(0.0));
    }

    [Test]
    public void Copysign()
    {
        // The result takes its magnitude from the first operand and its sign
        // from the second, including -0.0 and NaN sign bits.
        Assert.AreEqual(42.0, ValueHelpers.Copysign(42.0, 1.0));
        Assert.AreEqual(42.0, ValueHelpers.Copysign(-42.0, 1.0));
        Assert.AreEqual(-42.0, ValueHelpers.Copysign(-42.0, -1.0));
        Assert.AreEqual(42.0, ValueHelpers.Copysign(-42.0, 0.0));
        Assert.AreEqual(-42.0, ValueHelpers.Copysign(-42.0, float64NegativeZero));
        Assert.AreEqual(42.0, ValueHelpers.Copysign(-42.0, (double)Text.FloatLiteral.NaN(false)));
        Assert.AreEqual(-42.0, ValueHelpers.Copysign(-42.0, (double)Text.FloatLiteral.NaN(true)));
    }
}
}
<|start_filename|>libwasm/Interpret/DefaultInstructionInterpreter.cs<|end_filename|>
using System;
using System.Collections.Generic;
using Wasm.Instructions;
namespace Wasm.Interpret
{
/// <summary>
/// The default instruction interpreter implementation.
/// </summary>
public sealed class DefaultInstructionInterpreter : InstructionInterpreter
{
    /// <summary>
    /// Creates an instruction interpreter with no operator implementations.
    /// </summary>
    public DefaultInstructionInterpreter()
    {
        this.operatorImpls =
            new Dictionary<Operator, Action<Instruction, InterpreterContext>>();
    }

    /// <summary>
    /// Creates an instruction interpreter that clones the given interpreter's
    /// operator implementations.
    /// </summary>
    public DefaultInstructionInterpreter(DefaultInstructionInterpreter other)
    {
        // Copy the dictionary so later ImplementOperator calls on either
        // interpreter do not affect the other.
        this.operatorImpls =
            new Dictionary<Operator, Action<Instruction, InterpreterContext>>(
                other.operatorImpls);
    }

    /// <summary>
    /// A mapping of operators to their implementations.
    /// </summary>
    private Dictionary<Operator, Action<Instruction, InterpreterContext>> operatorImpls;

    /// <summary>
    /// Implements the given operator as the specified action.
    /// </summary>
    /// <param name="op">The operator to implement.</param>
    /// <param name="implementation">The action that implements the operator.</param>
    public void ImplementOperator(
        Operator op,
        Action<Instruction, InterpreterContext> implementation)
    {
        // Assignment (not Add) so re-implementing an operator overwrites the
        // previous implementation instead of throwing.
        operatorImpls[op] = implementation;
    }

    /// <summary>
    /// Checks if this instruction interpreter implements the given operator.
    /// </summary>
    /// <param name="op">A WebAssembly operator.</param>
    /// <returns><c>true</c> if the given operator is implemented by this interpreter; otherwise, <c>false</c>.</returns>
    public bool IsImplemented(Operator op) => operatorImpls.ContainsKey(op);

    /// <summary>
    /// Interprets the given instruction within the specified context.
    /// </summary>
    /// <param name="value">The instruction to interpret.</param>
    /// <param name="context">The interpreter context.</param>
    public override void Interpret(Instruction value, InterpreterContext context)
    {
        // Once the context has returned, the rest of the instruction
        // sequence is skipped.
        if (context.HasReturned)
        {
            return;
        }

        Action<Instruction, InterpreterContext> impl;
        if (operatorImpls.TryGetValue(value.Op, out impl))
        {
            impl(value, context);
        }
        else
        {
            throw new WasmException("Operator not implemented by interpreter: " + value.Op.ToString());
        }
    }

    /// <summary>
    /// The default instruction interpreter with the default list of operator implementations.
    /// Please don't implement any additional operators in this interpreter instance.
    /// </summary>
    public static readonly DefaultInstructionInterpreter Default;

    // Registers an implementation for every operator that OperatorImpls
    // provides, one per operator in Operators.
    static DefaultInstructionInterpreter()
    {
        Default = new DefaultInstructionInterpreter();
        // Control flow, parametric and call operators.
        Default.ImplementOperator(Operators.Unreachable, OperatorImpls.Unreachable);
        Default.ImplementOperator(Operators.Nop, OperatorImpls.Nop);
        Default.ImplementOperator(Operators.Block, OperatorImpls.Block);
        Default.ImplementOperator(Operators.Loop, OperatorImpls.Loop);
        Default.ImplementOperator(Operators.If, OperatorImpls.If);
        Default.ImplementOperator(Operators.Br, OperatorImpls.Br);
        Default.ImplementOperator(Operators.BrIf, OperatorImpls.BrIf);
        Default.ImplementOperator(Operators.BrTable, OperatorImpls.BrTable);
        Default.ImplementOperator(Operators.Return, OperatorImpls.Return);
        Default.ImplementOperator(Operators.Drop, OperatorImpls.Drop);
        Default.ImplementOperator(Operators.Select, OperatorImpls.Select);
        Default.ImplementOperator(Operators.Call, OperatorImpls.Call);
        Default.ImplementOperator(Operators.CallIndirect, OperatorImpls.CallIndirect);
        // Local and global variable access.
        Default.ImplementOperator(Operators.GetLocal, OperatorImpls.GetLocal);
        Default.ImplementOperator(Operators.SetLocal, OperatorImpls.SetLocal);
        Default.ImplementOperator(Operators.TeeLocal, OperatorImpls.TeeLocal);
        Default.ImplementOperator(Operators.GetGlobal, OperatorImpls.GetGlobal);
        Default.ImplementOperator(Operators.SetGlobal, OperatorImpls.SetGlobal);
        // Memory loads.
        Default.ImplementOperator(Operators.Int32Load, OperatorImpls.Int32Load);
        Default.ImplementOperator(Operators.Int64Load, OperatorImpls.Int64Load);
        Default.ImplementOperator(Operators.Int32Load8S, OperatorImpls.Int32Load8S);
        Default.ImplementOperator(Operators.Int32Load8U, OperatorImpls.Int32Load8U);
        Default.ImplementOperator(Operators.Int32Load16S, OperatorImpls.Int32Load16S);
        Default.ImplementOperator(Operators.Int32Load16U, OperatorImpls.Int32Load16U);
        Default.ImplementOperator(Operators.Int64Load8S, OperatorImpls.Int64Load8S);
        Default.ImplementOperator(Operators.Int64Load8U, OperatorImpls.Int64Load8U);
        Default.ImplementOperator(Operators.Int64Load16S, OperatorImpls.Int64Load16S);
        Default.ImplementOperator(Operators.Int64Load16U, OperatorImpls.Int64Load16U);
        Default.ImplementOperator(Operators.Int64Load32S, OperatorImpls.Int64Load32S);
        Default.ImplementOperator(Operators.Int64Load32U, OperatorImpls.Int64Load32U);
        Default.ImplementOperator(Operators.Float32Load, OperatorImpls.Float32Load);
        Default.ImplementOperator(Operators.Float64Load, OperatorImpls.Float64Load);
        // Memory stores.
        Default.ImplementOperator(Operators.Int32Store8, OperatorImpls.Int32Store8);
        Default.ImplementOperator(Operators.Int32Store16, OperatorImpls.Int32Store16);
        Default.ImplementOperator(Operators.Int32Store, OperatorImpls.Int32Store);
        Default.ImplementOperator(Operators.Int64Store8, OperatorImpls.Int64Store8);
        Default.ImplementOperator(Operators.Int64Store16, OperatorImpls.Int64Store16);
        Default.ImplementOperator(Operators.Int64Store32, OperatorImpls.Int64Store32);
        Default.ImplementOperator(Operators.Int64Store, OperatorImpls.Int64Store);
        Default.ImplementOperator(Operators.Float32Store, OperatorImpls.Float32Store);
        Default.ImplementOperator(Operators.Float64Store, OperatorImpls.Float64Store);
        // Memory size queries and growth.
        Default.ImplementOperator(Operators.CurrentMemory, OperatorImpls.CurrentMemory);
        Default.ImplementOperator(Operators.GrowMemory, OperatorImpls.GrowMemory);
        // Constants.
        Default.ImplementOperator(Operators.Int32Const, OperatorImpls.Int32Const);
        Default.ImplementOperator(Operators.Int64Const, OperatorImpls.Int64Const);
        Default.ImplementOperator(Operators.Float32Const, OperatorImpls.Float32Const);
        Default.ImplementOperator(Operators.Float64Const, OperatorImpls.Float64Const);
        // 32-bit integer operators.
        Default.ImplementOperator(Operators.Int32Add, OperatorImpls.Int32Add);
        Default.ImplementOperator(Operators.Int32And, OperatorImpls.Int32And);
        Default.ImplementOperator(Operators.Int32Clz, OperatorImpls.Int32Clz);
        Default.ImplementOperator(Operators.Int32Ctz, OperatorImpls.Int32Ctz);
        Default.ImplementOperator(Operators.Int32DivS, OperatorImpls.Int32DivS);
        Default.ImplementOperator(Operators.Int32DivU, OperatorImpls.Int32DivU);
        Default.ImplementOperator(Operators.Int32Eq, OperatorImpls.Int32Eq);
        Default.ImplementOperator(Operators.Int32Eqz, OperatorImpls.Int32Eqz);
        Default.ImplementOperator(Operators.Int32GeS, OperatorImpls.Int32GeS);
        Default.ImplementOperator(Operators.Int32GeU, OperatorImpls.Int32GeU);
        Default.ImplementOperator(Operators.Int32GtS, OperatorImpls.Int32GtS);
        Default.ImplementOperator(Operators.Int32GtU, OperatorImpls.Int32GtU);
        Default.ImplementOperator(Operators.Int32LeS, OperatorImpls.Int32LeS);
        Default.ImplementOperator(Operators.Int32LeU, OperatorImpls.Int32LeU);
        Default.ImplementOperator(Operators.Int32LtS, OperatorImpls.Int32LtS);
        Default.ImplementOperator(Operators.Int32LtU, OperatorImpls.Int32LtU);
        Default.ImplementOperator(Operators.Int32Mul, OperatorImpls.Int32Mul);
        Default.ImplementOperator(Operators.Int32Ne, OperatorImpls.Int32Ne);
        Default.ImplementOperator(Operators.Int32Or, OperatorImpls.Int32Or);
        Default.ImplementOperator(Operators.Int32Popcnt, OperatorImpls.Int32Popcnt);
        Default.ImplementOperator(Operators.Int32ReinterpretFloat32, OperatorImpls.Int32ReinterpretFloat32);
        Default.ImplementOperator(Operators.Int32RemS, OperatorImpls.Int32RemS);
        Default.ImplementOperator(Operators.Int32RemU, OperatorImpls.Int32RemU);
        Default.ImplementOperator(Operators.Int32Rotl, OperatorImpls.Int32Rotl);
        Default.ImplementOperator(Operators.Int32Rotr, OperatorImpls.Int32Rotr);
        Default.ImplementOperator(Operators.Int32Shl, OperatorImpls.Int32Shl);
        Default.ImplementOperator(Operators.Int32ShrS, OperatorImpls.Int32ShrS);
        Default.ImplementOperator(Operators.Int32ShrU, OperatorImpls.Int32ShrU);
        Default.ImplementOperator(Operators.Int32Sub, OperatorImpls.Int32Sub);
        Default.ImplementOperator(Operators.Int32TruncSFloat32, OperatorImpls.Int32TruncSFloat32);
        Default.ImplementOperator(Operators.Int32TruncSFloat64, OperatorImpls.Int32TruncSFloat64);
        Default.ImplementOperator(Operators.Int32TruncUFloat32, OperatorImpls.Int32TruncUFloat32);
        Default.ImplementOperator(Operators.Int32TruncUFloat64, OperatorImpls.Int32TruncUFloat64);
        Default.ImplementOperator(Operators.Int32WrapInt64, OperatorImpls.Int32WrapInt64);
        Default.ImplementOperator(Operators.Int32Xor, OperatorImpls.Int32Xor);
        // 64-bit integer operators.
        Default.ImplementOperator(Operators.Int64Add, OperatorImpls.Int64Add);
        Default.ImplementOperator(Operators.Int64And, OperatorImpls.Int64And);
        Default.ImplementOperator(Operators.Int64Clz, OperatorImpls.Int64Clz);
        Default.ImplementOperator(Operators.Int64Ctz, OperatorImpls.Int64Ctz);
        Default.ImplementOperator(Operators.Int64DivS, OperatorImpls.Int64DivS);
        Default.ImplementOperator(Operators.Int64DivU, OperatorImpls.Int64DivU);
        Default.ImplementOperator(Operators.Int64Eq, OperatorImpls.Int64Eq);
        Default.ImplementOperator(Operators.Int64Eqz, OperatorImpls.Int64Eqz);
        Default.ImplementOperator(Operators.Int64ExtendSInt32, OperatorImpls.Int64ExtendSInt32);
        Default.ImplementOperator(Operators.Int64ExtendUInt32, OperatorImpls.Int64ExtendUInt32);
        Default.ImplementOperator(Operators.Int64GeS, OperatorImpls.Int64GeS);
        Default.ImplementOperator(Operators.Int64GeU, OperatorImpls.Int64GeU);
        Default.ImplementOperator(Operators.Int64GtS, OperatorImpls.Int64GtS);
        Default.ImplementOperator(Operators.Int64GtU, OperatorImpls.Int64GtU);
        Default.ImplementOperator(Operators.Int64LeS, OperatorImpls.Int64LeS);
        Default.ImplementOperator(Operators.Int64LeU, OperatorImpls.Int64LeU);
        Default.ImplementOperator(Operators.Int64LtS, OperatorImpls.Int64LtS);
        Default.ImplementOperator(Operators.Int64LtU, OperatorImpls.Int64LtU);
        Default.ImplementOperator(Operators.Int64Mul, OperatorImpls.Int64Mul);
        Default.ImplementOperator(Operators.Int64Ne, OperatorImpls.Int64Ne);
        Default.ImplementOperator(Operators.Int64Or, OperatorImpls.Int64Or);
        Default.ImplementOperator(Operators.Int64Popcnt, OperatorImpls.Int64Popcnt);
        Default.ImplementOperator(Operators.Int64ReinterpretFloat64, OperatorImpls.Int64ReinterpretFloat64);
        Default.ImplementOperator(Operators.Int64RemS, OperatorImpls.Int64RemS);
        Default.ImplementOperator(Operators.Int64RemU, OperatorImpls.Int64RemU);
        Default.ImplementOperator(Operators.Int64Rotl, OperatorImpls.Int64Rotl);
        Default.ImplementOperator(Operators.Int64Rotr, OperatorImpls.Int64Rotr);
        Default.ImplementOperator(Operators.Int64Shl, OperatorImpls.Int64Shl);
        Default.ImplementOperator(Operators.Int64ShrS, OperatorImpls.Int64ShrS);
        Default.ImplementOperator(Operators.Int64ShrU, OperatorImpls.Int64ShrU);
        Default.ImplementOperator(Operators.Int64Sub, OperatorImpls.Int64Sub);
        Default.ImplementOperator(Operators.Int64TruncSFloat32, OperatorImpls.Int64TruncSFloat32);
        Default.ImplementOperator(Operators.Int64TruncSFloat64, OperatorImpls.Int64TruncSFloat64);
        Default.ImplementOperator(Operators.Int64TruncUFloat32, OperatorImpls.Int64TruncUFloat32);
        Default.ImplementOperator(Operators.Int64TruncUFloat64, OperatorImpls.Int64TruncUFloat64);
        Default.ImplementOperator(Operators.Int64Xor, OperatorImpls.Int64Xor);
        // 32-bit floating-point operators.
        Default.ImplementOperator(Operators.Float32Abs, OperatorImpls.Float32Abs);
        Default.ImplementOperator(Operators.Float32Add, OperatorImpls.Float32Add);
        Default.ImplementOperator(Operators.Float32Ceil, OperatorImpls.Float32Ceil);
        Default.ImplementOperator(Operators.Float32ConvertSInt32, OperatorImpls.Float32ConvertSInt32);
        Default.ImplementOperator(Operators.Float32ConvertSInt64, OperatorImpls.Float32ConvertSInt64);
        Default.ImplementOperator(Operators.Float32ConvertUInt32, OperatorImpls.Float32ConvertUInt32);
        Default.ImplementOperator(Operators.Float32ConvertUInt64, OperatorImpls.Float32ConvertUInt64);
        Default.ImplementOperator(Operators.Float32Copysign, OperatorImpls.Float32Copysign);
        Default.ImplementOperator(Operators.Float32DemoteFloat64, OperatorImpls.Float32DemoteFloat64);
        Default.ImplementOperator(Operators.Float32Div, OperatorImpls.Float32Div);
        Default.ImplementOperator(Operators.Float32Eq, OperatorImpls.Float32Eq);
        Default.ImplementOperator(Operators.Float32Floor, OperatorImpls.Float32Floor);
        Default.ImplementOperator(Operators.Float32Ge, OperatorImpls.Float32Ge);
        Default.ImplementOperator(Operators.Float32Gt, OperatorImpls.Float32Gt);
        Default.ImplementOperator(Operators.Float32Le, OperatorImpls.Float32Le);
        Default.ImplementOperator(Operators.Float32Lt, OperatorImpls.Float32Lt);
        Default.ImplementOperator(Operators.Float32Max, OperatorImpls.Float32Max);
        Default.ImplementOperator(Operators.Float32Min, OperatorImpls.Float32Min);
        Default.ImplementOperator(Operators.Float32Mul, OperatorImpls.Float32Mul);
        Default.ImplementOperator(Operators.Float32Ne, OperatorImpls.Float32Ne);
        Default.ImplementOperator(Operators.Float32Nearest, OperatorImpls.Float32Nearest);
        Default.ImplementOperator(Operators.Float32Neg, OperatorImpls.Float32Neg);
        Default.ImplementOperator(Operators.Float32ReinterpretInt32, OperatorImpls.Float32ReinterpretInt32);
        Default.ImplementOperator(Operators.Float32Sqrt, OperatorImpls.Float32Sqrt);
        Default.ImplementOperator(Operators.Float32Sub, OperatorImpls.Float32Sub);
        Default.ImplementOperator(Operators.Float32Trunc, OperatorImpls.Float32Trunc);
        // 64-bit floating-point operators.
        Default.ImplementOperator(Operators.Float64Abs, OperatorImpls.Float64Abs);
        Default.ImplementOperator(Operators.Float64Add, OperatorImpls.Float64Add);
        Default.ImplementOperator(Operators.Float64Ceil, OperatorImpls.Float64Ceil);
        Default.ImplementOperator(Operators.Float64ConvertSInt32, OperatorImpls.Float64ConvertSInt32);
        Default.ImplementOperator(Operators.Float64ConvertSInt64, OperatorImpls.Float64ConvertSInt64);
        Default.ImplementOperator(Operators.Float64ConvertUInt32, OperatorImpls.Float64ConvertUInt32);
        Default.ImplementOperator(Operators.Float64ConvertUInt64, OperatorImpls.Float64ConvertUInt64);
        Default.ImplementOperator(Operators.Float64Copysign, OperatorImpls.Float64Copysign);
        Default.ImplementOperator(Operators.Float64Div, OperatorImpls.Float64Div);
        Default.ImplementOperator(Operators.Float64Eq, OperatorImpls.Float64Eq);
        Default.ImplementOperator(Operators.Float64Floor, OperatorImpls.Float64Floor);
        Default.ImplementOperator(Operators.Float64Ge, OperatorImpls.Float64Ge);
        Default.ImplementOperator(Operators.Float64Gt, OperatorImpls.Float64Gt);
        Default.ImplementOperator(Operators.Float64Le, OperatorImpls.Float64Le);
        Default.ImplementOperator(Operators.Float64Lt, OperatorImpls.Float64Lt);
        Default.ImplementOperator(Operators.Float64Max, OperatorImpls.Float64Max);
        Default.ImplementOperator(Operators.Float64Min, OperatorImpls.Float64Min);
        Default.ImplementOperator(Operators.Float64Mul, OperatorImpls.Float64Mul);
        Default.ImplementOperator(Operators.Float64Ne, OperatorImpls.Float64Ne);
        Default.ImplementOperator(Operators.Float64Nearest, OperatorImpls.Float64Nearest);
        Default.ImplementOperator(Operators.Float64Neg, OperatorImpls.Float64Neg);
        Default.ImplementOperator(Operators.Float64PromoteFloat32, OperatorImpls.Float64PromoteFloat32);
        Default.ImplementOperator(Operators.Float64ReinterpretInt64, OperatorImpls.Float64ReinterpretInt64);
        Default.ImplementOperator(Operators.Float64Sqrt, OperatorImpls.Float64Sqrt);
        Default.ImplementOperator(Operators.Float64Sub, OperatorImpls.Float64Sub);
        Default.ImplementOperator(Operators.Float64Trunc, OperatorImpls.Float64Trunc);
    }
}
}
<|start_filename|>libwasm/WasmType.cs<|end_filename|>
namespace Wasm
{
/// <summary>
/// An enumeration of WebAssembly language types.
/// </summary>
/// <remarks>
/// NOTE(review): the negative enumerator values presumably mirror the signed
/// LEB128 type encodings of the WebAssembly binary format — verify against the spec.
/// </remarks>
public enum WasmType : sbyte
{
    /// <summary>
    /// A 32-bit integer type.
    /// </summary>
    Int32 = -0x01,

    /// <summary>
    /// A 64-bit integer type.
    /// </summary>
    Int64 = -0x02,

    /// <summary>
    /// A 32-bit floating-point type.
    /// </summary>
    Float32 = -0x03,

    /// <summary>
    /// A 64-bit floating-point type.
    /// </summary>
    Float64 = -0x04,

    /// <summary>
    /// A pointer to a function of any type.
    /// </summary>
    AnyFunc = -0x10,

    /// <summary>
    /// The type of function declarations.
    /// </summary>
    Func = -0x20,

    /// <summary>
    /// A pseudo-type for representing an empty block type.
    /// </summary>
    Empty = -0x40
}
/// <summary>
/// An enumeration of WebAssembly value types.
/// </summary>
/// <remarks>
/// Each member's value coincides with the member of the same name in
/// <see cref="WasmType"/>, so the two can be cast to one another.
/// </remarks>
public enum WasmValueType : sbyte
{
    /// <summary>
    /// A 32-bit integer type.
    /// </summary>
    Int32 = -0x01,

    /// <summary>
    /// A 64-bit integer type.
    /// </summary>
    Int64 = -0x02,

    /// <summary>
    /// A 32-bit floating-point type.
    /// </summary>
    Float32 = -0x03,

    /// <summary>
    /// A 64-bit floating-point type.
    /// </summary>
    Float64 = -0x04
}
}
<|start_filename|>libwasm-text/SExpression.cs<|end_filename|>
using System;
using System.Collections.Generic;
namespace Wasm.Text
{
/// <summary>
/// An S-expression: a data structure that is either a single token or a keyword
/// token followed by a tail.
/// </summary>
public struct SExpression
{
    internal static SExpression Create(Lexer.Token head, IReadOnlyList<SExpression> tail)
    {
        return new SExpression
        {
            IsCall = true,
            Head = head,
            Tail = tail
        };
    }

    internal static SExpression Create(Lexer.Token head)
    {
        return new SExpression
        {
            IsCall = false,
            Head = head,
            Tail = Array.Empty<SExpression>()
        };
    }

    /// <summary>
    /// Tests if this S-expression represents a call.
    /// </summary>
    public bool IsCall { get; private set; }

    /// <summary>
    /// Gets the keyword token that is the head of this S-expression if the S-expression is a call;
    /// otherwise, the token that corresponds to the S-expression itself.
    /// </summary>
    public Lexer.Token Head { get; private set; }

    /// <summary>
    /// Gets the S-expression's tail: a sequence of S-expressions that trail the S-expression's head.
    /// Note that this tail may be empty even for S-expressions that are calls.
    /// </summary>
    public IReadOnlyList<SExpression> Tail { get; private set; }

    /// <summary>
    /// Tells if this S-expression represents a single identifier token.
    /// </summary>
    public bool IsIdentifier => !IsCall && Head.Kind == Lexer.TokenKind.Identifier;

    /// <summary>
    /// Tells if this S-expression represents a single keyword token.
    /// </summary>
    public bool IsKeyword => !IsCall && Head.Kind == Lexer.TokenKind.Keyword;

    /// <summary>
    /// Tells if this S-expression represents a specific keyword
    /// </summary>
    /// <param name="keyword">The keyword to look for.</param>
    /// <returns><c>true</c> if this S-expression is keyword token that matches <paramref name="keyword"/>, <c>false</c> otherwise.</returns>
    public bool IsSpecificKeyword(string keyword)
    {
        return !IsCall && Head.Kind == Lexer.TokenKind.Keyword && (string)Head.Value == keyword;
    }

    /// <summary>
    /// Tests if this S-expression is a call to a keyword with a particular name.
    /// </summary>
    /// <param name="keyword">The keyword to check for.</param>
    /// <returns>
    /// <c>true</c> if the S-expression is a call to <paramref name="keyword"/>; otherwise, <c>false</c>.
    /// </returns>
    public bool IsCallTo(string keyword)
    {
        return IsCall && Head.Kind == Lexer.TokenKind.Keyword && (string)Head.Value == keyword;
    }

    /// <inheritdoc/>
    public override string ToString()
    {
        // A call renders as a parenthesized list headed by its keyword; a
        // plain token renders as its own text. (The two branches of this
        // ternary were previously swapped, so calls printed without their
        // parentheses and tail, and single tokens printed with them.)
        return IsCall
            ? $"({Head.Span.Text} {string.Join(" ", Tail)})"
            : Head.Span.Text;
    }
}
}
<|start_filename|>libwasm/TableSection.cs<|end_filename|>
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Wasm.Binary;
namespace Wasm
{
/// <summary>
/// Represents a table section in a WebAssembly file.
/// </summary>
public sealed class TableSection : Section
{
    /// <summary>
    /// Creates an empty table section.
    /// </summary>
    public TableSection()
        : this(Enumerable.Empty<TableType>())
    {
    }

    /// <summary>
    /// Creates a table section that contains the given table descriptions.
    /// </summary>
    /// <param name="tables">The list of table descriptions in this type section.</param>
    public TableSection(IEnumerable<TableType> tables)
        : this(tables, new byte[0])
    {
    }

    /// <summary>
    /// Creates a table section from table descriptions plus a trailing payload.
    /// </summary>
    /// <param name="tables">The list of table descriptions in this type section.</param>
    /// <param name="extraPayload">The additional payload for this section, as an array of bytes.</param>
    public TableSection(IEnumerable<TableType> tables, byte[] extraPayload)
    {
        Tables = new List<TableType>(tables);
        ExtraPayload = extraPayload;
    }

    /// <summary>
    /// Gets this table section's list of tables.
    /// </summary>
    /// <returns>The list of tables in this table section.</returns>
    public List<TableType> Tables { get; private set; }

    /// <summary>
    /// This section's additional payload.
    /// </summary>
    /// <returns>The additional payload, as an array of bytes.</returns>
    public byte[] ExtraPayload { get; set; }

    /// <inheritdoc/>
    public override SectionName Name => new SectionName(SectionCode.Table);

    /// <inheritdoc/>
    public override void WritePayloadTo(BinaryWasmWriter writer)
    {
        // The payload is a table count followed by the encoded tables
        // and any trailing bytes.
        writer.WriteVarUInt32((uint)Tables.Count);
        for (int i = 0; i < Tables.Count; i++)
        {
            Tables[i].WriteTo(writer);
        }
        writer.Writer.Write(ExtraPayload);
    }

    /// <inheritdoc/>
    public override void Dump(TextWriter writer)
    {
        writer.Write(Name.ToString());
        writer.Write("; number of entries: ");
        writer.Write(Tables.Count);
        writer.WriteLine();
        int index = 0;
        foreach (var table in Tables)
        {
            writer.Write("#");
            writer.Write(index);
            writer.Write(" -> ");
            table.Dump(writer);
            writer.WriteLine();
            index++;
        }
        if (ExtraPayload.Length > 0)
        {
            writer.Write("Extra payload size: ");
            writer.Write(ExtraPayload.Length);
            writer.WriteLine();
            DumpHelpers.DumpBytes(ExtraPayload, writer);
            writer.WriteLine();
        }
    }

    /// <summary>
    /// Reads a table section's payload from the given binary WebAssembly reader.
    /// </summary>
    /// <param name="header">The table section's header.</param>
    /// <param name="reader">A reader for a binary WebAssembly file.</param>
    /// <returns>A parsed table section.</returns>
    public static TableSection ReadSectionPayload(SectionHeader header, BinaryWasmReader reader)
    {
        // Remember where the payload starts so the leftover bytes can be captured.
        long payloadStart = reader.Position;
        uint tableCount = reader.ReadVarUInt32();
        var tableList = new List<TableType>((int)tableCount);
        for (uint i = 0; i < tableCount; i++)
        {
            tableList.Add(TableType.ReadFrom(reader));
        }
        return new TableSection(tableList, reader.ReadRemainingPayload(payloadStart, header));
    }
}
/// <summary>
/// Describes a table in a table section.
/// </summary>
public struct TableType
{
    /// <summary>
    /// Creates a table description from the given element type and limits.
    /// </summary>
    /// <param name="elementType">The table's element type.</param>
    /// <param name="limits">The table's limits.</param>
    public TableType(WasmType elementType, ResizableLimits limits)
    {
        ElementType = elementType;
        Limits = limits;
    }

    /// <summary>
    /// Gets the type of element in the table.
    /// </summary>
    /// <returns>The type of element in the table.</returns>
    public WasmType ElementType { get; private set; }

    /// <summary>
    /// Gets the table's limits.
    /// </summary>
    /// <returns>The table's limits.</returns>
    public ResizableLimits Limits { get; private set; }

    /// <summary>
    /// Writes this table description to the given binary WebAssembly file.
    /// </summary>
    /// <param name="writer">The writer for a binary WebAssembly file.</param>
    public void WriteTo(BinaryWasmWriter writer)
    {
        // Encoding order: element type first, then the limits.
        writer.WriteWasmType(ElementType);
        Limits.WriteTo(writer);
    }

    /// <summary>
    /// Writes a textual representation of this table description to the given writer.
    /// </summary>
    /// <param name="writer">The writer to which text is written.</param>
    public void Dump(TextWriter writer)
    {
        writer.Write("(elem_type: ");
        DumpHelpers.DumpWasmType(ElementType, writer);
        writer.Write(", limits: ");
        Limits.Dump(writer);
        writer.Write(")");
    }

    /// <summary>
    /// Reads a single table description from the given reader.
    /// </summary>
    /// <returns>The table description.</returns>
    public static TableType ReadFrom(BinaryWasmReader reader)
    {
        // Decoding order mirrors WriteTo: element type, then limits.
        var elementType = (WasmType)reader.ReadWasmType();
        var tableLimits = reader.ReadResizableLimits();
        return new TableType(elementType, tableLimits);
    }
}
}
<|start_filename|>libwasm/CodeSection.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using Wasm.Binary;
using Wasm.Instructions;
namespace Wasm
{
/// <summary>
/// A type of section that contains a body for every function in the module.
/// </summary>
public sealed class CodeSection : Section
{
    /// <summary>
    /// Creates an empty code section.
    /// </summary>
    public CodeSection()
    {
        Bodies = new List<FunctionBody>();
        ExtraPayload = new byte[0];
    }

    /// <summary>
    /// Creates a code section from a sequence of function bodies.
    /// </summary>
    /// <param name="bodies">The code section's function bodies.</param>
    public CodeSection(IEnumerable<FunctionBody> bodies)
        : this(bodies, new byte[0])
    {
    }

    /// <summary>
    /// Creates a code section from a sequence of function bodies and a
    /// trailing payload.
    /// </summary>
    /// <param name="bodies">The code section's function bodies.</param>
    /// <param name="extraPayload">
    /// A sequence of bytes that have no intrinsic meaning; they are part
    /// of the code section but are placed after the code section's actual contents.
    /// </param>
    public CodeSection(IEnumerable<FunctionBody> bodies, byte[] extraPayload)
    {
        Bodies = new List<FunctionBody>(bodies);
        ExtraPayload = extraPayload;
    }

    /// <inheritdoc/>
    public override SectionName Name => new SectionName(SectionCode.Code);

    /// <summary>
    /// Gets this code section's list of function bodies.
    /// </summary>
    /// <returns>A list of function bodies.</returns>
    public List<FunctionBody> Bodies { get; private set; }

    /// <summary>
    /// Gets this code section's additional payload.
    /// </summary>
    /// <returns>The additional payload, as an array of bytes.</returns>
    public byte[] ExtraPayload { get; set; }

    /// <inheritdoc/>
    public override void WritePayloadTo(BinaryWasmWriter writer)
    {
        // The payload is a body count followed by the (length-prefixed)
        // function bodies and any trailing bytes.
        writer.WriteVarUInt32((uint)Bodies.Count);
        for (int i = 0; i < Bodies.Count; i++)
        {
            Bodies[i].WriteTo(writer);
        }
        writer.Writer.Write(ExtraPayload);
    }

    /// <summary>
    /// Reads the code section with the given header.
    /// </summary>
    /// <param name="header">The section header.</param>
    /// <param name="reader">A reader for a binary WebAssembly file.</param>
    /// <returns>The parsed section.</returns>
    public static CodeSection ReadSectionPayload(
        SectionHeader header, BinaryWasmReader reader)
    {
        long payloadStart = reader.Position;

        // Parse the function bodies.
        uint bodyCount = reader.ReadVarUInt32();
        var bodies = new List<FunctionBody>();
        for (uint i = 0; i < bodyCount; i++)
        {
            bodies.Add(FunctionBody.ReadFrom(reader));
        }

        // Whatever is left of the payload becomes the extra payload.
        return new CodeSection(bodies, reader.ReadRemainingPayload(payloadStart, header));
    }

    /// <inheritdoc/>
    public override void Dump(TextWriter writer)
    {
        writer.Write(Name.ToString());
        writer.Write("; number of entries: ");
        writer.Write(Bodies.Count);
        writer.WriteLine();
        var bodyWriter = DumpHelpers.CreateIndentedTextWriter(writer);
        for (int i = 0; i < Bodies.Count; i++)
        {
            writer.Write("#{0}: ", i);
            bodyWriter.WriteLine();
            Bodies[i].Dump(bodyWriter);
        }
        if (ExtraPayload.Length > 0)
        {
            writer.Write("Extra payload size: ");
            writer.Write(ExtraPayload.Length);
            writer.WriteLine();
            DumpHelpers.DumpBytes(ExtraPayload, writer);
            writer.WriteLine();
        }
    }
}
/// <summary>
/// An entry in a code section; defines a function body.
/// </summary>
public sealed class FunctionBody
{
    /// <summary>
    /// Creates a function body from a list of local entries and a sequence
    /// of instructions.
    /// </summary>
    /// <param name="locals">The list of local entries.</param>
    /// <param name="body">The instructions that make up the function's body.</param>
    public FunctionBody(IEnumerable<LocalEntry> locals, IEnumerable<Instruction> body)
        : this(locals, body, new byte[0])
    { }

    /// <summary>
    /// Creates a function body from local entries, instructions and an
    /// additional payload.
    /// </summary>
    /// <param name="locals">The list of local entries.</param>
    /// <param name="body">The instructions that make up the function's body.</param>
    /// <param name="extraPayload">
    /// The function body's extra payload, which is placed right after the function body.
    /// </param>
    public FunctionBody(IEnumerable<LocalEntry> locals, IEnumerable<Instruction> body, byte[] extraPayload)
    {
        Locals = new List<LocalEntry>(locals);
        BodyInstructions = new List<Instruction>(body);
        ExtraPayload = extraPayload;
    }

    /// <summary>
    /// Gets the list of local entries for this function body.
    /// </summary>
    /// <returns>The list of local entries.</returns>
    public List<LocalEntry> Locals { get; private set; }

    /// <summary>
    /// Gets the function body's list of instructions.
    /// </summary>
    /// <returns>The list of function body instructions.</returns>
    public List<Instruction> BodyInstructions { get; private set; }

    /// <summary>
    /// Gets this function body's additional payload.
    /// </summary>
    /// <returns>
    /// The additional payload, as an array of bytes.
    /// <c>null</c> indicates an empty additional payload.
    /// </returns>
    public byte[] ExtraPayload { get; set; }

    /// <summary>
    /// Checks if this function body has at least one byte of additional payload.
    /// </summary>
    public bool HasExtraPayload => ExtraPayload != null && ExtraPayload.Length > 0;

    /// <summary>
    /// Writes this function body to the given WebAssembly file writer.
    /// </summary>
    /// <param name="writer">The WebAssembly file writer.</param>
    public void WriteTo(BinaryWasmWriter writer)
    {
        // Function bodies are always length-prefixed in the binary encoding.
        writer.WriteLengthPrefixed(WriteBodyContents);
    }

    // Writes the body's contents (without the length prefix): the local
    // entry count, the local entries, the instructions encoded as the
    // contents of an anonymous block, and finally any extra payload.
    private void WriteBodyContents(BinaryWasmWriter writer)
    {
        writer.WriteVarUInt32((uint)Locals.Count);
        for (int i = 0; i < Locals.Count; i++)
        {
            Locals[i].WriteTo(writer);
        }

        Operators.Block.Create(WasmType.Empty, BodyInstructions).WriteContentsTo(writer);

        if (HasExtraPayload)
        {
            // Trailing payload bytes, if any, come last.
            writer.Writer.Write(ExtraPayload);
        }
    }

    /// <summary>
    /// Reads a function body from the given WebAssembly file reader.
    /// </summary>
    /// <param name="reader">The WebAssembly file reader to use.</param>
    /// <returns>A function body.</returns>
    public static FunctionBody ReadFrom(BinaryWasmReader reader)
    {
        // The body is length-prefixed; remember where its payload starts.
        uint bodyLength = reader.ReadVarUInt32();
        long bodyStart = reader.Position;

        // Parse the local entries.
        uint localCount = reader.ReadVarUInt32();
        var locals = new List<LocalEntry>((int)localCount);
        for (uint i = 0; i < localCount; i++)
        {
            locals.Add(LocalEntry.ReadFrom(reader));
        }

        // The instructions form an implicit block.
        var block = Operators.Block.ReadBlockContents(WasmType.Empty, reader);

        // Whatever is left over becomes the extra payload.
        var trailingBytes = reader.ReadRemainingPayload(bodyStart, bodyLength);
        return new FunctionBody(locals, block.Contents, trailingBytes);
    }

    /// <summary>
    /// Writes a textual representation of this function body to the given writer.
    /// </summary>
    /// <param name="writer">The writer to which text is written.</param>
    public void Dump(TextWriter writer)
    {
        if (Locals.Count > 0)
        {
            writer.Write("- Local entries:");
            var localWriter = DumpHelpers.CreateIndentedTextWriter(writer);
            for (int i = 0; i < Locals.Count; i++)
            {
                localWriter.WriteLine();
                localWriter.Write("#{0}: ", i);
                Locals[i].Dump(localWriter);
            }
            writer.WriteLine();
        }
        else
        {
            writer.WriteLine("- No local entries");
        }

        if (BodyInstructions.Count > 0)
        {
            writer.Write("- Function body:");
            var instrWriter = DumpHelpers.CreateIndentedTextWriter(writer);
            for (int i = 0; i < BodyInstructions.Count; i++)
            {
                instrWriter.WriteLine();
                BodyInstructions[i].Dump(instrWriter);
            }
            writer.WriteLine();
        }
        else
        {
            writer.WriteLine("- Empty function body");
        }

        if (HasExtraPayload)
        {
            writer.Write("- Extra payload size: ");
            writer.Write(ExtraPayload.Length);
            writer.WriteLine();
            DumpHelpers.DumpBytes(ExtraPayload, writer);
            writer.WriteLine();
        }
    }
}
/// <summary>
/// Describes a local entry. Each local entry declares a number of local variables
/// of a given type. It is legal to have several entries with the same type.
/// </summary>
public struct LocalEntry : IEquatable<LocalEntry>
{
    /// <summary>
    /// Creates a local entry that declares <paramref name="localCount"/>
    /// variables of type <paramref name="localType"/>.
    /// </summary>
    /// <param name="localType">The type of the variables to define.</param>
    /// <param name="localCount">The number of local variables to define.</param>
    public LocalEntry(WasmValueType localType, uint localCount)
    {
        LocalType = localType;
        LocalCount = localCount;
    }

    /// <summary>
    /// Gets the type of the local variables declared by this local entry.
    /// </summary>
    /// <returns>The type of the local variables declared by this local entry.</returns>
    public WasmValueType LocalType { get; private set; }

    /// <summary>
    /// Gets the number of local variables defined by this local entry.
    /// </summary>
    /// <returns>The number of local variables defined by this local entry.</returns>
    public uint LocalCount { get; private set; }

    /// <summary>
    /// Writes this local entry to the given WebAssembly file writer.
    /// </summary>
    /// <param name="writer">The WebAssembly file writer.</param>
    public void WriteTo(BinaryWasmWriter writer)
    {
        // Binary encoding is count-first, then type.
        writer.WriteVarUInt32(LocalCount);
        writer.WriteWasmValueType(LocalType);
    }

    /// <summary>
    /// Reads a local entry from the given WebAssembly file reader.
    /// </summary>
    /// <param name="reader">The WebAssembly file reader.</param>
    /// <returns>A local entry.</returns>
    public static LocalEntry ReadFrom(BinaryWasmReader reader)
    {
        // Decoding order mirrors WriteTo: count, then type.
        uint localCount = reader.ReadVarUInt32();
        var localType = reader.ReadWasmValueType();
        return new LocalEntry(localType, localCount);
    }

    /// <summary>
    /// Writes a textual representation of this local entry to the given writer.
    /// </summary>
    /// <param name="writer">The writer to which text is written.</param>
    public void Dump(TextWriter writer)
    {
        writer.Write(LocalCount);
        writer.Write(" x ");
        DumpHelpers.DumpWasmType(LocalType, writer);
    }

    /// <inheritdoc/>
    public override string ToString()
    {
        // StringWriter.ToString() returns the accumulated text.
        var textWriter = new StringWriter();
        Dump(textWriter);
        return textWriter.ToString();
    }

    /// <inheritdoc/>
    public override bool Equals(object obj)
    {
        if (obj is LocalEntry)
        {
            return Equals((LocalEntry)obj);
        }
        return false;
    }

    /// <inheritdoc/>
    public override int GetHashCode()
    {
        // Pack the type into the upper bits and the count into the lower bits.
        return ((int)LocalType << 16) | (int)LocalCount;
    }

    /// <summary>
    /// Checks if this local entry declares the same type and
    /// number of locals as the given local entry.
    /// </summary>
    /// <param name="other">The other local entry.</param>
    /// <returns>
    /// <c>true</c> if this local entry is the same as the given entry; otherwise, <c>false</c>.
    /// </returns>
    public bool Equals(LocalEntry other)
    {
        return other.LocalType == LocalType && other.LocalCount == LocalCount;
    }
}
}
<|start_filename|>libwasm/Binary/BinaryWasmWriter.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace Wasm.Binary
{
/// <summary>
/// A writer that writes the binary WebAssembly format.
/// </summary>
public class BinaryWasmWriter
{
    /// <summary>
    /// Initializes a new instance of the <see cref="Wasm.Binary.BinaryWasmWriter"/> class
    /// that encodes strings as UTF-8.
    /// </summary>
    /// <param name="writer">The binary writer for a WebAssembly file.</param>
    public BinaryWasmWriter(BinaryWriter writer)
        : this(writer, UTF8Encoding.UTF8)
    { }

    /// <summary>
    /// Initializes a new instance of the <see cref="Wasm.Binary.BinaryWasmWriter"/> class.
    /// </summary>
    /// <param name="writer">The binary writer for a WebAssembly file.</param>
    /// <param name="stringEncoding">The encoding for strings in the WebAssembly file.</param>
    public BinaryWasmWriter(BinaryWriter writer, Encoding stringEncoding)
    {
        this.Writer = writer;
        this.StringEncoding = stringEncoding;
    }

    /// <summary>
    /// The binary writer for a WebAssembly file.
    /// </summary>
    public BinaryWriter Writer { get; private set; }

    /// <summary>
    /// The encoding that is used to write strings.
    /// </summary>
    /// <returns>The string encoding.</returns>
    public Encoding StringEncoding { get; private set; }

    /// <summary>
    /// Writes an unsigned LEB128 variable-length integer, limited to 64 bits.
    /// </summary>
    /// <param name="value">The value to encode.</param>
    /// <returns>The number of bytes used to encode the integer.</returns>
    public int WriteVarUInt64(ulong value)
    {
        // C# translation of code borrowed from Wikipedia article:
        // https://en.wikipedia.org/wiki/LEB128
        int count = 0;
        do
        {
            // Take the next seven low-order bits; the high bit of each
            // emitted byte flags whether more bytes follow.
            byte b = (byte)(value & 0x7F);
            value >>= 7;
            if (value != 0)
                b |= 0x80;

            Writer.Write(b);
            count++;
        } while (value != 0);
        return count;
    }

    /// <summary>
    /// Writes an unsigned LEB128 variable-length integer, limited to 32 bits.
    /// </summary>
    /// <returns>The number of bytes used to encode the integer.</returns>
    public int WriteVarUInt32(uint value)
    {
        // Widening to 64 bits is lossless for unsigned values, so the
        // 64-bit encoder produces the same bytes.
        return WriteVarUInt64(value);
    }

    /// <summary>
    /// Writes an unsigned LEB128 variable-length integer, limited to 7 bits.
    /// </summary>
    /// <returns>The number of bytes used to encode the integer.</returns>
    public int WriteVarUInt7(byte value)
    {
        return WriteVarUInt32(value);
    }

    /// <summary>
    /// Writes an unsigned LEB128 variable-length integer, limited to one bit.
    /// </summary>
    /// <returns>The number of bytes used to encode the integer.</returns>
    public int WriteVarUInt1(bool value)
    {
        return WriteVarUInt32(value ? 1u : 0u);
    }

    /// <summary>
    /// Writes a signed LEB128 variable-length integer, limited to 64 bits.
    /// </summary>
    /// <returns>The number of bytes used to encode the integer.</returns>
    public int WriteVarInt64(long value)
    {
        // C# translation of code borrowed from Wikipedia article:
        // https://en.wikipedia.org/wiki/LEB128
        int count = 0;
        bool more = true;
        while (more)
        {
            byte b = (byte)(value & 0x7F);
            // Arithmetic shift: the sign bit is replicated, so a negative
            // value converges to -1 rather than 0.
            value >>= 7;

            // Encoding is complete once the remaining value is all sign
            // bits and the emitted byte's bit 6 agrees with that sign.
            if ((value == 0 && ((b & 0x40) == 0)) || (value == -1 && ((b & 0x40) == 0x40)))
                more = false;
            else
                // set high order bit of byte
                b |= 0x80;

            Writer.Write(b);
            count++;
        }
        return count;
    }

    /// <summary>
    /// Writes a signed LEB128 variable-length integer, limited to 32 bits.
    /// </summary>
    /// <returns>The number of bytes used to encode the integer.</returns>
    public int WriteVarInt32(int value)
    {
        return WriteVarInt64(value);
    }

    /// <summary>
    /// Writes a signed LEB128 variable-length integer, limited to 7 bits.
    /// </summary>
    /// <returns>The number of bytes used to encode the integer.</returns>
    public int WriteVarInt7(sbyte value)
    {
        return WriteVarInt64(value);
    }

    /// <summary>
    /// Writes a 32-bit floating-point number.
    /// </summary>
    /// <param name="value">The floating-point number to write.</param>
    /// <returns>The number of bytes used to encode the floating-point number.</returns>
    public int WriteFloat32(float value)
    {
        Writer.Write(value);
        return 4;
    }

    /// <summary>
    /// Writes a 64-bit floating-point number.
    /// </summary>
    /// <param name="value">The floating-point number to write.</param>
    /// <returns>The number of bytes used to encode the floating-point number.</returns>
    public int WriteFloat64(double value)
    {
        Writer.Write(value);
        return 8;
    }

    /// <summary>
    /// Writes a WebAssembly language type, encoded as a signed 7-bit LEB128 value.
    /// </summary>
    /// <param name="value">The WebAssembly language type to write.</param>
    /// <returns>The number of bytes used to encode the type.</returns>
    public int WriteWasmType(WasmType value)
    {
        return WriteVarInt7((sbyte)value);
    }

    /// <summary>
    /// Writes a WebAssembly value type, encoded as a signed 7-bit LEB128 value.
    /// </summary>
    /// <param name="value">The WebAssembly value type to write.</param>
    /// <returns>The number of bytes used to encode the type.</returns>
    public int WriteWasmValueType(WasmValueType value)
    {
        return WriteVarInt7((sbyte)value);
    }

    /// <summary>
    /// Writes a length-prefixed string to the WebAssembly file:
    /// a byte count followed by the encoded bytes.
    /// </summary>
    /// <param name="value">The string to write to the file.</param>
    public void WriteString(string value)
    {
        byte[] buffer = StringEncoding.GetBytes(value);
        WriteVarUInt32((uint)buffer.Length);
        Writer.Write(buffer);
    }

    /// <summary>
    /// Writes data and prefixes it with a variable-length 32-bit unsigned integer
    /// that specifies the number of bytes written.
    /// </summary>
    /// <param name="writeData">Writes data to a WebAssembly file.</param>
    public void WriteLengthPrefixed(Action<BinaryWasmWriter> writeData)
    {
        // The length is not known until the data has been written, so the
        // data is buffered in memory first.
        using (var memStream = new MemoryStream())
        {
            var innerWriter = new BinaryWasmWriter(
                new BinaryWriter(memStream),
                StringEncoding);

            // Write the contents to the memory stream.
            writeData(innerWriter);

            // Save the number of bytes we've written.
            var numberOfBytes = memStream.Position;

            // Seek to the beginning of the memory stream.
            memStream.Seek(0, SeekOrigin.Begin);

            // Write the size of the contents to follow, in bytes.
            WriteVarUInt32((uint)numberOfBytes);

            // Write the memory stream's data to the writer's stream.
            Writer.Write(memStream.GetBuffer(), 0, (int)numberOfBytes);
        }
    }

    /// <summary>
    /// Writes a WebAssembly version header.
    /// </summary>
    /// <param name="header">The WebAssembly version header to write.</param>
    public void WriteVersionHeader(VersionHeader header)
    {
        Writer.Write(header.Magic);
        Writer.Write(header.Version);
    }

    /// <summary>
    /// Writes a WebAssembly section, including its header.
    /// </summary>
    /// <param name="value">The WebAssembly section to write.</param>
    public void WriteSection(Section value)
    {
        // A section is its code followed by its length-prefixed payload.
        WriteVarInt7((sbyte)value.Name.Code);
        WriteLengthPrefixed(value.WriteCustomNameAndPayloadTo);
    }

    /// <summary>
    /// Writes a WebAssembly file: the version header followed by each section.
    /// </summary>
    /// <param name="file">The WebAssembly file to write.</param>
    public void WriteFile(WasmFile file)
    {
        WriteVersionHeader(file.Header);
        foreach (var section in file.Sections)
        {
            WriteSection(section);
        }
    }
}
}
<|start_filename|>unit-tests/Optimize/FunctionBodyOptimizationTests.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using Loyc.MiniTest;
using Wasm.Instructions;
namespace Wasm.Optimize
{
[TestFixture]
public class FunctionBodyOptimizationTests
{
    // The four WebAssembly value types, indexed by a random draw in [0, 4).
    private static readonly WasmValueType[] allValueTypes = new[]
    {
        WasmValueType.Int32,
        WasmValueType.Int64,
        WasmValueType.Float32,
        WasmValueType.Float64
    };

    // Picks one of the four WebAssembly value types at random.
    private static WasmValueType GenerateWasmValueType(Random rand)
    {
        return allValueTypes[rand.Next(4)];
    }

    // Produces 'entryCount' random local entries whose counts are drawn
    // from [0, maxEntrySize).
    private static IEnumerable<LocalEntry> GenerateLocalEntries(Random rand, int entryCount, int maxEntrySize)
    {
        var entries = new List<LocalEntry>(entryCount);
        for (int i = 0; i < entryCount; i++)
        {
            entries.Add(new LocalEntry(GenerateWasmValueType(rand), (uint)rand.Next(maxEntrySize)));
        }
        return entries;
    }

    [Test]
    public void CompressLocals()
    {
        var rand = new Random();
        // Generate random local entries. Create a compressed function body
        // from them as well as an uncompressed function body. Then check that
        // both bodies declare the same locals.
        const int testCount = 100;
        for (int i = 0; i < testCount; i++)
        {
            var localEntries = GenerateLocalEntries(rand, rand.Next(50), 10);
            var compressed = new FunctionBody(localEntries, Enumerable.Empty<Instruction>());
            compressed.CompressLocalEntries();
            var uncompressed = new FunctionBody(localEntries, Enumerable.Empty<Instruction>());
            AssertEquivalentLocals(compressed, uncompressed);
        }
    }

    // Asserts that two function bodies declare equivalent locals by
    // expanding copies of both and comparing the expanded entries.
    private static void AssertEquivalentLocals(FunctionBody first, FunctionBody second)
    {
        var firstCopy = new FunctionBody(first.Locals, first.BodyInstructions);
        var secondCopy = new FunctionBody(second.Locals, second.BodyInstructions);
        firstCopy.ExpandLocalEntries();
        secondCopy.ExpandLocalEntries();
        Assert.IsTrue(Enumerable.SequenceEqual<LocalEntry>(firstCopy.Locals, secondCopy.Locals));
    }
}
}
<|start_filename|>libwasm/Interpret/Jit/CompilerContext.cs<|end_filename|>
using System.Collections.Generic;
using System.Reflection.Emit;
namespace Wasm.Interpret.Jit
{
/// <summary>
/// Context that is used when compiling a function body.
/// </summary>
public sealed class CompilerContext
{
    /// <summary>
    /// Creates a compiler context.
    /// </summary>
    /// <param name="compiler">The JIT compiler itself.</param>
    /// <param name="localTypes">A list of all local variable types including parameters.</param>
    /// <param name="parameterCount">The number of parameters defined for the function being compiled.</param>
    /// <param name="locals">A mapping of indices to local variables.</param>
    public CompilerContext(
        JitCompiler compiler,
        IReadOnlyList<WasmValueType> localTypes,
        int parameterCount,
        IReadOnlyDictionary<uint, LocalBuilder> locals)
    {
        Compiler = compiler;
        LocalTypes = localTypes;
        ParameterCount = parameterCount;
        Locals = locals;
        StackContents = new Stack<WasmValueType>();
    }

    /// <summary>
    /// Gets the JIT compiler that initiated the compilation.
    /// </summary>
    /// <value>A JIT compiler.</value>
    public JitCompiler Compiler { get; private set; }

    /// <summary>
    /// Gets the number of parameters defined for the function being compiled.
    /// </summary>
    /// <value>A parameter count.</value>
    public int ParameterCount { get; private set; }

    /// <summary>
    /// Gets a list of local variable types. All local variables including
    /// parameters appear in this list.
    /// </summary>
    /// <value>A list of all local variable types.</value>
    public IReadOnlyList<WasmValueType> LocalTypes { get; private set; }

    /// <summary>
    /// Gets a mapping of local variable indices to local variables. Note that
    /// only true local variables appear in this list; arguments do not.
    /// </summary>
    /// <value>A mapping of indices to local variables.</value>
    public IReadOnlyDictionary<uint, LocalBuilder> Locals { get; internal set; }

    /// <summary>
    /// Gets the types of the values on the evaluation stack.
    /// </summary>
    /// <value>A stack of types.</value>
    public Stack<WasmValueType> StackContents { get; private set; }

    /// <summary>
    /// Informs the compiler context that a value is pushed onto the stack at the
    /// current point in the code generation process.
    /// </summary>
    /// <param name="type">The type of the value that is pushed on the stack.</param>
    public void Push(WasmValueType type)
    {
        StackContents.Push(type);
    }

    /// <summary>
    /// Informs the compiler context that a number of values are popped from the
    /// stack at this point. Returns their types.
    /// </summary>
    /// <param name="count">The number of values to pop from the stack.</param>
    /// <returns>
    /// The types of the <paramref name="count"/> topmost values on the stack, in the
    /// order those types were pushed onto the stack.
    /// </returns>
    public IReadOnlyList<WasmValueType> Pop(int count)
    {
        // Fill the result back to front: the first value popped is the
        // topmost, which was pushed last and hence belongs at the end.
        var types = new WasmValueType[count];
        for (int i = count - 1; i >= 0; i--)
        {
            types[i] = StackContents.Pop();
        }
        return types;
    }

    /// <summary>
    /// Informs the compiler context that a value is popped from the stack
    /// at this point. Returns its type.
    /// </summary>
    /// <returns>
    /// The type of the topmost value on the stack.
    /// </returns>
    public WasmValueType Pop()
    {
        return StackContents.Pop();
    }
}
}
<|start_filename|>libwasm/Interpret/FunctionTable.cs<|end_filename|>
using System.Collections.Generic;
namespace Wasm.Interpret
{
/// <summary>
/// Defines a table of function values.
/// </summary>
public sealed class FunctionTable
{
    private List<FunctionDefinition> contents;

    /// <summary>
    /// Creates a function table from the given resizable limits.
    /// Every slot initially holds a placeholder that traps when invoked.
    /// </summary>
    /// <param name="limits">The table's limits.</param>
    public FunctionTable(ResizableLimits limits)
    {
        Limits = limits;
        contents = new List<FunctionDefinition>((int)limits.Initial);

        // Fill the table with a trapping placeholder until real entries
        // are installed.
        var placeholder = new ThrowFunctionDefinition(
            new WasmValueType[0],
            new WasmValueType[0],
            new TrapException("Indirect call target not initialized yet.", TrapException.SpecMessages.UninitializedElement));
        for (uint i = 0; i < limits.Initial; i++)
        {
            contents.Add(placeholder);
        }
    }

    /// <summary>
    /// Gets this function table's limits.
    /// </summary>
    /// <returns>The function table's limits.</returns>
    public ResizableLimits Limits { get; private set; }

    /// <summary>
    /// Gets or sets the function definition at the given index in the table.
    /// </summary>
    public FunctionDefinition this[uint index]
    {
        get
        {
            CheckBounds(index);
            return contents[(int)index];
        }
        set
        {
            CheckBounds(index);
            contents[(int)index] = value;
        }
    }

    // Throws a trap if 'index' does not refer to an existing table slot.
    private void CheckBounds(uint index)
    {
        if (index >= contents.Count)
        {
            throw new TrapException(
                $"Cannot access element with index {index} in a function table of size {contents.Count}.",
                TrapException.SpecMessages.UndefinedElement);
        }
    }

    /// <summary>
    /// Gets the number of elements in the table.
    /// </summary>
    /// <returns>An element count.</returns>
    public int Count => contents.Count;
}
}
<|start_filename|>libwasm/Interpret/SpecTestImporter.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
namespace Wasm.Interpret
{
/// <summary>
/// Imports values from the 'spectest' environment.
/// </summary>
public sealed class SpecTestImporter : IImporter
{
/// <summary>
/// Creates an importer for the 'spectest' environment.
/// </summary>
public SpecTestImporter()
: this(Environment.NewLine)
{ }
/// <summary>
/// Creates an importer for the 'spectest' environment.
/// </summary>
/// <param name="printWriter">
/// A text writer to use for print calls.
/// </param>
public SpecTestImporter(TextWriter printWriter)
: this(printWriter.NewLine, printWriter)
{ }
/// <summary>
/// Creates an importer for the 'spectest' environment.
/// </summary>
/// <param name="printSuffix">
/// A string that is written to the console at the
/// end of a print call.
/// </param>
public SpecTestImporter(string printSuffix)
: this(printSuffix, Console.Out)
{ }
/// <summary>
/// Creates an importer for the 'spectest' environment.
/// </summary>
/// <param name="printSuffix">
/// A string that is written to <paramref name="printWriter"/> at the
/// end of a print call.
/// </param>
/// <param name="printWriter">
/// A text writer to use for print calls.
/// </param>
public SpecTestImporter(string printSuffix, TextWriter printWriter)
{
this.PrintSuffix = printSuffix;
this.PrintWriter = printWriter;
this.globalI32 = Variable.Create<int>(
WasmValueType.Int32,
false,
666);
this.globalF32 = Variable.Create<float>(
WasmValueType.Float32,
false,
666.0f);
this.globalF64 = Variable.Create<double>(
WasmValueType.Float64,
false,
666.0);
}
/// <summary>
/// Gets the string that is written to the console at the
/// end of a print call.
/// </summary>
/// <returns>The print suffix.</returns>
public string PrintSuffix { get; private set; }
/// <summary>
/// Gets the text writer that is used for print calls.
/// </summary>
/// <value>A text writer.</value>
public TextWriter PrintWriter { get; private set; }
private Variable globalI32, globalF32, globalF64;
/// <inheritdoc/>
public FunctionDefinition ImportFunction(
ImportedFunction description, FunctionType signature)
{
switch (description.FieldName)
{
case "print":
case "print_i32":
case "print_i32_f32":
case "print_f64_f64":
case "print_f32":
case "print_f64":
return new SpecTestPrintFunctionDefinition(
signature.ParameterTypes,
signature.ReturnTypes,
PrintSuffix,
PrintWriter);
default:
return null;
}
}
/// <inheritdoc/>
public Variable ImportGlobal(ImportedGlobal description)
{
    // Resolve the pre-created spec test globals by field name.
    var fieldName = description.FieldName;
    if (fieldName == "global_i32")
    {
        return globalI32;
    }
    else if (fieldName == "global_f32")
    {
        return globalF32;
    }
    else if (fieldName == "global_f64")
    {
        return globalF64;
    }
    else
    {
        // Unknown field name: let the caller handle the failed import.
        return null;
    }
}
/// <inheritdoc/>
public LinearMemory ImportMemory(ImportedMemory description)
{
    // The spec test environment exposes a single linear memory named
    // 'memory', sized at one initial page with a two-page maximum.
    return description.FieldName == "memory"
        ? new LinearMemory(new ResizableLimits(1, 2))
        : null;
}
/// <inheritdoc/>
public FunctionTable ImportTable(ImportedTable description)
{
    // The spec test environment exposes a single function table named
    // 'table', with ten initial entries and a maximum of twenty.
    return description.FieldName == "table"
        ? new FunctionTable(new ResizableLimits(10, 20))
        : null;
}
}
/// <summary>
/// An implementation of the 'spectest.print' function.
/// </summary>
internal sealed class SpecTestPrintFunctionDefinition : FunctionDefinition
{
    /// <summary>
    /// Creates a 'spectest.print' implementation with the given signature.
    /// </summary>
    /// <param name="parameterTypes">The print function's parameter types.</param>
    /// <param name="returnTypes">The print function's return types.</param>
    /// <param name="printSuffix">A string written after each print call.</param>
    /// <param name="printWriter">The writer that receives printed output.</param>
    public SpecTestPrintFunctionDefinition(
        IReadOnlyList<WasmValueType> parameterTypes,
        IReadOnlyList<WasmValueType> returnTypes,
        string printSuffix,
        TextWriter printWriter)
    {
        this.parameterTypeList = parameterTypes;
        this.returnTypeList = returnTypes;
        this.PrintSuffix = printSuffix;
        this.PrintWriter = printWriter;
    }

    // Backing storage for the ParameterTypes and ReturnTypes overrides.
    private IReadOnlyList<WasmValueType> parameterTypeList;
    private IReadOnlyList<WasmValueType> returnTypeList;

    /// <summary>
    /// Gets the string that is written to the console at the
    /// end of a print call.
    /// </summary>
    /// <returns>The print suffix.</returns>
    public string PrintSuffix { get; private set; }

    /// <summary>
    /// Gets the text writer that receives printed output.
    /// </summary>
    /// <returns>A text writer.</returns>
    public TextWriter PrintWriter { get; private set; }

    /// <inheritdoc/>
    public override IReadOnlyList<WasmValueType> ParameterTypes => parameterTypeList;

    /// <inheritdoc/>
    public override IReadOnlyList<WasmValueType> ReturnTypes => returnTypeList;

    /// <inheritdoc/>
    public override IReadOnlyList<object> Invoke(IReadOnlyList<object> arguments, uint callStackDepth = 0)
    {
        // Write the arguments, separated by single spaces, then the suffix.
        var separator = "";
        foreach (var argument in arguments)
        {
            PrintWriter.Write(separator);
            PrintWriter.Write(argument);
            separator = " ";
        }
        PrintWriter.Write(PrintSuffix);

        // Produce a default value for every result the signature demands.
        var resultValues = new object[ReturnTypes.Count];
        for (int j = 0; j < resultValues.Length; j++)
        {
            resultValues[j] = Variable.GetDefaultValue(ReturnTypes[j]);
        }
        return resultValues;
    }
}
}
<|start_filename|>libwasm/Interpret/WasmFunctionDefinition.cs<|end_filename|>
using System;
using System.Collections.Generic;
namespace Wasm.Interpret
{
/// <summary>
/// Represents a WebAssembly function definition.
/// </summary>
public sealed class WasmFunctionDefinition : FunctionDefinition
{
    /// <summary>
    /// Creates a WebAssembly function definition from the given signature,
    /// function body and declaring module.
    /// </summary>
    /// <param name="signature">The function's signature.</param>
    /// <param name="body">The function's body.</param>
    /// <param name="module">The declaring module.</param>
    public WasmFunctionDefinition(
        FunctionType signature,
        FunctionBody body,
        ModuleInstance module)
    {
        this.Signature = signature;
        this.body = body;
        this.Module = module;
    }

    /// <summary>
    /// Gets the function's signature.
    /// </summary>
    /// <returns>The function's signature.</returns>
    public FunctionType Signature { get; private set; }

    /// <summary>
    /// The function's body.
    /// </summary>
    private FunctionBody body;

    /// <summary>
    /// Gets the module that owns this function definition.
    /// </summary>
    /// <returns>The declaring module.</returns>
    public ModuleInstance Module { get; private set; }

    /// <inheritdoc/>
    public override IReadOnlyList<WasmValueType> ParameterTypes => Signature.ParameterTypes;

    /// <inheritdoc/>
    public override IReadOnlyList<WasmValueType> ReturnTypes => Signature.ReturnTypes;

    /// <inheritdoc/>
    public override IReadOnlyList<object> Invoke(IReadOnlyList<object> arguments, uint callStackDepth = 0)
    {
        var locals = new List<Variable>();

        // Check argument types and create parameter variables.
        if (Signature.ParameterTypes.Count != arguments.Count)
        {
            throw new WasmException(
                "Function arity mismatch: function has " + Signature.ParameterTypes.Count +
                " parameters and is given " + arguments.Count + " arguments.");
        }

        // Turn each argument into a variable.
        for (int i = 0; i < Signature.ParameterTypes.Count; i++)
        {
            locals.Add(Variable.Create<object>(Signature.ParameterTypes[i], true, arguments[i]));
        }

        // Turn each local into a variable.
        foreach (var localEntry in body.Locals)
        {
            for (int i = 0; i < localEntry.LocalCount; i++)
            {
                locals.Add(Variable.CreateDefault(localEntry.LocalType, true));
            }
        }

        // Interpret the function body.
        var context = InterpretBody(callStackDepth, locals);

        // Check return types.
        var retVals = context.ReturnValues;
        if (retVals.Count != Signature.ReturnTypes.Count)
        {
            throw new WasmException(
                "Return value arity mismatch: function expects " + Signature.ReturnTypes.Count +
                " return values but is given " + retVals.Count + " return values.");
        }

        for (int i = 0; i < retVals.Count; i++)
        {
            if (!Variable.IsInstanceOf<object>(retVals[i], Signature.ReturnTypes[i]))
            {
                throw new WasmException(
                    "Return type mismatch: function has return type '" +
                    Signature.ReturnTypes[i].ToString() +
                    "' but is given a return value of type '" +
                    retVals[i].GetType().Name + "'.");
            }
        }

        return retVals;
    }

    /// <summary>
    /// Interprets this function's body, translating CLR arithmetic exceptions
    /// to WebAssembly traps if the module's policy requests it.
    /// </summary>
    /// <param name="callStackDepth">The call stack depth at the call site.</param>
    /// <param name="locals">The function's local variables, including its arguments.</param>
    /// <returns>The interpreter context used to run the body.</returns>
    private InterpreterContext InterpretBody(uint callStackDepth, List<Variable> locals)
    {
        if (Module.Policy.TranslateExceptions)
        {
            try
            {
                return InterpretBodyImpl(callStackDepth, locals);
            }
            catch (DivideByZeroException ex)
            {
                throw new TrapException(ex.Message, TrapException.SpecMessages.IntegerDivideByZero);
            }
            catch (OverflowException ex)
            {
                throw new TrapException(ex.Message, TrapException.SpecMessages.IntegerOverflow);
            }
        }
        else
        {
            return InterpretBodyImpl(callStackDepth, locals);
        }
    }

    /// <summary>
    /// Runs this function's body instruction by instruction.
    /// </summary>
    /// <param name="callStackDepth">The call stack depth at the call site.</param>
    /// <param name="locals">The function's local variables, including its arguments.</param>
    /// <returns>The interpreter context used to run the body.</returns>
    private InterpreterContext InterpretBodyImpl(uint callStackDepth, List<Variable> locals)
    {
        var context = new InterpreterContext(Module, ReturnTypes, locals, Module.Policy, callStackDepth + 1);
        var interpreter = Module.Interpreter;
        foreach (var instruction in body.BodyInstructions)
        {
            interpreter.Interpret(instruction, context);
            if (context.BreakRequested)
            {
                // Functions can use a break to return. This acts exactly like
                // a regular return.
                OperatorImpls.Return(context);
                break;
            }
        }
        context.Return();
        return context;
    }
}
}
<|start_filename|>libwasm/Interpret/TracingInstructionInterpreter.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using Wasm.Instructions;
namespace Wasm.Interpret
{
/// <summary>
/// A type of interpreter that creates an execution trace as it runs instructions.
/// </summary>
public class TracingInstructionInterpreter : InstructionInterpreter
{
    /// <summary>
    /// Creates a tracing instruction interpreter from the given inner interpreter
    /// and a trace writer.
    /// </summary>
    /// <param name="interpreter">The inner interpreter that is used to run instructions.</param>
    /// <param name="traceWriter">The text writer to which execution traces are written.</param>
    public TracingInstructionInterpreter(
        InstructionInterpreter interpreter, TextWriter traceWriter)
    {
        this.TraceWriter = traceWriter;
        this.Interpreter = interpreter;
    }

    /// <summary>
    /// Gets the inner interpreter that is used to run instructions.
    /// </summary>
    /// <returns>The instruction interpreter.</returns>
    public InstructionInterpreter Interpreter { get; private set; }

    /// <summary>
    /// Gets the text writer to which execution traces are written.
    /// </summary>
    /// <returns>The text writer.</returns>
    public TextWriter TraceWriter { get; private set; }

    /// <summary>
    /// Writes an instruction to the trace writer.
    /// </summary>
    /// <param name="value">The instruction.</param>
    protected virtual void Trace(Instruction value)
    {
        // Dumping a block-like instruction would print its whole body,
        // so print only its operator mnemonic in that case.
        bool isBlockLike = value is BlockInstruction || value is IfElseInstruction;
        if (isBlockLike)
        {
            value.Op.Dump(TraceWriter);
        }
        else
        {
            value.Dump(TraceWriter);
        }
        TraceWriter.WriteLine();
    }

    /// <inheritdoc/>
    public override void Interpret(Instruction value, InterpreterContext context)
    {
        // Trace the instruction, unless the current function has
        // already returned.
        if (!context.HasReturned)
        {
            Trace(value);
        }
        // Delegate the actual interpretation to the inner interpreter.
        Interpreter.Interpret(value, context);
    }
}
}
<|start_filename|>libwasm/SectionName.cs<|end_filename|>
using System;
namespace Wasm
{
/// <summary>
/// Represents a section's header.
/// </summary>
public struct SectionName : IEquatable<SectionName>
{
    /// <summary>
    /// Creates a section name for a non-custom section with the given section code.
    /// </summary>
    /// <param name="code">The section code.</param>
    public SectionName(SectionCode code)
    {
        this.Code = code;
        this.CustomName = null;
    }

    /// <summary>
    /// Creates a section header for a custom section with the given name.
    /// </summary>
    /// <param name="customName">The name of the custom section.</param>
    public SectionName(string customName)
    {
        this.Code = SectionCode.Custom;
        this.CustomName = customName;
    }

    /// <summary>
    /// Gets the section's code.
    /// </summary>
    /// <returns>The section code.</returns>
    public SectionCode Code { get; private set; }

    /// <summary>
    /// Gets a Boolean value that tells if the section is a custom section.
    /// </summary>
    public bool IsCustom => Code == SectionCode.Custom;

    /// <summary>
    /// Gets the name of the section, as a byte string. This applies only to
    /// custom sections.
    /// </summary>
    /// <returns>The name of the section if is this a custom section; otherwise, null.</returns>
    public string CustomName { get; private set; }

    /// <summary>
    /// Checks if this section name is equal to the given section name.
    /// </summary>
    /// <param name="other">The other section name.</param>
    /// <returns><c>true</c> if this section name is equal to the given section name; otherwise, <c>false</c>.</returns>
    public bool Equals(SectionName other)
    {
        if (IsCustom)
        {
            return other.IsCustom && CustomName == other.CustomName;
        }
        else
        {
            return Code == other.Code;
        }
    }

    /// <inheritdoc/>
    public override int GetHashCode()
    {
        if (IsCustom)
        {
            // Guard against a null custom name (e.g. a default-initialized
            // SectionName), which would otherwise throw here.
            return CustomName == null ? 0 : CustomName.GetHashCode();
        }
        else
        {
            return (int)Code;
        }
    }

    /// <inheritdoc/>
    public override bool Equals(object other)
    {
        return other is SectionName && Equals((SectionName)other);
    }

    /// <summary>
    /// Checks if the given section names are the same.
    /// </summary>
    /// <param name="first">The first section name.</param>
    /// <param name="second">The second section name.</param>
    /// <returns><c>true</c> if the given section names are the same; otherwise, <c>false</c>.</returns>
    public static bool operator==(SectionName first, SectionName second)
    {
        return first.Equals(second);
    }

    /// <summary>
    /// Checks if the given section names not are the same.
    /// </summary>
    /// <param name="first">The first section name.</param>
    /// <param name="second">The second section name.</param>
    /// <returns><c>true</c> if the given section names are not the same; otherwise, <c>false</c>.</returns>
    public static bool operator!=(SectionName first, SectionName second)
    {
        return !first.Equals(second);
    }

    /// <inheritdoc/>
    public override string ToString()
    {
        if (IsCustom)
            return "Custom section '" + CustomName + "'";
        else
            return Code.ToString();
    }
}
}
<|start_filename|>libwasm/WasmException.cs<|end_filename|>
using System;
using System.Runtime.Serialization;
namespace Wasm
{
/// <summary>
/// A type of exception that is thrown by the Wasm namespace and its sub-namespaces.
/// </summary>
[Serializable]
public class WasmException : Exception
{
    /// <summary>
    /// Initializes a new instance of the <see cref="Wasm.WasmException"/> class.
    /// </summary>
    /// <param name="message">The error message.</param>
    public WasmException(string message)
        : base(message)
    { }

    /// <summary>
    /// Initializes a new instance of the <see cref="Wasm.WasmException"/> class
    /// that wraps the exception that caused it.
    /// </summary>
    /// <param name="message">The error message.</param>
    /// <param name="innerException">The exception that caused this one.</param>
    public WasmException(string message, Exception innerException)
        : base(message, innerException)
    { }

    /// <summary>
    /// Initializes a new instance of the <see cref="Wasm.WasmException"/> class.
    /// </summary>
    /// <param name="info">Serialization info.</param>
    /// <param name="context">A streaming context.</param>
    protected WasmException(
        SerializationInfo info,
        StreamingContext context) : base(info, context) { }
}
}
<|start_filename|>libwasm/Interpret/FunctionDefinition.cs<|end_filename|>
using System.Collections.Generic;
namespace Wasm.Interpret
{
/// <summary>
/// Defines a base class for function definitions.
/// </summary>
/// <remarks>
/// Implementations include WebAssembly-defined functions
/// (<c>WasmFunctionDefinition</c>) as well as host-supplied functions such as
/// the spec test 'print' implementation.
/// </remarks>
public abstract class FunctionDefinition
{
    /// <summary>
    /// Gets this function definition's list of parameter types.
    /// </summary>
    /// <returns>The list of parameter types.</returns>
    public abstract IReadOnlyList<WasmValueType> ParameterTypes { get; }

    /// <summary>
    /// Gets this function definition's list of return types.
    /// </summary>
    /// <returns>The list of return types.</returns>
    public abstract IReadOnlyList<WasmValueType> ReturnTypes { get; }

    /// <summary>
    /// Invokes this function with the given argument list.
    /// </summary>
    /// <param name="arguments">The list of arguments for this function's parameters.</param>
    /// <param name="callStackDepth">
    /// The depth of the call stack when the function is invoked, excluding the function's stack frame.
    /// </param>
    /// <returns>The list of return values.</returns>
    public abstract IReadOnlyList<object> Invoke(IReadOnlyList<object> arguments, uint callStackDepth = 0);
}
}
<|start_filename|>libwasm/Instructions/Operators.cs<|end_filename|>
using System.Collections.Generic;
namespace Wasm.Instructions
{
/// <summary>
/// A collection of operator definitions.
/// </summary>
public static class Operators
{
static Operators()
{
    // Map from opcode byte to operator; populated by the Register calls below.
    opsByOpCode = new Dictionary<byte, Operator>();

    // Control flow operators.
    Unreachable = Register<NullaryOperator>(new NullaryOperator(0x00, WasmType.Empty, "unreachable"));
    Nop = Register<NullaryOperator>(new NullaryOperator(0x01, WasmType.Empty, "nop"));
    Block = Register<BlockOperator>(new BlockOperator(0x02, WasmType.Empty, "block"));
    Loop = Register<BlockOperator>(new BlockOperator(0x03, WasmType.Empty, "loop"));
    If = Register<IfElseOperator>(new IfElseOperator(0x04, WasmType.Empty, "if"));
    Br = Register<VarUInt32Operator>(new VarUInt32Operator(0x0c, WasmType.Empty, "br"));
    BrIf = Register<VarUInt32Operator>(new VarUInt32Operator(0x0d, WasmType.Empty, "br_if"));
    BrTable = Register<BrTableOperator>(new BrTableOperator(0x0e, WasmType.Empty, "br_table"));
    Return = Register<NullaryOperator>(new NullaryOperator(0x0f, WasmType.Empty, "return"));

    // Parametric and call operators.
    Drop = Register<NullaryOperator>(new NullaryOperator(0x1a, WasmType.Empty, "drop"));
    Select = Register<NullaryOperator>(new NullaryOperator(0x1b, WasmType.Empty, "select"));
    Call = Register<VarUInt32Operator>(new VarUInt32Operator(0x10, WasmType.Empty, "call"));
    CallIndirect = Register<CallIndirectOperator>(new CallIndirectOperator(0x11, WasmType.Empty, "call_indirect"));

    // Variable access operators.
    GetLocal = Register<VarUInt32Operator>(new VarUInt32Operator(0x20, WasmType.Empty, "get_local"));
    SetLocal = Register<VarUInt32Operator>(new VarUInt32Operator(0x21, WasmType.Empty, "set_local"));
    TeeLocal = Register<VarUInt32Operator>(new VarUInt32Operator(0x22, WasmType.Empty, "tee_local"));
    GetGlobal = Register<VarUInt32Operator>(new VarUInt32Operator(0x23, WasmType.Empty, "get_global"));
    SetGlobal = Register<VarUInt32Operator>(new VarUInt32Operator(0x24, WasmType.Empty, "set_global"));

    // Constant operators.
    Int32Const = Register<VarInt32Operator>(new VarInt32Operator(0x41, WasmType.Int32, "const"));
    Int64Const = Register<VarInt64Operator>(new VarInt64Operator(0x42, WasmType.Int64, "const"));
    Float32Const = Register<Float32Operator>(new Float32Operator(0x43, WasmType.Float32, "const"));
    Float64Const = Register<Float64Operator>(new Float64Operator(0x44, WasmType.Float64, "const"));

    // Linear memory load operators.
    Int32Load = Register<MemoryOperator>(new MemoryOperator(0x28, WasmType.Int32, "load"));
    Int64Load = Register<MemoryOperator>(new MemoryOperator(0x29, WasmType.Int64, "load"));
    Float32Load = Register<MemoryOperator>(new MemoryOperator(0x2a, WasmType.Float32, "load"));
    Float64Load = Register<MemoryOperator>(new MemoryOperator(0x2b, WasmType.Float64, "load"));
    Int32Load8S = Register<MemoryOperator>(new MemoryOperator(0x2c, WasmType.Int32, "load8_s"));
    Int32Load8U = Register<MemoryOperator>(new MemoryOperator(0x2d, WasmType.Int32, "load8_u"));
    Int32Load16S = Register<MemoryOperator>(new MemoryOperator(0x2e, WasmType.Int32, "load16_s"));
    Int32Load16U = Register<MemoryOperator>(new MemoryOperator(0x2f, WasmType.Int32, "load16_u"));
    Int64Load8S = Register<MemoryOperator>(new MemoryOperator(0x30, WasmType.Int64, "load8_s"));
    Int64Load8U = Register<MemoryOperator>(new MemoryOperator(0x31, WasmType.Int64, "load8_u"));
    Int64Load16S = Register<MemoryOperator>(new MemoryOperator(0x32, WasmType.Int64, "load16_s"));
    Int64Load16U = Register<MemoryOperator>(new MemoryOperator(0x33, WasmType.Int64, "load16_u"));
    Int64Load32S = Register<MemoryOperator>(new MemoryOperator(0x34, WasmType.Int64, "load32_s"));
    Int64Load32U = Register<MemoryOperator>(new MemoryOperator(0x35, WasmType.Int64, "load32_u"));

    // Linear memory store operators.
    Int32Store = Register<MemoryOperator>(new MemoryOperator(0x36, WasmType.Int32, "store"));
    Int64Store = Register<MemoryOperator>(new MemoryOperator(0x37, WasmType.Int64, "store"));
    Float32Store = Register<MemoryOperator>(new MemoryOperator(0x38, WasmType.Float32, "store"));
    Float64Store = Register<MemoryOperator>(new MemoryOperator(0x39, WasmType.Float64, "store"));
    Int32Store8 = Register<MemoryOperator>(new MemoryOperator(0x3a, WasmType.Int32, "store8"));
    Int32Store16 = Register<MemoryOperator>(new MemoryOperator(0x3b, WasmType.Int32, "store16"));
    Int64Store8 = Register<MemoryOperator>(new MemoryOperator(0x3c, WasmType.Int64, "store8"));
    Int64Store16 = Register<MemoryOperator>(new MemoryOperator(0x3d, WasmType.Int64, "store16"));
    Int64Store32 = Register<MemoryOperator>(new MemoryOperator(0x3e, WasmType.Int64, "store32"));

    // Memory size operators.
    CurrentMemory = Register<VarUInt32Operator>(new VarUInt32Operator(0x3f, WasmType.Empty, "current_memory"));
    GrowMemory = Register<VarUInt32Operator>(new VarUInt32Operator(0x40, WasmType.Empty, "grow_memory"));

    // The code below has been auto-generated by nullary-opcode-generator.
    Int32Eqz = Register<NullaryOperator>(new NullaryOperator(0x45, WasmType.Int32, "eqz"));
    Int32Eq = Register<NullaryOperator>(new NullaryOperator(0x46, WasmType.Int32, "eq"));
    Int32Ne = Register<NullaryOperator>(new NullaryOperator(0x47, WasmType.Int32, "ne"));
    Int32LtS = Register<NullaryOperator>(new NullaryOperator(0x48, WasmType.Int32, "lt_s"));
    Int32LtU = Register<NullaryOperator>(new NullaryOperator(0x49, WasmType.Int32, "lt_u"));
    Int32GtS = Register<NullaryOperator>(new NullaryOperator(0x4a, WasmType.Int32, "gt_s"));
    Int32GtU = Register<NullaryOperator>(new NullaryOperator(0x4b, WasmType.Int32, "gt_u"));
    Int32LeS = Register<NullaryOperator>(new NullaryOperator(0x4c, WasmType.Int32, "le_s"));
    Int32LeU = Register<NullaryOperator>(new NullaryOperator(0x4d, WasmType.Int32, "le_u"));
    Int32GeS = Register<NullaryOperator>(new NullaryOperator(0x4e, WasmType.Int32, "ge_s"));
    Int32GeU = Register<NullaryOperator>(new NullaryOperator(0x4f, WasmType.Int32, "ge_u"));
    Int64Eqz = Register<NullaryOperator>(new NullaryOperator(0x50, WasmType.Int64, "eqz"));
    Int64Eq = Register<NullaryOperator>(new NullaryOperator(0x51, WasmType.Int64, "eq"));
    Int64Ne = Register<NullaryOperator>(new NullaryOperator(0x52, WasmType.Int64, "ne"));
    Int64LtS = Register<NullaryOperator>(new NullaryOperator(0x53, WasmType.Int64, "lt_s"));
    Int64LtU = Register<NullaryOperator>(new NullaryOperator(0x54, WasmType.Int64, "lt_u"));
    Int64GtS = Register<NullaryOperator>(new NullaryOperator(0x55, WasmType.Int64, "gt_s"));
    Int64GtU = Register<NullaryOperator>(new NullaryOperator(0x56, WasmType.Int64, "gt_u"));
    Int64LeS = Register<NullaryOperator>(new NullaryOperator(0x57, WasmType.Int64, "le_s"));
    Int64LeU = Register<NullaryOperator>(new NullaryOperator(0x58, WasmType.Int64, "le_u"));
    Int64GeS = Register<NullaryOperator>(new NullaryOperator(0x59, WasmType.Int64, "ge_s"));
    Int64GeU = Register<NullaryOperator>(new NullaryOperator(0x5a, WasmType.Int64, "ge_u"));
    Float32Eq = Register<NullaryOperator>(new NullaryOperator(0x5b, WasmType.Float32, "eq"));
    Float32Ne = Register<NullaryOperator>(new NullaryOperator(0x5c, WasmType.Float32, "ne"));
    Float32Lt = Register<NullaryOperator>(new NullaryOperator(0x5d, WasmType.Float32, "lt"));
    Float32Gt = Register<NullaryOperator>(new NullaryOperator(0x5e, WasmType.Float32, "gt"));
    Float32Le = Register<NullaryOperator>(new NullaryOperator(0x5f, WasmType.Float32, "le"));
    Float32Ge = Register<NullaryOperator>(new NullaryOperator(0x60, WasmType.Float32, "ge"));
    Float64Eq = Register<NullaryOperator>(new NullaryOperator(0x61, WasmType.Float64, "eq"));
    Float64Ne = Register<NullaryOperator>(new NullaryOperator(0x62, WasmType.Float64, "ne"));
    Float64Lt = Register<NullaryOperator>(new NullaryOperator(0x63, WasmType.Float64, "lt"));
    Float64Gt = Register<NullaryOperator>(new NullaryOperator(0x64, WasmType.Float64, "gt"));
    Float64Le = Register<NullaryOperator>(new NullaryOperator(0x65, WasmType.Float64, "le"));
    Float64Ge = Register<NullaryOperator>(new NullaryOperator(0x66, WasmType.Float64, "ge"));
    Int32Clz = Register<NullaryOperator>(new NullaryOperator(0x67, WasmType.Int32, "clz"));
    Int32Ctz = Register<NullaryOperator>(new NullaryOperator(0x68, WasmType.Int32, "ctz"));
    Int32Popcnt = Register<NullaryOperator>(new NullaryOperator(0x69, WasmType.Int32, "popcnt"));
    Int32Add = Register<NullaryOperator>(new NullaryOperator(0x6a, WasmType.Int32, "add"));
    Int32Sub = Register<NullaryOperator>(new NullaryOperator(0x6b, WasmType.Int32, "sub"));
    Int32Mul = Register<NullaryOperator>(new NullaryOperator(0x6c, WasmType.Int32, "mul"));
    Int32DivS = Register<NullaryOperator>(new NullaryOperator(0x6d, WasmType.Int32, "div_s"));
    Int32DivU = Register<NullaryOperator>(new NullaryOperator(0x6e, WasmType.Int32, "div_u"));
    Int32RemS = Register<NullaryOperator>(new NullaryOperator(0x6f, WasmType.Int32, "rem_s"));
    Int32RemU = Register<NullaryOperator>(new NullaryOperator(0x70, WasmType.Int32, "rem_u"));
    Int32And = Register<NullaryOperator>(new NullaryOperator(0x71, WasmType.Int32, "and"));
    Int32Or = Register<NullaryOperator>(new NullaryOperator(0x72, WasmType.Int32, "or"));
    Int32Xor = Register<NullaryOperator>(new NullaryOperator(0x73, WasmType.Int32, "xor"));
    Int32Shl = Register<NullaryOperator>(new NullaryOperator(0x74, WasmType.Int32, "shl"));
    Int32ShrS = Register<NullaryOperator>(new NullaryOperator(0x75, WasmType.Int32, "shr_s"));
    Int32ShrU = Register<NullaryOperator>(new NullaryOperator(0x76, WasmType.Int32, "shr_u"));
    Int32Rotl = Register<NullaryOperator>(new NullaryOperator(0x77, WasmType.Int32, "rotl"));
    Int32Rotr = Register<NullaryOperator>(new NullaryOperator(0x78, WasmType.Int32, "rotr"));
    Int64Clz = Register<NullaryOperator>(new NullaryOperator(0x79, WasmType.Int64, "clz"));
    Int64Ctz = Register<NullaryOperator>(new NullaryOperator(0x7a, WasmType.Int64, "ctz"));
    Int64Popcnt = Register<NullaryOperator>(new NullaryOperator(0x7b, WasmType.Int64, "popcnt"));
    Int64Add = Register<NullaryOperator>(new NullaryOperator(0x7c, WasmType.Int64, "add"));
    Int64Sub = Register<NullaryOperator>(new NullaryOperator(0x7d, WasmType.Int64, "sub"));
    Int64Mul = Register<NullaryOperator>(new NullaryOperator(0x7e, WasmType.Int64, "mul"));
    Int64DivS = Register<NullaryOperator>(new NullaryOperator(0x7f, WasmType.Int64, "div_s"));
    Int64DivU = Register<NullaryOperator>(new NullaryOperator(0x80, WasmType.Int64, "div_u"));
    Int64RemS = Register<NullaryOperator>(new NullaryOperator(0x81, WasmType.Int64, "rem_s"));
    Int64RemU = Register<NullaryOperator>(new NullaryOperator(0x82, WasmType.Int64, "rem_u"));
    Int64And = Register<NullaryOperator>(new NullaryOperator(0x83, WasmType.Int64, "and"));
    Int64Or = Register<NullaryOperator>(new NullaryOperator(0x84, WasmType.Int64, "or"));
    Int64Xor = Register<NullaryOperator>(new NullaryOperator(0x85, WasmType.Int64, "xor"));
    Int64Shl = Register<NullaryOperator>(new NullaryOperator(0x86, WasmType.Int64, "shl"));
    Int64ShrS = Register<NullaryOperator>(new NullaryOperator(0x87, WasmType.Int64, "shr_s"));
    Int64ShrU = Register<NullaryOperator>(new NullaryOperator(0x88, WasmType.Int64, "shr_u"));
    Int64Rotl = Register<NullaryOperator>(new NullaryOperator(0x89, WasmType.Int64, "rotl"));
    Int64Rotr = Register<NullaryOperator>(new NullaryOperator(0x8a, WasmType.Int64, "rotr"));
    Float32Abs = Register<NullaryOperator>(new NullaryOperator(0x8b, WasmType.Float32, "abs"));
    Float32Neg = Register<NullaryOperator>(new NullaryOperator(0x8c, WasmType.Float32, "neg"));
    Float32Ceil = Register<NullaryOperator>(new NullaryOperator(0x8d, WasmType.Float32, "ceil"));
    Float32Floor = Register<NullaryOperator>(new NullaryOperator(0x8e, WasmType.Float32, "floor"));
    Float32Trunc = Register<NullaryOperator>(new NullaryOperator(0x8f, WasmType.Float32, "trunc"));
    Float32Nearest = Register<NullaryOperator>(new NullaryOperator(0x90, WasmType.Float32, "nearest"));
    Float32Sqrt = Register<NullaryOperator>(new NullaryOperator(0x91, WasmType.Float32, "sqrt"));
    Float32Add = Register<NullaryOperator>(new NullaryOperator(0x92, WasmType.Float32, "add"));
    Float32Sub = Register<NullaryOperator>(new NullaryOperator(0x93, WasmType.Float32, "sub"));
    Float32Mul = Register<NullaryOperator>(new NullaryOperator(0x94, WasmType.Float32, "mul"));
    Float32Div = Register<NullaryOperator>(new NullaryOperator(0x95, WasmType.Float32, "div"));
    Float32Min = Register<NullaryOperator>(new NullaryOperator(0x96, WasmType.Float32, "min"));
    Float32Max = Register<NullaryOperator>(new NullaryOperator(0x97, WasmType.Float32, "max"));
    Float32Copysign = Register<NullaryOperator>(new NullaryOperator(0x98, WasmType.Float32, "copysign"));
    Float64Abs = Register<NullaryOperator>(new NullaryOperator(0x99, WasmType.Float64, "abs"));
    Float64Neg = Register<NullaryOperator>(new NullaryOperator(0x9a, WasmType.Float64, "neg"));
    Float64Ceil = Register<NullaryOperator>(new NullaryOperator(0x9b, WasmType.Float64, "ceil"));
    Float64Floor = Register<NullaryOperator>(new NullaryOperator(0x9c, WasmType.Float64, "floor"));
    Float64Trunc = Register<NullaryOperator>(new NullaryOperator(0x9d, WasmType.Float64, "trunc"));
    Float64Nearest = Register<NullaryOperator>(new NullaryOperator(0x9e, WasmType.Float64, "nearest"));
    Float64Sqrt = Register<NullaryOperator>(new NullaryOperator(0x9f, WasmType.Float64, "sqrt"));
    Float64Add = Register<NullaryOperator>(new NullaryOperator(0xa0, WasmType.Float64, "add"));
    Float64Sub = Register<NullaryOperator>(new NullaryOperator(0xa1, WasmType.Float64, "sub"));
    Float64Mul = Register<NullaryOperator>(new NullaryOperator(0xa2, WasmType.Float64, "mul"));
    Float64Div = Register<NullaryOperator>(new NullaryOperator(0xa3, WasmType.Float64, "div"));
    Float64Min = Register<NullaryOperator>(new NullaryOperator(0xa4, WasmType.Float64, "min"));
    Float64Max = Register<NullaryOperator>(new NullaryOperator(0xa5, WasmType.Float64, "max"));
    Float64Copysign = Register<NullaryOperator>(new NullaryOperator(0xa6, WasmType.Float64, "copysign"));
    Int32WrapInt64 = Register<NullaryOperator>(new NullaryOperator(0xa7, WasmType.Int32, "wrap/i64"));
    Int32TruncSFloat32 = Register<NullaryOperator>(new NullaryOperator(0xa8, WasmType.Int32, "trunc_s/f32"));
    Int32TruncUFloat32 = Register<NullaryOperator>(new NullaryOperator(0xa9, WasmType.Int32, "trunc_u/f32"));
    Int32TruncSFloat64 = Register<NullaryOperator>(new NullaryOperator(0xaa, WasmType.Int32, "trunc_s/f64"));
    Int32TruncUFloat64 = Register<NullaryOperator>(new NullaryOperator(0xab, WasmType.Int32, "trunc_u/f64"));
    Int64ExtendSInt32 = Register<NullaryOperator>(new NullaryOperator(0xac, WasmType.Int64, "extend_s/i32"));
    Int64ExtendUInt32 = Register<NullaryOperator>(new NullaryOperator(0xad, WasmType.Int64, "extend_u/i32"));
    Int64TruncSFloat32 = Register<NullaryOperator>(new NullaryOperator(0xae, WasmType.Int64, "trunc_s/f32"));
    Int64TruncUFloat32 = Register<NullaryOperator>(new NullaryOperator(0xaf, WasmType.Int64, "trunc_u/f32"));
    Int64TruncSFloat64 = Register<NullaryOperator>(new NullaryOperator(0xb0, WasmType.Int64, "trunc_s/f64"));
    Int64TruncUFloat64 = Register<NullaryOperator>(new NullaryOperator(0xb1, WasmType.Int64, "trunc_u/f64"));
    Float32ConvertSInt32 = Register<NullaryOperator>(new NullaryOperator(0xb2, WasmType.Float32, "convert_s/i32"));
    Float32ConvertUInt32 = Register<NullaryOperator>(new NullaryOperator(0xb3, WasmType.Float32, "convert_u/i32"));
    Float32ConvertSInt64 = Register<NullaryOperator>(new NullaryOperator(0xb4, WasmType.Float32, "convert_s/i64"));
    Float32ConvertUInt64 = Register<NullaryOperator>(new NullaryOperator(0xb5, WasmType.Float32, "convert_u/i64"));
    Float32DemoteFloat64 = Register<NullaryOperator>(new NullaryOperator(0xb6, WasmType.Float32, "demote/f64"));
    Float64ConvertSInt32 = Register<NullaryOperator>(new NullaryOperator(0xb7, WasmType.Float64, "convert_s/i32"));
    Float64ConvertUInt32 = Register<NullaryOperator>(new NullaryOperator(0xb8, WasmType.Float64, "convert_u/i32"));
    Float64ConvertSInt64 = Register<NullaryOperator>(new NullaryOperator(0xb9, WasmType.Float64, "convert_s/i64"));
    Float64ConvertUInt64 = Register<NullaryOperator>(new NullaryOperator(0xba, WasmType.Float64, "convert_u/i64"));
    Float64PromoteFloat32 = Register<NullaryOperator>(new NullaryOperator(0xbb, WasmType.Float64, "promote/f32"));
    Int32ReinterpretFloat32 = Register<NullaryOperator>(new NullaryOperator(0xbc, WasmType.Int32, "reinterpret/f32"));
    Int64ReinterpretFloat64 = Register<NullaryOperator>(new NullaryOperator(0xbd, WasmType.Int64, "reinterpret/f64"));
    Float32ReinterpretInt32 = Register<NullaryOperator>(new NullaryOperator(0xbe, WasmType.Float32, "reinterpret/i32"));
    Float64ReinterpretInt64 = Register<NullaryOperator>(new NullaryOperator(0xbf, WasmType.Float64, "reinterpret/i64"));
}
/// <summary>
/// A map of opcodes to the operators that define them.
/// </summary>
private static Dictionary<byte, Operator> opsByOpCode;

/// <summary>
/// Gets a map of opcodes to the operators that define them.
/// </summary>
/// <returns>A read-only view of the opcode-to-operator map.</returns>
public static IReadOnlyDictionary<byte, Operator> OperatorsByOpCode => opsByOpCode;

/// <summary>
/// Gets a sequence that contains all WebAssembly operators defined by this class.
/// </summary>
/// <returns>All registered operators.</returns>
public static IEnumerable<Operator> AllOperators => opsByOpCode.Values;
/// <summary>
/// Registers the given operator.
/// </summary>
/// <param name="newOperator">The operator to register.</param>
/// <returns>The operator.</returns>
private static T Register<T>(T newOperator)
    where T : Operator
{
    // Dictionary.Add throws on a duplicate opcode, which catches
    // accidental double registrations at type-initialization time.
    opsByOpCode.Add(newOperator.OpCode, newOperator);
    return newOperator;
}
/// <summary>
/// Gets the operator with the given opcode.
/// </summary>
/// <param name="opCode">The opcode to find an operator for.</param>
/// <returns>The operator with the given opcode.</returns>
public static Operator GetOperatorByOpCode(byte opCode)
{
    Operator result;
    if (!OperatorsByOpCode.TryGetValue(opCode, out result))
    {
        // No operator is registered for this opcode.
        throw new WasmException(
            string.Format("Unknown opcode: {0}", DumpHelpers.FormatHex(opCode)));
    }
    return result;
}
/// <summary>
/// The 'unreachable' operator, which traps immediately.
/// </summary>
public static readonly NullaryOperator Unreachable;

/// <summary>
/// The 'nop' operator, which does nothing.
/// </summary>
public static readonly NullaryOperator Nop;

/// <summary>
/// The 'block' operator, which begins a sequence of expressions, yielding 0 or 1 values.
/// </summary>
public static readonly BlockOperator Block;

/// <summary>
/// The 'loop' operator, which begins a block which can also form control flow loops.
/// </summary>
public static readonly BlockOperator Loop;

/// <summary>
/// The 'if' operator, which runs one of two sequences of expressions.
/// </summary>
public static readonly IfElseOperator If;

/// <summary>
/// The 'br' operator: a break that targets an outer nested block.
/// </summary>
public static readonly VarUInt32Operator Br;

/// <summary>
/// The 'br_if' operator: a conditional break that targets an outer nested block.
/// </summary>
public static readonly VarUInt32Operator BrIf;

/// <summary>
/// The 'br_table' operator, which begins a break table.
/// </summary>
public static readonly BrTableOperator BrTable;

/// <summary>
/// The 'return' operator, which returns zero or one value from a function.
/// </summary>
public static readonly NullaryOperator Return;

/// <summary>
/// The 'drop' operator, which pops the top-of-stack value and ignores it.
/// </summary>
public static readonly NullaryOperator Drop;

/// <summary>
/// The 'select' operator, which selects one of two values based on a condition.
/// </summary>
public static readonly NullaryOperator Select;

/// <summary>
/// The 'call' operator, which calls a function by its index.
/// </summary>
public static readonly VarUInt32Operator Call;

/// <summary>
/// The 'call_indirect' operator, which calls a function pointer.
/// </summary>
public static readonly CallIndirectOperator CallIndirect;

/// <summary>
/// The 'get_local' operator, which reads a local variable or parameter.
/// </summary>
public static readonly VarUInt32Operator GetLocal;

/// <summary>
/// The 'set_local' operator, which writes a value to a local variable or parameter.
/// </summary>
public static readonly VarUInt32Operator SetLocal;

/// <summary>
/// The 'tee_local' operator, which writes a value to a local variable or parameter
/// and then returns the same value.
/// </summary>
public static readonly VarUInt32Operator TeeLocal;

/// <summary>
/// The 'get_global' operator, which reads a global variable.
/// </summary>
public static readonly VarUInt32Operator GetGlobal;

/// <summary>
/// The 'set_global' operator, which writes a value to a global variable.
/// </summary>
public static readonly VarUInt32Operator SetGlobal;

/// <summary>
/// The 'i32.load' operator, which loads a 32-bit integer from linear memory.
/// </summary>
public static readonly MemoryOperator Int32Load;

/// <summary>
/// The 'i64.load' operator, which loads a 64-bit integer from linear memory.
/// </summary>
public static readonly MemoryOperator Int64Load;

/// <summary>
/// The 'f32.load' operator, which loads a 32-bit floating-point number from linear memory.
/// </summary>
public static readonly MemoryOperator Float32Load;

/// <summary>
/// The 'f64.load' operator, which loads a 64-bit floating-point number from linear memory.
/// </summary>
public static readonly MemoryOperator Float64Load;

/// <summary>
/// The 'i32.load8_s' operator, which loads a byte from memory and sign-extends it to
/// a 32-bit integer.
/// </summary>
public static readonly MemoryOperator Int32Load8S;
/// <summary>
/// The 'i32.load8_u' operator, which loads a byte from memory and zero-extends it to
/// a 32-bit integer.
/// </summary>
public static readonly MemoryOperator Int32Load8U;
/// <summary>
/// The 'i32.load16_s' operator, which loads a 16-bit integer from memory and
/// sign-extends it to a 32-bit integer.
/// </summary>
public static readonly MemoryOperator Int32Load16S;
/// <summary>
/// The 'i32.load16_u' operator, which loads a 16-bit integer from memory and
/// zero-extends it to a 32-bit integer.
/// </summary>
public static readonly MemoryOperator Int32Load16U;
/// <summary>
/// The 'i64.load8_s' operator, which loads a byte from memory and sign-extends it to
/// a 64-bit integer.
/// </summary>
public static readonly MemoryOperator Int64Load8S;
/// <summary>
/// The 'i64.load8_u' operator, which loads a byte from memory and zero-extends it to
/// a 64-bit integer.
/// </summary>
public static readonly MemoryOperator Int64Load8U;
/// <summary>
/// The 'i64.load16_s' operator, which loads a 16-bit integer from memory and
/// sign-extends it to a 64-bit integer.
/// </summary>
public static readonly MemoryOperator Int64Load16S;
/// <summary>
/// The 'i64.load16_u' operator, which loads a 16-bit integer from memory and
/// zero-extends it to a 64-bit integer.
/// </summary>
public static readonly MemoryOperator Int64Load16U;
/// <summary>
/// The 'i64.load32_s' operator, which loads a 32-bit integer from memory and
/// sign-extends it to a 64-bit integer.
/// </summary>
public static readonly MemoryOperator Int64Load32S;
/// <summary>
/// The 'i64.load32_u' operator, which loads a 32-bit integer from memory and
/// zero-extends it to a 64-bit integer.
/// </summary>
public static readonly MemoryOperator Int64Load32U;
/// <summary>
/// The 'i32.store' operator, which stores a 32-bit integer in linear memory.
/// </summary>
public static readonly MemoryOperator Int32Store;
/// <summary>
/// The 'i64.store' operator, which stores a 64-bit integer in linear memory.
/// </summary>
public static readonly MemoryOperator Int64Store;
/// <summary>
/// The 'f32.store' operator, which stores a 32-bit floating-point number in
/// linear memory.
/// </summary>
public static readonly MemoryOperator Float32Store;
/// <summary>
/// The 'f64.store' operator, which stores a 64-bit floating-point number in
/// linear memory.
/// </summary>
public static readonly MemoryOperator Float64Store;
        /// <summary>
        /// The 'i32.store8' operator, which truncates a 32-bit integer to a byte and stores
        /// it in linear memory.
        /// </summary>
        public static readonly MemoryOperator Int32Store8;
        /// <summary>
        /// The 'i32.store16' operator, which truncates a 32-bit integer to a 16-bit integer
        /// and stores it in linear memory.
        /// </summary>
        public static readonly MemoryOperator Int32Store16;
        /// <summary>
        /// The 'i64.store8' operator, which truncates a 64-bit integer to a byte and stores
        /// it in linear memory.
        /// </summary>
        public static readonly MemoryOperator Int64Store8;
        /// <summary>
        /// The 'i64.store16' operator, which truncates a 64-bit integer to a 16-bit integer
        /// and stores it in linear memory.
        /// </summary>
        public static readonly MemoryOperator Int64Store16;
        /// <summary>
        /// The 'i64.store32' operator, which truncates a 64-bit integer to a 32-bit integer
        /// and stores it in linear memory.
        /// </summary>
        public static readonly MemoryOperator Int64Store32;
/// <summary>
/// The 'current_memory' operator, which queries the memory size.
/// </summary>
public static readonly VarUInt32Operator CurrentMemory;
/// <summary>
/// The 'grow_memory' operator, which grows the memory size.
/// </summary>
public static readonly VarUInt32Operator GrowMemory;
/// <summary>
/// The 'i32.const' operator, which loads a constant 32-bit integer onto the stack.
/// </summary>
public static readonly VarInt32Operator Int32Const;
/// <summary>
/// The 'i64.const' operator, which loads a constant 64-bit integer onto the stack.
/// </summary>
public static readonly VarInt64Operator Int64Const;
/// <summary>
/// The 'f32.const' operator, which loads a constant 32-bit floating-point number onto the stack.
/// </summary>
public static readonly Float32Operator Float32Const;
/// <summary>
/// The 'f64.const' operator, which loads a constant 64-bit floating-point number onto the stack.
/// </summary>
public static readonly Float64Operator Float64Const;
/// <summary>
/// The 'else' opcode, which begins an 'if' expression's 'else' block.
/// </summary>
public const byte ElseOpCode = 0x05;
/// <summary>
/// The 'end' opcode, which ends a block, loop or if.
/// </summary>
public const byte EndOpCode = 0x0b;
#region Auto-generated nullaries
// This region was auto-generated by nullary-opcode-generator. Please don't make any
// manual changes.
/// <summary>
/// The 'i32.eqz' operator: compare equal to zero (return 1 if operand is zero, 0 otherwise).
/// </summary>
public static readonly NullaryOperator Int32Eqz;
/// <summary>
/// The 'i32.eq' operator: sign-agnostic compare equal.
/// </summary>
public static readonly NullaryOperator Int32Eq;
/// <summary>
/// The 'i32.ne' operator: sign-agnostic compare unequal.
/// </summary>
public static readonly NullaryOperator Int32Ne;
/// <summary>
/// The 'i32.lt_s' operator: signed less than.
/// </summary>
public static readonly NullaryOperator Int32LtS;
/// <summary>
/// The 'i32.lt_u' operator: unsigned less than.
/// </summary>
public static readonly NullaryOperator Int32LtU;
/// <summary>
/// The 'i32.gt_s' operator: signed greater than.
/// </summary>
public static readonly NullaryOperator Int32GtS;
/// <summary>
/// The 'i32.gt_u' operator: unsigned greater than.
/// </summary>
public static readonly NullaryOperator Int32GtU;
/// <summary>
/// The 'i32.le_s' operator: signed less than or equal.
/// </summary>
public static readonly NullaryOperator Int32LeS;
/// <summary>
/// The 'i32.le_u' operator: unsigned less than or equal.
/// </summary>
public static readonly NullaryOperator Int32LeU;
/// <summary>
/// The 'i32.ge_s' operator: signed greater than or equal.
/// </summary>
public static readonly NullaryOperator Int32GeS;
/// <summary>
/// The 'i32.ge_u' operator: unsigned greater than or equal.
/// </summary>
public static readonly NullaryOperator Int32GeU;
/// <summary>
/// The 'i64.eqz' operator: compare equal to zero (return 1 if operand is zero, 0 otherwise).
/// </summary>
public static readonly NullaryOperator Int64Eqz;
/// <summary>
/// The 'i64.eq' operator: sign-agnostic compare equal.
/// </summary>
public static readonly NullaryOperator Int64Eq;
/// <summary>
/// The 'i64.ne' operator: sign-agnostic compare unequal.
/// </summary>
public static readonly NullaryOperator Int64Ne;
/// <summary>
/// The 'i64.lt_s' operator: signed less than.
/// </summary>
public static readonly NullaryOperator Int64LtS;
/// <summary>
/// The 'i64.lt_u' operator: unsigned less than.
/// </summary>
public static readonly NullaryOperator Int64LtU;
/// <summary>
/// The 'i64.gt_s' operator: signed greater than.
/// </summary>
public static readonly NullaryOperator Int64GtS;
/// <summary>
/// The 'i64.gt_u' operator: unsigned greater than.
/// </summary>
public static readonly NullaryOperator Int64GtU;
/// <summary>
/// The 'i64.le_s' operator: signed less than or equal.
/// </summary>
public static readonly NullaryOperator Int64LeS;
/// <summary>
/// The 'i64.le_u' operator: unsigned less than or equal.
/// </summary>
public static readonly NullaryOperator Int64LeU;
/// <summary>
/// The 'i64.ge_s' operator: signed greater than or equal.
/// </summary>
public static readonly NullaryOperator Int64GeS;
/// <summary>
/// The 'i64.ge_u' operator: unsigned greater than or equal.
/// </summary>
public static readonly NullaryOperator Int64GeU;
/// <summary>
/// The 'f32.eq' operator: compare ordered and equal.
/// </summary>
public static readonly NullaryOperator Float32Eq;
/// <summary>
/// The 'f32.ne' operator: compare unordered or unequal.
/// </summary>
public static readonly NullaryOperator Float32Ne;
/// <summary>
/// The 'f32.lt' operator: compare ordered and less than.
/// </summary>
public static readonly NullaryOperator Float32Lt;
/// <summary>
/// The 'f32.gt' operator: compare ordered and greater than.
/// </summary>
public static readonly NullaryOperator Float32Gt;
/// <summary>
/// The 'f32.le' operator: compare ordered and less than or equal.
/// </summary>
public static readonly NullaryOperator Float32Le;
/// <summary>
/// The 'f32.ge' operator: compare ordered and greater than or equal.
/// </summary>
public static readonly NullaryOperator Float32Ge;
/// <summary>
/// The 'f64.eq' operator: compare ordered and equal.
/// </summary>
public static readonly NullaryOperator Float64Eq;
/// <summary>
/// The 'f64.ne' operator: compare unordered or unequal.
/// </summary>
public static readonly NullaryOperator Float64Ne;
/// <summary>
/// The 'f64.lt' operator: compare ordered and less than.
/// </summary>
public static readonly NullaryOperator Float64Lt;
/// <summary>
/// The 'f64.gt' operator: compare ordered and greater than.
/// </summary>
public static readonly NullaryOperator Float64Gt;
/// <summary>
/// The 'f64.le' operator: compare ordered and less than or equal.
/// </summary>
public static readonly NullaryOperator Float64Le;
/// <summary>
/// The 'f64.ge' operator: compare ordered and greater than or equal.
/// </summary>
public static readonly NullaryOperator Float64Ge;
/// <summary>
/// The 'i32.clz' operator: sign-agnostic count leading zero bits (All zero bits are considered leading if the value is zero).
/// </summary>
public static readonly NullaryOperator Int32Clz;
/// <summary>
/// The 'i32.ctz' operator: sign-agnostic count trailing zero bits (All zero bits are considered trailing if the value is zero).
/// </summary>
public static readonly NullaryOperator Int32Ctz;
/// <summary>
/// The 'i32.popcnt' operator: sign-agnostic count number of one bits.
/// </summary>
public static readonly NullaryOperator Int32Popcnt;
/// <summary>
/// The 'i32.add' operator: sign-agnostic addition.
/// </summary>
public static readonly NullaryOperator Int32Add;
/// <summary>
/// The 'i32.sub' operator: sign-agnostic subtraction.
/// </summary>
public static readonly NullaryOperator Int32Sub;
/// <summary>
/// The 'i32.mul' operator: sign-agnostic multiplication (lower 32-bits).
/// </summary>
public static readonly NullaryOperator Int32Mul;
/// <summary>
/// The 'i32.div_s' operator: signed division (result is truncated toward zero).
/// </summary>
public static readonly NullaryOperator Int32DivS;
/// <summary>
/// The 'i32.div_u' operator: unsigned division (result is floored).
/// </summary>
public static readonly NullaryOperator Int32DivU;
/// <summary>
/// The 'i32.rem_s' operator: signed remainder (result has the sign of the dividend).
/// </summary>
public static readonly NullaryOperator Int32RemS;
/// <summary>
/// The 'i32.rem_u' operator: unsigned remainder.
/// </summary>
public static readonly NullaryOperator Int32RemU;
/// <summary>
/// The 'i32.and' operator: sign-agnostic bitwise and.
/// </summary>
public static readonly NullaryOperator Int32And;
/// <summary>
/// The 'i32.or' operator: sign-agnostic bitwise inclusive or.
/// </summary>
public static readonly NullaryOperator Int32Or;
/// <summary>
/// The 'i32.xor' operator: sign-agnostic bitwise exclusive or.
/// </summary>
public static readonly NullaryOperator Int32Xor;
/// <summary>
/// The 'i32.shl' operator: sign-agnostic shift left.
/// </summary>
public static readonly NullaryOperator Int32Shl;
/// <summary>
/// The 'i32.shr_s' operator: sign-replicating (arithmetic) shift right.
/// </summary>
public static readonly NullaryOperator Int32ShrS;
/// <summary>
/// The 'i32.shr_u' operator: zero-replicating (logical) shift right.
/// </summary>
public static readonly NullaryOperator Int32ShrU;
/// <summary>
/// The 'i32.rotl' operator: sign-agnostic rotate left.
/// </summary>
public static readonly NullaryOperator Int32Rotl;
/// <summary>
/// The 'i32.rotr' operator: sign-agnostic rotate right.
/// </summary>
public static readonly NullaryOperator Int32Rotr;
/// <summary>
/// The 'i64.clz' operator: sign-agnostic count leading zero bits (All zero bits are considered leading if the value is zero).
/// </summary>
public static readonly NullaryOperator Int64Clz;
/// <summary>
/// The 'i64.ctz' operator: sign-agnostic count trailing zero bits (All zero bits are considered trailing if the value is zero).
/// </summary>
public static readonly NullaryOperator Int64Ctz;
/// <summary>
/// The 'i64.popcnt' operator: sign-agnostic count number of one bits.
/// </summary>
public static readonly NullaryOperator Int64Popcnt;
/// <summary>
/// The 'i64.add' operator: sign-agnostic addition.
/// </summary>
public static readonly NullaryOperator Int64Add;
/// <summary>
/// The 'i64.sub' operator: sign-agnostic subtraction.
/// </summary>
public static readonly NullaryOperator Int64Sub;
        /// <summary>
        /// The 'i64.mul' operator: sign-agnostic multiplication (lower 64-bits).
        /// </summary>
        public static readonly NullaryOperator Int64Mul;
/// <summary>
/// The 'i64.div_s' operator: signed division (result is truncated toward zero).
/// </summary>
public static readonly NullaryOperator Int64DivS;
/// <summary>
/// The 'i64.div_u' operator: unsigned division (result is floored).
/// </summary>
public static readonly NullaryOperator Int64DivU;
/// <summary>
/// The 'i64.rem_s' operator: signed remainder (result has the sign of the dividend).
/// </summary>
public static readonly NullaryOperator Int64RemS;
/// <summary>
/// The 'i64.rem_u' operator: unsigned remainder.
/// </summary>
public static readonly NullaryOperator Int64RemU;
/// <summary>
/// The 'i64.and' operator: sign-agnostic bitwise and.
/// </summary>
public static readonly NullaryOperator Int64And;
/// <summary>
/// The 'i64.or' operator: sign-agnostic bitwise inclusive or.
/// </summary>
public static readonly NullaryOperator Int64Or;
/// <summary>
/// The 'i64.xor' operator: sign-agnostic bitwise exclusive or.
/// </summary>
public static readonly NullaryOperator Int64Xor;
/// <summary>
/// The 'i64.shl' operator: sign-agnostic shift left.
/// </summary>
public static readonly NullaryOperator Int64Shl;
/// <summary>
/// The 'i64.shr_s' operator: sign-replicating (arithmetic) shift right.
/// </summary>
public static readonly NullaryOperator Int64ShrS;
/// <summary>
/// The 'i64.shr_u' operator: zero-replicating (logical) shift right.
/// </summary>
public static readonly NullaryOperator Int64ShrU;
/// <summary>
/// The 'i64.rotl' operator: sign-agnostic rotate left.
/// </summary>
public static readonly NullaryOperator Int64Rotl;
/// <summary>
/// The 'i64.rotr' operator: sign-agnostic rotate right.
/// </summary>
public static readonly NullaryOperator Int64Rotr;
/// <summary>
/// The 'f32.abs' operator: absolute value.
/// </summary>
public static readonly NullaryOperator Float32Abs;
/// <summary>
/// The 'f32.neg' operator: negation.
/// </summary>
public static readonly NullaryOperator Float32Neg;
/// <summary>
/// The 'f32.ceil' operator: ceiling operator.
/// </summary>
public static readonly NullaryOperator Float32Ceil;
/// <summary>
/// The 'f32.floor' operator: floor operator.
/// </summary>
public static readonly NullaryOperator Float32Floor;
/// <summary>
/// The 'f32.trunc' operator: round to nearest integer towards zero.
/// </summary>
public static readonly NullaryOperator Float32Trunc;
/// <summary>
/// The 'f32.nearest' operator: round to nearest integer, ties to even.
/// </summary>
public static readonly NullaryOperator Float32Nearest;
/// <summary>
/// The 'f32.sqrt' operator: square root.
/// </summary>
public static readonly NullaryOperator Float32Sqrt;
/// <summary>
/// The 'f32.add' operator: addition.
/// </summary>
public static readonly NullaryOperator Float32Add;
/// <summary>
/// The 'f32.sub' operator: subtraction.
/// </summary>
public static readonly NullaryOperator Float32Sub;
/// <summary>
/// The 'f32.mul' operator: multiplication.
/// </summary>
public static readonly NullaryOperator Float32Mul;
/// <summary>
/// The 'f32.div' operator: division.
/// </summary>
public static readonly NullaryOperator Float32Div;
/// <summary>
/// The 'f32.min' operator: minimum (binary operator); if either operand is NaN, returns NaN.
/// </summary>
public static readonly NullaryOperator Float32Min;
/// <summary>
/// The 'f32.max' operator: maximum (binary operator); if either operand is NaN, returns NaN.
/// </summary>
public static readonly NullaryOperator Float32Max;
/// <summary>
/// The 'f32.copysign' operator: copysign.
/// </summary>
public static readonly NullaryOperator Float32Copysign;
/// <summary>
/// The 'f64.abs' operator: absolute value.
/// </summary>
public static readonly NullaryOperator Float64Abs;
/// <summary>
/// The 'f64.neg' operator: negation.
/// </summary>
public static readonly NullaryOperator Float64Neg;
/// <summary>
/// The 'f64.ceil' operator: ceiling operator.
/// </summary>
public static readonly NullaryOperator Float64Ceil;
/// <summary>
/// The 'f64.floor' operator: floor operator.
/// </summary>
public static readonly NullaryOperator Float64Floor;
/// <summary>
/// The 'f64.trunc' operator: round to nearest integer towards zero.
/// </summary>
public static readonly NullaryOperator Float64Trunc;
/// <summary>
/// The 'f64.nearest' operator: round to nearest integer, ties to even.
/// </summary>
public static readonly NullaryOperator Float64Nearest;
/// <summary>
/// The 'f64.sqrt' operator: square root.
/// </summary>
public static readonly NullaryOperator Float64Sqrt;
/// <summary>
/// The 'f64.add' operator: addition.
/// </summary>
public static readonly NullaryOperator Float64Add;
/// <summary>
/// The 'f64.sub' operator: subtraction.
/// </summary>
public static readonly NullaryOperator Float64Sub;
/// <summary>
/// The 'f64.mul' operator: multiplication.
/// </summary>
public static readonly NullaryOperator Float64Mul;
/// <summary>
/// The 'f64.div' operator: division.
/// </summary>
public static readonly NullaryOperator Float64Div;
/// <summary>
/// The 'f64.min' operator: minimum (binary operator); if either operand is NaN, returns NaN.
/// </summary>
public static readonly NullaryOperator Float64Min;
/// <summary>
/// The 'f64.max' operator: maximum (binary operator); if either operand is NaN, returns NaN.
/// </summary>
public static readonly NullaryOperator Float64Max;
/// <summary>
/// The 'f64.copysign' operator: copysign.
/// </summary>
public static readonly NullaryOperator Float64Copysign;
/// <summary>
/// The 'i32.wrap/i64' operator: wrap a 64-bit integer to a 32-bit integer.
/// </summary>
public static readonly NullaryOperator Int32WrapInt64;
/// <summary>
/// The 'i32.trunc_s/f32' operator: truncate a 32-bit float to a signed 32-bit integer.
/// </summary>
public static readonly NullaryOperator Int32TruncSFloat32;
/// <summary>
/// The 'i32.trunc_u/f32' operator: truncate a 32-bit float to an unsigned 32-bit integer.
/// </summary>
public static readonly NullaryOperator Int32TruncUFloat32;
/// <summary>
/// The 'i32.trunc_s/f64' operator: truncate a 64-bit float to a signed 32-bit integer.
/// </summary>
public static readonly NullaryOperator Int32TruncSFloat64;
/// <summary>
/// The 'i32.trunc_u/f64' operator: truncate a 64-bit float to an unsigned 32-bit integer.
/// </summary>
public static readonly NullaryOperator Int32TruncUFloat64;
/// <summary>
/// The 'i64.extend_s/i32' operator: extend a signed 32-bit integer to a 64-bit integer.
/// </summary>
public static readonly NullaryOperator Int64ExtendSInt32;
/// <summary>
/// The 'i64.extend_u/i32' operator: extend an unsigned 32-bit integer to a 64-bit integer.
/// </summary>
public static readonly NullaryOperator Int64ExtendUInt32;
/// <summary>
/// The 'i64.trunc_s/f32' operator: truncate a 32-bit float to a signed 64-bit integer.
/// </summary>
public static readonly NullaryOperator Int64TruncSFloat32;
/// <summary>
/// The 'i64.trunc_u/f32' operator: truncate a 32-bit float to an unsigned 64-bit integer.
/// </summary>
public static readonly NullaryOperator Int64TruncUFloat32;
/// <summary>
/// The 'i64.trunc_s/f64' operator: truncate a 64-bit float to a signed 64-bit integer.
/// </summary>
public static readonly NullaryOperator Int64TruncSFloat64;
/// <summary>
/// The 'i64.trunc_u/f64' operator: truncate a 64-bit float to an unsigned 64-bit integer.
/// </summary>
public static readonly NullaryOperator Int64TruncUFloat64;
/// <summary>
/// The 'f32.convert_s/i32' operator: convert a signed 32-bit integer to a 32-bit float.
/// </summary>
public static readonly NullaryOperator Float32ConvertSInt32;
/// <summary>
/// The 'f32.convert_u/i32' operator: convert an unsigned 32-bit integer to a 32-bit float.
/// </summary>
public static readonly NullaryOperator Float32ConvertUInt32;
/// <summary>
/// The 'f32.convert_s/i64' operator: convert a signed 64-bit integer to a 32-bit float.
/// </summary>
public static readonly NullaryOperator Float32ConvertSInt64;
/// <summary>
/// The 'f32.convert_u/i64' operator: convert an unsigned 64-bit integer to a 32-bit float.
/// </summary>
public static readonly NullaryOperator Float32ConvertUInt64;
/// <summary>
/// The 'f32.demote/f64' operator: demote a 64-bit float to a 32-bit float.
/// </summary>
public static readonly NullaryOperator Float32DemoteFloat64;
/// <summary>
/// The 'f64.convert_s/i32' operator: convert a signed 32-bit integer to a 64-bit float.
/// </summary>
public static readonly NullaryOperator Float64ConvertSInt32;
/// <summary>
/// The 'f64.convert_u/i32' operator: convert an unsigned 32-bit integer to a 64-bit float.
/// </summary>
public static readonly NullaryOperator Float64ConvertUInt32;
/// <summary>
/// The 'f64.convert_s/i64' operator: convert a signed 64-bit integer to a 64-bit float.
/// </summary>
public static readonly NullaryOperator Float64ConvertSInt64;
/// <summary>
/// The 'f64.convert_u/i64' operator: convert an unsigned 64-bit integer to a 64-bit float.
/// </summary>
public static readonly NullaryOperator Float64ConvertUInt64;
/// <summary>
/// The 'f64.promote/f32' operator: promote a 32-bit float to a 64-bit float.
/// </summary>
public static readonly NullaryOperator Float64PromoteFloat32;
/// <summary>
/// The 'i32.reinterpret/f32' operator: reinterpret the bits of a 32-bit float as a 32-bit integer.
/// </summary>
public static readonly NullaryOperator Int32ReinterpretFloat32;
/// <summary>
/// The 'i64.reinterpret/f64' operator: reinterpret the bits of a 64-bit float as a 64-bit integer.
/// </summary>
public static readonly NullaryOperator Int64ReinterpretFloat64;
/// <summary>
/// The 'f32.reinterpret/i32' operator: reinterpret the bits of a 32-bit integer as a 32-bit float.
/// </summary>
public static readonly NullaryOperator Float32ReinterpretInt32;
/// <summary>
/// The 'f64.reinterpret/i64' operator: reinterpret the bits of a 64-bit integer as a 64-bit float.
/// </summary>
public static readonly NullaryOperator Float64ReinterpretInt64;
#endregion
}
}
<|start_filename|>libwasm-text/Lexer.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Numerics;
using System.Text;
using Pixie.Code;
namespace Wasm.Text
{
/// <summary>
/// A lexer for the WebAssembly text format.
/// </summary>
public sealed class Lexer
{
        /// <summary>
        /// Creates a lexer that reads characters from <paramref name="reader"/> and
        /// attributes token source spans to <paramref name="document"/>.
        /// </summary>
        private Lexer(SourceDocument document, TextReader reader)
        {
            this.document = document;
            this.reader = reader;
            this.offset = 0;
            this.lookaheadBuffer = new List<char>();
        }

        // The source document that tokens' spans refer to.
        private SourceDocument document;
        // The underlying character stream.
        private TextReader reader;
        // Number of characters consumed so far; doubles as the current
        // position in the source document.
        private int offset;
        // Characters read from 'reader' but not yet consumed, in order.
        // Used to implement multi-character peeking.
        private List<char> lookaheadBuffer;
/// <summary>
/// Tokenizes a string.
/// </summary>
/// <param name="document">A string to tokenize.</param>
/// <param name="fileName">The name of the file in which the string is saved.</param>
/// <returns>A tokenized string.</returns>
public static IEnumerable<Token> Tokenize(string document, string fileName = "<string>")
{
return Tokenize(new StringDocument(fileName, document));
}
        /// <summary>
        /// Tokenizes a source document.
        /// </summary>
        /// <param name="document">A source document to tokenize.</param>
        /// <returns>A tokenized source document.</returns>
        public static IEnumerable<Token> Tokenize(SourceDocument document)
        {
            // NOTE: this is an iterator method, so tokenization is lazy. The
            // reader is opened when enumeration starts and is disposed only
            // when the enumeration completes or the enumerator is disposed.
            using (var reader = document.Open(0))
            {
                var lexer = new Lexer(document, reader);
                Token token;
                while (lexer.TryReadToken(out token))
                {
                    yield return token;
                }
            }
        }
        /// <summary>
        /// Tries to read the next token from the stream.
        /// </summary>
        /// <param name="token">The next token.</param>
        /// <returns>
        /// <c>true</c> if a token was read; <c>false</c> if the stream is empty.
        /// </returns>
        private bool TryReadToken(out Token token)
        {
            SkipWhitespace();
            char firstChar;
            if (TryPeekChar(out firstChar))
            {
                // Remember where the token starts so we can build its span
                // (and re-lex it as a reserved token if needed).
                var startOffset = offset;
                if (firstChar == '(' || firstChar == ')')
                {
                    // Parentheses are single-character tokens and are exempt
                    // from the "separated by space or parens" rule below.
                    SkipChar();
                    token = new Token(
                        firstChar == '(' ? TokenKind.LeftParenthesis : TokenKind.RightParenthesis,
                        new SourceSpan(document, startOffset, 1),
                        null);
                    return true;
                }
                else if (firstChar == '"')
                {
                    token = ReadStringToken();
                }
                else if (firstChar == '$')
                {
                    token = ReadIdentifierToken();
                }
                else if (firstChar >= 'a' && firstChar <= 'z')
                {
                    token = ReadKeywordToken();
                }
                else
                {
                    token = ReadNumberToken();
                }
                // If the token is not followed by whitespace, EOF or a
                // parenthesis, then the whole run of characters forms a
                // single reserved token.
                if (!SkipWhitespace() && TryPeekChar(out firstChar) && firstChar != '(' && firstChar != ')')
                {
                    // According to the spec:
                    //
                    // The effect of defining the set of reserved tokens is that all tokens must be
                    // separated by either parentheses or white space. For example, ‘𝟶$𝚡’ is a single
                    // reserved token. Consequently, it is not recognized as two separate tokens ‘𝟶’
                    // and ‘$𝚡’, but instead disallowed. This property of tokenization is not affected
                    // by the fact that the definition of reserved tokens overlaps with other token classes.
                    token = ReadReservedToken(startOffset);
                }
                return true;
            }
            else
            {
                token = default(Token);
                return false;
            }
        }
/// <summary>
/// Skips as many whitespace characters as possible.
/// </summary>
/// <returns>
/// <c>true</c> if at least one whitespace character was skipped; otherwise, <c>false</c>.
/// </returns>
private bool SkipWhitespace()
{
bool skippedAny = false;
while (true)
{
char c;
if (!TryPeekChar(out c))
{
break;
}
if (c == ' ' || c == '\t' || c == '\n' || c == '\r')
{
SkipChar();
skippedAny = true;
}
else if (IsNext(";;"))
{
// Line comments.
SkipChar();
SkipChar();
while (TryReadChar(out c) && c != '\n')
{ }
skippedAny = true;
}
else if (IsNext("(;"))
{
// Block comments.
SkipChar();
SkipChar();
int nest = 1;
while (nest > 0)
{
if (IsNext(";)"))
{
nest--;
SkipChars(2);
}
else if (IsNext("(;"))
{
nest++;
SkipChars(2);
}
else
{
SkipChar();
}
}
skippedAny = true;
}
else
{
break;
}
}
return skippedAny;
}
private bool SkipChar()
{
char c;
return TryReadChar(out c);
}
private void SkipChars(int count)
{
for (int i = 0; i < count; i++)
{
SkipChar();
}
}
private bool IsNext(string expected)
{
string actual;
return TryPeekString(expected.Length, out actual)
&& actual == expected;
}
private bool Expect(string expected)
{
if (IsNext(expected))
{
SkipChars(expected.Length);
return true;
}
else
{
return false;
}
}
private bool TryPeekString(int length, out string result)
{
var builder = new StringBuilder();
for (int i = 0; i < length; i++)
{
char c;
if (TryPeekChar(i, out c))
{
builder.Append(c);
}
else
{
result = null;
return false;
}
}
result = builder.ToString();
return true;
}
private bool TryPeekChar(int offset, out char result)
{
// Read characters from the text reader into the lookahead buffer
// until we reach the character at 'offset'.
for (int i = lookaheadBuffer.Count; i <= offset; i++)
{
char c;
if (CheckReaderResult(reader.Read(), out c))
{
lookaheadBuffer.Add(c);
}
else
{
result = default(char);
return false;
}
}
result = lookaheadBuffer[offset];
return true;
}
        /// <summary>
        /// Peeks at the next character in the stream without consuming it.
        /// </summary>
        private bool TryPeekChar(out char result)
        {
            return TryPeekChar(0, out result);
        }
        /// <summary>
        /// Consumes the next character if it equals <paramref name="expected"/>.
        /// </summary>
        private bool Expect(char expected)
        {
            return Expect(c => c == expected);
        }
private bool Expect(Predicate<char> predicate)
{
char c;
if (TryPeekChar(out c) && predicate(c))
{
SkipChar();
return true;
}
else
{
return false;
}
}
        /// <summary>
        /// Reads and consumes the next character in the stream.
        /// </summary>
        /// <returns><c>true</c> if a character was read; otherwise, <c>false</c>.</returns>
        private bool TryReadChar(out char result)
        {
            bool hasRead;
            if (lookaheadBuffer.Count > 0)
            {
                // Serve previously-peeked characters first so peeks and reads
                // agree. NOTE(review): RemoveAt(0) is O(n) in the buffer size;
                // acceptable for the short lookaheads this lexer performs.
                result = lookaheadBuffer[0];
                lookaheadBuffer.RemoveAt(0);
                hasRead = true;
            }
            else
            {
                hasRead = CheckReaderResult(reader.Read(), out result);
            }
            if (hasRead)
            {
                // 'offset' counts consumed characters and doubles as the
                // current position in the source document.
                offset++;
            }
            return hasRead;
        }
private bool CheckReaderResult(int c, out char result)
{
if (c <= 0)
{
result = default(char);
return false;
}
else
{
result = (char)c;
return true;
}
}
        /// <summary>
        /// Reads a numeric literal token (integer or float), including an
        /// optional leading sign.
        /// </summary>
        private Token ReadNumberToken()
        {
            var spanStart = offset;
            bool isNegated = false;
            bool isSigned = false;
            if (Expect('+'))
            {
                isSigned = true;
            }
            else if (Expect('-'))
            {
                isSigned = true;
                isNegated = true;
            }
            object val;
            if (TryReadUnsignedNumber(isNegated, out val))
            {
                // Integer magnitudes parse as BigInteger; everything else is a
                // float literal. An explicit sign prefix makes an integer a
                // signed-integer token.
                return new Token(
                    val is BigInteger ? (isSigned ? TokenKind.SignedInteger : TokenKind.UnsignedInteger) : TokenKind.Float,
                    new SourceSpan(document, spanStart, offset - spanStart),
                    val);
            }
            else
            {
                // Not a well-formed number; re-lex the run as a reserved token.
                return ReadReservedToken(spanStart);
            }
        }
        /// <summary>
        /// Tries to read an unsigned number: a NaN, an infinity, or a decimal
        /// or hexadecimal integer/float magnitude.
        /// </summary>
        /// <param name="negate">Whether the (already-consumed) sign was negative.</param>
        /// <param name="result">
        /// On success, a <c>BigInteger</c> (integer) or <c>FloatLiteral</c> (float).
        /// </param>
        /// <returns><c>true</c> if a number was read; otherwise, <c>false</c>.</returns>
        private bool TryReadUnsignedNumber(bool negate, out object result)
        {
            if (Expect("nan:0x"))
            {
                // NaN with an explicit hexadecimal payload, e.g. 'nan:0x7f'.
                BigInteger hexNum;
                if (TryReadHexNum(out hexNum))
                {
                    result = FloatLiteral.NaN(negate, (long)hexNum);
                    return true;
                }
                else
                {
                    // 'nan:0x' with no digits is malformed.
                    result = FloatLiteral.NaN(negate);
                    return false;
                }
            }
            else if (Expect("nan"))
            {
                result = FloatLiteral.NaN(negate);
                return true;
            }
            else if (Expect("inf"))
            {
                result = MaybeNegate(FloatLiteral.PositiveInfinity, negate);
                return true;
            }
            else if (Expect("0x"))
            {
                // Hexadecimal number: base-2 exponent introduced by 'p'.
                return TryReadUnsignedNumber(
                    negate,
                    out result,
                    TryReadHexNum,
                    TryReadHexFrac,
                    'p',
                    2);
            }
            else
            {
                // Decimal number: base-10 exponent introduced by 'e'.
                return TryReadUnsignedNumber(
                    negate,
                    out result,
                    TryReadNum,
                    TryReadFrac,
                    'e',
                    10);
            }
        }
private FloatLiteral MaybeNegate(FloatLiteral v, bool negate)
{
return negate ? -v : v;
}
private BigInteger MaybeNegate(BigInteger v, bool negate)
{
return negate ? -v : v;
}
        // Reads an integer magnitude in some base; returns false if no digits
        // could be read.
        private delegate bool IntegerReader(out BigInteger result);
        // Reads a fractional part in some base; returns false if no digits
        // could be read.
        private delegate bool FloatReader(out FloatLiteral result);
        /// <summary>
        /// Tries to read an unsigned integer or float magnitude, parameterized
        /// over the digit readers and exponent syntax of a particular base.
        /// </summary>
        /// <param name="negate">Whether the (already-consumed) sign was negative.</param>
        /// <param name="result">
        /// On success, a <c>BigInteger</c> (integer) or <c>FloatLiteral</c> (float).
        /// </param>
        /// <param name="tryReadNum">Reads the integral digits.</param>
        /// <param name="tryReadFrac">Reads the fractional digits after a '.'.</param>
        /// <param name="exponentChar">The (lowercase) exponent marker, 'e' or 'p'.</param>
        /// <param name="exponent">The base of the exponent, 10 or 2.</param>
        private bool TryReadUnsignedNumber(
            bool negate,
            out object result,
            IntegerReader tryReadNum,
            FloatReader tryReadFrac,
            char exponentChar,
            int exponent)
        {
            BigInteger num;
            if (tryReadNum(out num))
            {
                if (Expect('.'))
                {
                    // A '.' makes this a float; the fraction may be empty
                    // (e.g. '1.').
                    FloatLiteral frac;
                    if (!tryReadFrac(out frac))
                    {
                        frac = FloatLiteral.Zero(exponent);
                    }
                    var floatNum = num + frac;
                    if (Expect(exponentChar) || Expect(char.ToUpperInvariant(exponentChar)))
                    {
                        floatNum = floatNum.ChangeBase(exponent);
                        if (!TryAppendExponent(floatNum, out floatNum))
                        {
                            result = null;
                            return false;
                        }
                    }
                    result = MaybeNegate(floatNum, negate);
                }
                else if (Expect(exponentChar) || Expect(char.ToUpperInvariant(exponentChar)))
                {
                    // No '.', but an exponent marker still makes this a float.
                    FloatLiteral floatNum;
                    if (!TryAppendExponent(FloatLiteral.Number(num, exponent), out floatNum))
                    {
                        result = null;
                        return false;
                    }
                    result = MaybeNegate(floatNum, negate);
                }
                else
                {
                    // Plain integer.
                    result = MaybeNegate(num, negate);
                }
                return true;
            }
            else
            {
                result = null;
                return false;
            }
        }
/// <summary>
/// Reads an (optionally signed) exponent and appends it to a float literal.
/// </summary>
/// <param name="floatNum">The float literal to extend.</param>
/// <param name="result">The literal with the exponent applied, or the original literal on failure.</param>
/// <returns><c>true</c> if an exponent was parsed; otherwise, <c>false</c>.</returns>
private bool TryAppendExponent(
    FloatLiteral floatNum,
    out FloatLiteral result)
{
    // A '-' flips the exponent's sign; a '+' is consumed but has no effect.
    bool negativeExponent = Expect('-');
    if (!negativeExponent)
    {
        Expect('+');
    }
    BigInteger exponentValue;
    if (TryReadNum(out exponentValue))
    {
        result = floatNum.AddToExponent(MaybeNegate(exponentValue, negativeExponent));
        return true;
    }
    result = floatNum;
    return false;
}
/// <summary>
/// Reads a keyword token: a lowercase letter followed by identifier
/// characters. Float-like keywords ("nan", "nan:0x...", "inf") are turned
/// into float tokens instead.
/// </summary>
/// <returns>A keyword, float or reserved token.</returns>
private Token ReadKeywordToken()
{
    var spanStart = offset;
    char c;
    // Keywords must start with a lowercase ASCII letter.
    if (!TryPeekChar(out c) || c < 'a' || c > 'z')
    {
        return ReadReservedToken(spanStart);
    }
    var builder = new StringBuilder();
    while (TryReadIdentifierChar(out c))
    {
        builder.Append(c);
    }
    var span = new SourceSpan(document, spanStart, offset - spanStart);
    var result = builder.ToString();
    // Some floating point tokens look like keywords, so we'll handle
    // them here as well as in the FP parsing routine.
    if (result == "nan")
    {
        return new Token(TokenKind.Float, span, FloatLiteral.NaN(false));
    }
    else if (result.StartsWith("nan:0x", StringComparison.Ordinal))
    {
        // Re-parse the text after "nan:0x" as hex NaN payload bits.
        var payload = result.Substring("nan:0x".Length);
        long newBits = 0;
        for (int i = 0; i < payload.Length; i++)
        {
            int digit;
            if (payload[i] == '_')
            {
                // '_' is a digit separator; skip it.
                continue;
            }
            else if (TryParseHexDigit(payload[i], out digit))
            {
                newBits = newBits * 16 + digit;
            }
            else
            {
                // A non-hex character means this is an ordinary keyword after all.
                return new Token(TokenKind.Keyword, span, result);
            }
        }
        return new Token(TokenKind.Float, span, FloatLiteral.NaN(false, newBits));
    }
    else if (result == "inf")
    {
        return new Token(TokenKind.Float, span, FloatLiteral.PositiveInfinity);
    }
    else
    {
        return new Token(TokenKind.Keyword, span, result);
    }
}
/// <summary>
/// Reads an identifier token: a '$' followed by one or more identifier characters.
/// </summary>
/// <returns>An identifier token, or a reserved token if the input is malformed.</returns>
private Token ReadIdentifierToken()
{
    var start = offset;
    if (!Expect('$'))
    {
        return ReadReservedToken(start);
    }
    var name = new StringBuilder();
    char idChar;
    while (TryReadIdentifierChar(out idChar))
    {
        name.Append(idChar);
    }
    // A lone '$' is not a valid identifier.
    if (name.Length == 0)
    {
        return ReadReservedToken(start);
    }
    return new Token(
        TokenKind.Identifier,
        new SourceSpan(document, start, offset - start),
        name.ToString());
}
/// <summary>
/// Consumes the next character if it is a valid identifier character.
/// </summary>
/// <param name="c">The identifier character, or '\0' when the next character is not one.</param>
/// <returns><c>true</c> if an identifier character was consumed; otherwise, <c>false</c>.</returns>
private bool TryReadIdentifierChar(out char c)
{
    if (!TryPeekChar(out c))
    {
        return false;
    }
    if (!IsIdentifierChar(c))
    {
        // Leave the non-identifier character in the stream.
        c = '\0';
        return false;
    }
    SkipChar();
    return true;
}
/// <summary>
/// Tells if a character may appear in an identifier or keyword.
/// </summary>
/// <param name="c">The character to classify.</param>
/// <returns><c>true</c> if the character is an identifier character; otherwise, <c>false</c>.</returns>
private static bool IsIdentifierChar(char c)
{
    // Alphanumeric ASCII characters are always allowed.
    if ((c >= '0' && c <= '9')
        || (c >= 'a' && c <= 'z')
        || (c >= 'A' && c <= 'Z'))
    {
        return true;
    }
    // The text format also permits this fixed set of punctuation characters.
    return "!#$%&'*+-./:<=>?@\\^_`|~".IndexOf(c) >= 0;
}
/// <summary>
/// Reads a string literal token: a '"'-delimited sequence of characters and
/// escape sequences, decoded to a UTF-8 byte array.
/// </summary>
/// <returns>A string token whose value is a byte array, or a reserved token on malformed input.</returns>
private Token ReadStringToken()
{
    var spanStart = offset;
    if (!Expect('"'))
    {
        return ReadReservedToken(spanStart);
    }
    // The token's value is the decoded byte sequence, not the raw source text.
    var builder = new List<byte>();
    char c;
    while (TryReadChar(out c))
    {
        if (c == '"')
        {
            // Closing quote: the string literal is complete.
            return new Token(
                TokenKind.String,
                new SourceSpan(document, spanStart, offset - spanStart),
                builder.ToArray());
        }
        else if (c == '\\')
        {
            if (!TryReadChar(out c))
            {
                return ReadReservedToken(spanStart);
            }
            switch (c)
            {
                case '\\':
                    builder.Add((byte)'\\');
                    break;
                case 'n':
                    builder.Add((byte)'\n');
                    break;
                case 'r':
                    builder.Add((byte)'\r');
                    break;
                case 't':
                    builder.Add((byte)'\t');
                    break;
                case '"':
                    builder.Add((byte)'"');
                    break;
                case '\'':
                    builder.Add((byte)'\'');
                    break;
                case 'u':
                    // Unicode escape: \u{hexnum} encodes a code point as UTF-8.
                    BigInteger num;
                    if (Expect('{') && TryReadHexNum(out num) && Expect('}'))
                    {
                        builder.AddRange(Encoding.UTF8.GetBytes(char.ConvertFromUtf32((int)num)));
                        continue;
                    }
                    return ReadReservedToken(spanStart);
                default:
                    // Two-digit hex escape: \xy encodes a single raw byte.
                    int firstDigit, secondDigit;
                    if (TryParseHexDigit(c, out firstDigit) && TryReadHexDigit(out secondDigit))
                    {
                        builder.Add((byte)(16 * firstDigit + secondDigit));
                        break;
                    }
                    else
                    {
                        return ReadReservedToken(spanStart);
                    }
            }
        }
        else if (IsDisallowedInString(c))
        {
            // Control characters must be escaped; treat the literal as malformed.
            return ReadReservedToken(spanStart);
        }
        else if (char.IsHighSurrogate(c))
        {
            // Surrogate pairs are recombined before UTF-8 encoding; an
            // unpaired high surrogate is malformed.
            var high = c;
            if (!TryReadChar(out c) || !char.IsLowSurrogate(c))
            {
                return ReadReservedToken(spanStart);
            }
            var low = c;
            builder.AddRange(Encoding.UTF8.GetBytes(new string(new[] { high, low })));
        }
        else
        {
            builder.AddRange(Encoding.UTF8.GetBytes(c.ToString()));
        }
    }
    // End of input before the closing quote.
    return ReadReservedToken(spanStart);
}
/// <summary>
/// Tells if a character may not appear literally (unescaped) in a string literal.
/// </summary>
/// <param name="c">The character to classify.</param>
/// <returns><c>true</c> if the character must be escaped; otherwise, <c>false</c>.</returns>
private static bool IsDisallowedInString(char c)
{
    // ASCII control characters (below U+0020) and DEL (U+007F) are disallowed.
    return c == '\u007f' || c < '\u0020';
}
/// <summary>
/// Reads a reserved token: a maximal run of characters, starting at
/// <paramref name="start"/>, that did not match any other token class.
/// </summary>
/// <param name="start">The offset at which the malformed token started.</param>
/// <returns>A reserved token spanning the gobbled characters.</returns>
private Token ReadReservedToken(int start)
{
    // Characters consumed before this method was called also belong to the token.
    int count = offset - start;
    while (!SkipWhitespace())
    {
        char c;
        if (TryReadChar(out c))
        {
            // NOTE(review): a terminating '(', ')' or '"' is consumed here
            // (TryReadChar, not a peek) yet excluded from the span's count —
            // confirm that swallowing the delimiter is intentional.
            if (c == ')' || c == '(' || c == '"')
            {
                break;
            }
            count++;
        }
        else
        {
            // End of input also terminates the reserved token.
            break;
        }
    }
    return new Token(TokenKind.Reserved, new SourceSpan(document, start, count), null);
}
/// <summary>
/// Reads the fractional part of a hexadecimal float literal.
/// </summary>
/// <param name="frac">The parsed fraction, or a base-16 zero on failure.</param>
/// <returns><c>true</c> if a fraction was parsed; otherwise, <c>false</c>.</returns>
private bool TryReadHexFrac(out FloatLiteral frac)
{
    return TryReadFrac(out frac, 16, (acc, digitChar) =>
    {
        int digit;
        if (!TryParseHexDigit(digitChar, out digit))
        {
            return null;
        }
        return acc * 16 + digit;
    });
}
/// <summary>
/// Reads the fractional part of a decimal float literal.
/// </summary>
/// <param name="frac">The parsed fraction, or a base-10 zero on failure.</param>
/// <returns><c>true</c> if a fraction was parsed; otherwise, <c>false</c>.</returns>
private bool TryReadFrac(out FloatLiteral frac)
{
    return TryReadFrac(out frac, 10, (acc, digitChar) =>
    {
        if (digitChar < '0' || digitChar > '9')
        {
            return null;
        }
        return acc * 10 + (digitChar - '0');
    });
}
/// <summary>
/// Reads the fractional part of a float literal, tracking both the
/// accumulated digits and the number of digits read.
/// </summary>
/// <param name="frac">The parsed fraction, or a zero literal on failure.</param>
/// <param name="fracBase">The base of the fraction's digits.</param>
/// <param name="tryAccumulateFracDigit">Folds one digit into the accumulator, or yields <c>null</c> for a non-digit.</param>
/// <returns><c>true</c> if a fraction was parsed; otherwise, <c>false</c>.</returns>
private bool TryReadFrac(
    out FloatLiteral frac,
    int fracBase,
    Func<BigInteger, char, BigInteger?> tryAccumulateFracDigit)
{
    (BigInteger, int) state;
    bool parsed = TryReadNum(out state, (BigInteger.Zero, 0), (acc, digitChar) =>
    {
        var (digits, digitCount) = acc;
        var folded = tryAccumulateFracDigit(digits, digitChar);
        if (!folded.HasValue)
        {
            return null;
        }
        return (folded.Value, digitCount + 1);
    });
    if (!parsed)
    {
        frac = FloatLiteral.Zero(fracBase);
        return false;
    }
    var (value, length) = state;
    // Each digit after the point divides the value by the base once more,
    // hence the negative exponent equal to the digit count.
    frac = FloatLiteral.Number(value, fracBase, -length);
    return true;
}
/// <summary>
/// Generalized number parser: folds characters into an accumulator until a
/// character is rejected, skipping '_' digit separators.
/// </summary>
/// <param name="num">The accumulated value; <paramref name="init"/> when nothing was parsed.</param>
/// <param name="init">The initial accumulator value.</param>
/// <param name="tryAccumulate">Folds one character into the accumulator, or yields <c>null</c> to stop.</param>
/// <returns><c>true</c> if at least one digit was parsed; otherwise, <c>false</c>.</returns>
private bool TryReadNum<T>(
    out T num,
    T init,
    Func<T, char, T?> tryAccumulate)
    where T : struct
{
    bool parsed = false;
    var acc = init;
    char c;
    while (TryPeekChar(out c))
    {
        if (c == '_')
        {
            // '_' is a digit separator: consume it, but do not mark the
            // number as parsed. Previously a lone "_" counted as a parsed
            // number (value zero); separators without digits now fail.
            SkipChar();
        }
        else
        {
            var maybeAcc = tryAccumulate(acc, c);
            if (maybeAcc.HasValue)
            {
                acc = maybeAcc.Value;
                parsed = true;
                SkipChar();
            }
            else
            {
                // First rejected character ends the number; leave it unread.
                break;
            }
        }
    }
    num = acc;
    return parsed;
}
/// <summary>
/// Reads a <c>BigInteger</c> with the given digit folder, starting from zero.
/// </summary>
/// <param name="num">The parsed integer.</param>
/// <param name="tryAccumulate">Folds one character into the accumulator, or yields <c>null</c> to stop.</param>
/// <returns><c>true</c> if an integer was parsed; otherwise, <c>false</c>.</returns>
private bool TryReadNum(
    out BigInteger num,
    Func<BigInteger, char, BigInteger?> tryAccumulate)
{
    var seed = BigInteger.Zero;
    return TryReadNum(out num, seed, tryAccumulate);
}
/// <summary>
/// Reads a nonnegative decimal integer.
/// </summary>
/// <param name="num">The parsed integer.</param>
/// <returns><c>true</c> if an integer was parsed; otherwise, <c>false</c>.</returns>
private bool TryReadNum(out BigInteger num)
{
    return TryReadNum(out num, (acc, digitChar) =>
    {
        if (digitChar < '0' || digitChar > '9')
        {
            return null;
        }
        return acc * 10 + (digitChar - '0');
    });
}
/// <summary>
/// Reads a nonnegative hexadecimal integer.
/// </summary>
/// <param name="num">The parsed integer.</param>
/// <returns><c>true</c> if an integer was parsed; otherwise, <c>false</c>.</returns>
private bool TryReadHexNum(out BigInteger num)
{
    return TryReadNum(out num, (acc, digitChar) =>
    {
        int digit;
        if (!TryParseHexDigit(digitChar, out digit))
        {
            return null;
        }
        return acc * 16 + digit;
    });
}
/// <summary>
/// Consumes a single hexadecimal digit.
/// </summary>
/// <param name="result">The digit's value, or zero on failure.</param>
/// <returns><c>true</c> if a hex digit was consumed; otherwise, <c>false</c>.</returns>
private bool TryReadHexDigit(out int result)
{
    char c;
    if (!TryPeekChar(out c) || !TryParseHexDigit(c, out result))
    {
        result = 0;
        return false;
    }
    // Only consume the character once it is known to be a hex digit.
    SkipChar();
    return true;
}
/// <summary>
/// Parses a single hexadecimal digit character.
/// </summary>
/// <param name="c">The character to parse.</param>
/// <param name="result">The digit's value, or zero if <paramref name="c"/> is not a hex digit.</param>
/// <returns><c>true</c> if the character is a hex digit; otherwise, <c>false</c>.</returns>
private static bool TryParseHexDigit(char c, out int result)
{
    if (c >= '0' && c <= '9')
    {
        result = c - '0';
        return true;
    }
    if (c >= 'a' && c <= 'f')
    {
        result = c - 'a' + 10;
        return true;
    }
    if (c >= 'A' && c <= 'F')
    {
        result = c - 'A' + 10;
        return true;
    }
    result = 0;
    return false;
}
/// <summary>
/// A token as parsed by the lexer.
/// </summary>
public struct Token
{
    /// <summary>
    /// Creates a new token.
    /// </summary>
    /// <param name="kind">The token's kind.</param>
    /// <param name="span">The span in the source document that defines the token.</param>
    /// <param name="value">The token's parsed value, if applicable.</param>
    public Token(TokenKind kind, SourceSpan span, object value)
    {
        Kind = kind;
        Span = span;
        Value = value;
    }

    /// <summary>
    /// Creates a synthetic token: one backed by a generated document
    /// rather than user-written source text.
    /// </summary>
    /// <param name="value">The token's value.</param>
    /// <param name="kind">The type of the token to synthesize.</param>
    /// <returns>A synthetic token.</returns>
    public static Token Synthesize(object value, TokenKind kind = TokenKind.Synthetic)
    {
        var doc = new StringDocument("<synthetic>", value.ToString());
        return new Token(kind, new SourceSpan(doc, 0, doc.Length), value);
    }

    /// <summary>
    /// Gets the token's kind.
    /// </summary>
    /// <value>A token kind.</value>
    public TokenKind Kind { get; private set; }

    /// <summary>
    /// Gets the span in the source document that defines the token.
    /// </summary>
    /// <value>A source span.</value>
    public SourceSpan Span { get; private set; }

    /// <summary>
    /// Gets the token's parsed value, if applicable.
    /// </summary>
    /// <value>A parsed value.</value>
    public object Value { get; private set; }

    /// <inheritdoc/>
    public override string ToString()
    {
        var valuePart = Value == null ? "" : " " + Value;
        return string.Format("[{0}{1} ('{2}')]", Kind, valuePart, Span.Text);
    }
}
/// <summary>
/// An enumeration of different kinds of tokens.
/// </summary>
public enum TokenKind
{
    /// <summary>
    /// Indicates that a token represents a keyword.
    /// </summary>
    Keyword,
    /// <summary>
    /// Indicates that a token represents a signed integer,
    /// i.e., an integer literal written with an explicit sign.
    /// </summary>
    SignedInteger,
    /// <summary>
    /// Indicates that a token represents an unsigned integer,
    /// i.e., an integer literal written without a sign.
    /// </summary>
    UnsignedInteger,
    /// <summary>
    /// Indicates that a token represents a floating-point number.
    /// </summary>
    Float,
    /// <summary>
    /// Indicates that a token represents a string literal.
    /// </summary>
    String,
    /// <summary>
    /// Indicates that a token represents an identifier.
    /// </summary>
    Identifier,
    /// <summary>
    /// Indicates that a token represents a left parenthesis.
    /// </summary>
    LeftParenthesis,
    /// <summary>
    /// Indicates that a token represents a right parenthesis.
    /// </summary>
    RightParenthesis,
    /// <summary>
    /// Indicates that a token is reserved. These tokens should not show up
    /// in well-formed WebAssembly text format input; the lexer produces
    /// them for malformed character sequences.
    /// </summary>
    Reserved,
    /// <summary>
    /// A token that was generated by some component other than the lexer.
    /// Synthetic tokens are never user-created.
    /// </summary>
    Synthetic
}
}
}
<|start_filename|>libwasm/ExternalKind.cs<|end_filename|>
namespace Wasm
{
/// <summary>
/// A single-byte unsigned integer indicating the kind of definition being imported or defined.
/// </summary>
/// <remarks>
/// The explicit values are significant: do not reorder or renumber them.
/// </remarks>
public enum ExternalKind : byte
{
    /// <summary>
    /// Indicates a Function import or definition.
    /// </summary>
    Function = 0,
    /// <summary>
    /// Indicates a Table import or definition.
    /// </summary>
    Table = 1,
    /// <summary>
    /// Indicates a Memory import or definition.
    /// </summary>
    Memory = 2,
    /// <summary>
    /// Indicates a Global import or definition.
    /// </summary>
    Global = 3
}
}
<|start_filename|>libwasm/Optimize/FunctionBodyOptimizations.cs<|end_filename|>
using System;
using System.Collections.Generic;
namespace Wasm.Optimize
{
/// <summary>
/// Defines function body optimizations.
/// </summary>
public static class FunctionBodyOptimizations
{
    /// <summary>
    /// Merges adjacent local entries that have the same type and deletes empty
    /// local entries.
    /// </summary>
    /// <param name="body">The function body whose locals are to be compressed.</param>
    public static void CompressLocalEntries(this FunctionBody body)
    {
        var compressed = new List<LocalEntry>();
        // The pending run of locals that may still absorb its successors.
        // Starting with an empty Int32 entry lets a leading Int32 run merge
        // into it transparently; empty runs are never emitted.
        var pending = new LocalEntry(WasmValueType.Int32, 0);
        foreach (var entry in body.Locals)
        {
            if (entry.LocalType == pending.LocalType)
            {
                // Same type: fold this entry into the pending run.
                pending = new LocalEntry(pending.LocalType, pending.LocalCount + entry.LocalCount);
            }
            else
            {
                // Type changed: flush the pending run unless it is empty,
                // then start a new run with the current entry.
                if (pending.LocalCount > 0)
                {
                    compressed.Add(pending);
                }
                pending = entry;
            }
        }
        // Flush the final run.
        if (pending.LocalCount > 0)
        {
            compressed.Add(pending);
        }
        // Swap the new entries in for the old ones.
        body.Locals.Clear();
        body.Locals.AddRange(compressed);
    }

    /// <summary>
    /// Modifies the function body's local declarations such that every entry
    /// declares exactly one local. Empty local entries are deleted and local
    /// entries that declare n locals are replaced by n local entries that
    /// declare one local.
    /// </summary>
    /// <param name="body">The function body to update.</param>
    public static void ExpandLocalEntries(this FunctionBody body)
    {
        // Build an equivalent list in which each entry declares one local.
        var expanded = new List<LocalEntry>();
        foreach (var entry in body.Locals)
        {
            for (uint n = 0; n < entry.LocalCount; n++)
            {
                expanded.Add(new LocalEntry(entry.LocalType, 1));
            }
        }
        // Swap the new entries in for the old ones.
        body.Locals.Clear();
        body.Locals.AddRange(expanded);
    }
}
}
<|start_filename|>libwasm/Interpret/InstructionInterpreter.cs<|end_filename|>
using Wasm.Instructions;
namespace Wasm.Interpret
{
/// <summary>
/// A type that handles the execution of instructions.
/// </summary>
public abstract class InstructionInterpreter
{
    /// <summary>
    /// Interprets the given instruction within the specified context.
    /// Concrete interpreters override this to execute
    /// <paramref name="value"/> against <paramref name="context"/>.
    /// </summary>
    /// <param name="value">The instruction to interpret.</param>
    /// <param name="context">The interpreter context.</param>
    public abstract void Interpret(Instruction value, InterpreterContext context);
}
}
<|start_filename|>libwasm/Interpret/LinearMemory.cs<|end_filename|>
using System.Collections.Generic;
namespace Wasm.Interpret
{
/// <summary>
/// Describes an instance of a linear memory specification.
/// </summary>
public sealed class LinearMemory
{
    /// <summary>
    /// Creates a linear memory from the given specification.
    /// </summary>
    /// <param name="limits">The specification for this linear memory: a range in units of pages.</param>
    public LinearMemory(ResizableLimits limits)
    {
        this.Limits = limits;
        this.memory = new List<byte>();
        GrowToSize(limits.Initial);
    }

    // Backing store: one byte per address, zero-filled on growth.
    private List<byte> memory;

    /// <summary>
    /// Gets the specification for this linear memory.
    /// </summary>
    /// <returns>The specification for this linear memory.</returns>
    public ResizableLimits Limits { get; private set; }

    /// <summary>
    /// Gets the size of the linear memory in units of pages.
    /// </summary>
    /// <returns>The size of the linear memory in units of pages.</returns>
    public uint Size => (uint)memory.Count / MemoryType.PageSize;

    /// <summary>
    /// Grows the memory to the given number of pages.
    /// Return the previous memory size in units of pages or -1 on failure.
    /// </summary>
    /// <param name="newSize">The new number of pages in the linear memory.</param>
    /// <returns>The previous memory size in units of pages or -1 on failure.</returns>
    private int GrowToSize(uint newSize)
    {
        if (Limits.HasMaximum && newSize > Limits.Maximum.Value)
        {
            return -1;
        }
        int oldMemorySize = (int)Size;
        int newSizeInBytes = (int)(newSize * MemoryType.PageSize);
        if (newSizeInBytes > memory.Count)
        {
            // Zero-fill in a single bulk append rather than one Add call per
            // byte: growing by one page means 64 KiB of zeroes. Shrinking is
            // intentionally not supported; smaller sizes are a no-op.
            memory.AddRange(new byte[newSizeInBytes - memory.Count]);
        }
        return oldMemorySize;
    }

    /// <summary>
    /// Grows linear memory by a given unsigned delta of pages.
    /// Return the previous memory size in units of pages or -1 on failure.
    /// </summary>
    /// <param name="numberOfPages">The number of pages to grow the linear memory by.</param>
    /// <returns>The previous memory size in units of pages or -1 on failure.</returns>
    public int Grow(uint numberOfPages)
    {
        return GrowToSize(Size + numberOfPages);
    }

    /// <summary>
    /// Accesses linear memory as a sequence of 8-bit signed integers.
    /// </summary>
    /// <returns>A view of this memory.</returns>
    public LinearMemoryAsInt8 Int8
    {
        get { return new LinearMemoryAsInt8(memory); }
    }

    /// <summary>
    /// Accesses linear memory as a sequence of 16-bit signed integers.
    /// </summary>
    /// <returns>A view of this memory.</returns>
    public LinearMemoryAsInt16 Int16
    {
        get { return new LinearMemoryAsInt16(memory); }
    }

    /// <summary>
    /// Accesses linear memory as a sequence of 32-bit signed integers.
    /// </summary>
    /// <returns>A view of this memory.</returns>
    public LinearMemoryAsInt32 Int32
    {
        get { return new LinearMemoryAsInt32(memory); }
    }

    /// <summary>
    /// Accesses linear memory as a sequence of 64-bit signed integers.
    /// </summary>
    /// <returns>A view of this memory.</returns>
    public LinearMemoryAsInt64 Int64
    {
        get { return new LinearMemoryAsInt64(memory); }
    }

    /// <summary>
    /// Accesses linear memory as a sequence of 32-bit floating-point numbers.
    /// </summary>
    /// <returns>A view of this memory.</returns>
    public LinearMemoryAsFloat32 Float32
    {
        get { return new LinearMemoryAsFloat32(memory); }
    }

    /// <summary>
    /// Accesses linear memory as a sequence of 64-bit floating-point numbers.
    /// </summary>
    /// <returns>A view of this memory.</returns>
    public LinearMemoryAsFloat64 Float64
    {
        get { return new LinearMemoryAsFloat64(memory); }
    }

    /// <summary>
    /// Checks that an access of <paramref name="length"/> bytes at
    /// <paramref name="offset"/> stays within the memory's bounds.
    /// </summary>
    internal static void CheckBounds(List<byte> memory, uint offset, uint length)
    {
        // Add in 64-bit space so 'offset + length' cannot wrap around.
        if ((ulong)memory.Count < (ulong)offset + (ulong)length)
        {
            throw new TrapException(
                $"Memory access out of bounds: cannot access {length} bytes at offset {offset} in memory with length {memory.Count}.",
                TrapException.SpecMessages.OutOfBoundsMemoryAccess);
        }
    }
}
/// <summary>
/// Accesses linear memory as a sequence of 8-bit signed integers.
/// </summary>
public struct LinearMemoryAsInt8
{
    internal LinearMemoryAsInt8(List<byte> memory)
    {
        this.bytes = memory;
    }

    // The underlying byte store this view reads and writes.
    private List<byte> bytes;

    /// <summary>
    /// Gets or sets a value in memory at a particular byte offset.
    /// </summary>
    /// <value>A value in memory.</value>
    public sbyte this[uint offset]
    {
        get
        {
            LinearMemory.CheckBounds(bytes, offset, 1);
            return (sbyte)bytes[(int)offset];
        }
        set
        {
            LinearMemory.CheckBounds(bytes, offset, 1);
            bytes[(int)offset] = (byte)value;
        }
    }
}
/// <summary>
/// Accesses linear memory as a sequence of 16-bit signed integers.
/// </summary>
public struct LinearMemoryAsInt16
{
    internal LinearMemoryAsInt16(List<byte> memory)
    {
        this.bytes = memory;
    }

    // The underlying byte store this view reads and writes.
    private List<byte> bytes;

    /// <summary>
    /// Gets or sets a value in memory at a particular byte offset.
    /// Values are stored little-endian.
    /// </summary>
    /// <value>A value in memory.</value>
    public short this[uint offset]
    {
        get
        {
            LinearMemory.CheckBounds(bytes, offset, 2);
            int low = bytes[(int)offset];
            int high = bytes[(int)offset + 1];
            return (short)((high << 8) | low);
        }
        set
        {
            LinearMemory.CheckBounds(bytes, offset, 2);
            bytes[(int)offset] = (byte)value;
            bytes[(int)offset + 1] = (byte)(value >> 8);
        }
    }
}
/// <summary>
/// Accesses linear memory as a sequence of 32-bit signed integers.
/// </summary>
public struct LinearMemoryAsInt32
{
    internal LinearMemoryAsInt32(List<byte> memory)
    {
        this.bytes = memory;
    }

    // The underlying byte store this view reads and writes.
    private List<byte> bytes;

    /// <summary>
    /// Gets or sets a value in memory at a particular byte offset.
    /// Values are stored little-endian.
    /// </summary>
    /// <value>A value in memory.</value>
    public int this[uint offset]
    {
        get
        {
            LinearMemory.CheckBounds(bytes, offset, 4);
            int value = 0;
            // Fold the four bytes most-significant first.
            for (int i = 3; i >= 0; i--)
            {
                value = (value << 8) | bytes[(int)offset + i];
            }
            return value;
        }
        set
        {
            LinearMemory.CheckBounds(bytes, offset, 4);
            for (int i = 0; i < 4; i++)
            {
                bytes[(int)offset + i] = (byte)(value >> (8 * i));
            }
        }
    }
}
/// <summary>
/// Accesses linear memory as a sequence of 64-bit signed integers.
/// </summary>
public struct LinearMemoryAsInt64
{
    internal LinearMemoryAsInt64(List<byte> memory)
    {
        this.bytes = memory;
    }

    // The underlying byte store this view reads and writes.
    private List<byte> bytes;

    /// <summary>
    /// Gets or sets a value in memory at a particular byte offset.
    /// Values are stored little-endian.
    /// </summary>
    /// <value>A value in memory.</value>
    public long this[uint offset]
    {
        get
        {
            LinearMemory.CheckBounds(bytes, offset, 8);
            long value = 0;
            // Fold the eight bytes most-significant first.
            for (int i = 7; i >= 0; i--)
            {
                value = (value << 8) | bytes[(int)offset + i];
            }
            return value;
        }
        set
        {
            LinearMemory.CheckBounds(bytes, offset, 8);
            for (int i = 0; i < 8; i++)
            {
                bytes[(int)offset + i] = (byte)(value >> (8 * i));
            }
        }
    }
}
/// <summary>
/// Accesses linear memory as a sequence of 32-bit floating-point numbers.
/// </summary>
public struct LinearMemoryAsFloat32
{
    internal LinearMemoryAsFloat32(List<byte> memory)
    {
        this.bytes = memory;
    }

    // The underlying byte store this view reads and writes.
    private List<byte> bytes;

    /// <summary>
    /// Gets or sets a value in memory at a particular byte offset.
    /// Floats are stored via their 32-bit integer bit patterns.
    /// </summary>
    /// <value>A value in memory.</value>
    public float this[uint offset]
    {
        get
        {
            var intView = new LinearMemoryAsInt32(bytes);
            return ValueHelpers.ReinterpretAsFloat32(intView[offset]);
        }
        set
        {
            var intView = new LinearMemoryAsInt32(bytes);
            intView[offset] = ValueHelpers.ReinterpretAsInt32(value);
        }
    }
}
/// <summary>
/// Accesses linear memory as a sequence of 64-bit floating-point numbers.
/// </summary>
public struct LinearMemoryAsFloat64
{
    internal LinearMemoryAsFloat64(List<byte> memory)
    {
        this.bytes = memory;
    }

    // The underlying byte store this view reads and writes.
    private List<byte> bytes;

    /// <summary>
    /// Gets or sets a value in memory at a particular byte offset.
    /// Doubles are stored via their 64-bit integer bit patterns.
    /// </summary>
    /// <value>A value in memory.</value>
    public double this[uint offset]
    {
        get
        {
            var longView = new LinearMemoryAsInt64(bytes);
            return ValueHelpers.ReinterpretAsFloat64(longView[offset]);
        }
        set
        {
            var longView = new LinearMemoryAsInt64(bytes);
            longView[offset] = ValueHelpers.ReinterpretAsInt64(value);
        }
    }
}
}
<|start_filename|>libwasm/Optimize/Peephole/TeeLocalOptimization.cs<|end_filename|>
using System.Collections.Generic;
using Wasm.Instructions;
namespace Wasm.Optimize
{
/// <summary>
/// An optimization that rewrites `set_local x; get_local x` as `tee_local x`.
/// </summary>
public sealed class TeeLocalOptimization : PeepholeOptimization
{
    private TeeLocalOptimization() { }

    /// <summary>
    /// The only instance of this optimization.
    /// </summary>
    public static readonly TeeLocalOptimization Instance = new TeeLocalOptimization();

    /// <summary>
    /// Tests if the items at the front of the given list of instructions
    /// match the peephole optimization; if a match occurs, a nonzero value
    /// is returned that indicates the number of instructions at the front
    /// of the list of instructions that should be rewritten.
    /// </summary>
    /// <param name="instructions">
    /// The instructions to match against the peephole optimization.
    /// </param>
    /// <returns>The number of instructions to rewrite.</returns>
    public override uint Match(IReadOnlyList<Instruction> instructions)
    {
        // Require a 'set_local' immediately followed by a 'get_local'.
        if (instructions.Count < 2
            || instructions[0].Op != Operators.SetLocal
            || instructions[1].Op != Operators.GetLocal)
        {
            return 0;
        }
        var store = Operators.SetLocal.CastInstruction(instructions[0]);
        var load = Operators.GetLocal.CastInstruction(instructions[1]);
        // Only match when both instructions refer to the same local.
        return store.Immediate == load.Immediate ? 2u : 0u;
    }

    /// <summary>
    /// Rewrites the given sequence of instructions.
    /// </summary>
    /// <param name="matched">
    /// A list of instructions that has been matched and will all be replaced.
    /// </param>
    /// <returns>The rewritten instructions.</returns>
    public override IReadOnlyList<Instruction> Rewrite(IReadOnlyList<Instruction> matched)
    {
        var store = Operators.SetLocal.CastInstruction(matched[0]);
        // A single 'tee_local' has the same effect as the store/load pair.
        return new Instruction[] { Operators.TeeLocal.Create(store.Immediate) };
    }
}
}
<|start_filename|>wasm-interp/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using Wasm.Interpret.BaseRuntime;
namespace Wasm.Interpret
{
/// <summary>
/// Represents interpreter command-line options.
/// </summary>
public struct InterpreterArguments
{
    /// <summary>
    /// Gets a path to the WebAssembly file to load.
    /// </summary>
    public string WasmFilePath { get; private set; }

    /// <summary>
    /// Gets the name of the function to run, if any.
    /// </summary>
    public string FunctionToRun { get; private set; }

    /// <summary>
    /// Gets the name of the importer to use.
    /// </summary>
    /// <returns>The name of the importer.</returns>
    public string ImporterName { get; private set; }

    /// <summary>
    /// Gets a Boolean flag that specifies if an instruction trace is to be
    /// sent to standard error.
    /// </summary>
    /// <returns>The tracing flag.</returns>
    public bool TraceExecution { get; private set; }

    /// <summary>
    /// Gets the arguments for the function to run, if any.
    /// </summary>
    public object[] FunctionArgs { get; private set; }

    /// <summary>
    /// The name of the default importer, backed by <c>SpecTestImporter</c>.
    /// </summary>
    public const string SpectestImporterName = "spectest";

    /// <summary>
    /// The name of the importer that constructs a <c>SpecTestImporter</c>
    /// with a single-space separator.
    /// </summary>
    public const string SpectestWithSpacesImporterName = "spectest-spaces";

    /// <summary>
    /// The name of the importer that wires the console's standard streams
    /// into a <c>TerminalRuntime</c>-based importer.
    /// </summary>
    public const string BaseRuntimeImporterName = "base-runtime";

    /// <summary>
    /// Tries to create an importer for these options.
    /// </summary>
    /// <param name="Result">The importer.</param>
    /// <returns><c>true</c> if <c>ImporterName</c> identifies an importer; otherwise, <c>false</c>.</returns>
    public bool TryGetImporter(out IImporter Result)
    {
        // A null importer name falls back to the default 'spectest' importer.
        if (ImporterName == null
            || ImporterName.Equals(SpectestImporterName, StringComparison.OrdinalIgnoreCase))
        {
            Result = new SpecTestImporter();
            return true;
        }
        else if (ImporterName.Equals(SpectestWithSpacesImporterName, StringComparison.OrdinalIgnoreCase))
        {
            Result = new SpecTestImporter(" ");
            return true;
        }
        else if (ImporterName.Equals(BaseRuntimeImporterName, StringComparison.OrdinalIgnoreCase))
        {
            var importer = new PredefinedImporter();
            TerminalRuntime.IncludeDefinitionsIn(
                Console.OpenStandardInput(),
                Console.OpenStandardOutput(),
                Console.OpenStandardError(),
                importer);
            Result = importer;
            return true;
        }
        else
        {
            Result = null;
            return false;
        }
    }

    /// <summary>
    /// Tries to read command-line options.
    /// </summary>
    /// <param name="Args">The raw command-line arguments.</param>
    /// <param name="ParsedArgs">The parsed options.</param>
    /// <returns><c>true</c> if the arguments are well-formed; otherwise, <c>false</c>.</returns>
    public static bool TryRead(string[] Args, out InterpreterArguments ParsedArgs)
    {
        ParsedArgs = default(InterpreterArguments);
        // Simple state machine: flags below toggle how the next argument
        // is interpreted.
        bool expectingRunFuncName = false;
        bool expectingImporterName = false;
        bool expectingArgs = false;
        var funcArgs = new List<object>();
        for (int i = 0; i < Args.Length; i++)
        {
            if (expectingArgs)
            {
                // Everything after the function name is a function argument.
                // An 'l' suffix forces a 64-bit integer, an 'f' suffix a
                // 32-bit float; otherwise int, then uint, then double is tried.
                string argStr = Args[i];
                if (argStr.EndsWith("l", StringComparison.OrdinalIgnoreCase))
                {
                    long fArg;
                    if (!long.TryParse(argStr.Substring(0, argStr.Length - 1), out fArg))
                    {
                        return false;
                    }
                    funcArgs.Add(fArg);
                }
                else if (argStr.EndsWith("f", StringComparison.OrdinalIgnoreCase))
                {
                    float fArg;
                    if (!float.TryParse(argStr.Substring(0, argStr.Length - 1), out fArg))
                    {
                        return false;
                    }
                    funcArgs.Add(fArg);
                }
                else
                {
                    int intFArg;
                    double doubleFArg;
                    uint uintFArg;
                    if (int.TryParse(argStr, out intFArg))
                    {
                        funcArgs.Add(intFArg);
                    }
                    else if (uint.TryParse(argStr, out uintFArg))
                    {
                        // Out-of-range-for-int values are reinterpreted as
                        // signed 32-bit integers, preserving the bit pattern.
                        funcArgs.Add((int)uintFArg);
                    }
                    else
                    {
                        if (!double.TryParse(argStr, out doubleFArg))
                        {
                            return false;
                        }
                        funcArgs.Add(doubleFArg);
                    }
                }
            }
            else if (expectingRunFuncName)
            {
                // Reject a second '--run'.
                if (ParsedArgs.FunctionToRun != null)
                {
                    return false;
                }
                ParsedArgs.FunctionToRun = Args[i];
                expectingRunFuncName = false;
                expectingArgs = true;
            }
            else if (expectingImporterName)
            {
                // Reject a second '--importer'.
                if (ParsedArgs.ImporterName != null)
                {
                    return false;
                }
                ParsedArgs.ImporterName = Args[i];
                expectingImporterName = false;
            }
            else if (Args[i] == "--run")
            {
                expectingRunFuncName = true;
            }
            else if (Args[i] == "--importer")
            {
                expectingImporterName = true;
            }
            else if (Args[i] == "--trace")
            {
                ParsedArgs.TraceExecution = true;
            }
            else
            {
                // The only positional argument is the wasm file path.
                if (ParsedArgs.WasmFilePath != null)
                {
                    return false;
                }
                ParsedArgs.WasmFilePath = Args[i];
            }
        }
        ParsedArgs.FunctionArgs = funcArgs.ToArray();
        // A file path is mandatory, and dangling '--run'/'--importer'
        // flags without a value are malformed.
        return ParsedArgs.WasmFilePath != null
            && !expectingRunFuncName
            && !expectingImporterName;
    }
}
/// <summary>
/// The command-line entry point for the WebAssembly interpreter.
/// </summary>
public static class Program
{
    /// <summary>
    /// Prints a usage message to standard error.
    /// </summary>
    /// <returns>Exit code one, so callers can 'return PrintUsage();'.</returns>
    private static int PrintUsage()
    {
        Console.Error.WriteLine("usage: wasm-interp file.wasm [--trace] [--importer spectest|spectest-spaces|base-runtime] [--run exported_func_name [args...]]");
        return 1;
    }

    /// <summary>
    /// Parses the command line, loads and instantiates a WebAssembly
    /// module, runs the requested (or 'start') function and prints its
    /// results.
    /// </summary>
    /// <param name="args">The raw command-line arguments.</param>
    /// <returns>Zero on success; one on any error.</returns>
    public static int Main(string[] args)
    {
        // Read command-line arguments.
        InterpreterArguments parsedArgs;
        if (!InterpreterArguments.TryRead(args, out parsedArgs))
        {
            return PrintUsage();
        }
        IImporter importer;
        if (!parsedArgs.TryGetImporter(out importer))
        {
            Console.Error.WriteLine("error: there is no importer named '" + parsedArgs.ImporterName + "'");
            return 1;
        }
        // Read and instantiate the module.
        var wasmFile = WasmFile.ReadBinary(parsedArgs.WasmFilePath);
        InstructionInterpreter interp = DefaultInstructionInterpreter.Default;
        if (parsedArgs.TraceExecution)
        {
            // Wrap the interpreter so each instruction is logged to stderr.
            interp = new TracingInstructionInterpreter(interp, Console.Error);
        }
        var module = ModuleInstance.Instantiate(wasmFile, importer, interp);
        // Figure out which function to run: an explicitly requested export,
        // or the module's 'start' function as a fallback.
        FunctionDefinition funcToRun = null;
        if (parsedArgs.FunctionToRun != null)
        {
            if (!module.ExportedFunctions.TryGetValue(parsedArgs.FunctionToRun, out funcToRun))
            {
                Console.Error.WriteLine(
                    "error: module does not export a function named '" +
                    parsedArgs.FunctionToRun + "'");
                return 1;
            }
        }
        else
        {
            var startSec = wasmFile.GetFirstSectionOrNull<StartSection>();
            if (startSec == null)
            {
                Console.Error.WriteLine(
                    "error: module does not define a 'start' section " +
                    " and '--run exported_func_name' was not specified.");
                return 1;
            }
            else
            {
                IReadOnlyList<FunctionDefinition> funcs = module.Functions;
                funcToRun = funcs[(int)startSec.StartFunctionIndex];
            }
        }
        // Run that function, printing its space-separated return values.
        int exitCode = 0;
        try
        {
            IReadOnlyList<object> output = funcToRun.Invoke(parsedArgs.FunctionArgs);
            if (output.Count > 0)
            {
                for (int i = 0; i < output.Count; i++)
                {
                    if (i > 0)
                    {
                        Console.Write(" ");
                    }
                    Console.Write(output[i]);
                }
                Console.WriteLine();
            }
        }
        catch (WasmException ex)
        {
            // Traps and other wasm-level failures become error messages
            // rather than uncaught exceptions.
            Console.Error.WriteLine("error: " + ex.Message);
            exitCode = 1;
        }
        return exitCode;
    }
}
}
<|start_filename|>libwasm/Optimize/Peephole/PeepholeOptimization.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using Wasm.Instructions;
namespace Wasm.Optimize
{
/// <summary>
/// An optimization that pattern-matches and rewrites small sequences of
/// instructions. Implementations come in Match/Rewrite pairs: Match
/// decides how many leading instructions to replace and Rewrite produces
/// their replacement.
/// </summary>
public abstract class PeepholeOptimization
{
    /// <summary>
    /// Tests if the items at the front of the given list of instructions
    /// match the peephole optimization; if a match occurs, a nonzero value
    /// is returned that indicates the number of instructions at the front
    /// of the list of instructions that should be rewritten. A return
    /// value of zero means no match.
    /// </summary>
    /// <param name="instructions">
    /// The instructions to match against the peephole optimization.
    /// </param>
    /// <returns>The number of instructions to rewrite.</returns>
    public abstract uint Match(IReadOnlyList<Instruction> instructions);

    /// <summary>
    /// Rewrites the given sequence of instructions.
    /// </summary>
    /// <param name="matched">
    /// A list of instructions that has been matched and will all be replaced.
    /// </param>
    /// <returns>The rewritten instructions.</returns>
    public abstract IReadOnlyList<Instruction> Rewrite(IReadOnlyList<Instruction> matched);
}
/// <summary>
/// An optimizer that applies peephole optimizations.
/// </summary>
public sealed class PeepholeOptimizer
{
    /// <summary>
    /// Creates a peephole optimizer that applies the given optimizations.
    /// </summary>
    /// <param name="optimizations">The optimizations to apply.</param>
    public PeepholeOptimizer(IEnumerable<PeepholeOptimization> optimizations)
    {
        this.opts = optimizations;
    }

    // The set of optimizations this optimizer tries at every position.
    private IEnumerable<PeepholeOptimization> opts;

    /// <summary>
    /// A peephole optimizer based that uses the default set of peephole
    /// optimizations offered by cs-wasm.
    /// </summary>
    public static PeepholeOptimizer DefaultOptimizer => new PeepholeOptimizer(DefaultOptimizations);

    /// <summary>
    /// The default set of peephole optimizations that ships with cs-wasm.
    /// </summary>
    public static readonly IEnumerable<PeepholeOptimization> DefaultOptimizations =
        new PeepholeOptimization[]
        {
            TeeLocalOptimization.Instance,
            UnreachableCodeOptimization.Instance
        };

    /// <summary>
    /// Uses this peephole optimizer to optimize the given sequence of instructions.
    /// </summary>
    /// <param name="instructions">The instructions to optimize.</param>
    /// <returns>An optimized sequence of instructions.</returns>
    public IReadOnlyList<Instruction> Optimize(IReadOnlyList<Instruction> instructions)
    {
        var input = instructions.ToArray();
        var output = new List<Instruction>();
        int index = 0;
        while (index < input.Length)
        {
            // Greedily apply the optimization with the longest match at
            // the current position.
            PeepholeOptimization chosen;
            uint matched = LongestMatch(
                new ArraySegment<Instruction>(input, index, input.Length - index),
                out chosen);
            if (matched > 0)
            {
                var window = new ArraySegment<Instruction>(input, index, (int)matched);
                output.AddRange(chosen.Rewrite(window));
                index += (int)matched;
                continue;
            }
            var insn = input[index];
            if (insn is BlockInstruction)
            {
                // Visit block instructions recursively.
                var block = (BlockInstruction)insn;
                output.Add(
                    new BlockInstruction((BlockOperator)block.Op, block.Type, Optimize(block.Contents)));
            }
            else if (insn is IfElseInstruction)
            {
                // Visit if-else instructions recursively, too.
                var ifElse = (IfElseInstruction)insn;
                var thenBody = ifElse.IfBranch == null ? null : Optimize(ifElse.IfBranch);
                var elseBody = ifElse.ElseBranch == null ? null : Optimize(ifElse.ElseBranch);
                output.Add(new IfElseInstruction(ifElse.Type, thenBody, elseBody));
            }
            else
            {
                // Other instructions are kept unmodified.
                output.Add(insn);
            }
            index++;
        }
        return output;
    }

    /// <summary>
    /// Finds the optimization whose match at the front of the given
    /// instructions is longest.
    /// </summary>
    /// <param name="instructions">The instructions to match against.</param>
    /// <param name="matchingOptimization">The best-matching optimization, or <c>null</c> if none matched.</param>
    /// <returns>The length of the longest match; zero if nothing matched.</returns>
    private uint LongestMatch(
        IReadOnlyList<Instruction> instructions,
        out PeepholeOptimization matchingOptimization)
    {
        uint longest = 0;
        matchingOptimization = null;
        foreach (var candidate in opts)
        {
            uint length = candidate.Match(instructions);
            if (length > longest)
            {
                longest = length;
                matchingOptimization = candidate;
            }
        }
        return longest;
    }
}
}
<|start_filename|>libwasm/Instructions/BrTableInstruction.cs<|end_filename|>
using System.Collections.Generic;
using System.IO;
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes a WebAssembly stack machine instruction that represents a break table.
/// </summary>
public sealed class BrTableInstruction : Instruction
{
    /// <summary>
    /// Creates a break table instruction from the given operator, table of
    /// break targets and a default target.
    /// </summary>
    /// <param name="op">The operator for this instruction.</param>
    /// <param name="targetTable">
    /// A table of target entries that indicate an outer block or loop to which to break.
    /// </param>
    /// <param name="defaultTarget">
    /// The default target: an outer block or loop to which to break in the default case.
    /// </param>
    public BrTableInstruction(BrTableOperator op, IEnumerable<uint> targetTable, uint defaultTarget)
    {
        this.opValue = op;
        // Copy the target table so this instruction owns its own list.
        this.TargetTable = new List<uint>(targetTable);
        this.DefaultTarget = defaultTarget;
    }

    private BrTableOperator opValue;

    /// <summary>
    /// Gets the operator for this instruction.
    /// </summary>
    /// <returns>The instruction's operator.</returns>
    public override Operator Op { get { return opValue; } }

    /// <summary>
    /// Gets a table of target entries that indicate an outer block or loop to which to break.
    /// The list itself is mutable and may be edited in place.
    /// </summary>
    /// <returns>The target entry table.</returns>
    public List<uint> TargetTable { get; private set; }

    /// <summary>
    /// Gets or sets the default target: an outer block or loop to which to break in the default case.
    /// </summary>
    /// <returns>The default target.</returns>
    public uint DefaultTarget { get; set; }

    /// <summary>
    /// Writes this instruction's immediates (but not its opcode)
    /// to the given WebAssembly file writer.
    /// </summary>
    /// <param name="writer">The writer to write this instruction's immediates to.</param>
    public override void WriteImmediatesTo(BinaryWasmWriter writer)
    {
        // Encoding: entry count, then each entry, then the default target,
        // all as variable-length unsigned 32-bit integers.
        writer.WriteVarUInt32((uint)TargetTable.Count);
        foreach (var entry in TargetTable)
        {
            writer.WriteVarUInt32(entry);
        }
        writer.WriteVarUInt32(DefaultTarget);
    }

    /// <summary>
    /// Writes a string representation of this instruction to the given text writer.
    /// </summary>
    /// <param name="writer">
    /// The writer to which a representation of this instruction is written.
    /// </param>
    public override void Dump(TextWriter writer)
    {
        Op.Dump(writer);
        writer.Write(" default=");
        writer.Write(DefaultTarget);
        // Print each table entry as "index -> target" on its own indented line.
        var indentedWriter = DumpHelpers.CreateIndentedTextWriter(writer);
        for (int i = 0; i < TargetTable.Count; i++)
        {
            indentedWriter.WriteLine();
            indentedWriter.Write(i);
            indentedWriter.Write(" -> ");
            indentedWriter.Write(TargetTable[i]);
        }
        writer.WriteLine();
    }
}
}
<|start_filename|>wasm-cat/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
namespace Wasm.Cat
{
// wasm-cat takes WebAssembly files as input and concatenates their sections.
public struct CatArgs
{
    /// <summary>
    /// Gets or sets the path of the output file, if one was specified
    /// with the '-o' flag.
    /// </summary>
    public string Output { get; set; }

    /// <summary>
    /// Gets or sets the sequence of input file paths.
    /// </summary>
    public IEnumerable<string> Inputs { get; set; }

    /// <summary>
    /// Tries to parse a wasm-cat command line. Every argument is an input
    /// path except for '-o', which must be followed by an output path.
    /// </summary>
    /// <param name="Args">The command-line arguments to parse.</param>
    /// <param name="Result">The parsed arguments.</param>
    /// <returns>
    /// <c>true</c> if parsing succeeded; <c>false</c> if no arguments were
    /// given or a '-o' flag has no path after it.
    /// </returns>
    public static bool TryParse(string[] Args, out CatArgs Result)
    {
        Result = default(CatArgs);
        if (Args.Length == 0)
        {
            return false;
        }

        var inputPaths = new List<string>();
        int index = 0;
        while (index < Args.Length)
        {
            string argument = Args[index];
            if (argument == "-o")
            {
                // '-o' must be followed by an output path.
                if (index + 1 >= Args.Length)
                {
                    return false;
                }
                index++;
                Result.Output = Args[index];
            }
            else
            {
                inputPaths.Add(argument);
            }
            index++;
        }
        Result.Inputs = inputPaths;
        return true;
    }
}
public static class Program
{
    /// <summary>
    /// The wasm-cat entry point: concatenates the sections of the input
    /// WebAssembly files and writes the result to the output path or to
    /// standard output.
    /// </summary>
    /// <param name="args">The command-line arguments.</param>
    /// <returns>Zero on success; one if the arguments are malformed.</returns>
    public static int Main(string[] args)
    {
        CatArgs parsedArgs;
        if (!CatArgs.TryParse(args, out parsedArgs))
        {
            Console.Error.WriteLine("usage: wasm-cat file.wasm... [-o output.wasm]");
            return 1;
        }

        var file = new WasmFile();
        foreach (var path in parsedArgs.Inputs)
        {
            // Read the file and append its sections to the resulting file.
            var inputFile = WasmFile.ReadBinary(path);
            file.Sections.AddRange(inputFile.Sections);
            // Also, set the WebAssembly version number to the max of the
            // input files.
            if (inputFile.Header.Version > file.Header.Version)
            {
                file.Header = inputFile.Header;
            }
        }

        // Now write the file to the output path, or to standard output if
        // no path was given. Use File.Create rather than File.OpenWrite:
        // OpenWrite does not truncate an existing file, so a previous,
        // longer output would leave stale trailing bytes and corrupt the
        // resulting module.
        using (var outputStream = string.IsNullOrEmpty(parsedArgs.Output)
            ? Console.OpenStandardOutput()
            : File.Create(parsedArgs.Output))
        {
            file.WriteBinaryTo(outputStream);
        }
        return 0;
    }
}
}
<|start_filename|>libwasm/Instructions/CallIndirectOperator.cs<|end_filename|>
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes an operator that calls a function pointer.
/// </summary>
public sealed class CallIndirectOperator : Operator
{
    /// <summary>
    /// Creates an indirect call operator.
    /// </summary>
    /// <param name="opCode">The operator's opcode.</param>
    /// <param name="declaringType">A type that defines the operator, if any.</param>
    /// <param name="mnemonic">The operator's mnemonic.</param>
    public CallIndirectOperator(byte opCode, WasmType declaringType, string mnemonic)
        : base(opCode, declaringType, mnemonic)
    { }

    /// <summary>
    /// Reads the immediates (not the opcode) of a WebAssembly instruction
    /// for this operator from the given reader and returns the result as an
    /// instruction.
    /// </summary>
    /// <param name="reader">The WebAssembly file reader to read immediates from.</param>
    /// <returns>A WebAssembly instruction.</returns>
    public override Instruction ReadImmediates(BinaryWasmReader reader)
    {
        // Two var-uint32 immediates are consumed; C# evaluates arguments
        // left to right, so the first value read becomes the instruction's
        // first immediate.
        return new CallIndirectInstruction(this, reader.ReadVarUInt32(), reader.ReadVarUInt32());
    }

    /// <summary>
    /// Creates an indirect call instruction from this operator and
    /// an index into the 'type' table.
    /// </summary>
    /// <param name="typeIndex">The index of the callee's signature in the type table.</param>
    /// <returns>An indirect call instruction.</returns>
    public CallIndirectInstruction Create(uint typeIndex)
    {
        return new CallIndirectInstruction(this, typeIndex);
    }

    /// <summary>
    /// Casts the given instruction to this operator's instruction type.
    /// </summary>
    /// <param name="value">The instruction to cast.</param>
    /// <returns>The given instruction as this operator's instruction type.</returns>
    public CallIndirectInstruction CastInstruction(Instruction value)
    {
        return (CallIndirectInstruction)value;
    }
}
}
<|start_filename|>libwasm/Instructions/MemoryOperator.cs<|end_filename|>
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes an operator that accesses a memory location.
/// </summary>
public sealed class MemoryOperator : Operator
{
    /// <summary>
    /// Creates a memory operator.
    /// </summary>
    /// <param name="opCode">The operator's opcode.</param>
    /// <param name="declaringType">A type that defines the operator, if any.</param>
    /// <param name="mnemonic">The operator's mnemonic.</param>
    public MemoryOperator(byte opCode, WasmType declaringType, string mnemonic)
        : base(opCode, declaringType, mnemonic)
    { }

    /// <summary>
    /// Reads the immediates (not the opcode) of a WebAssembly instruction
    /// for this operator from the given reader and returns the result as an
    /// instruction.
    /// </summary>
    /// <param name="reader">The WebAssembly file reader to read immediates from.</param>
    /// <returns>A WebAssembly instruction.</returns>
    public override Instruction ReadImmediates(BinaryWasmReader reader)
    {
        // Two var-uint32 immediates: the log2 of the alignment followed by
        // the offset, matching Create's parameter order (C# evaluates
        // arguments left to right).
        return Create(reader.ReadVarUInt32(), reader.ReadVarUInt32());
    }

    /// <summary>
    /// Creates a new instruction from this operator and the given
    /// immediates.
    /// </summary>
    /// <param name="log2Alignment">The log2 of the memory alignment for this instruction.</param>
    /// <param name="offset">
    /// The offset of the memory location relative to the pointer that is accessed.
    /// </param>
    /// <returns>A new instruction.</returns>
    public MemoryInstruction Create(uint log2Alignment, uint offset)
    {
        return new MemoryInstruction(this, log2Alignment, offset);
    }

    /// <summary>
    /// Casts the given instruction to this operator's instruction type.
    /// </summary>
    /// <param name="value">The instruction to cast.</param>
    /// <returns>The given instruction as this operator's instruction type.</returns>
    public MemoryInstruction CastInstruction(Instruction value)
    {
        return (MemoryInstruction)value;
    }
}
}
<|start_filename|>libwasm/StartSection.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using Wasm.Binary;
namespace Wasm
{
/// <summary>
/// A type of section that defines a WebAssembly module's entry point.
/// </summary>
public sealed class StartSection : Section
{
    /// <summary>
    /// Creates a section that identifies a particular function as the entry point.
    /// </summary>
    /// <param name="startFunctionIndex">The index of a function to define as the entry point.</param>
    public StartSection(uint startFunctionIndex)
        : this(startFunctionIndex, new byte[0])
    {
    }

    /// <summary>
    /// Creates a section that identifies a particular function as the entry point.
    /// </summary>
    /// <param name="startFunctionIndex">The index of a function to define as the entry point.</param>
    /// <param name="extraPayload">
    /// A sequence of bytes that have no intrinsic meaning; they are part
    /// of the start section but are placed after the start section's actual contents.
    /// </param>
    public StartSection(uint startFunctionIndex, byte[] extraPayload)
    {
        this.StartFunctionIndex = startFunctionIndex;
        this.ExtraPayload = extraPayload;
    }

    /// <inheritdoc/>
    public override SectionName Name => new SectionName(SectionCode.Start);

    /// <summary>
    /// Gets or sets the index of the WebAssembly module's entry point.
    /// </summary>
    /// <returns>The index of the module's entry point function.</returns>
    public uint StartFunctionIndex { get; set; }

    /// <summary>
    /// Gets or sets this start section's additional payload.
    /// </summary>
    /// <returns>The additional payload, as an array of bytes.</returns>
    public byte[] ExtraPayload { get; set; }

    /// <inheritdoc/>
    public override void WritePayloadTo(BinaryWasmWriter writer)
    {
        // The payload is the start function index (var-uint32) followed by
        // any extra payload bytes, verbatim.
        writer.WriteVarUInt32(StartFunctionIndex);
        writer.Writer.Write(ExtraPayload);
    }

    /// <inheritdoc/>
    public override void Dump(TextWriter writer)
    {
        writer.Write(Name.ToString());
        writer.Write("; entry point: function #");
        writer.Write(StartFunctionIndex);
        writer.WriteLine();
        if (ExtraPayload.Length > 0)
        {
            writer.Write("Extra payload size: ");
            writer.Write(ExtraPayload.Length);
            writer.WriteLine();
            DumpHelpers.DumpBytes(ExtraPayload, writer);
            writer.WriteLine();
        }
    }

    /// <summary>
    /// Reads the start section with the given header.
    /// </summary>
    /// <param name="header">The section header.</param>
    /// <param name="reader">The WebAssembly file reader.</param>
    /// <returns>The parsed section.</returns>
    public static StartSection ReadSectionPayload(SectionHeader header, BinaryWasmReader reader)
    {
        long startPos = reader.Position;
        // Read the start function index.
        uint startIndex = reader.ReadVarUInt32();
        // Skip any remaining bytes.
        var extraPayload = reader.ReadRemainingPayload(startPos, header);
        return new StartSection(startIndex, extraPayload);
    }
}
}
<|start_filename|>unit-tests/Text/LexerTests.cs<|end_filename|>
using System;
using System.Linq;
using System.Numerics;
using System.Text;
using Loyc.MiniTest;
namespace Wasm.Text
{
/// <summary>
/// Unit tests for the WebAssembly text format lexer.
/// </summary>
[TestFixture]
public class LexerTests
{
    [Test]
    public void ParseStrings()
    {
        AssertStringParsesAs("hi", "\"hi\"");
        AssertStringParsesAs("hello there", "\"hello there\"");
        AssertStringParsesAs("hello there", "\"hello\\u{20}there\"");
        AssertStringParsesAs("hello there", "\"hello\\20there\"");
        AssertStringParsesAs("hello\tthere", "\"hello\\tthere\"");
        AssertStringParsesAs("hello\rthere", "\"hello\\rthere\"");
        AssertStringParsesAs("hello\nthere", "\"hello\\nthere\"");
        AssertStringParsesAs("hello\'there", "\"hello\\'there\"");
        AssertStringParsesAs("hello\"there", "\"hello\\\"there\"");
        // NOTE(review): the line below duplicates the assertion above;
        // harmless, but likely an accidental copy.
        AssertStringParsesAs("hello\"there", "\"hello\\\"there\"");
        // Two UTF-16 code units forming a surrogate pair (a
        // supplementary-plane character) must round-trip through the lexer.
        var unicode = new string(new[] { (char)55304, (char)56692 });
        AssertStringParsesAs(unicode, $"\"{unicode}\"");
    }

    [Test]
    public void ParseIdentifier()
    {
        AssertParsesAsKind(Lexer.TokenKind.Identifier, "$hi");
        AssertParsesAs("hi", "$hi");
        AssertParsesAs("variable_name", "$variable_name");
        AssertParsesAs("variable_name123ABC", "$variable_name123ABC");
    }

    [Test]
    public void ParseKeyword()
    {
        AssertParsesAsKind(Lexer.TokenKind.Keyword, "module");
        AssertParsesAs("module", "module");
        AssertParsesAs("i32.add", "i32.add");
        AssertParsesAsKind(Lexer.TokenKind.Keyword, "offset=4");
        AssertParsesAsKind(Lexer.TokenKind.Keyword, "i32.load");
        Assert.IsTrue(
            Enumerable.SequenceEqual(
                new[] { Lexer.TokenKind.Keyword, Lexer.TokenKind.Keyword, Lexer.TokenKind.Keyword },
                Lexer.Tokenize("i32.load offset=16 align=2").Select(x => x.Kind)));
    }

    [Test]
    public void ParseUnsignedIntegers()
    {
        AssertParsesAsKind(Lexer.TokenKind.UnsignedInteger, "10");
        AssertParsesAsKind(Lexer.TokenKind.UnsignedInteger, "0x10");
        AssertParsesAs(new BigInteger(10), "10");
        AssertParsesAs(new BigInteger(0x10), "0x10");
        AssertParsesAs(new BigInteger(0xff), "0xff");
    }

    [Test]
    public void ParseSignedIntegers()
    {
        AssertParsesAsKind(Lexer.TokenKind.SignedInteger, "+10");
        AssertParsesAsKind(Lexer.TokenKind.SignedInteger, "+0x10");
        AssertParsesAs(new BigInteger(10), "+10");
        AssertParsesAs(new BigInteger(0x10), "+0x10");
        AssertParsesAs(new BigInteger(0xff), "+0xff");
        AssertParsesAsKind(Lexer.TokenKind.SignedInteger, "-10");
        AssertParsesAsKind(Lexer.TokenKind.SignedInteger, "-0x10");
        AssertParsesAs(new BigInteger(-10), "-10");
        AssertParsesAs(new BigInteger(-0x10), "-0x10");
        AssertParsesAs(new BigInteger(-0xff), "-0xff");
    }

    [Test]
    public void ParseFloats()
    {
        // Token-kind checks: decimal and hexadecimal forms, with and
        // without sign, fraction and exponent ('e' for decimal, 'p' for hex).
        AssertParsesAsKind(Lexer.TokenKind.Float, "inf");
        AssertParsesAsKind(Lexer.TokenKind.Float, "+inf");
        AssertParsesAsKind(Lexer.TokenKind.Float, "-inf");
        AssertParsesAsKind(Lexer.TokenKind.Float, "nan");
        AssertParsesAsKind(Lexer.TokenKind.Float, "nan:0x2");
        AssertParsesAsKind(Lexer.TokenKind.Float, "10.");
        AssertParsesAsKind(Lexer.TokenKind.Float, "10.10");
        AssertParsesAsKind(Lexer.TokenKind.Float, "+10.10");
        AssertParsesAsKind(Lexer.TokenKind.Float, "-10.10");
        AssertParsesAsKind(Lexer.TokenKind.Float, "0x10.");
        AssertParsesAsKind(Lexer.TokenKind.Float, "0x10.10");
        AssertParsesAsKind(Lexer.TokenKind.Float, "+0x10.10");
        AssertParsesAsKind(Lexer.TokenKind.Float, "-0x10.10");
        AssertParsesAsKind(Lexer.TokenKind.Float, "10.e1");
        AssertParsesAsKind(Lexer.TokenKind.Float, "10.10e1");
        AssertParsesAsKind(Lexer.TokenKind.Float, "+10.10e1");
        AssertParsesAsKind(Lexer.TokenKind.Float, "-10.10e1");
        AssertParsesAsKind(Lexer.TokenKind.Float, "0x10.p1");
        AssertParsesAsKind(Lexer.TokenKind.Float, "0x10.10p1");
        AssertParsesAsKind(Lexer.TokenKind.Float, "+0x10.10p1");
        AssertParsesAsKind(Lexer.TokenKind.Float, "-0x10.10p1");
        // Value checks, including signed zero and subnormal boundaries.
        AssertParsesAs(double.NegativeInfinity, "-inf");
        AssertParsesAs(+0.0, "0.0");
        AssertParsesAs(+0.0, "+0.0");
        AssertParsesAs(-0.0, "-0.0");
        AssertParsesAs(+0.0, "+0x0p2");
        AssertParsesAs(-0.0, "-0x0p2");
        AssertParsesAs(10.0, "10.");
        AssertParsesAs(10.10, "10.10");
        AssertParsesAs(10.10, "+10.10");
        AssertParsesAs(-10.10, "-10.10");
        AssertParsesAs(16.0, "0x10.");
        AssertParsesAs(16.0625, "0x10.10");
        AssertParsesAs(16.0625, "+0x10.10");
        AssertParsesAs(-16.0625, "-0x10.10");
        AssertParsesAs(10.0 * 10, "10E1");
        AssertParsesAs(10.0 * 10, "10.e1");
        AssertParsesAs(10.10 * 10, "10.10e1");
        AssertParsesAs(10.10 * 10, "+10.10e1");
        AssertParsesAs(-10.10 * 10, "-10.10e1");
        AssertParsesAs(16.0 * 2, "0x10P1");
        AssertParsesAs(16.0 * 2, "0x10.p1");
        AssertParsesAs(16.0625 * 2, "0x10.10p1");
        AssertParsesAs(16.0625 * 2, "+0x10.10p1");
        AssertParsesAs(-16.0625 * 2, "-0x10.10p1");
        // The smallest positive subnormal double.
        AssertParsesAs(BitConverter.Int64BitsToDouble(0x0000000000000001), "0x1p-1074");
    }

    [Test]
    public void ParseReserved()
    {
        AssertParsesAsKind(Lexer.TokenKind.Reserved, "0$x");
        AssertParsesAsKind(Lexer.TokenKind.Reserved, "\"hello\\u{20x}there\"");
    }

    [Test]
    public void ParseParens()
    {
        AssertParsesAsKind(Lexer.TokenKind.LeftParenthesis, "(");
        AssertParsesAsKind(Lexer.TokenKind.RightParenthesis, ")");
    }

    [Test]
    public void ParseWhitespace()
    {
        // Leading/trailing whitespace and comments must be skipped without
        // affecting the tokens that are produced.
        AssertStringParsesAs("hi", " \"hi\"");
        AssertStringParsesAs("hi", "\t\"hi\"");
        AssertStringParsesAs("hi", "\n\"hi\"");
        AssertStringParsesAs("hi", "\r\"hi\"");
        AssertStringParsesAs("hi", " \r\n\"hi\"");
        AssertStringParsesAs("hi", "(; block comment! ;)\"hi\"");
        AssertStringParsesAs("hi", "(; (; nested block comment! ;) ;)\"hi\"");
        AssertStringParsesAs("hi", " \"hi\" ");
        AssertStringParsesAs("hi", "\"hi\" ;; line comment!");
    }

    /// <summary>
    /// Asserts that <paramref name="text"/> lexes to a single string token
    /// whose UTF-8 payload decodes to <paramref name="expected"/>.
    /// </summary>
    private void AssertStringParsesAs(string expected, string text)
    {
        var token = ParseSingleToken(text);
        Assert.AreEqual(Lexer.TokenKind.String, token.Kind);
        Assert.AreEqual(expected, Encoding.UTF8.GetString((byte[])token.Value));
    }

    /// <summary>
    /// Asserts that <paramref name="text"/> lexes to a single token with the
    /// given value.
    /// </summary>
    private void AssertParsesAs(object expected, string text)
    {
        Assert.AreEqual(expected, ParseSingleToken(text).Value);
    }

    /// <summary>
    /// Asserts that <paramref name="text"/> lexes to a single float literal
    /// token whose double value equals <paramref name="expected"/>.
    /// </summary>
    private void AssertParsesAs(double expected, string text)
    {
        Assert.AreEqual(expected, (double)(FloatLiteral)ParseSingleToken(text).Value);
    }

    /// <summary>
    /// Asserts that <paramref name="text"/> lexes to a single float literal
    /// token whose single-precision value equals <paramref name="expected"/>.
    /// </summary>
    private void AssertParsesAs(float expected, string text)
    {
        Assert.AreEqual(expected, (float)(FloatLiteral)ParseSingleToken(text).Value);
    }

    /// <summary>
    /// Asserts that <paramref name="text"/> lexes to a single token of the
    /// given kind.
    /// </summary>
    private void AssertParsesAsKind(Lexer.TokenKind kind, string text)
    {
        Assert.AreEqual(kind, ParseSingleToken(text).Kind);
    }

    /// <summary>
    /// Tokenizes <paramref name="text"/> and asserts (via Single) that
    /// exactly one token was produced, returning it.
    /// </summary>
    private Lexer.Token ParseSingleToken(string text)
    {
        var tokens = Lexer.Tokenize(text).ToArray();
        return tokens.Single();
    }
}
}
<|start_filename|>libwasm/Interpret/Jit/JitCompiler.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Reflection.Emit;
using Wasm.Instructions;
namespace Wasm.Interpret.Jit
{
using InstructionImpl = Action<CompilerContext, ILGenerator>;
/// <summary>
/// A module compiler that compiles WebAssembly instructions to CIL.
/// </summary>
public class JitCompiler : ModuleCompiler
{
    /// <summary>
    /// Creates a JIT compiler from the default operator implementations.
    /// </summary>
    public JitCompiler()
        : this(DefaultOperatorImplementations)
    { }

    /// <summary>
    /// Creates a JIT compiler from an operator implementation map.
    /// </summary>
    /// <param name="operatorImplementations">A mapping of operators to functions that compile instructions.</param>
    public JitCompiler(
        IReadOnlyDictionary<Operator, Func<Instruction, InstructionImpl>> operatorImplementations)
    {
        this.OperatorImplementations = operatorImplementations;
    }

    /// <summary>
    /// Gets a mapping of operators to functions that compile instances of those operators
    /// to implementations. <c>null</c> implementations indicate that an operator instance
    /// cannot be compiled.
    /// </summary>
    /// <value>A mapping of operators to functions that compile instructions.</value>
    public IReadOnlyDictionary<Operator, Func<Instruction, InstructionImpl>> OperatorImplementations { get; private set; }

    // The module instance being compiled; set by Initialize.
    internal ModuleInstance module;
    // The function index offset passed to Initialize; exposed to operator
    // implementations (internal visibility).
    internal int offset;
    // The signatures of the functions to compile, indexed in parallel with
    // 'builders'.
    internal IReadOnlyList<FunctionType> types;
    // The dynamic assembly that hosts the generated CIL.
    private AssemblyBuilder assembly;
    // One MethodBuilder per function signature; populated by Initialize.
    internal IReadOnlyList<MethodBuilder> builders;
    // The type that holds all generated static methods and helper fields.
    private TypeBuilder wasmType;
    // NOTE(review): this list is created in Initialize but never populated
    // in this class — confirm whether it is vestigial.
    private IReadOnlyList<Func<IReadOnlyList<object>, IReadOnlyList<object>>> wrappers;
    // Compiled definitions whose MethodInfo must be re-resolved in Finish,
    // after the dynamic type has been created.
    private List<CompiledFunctionDefinition> functionDefinitions;
    // Monotonic counter used to give helper fields unique names.
    private int helperFieldIndex;
    // Values to store into the generated static helper fields once the
    // dynamic type is created (see Finish).
    private Dictionary<FieldInfo, object> constFieldValues;

    /// <inheritdoc/>
    public override void Initialize(ModuleInstance module, int offset, IReadOnlyList<FunctionType> types)
    {
        this.module = module;
        this.offset = offset;
        this.types = types;
        this.helperFieldIndex = 0;
        this.constFieldValues = new Dictionary<FieldInfo, object>();
        this.functionDefinitions = new List<CompiledFunctionDefinition>();
        this.assembly = AssemblyBuilder.DefineDynamicAssembly(
            new AssemblyName("wasm"),
            AssemblyBuilderAccess.RunAndCollect);
        var wasmModule = assembly.DefineDynamicModule("main");
        this.wasmType = wasmModule.DefineType("CompiledWasm", TypeAttributes.Public | TypeAttributes.Sealed);
        var builderList = new List<MethodBuilder>();
        var wrapperList = new List<Func<IReadOnlyList<object>, IReadOnlyList<object>>>();
        foreach (var signature in types)
        {
            var methodDef = wasmType.DefineMethod(
                $"func_{builderList.Count}",
                MethodAttributes.Public | MethodAttributes.Static);
            // Each generated method takes the WebAssembly parameters plus a
            // trailing uint that threads the call stack depth through calls.
            methodDef.SetParameters(
                signature.ParameterTypes.Select(ValueHelpers.ToClrType)
                    .Concat(new[] { typeof(uint) })
                    .ToArray());
            if (signature.ReturnTypes.Count == 0)
            {
                methodDef.SetReturnType(typeof(void));
            }
            else if (signature.ReturnTypes.Count == 1)
            {
                methodDef.SetReturnType(ValueHelpers.ToClrType(signature.ReturnTypes[0]));
            }
            else
            {
                throw new WasmException("Cannot compile functions with more than one return value.");
            }
            builderList.Add(methodDef);
        }
        this.builders = builderList;
        this.wrappers = wrapperList;
    }

    /// <inheritdoc/>
    public override FunctionDefinition Compile(int index, FunctionBody body)
    {
        var signature = types[index];
        var builder = builders[index];
        var ilGen = builder.GetILGenerator();
        if (TryCompile(signature, body, ilGen))
        {
            var result = new CompiledFunctionDefinition(signature, builder, module.Policy.TranslateExceptions);
            functionDefinitions.Add(result);
            return result;
        }
        else
        {
            // Fall back to the interpreter when any instruction in the body
            // has no JIT implementation.
            return MakeInterpreterThunk(index, body, ilGen);
        }
    }

    /// <summary>
    /// Emits a thunk into the generated method that forwards its arguments
    /// to an interpreted function definition, and returns that definition.
    /// </summary>
    private WasmFunctionDefinition MakeInterpreterThunk(int index, FunctionBody body, ILGenerator generator)
    {
        var signature = types[index];
        // Create an interpreted function definition.
        var func = new WasmFunctionDefinition(signature, body, module);
        // Call it.
        EmitExternalCall(
            generator,
            signature.ParameterTypes.Count,
            func,
            signature.ParameterTypes
                .Select<WasmValueType, Func<ILGenerator, Type>>(
                    (p, i) => gen =>
                    {
                        gen.Emit(OpCodes.Ldarg, i);
                        return ValueHelpers.ToClrType(p);
                    })
                .ToArray());
        // Return.
        generator.Emit(OpCodes.Ret);
        return func;
    }

    /// <summary>
    /// Emits a call to a known function definition: the definition is stored
    /// in a static helper field and invoked through the interpreter bridge.
    /// </summary>
    internal void EmitExternalCall(ILGenerator generator, int callerParameterCount, FunctionDefinition callee, IReadOnlyList<Func<ILGenerator, Type>> arguments)
    {
        var signature = new FunctionType(callee.ParameterTypes, callee.ReturnTypes);
        // Create a function definition field, fill it and push its value onto the stack.
        var field = DefineConstHelperField(callee);
        generator.Emit(OpCodes.Ldsfld, field);
        // Call it.
        EmitExternalCall(generator, callerParameterCount, signature, callee.GetType(), arguments);
    }

    /// <summary>
    /// Emits a call through a FunctionDefinition already on the stack,
    /// boxing arguments into an object array and unboxing the results.
    /// </summary>
    internal static void EmitExternalCall(ILGenerator generator, int callerParameterCount, FunctionType signature, Type calleeType, IReadOnlyList<Func<ILGenerator, Type>> arguments)
    {
        // To bridge the divide between JIT-compiled code and the interpreter,
        // we generate code that packs the parameter list of a JIT-compiled
        // function as an array of objects and feed that to the interpreter.
        // We then unpack the list of objects produced by the interpreter.

        // Create the arguments array.
        EmitNewArray<object>(
            generator,
            arguments
                .Select<Func<ILGenerator, Type>, Action<ILGenerator>>(
                    arg => gen =>
                    {
                        gen.Emit(OpCodes.Box, arg(gen));
                    })
                .ToArray());
        // Load the call stack depth (the caller's trailing uint parameter).
        generator.Emit(OpCodes.Ldarg, callerParameterCount);
        // Call the interpreter.
        var callee = calleeType
            .GetMethod("Invoke", new[] { typeof(IReadOnlyList<object>), typeof(uint) });
        if (callee.IsPublic && calleeType.IsPublic)
        {
            // Direct call when the concrete Invoke is reachable from the
            // dynamic assembly.
            generator.Emit(OpCodes.Call, callee);
        }
        else
        {
            // Otherwise, call virtually through the public base class.
            generator.Emit(
                OpCodes.Callvirt,
                typeof(FunctionDefinition)
                    .GetMethod("Invoke", new[] { typeof(IReadOnlyList<object>), typeof(uint) }));
        }
        // Unpack the interpreter's return values.
        EmitUnpackList(
            generator,
            signature.ReturnTypes.Select(ValueHelpers.ToClrType).ToArray(),
            typeof(IReadOnlyList<object>));
    }

    // NOTE(review): the two IsGloballyAccessible helpers appear unreferenced
    // within this class — EmitExternalCall checks IsPublic directly, which
    // does not account for nested types. Confirm whether these are intended
    // to replace that check.
    private static bool IsGloballyAccessible(MethodInfo method)
    {
        return method.IsPublic && IsGloballyAccessible(method.DeclaringType);
    }

    private static bool IsGloballyAccessible(Type type)
    {
        return type.IsPublic || (type.IsNestedPublic && IsGloballyAccessible(type.DeclaringType));
    }

    /// <summary>
    /// Emits code that pops a list from the stack and pushes its elements,
    /// unboxed to the given element types, in order.
    /// </summary>
    private static void EmitUnpackList(ILGenerator generator, IReadOnlyList<Type> elementTypes, Type type)
    {
        var itemGetter = type.GetProperties().First(x => x.GetIndexParameters().Length > 0).GetMethod;
        var local = generator.DeclareLocal(type);
        generator.Emit(OpCodes.Stloc, local);
        for (int i = 0; i < elementTypes.Count; i++)
        {
            generator.Emit(OpCodes.Ldloc, local);
            generator.Emit(OpCodes.Ldc_I4, i);
            generator.Emit(OpCodes.Callvirt, itemGetter);
            generator.Emit(OpCodes.Unbox_Any, elementTypes[i]);
        }
    }

    /// <summary>
    /// Defines a static helper field whose value will be set to the given
    /// object when the dynamic type is created (see Finish).
    /// </summary>
    private FieldBuilder DefineConstHelperField(object value)
    {
        var field = DefineHelperField(value.GetType());
        constFieldValues[field] = value;
        return field;
    }

    /// <summary>
    /// Defines a uniquely named public static helper field of the given type.
    /// </summary>
    private FieldBuilder DefineHelperField(Type type)
    {
        return wasmType.DefineField($"helper_{helperFieldIndex++}", type, FieldAttributes.Public | FieldAttributes.Static);
    }

    /// <summary>
    /// Emits code that creates a new array of T and fills each slot with the
    /// value produced by the corresponding generator.
    /// </summary>
    private static void EmitNewArray<T>(ILGenerator generator, IReadOnlyList<Action<ILGenerator>> valueGenerators)
    {
        generator.Emit(OpCodes.Ldc_I4, valueGenerators.Count);
        generator.Emit(OpCodes.Newarr, typeof(T));
        for (int i = 0; i < valueGenerators.Count; i++)
        {
            generator.Emit(OpCodes.Dup);
            generator.Emit(OpCodes.Ldc_I4, i);
            valueGenerators[i](generator);
            generator.Emit(OpCodes.Stelem, typeof(T));
        }
    }

    /// <summary>
    /// Tries to compile the given function body to CIL. Returns false when
    /// any instruction lacks a JIT implementation; in that case nothing has
    /// been emitted yet and the caller may emit a thunk instead.
    /// </summary>
    private bool TryCompile(FunctionType signature, FunctionBody body, ILGenerator generator)
    {
        var impl = GetImplementationOrNull(body.BodyInstructions);
        if (impl == null)
        {
            return false;
        }
        else
        {
            // Declare CLR locals for the body's WebAssembly locals, indexed
            // after the parameters.
            var locals = new Dictionary<uint, LocalBuilder>();
            var localTypes = new List<WasmValueType>(signature.ParameterTypes);
            uint localIndex = (uint)signature.ParameterTypes.Count;
            foreach (var item in body.Locals)
            {
                for (uint i = 0; i < item.LocalCount; i++)
                {
                    locals[localIndex++] = generator.DeclareLocal(ValueHelpers.ToClrType(item.LocalType));
                    localTypes.Add(item.LocalType);
                }
            }
            var context = new CompilerContext(this, localTypes, signature.ParameterTypes.Count, locals);
            // Increment the call stack depth (the trailing uint parameter).
            generator.Emit(OpCodes.Ldarg, signature.ParameterTypes.Count);
            generator.Emit(OpCodes.Ldc_I4_1);
            generator.Emit(OpCodes.Add);
            generator.Emit(OpCodes.Starg, signature.ParameterTypes.Count);
            // Emit the method body.
            impl(context, generator);
            // Return.
            generator.Emit(OpCodes.Ret);
            return true;
        }
    }

    /// <summary>
    /// Looks up the implementation for a single instruction; null when the
    /// instruction's operator has no registered implementation.
    /// </summary>
    private InstructionImpl GetImplementationOrNull(Instruction instruction)
    {
        Func<Instruction, InstructionImpl> impl;
        if (OperatorImplementations.TryGetValue(instruction.Op, out impl))
        {
            return impl(instruction);
        }
        else
        {
            return null;
        }
    }

    /// <summary>
    /// Composes implementations for a sequence of instructions; null when
    /// any single instruction cannot be compiled (all-or-nothing).
    /// </summary>
    private InstructionImpl GetImplementationOrNull(IReadOnlyList<Instruction> instructions)
    {
        var impls = new List<InstructionImpl>();
        foreach (var instruction in instructions)
        {
            var instructionImpl = GetImplementationOrNull(instruction);
            if (instructionImpl == null)
            {
                return null;
            }
            impls.Add(instructionImpl);
        }
        return (context, gen) =>
        {
            foreach (var impl in impls)
            {
                impl(context, gen);
            }
        };
    }

    /// <inheritdoc/>
    public override void Finish()
    {
        // Create the type.
        var realType = wasmType.CreateType();
        // Populate its fields.
        foreach (var pair in constFieldValues)
        {
            realType.GetField(pair.Key.Name).SetValue(null, pair.Value);
        }
        constFieldValues = null;
        // Rewrite function definitions: MethodBuilders cannot be invoked, so
        // swap in the created type's real MethodInfo by name.
        foreach (var functionDef in functionDefinitions)
        {
            functionDef.method = realType.GetMethod(functionDef.method.Name);
        }
        functionDefinitions = null;
    }

    /// <summary>
    /// The default mapping of operators to their implementations.
    /// </summary>
    public static readonly IReadOnlyDictionary<Operator, Func<Instruction, InstructionImpl>> DefaultOperatorImplementations =
        new Dictionary<Operator, Func<Instruction, InstructionImpl>>()
    {
        { Operators.Nop, JitOperatorImpls.Nop },
        { Operators.Drop, JitOperatorImpls.Drop },
        { Operators.Select, JitOperatorImpls.Select },
        { Operators.Call, JitOperatorImpls.Call },
        { Operators.GetLocal, JitOperatorImpls.GetLocal },
        { Operators.SetLocal, JitOperatorImpls.SetLocal },
        { Operators.TeeLocal, JitOperatorImpls.TeeLocal },
        { Operators.Int32Const, JitOperatorImpls.Int32Const },
        { Operators.Int64Const, JitOperatorImpls.Int64Const },
        { Operators.Float32Const, JitOperatorImpls.Float32Const },
        { Operators.Float64Const, JitOperatorImpls.Float64Const },
        { Operators.Int32Add, JitOperatorImpls.Int32Add },
        { Operators.Int32And, JitOperatorImpls.Int32And },
        { Operators.Int32Clz, JitOperatorImpls.Int32Clz },
        { Operators.Int32Ctz, JitOperatorImpls.Int32Ctz },
        { Operators.Int32DivS, JitOperatorImpls.Int32DivS },
        { Operators.Int32DivU, JitOperatorImpls.Int32DivU },
        { Operators.Int32Eq, JitOperatorImpls.Int32Eq },
        { Operators.Int32Eqz, JitOperatorImpls.Int32Eqz },
        { Operators.Int32GeS, JitOperatorImpls.Int32GeS },
        { Operators.Int32GeU, JitOperatorImpls.Int32GeU },
        { Operators.Int32GtS, JitOperatorImpls.Int32GtS },
        { Operators.Int32GtU, JitOperatorImpls.Int32GtU },
        { Operators.Int32LeS, JitOperatorImpls.Int32LeS },
        { Operators.Int32LeU, JitOperatorImpls.Int32LeU },
        { Operators.Int32LtS, JitOperatorImpls.Int32LtS },
        { Operators.Int32LtU, JitOperatorImpls.Int32LtU },
        { Operators.Int32Mul, JitOperatorImpls.Int32Mul },
        { Operators.Int32Ne, JitOperatorImpls.Int32Ne },
        { Operators.Int32Or, JitOperatorImpls.Int32Or },
        { Operators.Int32Popcnt, JitOperatorImpls.Int32Popcnt },
        { Operators.Int32RemS, JitOperatorImpls.Int32RemS },
        { Operators.Int32RemU, JitOperatorImpls.Int32RemU },
        { Operators.Int32Rotl, JitOperatorImpls.Int32Rotl },
        { Operators.Int32Rotr, JitOperatorImpls.Int32Rotr },
        { Operators.Int32Shl, JitOperatorImpls.Int32Shl },
        { Operators.Int32ShrS, JitOperatorImpls.Int32ShrS },
        { Operators.Int32ShrU, JitOperatorImpls.Int32ShrU },
        { Operators.Int32Sub, JitOperatorImpls.Int32Sub },
        { Operators.Int32WrapInt64, JitOperatorImpls.Int32WrapInt64 },
        { Operators.Int32Xor, JitOperatorImpls.Int32Xor },
        { Operators.Int64Add, JitOperatorImpls.Int64Add },
        { Operators.Int64And, JitOperatorImpls.Int64And },
        { Operators.Int64Clz, JitOperatorImpls.Int64Clz },
        { Operators.Int64Ctz, JitOperatorImpls.Int64Ctz },
        { Operators.Int64DivS, JitOperatorImpls.Int64DivS },
        { Operators.Int64DivU, JitOperatorImpls.Int64DivU },
        { Operators.Int64Eq, JitOperatorImpls.Int64Eq },
        { Operators.Int64Eqz, JitOperatorImpls.Int64Eqz },
        { Operators.Int64ExtendSInt32, JitOperatorImpls.Int64ExtendSInt32 },
        { Operators.Int64ExtendUInt32, JitOperatorImpls.Int64ExtendUInt32 },
        { Operators.Int64GeS, JitOperatorImpls.Int64GeS },
        { Operators.Int64GeU, JitOperatorImpls.Int64GeU },
        { Operators.Int64GtS, JitOperatorImpls.Int64GtS },
        { Operators.Int64GtU, JitOperatorImpls.Int64GtU },
        { Operators.Int64LeS, JitOperatorImpls.Int64LeS },
        { Operators.Int64LeU, JitOperatorImpls.Int64LeU },
        { Operators.Int64LtS, JitOperatorImpls.Int64LtS },
        { Operators.Int64LtU, JitOperatorImpls.Int64LtU },
        { Operators.Int64Mul, JitOperatorImpls.Int64Mul },
        { Operators.Int64Ne, JitOperatorImpls.Int64Ne },
        { Operators.Int64Or, JitOperatorImpls.Int64Or },
        { Operators.Int64Popcnt, JitOperatorImpls.Int64Popcnt },
        { Operators.Int64RemS, JitOperatorImpls.Int64RemS },
        { Operators.Int64RemU, JitOperatorImpls.Int64RemU },
        { Operators.Int64Rotl, JitOperatorImpls.Int64Rotl },
        { Operators.Int64Rotr, JitOperatorImpls.Int64Rotr },
        { Operators.Int64Shl, JitOperatorImpls.Int64Shl },
        { Operators.Int64ShrS, JitOperatorImpls.Int64ShrS },
        { Operators.Int64ShrU, JitOperatorImpls.Int64ShrU },
        { Operators.Int64Sub, JitOperatorImpls.Int64Sub },
        { Operators.Int64Xor, JitOperatorImpls.Int64Xor }
    };
}
/// <summary>
/// A function definition backed by a JIT-compiled static method.
/// </summary>
internal sealed class CompiledFunctionDefinition : FunctionDefinition
{
    internal CompiledFunctionDefinition(FunctionType signature, MethodInfo method, bool translateExceptions)
    {
        this.signature = signature;
        this.method = method;
        this.translateExceptions = translateExceptions;
    }

    // The WebAssembly signature of the compiled function.
    private FunctionType signature;
    // The compiled method; initially a MethodBuilder, replaced with the
    // created type's MethodInfo by JitCompiler.Finish.
    internal MethodInfo method;
    // Whether CLR exceptions should be translated into trap exceptions.
    private bool translateExceptions;

    /// <inheritdoc/>
    public override IReadOnlyList<WasmValueType> ParameterTypes => signature.ParameterTypes;

    /// <inheritdoc/>
    public override IReadOnlyList<WasmValueType> ReturnTypes => signature.ReturnTypes;

    /// <inheritdoc/>
    public override IReadOnlyList<object> Invoke(IReadOnlyList<object> arguments, uint callStackDepth = 0)
    {
        object result;
        try
        {
            // The compiled method takes the WebAssembly arguments plus a
            // trailing call stack depth argument.
            result = method.Invoke(null, arguments.Concat(new object[] { callStackDepth }).ToArray());
        }
        catch (TargetInvocationException ex)
        {
            // Reflection wraps exceptions thrown by the invoked method;
            // unwrap before (optionally) translating.
            var inner = ex.InnerException;
            if (translateExceptions && TryTranslateException(inner, out Exception translate))
            {
                throw translate;
            }
            else
            {
                throw inner;
            }
        }
        catch (Exception ex)
        {
            if (translateExceptions && TryTranslateException(ex, out Exception translate))
            {
                throw translate;
            }
            else
            {
                // Rethrow without disturbing the original stack trace.
                throw;
            }
        }
        if (ReturnTypes.Count == 0)
        {
            return Array.Empty<object>();
        }
        else if (ReturnTypes.Count == 1)
        {
            return new[] { result };
        }
        else
        {
            throw new WasmException("Cannot compile functions with more than one return value.");
        }
    }

    /// <summary>
    /// Translates CLR arithmetic exceptions to the trap exceptions mandated
    /// by the WebAssembly spec. Exact type matches only: derived exception
    /// types are deliberately not translated.
    /// </summary>
    private static bool TryTranslateException(Exception original, out Exception translated)
    {
        if (original.GetType() == typeof(DivideByZeroException))
        {
            translated = new TrapException(original.Message, TrapException.SpecMessages.IntegerDivideByZero);
            return true;
        }
        else if (original.GetType() == typeof(OverflowException))
        {
            translated = new TrapException(original.Message, TrapException.SpecMessages.IntegerOverflow);
            return true;
        }
        else
        {
            translated = null;
            return false;
        }
    }
}
}
<|start_filename|>libwasm/TypeSection.cs<|end_filename|>
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Wasm.Binary;
namespace Wasm
{
/// <summary>
/// Represents a type section in a WebAssembly file.
/// </summary>
/// <summary>
/// Represents a type section in a WebAssembly file.
/// </summary>
public sealed class TypeSection : Section
{
    /// <summary>
    /// Creates an empty type section.
    /// </summary>
    public TypeSection()
        : this(Enumerable.Empty<FunctionType>())
    {
    }

    /// <summary>
    /// Creates a type section from the given list of function types.
    /// </summary>
    /// <param name="functionTypes">The list of function types in this type section.</param>
    public TypeSection(IEnumerable<FunctionType> functionTypes)
        : this(functionTypes, new byte[0])
    {
    }

    /// <summary>
    /// Creates a type section from the given list of function types and an additional payload.
    /// </summary>
    /// <param name="functionTypes">The list of function types in this type section.</param>
    /// <param name="extraPayload">The additional payload for this section, as an array of bytes.</param>
    public TypeSection(IEnumerable<FunctionType> functionTypes, byte[] extraPayload)
    {
        FunctionTypes = new List<FunctionType>(functionTypes);
        ExtraPayload = extraPayload;
    }

    /// <summary>
    /// Gets this type section's list of function types.
    /// </summary>
    /// <returns>The list of function types in this type section.</returns>
    public List<FunctionType> FunctionTypes { get; private set; }

    /// <summary>
    /// This type section's additional payload.
    /// </summary>
    /// <returns>The additional payload, as an array of bytes.</returns>
    public byte[] ExtraPayload { get; set; }

    /// <inheritdoc/>
    public override SectionName Name => new SectionName(SectionCode.Type);

    /// <inheritdoc/>
    public override void WritePayloadTo(BinaryWasmWriter writer)
    {
        // The payload is a length-prefixed sequence of function types,
        // optionally followed by extra bytes.
        writer.WriteVarUInt32((uint)FunctionTypes.Count);
        for (int i = 0; i < FunctionTypes.Count; i++)
        {
            FunctionTypes[i].WriteTo(writer);
        }
        writer.Writer.Write(ExtraPayload);
    }

    /// <inheritdoc/>
    public override void Dump(TextWriter writer)
    {
        writer.Write(Name.ToString());
        writer.Write("; number of entries: ");
        writer.Write(FunctionTypes.Count);
        writer.WriteLine();

        int entryIndex = 0;
        foreach (var entry in FunctionTypes)
        {
            writer.Write("#");
            writer.Write(entryIndex);
            writer.Write(" -> ");
            entry.Dump(writer);
            writer.WriteLine();
            entryIndex++;
        }

        if (ExtraPayload.Length > 0)
        {
            writer.Write("Extra payload size: ");
            writer.Write(ExtraPayload.Length);
            writer.WriteLine();
            DumpHelpers.DumpBytes(ExtraPayload, writer);
            writer.WriteLine();
        }
    }

    /// <summary>
    /// Reads a type section's payload from the given binary WebAssembly reader.
    /// </summary>
    /// <param name="header">The type section's header.</param>
    /// <param name="reader">A reader for a binary WebAssembly file.</param>
    /// <returns>A parsed type section.</returns>
    public static TypeSection ReadSectionPayload(SectionHeader header, BinaryWasmReader reader)
    {
        long payloadStart = reader.Position;
        uint entryCount = reader.ReadVarUInt32();
        var parsedTypes = new List<FunctionType>((int)entryCount);
        for (uint i = 0; i < entryCount; i++)
        {
            parsedTypes.Add(FunctionType.ReadFrom(reader));
        }
        // Whatever trails the declared entries is preserved verbatim.
        var remainder = reader.ReadRemainingPayload(payloadStart, header);
        return new TypeSection(parsedTypes, remainder);
    }
}
/// <summary>
/// Represents a function type entry in a type section.
/// </summary>
/// <summary>
/// Represents a function type entry in a type section.
/// </summary>
public sealed class FunctionType
{
    /// <summary>
    /// Creates a function type with no parameters and no return values.
    /// </summary>
    public FunctionType()
        : this(new List<WasmValueType>(), new List<WasmValueType>())
    {
    }

    /// <summary>
    /// Creates a function type from the given parameter types and return types.
    /// </summary>
    /// <param name="parameterTypes">This function type's list of parameter types.</param>
    /// <param name="returnTypes">This function type's list of return types.</param>
    public FunctionType(
        IEnumerable<WasmValueType> parameterTypes,
        IEnumerable<WasmValueType> returnTypes)
        : this(new List<WasmValueType>(parameterTypes), new List<WasmValueType>(returnTypes))
    {
    }

    /// <summary>
    /// Creates a function type that takes ownership of the given parameter types and return types.
    /// </summary>
    /// <param name="parameterTypes">This function type's list of parameter types.</param>
    /// <param name="returnTypes">This function type's list of return types.</param>
    private FunctionType(
        List<WasmValueType> parameterTypes,
        List<WasmValueType> returnTypes)
    {
        ParameterTypes = parameterTypes;
        ReturnTypes = returnTypes;
    }

    /// <summary>
    /// Gets this function type's form, which is always WasmType.Func.
    /// </summary>
    public WasmType Form => WasmType.Func;

    /// <summary>
    /// Gets this function type's list of parameter types.
    /// </summary>
    /// <returns>The list of parameter types for this function.</returns>
    public List<WasmValueType> ParameterTypes { get; private set; }

    /// <summary>
    /// Gets this function type's list of return types.
    /// </summary>
    /// <returns>The list of return types for this function.</returns>
    public List<WasmValueType> ReturnTypes { get; private set; }

    /// <summary>
    /// Writes this function type to the given binary WebAssembly file.
    /// </summary>
    /// <param name="writer">The writer for a binary WebAssembly file.</param>
    public void WriteTo(BinaryWasmWriter writer)
    {
        writer.WriteWasmType(Form);
        WriteTypeList(writer, ParameterTypes);
        WriteTypeList(writer, ReturnTypes);
    }

    // Writes a length-prefixed list of value types to a binary file.
    private static void WriteTypeList(BinaryWasmWriter writer, List<WasmValueType> types)
    {
        writer.WriteVarUInt32((uint)types.Count);
        foreach (var type in types)
        {
            writer.WriteWasmValueType(type);
        }
    }

    /// <summary>
    /// Writes a textual representation of this function type to the given writer.
    /// </summary>
    /// <param name="writer">The writer to which text is written.</param>
    public void Dump(TextWriter writer)
    {
        writer.Write("func(");
        DumpTypeList(writer, ParameterTypes);
        writer.Write(") returns (");
        DumpTypeList(writer, ReturnTypes);
        writer.Write(")");
    }

    // Dumps a comma-separated list of value types.
    private static void DumpTypeList(TextWriter writer, List<WasmValueType> types)
    {
        for (int i = 0; i < types.Count; i++)
        {
            if (i > 0)
            {
                writer.Write(", ");
            }
            DumpHelpers.DumpWasmType(types[i], writer);
        }
    }

    /// <inheritdoc/>
    public override string ToString()
    {
        var writer = new StringWriter();
        Dump(writer);
        return writer.ToString();
    }

    /// <summary>
    /// Reads a single function type from the given reader.
    /// </summary>
    /// <returns>The function type.</returns>
    public static FunctionType ReadFrom(BinaryWasmReader reader)
    {
        var form = (WasmType)reader.ReadWasmType();
        if (form != WasmType.Func)
            throw new WasmException("Invalid 'form' value ('" + form + "') for function type.");

        var parameters = ReadTypeList(reader);
        var returns = ReadTypeList(reader);
        return new FunctionType(parameters, returns);
    }

    // Reads a length-prefixed list of value types from a binary file.
    private static List<WasmValueType> ReadTypeList(BinaryWasmReader reader)
    {
        uint count = reader.ReadVarUInt32();
        var types = new List<WasmValueType>((int)count);
        for (uint i = 0; i < count; i++)
        {
            types.Add(reader.ReadWasmValueType());
        }
        return types;
    }
}
}
<|start_filename|>libwasm/BadHeaderException.cs<|end_filename|>
using System;
namespace Wasm
{
/// <summary>
/// The type of exception that is thrown when an invalid header is detected.
/// </summary>
/// <summary>
/// The type of exception that is thrown when an invalid header is detected.
/// </summary>
public sealed class BadHeaderException : WasmException
{
    /// <summary>
    /// Gets the erroneous version header.
    /// </summary>
    /// <value>The version header.</value>
    public VersionHeader Header { get; }

    /// <summary>
    /// Initializes a new instance of the <see cref="BadHeaderException"/> class.
    /// </summary>
    /// <param name="header">The version header.</param>
    /// <param name="message">The error message.</param>
    public BadHeaderException(VersionHeader header, string message)
        : base(message)
    {
        Header = header;
    }
}
}
<|start_filename|>libwasm/Optimize/Peephole/UnreachableCodeOptimization.cs<|end_filename|>
using System.Collections.Generic;
using Wasm.Instructions;
namespace Wasm.Optimize
{
/// <summary>
/// An optimization that removes unreachable code.
/// </summary>
/// <summary>
/// An optimization that removes unreachable code.
/// </summary>
public sealed class UnreachableCodeOptimization : PeepholeOptimization
{
    private UnreachableCodeOptimization() { }

    /// <summary>
    /// The only instance of this optimization.
    /// </summary>
    public static readonly UnreachableCodeOptimization Instance = new UnreachableCodeOptimization();

    // Operators that unconditionally transfer control. No instruction that
    // follows one of these in a linear sequence can ever execute.
    private static readonly HashSet<Operator> blockTerminatingInstructions =
        new HashSet<Operator>()
        {
            Operators.Br,
            Operators.Unreachable,
            Operators.Return
        };

    /// <summary>
    /// Tests if the items at the front of the given list of instructions
    /// match the peephole optimization; if a match occurs, a nonzero value
    /// is returned that indicates the number of instructions at the front
    /// of the list of instructions that should be rewritten.
    /// </summary>
    /// <param name="instructions">
    /// The instructions to match against the peephole optimization.
    /// </param>
    /// <returns>The number of instructions to rewrite.</returns>
    public override uint Match(IReadOnlyList<Instruction> instructions)
    {
        // There is only something to delete when a block-terminating
        // instruction is followed by at least one more instruction.
        bool matches = instructions.Count > 1
            && blockTerminatingInstructions.Contains(instructions[0].Op);
        return matches ? (uint)instructions.Count : 0u;
    }

    /// <summary>
    /// Rewrites the given sequence of instructions.
    /// </summary>
    /// <param name="matched">
    /// A list of instructions that has been matched and will all be replaced.
    /// </param>
    /// <returns>The rewritten instructions.</returns>
    public override IReadOnlyList<Instruction> Rewrite(IReadOnlyList<Instruction> matched)
    {
        // Keep only the terminator: everything after it is dead code.
        return new Instruction[] { matched[0] };
    }
}
}
<|start_filename|>libwasm/Instructions/Float64Instruction.cs<|end_filename|>
using System.IO;
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes a WebAssembly stack machine instruction that takes a
/// 64-bit floating-point number as immediate.
/// </summary>
/// <summary>
/// Describes a WebAssembly stack machine instruction that takes a
/// 64-bit floating-point number as immediate.
/// </summary>
public sealed class Float64Instruction : Instruction
{
    /// <summary>
    /// Creates an instruction that takes a 64-bit floating-point number immediate.
    /// </summary>
    /// <param name="op">The instruction's opcode.</param>
    /// <param name="immediate">The instruction's immediate.</param>
    public Float64Instruction(Float64Operator op, double immediate)
    {
        this.opValue = op;
        this.Immediate = immediate;
    }

    // The operator backing the read-only Op property.
    private Float64Operator opValue;

    /// <summary>
    /// Gets the operator for this instruction.
    /// </summary>
    /// <returns>The instruction's operator.</returns>
    public override Operator Op { get { return opValue; } }

    /// <summary>
    /// Gets this instruction's immediate.
    /// </summary>
    /// <returns>The immediate value.</returns>
    public double Immediate { get; set; }

    /// <summary>
    /// Writes this instruction's immediates (but not its opcode)
    /// to the given WebAssembly file writer.
    /// </summary>
    /// <param name="writer">The writer to write this instruction's immediates to.</param>
    public override void WriteImmediatesTo(BinaryWasmWriter writer)
    {
        writer.WriteFloat64(Immediate);
    }

    /// <summary>
    /// Writes a string representation of this instruction to the given text writer.
    /// </summary>
    /// <param name="writer">
    /// The writer to which a representation of this instruction is written.
    /// </param>
    public override void Dump(TextWriter writer)
    {
        Op.Dump(writer);
        writer.Write(" ");
        writer.Write(Immediate);
    }
}
}
<|start_filename|>libwasm/FunctionSection.cs<|end_filename|>
using System.Collections.Generic;
using System.IO;
using Wasm.Binary;
namespace Wasm
{
/// <summary>
/// Represents a function section.
/// </summary>
/// <summary>
/// Represents a function section.
/// </summary>
public sealed class FunctionSection : Section
{
    /// <summary>
    /// Creates an empty function section.
    /// </summary>
    public FunctionSection()
        : this(new uint[0])
    {
        // Chains to the list-based constructor for consistency with the
        // other section types (e.g., TypeSection) instead of duplicating
        // field initialization.
    }

    /// <summary>
    /// Creates a function from the given list of function types.
    /// </summary>
    /// <param name="functionTypes">The function section's list of types.</param>
    public FunctionSection(IEnumerable<uint> functionTypes)
        : this(functionTypes, new byte[0])
    {
    }

    /// <summary>
    /// Creates a function from the given list of function types and additional payload.
    /// </summary>
    /// <param name="functionTypes">The function section's list of types.</param>
    /// <param name="extraPayload">The function section's additional payload.</param>
    public FunctionSection(IEnumerable<uint> functionTypes, byte[] extraPayload)
    {
        this.FunctionTypes = new List<uint>(functionTypes);
        this.ExtraPayload = extraPayload;
    }

    /// <inheritdoc/>
    public override SectionName Name => new SectionName(SectionCode.Function);

    /// <summary>
    /// Gets this function section's function types, which are entries in the type
    /// section.
    /// </summary>
    /// <returns>A list of indices that refer to entries in the type section.</returns>
    public List<uint> FunctionTypes { get; private set; }

    /// <summary>
    /// This function section's additional payload.
    /// </summary>
    /// <returns>The additional payload, as an array of bytes.</returns>
    public byte[] ExtraPayload { get; set; }

    /// <summary>
    /// Writes this WebAssembly section's payload to the given binary WebAssembly writer.
    /// </summary>
    /// <param name="writer">The writer to which the payload is written.</param>
    public override void WritePayloadTo(BinaryWasmWriter writer)
    {
        // The payload is a length-prefixed list of type-section indices,
        // optionally followed by extra bytes.
        writer.WriteVarUInt32((uint)FunctionTypes.Count);
        foreach (var index in FunctionTypes)
        {
            writer.WriteVarUInt32(index);
        }
        writer.Writer.Write(ExtraPayload);
    }

    /// <summary>
    /// Reads the function section with the given header.
    /// </summary>
    /// <param name="header">The section header.</param>
    /// <param name="reader">The WebAssembly file reader.</param>
    /// <returns>The parsed section.</returns>
    public static FunctionSection ReadSectionPayload(SectionHeader header, BinaryWasmReader reader)
    {
        long startPos = reader.Position;
        // Read the function indices, preallocating the list since the
        // entry count is known up front.
        uint count = reader.ReadVarUInt32();
        var funcTypes = new List<uint>((int)count);
        for (uint i = 0; i < count; i++)
        {
            funcTypes.Add(reader.ReadVarUInt32());
        }
        // Skip any remaining bytes.
        var extraPayload = reader.ReadRemainingPayload(startPos, header);
        return new FunctionSection(funcTypes, extraPayload);
    }

    /// <inheritdoc/>
    public override void Dump(TextWriter writer)
    {
        writer.Write(Name.ToString());
        writer.Write("; number of entries: ");
        writer.Write(FunctionTypes.Count);
        writer.WriteLine();
        for (int i = 0; i < FunctionTypes.Count; i++)
        {
            writer.Write("#");
            writer.Write(i);
            writer.Write(" -> type #");
            writer.Write(FunctionTypes[i]);
            writer.WriteLine();
        }
        if (ExtraPayload.Length > 0)
        {
            writer.Write("Extra payload size: ");
            writer.Write(ExtraPayload.Length);
            writer.WriteLine();
            DumpHelpers.DumpBytes(ExtraPayload, writer);
            writer.WriteLine();
        }
    }
}
}
<|start_filename|>libwasm/Interpret/PredefinedImporter.cs<|end_filename|>
using System;
using System.Collections.Generic;
namespace Wasm.Interpret
{
/// <summary>
/// An importer implementation that imports predefined values.
/// </summary>
/// <summary>
/// An importer implementation that imports predefined values.
/// </summary>
public sealed class PredefinedImporter : IImporter
{
    // Importable definitions, keyed by field name.
    private readonly Dictionary<string, FunctionDefinition> functions =
        new Dictionary<string, FunctionDefinition>();
    private readonly Dictionary<string, Variable> globals =
        new Dictionary<string, Variable>();
    private readonly Dictionary<string, LinearMemory> memories =
        new Dictionary<string, LinearMemory>();
    private readonly Dictionary<string, FunctionTable> tables =
        new Dictionary<string, FunctionTable>();

    /// <summary>
    /// Creates a new importer.
    /// </summary>
    public PredefinedImporter()
    {
    }

    /// <summary>
    /// Gets a read-only dictionary view that contains all importable function definitions.
    /// </summary>
    public IReadOnlyDictionary<string, FunctionDefinition> FunctionDefinitions => functions;

    /// <summary>
    /// Gets a read-only dictionary view that contains all importable variable definitions.
    /// </summary>
    public IReadOnlyDictionary<string, Variable> VariableDefinitions => globals;

    /// <summary>
    /// Gets a read-only dictionary view that contains all importable memory definitions.
    /// </summary>
    public IReadOnlyDictionary<string, LinearMemory> MemoryDefinitions => memories;

    /// <summary>
    /// Gets a read-only dictionary view that contains all importable table definitions.
    /// </summary>
    public IReadOnlyDictionary<string, FunctionTable> TableDefinitions => tables;

    /// <summary>
    /// Maps the given name to the given function definition.
    /// </summary>
    /// <param name="name">The name to define.</param>
    /// <param name="definition">The function definition.</param>
    public void DefineFunction(string name, FunctionDefinition definition)
    {
        functions[name] = definition;
    }

    /// <summary>
    /// Maps the given name to the given variable.
    /// </summary>
    /// <param name="name">The name to define.</param>
    /// <param name="definition">The variable definition.</param>
    public void DefineVariable(string name, Variable definition)
    {
        globals[name] = definition;
    }

    /// <summary>
    /// Maps the given name to the given memory.
    /// </summary>
    /// <param name="name">The name to define.</param>
    /// <param name="definition">The memory definition.</param>
    public void DefineMemory(string name, LinearMemory definition)
    {
        memories[name] = definition;
    }

    /// <summary>
    /// Maps the given name to the given table.
    /// </summary>
    /// <param name="name">The name to define.</param>
    /// <param name="definition">The table definition.</param>
    public void DefineTable(string name, FunctionTable definition)
    {
        tables[name] = definition;
    }

    /// <summary>
    /// Includes the definitions from the given importer in this importer.
    /// </summary>
    /// <param name="importer">The importer to include.</param>
    public void IncludeDefinitions(PredefinedImporter importer)
    {
        MergeInto<FunctionDefinition>(importer.functions, functions);
        MergeInto<Variable>(importer.globals, globals);
        MergeInto<LinearMemory>(importer.memories, memories);
        MergeInto<FunctionTable>(importer.tables, tables);
    }

    // Copies every mapping from one dictionary into another, overwriting
    // mappings for keys that are already present in the target.
    private static void MergeInto<T>(
        Dictionary<string, T> source,
        Dictionary<string, T> target)
    {
        foreach (var pair in source)
        {
            target[pair.Key] = pair.Value;
        }
    }

    // Looks up an imported value's field name, producing the type's default
    // value when no definition by that name exists.
    private static T ImportOrDefault<T>(ImportedValue value, Dictionary<string, T> definitions)
    {
        return definitions.TryGetValue(value.FieldName, out T result)
            ? result
            : default(T);
    }

    /// <inheritdoc/>
    public FunctionDefinition ImportFunction(ImportedFunction description, FunctionType signature)
    {
        return ImportOrDefault<FunctionDefinition>(description, functions);
    }

    /// <inheritdoc/>
    public Variable ImportGlobal(ImportedGlobal description)
    {
        return ImportOrDefault<Variable>(description, globals);
    }

    /// <inheritdoc/>
    public LinearMemory ImportMemory(ImportedMemory description)
    {
        return ImportOrDefault<LinearMemory>(description, memories);
    }

    /// <inheritdoc/>
    public FunctionTable ImportTable(ImportedTable description)
    {
        return ImportOrDefault<FunctionTable>(description, tables);
    }
}
}
<|start_filename|>libwasm/Instructions/CallIndirectInstruction.cs<|end_filename|>
using System.IO;
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes a WebAssembly stack machine instruction that calls a function pointer.
/// </summary>
/// <summary>
/// Describes a WebAssembly stack machine instruction that calls a function pointer.
/// </summary>
public sealed class CallIndirectInstruction : Instruction
{
    /// <summary>
    /// Creates an indirect call instruction from the given operator and
    /// an index into the 'type' table.
    /// </summary>
    /// <param name="op">The operator for this instruction.</param>
    /// <param name="typeIndex">The index of the callee's signature in the type table.</param>
    public CallIndirectInstruction(CallIndirectOperator op, uint typeIndex)
        : this(op, typeIndex, 0)
    {
    }

    /// <summary>
    /// Creates an indirect call instruction from the given operator,
    /// an index into the 'type' table and a value for the reserved field.
    /// </summary>
    /// <param name="op">The operator for this instruction.</param>
    /// <param name="typeIndex">The index of the callee's signature in the type table.</param>
    /// <param name="reserved">A reserved value, which should always be zero.</param>
    public CallIndirectInstruction(CallIndirectOperator op, uint typeIndex, uint reserved)
    {
        callOp = op;
        TypeIndex = typeIndex;
        Reserved = reserved;
    }

    // The operator backing the read-only Op property.
    private CallIndirectOperator callOp;

    /// <summary>
    /// Gets the operator for this instruction.
    /// </summary>
    /// <returns>The instruction's operator.</returns>
    public override Operator Op => callOp;

    /// <summary>
    /// Gets the index of the callee's signature in the type table.
    /// </summary>
    /// <returns>The callee's signature, as an index in the type table.</returns>
    public uint TypeIndex { get; private set; }

    /// <summary>
    /// Gets a reserved value. This should always be zero.
    /// </summary>
    /// <returns>A reserved value.</returns>
    public uint Reserved { get; private set; }

    /// <summary>
    /// Writes this instruction's immediates (but not its opcode)
    /// to the given WebAssembly file writer.
    /// </summary>
    /// <param name="writer">The writer to write this instruction's immediates to.</param>
    public override void WriteImmediatesTo(BinaryWasmWriter writer)
    {
        writer.WriteVarUInt32(TypeIndex);
        writer.WriteVarUInt32(Reserved);
    }

    /// <summary>
    /// Writes a string representation of this instruction to the given text writer.
    /// </summary>
    /// <param name="writer">
    /// The writer to which a representation of this instruction is written.
    /// </param>
    public override void Dump(TextWriter writer)
    {
        Op.Dump(writer);
        writer.Write(" ");
        writer.Write(TypeIndex);
        // Only mention the reserved field when it holds a nonzero
        // (i.e., nonstandard) value.
        if (Reserved == 0)
        {
            return;
        }
        writer.Write(" (reserved=");
        writer.Write(Reserved);
        writer.Write(")");
    }
}
}
<|start_filename|>libwasm-text/Parser.cs<|end_filename|>
using System.Collections.Generic;
using Pixie;
using Pixie.Code;
using Pixie.Markup;
namespace Wasm.Text
{
/// <summary>
/// A parser for the WebAssembly text format.
/// </summary>
/// <summary>
/// A parser for the WebAssembly text format.
/// </summary>
public sealed partial class Parser
{
    /// <summary>
    /// Parses a sequence of tokens as S-expressions.
    /// </summary>
    /// <param name="tokens">The tokens to parse.</param>
    /// <param name="log">A log to send errors to.</param>
    /// <returns>A list of parsed S-expressions.</returns>
    public static IReadOnlyList<SExpression> ParseAsSExpressions(IEnumerable<Lexer.Token> tokens, ILog log)
    {
        // Materialize a single enumerator so the recursive overload below can
        // share parsing progress across nesting levels.
        using (var enumerator = tokens.GetEnumerator())
        {
            return ParseAsSExpressions(enumerator, log, false);
        }
    }

    /// <summary>
    /// Parses a sequence of tokens as S-expressions.
    /// </summary>
    /// <param name="tokens">The tokens to parse.</param>
    /// <param name="log">A log to send errors to.</param>
    /// <param name="isNested">Tells if this parsing action is a nested rather than a top-level action.</param>
    /// <returns>A list of parsed S-expressions.</returns>
    private static IReadOnlyList<SExpression> ParseAsSExpressions(IEnumerator<Lexer.Token> tokens, ILog log, bool isNested)
    {
        // IMPORTANT: 'tokens' is a single enumerator shared with all recursive
        // calls; every MoveNext here or in a nested call consumes the stream
        // for everyone. Errors are logged rather than thrown so that parsing
        // can continue and report multiple problems in one pass.
        var results = new List<SExpression>();
        while (tokens.MoveNext())
        {
            var token = tokens.Current;
            if (token.Kind == Lexer.TokenKind.LeftParenthesis)
            {
                if (tokens.MoveNext())
                {
                    // The first token after '(' is the expression's head and
                    // must be a keyword.
                    var head = tokens.Current;
                    if (head.Kind != Lexer.TokenKind.Keyword)
                    {
                        log.Log(
                            new LogEntry(
                                Severity.Error,
                                "expected a keyword",
                                "all S-expressions should begin with a keyword, but this one doesn't.",
                                new HighlightedSource(new SourceRegion(head.Span))));
                    }
                    // The recursive call consumes tokens up to (and including)
                    // the matching ')' — or the end of the stream.
                    var tail = ParseAsSExpressions(tokens, log, true);
                    // After the nested parse, Current should be the closing
                    // parenthesis; anything else means the expression was
                    // never closed.
                    // NOTE(review): if the stream ended inside the nested
                    // call, this reads Current after MoveNext returned false —
                    // presumably it still holds the last token; verify the
                    // lexer's enumerator guarantees this.
                    if (tokens.Current.Kind != Lexer.TokenKind.RightParenthesis)
                    {
                        log.Log(
                            new LogEntry(
                                Severity.Error,
                                "no closing parenthesis",
                                "left parenthesis indicates the start of an S-expression, but that expression is never closed.",
                                new HighlightedSource(new SourceRegion(token.Span))));
                    }
                    results.Add(SExpression.Create(head, tail));
                }
                else
                {
                    // '(' was the very last token in the stream.
                    log.Log(
                        new LogEntry(
                            Severity.Error,
                            "no closing parenthesis",
                            "left parenthesis indicates the start of an S-expression, but the file ends immediately after.",
                            new HighlightedSource(new SourceRegion(token.Span))));
                }
            }
            else if (token.Kind == Lexer.TokenKind.RightParenthesis)
            {
                // A ')' ends the current nesting level; at the top level it
                // has no matching '(' and is reported as excess.
                if (!isNested)
                {
                    log.Log(
                        new LogEntry(
                            Severity.Error,
                            "excess parenthesis",
                            "right parenthesis does not close a left parenthesis.",
                            new HighlightedSource(new SourceRegion(token.Span))));
                }
                break;
            }
            else
            {
                // Any other token is an atom.
                results.Add(SExpression.Create(token));
            }
        }
        return results;
    }
}
}
<|start_filename|>libwasm/VersionHeader.cs<|end_filename|>
using System;
namespace Wasm
{
/// <summary>
/// The header of a WebAssembly binary file, which specifies the magic number and
/// file format version.
/// </summary>
/// <summary>
/// The header of a WebAssembly binary file, which specifies the magic number and
/// file format version.
/// </summary>
public struct VersionHeader
{
    /// <summary>
    /// Initializes a new instance of the <see cref="VersionHeader"/> struct.
    /// </summary>
    /// <param name="magic">The magic number.</param>
    /// <param name="version">The version number.</param>
    public VersionHeader(uint magic, uint version)
    {
        Magic = magic;
        Version = version;
    }

    /// <summary>
    /// Gets the magic number in this version header.
    /// </summary>
    /// <value>The magic number.</value>
    public uint Magic { get; private set; }

    /// <summary>
    /// Gets the version specified by this version header.
    /// </summary>
    /// <value>The version.</value>
    public uint Version { get; private set; }

    /// <summary>
    /// Verifies that this version header is a WebAssembly version header for a known
    /// version.
    /// </summary>
    public void Verify()
    {
        if (Magic != WasmMagic)
        {
            var message = string.Format(
                "Invalid magic number. Got '{0}', expected '{1}'.",
                DumpHelpers.FormatHex(Magic),
                DumpHelpers.FormatHex(WasmMagic));
            throw new BadHeaderException(this, message);
        }

        bool knownVersion = Version == PreMvpVersion || Version == MvpVersion;
        if (!knownVersion)
        {
            throw new BadHeaderException(this, "Invalid version number '" + Version + "'.");
        }
    }

    /// <summary>
    /// Gets the WebAssembly magic number 0x6d736100 (i.e., '\0asm').
    /// </summary>
    public static uint WasmMagic => 0x6d736100;

    /// <summary>
    /// Gets the version number from the pre-MVP era.
    /// </summary>
    public static uint PreMvpVersion => 0xd;

    /// <summary>
    /// Gets the MVP version number.
    /// </summary>
    public static uint MvpVersion => 1;

    /// <summary>
    /// Gets the MVP version header.
    /// </summary>
    public static VersionHeader MvpHeader => new VersionHeader(WasmMagic, MvpVersion);
}
}
<|start_filename|>libwasm/UnknownSection.cs<|end_filename|>
using System.IO;
using Wasm.Binary;
namespace Wasm
{
/// <summary>
/// Represents an unknown section: a non-custom section whose section code was not recognized.
/// </summary>
/// <summary>
/// Represents an unknown section: a non-custom section whose section code was not recognized.
/// </summary>
public sealed class UnknownSection : Section
{
    /// <summary>
    /// Creates an unknown section from the given section name and payload.
    /// </summary>
    /// <param name="code">The unknown section's code.</param>
    /// <param name="payload">The unknown section's payload.</param>
    public UnknownSection(SectionCode code, byte[] payload)
    {
        Code = code;
        Payload = payload;
    }

    /// <summary>
    /// Gets this unknown section's code.
    /// </summary>
    /// <returns>The code of the unknown section.</returns>
    public SectionCode Code { get; private set; }

    /// <summary>
    /// Gets this unknown section's payload, as an array of bytes.
    /// </summary>
    /// <returns>A byte array that defines the unknown section's payload.</returns>
    public byte[] Payload { get; private set; }

    /// <inheritdoc/>
    public override SectionName Name => new SectionName(Code);

    /// <summary>
    /// Writes this WebAssembly section's payload to the given binary WebAssembly writer.
    /// </summary>
    /// <param name="writer">The writer to which the payload is written.</param>
    public override void WritePayloadTo(BinaryWasmWriter writer)
    {
        // The payload was never parsed, so it is written back verbatim.
        writer.Writer.Write(Payload);
    }
}
}
<|start_filename|>libwasm/Instructions/BlockInstruction.cs<|end_filename|>
using System.Collections.Generic;
using System.IO;
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes a WebAssembly stack machine instruction that takes a
/// block of instructions as an immediate.
/// </summary>
/// <summary>
/// Describes a WebAssembly stack machine instruction that takes a
/// block of instructions as an immediate.
/// </summary>
public sealed class BlockInstruction : Instruction
{
    /// <summary>
    /// Creates a block instruction.
    /// </summary>
    /// <param name="op">The operator performed by the block instruction.</param>
    /// <param name="type">The block instruction's result type.</param>
    /// <param name="contents">The block instruction's contents, as a sequence of instructions.</param>
    public BlockInstruction(BlockOperator op, WasmType type, IEnumerable<Instruction> contents)
    {
        blockOp = op;
        Type = type;
        Contents = new List<Instruction>(contents);
    }

    // The operator backing the read-only Op property.
    private BlockOperator blockOp;

    /// <summary>
    /// Gets the operator for this instruction.
    /// </summary>
    /// <returns>The instruction's operator.</returns>
    public override Operator Op => blockOp;

    /// <summary>
    /// Gets the type of value returned by this block.
    /// </summary>
    /// <returns>The type of value returned by this block.</returns>
    public WasmType Type { get; set; }

    /// <summary>
    /// Gets the block instruction's arity, that is, the number of elements
    /// it produces.
    /// </summary>
    public int Arity => Type == WasmType.Empty ? 0 : 1;

    /// <summary>
    /// Gets this block instruction's contents.
    /// </summary>
    /// <returns>The instruction's contents.</returns>
    public List<Instruction> Contents { get; private set; }

    /// <summary>
    /// Writes this instruction's immediates (but not its opcode)
    /// to the given WebAssembly file writer.
    /// </summary>
    /// <param name="writer">The writer to write this instruction's immediates to.</param>
    public override void WriteImmediatesTo(BinaryWasmWriter writer)
    {
        writer.WriteWasmType(Type);
        WriteContentsTo(writer);
    }

    /// <summary>
    /// Writes this instruction's child instructions to the given WebAssembly file writer,
    /// followed by an 'end' opcode.
    /// </summary>
    /// <param name="writer">The writer to write this instruction's child instructions to.</param>
    public void WriteContentsTo(BinaryWasmWriter writer)
    {
        foreach (var instruction in Contents)
        {
            instruction.WriteTo(writer);
        }
        writer.Writer.Write(Operators.EndOpCode);
    }

    /// <summary>
    /// Writes a string representation of this instruction to the given text writer.
    /// </summary>
    /// <param name="writer">
    /// The writer to which a representation of this instruction is written.
    /// </param>
    public override void Dump(TextWriter writer)
    {
        Op.Dump(writer);
        writer.Write(" (result: ");
        DumpHelpers.DumpWasmType(Type, writer);
        writer.Write(")");
        // Child instructions are rendered one per line, indented one level.
        var bodyWriter = DumpHelpers.CreateIndentedTextWriter(writer);
        foreach (var instruction in Contents)
        {
            bodyWriter.WriteLine();
            instruction.Dump(bodyWriter);
        }
        writer.WriteLine();
        writer.Write("end");
    }
}
}
<|start_filename|>libwasm/Instructions/MemoryInstruction.cs<|end_filename|>
using System.IO;
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes a WebAssembly stack machine instruction that takes a memory-immediate:
/// an alignment exponent and a byte offset.
/// </summary>
public sealed class MemoryInstruction : Instruction
{
    /// <summary>
    /// Creates a memory instruction from the given operator, alignment
    /// and offset.
    /// </summary>
    /// <param name="op">The operator for this memory instruction.</param>
    /// <param name="log2Alignment">The log2 of the memory alignment for this instruction.</param>
    /// <param name="offset">
    /// The offset of the memory location relative to the pointer that is accessed.
    /// </param>
    public MemoryInstruction(MemoryOperator op, uint log2Alignment, uint offset)
    {
        this.memoryOp = op;
        this.Log2Alignment = log2Alignment;
        this.Offset = offset;
    }

    // Backing field for the 'Op' property.
    private MemoryOperator memoryOp;

    /// <summary>
    /// Gets the operator for this instruction.
    /// </summary>
    /// <returns>The instruction's operator.</returns>
    public override Operator Op => memoryOp;

    /// <summary>
    /// Gets log2(alignment), where the alignment is the memory location's
    /// alignment. As implied by the log2(alignment) encoding, the alignment
    /// must be a power of 2. As an additional validation criteria, the
    /// alignment must be less or equal to natural alignment.
    /// </summary>
    /// <returns>log2(alignment)</returns>
    public uint Log2Alignment { get; private set; }

    /// <summary>
    /// Gets the memory location's alignment, computed as two to the power
    /// of <see cref="Log2Alignment"/>.
    /// </summary>
    /// <returns>The memory location's alignment.</returns>
    public uint Alignment => 1u << (int)Log2Alignment;

    /// <summary>
    /// Gets the offset of the memory location relative to the pointer
    /// that is accessed.
    /// </summary>
    /// <returns>The offset of the memory location relative to the pointer
    /// that is accessed.</returns>
    public uint Offset { get; private set; }

    /// <summary>
    /// Writes this instruction's immediates (but not its opcode)
    /// to the given WebAssembly file writer.
    /// </summary>
    /// <param name="writer">The writer to write this instruction's immediates to.</param>
    public override void WriteImmediatesTo(BinaryWasmWriter writer)
    {
        // The binary encoding stores the alignment exponent first,
        // then the offset, both as LEB128 varuint32 values.
        writer.WriteVarUInt32(Log2Alignment);
        writer.WriteVarUInt32(Offset);
    }

    /// <summary>
    /// Writes a string representation of this instruction to the given text writer.
    /// </summary>
    /// <param name="writer">
    /// The writer to which a representation of this instruction is written.
    /// </param>
    public override void Dump(TextWriter writer)
    {
        Op.Dump(writer);
        writer.Write(" offset=");
        writer.Write(Offset);
        writer.Write(" align=");
        writer.Write(Alignment);
    }
}
}
<|start_filename|>libwasm/NameSection.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using Wasm.Binary;
namespace Wasm
{
/// <summary>
/// A type of section that defines names for debugging purposes.
/// </summary>
public sealed class NameSection : Section
{
    /// <summary>
    /// Creates an empty name section.
    /// </summary>
    public NameSection()
    {
        this.Names = new List<NameEntry>();
    }

    /// <summary>
    /// Creates a name section from the given sequence of name entries.
    /// </summary>
    /// <param name="names">The name entries to initialize this section with.</param>
    public NameSection(IEnumerable<NameEntry> names)
    {
        this.Names = new List<NameEntry>(names);
    }

    /// <summary>
    /// The custom name used for name sections.
    /// </summary>
    public const string CustomName = "name";

    /// <inheritdoc/>
    public override SectionName Name => new SectionName(CustomName);

    /// <summary>
    /// Gets the name entries in this section.
    /// </summary>
    /// <returns>The name entries.</returns>
    public List<NameEntry> Names { get; private set; }

    /// <inheritdoc/>
    public override void WritePayloadTo(BinaryWasmWriter writer)
    {
        // A name section's payload is simply the concatenation of its entries.
        for (int i = 0; i < Names.Count; i++)
        {
            Names[i].WriteTo(writer);
        }
    }

    /// <inheritdoc/>
    public override void Dump(TextWriter writer)
    {
        writer.Write(Name.ToString());
        writer.Write("; number of entries: ");
        writer.Write(Names.Count);
        writer.WriteLine();
        int index = 0;
        foreach (var entry in Names)
        {
            writer.Write("#");
            writer.Write(index);
            writer.Write(" -> ");
            entry.Dump(writer);
            writer.WriteLine();
            index++;
        }
    }

    /// <summary>
    /// Reads the name section with the given header.
    /// </summary>
    /// <param name="header">The section header.</param>
    /// <param name="reader">The WebAssembly file reader.</param>
    /// <returns>The parsed section.</returns>
    public static NameSection ReadSectionPayload(SectionHeader header, BinaryWasmReader reader)
    {
        var section = new NameSection();
        long startPos = reader.Position;
        // Keep reading entries until the section's payload is exhausted.
        while (reader.Position - startPos < header.PayloadLength)
        {
            section.Names.Add(NameEntry.Read(reader));
        }
        return section;
    }
}
/// <summary>
/// An enumeration of encodings for name section entries. The numeric
/// values are the entry kind codes used in the binary encoding.
/// </summary>
public enum NameEntryKind : byte
{
    /// <summary>
    /// The name entry code for a module name entry.
    /// </summary>
    Module = 0,
    /// <summary>
    /// The name entry code for a function name entry.
    /// </summary>
    Function = 1,
    /// <summary>
    /// The name entry code for a local name entry.
    /// </summary>
    Local = 2
}
/// <summary>
/// A base class for entries in the name section.
/// </summary>
public abstract class NameEntry
{
    /// <summary>
    /// Gets this name entry's kind.
    /// </summary>
    /// <returns>The name entry kind.</returns>
    public abstract NameEntryKind Kind { get; }

    /// <summary>
    /// Writes this name entry's payload to the given writer.
    /// </summary>
    /// <param name="writer">The writer to write the payload to.</param>
    public abstract void WritePayloadTo(BinaryWasmWriter writer);

    /// <summary>
    /// Writes a textual representation of this name entry to the given writer:
    /// a header line with the entry kind and payload size, followed by the
    /// payload bytes, indented.
    /// </summary>
    /// <param name="writer">The text writer.</param>
    public virtual void Dump(TextWriter writer)
    {
        using (var memStream = new MemoryStream())
        {
            using (var binaryWriter = new BinaryWriter(memStream))
            {
                // Serialize the payload into an in-memory buffer so its
                // size can be reported and its bytes dumped.
                WritePayloadTo(new BinaryWasmWriter(binaryWriter));
                memStream.Seek(0, SeekOrigin.Begin);
                writer.WriteLine("entry kind '{0}', payload size: {1}", Kind, memStream.Length);
                // Dump the payload through the indented writer so the bytes
                // appear indented under the header line. (Bug fix: the
                // indented writer was previously created but never used.)
                var payloadWriter = DumpHelpers.CreateIndentedTextWriter(writer);
                DumpHelpers.DumpStream(memStream, payloadWriter);
            }
        }
    }

    /// <summary>
    /// Writes this name entry's header and payload to the given writer.
    /// </summary>
    /// <param name="writer">The writer to write the header and payload to.</param>
    public void WriteTo(BinaryWasmWriter writer)
    {
        // Header: the entry kind code, then a length-prefixed payload.
        writer.WriteVarUInt7((byte)Kind);
        writer.WriteLengthPrefixed(WritePayloadTo);
    }

    /// <inheritdoc/>
    public override string ToString()
    {
        var builder = new StringBuilder();
        Dump(new StringWriter(builder));
        return builder.ToString();
    }

    /// <summary>
    /// Reads a name entry's header and payload from the given binary
    /// WebAssembly reader.
    /// </summary>
    /// <param name="reader">The reader to read the name entry from.</param>
    /// <returns>A name entry.</returns>
    public static NameEntry Read(BinaryWasmReader reader)
    {
        NameEntryKind kind = (NameEntryKind)reader.ReadVarUInt7();
        uint length = reader.ReadVarUInt32();
        switch (kind)
        {
            case NameEntryKind.Module:
                return ModuleNameEntry.ReadPayload(reader, length);
            default:
                // Unknown kinds are preserved verbatim so they round-trip.
                return UnknownNameEntry.ReadPayload(reader, kind, length);
        }
    }
}
/// <summary>
/// Describes a name section entry with an unknown entry kind code.
/// </summary>
public sealed class UnknownNameEntry : NameEntry
{
    /// <summary>
    /// Creates an unknown name entry from the given entry kind and payload.
    /// </summary>
    public UnknownNameEntry(NameEntryKind kind, byte[] payload)
    {
        this.kindValue = kind;
        this.Payload = payload;
    }

    // The raw entry kind code this entry was read with.
    private NameEntryKind kindValue;

    /// <summary>
    /// Gets the payload for this unknown name entry.
    /// </summary>
    /// <returns>The payload.</returns>
    public byte[] Payload { get; set; }

    /// <inheritdoc/>
    public override NameEntryKind Kind => kindValue;

    /// <inheritdoc/>
    public override void WritePayloadTo(BinaryWasmWriter writer)
    {
        // Unknown entries are written back verbatim.
        writer.Writer.Write(Payload);
    }

    /// <summary>
    /// Reads an unknown name entry's payload.
    /// </summary>
    /// <param name="reader">The reader to read the name entry payload from.</param>
    /// <param name="kind">The kind of name entry to read.</param>
    /// <param name="length">The length of the name entry's payload, in bytes.</param>
    /// <returns>An unknown name entry.</returns>
    public static UnknownNameEntry ReadPayload(BinaryWasmReader reader, NameEntryKind kind, uint length)
    {
        var payloadBytes = reader.ReadBytes((int)length);
        return new UnknownNameEntry(kind, payloadBytes);
    }
}
/// <summary>
/// A name entry type that defines a module's name.
/// </summary>
public sealed class ModuleNameEntry : NameEntry
{
    /// <summary>
    /// Creates a module name entry from the given name.
    /// </summary>
    public ModuleNameEntry(string moduleName)
    {
        this.ModuleName = moduleName;
    }

    /// <inheritdoc/>
    public override NameEntryKind Kind => NameEntryKind.Module;

    /// <summary>
    /// Gets or sets the module's name.
    /// </summary>
    /// <returns>The module's name.</returns>
    public string ModuleName { get; set; }

    /// <inheritdoc/>
    public override void WritePayloadTo(BinaryWasmWriter writer)
    {
        // The payload is a single length-prefixed string: the module name.
        writer.WriteString(ModuleName);
    }

    /// <inheritdoc/>
    public override void Dump(TextWriter writer)
    {
        writer.Write("module name: {0}", ModuleName);
    }

    /// <summary>
    /// Reads a module name entry's payload.
    /// </summary>
    /// <param name="reader">The reader to read the name entry payload from.</param>
    /// <param name="length">The length of the name entry's payload, in bytes.</param>
    /// <returns>A module name entry.</returns>
    public static ModuleNameEntry ReadPayload(BinaryWasmReader reader, uint length)
    {
        var moduleName = reader.ReadString();
        return new ModuleNameEntry(moduleName);
    }
}
}
<|start_filename|>libwasm/DumpHelpers.cs<|end_filename|>
using System;
using System.CodeDom.Compiler;
using System.IO;
namespace Wasm
{
/// <summary>
/// Contains functions which help convert raw data to a human-readable format.
/// </summary>
public static class DumpHelpers
{
    /// <summary>
    /// Formats the given value as a hexadecimal number.
    /// </summary>
    /// <param name="value">The value to format.</param>
    /// <returns>A hexadecimal number, prefixed by '0x'.</returns>
    public static string FormatHex(byte value) => string.Format("0x{0:x02}", value);

    /// <summary>
    /// Formats the given value as a hexadecimal number.
    /// </summary>
    /// <param name="value">The value to format.</param>
    /// <returns>A hexadecimal number, prefixed by '0x'.</returns>
    public static string FormatHex(ushort value) => string.Format("0x{0:x04}", value);

    /// <summary>
    /// Formats the given value as a hexadecimal number.
    /// </summary>
    /// <param name="value">The value to format.</param>
    /// <returns>A hexadecimal number, prefixed by '0x'.</returns>
    public static string FormatHex(uint value) => string.Format("0x{0:x08}", value);

    /// <summary>
    /// Writes the contents of the given stream to the given text writer,
    /// as a space-delimited list of hex bytes.
    /// </summary>
    /// <param name="stream">The stream to read.</param>
    /// <param name="writer">The writer to which text is written.</param>
    public static void DumpStream(Stream stream, TextWriter writer)
    {
        bool first = true;
        int nextByte;
        // Read until end of stream; separate bytes with single spaces.
        while ((nextByte = stream.ReadByte()) != -1)
        {
            if (!first)
            {
                writer.Write(" ");
            }
            writer.Write(FormatHex((byte)nextByte));
            first = false;
        }
    }

    /// <summary>
    /// Writes the contents of the byte array to the given text writer,
    /// as a space-delimited list of hex bytes.
    /// </summary>
    /// <param name="bytes">The bytes to print.</param>
    /// <param name="writer">The writer to which text is written.</param>
    public static void DumpBytes(byte[] bytes, TextWriter writer)
    {
        using (var memStream = new MemoryStream(bytes))
        {
            DumpStream(memStream, writer);
        }
    }

    /// <summary>
    /// Creates a string representation for the given WebAssembly type.
    /// </summary>
    /// <param name="value">The WebAssembly type to convert to a string.</param>
    /// <returns>A string representation for a WebAssembly type.</returns>
    public static string WasmTypeToString(WasmType value)
    {
        switch (value)
        {
            case WasmType.AnyFunc:
                return "anyfunc";
            case WasmType.Empty:
                return "empty";
            case WasmType.Float32:
                return "f32";
            case WasmType.Float64:
                return "f64";
            case WasmType.Func:
                return "funcdef";
            case WasmType.Int32:
                return "i32";
            case WasmType.Int64:
                return "i64";
            default:
                // Fall back to the raw type code for unrecognized types.
                return "unknown type (code: " + value + ")";
        }
    }

    /// <summary>
    /// Creates a string representation for the given WebAssembly value type.
    /// </summary>
    /// <param name="value">The WebAssembly value type to convert to a string.</param>
    /// <returns>A string representation for a WebAssembly value type.</returns>
    public static string WasmTypeToString(WasmValueType value) =>
        WasmTypeToString((WasmType)value);

    /// <summary>
    /// Writes a textual representation of the given WebAssembly type to
    /// the given text writer.
    /// </summary>
    /// <param name="value">The type to print to the text writer.</param>
    /// <param name="writer">The writer to which the textual WebAssembly value type should be written.</param>
    public static void DumpWasmType(WasmType value, TextWriter writer) =>
        writer.Write(WasmTypeToString(value));

    /// <summary>
    /// Writes a textual representation of the given WebAssembly value type to
    /// the given text writer.
    /// </summary>
    /// <param name="value">The value type to print to the text writer.</param>
    /// <param name="writer">The writer to which the textual WebAssembly value type should be written.</param>
    public static void DumpWasmType(WasmValueType value, TextWriter writer) =>
        DumpWasmType((WasmType)value, writer);

    /// <summary>
    /// Creates a text writer that prepends the given indentation string to every line.
    /// </summary>
    /// <param name="writer">The text writer to which the indented writer should write.</param>
    /// <param name="indentation">The indentation string.</param>
    /// <returns>A text writer that prepends the given indentation string to every line.</returns>
    public static TextWriter CreateIndentedTextWriter(TextWriter writer, string indentation)
    {
        return new IndentedTextWriter(writer, indentation) { Indent = 1 };
    }

    /// <summary>
    /// Creates a text writer that prepends indentation string to every line.
    /// </summary>
    /// <param name="writer">The text writer to which the indented writer should write.</param>
    /// <returns>A text writer that prepends indentation to every line.</returns>
    public static TextWriter CreateIndentedTextWriter(TextWriter writer) =>
        CreateIndentedTextWriter(writer, "    ");
}
}
<|start_filename|>libwasm/Optimize/FunctionTypeOptimizations.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
namespace Wasm.Optimize
{
/// <summary>
/// Defines function type optimizations.
/// </summary>
public static class FunctionTypeOptimizations
{
    /// <summary>
    /// Takes a sequence of function types as input and produces
    /// a list of equivalent, distinct function types and a map
    /// that maps type indices from the old type sequence to
    /// indices of equivalent types in the new function type list.
    /// </summary>
    /// <param name="types">The sequence of types to make distinct.</param>
    /// <param name="newTypes">The list of distinct function types.</param>
    /// <param name="typeMapping">
    /// A map from function types that occur in <c>Types</c> to their equivalents in <c>NewTypes</c>.
    /// </param>
    public static void MakeFunctionTypesDistinct(
        IEnumerable<FunctionType> types,
        out IReadOnlyList<FunctionType> newTypes,
        out IReadOnlyDictionary<uint, uint> typeMapping)
    {
        var distinctTypes = new List<FunctionType>();
        // Maps structurally-equal types to their slot in 'distinctTypes'.
        var slotOfType = new Dictionary<FunctionType, uint>(
            ConstFunctionTypeComparer.Instance);
        // Maps old (positional) type indices to new type indices.
        var oldToNewIndex = new Dictionary<uint, uint>();
        uint oldIndex = 0;
        foreach (var type in types)
        {
            uint newIndex;
            if (!slotOfType.TryGetValue(type, out newIndex))
            {
                // First occurrence of this structural type: assign it a new slot.
                newIndex = (uint)distinctTypes.Count;
                slotOfType[type] = newIndex;
                distinctTypes.Add(type);
            }
            oldToNewIndex[oldIndex] = newIndex;
            oldIndex++;
        }
        newTypes = distinctTypes;
        typeMapping = oldToNewIndex;
    }

    /// <summary>
    /// Rewrites function type references in the given WebAssembly file
    /// by replacing keys from the rewrite map with their corresponding
    /// values.
    /// </summary>
    /// <param name="file">The WebAssembly file to rewrite.</param>
    /// <param name="rewriteMap">A mapping of original type indices to new type indices.</param>
    public static void RewriteFunctionTypeReferences(
        this WasmFile file,
        IReadOnlyDictionary<uint, uint> rewriteMap)
    {
        // Type references occur only in the import and function sections.
        foreach (var importSec in file.GetSections<ImportSection>())
        {
            foreach (var import in importSec.Imports)
            {
                // Only function imports carry a type index.
                var funcImport = import as ImportedFunction;
                uint newIndex;
                if (funcImport != null && rewriteMap.TryGetValue(funcImport.TypeIndex, out newIndex))
                {
                    funcImport.TypeIndex = newIndex;
                }
            }
        }
        foreach (var funcSec in file.GetSections<FunctionSection>())
        {
            for (int i = 0; i < funcSec.FunctionTypes.Count; i++)
            {
                uint newIndex;
                if (rewriteMap.TryGetValue(funcSec.FunctionTypes[i], out newIndex))
                {
                    funcSec.FunctionTypes[i] = newIndex;
                }
            }
        }
    }

    /// <summary>
    /// Compresses function types in the given WebAssembly file
    /// by including only unique function types.
    /// </summary>
    /// <param name="file">The WebAssembly file to modify.</param>
    public static void CompressFunctionTypes(
        this WasmFile file)
    {
        // Grab the first type section; nothing to do if there isn't one.
        var typeSection = file.GetFirstSectionOrNull<TypeSection>();
        if (typeSection == null)
        {
            return;
        }
        // Deduplicate the type section's function types.
        IReadOnlyList<FunctionType> newTypes;
        IReadOnlyDictionary<uint, uint> typeIndexMap;
        MakeFunctionTypesDistinct(typeSection.FunctionTypes, out newTypes, out typeIndexMap);
        // Replace the type section's contents with the deduplicated list.
        typeSection.FunctionTypes.Clear();
        typeSection.FunctionTypes.AddRange(newTypes);
        // Patch up all references to the old type indices.
        file.RewriteFunctionTypeReferences(typeIndexMap);
    }
}
/// <summary>
/// An equality comparer for function types that assumes that the contents
/// of the function types it is given remain constant over the course of
/// its operation.
/// </summary>
public sealed class ConstFunctionTypeComparer : IEqualityComparer<FunctionType>
{
    private ConstFunctionTypeComparer() { }

    /// <summary>
    /// Gets the one and only instance of the constant function type comparer.
    /// </summary>
    public static readonly ConstFunctionTypeComparer Instance = new ConstFunctionTypeComparer();

    /// <inheritdoc/>
    public bool Equals(FunctionType x, FunctionType y)
    {
        // Two function types are equal iff their parameter lists and
        // return lists are element-wise equal.
        return x.ParameterTypes.SequenceEqual(y.ParameterTypes)
            && x.ReturnTypes.SequenceEqual(y.ReturnTypes);
    }

    private static int HashSequence(IEnumerable<WasmValueType> values, int seed)
    {
        // Based on Brendan's answer to this StackOverflow question:
        // https://stackoverflow.com/questions/16340/how-do-i-generate-a-hashcode-from-a-byte-array-in-c
        int hash = seed;
        foreach (var item in values)
        {
            hash = (hash * 31) ^ (int)item;
        }
        return hash;
    }

    /// <inheritdoc/>
    public int GetHashCode(FunctionType obj)
    {
        // Chain the two sequence hashes so parameter and return lists
        // both contribute to the final hash code.
        return HashSequence(obj.ReturnTypes, HashSequence(obj.ParameterTypes, 0));
    }
}
}
<|start_filename|>libwasm/ElementSection.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using Wasm.Binary;
using Wasm.Instructions;
namespace Wasm
{
/// <summary>
/// A type of section that declares the initialized data that is loaded into a table.
/// </summary>
public sealed class ElementSection : Section
{
    /// <summary>
    /// Creates an empty element section.
    /// </summary>
    public ElementSection()
        : this(new ElementSegment[0])
    {
    }

    /// <summary>
    /// Creates an element section from a sequence of segments.
    /// </summary>
    /// <param name="segments">The segments to put in the elements section.</param>
    public ElementSection(IEnumerable<ElementSegment> segments)
        : this(segments, new byte[0])
    {
    }

    /// <summary>
    /// Creates an element section from a sequence of segments and a trailing payload.
    /// </summary>
    /// <param name="segments">The segments to put in the elements section.</param>
    /// <param name="extraPayload">
    /// A sequence of bytes that have no intrinsic meaning; they are part
    /// of the element section but are placed after the element section's actual contents.
    /// </param>
    public ElementSection(IEnumerable<ElementSegment> segments, byte[] extraPayload)
    {
        this.Segments = new List<ElementSegment>(segments);
        this.ExtraPayload = extraPayload;
    }

    /// <inheritdoc/>
    public override SectionName Name => new SectionName(SectionCode.Element);

    /// <summary>
    /// Gets the list of the element segments defined by this section.
    /// </summary>
    /// <returns>The element segments defined by this section.</returns>
    public List<ElementSegment> Segments { get; private set; }

    /// <summary>
    /// Gets this element section's additional payload.
    /// </summary>
    /// <returns>The additional payload, as an array of bytes.</returns>
    public byte[] ExtraPayload { get; set; }

    /// <inheritdoc/>
    public override void WritePayloadTo(BinaryWasmWriter writer)
    {
        // Layout: segment count, each segment in order, then any extra bytes.
        writer.WriteVarUInt32((uint)Segments.Count);
        for (int i = 0; i < Segments.Count; i++)
        {
            Segments[i].WriteTo(writer);
        }
        writer.Writer.Write(ExtraPayload);
    }

    /// <summary>
    /// Reads the element section with the given header.
    /// </summary>
    /// <param name="header">The section header.</param>
    /// <param name="reader">A reader for a binary WebAssembly file.</param>
    /// <returns>The parsed section.</returns>
    public static ElementSection ReadSectionPayload(
        SectionHeader header, BinaryWasmReader reader)
    {
        long startPos = reader.Position;
        // Read the declared number of element segments.
        uint segmentCount = reader.ReadVarUInt32();
        var segments = new List<ElementSegment>();
        for (uint i = 0; i < segmentCount; i++)
        {
            segments.Add(ElementSegment.ReadFrom(reader));
        }
        // Any bytes left in the payload are preserved verbatim.
        var extraPayload = reader.ReadRemainingPayload(startPos, header);
        return new ElementSection(segments, extraPayload);
    }

    /// <inheritdoc/>
    public override void Dump(TextWriter writer)
    {
        writer.Write(Name.ToString());
        writer.Write("; number of entries: ");
        writer.Write(Segments.Count);
        writer.WriteLine();
        var segmentWriter = DumpHelpers.CreateIndentedTextWriter(writer);
        for (int i = 0; i < Segments.Count; i++)
        {
            writer.Write("#{0}:", i);
            segmentWriter.WriteLine();
            Segments[i].Dump(segmentWriter);
            writer.WriteLine();
        }
        if (ExtraPayload.Length > 0)
        {
            writer.Write("Extra payload size: ");
            writer.Write(ExtraPayload.Length);
            writer.WriteLine();
            DumpHelpers.DumpBytes(ExtraPayload, writer);
            writer.WriteLine();
        }
    }
}
/// <summary>
/// An entry in the element section.
/// </summary>
public sealed class ElementSegment
{
    /// <summary>
    /// Creates an element segment from the given table index, offset and data.
    /// </summary>
    /// <param name="tableIndex">The table index.</param>
    /// <param name="offset">An i32 initializer expression that computes the offset at which to place the data.</param>
    /// <param name="elements">A sequence of function indices to which a segment of the table is initialized.</param>
    public ElementSegment(uint tableIndex, InitializerExpression offset, IEnumerable<uint> elements)
    {
        this.TableIndex = tableIndex;
        this.Offset = offset;
        this.Elements = new List<uint>(elements);
    }

    /// <summary>
    /// Gets or sets the table index.
    /// </summary>
    /// <returns>The table index.</returns>
    public uint TableIndex { get; set; }

    /// <summary>
    /// Gets or sets an i32 initializer expression that computes the offset at which to place the data.
    /// </summary>
    /// <returns>An i32 initializer expression.</returns>
    public InitializerExpression Offset { get; set; }

    /// <summary>
    /// Gets a list of function indices to which this segment of the table is initialized.
    /// </summary>
    /// <returns>The list of function indices to which this segment of the table is initialized.</returns>
    public List<uint> Elements { get; private set; }

    /// <summary>
    /// Writes this element segment to the given WebAssembly file writer.
    /// </summary>
    /// <param name="writer">The WebAssembly file writer.</param>
    public void WriteTo(BinaryWasmWriter writer)
    {
        // Layout: table index, offset expression, element count, elements.
        writer.WriteVarUInt32(TableIndex);
        Offset.WriteTo(writer);
        writer.WriteVarUInt32((uint)Elements.Count);
        for (int i = 0; i < Elements.Count; i++)
        {
            writer.WriteVarUInt32(Elements[i]);
        }
    }

    /// <summary>
    /// Reads an element segment from the given WebAssembly reader.
    /// </summary>
    /// <param name="reader">The WebAssembly reader.</param>
    /// <returns>The element segment that was read from the reader.</returns>
    public static ElementSegment ReadFrom(BinaryWasmReader reader)
    {
        uint tableIndex = reader.ReadVarUInt32();
        var offsetExpr = InitializerExpression.ReadFrom(reader);
        uint elementCount = reader.ReadVarUInt32();
        var funcIndices = new List<uint>((int)elementCount);
        for (uint i = 0; i < elementCount; i++)
        {
            funcIndices.Add(reader.ReadVarUInt32());
        }
        return new ElementSegment(tableIndex, offsetExpr, funcIndices);
    }

    /// <summary>
    /// Writes a textual representation of this element segment to the given writer.
    /// </summary>
    /// <param name="writer">The writer to which text is written.</param>
    public void Dump(TextWriter writer)
    {
        writer.Write("- Table index: ");
        writer.Write(TableIndex);
        writer.WriteLine();
        writer.Write("- Offset:");
        var detailWriter = DumpHelpers.CreateIndentedTextWriter(writer);
        foreach (var instruction in Offset.BodyInstructions)
        {
            detailWriter.WriteLine();
            instruction.Dump(detailWriter);
        }
        writer.WriteLine();
        writer.Write("- Elements:");
        for (int i = 0; i < Elements.Count; i++)
        {
            detailWriter.WriteLine();
            detailWriter.Write("#{0} -> func #{1}", i, Elements[i]);
        }
    }
}
}
<|start_filename|>nullary-opcode-generator/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
using Wasm.Instructions;
namespace Wasm.NullaryOpCodeGenerator
{
// Code-generation tool: turns tables of nullary opcode definitions and docs
// into C# field declarations and initializers for libwasm/Operators.cs.
public static class Program
{
    // Prints command-line usage information to stderr and returns the
    // process exit code for a usage error.
    private static int PrintUsage()
    {
        Console.Error.WriteLine("usage: nullary-opcode-generator gen-init|gen-defs nullary-opcode-defs.txt nullary-opcode-docs.txt");
        Console.Error.WriteLine();
        Console.Error.WriteLine("    gen-init: generates initialization code for nullary opcodes.");
        Console.Error.WriteLine("    gen-defs: generates field definition code for nullary opcodes.");
        Console.Error.WriteLine("    nullary-opcode-defs.txt: a file that contains whitespace-separated (mnemonic, opcode) pairs.");
        Console.Error.WriteLine("    nullary-opcode-docs.txt: a file that contains colon-separated (mnemonic, documentation) pairs.");
        return 1;
    }
    public static int Main(string[] args)
    {
        // This program generates C# code from (potentially large) tables of
        // nullary opcode docs and definitions.
        // The resulting code is then to be included manually in libwasm/Operators.cs.
        if (args.Length != 3)
        {
            return PrintUsage();
        }
        // Mode selection: 'gen-init' emits initializers, 'gen-defs' emits
        // field definitions; anything else is a usage error.
        bool genInit = false;
        if (args[0] == "gen-init")
        {
            genInit = true;
        }
        else if (args[0] != "gen-defs")
        {
            return PrintUsage();
        }
        var defLines = File.ReadAllLines(args[1]);
        var docLines = File.ReadAllLines(args[2]);
        // opCodes: mnemonic -> opcode text; opDocs: mnemonic -> doc text.
        var opCodes = new Dictionary<string, string>();
        var opDocs = new Dictionary<string, string>();
        // Parse the docs file: each non-blank line is 'mnemonic: documentation'.
        foreach (var line in docLines)
        {
            if (!string.IsNullOrWhiteSpace(line))
            {
                string[] splitLine = line.Split(new char[] { ':' }, 2, StringSplitOptions.RemoveEmptyEntries);
                opDocs.Add(splitLine[0], splitLine[1].Trim());
            }
        }
        // Parse the defs file: each non-blank line is 'mnemonic opcode'.
        foreach (var line in defLines)
        {
            if (!string.IsNullOrWhiteSpace(line))
            {
                string[] splitLine = line.Split(new char[] { ' ' }, 2, StringSplitOptions.RemoveEmptyEntries);
                opCodes.Add(splitLine[0], splitLine[1].Trim());
            }
        }
        if (genInit)
        {
            // Emit one 'Register<NullaryOperator>(...)' assignment per opcode.
            foreach (var pair in opCodes)
            {
                var op = ParseMnemonic(pair.Key);
                Console.WriteLine(
                    "{0} = Register<NullaryOperator>(new NullaryOperator({1}, WasmType.{2}, \"{3}\"));",
                    GenerateFieldName(op),
                    pair.Value,
                    WasmTypeToIdentifier(op.DeclaringType),
                    op.Mnemonic);
            }
        }
        else
        {
            // Emit one documented 'public static readonly' field per opcode.
            foreach (var pair in opCodes)
            {
                var op = ParseMnemonic(pair.Key);
                Console.WriteLine("/// <summary>");
                Console.WriteLine(
                    "/// The '{0}' operator: {1}",
                    op.ToString(), opDocs.ContainsKey(pair.Key)
                    ? opDocs[pair.Key] + "."
                    : "");
                Console.WriteLine("/// </summary>");
                Console.WriteLine("public static readonly NullaryOperator {0};", GenerateFieldName(op));
                Console.WriteLine();
            }
        }
        return 0;
    }
    // Parses a full mnemonic such as 'i32.add' or 'nop' into a placeholder
    // NullaryOperator (opcode 0). A 'type.name' mnemonic yields a declaring
    // type; a bare mnemonic yields WasmType.Empty.
    private static NullaryOperator ParseMnemonic(string FullMnemonic)
    {
        string[] split = FullMnemonic.Split(new char[] { '.' }, 2);
        if (split.Length == 1)
        {
            return new NullaryOperator(0, WasmType.Empty, split[0]);
        }
        else
        {
            return new NullaryOperator(0, ParseWasmType(split[0]), split[1]);
        }
    }
    // Builds the C# field name for an operator: the declaring type's
    // identifier (if any) followed by the PascalCase-ified mnemonic.
    private static string GenerateFieldName(NullaryOperator Op)
    {
        if (Op.HasDeclaringType)
        {
            return WasmTypeToIdentifier(Op.DeclaringType) + MnemonicToIdentifier(Op.Mnemonic);
        }
        else
        {
            return MnemonicToIdentifier(Op.Mnemonic);
        }
    }
    // Converts a snake_case mnemonic (possibly with a '/type' suffix, e.g.
    // 'convert_s/i32') to a PascalCase C# identifier.
    private static string MnemonicToIdentifier(string Mnemonic)
    {
        var result = new StringBuilder();
        // 'useCaps' is true when the next letter starts a new word.
        bool useCaps = true;
        int i = 0;
        foreach (var character in Mnemonic)
        {
            // NOTE: 'i' is incremented before processing, so after seeing
            // '/', Mnemonic.Substring(i) is exactly the text after the slash.
            i++;
            if (character == '_')
            {
                useCaps = true;
                continue;
            }
            else if (character == '/')
            {
                // A '/type' suffix maps to the type's identifier and ends
                // the conversion.
                string suffixType = Mnemonic.Substring(i);
                result.Append(WasmTypeToIdentifier(ParseWasmType(suffixType)));
                break;
            }
            if (useCaps)
            {
                result.Append(char.ToUpper(character));
                useCaps = false;
            }
            else
            {
                result.Append(character);
            }
        }
        return result.ToString();
    }
    // Maps a textual wasm value type ('i32', 'i64', 'f32', 'f64') to the
    // corresponding WasmType member; throws on anything else.
    private static WasmType ParseWasmType(string Type)
    {
        switch (Type)
        {
            case "i32":
                return WasmType.Int32;
            case "i64":
                return WasmType.Int64;
            case "f32":
                return WasmType.Float32;
            case "f64":
                return WasmType.Float64;
            default:
                throw new InvalidOperationException("Unknown WasmType: '" + Type + "'");
        }
    }
    // Returns the enum member's name (e.g. 'Int32'); the object cast simply
    // boxes the value before calling ToString.
    private static string WasmTypeToIdentifier(WasmType Type)
    {
        return ((object)Type).ToString();
    }
}
}
<|start_filename|>libwasm/Interpret/BaseRuntime/TerminalRuntime.cs<|end_filename|>
using System.Collections.Generic;
using System.IO;
namespace Wasm.Interpret.BaseRuntime
{
/// <summary>
/// Defines terminal I/O operations for the base-runtime environment.
/// </summary>
public sealed class TerminalRuntime
{
    /// <summary>
    /// Creates a base-runtime IO implementation from the given streams.
    /// </summary>
    /// <param name="inputStream">The runtime's standard input stream.</param>
    /// <param name="outputStream">The runtime's standard output stream.</param>
    /// <param name="errorStream">The runtime's standard error stream.</param>
    private TerminalRuntime(Stream inputStream, Stream outputStream, Stream errorStream)
    {
        this.stdinStream = inputStream;
        this.stdoutStream = outputStream;
        this.stderrStream = errorStream;
        this.importerVal = new PredefinedImporter();
        // Register one importable function per terminal operation.
        this.importerVal.DefineFunction(
            "stdin_read",
            new DelegateFunctionDefinition(
                new WasmValueType[0],
                new WasmValueType[] { WasmValueType.Int32 },
                StdinReadByte));
        this.importerVal.DefineFunction(
            "stdout_write",
            new DelegateFunctionDefinition(
                new WasmValueType[] { WasmValueType.Int32 },
                new WasmValueType[0],
                StdoutWriteByte));
        this.importerVal.DefineFunction(
            "stderr_write",
            new DelegateFunctionDefinition(
                new WasmValueType[] { WasmValueType.Int32 },
                new WasmValueType[0],
                StderrWriteByte));
        this.importerVal.DefineFunction(
            "stdin_flush",
            new DelegateFunctionDefinition(
                new WasmValueType[0],
                new WasmValueType[0],
                StdinFlush));
        this.importerVal.DefineFunction(
            "stdout_flush",
            new DelegateFunctionDefinition(
                new WasmValueType[0],
                new WasmValueType[0],
                StdoutFlush));
        this.importerVal.DefineFunction(
            "stderr_flush",
            new DelegateFunctionDefinition(
                new WasmValueType[0],
                new WasmValueType[0],
                StderrFlush));
    }

    // The runtime's standard input, output and error streams.
    private Stream stdinStream;
    private Stream stdoutStream;
    private Stream stderrStream;

    // Importer that holds the terminal I/O function definitions.
    private readonly PredefinedImporter importerVal;

    /// <summary>
    /// Adds all definitions from this runtime to the given importer.
    /// </summary>
    /// <param name="Importer">The importer.</param>
    private void IncludeDefinitionsIn(PredefinedImporter Importer)
    {
        Importer.IncludeDefinitions(importerVal);
    }

    /// <summary>
    /// Creates a new terminal I/O runtime and adds all of its definitions to the given
    /// importer.
    /// </summary>
    /// <param name="inputStream">The runtime's standard input stream.</param>
    /// <param name="outputStream">The runtime's standard output stream.</param>
    /// <param name="errorStream">The runtime's standard error stream.</param>
    /// <param name="importer">The importer.</param>
    public static void IncludeDefinitionsIn(
        Stream inputStream,
        Stream outputStream,
        Stream errorStream,
        PredefinedImporter importer)
    {
        new TerminalRuntime(inputStream, outputStream, errorStream).IncludeDefinitionsIn(importer);
    }

    // Reads a single byte from stdin; returns -1 (as an i32) at end of stream,
    // matching Stream.ReadByte semantics.
    private IReadOnlyList<object> StdinReadByte(IReadOnlyList<object> args)
    {
        return new object[] { stdinStream.ReadByte() };
    }

    // Writes the low byte of the single i32 argument to stdout.
    private IReadOnlyList<object> StdoutWriteByte(IReadOnlyList<object> args)
    {
        object data = args[0];
        stdoutStream.WriteByte((byte)(int)data);
        return new object[0];
    }

    // Writes the low byte of the single i32 argument to stderr.
    private IReadOnlyList<object> StderrWriteByte(IReadOnlyList<object> args)
    {
        object data = args[0];
        stderrStream.WriteByte((byte)(int)data);
        return new object[0];
    }

    // Flushes the standard input stream.
    private IReadOnlyList<object> StdinFlush(IReadOnlyList<object> args)
    {
        stdinStream.Flush();
        return new object[0];
    }

    // Flushes the standard output stream.
    // (Bug fix: this previously flushed the stdin stream by mistake.)
    private IReadOnlyList<object> StdoutFlush(IReadOnlyList<object> args)
    {
        stdoutStream.Flush();
        return new object[0];
    }

    // Flushes the standard error stream.
    // (Bug fix: this previously flushed the stdin stream by mistake.)
    private IReadOnlyList<object> StderrFlush(IReadOnlyList<object> args)
    {
        stderrStream.Flush();
        return new object[0];
    }
}
}
<|start_filename|>wasm-dump/Program.cs<|end_filename|>
using System;
using System.IO;
namespace Wasm.Dump
{
public static class Program
{
    /// <summary>
    /// Reads all of standard input into an in-memory stream.
    /// </summary>
    /// <returns>
    /// A <see cref="MemoryStream"/> containing stdin's bytes, positioned at the start.
    /// </returns>
    private static MemoryStream ReadStdinToEnd()
    {
        var memStream = new MemoryStream();
        using (var stdin = Console.OpenStandardInput())
        {
            // Stream.CopyTo replaces the previous manual buffer/read/write loop
            // with the equivalent (and internally buffered) library call.
            stdin.CopyTo(memStream);
        }
        memStream.Seek(0, SeekOrigin.Begin);
        return memStream;
    }

    /// <summary>
    /// Entry point: reads a WebAssembly binary from the given file path
    /// (or from stdin when no argument is given) and dumps a textual
    /// representation of it to stdout.
    /// </summary>
    /// <param name="args">Command-line arguments; at most one file path.</param>
    /// <returns>Zero on success; one on incorrect usage.</returns>
    public static int Main(string[] args)
    {
        if (args.Length > 1)
        {
            Console.Error.WriteLine("usage: wasm-dump [file.wasm]");
            return 1;
        }

        WasmFile file;
        if (args.Length == 0)
        {
            // No file argument: read the module from standard input.
            using (var input = ReadStdinToEnd())
            {
                file = WasmFile.ReadBinary(input);
            }
        }
        else
        {
            file = WasmFile.ReadBinary(args[0]);
        }
        file.Dump(Console.Out);
        Console.WriteLine();
        return 0;
    }
}
<|start_filename|>libwasm/SectionCode.cs<|end_filename|>
namespace Wasm
{
/// <summary>
/// Enumerates possible section codes.
/// </summary>
public enum SectionCode
{
    /// <summary>
    /// The section code for custom sections.
    /// </summary>
    Custom = 0,
    /// <summary>
    /// The section code for function signature declarations.
    /// </summary>
    Type = 1,
    /// <summary>
    /// The section code for import declarations.
    /// </summary>
    Import = 2,
    /// <summary>
    /// The section code for function declarations.
    /// </summary>
    Function = 3,
    /// <summary>
    /// The section code for tables, e.g., the indirect function table.
    /// </summary>
    Table = 4,
    /// <summary>
    /// The section code for memory attributes.
    /// </summary>
    Memory = 5,
    /// <summary>
    /// The section code for global declarations.
    /// </summary>
    Global = 6,
    /// <summary>
    /// The section code for exports.
    /// </summary>
    Export = 7,
    /// <summary>
    /// The section code for the start function declarations.
    /// </summary>
    Start = 8,
    /// <summary>
    /// The section code for an elements section.
    /// </summary>
    Element = 9,
    /// <summary>
    /// The section code for function bodies.
    /// </summary>
    Code = 10,
    /// <summary>
    /// The section code for data segments.
    /// </summary>
    Data = 11
}
}
<|start_filename|>libwasm/Interpret/NamespacedImporter.cs<|end_filename|>
using System.Collections.Generic;
namespace Wasm.Interpret
{
/// <summary>
/// An importer that delegates the issue of importing values to another
/// importer based on the module name associated with the value. That is,
/// module names serve as "namespaces" of sorts for other importers.
/// </summary>
public sealed class NamespacedImporter : IImporter
{
    /// <summary>
    /// Creates a linking importer.
    /// </summary>
    public NamespacedImporter()
    {
        this.moduleImporters = new Dictionary<string, IImporter>();
    }

    // Maps module names to the importer responsible for that "namespace".
    private Dictionary<string, IImporter> moduleImporters;

    /// <summary>
    /// Registers an importer for a particular module name.
    /// </summary>
    /// <param name="moduleName">
    /// The module name to map to <paramref name="importer"/>.
    /// </param>
    /// <param name="importer">
    /// An importer to use for all imports that refer to module <paramref name="moduleName"/>.
    /// </param>
    public void RegisterImporter(string moduleName, IImporter importer)
    {
        moduleImporters[moduleName] = importer;
    }

    /// <inheritdoc/>
    public FunctionDefinition ImportFunction(ImportedFunction description, FunctionType signature)
    {
        // Delegate to the importer registered for the module name, if any.
        return moduleImporters.TryGetValue(description.ModuleName, out IImporter found)
            ? found.ImportFunction(description, signature)
            : null;
    }

    /// <inheritdoc/>
    public Variable ImportGlobal(ImportedGlobal description)
    {
        return moduleImporters.TryGetValue(description.ModuleName, out IImporter found)
            ? found.ImportGlobal(description)
            : null;
    }

    /// <inheritdoc/>
    public LinearMemory ImportMemory(ImportedMemory description)
    {
        return moduleImporters.TryGetValue(description.ModuleName, out IImporter found)
            ? found.ImportMemory(description)
            : null;
    }

    /// <inheritdoc/>
    public FunctionTable ImportTable(ImportedTable description)
    {
        return moduleImporters.TryGetValue(description.ModuleName, out IImporter found)
            ? found.ImportTable(description)
            : null;
    }
}
}
<|start_filename|>libwasm/Instructions/Operator.cs<|end_filename|>
using System.IO;
using System.Text;
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes an operator, which consists of an opcode, a defining type and a mnemonic.
/// </summary>
public abstract class Operator
{
    /// <summary>
    /// Creates an operator.
    /// </summary>
    /// <param name="opCode">The operator's opcode.</param>
    /// <param name="declaringType">A type that defines the operator, if any.</param>
    /// <param name="mnemonic">The operator's mnemonic.</param>
    public Operator(byte opCode, WasmType declaringType, string mnemonic)
    {
        this.OpCode = opCode;
        this.DeclaringType = declaringType;
        this.Mnemonic = mnemonic;
    }

    /// <summary>
    /// Gets the opcode for this operator.
    /// </summary>
    /// <returns>The operator's opcode.</returns>
    public byte OpCode { get; private set; }

    /// <summary>
    /// Gets the type that defines this operator, if any.
    /// </summary>
    /// <returns>The type that defines this operator, if any; otherwise, <cref name="WasmType.Empty"/>.</returns>
    public WasmType DeclaringType { get; private set; }

    /// <summary>
    /// Gets the mnemonic for this operator.
    /// </summary>
    /// <returns>The operator's mnemonic.</returns>
    public string Mnemonic { get; private set; }

    /// <summary>
    /// Gets a Boolean that tells if this operator has a declaring type.
    /// </summary>
    public bool HasDeclaringType => DeclaringType != WasmType.Empty;

    /// <summary>
    /// Reads the immediates (not the opcode) of a WebAssembly instruction
    /// for this operator from the given reader and returns the result as an
    /// instruction.
    /// </summary>
    /// <param name="reader">The WebAssembly file reader to read immediates from.</param>
    /// <returns>A WebAssembly instruction.</returns>
    public abstract Instruction ReadImmediates(BinaryWasmReader reader);

    /// <summary>
    /// Writes a string representation of this operator to the given text writer.
    /// </summary>
    /// <param name="writer">
    /// The writer to which a representation of this operator is written.
    /// </param>
    public virtual void Dump(TextWriter writer)
    {
        // Operators without a declaring type print just the mnemonic.
        if (!HasDeclaringType)
        {
            writer.Write(Mnemonic);
            return;
        }

        // Otherwise the format is "<type>.<mnemonic>", e.g. "i32.add".
        DumpHelpers.DumpWasmType(DeclaringType, writer);
        writer.Write(".");
        writer.Write(Mnemonic);
    }

    /// <summary>
    /// Creates a string representation of this operator.
    /// </summary>
    /// <returns>The operator's string representation.</returns>
    public override string ToString()
    {
        var text = new StringBuilder();
        using (var textWriter = new StringWriter(text))
        {
            Dump(textWriter);
        }
        return text.ToString();
    }
}
}
<|start_filename|>libwasm/WasmFile.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Wasm.Binary;
namespace Wasm
{
/// <summary>
/// Represents a WebAssembly file.
/// </summary>
public sealed class WasmFile
{
    /// <summary>
    /// Creates an empty WebAssembly file.
    /// </summary>
    public WasmFile()
        : this(VersionHeader.MvpHeader)
    { }

    /// <summary>
    /// Creates an empty WebAssembly file with the given header.
    /// </summary>
    /// <param name="header">The WebAssembly version header.</param>
    public WasmFile(VersionHeader header)
        : this(header, Enumerable.Empty<Section>())
    { }

    /// <summary>
    /// Creates a WebAssembly file from the given list of sections.
    /// </summary>
    /// <param name="header">The WebAssembly version header.</param>
    /// <param name="sections">The list of all sections in the WebAssembly file.</param>
    public WasmFile(VersionHeader header, IEnumerable<Section> sections)
    {
        this.Header = header;
        this.Sections = new List<Section>(sections);
    }

    /// <summary>
    /// Gets the WebAssembly version header for this file.
    /// </summary>
    /// <returns>The WebAssembly version header.</returns>
    public VersionHeader Header { get; set; }

    /// <summary>
    /// Gets a list of all sections in this file.
    /// </summary>
    /// <returns>All sections in this file.</returns>
    public List<Section> Sections { get; private set; }

    /// <summary>
    /// Gets or sets this module's name as defined in the names section.
    /// </summary>
    /// <value>
    /// The module's name if the names section defines a module name entry;
    /// otherwise, <c>null</c>.
    /// </value>
    public string ModuleName
    {
        get
        {
            return ModuleNameEntryOrNull?.ModuleName;
        }
        set
        {
            var entry = ModuleNameEntryOrNull;
            if (entry == null)
            {
                AddNameEntry(new ModuleNameEntry(value));
            }
            else
            {
                entry.ModuleName = value;
            }
        }
    }

    // Finds the first module name entry in the name section, or null if
    // either the name section or the entry is absent.
    private ModuleNameEntry ModuleNameEntryOrNull
    {
        get
        {
            var nameSection = GetFirstSectionOrNull<NameSection>();
            if (nameSection == null)
            {
                return null;
            }
            else
            {
                var firstModuleNameEntry = nameSection.Names.OfType<ModuleNameEntry>().FirstOrDefault();
                if (firstModuleNameEntry == null)
                {
                    return null;
                }
                else
                {
                    return firstModuleNameEntry;
                }
            }
        }
    }

    /// <summary>
    /// Gets or sets the index of this module's entry point function, if any.
    /// </summary>
    /// <value>An entry point index.</value>
    public uint? StartFunctionIndex
    {
        get
        {
            var startSection = GetFirstSectionOrNull<StartSection>();
            return startSection?.StartFunctionIndex;
        }
        set
        {
            if (value.HasValue)
            {
                var startSection = GetFirstSectionOrNull<StartSection>();
                if (startSection == null)
                {
                    InsertSection(new StartSection(value.Value));
                }
                else
                {
                    startSection.StartFunctionIndex = value.Value;
                }
            }
            else
            {
                // Setting the index to null removes the start section entirely.
                Sections.RemoveAll(s => s is StartSection);
            }
        }
    }

    /// <summary>
    /// Gets a list of all sections of the given type.
    /// </summary>
    /// <returns>A list of sections with the given type.</returns>
    public IReadOnlyList<T> GetSections<T>()
        where T : Section
    {
        var results = new List<T>();
        for (int i = 0; i < Sections.Count; i++)
        {
            var sec = Sections[i];
            if (sec is T)
            {
                results.Add((T)sec);
            }
        }
        return results;
    }

    /// <summary>
    /// Gets a list of all sections with the given section name.
    /// </summary>
    /// <param name="name">The section name to look for.</param>
    /// <returns>A list of sections with the given section name.</returns>
    public IReadOnlyList<Section> GetSections(SectionName name)
    {
        var results = new List<Section>();
        for (int i = 0; i < Sections.Count; i++)
        {
            var sec = Sections[i];
            if (sec.Name == name)
            {
                results.Add(sec);
            }
        }
        return results;
    }

    /// <summary>
    /// Gets the first section with the given name. If no such section exists,
    /// <c>null</c> is returned.
    /// </summary>
    /// <param name="name">The section name to look for.</param>
    /// <returns>The first section with the given name, if it exists; otherwise, <c>null</c>.</returns>
    public Section GetFirstSectionOrNull(SectionName name)
    {
        for (int i = 0; i < Sections.Count; i++)
        {
            var sec = Sections[i];
            if (sec.Name == name)
            {
                return sec;
            }
        }
        return null;
    }

    /// <summary>
    /// Gets the first section of the given type. If no such section exists,
    /// <c>null</c> is returned.
    /// </summary>
    /// <returns>The first section of the given type, if it exists; otherwise, <c>null</c>.</returns>
    public T GetFirstSectionOrNull<T>()
        where T : Section
    {
        for (int i = 0; i < Sections.Count; i++)
        {
            var sec = Sections[i];
            if (sec is T)
            {
                return (T)sec;
            }
        }
        return default(T);
    }

    /// <summary>
    /// Writes this WebAssembly file to the given stream using the binary WebAssembly file encoding.
    /// </summary>
    /// <param name="target">The stream to write to.</param>
    public void WriteBinaryTo(Stream target)
    {
        var writer = new BinaryWriter(target);
        var wasmWriter = new BinaryWasmWriter(writer);
        wasmWriter.WriteFile(this);
    }

    /// <summary>
    /// Writes this WebAssembly file to the given stream using the binary WebAssembly file encoding.
    /// </summary>
    /// <param name="path">A path to the file to write to.</param>
    public void WriteBinaryTo(string path)
    {
        // BUG FIX: File.OpenWrite opens with FileMode.OpenOrCreate, which does
        // not truncate an existing file; writing a module shorter than the
        // previous contents would leave stale trailing bytes and produce a
        // corrupt wasm file. File.Create truncates (FileMode.Create).
        using (var fileStream = File.Create(path))
        {
            WriteBinaryTo(fileStream);
        }
    }

    /// <summary>
    /// Writes a textual representation of this WebAssembly file to the given text writer.
    /// Note that this representation is intended as a human-readable debugging format that may
    /// change at any time, not as a first-class textual WebAssembly module encoding.
    /// </summary>
    /// <param name="writer">The text writer to use.</param>
    public void Dump(TextWriter writer)
    {
        writer.Write(
            "WebAssembly module; magic number: {0}, version number: {1}",
            DumpHelpers.FormatHex(Header.Magic),
            Header.Version);

        foreach (var section in Sections)
        {
            writer.WriteLine();
            section.Dump(writer);
        }
    }

    /// <summary>
    /// Reads a binary WebAssembly from the given stream.
    /// </summary>
    /// <param name="source">The stream from which a WebAssembly file is to be read.</param>
    /// <returns>The WebAssembly file.</returns>
    public static WasmFile ReadBinary(Stream source)
    {
        // Create a WebAssembly reader and read the file.
        var reader = new BinaryReader(source);
        var wasmReader = new BinaryWasmReader(reader);
        return wasmReader.ReadFile();
    }

    /// <summary>
    /// Reads a binary WebAssembly from the given stream.
    /// </summary>
    /// <param name="source">The stream from which a WebAssembly file is to be read.</param>
    /// <param name="streamIsEmpty">Tests if the input stream is empty.</param>
    /// <returns>The WebAssembly file.</returns>
    public static WasmFile ReadBinary(Stream source, Func<bool> streamIsEmpty)
    {
        // Create a WebAssembly reader and read the file.
        var reader = new BinaryReader(source);
        var wasmReader = new BinaryWasmReader(reader, streamIsEmpty);
        return wasmReader.ReadFile();
    }

    /// <summary>
    /// Reads a binary WebAssembly from the file at the given path.
    /// </summary>
    /// <param name="path">A path to the file to read.</param>
    /// <returns>The WebAssembly file.</returns>
    public static WasmFile ReadBinary(string path)
    {
        WasmFile result;
        using (var fileStream = File.OpenRead(path))
        {
            result = ReadBinary(fileStream);
        }
        return result;
    }

    /// <summary>
    /// Inserts a new section into the WebAssembly file.
    /// The section is inserted in a way that preserves the ordering
    /// of sections as specified by the WebAssembly binary format.
    /// </summary>
    /// <param name="section">The section to insert.</param>
    /// <returns>The index in the section list at which <paramref name="section"/> is inserted.</returns>
    public int InsertSection(Section section)
    {
        if (!section.Name.IsCustom)
        {
            // The WebAssembly binary format requires that non-custom sections
            // are ordered by their codes.
            for (int i = 0; i < Sections.Count; i++)
            {
                if (!Sections[i].Name.IsCustom && section.Name.Code < Sections[i].Name.Code)
                {
                    Sections.Insert(i, section);
                    return i;
                }
            }
        }
        Sections.Add(section);
        return Sections.Count - 1;
    }

    /// <summary>
    /// Adds a name entry to the names section, defining a new names section
    /// if one doesn't exist already.
    /// </summary>
    /// <param name="entry">A name entry to add.</param>
    /// <returns>The index in the name section of the newly added name entry.</returns>
    public uint AddNameEntry(NameEntry entry)
    {
        var names = GetFirstSectionOrNull<NameSection>();
        if (names == null)
        {
            InsertSection(names = new NameSection());
        }
        names.Names.Add(entry);
        return (uint)names.Names.Count - 1;
    }

    /// <summary>
    /// Adds a user-defined memory to this module's memory section, defining
    /// a new memory section if one doesn't exist already.
    /// </summary>
    /// <param name="memory">The memory to add.</param>
    /// <returns>The index in the memory section of the newly added memory.</returns>
    public uint AddMemory(MemoryType memory)
    {
        var memories = GetFirstSectionOrNull<MemorySection>();
        if (memories == null)
        {
            InsertSection(memories = new MemorySection());
        }
        memories.Memories.Add(memory);
        return (uint)memories.Memories.Count - 1;
    }

    /// <summary>
    /// Adds a data segment to this module's data section, defining
    /// a new data section if one doesn't exist already.
    /// </summary>
    /// <param name="segment">The data segment to add.</param>
    /// <returns>The index in the data section of the newly added data segment.</returns>
    public uint AddDataSegment(DataSegment segment)
    {
        var data = GetFirstSectionOrNull<DataSection>();
        if (data == null)
        {
            InsertSection(data = new DataSection());
        }
        data.Segments.Add(segment);
        return (uint)data.Segments.Count - 1;
    }

    /// <summary>
    /// Adds an import to this module's import section, defining
    /// a new import section if one doesn't exist already.
    /// </summary>
    /// <param name="import">The import to add.</param>
    /// <returns>The index in the import section of the newly added import.</returns>
    public uint AddImport(ImportedValue import)
    {
        var imports = GetFirstSectionOrNull<ImportSection>();
        if (imports == null)
        {
            InsertSection(imports = new ImportSection());
        }
        imports.Imports.Add(import);
        return (uint)imports.Imports.Count - 1;
    }

    /// <summary>
    /// Adds an export to this module's export section, defining
    /// a new export section if one doesn't exist already.
    /// </summary>
    /// <param name="export">The export to add.</param>
    /// <returns>The index in the export section of the newly added export.</returns>
    public uint AddExport(ExportedValue export)
    {
        var exports = GetFirstSectionOrNull<ExportSection>();
        if (exports == null)
        {
            InsertSection(exports = new ExportSection());
        }
        exports.Exports.Add(export);
        return (uint)exports.Exports.Count - 1;
    }

    /// <summary>
    /// Adds a function type to this module's type section, defining
    /// a new type section if one doesn't exist already.
    /// </summary>
    /// <param name="type">The type to add.</param>
    /// <returns>The index in the type section of the newly added function type.</returns>
    public uint AddFunctionType(FunctionType type)
    {
        var types = GetFirstSectionOrNull<TypeSection>();
        if (types == null)
        {
            InsertSection(types = new TypeSection());
        }
        types.FunctionTypes.Add(type);
        return (uint)types.FunctionTypes.Count - 1;
    }

    /// <summary>
    /// Adds a table to this module's type section, defining
    /// a new table section if one doesn't exist already.
    /// </summary>
    /// <param name="table">The table to add.</param>
    /// <returns>The index in the table section of the newly added table.</returns>
    public uint AddTable(TableType table)
    {
        var tables = GetFirstSectionOrNull<TableSection>();
        if (tables == null)
        {
            InsertSection(tables = new TableSection());
        }
        tables.Tables.Add(table);
        return (uint)tables.Tables.Count - 1;
    }

    /// <summary>
    /// Adds a element segment to this module's element section, defining
    /// a new element section if one doesn't exist already.
    /// </summary>
    /// <param name="segment">The element segment to add.</param>
    /// <returns>The index in the element section of the newly added element segment.</returns>
    public uint AddElementSegment(ElementSegment segment)
    {
        var elements = GetFirstSectionOrNull<ElementSection>();
        if (elements == null)
        {
            InsertSection(elements = new ElementSection());
        }
        elements.Segments.Add(segment);
        return (uint)elements.Segments.Count - 1;
    }

    /// <summary>
    /// Adds a function definition to this module.
    /// </summary>
    /// <param name="functionTypeIndex">The index in the type section of the function's type.</param>
    /// <param name="functionBody">The body of the function to define.</param>
    /// <returns>The index in the function section of the newly added function definition.</returns>
    public uint AddFunction(uint functionTypeIndex, FunctionBody functionBody)
    {
        // The function and code sections must stay in sync: entry i of the
        // function section is the type of body i in the code section.
        var funs = GetFirstSectionOrNull<FunctionSection>();
        if (funs == null)
        {
            InsertSection(funs = new FunctionSection());
        }
        var code = GetFirstSectionOrNull<CodeSection>();
        if (code == null)
        {
            InsertSection(code = new CodeSection());
        }
        funs.FunctionTypes.Add(functionTypeIndex);
        code.Bodies.Add(functionBody);
        return (uint)funs.FunctionTypes.Count - 1;
    }

    /// <summary>
    /// Adds a global variable definition to this module.
    /// </summary>
    /// <param name="globalVariable">A global variable definition to introduce.</param>
    /// <returns>The index in the global section of the newly added global variable definition.</returns>
    public uint AddGlobal(GlobalVariable globalVariable)
    {
        var globals = GetFirstSectionOrNull<GlobalSection>();
        if (globals == null)
        {
            InsertSection(globals = new GlobalSection());
        }
        globals.GlobalVariables.Add(globalVariable);
        return (uint)globals.GlobalVariables.Count - 1;
    }
}
}
<|start_filename|>libwasm/Instructions/IfElseInstruction.cs<|end_filename|>
using System.Collections.Generic;
using System.IO;
using Wasm.Binary;
namespace Wasm.Instructions
{
/// <summary>
/// Describes a WebAssembly stack machine instruction that runs one of two
/// blocks of instructions, based on the value of its predicate.
/// </summary>
public sealed class IfElseInstruction : Instruction
{
    /// <summary>
    /// Creates an if-else instruction from the given type, if-branch and
    /// else-branch.
    /// </summary>
    /// <param name="type">The type of value returned by the if-else instruction.</param>
    /// <param name="ifBranch">The if-else instruction's 'if' branch.</param>
    /// <param name="elseBranch">The if-else instruction's 'else' branch.</param>
    public IfElseInstruction(
        WasmType type,
        IEnumerable<Instruction> ifBranch,
        IEnumerable<Instruction> elseBranch)
    {
        this.Type = type;
        this.IfBranch = new List<Instruction>(ifBranch);
        // A null else-branch means the instruction has no 'else' block at all.
        if (elseBranch == null)
        {
            this.ElseBranch = null;
        }
        else
        {
            this.ElseBranch = new List<Instruction>(elseBranch);
        }
    }

    /// <summary>
    /// Gets the operator for this instruction.
    /// </summary>
    /// <returns>The instruction's operator.</returns>
    public override Operator Op => Operators.If;

    /// <summary>
    /// Gets the type of value returned by this instruction.
    /// </summary>
    /// <returns>The type of value returned by this instruction.</returns>
    public WasmType Type { get; set; }

    /// <summary>
    /// Gets this if-else instruction's contents for the 'if' branch.
    /// </summary>
    /// <returns>The if-else instruction's contents for the 'if' branch.</returns>
    public List<Instruction> IfBranch { get; private set; }

    /// <summary>
    /// Gets this if-else instruction's contents for the 'else' branch.
    /// </summary>
    /// <returns>
    /// The if-else instruction's contents for the 'else' branch.
    /// <c>null</c> is returned if there is no 'else' branch.
    /// </returns>
    public List<Instruction> ElseBranch { get; private set; }

    /// <summary>
    /// Checks if this if-else instruction has an 'else' branch.
    /// </summary>
    public bool HasElseBranch => ElseBranch != null;

    /// <summary>
    /// Writes this instruction's immediates (but not its opcode)
    /// to the given WebAssembly file writer.
    /// </summary>
    /// <param name="writer">The writer to write this instruction's immediates to.</param>
    public override void WriteImmediatesTo(BinaryWasmWriter writer)
    {
        // Immediate layout: result type first, then both branch bodies.
        writer.WriteWasmType(Type);
        WriteContentsTo(writer);
    }

    /// <summary>
    /// Writes this instruction's child instructions to the given WebAssembly file writer,
    /// followed by an 'end' opcode.
    /// </summary>
    /// <param name="writer">The writer to write this instruction's child instructions to.</param>
    public void WriteContentsTo(BinaryWasmWriter writer)
    {
        foreach (var childInstruction in IfBranch)
        {
            childInstruction.WriteTo(writer);
        }

        if (HasElseBranch)
        {
            // The 'else' opcode separates the two branch bodies.
            writer.Writer.Write(Operators.ElseOpCode);
            foreach (var childInstruction in ElseBranch)
            {
                childInstruction.WriteTo(writer);
            }
        }

        writer.Writer.Write(Operators.EndOpCode);
    }

    /// <summary>
    /// Writes a string representation of this instruction to the given text writer.
    /// </summary>
    /// <param name="writer">
    /// The writer to which a representation of this instruction is written.
    /// </param>
    public override void Dump(TextWriter writer)
    {
        Op.Dump(writer);
        writer.Write(" (result: ");
        DumpHelpers.DumpWasmType(Type, writer);
        writer.Write(")");

        // Child instructions are printed one level deeper than this one.
        var nestedWriter = DumpHelpers.CreateIndentedTextWriter(writer);
        foreach (var childInstruction in IfBranch)
        {
            nestedWriter.WriteLine();
            childInstruction.Dump(nestedWriter);
        }
        writer.WriteLine();

        if (HasElseBranch)
        {
            writer.Write("else");
            foreach (var childInstruction in ElseBranch)
            {
                nestedWriter.WriteLine();
                childInstruction.Dump(nestedWriter);
            }
            writer.WriteLine();
        }

        writer.Write("end");
    }
}
}
<|start_filename|>libwasm/DataSection.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using Wasm.Binary;
using Wasm.Instructions;
namespace Wasm
{
/// <summary>
/// A type of section that declares the initialized data that is loaded into the linear memory.
/// </summary>
public sealed class DataSection : Section
{
    /// <summary>
    /// Creates an empty data section.
    /// </summary>
    public DataSection()
    {
        this.Segments = new List<DataSegment>();
        this.ExtraPayload = new byte[0];
    }

    /// <summary>
    /// Creates a data section from a sequence of data segments.
    /// </summary>
    /// <param name="segments">A sequence of data segments.</param>
    public DataSection(IEnumerable<DataSegment> segments)
        : this(segments, new byte[0])
    {
    }

    /// <summary>
    /// Creates a data section from a sequence of data segments and a trailing payload.
    /// </summary>
    /// <param name="segments">A sequence of data segments.</param>
    /// <param name="extraPayload">
    /// A sequence of bytes that have no intrinsic meaning; they are part
    /// of the data section but are placed after the data section's actual contents.
    /// </param>
    public DataSection(IEnumerable<DataSegment> segments, byte[] extraPayload)
    {
        this.Segments = new List<DataSegment>(segments);
        this.ExtraPayload = extraPayload;
    }

    /// <inheritdoc/>
    public override SectionName Name => new SectionName(SectionCode.Data);

    /// <summary>
    /// Gets the list of the data segments that are defined by this section.
    /// </summary>
    /// <returns>The data segments defined by this section.</returns>
    public List<DataSegment> Segments { get; private set; }

    /// <summary>
    /// Gets this data section's additional payload.
    /// </summary>
    /// <returns>The additional payload, as an array of bytes.</returns>
    public byte[] ExtraPayload { get; set; }

    /// <inheritdoc/>
    public override void WritePayloadTo(BinaryWasmWriter writer)
    {
        writer.WriteVarUInt32((uint)Segments.Count);
        // Renamed misleading copy-paste loop variable ('export') to 'segment'.
        foreach (var segment in Segments)
        {
            segment.WriteTo(writer);
        }
        writer.Writer.Write(ExtraPayload);
    }

    /// <summary>
    /// Reads the data section with the given header.
    /// </summary>
    /// <param name="header">The section header.</param>
    /// <param name="reader">A reader for a binary WebAssembly file.</param>
    /// <returns>The parsed section.</returns>
    public static DataSection ReadSectionPayload(
        SectionHeader header, BinaryWasmReader reader)
    {
        long startPos = reader.Position;
        // Read the data segments.
        uint count = reader.ReadVarUInt32();
        var segments = new List<DataSegment>();
        for (uint i = 0; i < count; i++)
        {
            segments.Add(DataSegment.ReadFrom(reader));
        }
        // Skip any remaining bytes.
        var extraPayload = reader.ReadRemainingPayload(startPos, header);
        return new DataSection(segments, extraPayload);
    }

    /// <inheritdoc/>
    public override void Dump(TextWriter writer)
    {
        writer.Write(Name.ToString());
        writer.Write("; number of entries: ");
        writer.Write(Segments.Count);
        writer.WriteLine();
        var indentedWriter = DumpHelpers.CreateIndentedTextWriter(writer);
        for (int i = 0; i < Segments.Count; i++)
        {
            writer.Write("#{0}:", i);
            indentedWriter.WriteLine();
            Segments[i].Dump(indentedWriter);
            writer.WriteLine();
        }
        if (ExtraPayload.Length > 0)
        {
            writer.Write("Extra payload size: ");
            writer.Write(ExtraPayload.Length);
            writer.WriteLine();
            DumpHelpers.DumpBytes(ExtraPayload, writer);
            writer.WriteLine();
        }
    }
}
/// <summary>
/// Defines an initializer expression.
/// </summary>
public sealed class InitializerExpression
{
/// <summary>
/// Creates an initializer expression from the given list of instructions.
/// </summary>
/// <param name="body">The list of instructions for this expression.</param>
public InitializerExpression(IEnumerable<Instruction> body)
{
this.BodyInstructions = new List<Instruction>(body);
}
/// <summary>
/// Creates an initializer expression from the given list of instructions.
/// </summary>
/// <param name="body">The list of instructions for this expression.</param>
public InitializerExpression(params Instruction[] body)
: this((IEnumerable<Instruction>)body)
{ }
/// <summary>
/// Gets the body of this initializer expression as a list instruction.
/// </summary>
/// <returns>The initializer expression's body.</returns>
public List<Instruction> BodyInstructions { get; private set; }
/// <summary>
/// Reads an initializer expression from the given WebAssembly reader.
/// </summary>
/// <param name="reader">The WebAssembly reader.</param>
/// <returns>The parsed initializer expression.</returns>
public static InitializerExpression ReadFrom(BinaryWasmReader reader)
{
return new InitializerExpression(
Operators.Block.ReadBlockContents(WasmType.Empty, reader).Contents);
}
/// <summary>
/// Writes the initializer expression to the given WebAssembly writer.
/// </summary>
/// <param name="writer">The WebAssembly writer.</param>
public void WriteTo(BinaryWasmWriter writer)
{
Operators.Block.Create(WasmType.Empty, BodyInstructions).WriteContentsTo(writer);
}
}
/// <summary>
/// An entry in the data section.
/// </summary>
public sealed class DataSegment
{
/// <summary>
/// Creates a data segment from the given memory index, offset and data.
/// </summary>
/// <param name="memoryIndex">The memory index.</param>
/// <param name="offset">An i32 initializer expression that computes the offset at which to place the data.</param>
/// <param name="data">The data to which a segment of the linear memory is initialized.</param>
public DataSegment(uint memoryIndex, InitializerExpression offset, byte[] data)
{
this.MemoryIndex = memoryIndex;
this.Offset = offset;
this.Data = data;
}
/// <summary>
/// Gets or sets the linear memory index.
/// </summary>
/// <returns>The linear memory index.</returns>
public uint MemoryIndex { get; set; }
/// <summary>
/// Gets or sets an i32 initializer expression that computes the offset at which to place the data.
/// </summary>
/// <returns>An i32 initializer expression.</returns>
public InitializerExpression Offset { get; set; }
/// <summary>
/// Gets or sets the data to which a segment of the linear memory is initialized.
/// </summary>
/// <returns>Initial data for a segment of the linear memory.</returns>
public byte[] Data { get; set; }
/// <summary>
/// Writes this exported value to the given WebAssembly file writer.
/// </summary>
/// <param name="writer">The WebAssembly file writer.</param>
public void WriteTo(BinaryWasmWriter writer)
{
writer.WriteVarUInt32(MemoryIndex);
Offset.WriteTo(writer);
writer.WriteVarUInt32((uint)Data.Length);
writer.Writer.Write(Data);
}
/// <summary>
/// Reads a data segment from the given WebAssembly reader.
/// </summary>
/// <param name="reader">The WebAssembly reader.</param>
/// <returns>The data segment that was read from the reader.</returns>
public static DataSegment ReadFrom(BinaryWasmReader reader)
{
var index = reader.ReadVarUInt32();
var offset = InitializerExpression.ReadFrom(reader);
var dataLength = reader.ReadVarUInt32();
var data = reader.ReadBytes((int)dataLength);
return new DataSegment(index, offset, data);
}
/// <summary>
/// Writes a textual representation of this exported value to the given writer.
/// </summary>
/// <param name="writer">The writer to which text is written.</param>
public void Dump(TextWriter writer)
{
writer.Write("- Memory index: ");
writer.Write(MemoryIndex);
writer.WriteLine();
writer.Write("- Offset:");
var indentedWriter = DumpHelpers.CreateIndentedTextWriter(writer);
foreach (var instruction in Offset.BodyInstructions)
{
indentedWriter.WriteLine();
instruction.Dump(indentedWriter);
}
writer.WriteLine();
writer.Write("- Data:");
indentedWriter.WriteLine();
DumpHelpers.DumpBytes(Data, indentedWriter);
}
}
}
<|start_filename|>unit-tests/Text/AssemblerTests.cs<|end_filename|>
using System;
using System.Text;
using Loyc.MiniTest;
using Pixie;
using Wasm.Interpret;
namespace Wasm.Text
{
/// <summary>
/// Unit tests for the WebAssembly text-format assembler: each test feeds a
/// '(module ...)' s-expression to an <see cref="Assembler"/> and inspects
/// the sections of the resulting <see cref="WasmFile"/>, or verifies that
/// malformed input is rejected with a diagnostic.
/// </summary>
[TestFixture]
public class AssemblerTests
{
    [Test]
    public void AssembleEmptyModule()
    {
        // A bare '(module)' produces a file with no sections at all.
        var module = AssembleModule("(module)");
        Assert.AreEqual(0, module.Sections.Count);
    }

    [Test]
    public void AssembleNamedEmptyModule()
    {
        // Naming the module emits exactly one section: a name section with
        // a single entry that records the module name.
        var module = AssembleModule("(module $test_module)");
        Assert.AreEqual(1, module.Sections.Count);
        Assert.AreEqual(1, module.GetFirstSectionOrNull<NameSection>().Names.Count);
        Assert.AreEqual("test_module", module.ModuleName);
    }

    [Test]
    public void AssembleModulesWithMemory()
    {
        // Memory with explicit initial and maximum page counts.
        var module = AssembleModule("(module (memory $mem 10 40))");
        Assert.AreEqual(1, module.Sections.Count);
        var memSection = module.GetFirstSectionOrNull<MemorySection>();
        Assert.IsNotNull(memSection);
        Assert.AreEqual(1, memSection.Memories.Count);
        var memory = memSection.Memories[0];
        Assert.AreEqual(10u, memory.Limits.Initial);
        Assert.IsTrue(memory.Limits.HasMaximum);
        Assert.AreEqual(40u, memory.Limits.Maximum);
        // Memory with only an initial size: no maximum is recorded.
        module = AssembleModule("(module (memory 10))");
        Assert.AreEqual(1, module.Sections.Count);
        memSection = module.GetFirstSectionOrNull<MemorySection>();
        Assert.IsNotNull(memSection);
        Assert.AreEqual(1, memSection.Memories.Count);
        memory = memSection.Memories[0];
        Assert.AreEqual(10u, memory.Limits.Initial);
        Assert.IsFalse(memory.Limits.HasMaximum);
        // Inline '(data ...)' form: the assembler sizes the memory to fit
        // the data (one page here, min == max) and emits a data section
        // with a single segment for memory zero.
        module = AssembleModule("(module (memory (data \"hello world\")))");
        Assert.AreEqual(2, module.Sections.Count);
        memSection = module.GetFirstSectionOrNull<MemorySection>();
        Assert.IsNotNull(memSection);
        Assert.AreEqual(1, memSection.Memories.Count);
        memory = memSection.Memories[0];
        Assert.AreEqual(1u, memory.Limits.Initial);
        Assert.IsTrue(memory.Limits.HasMaximum);
        Assert.AreEqual(1u, memory.Limits.Maximum);
        var dataSection = module.GetFirstSectionOrNull<DataSection>();
        Assert.IsNotNull(dataSection);
        Assert.AreEqual(1, dataSection.Segments.Count);
        var segment = dataSection.Segments[0];
        Assert.AreEqual(0u, segment.MemoryIndex);
        Assert.AreEqual("hello world", Encoding.UTF8.GetString(segment.Data));
        // Inline import form: the memory goes to the import section rather
        // than the memory section.
        module = AssembleModule("(module (memory (import \"mod\" \"mem\") 10 40))");
        Assert.AreEqual(1, module.Sections.Count);
        var importSection = module.GetFirstSectionOrNull<ImportSection>();
        Assert.IsNotNull(importSection);
        Assert.AreEqual(1, importSection.Imports.Count);
        var import = importSection.Imports[0];
        Assert.AreEqual(ExternalKind.Memory, import.Kind);
        Assert.AreEqual("mod", import.ModuleName);
        Assert.AreEqual("mem", import.FieldName);
        memory = ((ImportedMemory)import).Memory;
        Assert.AreEqual(10u, memory.Limits.Initial);
        Assert.IsTrue(memory.Limits.HasMaximum);
        Assert.AreEqual(40u, memory.Limits.Maximum);
        // Combined inline export + import: both an import entry and an
        // export entry referring to memory index zero are produced.
        module = AssembleModule("(module (memory (export \"mem\") (import \"mod\" \"mem\") 10 40))");
        Assert.AreEqual(2, module.Sections.Count);
        importSection = module.GetFirstSectionOrNull<ImportSection>();
        Assert.IsNotNull(importSection);
        Assert.AreEqual(1, importSection.Imports.Count);
        import = importSection.Imports[0];
        Assert.AreEqual(ExternalKind.Memory, import.Kind);
        Assert.AreEqual("mod", import.ModuleName);
        Assert.AreEqual("mem", import.FieldName);
        memory = ((ImportedMemory)import).Memory;
        Assert.AreEqual(10u, memory.Limits.Initial);
        Assert.IsTrue(memory.Limits.HasMaximum);
        Assert.AreEqual(40u, memory.Limits.Maximum);
        var exportSection = module.GetFirstSectionOrNull<ExportSection>();
        Assert.IsNotNull(exportSection);
        Assert.AreEqual(1, exportSection.Exports.Count);
        var export = exportSection.Exports[0];
        Assert.AreEqual("mem", export.Name);
        Assert.AreEqual(0u, export.Index);
        Assert.AreEqual(ExternalKind.Memory, export.Kind);
    }

    [Test]
    public void AssembleModulesWithExports()
    {
        // An '(export ...)' that names '$mem2' must resolve the identifier
        // to memory index 1 (the second memory).
        var module = AssembleModule("(module (memory $mem1 10 40) (memory $mem2 10 40) (export \"mem\" (memory $mem2)))");
        Assert.AreEqual(2, module.Sections.Count);
        var memSection = module.GetFirstSectionOrNull<MemorySection>();
        Assert.IsNotNull(memSection);
        Assert.AreEqual(2, memSection.Memories.Count);
        var memory = memSection.Memories[1];
        Assert.AreEqual(10u, memory.Limits.Initial);
        Assert.IsTrue(memory.Limits.HasMaximum);
        Assert.AreEqual(40u, memory.Limits.Maximum);
        var exportSection = module.GetFirstSectionOrNull<ExportSection>();
        Assert.IsNotNull(exportSection);
        Assert.AreEqual(1, exportSection.Exports.Count);
        var export = exportSection.Exports[0];
        Assert.AreEqual("mem", export.Name);
        Assert.AreEqual(1u, export.Index);
        Assert.AreEqual(ExternalKind.Memory, export.Kind);
    }

    [Test]
    public void AssembleModulesWithImports()
    {
        // Memory import via a top-level '(import ...)' form.
        var module = AssembleModule("(module (import \"spectest\" \"memory\" (memory 1 2)))");
        Assert.AreEqual(1, module.Sections.Count);
        var importSection = module.GetFirstSectionOrNull<ImportSection>();
        Assert.IsNotNull(importSection);
        Assert.AreEqual(1, importSection.Imports.Count);
        var memoryImport = (ImportedMemory)importSection.Imports[0];
        Assert.AreEqual("spectest", memoryImport.ModuleName);
        Assert.AreEqual("memory", memoryImport.FieldName);
        var memory = memoryImport.Memory;
        Assert.AreEqual(1u, memory.Limits.Initial);
        Assert.IsTrue(memory.Limits.HasMaximum);
        Assert.AreEqual(2u, memory.Limits.Maximum);
        // Function import with an empty signature: a nullary '() -> ()'
        // type is synthesized in the type section at index 0.
        module = AssembleModule("(module (import \"spectest\" \"memory\" (func)))");
        Assert.AreEqual(2, module.Sections.Count);
        importSection = module.GetFirstSectionOrNull<ImportSection>();
        Assert.IsNotNull(importSection);
        Assert.AreEqual(1, importSection.Imports.Count);
        var funcImport = (ImportedFunction)importSection.Imports[0];
        Assert.AreEqual("spectest", funcImport.ModuleName);
        Assert.AreEqual("memory", funcImport.FieldName);
        var funcTypeIndex = funcImport.TypeIndex;
        Assert.AreEqual(0u, funcTypeIndex);
        var typeSection = module.GetFirstSectionOrNull<TypeSection>();
        Assert.AreEqual(1, typeSection.FunctionTypes.Count);
        var funcType = typeSection.FunctionTypes[0];
        Assert.AreEqual(0, funcType.ParameterTypes.Count);
        Assert.AreEqual(0, funcType.ReturnTypes.Count);
        // Empty '(param)' and '(result)' clauses are equivalent to omitting
        // them entirely.
        module = AssembleModule("(module (import \"spectest\" \"memory\" (func (param) (result))))");
        Assert.AreEqual(2, module.Sections.Count);
        importSection = module.GetFirstSectionOrNull<ImportSection>();
        Assert.IsNotNull(importSection);
        Assert.AreEqual(1, importSection.Imports.Count);
        funcImport = (ImportedFunction)importSection.Imports[0];
        Assert.AreEqual("spectest", funcImport.ModuleName);
        Assert.AreEqual("memory", funcImport.FieldName);
        funcTypeIndex = funcImport.TypeIndex;
        Assert.AreEqual(0u, funcTypeIndex);
        typeSection = module.GetFirstSectionOrNull<TypeSection>();
        Assert.AreEqual(1, typeSection.FunctionTypes.Count);
        funcType = typeSection.FunctionTypes[0];
        Assert.AreEqual(0, funcType.ParameterTypes.Count);
        Assert.AreEqual(0, funcType.ReturnTypes.Count);
        // Function import with a full parameter list and a result type.
        module = AssembleModule("(module (import \"spectest\" \"memory\" (func (param i32 i64 f32 f64) (result f64))))");
        Assert.AreEqual(2, module.Sections.Count);
        importSection = module.GetFirstSectionOrNull<ImportSection>();
        Assert.IsNotNull(importSection);
        Assert.AreEqual(1, importSection.Imports.Count);
        funcImport = (ImportedFunction)importSection.Imports[0];
        Assert.AreEqual("spectest", funcImport.ModuleName);
        Assert.AreEqual("memory", funcImport.FieldName);
        funcTypeIndex = funcImport.TypeIndex;
        Assert.AreEqual(0u, funcTypeIndex);
        typeSection = module.GetFirstSectionOrNull<TypeSection>();
        Assert.AreEqual(1, typeSection.FunctionTypes.Count);
        funcType = typeSection.FunctionTypes[0];
        Assert.AreEqual(4, funcType.ParameterTypes.Count);
        Assert.AreEqual(WasmValueType.Int32, funcType.ParameterTypes[0]);
        Assert.AreEqual(WasmValueType.Int64, funcType.ParameterTypes[1]);
        Assert.AreEqual(WasmValueType.Float32, funcType.ParameterTypes[2]);
        Assert.AreEqual(WasmValueType.Float64, funcType.ParameterTypes[3]);
        Assert.AreEqual(1, funcType.ReturnTypes.Count);
        Assert.AreEqual(WasmValueType.Float64, funcType.ReturnTypes[0]);
        // Same import expressed with the inline '(func (import ...))' sugar.
        module = AssembleModule("(module (func (import \"spectest\" \"memory\") (param i32 i64 f32 f64) (result f64)))");
        Assert.AreEqual(2, module.Sections.Count);
        importSection = module.GetFirstSectionOrNull<ImportSection>();
        Assert.IsNotNull(importSection);
        Assert.AreEqual(1, importSection.Imports.Count);
        funcImport = (ImportedFunction)importSection.Imports[0];
        Assert.AreEqual("spectest", funcImport.ModuleName);
        Assert.AreEqual("memory", funcImport.FieldName);
        funcTypeIndex = funcImport.TypeIndex;
        Assert.AreEqual(0u, funcTypeIndex);
        typeSection = module.GetFirstSectionOrNull<TypeSection>();
        Assert.AreEqual(1, typeSection.FunctionTypes.Count);
        funcType = typeSection.FunctionTypes[0];
        Assert.AreEqual(4, funcType.ParameterTypes.Count);
        Assert.AreEqual(WasmValueType.Int32, funcType.ParameterTypes[0]);
        Assert.AreEqual(WasmValueType.Int64, funcType.ParameterTypes[1]);
        Assert.AreEqual(WasmValueType.Float32, funcType.ParameterTypes[2]);
        Assert.AreEqual(WasmValueType.Float64, funcType.ParameterTypes[3]);
        Assert.AreEqual(1, funcType.ReturnTypes.Count);
        Assert.AreEqual(WasmValueType.Float64, funcType.ReturnTypes[0]);
        // Immutable global import, top-level form.
        module = AssembleModule("(module (import \"spectest\" \"global_i32\" (global $x i32)))");
        Assert.AreEqual(1, module.Sections.Count);
        importSection = module.GetFirstSectionOrNull<ImportSection>();
        Assert.IsNotNull(importSection);
        Assert.AreEqual(1, importSection.Imports.Count);
        var globalImport = (ImportedGlobal)importSection.Imports[0];
        Assert.AreEqual("spectest", globalImport.ModuleName);
        Assert.AreEqual("global_i32", globalImport.FieldName);
        Assert.AreEqual(WasmValueType.Int32, globalImport.Global.ContentType);
        Assert.IsFalse(globalImport.Global.IsMutable);
        // Immutable global import, inline '(global ... (import ...))' sugar.
        module = AssembleModule("(module (global $x (import \"spectest\" \"global_i32\") i32))");
        Assert.AreEqual(1, module.Sections.Count);
        importSection = module.GetFirstSectionOrNull<ImportSection>();
        Assert.IsNotNull(importSection);
        Assert.AreEqual(1, importSection.Imports.Count);
        globalImport = (ImportedGlobal)importSection.Imports[0];
        Assert.AreEqual("spectest", globalImport.ModuleName);
        Assert.AreEqual("global_i32", globalImport.FieldName);
        Assert.AreEqual(WasmValueType.Int32, globalImport.Global.ContentType);
        Assert.IsFalse(globalImport.Global.IsMutable);
        // Mutable global import: '(mut i32)' sets the mutability flag.
        module = AssembleModule("(module (import \"spectest\" \"global_i32\" (global (mut i32))))");
        Assert.AreEqual(1, module.Sections.Count);
        importSection = module.GetFirstSectionOrNull<ImportSection>();
        Assert.IsNotNull(importSection);
        Assert.AreEqual(1, importSection.Imports.Count);
        globalImport = (ImportedGlobal)importSection.Imports[0];
        Assert.AreEqual("spectest", globalImport.ModuleName);
        Assert.AreEqual("global_i32", globalImport.FieldName);
        Assert.AreEqual(WasmValueType.Int32, globalImport.Global.ContentType);
        Assert.IsTrue(globalImport.Global.IsMutable);
        // Mutable global import, inline sugar.
        module = AssembleModule("(module (global (import \"spectest\" \"global_i32\") (mut i32)))");
        Assert.AreEqual(1, module.Sections.Count);
        importSection = module.GetFirstSectionOrNull<ImportSection>();
        Assert.IsNotNull(importSection);
        Assert.AreEqual(1, importSection.Imports.Count);
        globalImport = (ImportedGlobal)importSection.Imports[0];
        Assert.AreEqual("spectest", globalImport.ModuleName);
        Assert.AreEqual("global_i32", globalImport.FieldName);
        Assert.AreEqual(WasmValueType.Int32, globalImport.Global.ContentType);
        Assert.IsTrue(globalImport.Global.IsMutable);
        // Table import: 'funcref' maps to the AnyFunc element type.
        module = AssembleModule("(module (import \"spectest\" \"table\" (table 10 20 funcref)))");
        Assert.AreEqual(1, module.Sections.Count);
        importSection = module.GetFirstSectionOrNull<ImportSection>();
        Assert.IsNotNull(importSection);
        Assert.AreEqual(1, importSection.Imports.Count);
        var tableImport = (ImportedTable)importSection.Imports[0];
        Assert.AreEqual("spectest", tableImport.ModuleName);
        Assert.AreEqual("table", tableImport.FieldName);
        Assert.AreEqual(WasmType.AnyFunc, tableImport.Table.ElementType);
        Assert.AreEqual(10u, tableImport.Table.Limits.Initial);
        Assert.IsTrue(tableImport.Table.Limits.HasMaximum);
        Assert.AreEqual(20u, tableImport.Table.Limits.Maximum);
        // '(type ...)' use clauses: however the signature is spelled —
        // symbolic '$f', numeric '1', explicit params, or type-only — all
        // five imports must resolve to the same type index (1), and no
        // duplicate type may be added to the type section.
        module = AssembleModule("(module " +
            "(type $g (func (param i32) (result f64))) " +
            "(type $f (func (param i32 i64 f32 f64) (result f64))) " +
            "(import \"spectest\" \"f\" (func (type $f) (param i32 i64 f32 f64) (result f64))) " +
            "(import \"spectest\" \"f\" (func (type 1) (param i32 i64 f32 f64) (result f64))) " +
            "(import \"spectest\" \"f\" (func (param i32 i64 f32 f64) (result f64))) " +
            "(import \"spectest\" \"f\" (func (type 1))) " +
            "(import \"spectest\" \"f\" (func (type $f))))");
        Assert.AreEqual(2, module.Sections.Count);
        importSection = module.GetFirstSectionOrNull<ImportSection>();
        Assert.IsNotNull(importSection);
        Assert.AreEqual(5, importSection.Imports.Count);
        for (int i = 0; i < importSection.Imports.Count; i++)
        {
            funcImport = (ImportedFunction)importSection.Imports[i];
            Assert.AreEqual("spectest", funcImport.ModuleName);
            Assert.AreEqual("f", funcImport.FieldName);
            funcTypeIndex = funcImport.TypeIndex;
            Assert.AreEqual(1u, funcTypeIndex);
        }
        typeSection = module.GetFirstSectionOrNull<TypeSection>();
        Assert.AreEqual(2, typeSection.FunctionTypes.Count);
        funcType = typeSection.FunctionTypes[1];
        Assert.AreEqual(4, funcType.ParameterTypes.Count);
        Assert.AreEqual(WasmValueType.Int32, funcType.ParameterTypes[0]);
        Assert.AreEqual(WasmValueType.Int64, funcType.ParameterTypes[1]);
        Assert.AreEqual(WasmValueType.Float32, funcType.ParameterTypes[2]);
        Assert.AreEqual(WasmValueType.Float64, funcType.ParameterTypes[3]);
        Assert.AreEqual(1, funcType.ReturnTypes.Count);
        Assert.AreEqual(WasmValueType.Float64, funcType.ReturnTypes[0]);
    }

    [Test]
    public void AssembleModulesWithTables()
    {
        // Table with an initial size only.
        var module = AssembleModule("(module (table 0 funcref))");
        Assert.AreEqual(1, module.Sections.Count);
        var tableSection = module.GetFirstSectionOrNull<TableSection>();
        Assert.IsNotNull(tableSection);
        Assert.AreEqual(1, tableSection.Tables.Count);
        var table = (TableType)tableSection.Tables[0];
        Assert.AreEqual(WasmType.AnyFunc, table.ElementType);
        Assert.AreEqual(0u, table.Limits.Initial);
        Assert.IsFalse(table.Limits.HasMaximum);
        // Table with both initial size and maximum.
        module = AssembleModule("(module (table 0 1 funcref))");
        Assert.AreEqual(1, module.Sections.Count);
        tableSection = module.GetFirstSectionOrNull<TableSection>();
        Assert.IsNotNull(tableSection);
        Assert.AreEqual(1, tableSection.Tables.Count);
        table = (TableType)tableSection.Tables[0];
        Assert.AreEqual(WasmType.AnyFunc, table.ElementType);
        Assert.AreEqual(0u, table.Limits.Initial);
        Assert.IsTrue(table.Limits.HasMaximum);
        Assert.AreEqual(1u, table.Limits.Maximum);
        // Inline import form for tables.
        module = AssembleModule("(module (table (import \"spectest\" \"table\") 10 20 funcref))");
        Assert.AreEqual(1, module.Sections.Count);
        var importSection = module.GetFirstSectionOrNull<ImportSection>();
        Assert.IsNotNull(importSection);
        Assert.AreEqual(1, importSection.Imports.Count);
        var tableImport = (ImportedTable)importSection.Imports[0];
        Assert.AreEqual("spectest", tableImport.ModuleName);
        Assert.AreEqual("table", tableImport.FieldName);
        Assert.AreEqual(WasmType.AnyFunc, tableImport.Table.ElementType);
        Assert.AreEqual(10u, tableImport.Table.Limits.Initial);
        Assert.IsTrue(tableImport.Table.Limits.HasMaximum);
        Assert.AreEqual(20u, tableImport.Table.Limits.Maximum);
        // Multiple inline exports on a single imported table: both exports
        // must point at table index 0.
        module = AssembleModule("(module (table (export \"table1\") (export \"table2\") (import \"spectest\" \"table\") 10 20 funcref))");
        Assert.AreEqual(2, module.Sections.Count);
        importSection = module.GetFirstSectionOrNull<ImportSection>();
        Assert.IsNotNull(importSection);
        Assert.AreEqual(1, importSection.Imports.Count);
        tableImport = (ImportedTable)importSection.Imports[0];
        Assert.AreEqual("spectest", tableImport.ModuleName);
        Assert.AreEqual("table", tableImport.FieldName);
        Assert.AreEqual(WasmType.AnyFunc, tableImport.Table.ElementType);
        Assert.AreEqual(10u, tableImport.Table.Limits.Initial);
        Assert.IsTrue(tableImport.Table.Limits.HasMaximum);
        Assert.AreEqual(20u, tableImport.Table.Limits.Maximum);
        var exportSection = module.GetFirstSectionOrNull<ExportSection>();
        Assert.IsNotNull(exportSection);
        Assert.AreEqual("table1", exportSection.Exports[0].Name);
        Assert.AreEqual(ExternalKind.Table, exportSection.Exports[0].Kind);
        Assert.AreEqual(0u, exportSection.Exports[0].Index);
        Assert.AreEqual("table2", exportSection.Exports[1].Name);
        Assert.AreEqual(ExternalKind.Table, exportSection.Exports[1].Kind);
        Assert.AreEqual(0u, exportSection.Exports[1].Index);
    }

    [Test]
    public void AssembleModulesWithStart()
    {
        // '(start $entry)' must resolve the identifier to function index 1.
        var module = AssembleModule("(module (func $f nop) (func $entry nop) (start $entry))");
        Assert.AreEqual((uint?)1u, module.StartFunctionIndex);
    }

    [Test]
    public void AssembleBadMemoryModules()
    {
        // Each of these malformed memory declarations must be rejected:
        // missing limits, bogus '(limits ...)' syntax, out-of-range sizes,
        // non-integer sizes, signed sizes, duplicate identifiers, too many
        // limit operands, and an import without limits.
        AssertInvalidModule("(module (memory))");
        AssertInvalidModule("(module (memory (limits 10 50)))");
        AssertInvalidModule("(module (memory $mem 78359126329586239865823 725357639275693276582334525))");
        AssertInvalidModule("(module (memory $mem 10e7 10e8))");
        AssertInvalidModule("(module (memory +10 +40))");
        AssertInvalidModule("(module (memory $mem1 $mem2 10 40))");
        AssertInvalidModule("(module (memory 10 40 10 40))");
        AssertInvalidModule("(module (memory (import \"mod\" \"mem\")))");
    }

    [Test]
    public void AssembleBadExportModules()
    {
        // Exporting an identifier that is never defined must be rejected.
        AssertInvalidModule("(module (export \"mem\" (memory $mem)))");
    }

    [Test]
    public void AssembleInstructions()
    {
        // Each case assembles a one-function module (see EvaluateConstExpr)
        // and runs it through the interpreter, covering plain/folded
        // instruction syntax, blocks, branches, locals, globals, memory
        // ops and calls.
        Assert.AreEqual(10, EvaluateConstExpr(WasmType.Int32, "i32.const 10"));
        Assert.AreEqual(15, EvaluateConstExpr(WasmType.Int32, "i32.const 10 i32.const 5 i32.add"));
        Assert.AreEqual(5, EvaluateConstExpr(WasmType.Int32, "i32.const 10 i32.const -5 i32.add"));
        Assert.AreEqual(15, EvaluateConstExpr(WasmType.Int32, "(i32.add (i32.const 10) (i32.const 5))"));
        Assert.AreEqual(5, EvaluateConstExpr(WasmType.Int32, "(block $block (result i32) i32.const 10 i32.const -5 i32.add)"));
        Assert.AreEqual(15, EvaluateConstExpr(WasmType.Int32, "block $block (result i32) i32.const 10 i32.const -5 i32.add end i32.const 10 i32.add"));
        Assert.AreEqual(5, EvaluateConstExpr(WasmType.Int32, "(if $block (result i32) i32.const 0 (then i32.const 10) (else i32.const 5))"));
        Assert.AreEqual(10, EvaluateConstExpr(WasmType.Int32, "(if $block (result i32) i32.const 0 (then i32.const 5) (else i32.const 10))"));
        Assert.AreEqual(5, EvaluateConstExpr(WasmType.Int32, "(if $block (result i32) i32.const 1 (then i32.const 5) (else i32.const 10))"));
        Assert.AreEqual(5, EvaluateConstExpr(WasmType.Int32, "i32.const 1 (if $block (result i32) (then i32.const 5) (else i32.const 10))"));
        Assert.AreEqual(5, EvaluateConstExpr(WasmType.Int32, "i32.const 1 (if (then)) i32.const 5"));
        Assert.AreEqual(5, EvaluateConstExpr(WasmType.Int32, "i32.const 1 if $block (result i32) i32.const 5 else i32.const 10 end"));
        Assert.AreEqual(5, EvaluateConstExpr(WasmType.Int32, "i32.const 0 i32.const 5 i32.store offset=2 align=2 i32.const 0 i32.load offset=2 align=2"));
        Assert.AreEqual(5, EvaluateConstExpr(WasmType.Int32, "(i32.const 0) (i32.const 5) (i32.store offset=2 align=2) (i32.const 0) (i32.load offset=2 align=2)"));
        Assert.AreEqual(10.0f, EvaluateConstExpr(WasmType.Float32, "f32.const 10"));
        Assert.AreEqual(10.0, EvaluateConstExpr(WasmType.Float64, "f64.const 10"));
        Assert.AreEqual(10.0f, EvaluateConstExpr(WasmType.Float32, "f32.const 10.0"));
        Assert.AreEqual(10.0, EvaluateConstExpr(WasmType.Float64, "f64.const 10.0"));
        Assert.AreEqual(-10.0f, EvaluateConstExpr(WasmType.Float32, "f32.const -10"));
        Assert.AreEqual(-10.0, EvaluateConstExpr(WasmType.Float64, "f64.const -10"));
        Assert.AreEqual(5, EvaluateConstExpr(WasmType.Int32, "(local $x i32) i32.const 5 local.set $x local.get $x"));
        Assert.AreEqual(5, EvaluateConstExpr(WasmType.Int32, "(local $x i32) i32.const 5 local.tee $x"));
        Assert.AreEqual(5, EvaluateConstExpr(WasmType.Int32, "call $constant_five"));
        Assert.AreEqual(1, EvaluateConstExpr(WasmType.Int32, "memory.size"));
        Assert.AreEqual(3, EvaluateConstExpr(WasmType.Int32, "i32.const 1 memory.grow memory.size i32.add"));
        Assert.AreEqual(5, EvaluateConstExpr(WasmType.Int32, "(block $block (result i32) (i32.const 5) (br $block) (drop) (i32.const 3))"));
        Assert.AreEqual(22, EvaluateConstExpr(WasmType.Int32, "(block (block (br_table 1 0 (i32.const 0)) (return (i32.const 21)) ) (return (i32.const 20)) ) (i32.const 22)"));
        Assert.AreEqual(20, EvaluateConstExpr(WasmType.Int32, "(block (block (br_table 1 0 (i32.const 1)) (return (i32.const 21)) ) (return (i32.const 20)) ) (i32.const 22)"));
        Assert.AreEqual(5, EvaluateConstExpr(WasmType.Int32, "global.get $five"));
        Assert.AreEqual(20, EvaluateConstExpr(WasmType.Int32, "i32.const 20 global.set $five global.get $five"));
    }

    /// <summary>
    /// Asserts that assembling the given module text raises a
    /// <see cref="PixieException"/> (i.e., the assembler reports an error).
    /// </summary>
    /// <param name="text">The module text that is expected to be invalid.</param>
    private static void AssertInvalidModule(string text)
    {
        Assert.Throws(
            typeof(PixieException),
            () => AssembleModule(text));
    }

    /// <summary>
    /// Assembles the given textual module with a log that turns any
    /// reported error into a test failure.
    /// </summary>
    /// <param name="text">The module text to assemble.</param>
    /// <returns>The assembled binary module.</returns>
    private static WasmFile AssembleModule(string text)
    {
        var log = new TestLog(new[] { Severity.Error }, NullLog.Instance);
        var assembler = new Assembler(log);
        return assembler.AssembleModule(text);
    }

    /// <summary>
    /// Wraps an instruction sequence in a module that also provides one
    /// page of memory, a mutable global '$five' (= 5) and a helper
    /// function '$constant_five', then instantiates the module and invokes
    /// the exported function to obtain the expression's value.
    /// </summary>
    /// <param name="resultType">The result type of the expression.</param>
    /// <param name="expr">The instruction sequence to evaluate.</param>
    /// <returns>The value produced by the expression.</returns>
    private static object EvaluateConstExpr(WasmType resultType, string expr)
    {
        var asm = AssembleModule($"(module (memory 1) (global $five (mut i32) (i32.const 5)) (func $f (export \"f\") (result {DumpHelpers.WasmTypeToString(resultType)}) {expr}) (func $constant_five (result i32) i32.const 5))");
        var instance = ModuleInstance.Instantiate(asm, new PredefinedImporter());
        return instance.ExportedFunctions["f"].Invoke(Array.Empty<object>())[0];
    }
}
}
<|start_filename|>libwasm/MemorySection.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using Wasm.Binary;
namespace Wasm
{
/// <summary>
/// A type of section that defines zero or more memories.
/// </summary>
public sealed class MemorySection : Section
{
    /// <summary>
    /// Creates an empty memory section.
    /// </summary>
    public MemorySection()
        : this(new List<MemoryType>())
    {
    }

    /// <summary>
    /// Creates a memory section that contains the given memory specifications.
    /// </summary>
    public MemorySection(IEnumerable<MemoryType> memories)
        : this(memories, new byte[0])
    {
    }

    /// <summary>
    /// Creates a memory section that contains the given memory specifications,
    /// followed by a trailing byte array of extra payload.
    /// </summary>
    public MemorySection(IEnumerable<MemoryType> memories, byte[] extraPayload)
    {
        Memories = new List<MemoryType>(memories);
        ExtraPayload = extraPayload;
    }

    /// <inheritdoc/>
    public override SectionName Name => new SectionName(SectionCode.Memory);

    /// <summary>
    /// Gets a list that contains the limits of all memories defined by this section.
    /// </summary>
    /// <returns>The section's list of memory limits.</returns>
    public List<MemoryType> Memories { get; private set; }

    /// <summary>
    /// Gets this memory section's additional payload.
    /// </summary>
    /// <returns>The additional payload, as an array of bytes.</returns>
    public byte[] ExtraPayload { get; set; }

    /// <inheritdoc/>
    public override void WritePayloadTo(BinaryWasmWriter writer)
    {
        // Layout: entry count, each memory's limits, then any extra bytes.
        writer.WriteVarUInt32((uint)Memories.Count);
        foreach (var memory in Memories)
        {
            memory.WriteTo(writer);
        }
        writer.Writer.Write(ExtraPayload);
    }

    /// <inheritdoc/>
    public override void Dump(TextWriter writer)
    {
        writer.Write(Name.ToString());
        writer.Write("; number of entries: ");
        writer.Write(Memories.Count);
        writer.WriteLine();
        for (int index = 0; index < Memories.Count; index++)
        {
            writer.Write("#");
            writer.Write(index);
            writer.Write(" -> ");
            Memories[index].Dump(writer);
            writer.WriteLine();
        }
        if (ExtraPayload.Length > 0)
        {
            writer.Write("Extra payload size: ");
            writer.Write(ExtraPayload.Length);
            writer.WriteLine();
            DumpHelpers.DumpBytes(ExtraPayload, writer);
            writer.WriteLine();
        }
    }

    /// <summary>
    /// Reads the memory section with the given header.
    /// </summary>
    /// <param name="header">The section header.</param>
    /// <param name="reader">The WebAssembly file reader.</param>
    /// <returns>The parsed section.</returns>
    public static MemorySection ReadSectionPayload(SectionHeader header, BinaryWasmReader reader)
    {
        long payloadStart = reader.Position;
        // Parse the memory count followed by that many limit specifications.
        uint memoryCount = reader.ReadVarUInt32();
        var memories = new List<MemoryType>();
        for (uint i = 0; i < memoryCount; i++)
        {
            memories.Add(MemoryType.ReadFrom(reader));
        }
        // Whatever is left of the payload becomes the extra byte array.
        var trailingBytes = reader.ReadRemainingPayload(payloadStart, header);
        return new MemorySection(memories, trailingBytes);
    }
}
/// <summary>
/// Describes a linear memory.
/// </summary>
public sealed class MemoryType
{
    /// <summary>
    /// Creates a new linear memory description from the given limits.
    /// </summary>
    /// <param name="limits">The linear memory's limits.</param>
    public MemoryType(ResizableLimits limits)
    {
        Limits = limits;
    }

    /// <summary>
    /// Gets this memory's limits.
    /// </summary>
    /// <returns>This memory's limits.</returns>
    public ResizableLimits Limits { get; set; }

    /// <summary>
    /// Gets the size of a single page, in bytes.
    /// </summary>
    public const uint PageSize = 64 * 1024;

    /// <summary>
    /// Writes this memory description to the given binary WebAssembly file.
    /// </summary>
    /// <param name="writer">The writer for a binary WebAssembly file.</param>
    public void WriteTo(BinaryWasmWriter writer) => Limits.WriteTo(writer);

    /// <summary>
    /// Writes a textual representation of this memory description to the given writer.
    /// </summary>
    /// <param name="writer">The writer to which text is written.</param>
    public void Dump(TextWriter writer) => Limits.Dump(writer);

    /// <summary>
    /// Reads a single memory description from the given reader.
    /// </summary>
    /// <returns>The memory description.</returns>
    public static MemoryType ReadFrom(BinaryWasmReader reader) =>
        new MemoryType(reader.ReadResizableLimits());
}
}
<|start_filename|>libwasm/ImportSection.cs<|end_filename|>
using System.Collections.Generic;
using System.IO;
using Wasm.Binary;
namespace Wasm
{
/// <summary>
/// A type of section that imports values.
/// </summary>
public sealed class ImportSection : Section
{
    /// <summary>
    /// Creates an empty import section.
    /// </summary>
    public ImportSection()
        : this(new List<ImportedValue>())
    {
    }

    /// <summary>
    /// Creates an import section from a sequence of imports.
    /// </summary>
    /// <param name="imports">A sequence of imports to put in the import section.</param>
    public ImportSection(IEnumerable<ImportedValue> imports)
        : this(imports, new byte[0])
    {
    }

    /// <summary>
    /// Creates an import section from a sequence of imports and a trailing payload.
    /// </summary>
    /// <param name="imports">A sequence of imports to put in the import section.</param>
    /// <param name="extraPayload">
    /// A sequence of bytes that have no intrinsic meaning; they are part
    /// of the import section but are placed after the import section's actual contents.
    /// </param>
    public ImportSection(IEnumerable<ImportedValue> imports, byte[] extraPayload)
    {
        Imports = new List<ImportedValue>(imports);
        ExtraPayload = extraPayload;
    }

    /// <inheritdoc/>
    public override SectionName Name => new SectionName(SectionCode.Import);

    /// <summary>
    /// Gets the list of all values that are exported by this section.
    /// </summary>
    /// <returns>A list of all values exported by this section.</returns>
    public List<ImportedValue> Imports { get; private set; }

    /// <summary>
    /// Gets this function section's additional payload.
    /// </summary>
    /// <returns>The additional payload, as an array of bytes.</returns>
    public byte[] ExtraPayload { get; set; }

    /// <inheritdoc/>
    public override void WritePayloadTo(BinaryWasmWriter writer)
    {
        // Layout: entry count, each import entry, then any extra bytes.
        writer.WriteVarUInt32((uint)Imports.Count);
        foreach (var entry in Imports)
        {
            entry.WriteTo(writer);
        }
        writer.Writer.Write(ExtraPayload);
    }

    /// <summary>
    /// Reads the import section with the given header.
    /// </summary>
    /// <param name="header">The section header.</param>
    /// <param name="reader">A reader for a binary WebAssembly file.</param>
    /// <returns>The parsed section.</returns>
    public static ImportSection ReadSectionPayload(
        SectionHeader header, BinaryWasmReader reader)
    {
        long payloadStart = reader.Position;
        // Parse the import count followed by that many import entries.
        uint importCount = reader.ReadVarUInt32();
        var entries = new List<ImportedValue>();
        for (uint i = 0; i < importCount; i++)
        {
            entries.Add(ImportedValue.ReadFrom(reader));
        }
        // Whatever is left of the payload becomes the extra byte array.
        var trailingBytes = reader.ReadRemainingPayload(payloadStart, header);
        return new ImportSection(entries, trailingBytes);
    }

    /// <inheritdoc/>
    public override void Dump(TextWriter writer)
    {
        writer.Write(Name.ToString());
        writer.Write("; number of entries: ");
        writer.Write(Imports.Count);
        writer.WriteLine();
        for (int index = 0; index < Imports.Count; index++)
        {
            writer.Write("#");
            writer.Write(index);
            writer.Write(" -> ");
            Imports[index].Dump(writer);
            writer.WriteLine();
        }
        if (ExtraPayload.Length > 0)
        {
            writer.Write("Extra payload size: ");
            writer.Write(ExtraPayload.Length);
            writer.WriteLine();
            DumpHelpers.DumpBytes(ExtraPayload, writer);
            writer.WriteLine();
        }
    }
}
/// <summary>
/// An entry in an import section.
/// </summary>
public abstract class ImportedValue
{
    /// <summary>
    /// Creates an import value from the given pair of names.
    /// </summary>
    /// <param name="moduleName">The name of the module from which a value is imported.</param>
    /// <param name="fieldName">The name of the value that is imported.</param>
    public ImportedValue(string moduleName, string fieldName)
    {
        this.ModuleName = moduleName;
        this.FieldName = fieldName;
    }

    /// <summary>
    /// Gets or sets the name of the module from which a value is imported.
    /// </summary>
    /// <returns>The name of the module from which a value is imported.</returns>
    public string ModuleName { get; set; }

    /// <summary>
    /// Gets or sets the name of the value that is imported.
    /// </summary>
    /// <returns>The name of the value that is imported.</returns>
    public string FieldName { get; set; }

    /// <summary>
    /// Gets the kind of value that is exported.
    /// </summary>
    /// <returns>The kind of value that is exported.</returns>
    public abstract ExternalKind Kind { get; }

    /// <summary>
    /// Writes the contents of this imported value to the given binary WebAssembly writer.
    /// </summary>
    /// <param name="writer">A WebAssembly writer.</param>
    protected abstract void WriteContentsTo(BinaryWasmWriter writer);

    /// <summary>
    /// Dumps the contents of this imported value to the given text writer.
    /// </summary>
    /// <param name="writer">A text writer.</param>
    protected abstract void DumpContents(TextWriter writer);

    /// <summary>
    /// Writes this exported value to the given WebAssembly file writer.
    /// </summary>
    /// <param name="writer">The WebAssembly file writer.</param>
    public void WriteTo(BinaryWasmWriter writer)
    {
        // Binary layout: module name, field name, kind byte, then the
        // kind-specific contents written by the subclass.
        writer.WriteString(ModuleName);
        writer.WriteString(FieldName);
        writer.Writer.Write((byte)Kind);
        WriteContentsTo(writer);
    }

    /// <summary>
    /// Writes a textual representation of this exported value to the given writer.
    /// </summary>
    /// <param name="writer">The writer to which text is written.</param>
    public void Dump(TextWriter writer)
    {
        writer.Write(
            "from \"{0}\" import {1} \"{2}\": ",
            ModuleName,
            // Use culture-invariant lowercasing so the dump does not change
            // under culture-sensitive casing rules (e.g. Turkish 'I'); the
            // previous '((object)Kind).ToString().ToLower()' also boxed the
            // enum needlessly.
            Kind.ToString().ToLowerInvariant(),
            FieldName);
        DumpContents(writer);
    }

    /// <summary>
    /// Reads an imported value from the given binary WebAssembly reader.
    /// </summary>
    /// <param name="reader">The WebAssembly reader.</param>
    /// <returns>The imported value that was read.</returns>
    public static ImportedValue ReadFrom(BinaryWasmReader reader)
    {
        string moduleName = reader.ReadString();
        string fieldName = reader.ReadString();
        var kind = (ExternalKind)reader.ReadByte();
        switch (kind)
        {
            case ExternalKind.Function:
                return new ImportedFunction(moduleName, fieldName, reader.ReadVarUInt32());
            case ExternalKind.Global:
                return new ImportedGlobal(moduleName, fieldName, GlobalType.ReadFrom(reader));
            case ExternalKind.Memory:
                return new ImportedMemory(moduleName, fieldName, MemoryType.ReadFrom(reader));
            case ExternalKind.Table:
                return new ImportedTable(moduleName, fieldName, TableType.ReadFrom(reader));
            default:
                // An unrecognized kind byte means the file is malformed or
                // uses an extension this library does not understand.
                throw new WasmException("Unknown imported value kind: " + kind);
        }
    }
}
/// <summary>
/// Describes an entry in the import section that imports a function.
/// </summary>
public sealed class ImportedFunction : ImportedValue
{
/// <summary>
/// Creates a function import from the given module name, field and function index.
/// </summary>
/// <param name="moduleName">The name of the module from which a value is imported.</param>
/// <param name="fieldName">The name of the value that is imported.</param>
/// <param name="typeIndex">The type index of the function signature.</param>
public ImportedFunction(string moduleName, string fieldName, uint typeIndex)
: base(moduleName, fieldName)
{
this.TypeIndex = typeIndex;
}
/// <summary>
/// Gets or sets the type index of the function signature.
/// </summary>
/// <returns>The type index of the function signature.</returns>
public uint TypeIndex { get; set; }
/// <inheritdoc/>
public override ExternalKind Kind => ExternalKind.Function;
/// <inheritdoc/>
protected override void DumpContents(TextWriter writer)
{
writer.Write("type #{0}", TypeIndex);
}
/// <inheritdoc/>
protected override void WriteContentsTo(BinaryWasmWriter writer)
{
writer.WriteVarUInt32(TypeIndex);
}
}
/// <summary>
/// Describes an entry in the import section that imports a table.
/// </summary>
public sealed class ImportedTable : ImportedValue
{
/// <summary>
/// Creates a table import from the given module name, field and table type.
/// </summary>
/// <param name="moduleName">The name of the module from which a value is imported.</param>
/// <param name="fieldName">The name of the value that is imported.</param>
/// <param name="table">A description of the imported table.</param>
public ImportedTable(string moduleName, string fieldName, TableType table)
: base(moduleName, fieldName)
{
this.Table = table;
}
/// <summary>
/// Gets or sets a description of the table that is imported.
/// </summary>
/// <returns>A description of the table that is imported.</returns>
public TableType Table { get; set; }
/// <inheritdoc/>
public override ExternalKind Kind => ExternalKind.Table;
/// <inheritdoc/>
protected override void DumpContents(TextWriter writer)
{
Table.Dump(writer);
}
/// <inheritdoc/>
protected override void WriteContentsTo(BinaryWasmWriter writer)
{
Table.WriteTo(writer);
}
}
    /// <summary>
    /// Describes an entry in the import section that imports a linear memory.
    /// </summary>
    public sealed class ImportedMemory : ImportedValue
    {
        /// <summary>
        /// Creates a memory import from the given module name, field and memory type.
        /// </summary>
        /// <param name="moduleName">The name of the module from which a value is imported.</param>
        /// <param name="fieldName">The name of the value that is imported.</param>
        /// <param name="memory">A description of the imported memory.</param>
        public ImportedMemory(string moduleName, string fieldName, MemoryType memory)
            : base(moduleName, fieldName)
        {
            this.Memory = memory;
        }

        /// <summary>
        /// Gets or sets a description of the linear memory that is imported.
        /// </summary>
        /// <returns>A description of the linear memory that is imported.</returns>
        public MemoryType Memory { get; set; }

        /// <inheritdoc/>
        public override ExternalKind Kind => ExternalKind.Memory;

        /// <inheritdoc/>
        protected override void DumpContents(TextWriter writer)
        {
            Memory.Dump(writer);
        }

        /// <inheritdoc/>
        protected override void WriteContentsTo(BinaryWasmWriter writer)
        {
            Memory.WriteTo(writer);
        }
    }
/// <summary>
/// Describes an entry in the import section that imports a global variable.
/// </summary>
public sealed class ImportedGlobal : ImportedValue
{
/// <summary>
/// Creates a global import from the given module name, field and global type.
/// </summary>
/// <param name="moduleName">The name of the module from which a value is imported.</param>
/// <param name="fieldName">The name of the value that is imported.</param>
/// <param name="global">A description of the imported global.</param>
public ImportedGlobal(string moduleName, string fieldName, GlobalType global)
: base(moduleName, fieldName)
{
this.Global = global;
}
/// <summary>
/// Gets or sets a description of the global variable that is imported.
/// </summary>
/// <returns>A description of the global variable that is imported.</returns>
public GlobalType Global { get; set; }
/// <inheritdoc/>
public override ExternalKind Kind => ExternalKind.Global;
/// <inheritdoc/>
protected override void DumpContents(TextWriter writer)
{
Global.Dump(writer);
}
/// <inheritdoc/>
protected override void WriteContentsTo(BinaryWasmWriter writer)
{
Global.WriteTo(writer);
}
}
}
<|start_filename|>libwasm/Interpret/IImporter.cs<|end_filename|>
using System;
using System.Collections.Generic;
namespace Wasm.Interpret
{
    /// <summary>
    /// Defines a specification for objects that resolve WebAssembly imports.
    /// </summary>
    /// <remarks>
    /// Implementations are consulted once per import entry when a module is
    /// instantiated; each method maps an import description to a concrete
    /// runtime object.
    /// </remarks>
    public interface IImporter
    {
        /// <summary>
        /// Imports the linear memory with the given description.
        /// </summary>
        /// <param name="description">Describes the memory to import.</param>
        /// <returns>An imported memory.</returns>
        LinearMemory ImportMemory(ImportedMemory description);

        /// <summary>
        /// Imports the global variable with the given description.
        /// </summary>
        /// <param name="description">Describes the global variable to import.</param>
        /// <returns>An imported global variable.</returns>
        Variable ImportGlobal(ImportedGlobal description);

        /// <summary>
        /// Imports the function with the given description.
        /// </summary>
        /// <param name="description">Describes the function to import.</param>
        /// <param name="signature">The signature of the function to import.</param>
        /// <returns>An imported function.</returns>
        FunctionDefinition ImportFunction(ImportedFunction description, FunctionType signature);

        /// <summary>
        /// Imports the table with the given description.
        /// </summary>
        /// <param name="description">Describes the table to import.</param>
        /// <returns>An imported table.</returns>
        FunctionTable ImportTable(ImportedTable description);
    }
}
<|start_filename|>libwasm/Interpret/DelegateFunctionDefinition.cs<|end_filename|>
using System;
using System.Collections.Generic;
namespace Wasm.Interpret
{
/// <summary>
/// Defines a function definition implementation that calls a delegate when invoked.
/// </summary>
public sealed class DelegateFunctionDefinition : FunctionDefinition
{
/// <summary>
/// Creates a function definition from the given delegate.
/// </summary>
/// <param name="parameterTypes">The list of parameter types.</param>
/// <param name="returnTypes">The list of return types.</param>
/// <param name="implementation">The delegate that implements the function definition.</param>
public DelegateFunctionDefinition(
IReadOnlyList<WasmValueType> parameterTypes,
IReadOnlyList<WasmValueType> returnTypes,
Func<IReadOnlyList<object>, IReadOnlyList<object>> implementation)
{
this.paramTypes = parameterTypes;
this.retTypes = returnTypes;
this.Implementation = implementation;
}
private IReadOnlyList<WasmValueType> paramTypes;
private IReadOnlyList<WasmValueType> retTypes;
/// <summary>
/// Gets the delegate that implements this function definition.
/// </summary>
/// <returns>The delegate that implements this function definition.</returns>
public Func<IReadOnlyList<object>, IReadOnlyList<object>> Implementation { get; private set; }
/// <inheritdoc/>
public override IReadOnlyList<WasmValueType> ParameterTypes => paramTypes;
/// <inheritdoc/>
public override IReadOnlyList<WasmValueType> ReturnTypes => retTypes;
/// <inheritdoc/>
public override IReadOnlyList<object> Invoke(IReadOnlyList<object> arguments, uint callStackDepth = 0)
{
return Implementation(arguments);
}
}
}
<|start_filename|>unit-tests/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Diagnostics;
using Loyc;
using Loyc.MiniTest;
using Loyc.Syntax;
using Wasm.Interpret;
using Wasm.Optimize;
using Wasm.Scripts;
using Wasm.Text;
namespace Wasm.UnitTests
{
    // Test driver based on Loyc project: https://github.com/qwertie/ecsharp/blob/master/Core/Tests/Program.cs
    public static class Program
    {
        // Menu of test suites: each entry pairs a human-readable description
        // with a runner that returns the number of failed tests.
        public static readonly List<Pair<string, Func<int>>> Menu = new List<Pair<string, Func<int>>>()
        {
            new Pair<string, Func<int>>("Run libwasm-interpret unit tests", LibwasmInterpret),
            new Pair<string, Func<int>>("Run libwasm-optimize unit tests", LibwasmOptimize),
            new Pair<string, Func<int>>("Run libwasm-text unit tests", LibwasmText),
            new Pair<string, Func<int>>("Run spec script tests", SpecScripts)
        };

        public static void Main(string[] args)
        {
            // Workaround for MS bug: Assert(false) will not fire in debugger
            Debug.Listeners.Clear();
            Debug.Listeners.Add(new DefaultTraceListener());
            // The first command-line argument, if any, is fed to the menu as
            // if its characters had been typed interactively.
            if (RunMenu(Menu, args.Length > 0 ? args[0] : null) > 0)
                // Let the outside world know that something went wrong (e.g. Travis CI)
                Environment.ExitCode = 1;
        }

        // Yields key presses until Escape or Enter is pressed.
        private static IEnumerable<char> ConsoleChars()
        {
            for (ConsoleKeyInfo k; (k = Console.ReadKey(true)).Key != ConsoleKey.Escape
                && k.Key != ConsoleKey.Enter;)
                yield return k.KeyChar;
        }

        // Displays the menu repeatedly and runs selected suites until input
        // is exhausted. A hex digit selects one suite; a space runs them all.
        // Returns the total error count across all runs.
        public static int RunMenu(IList<Pair<string, Func<int>>> menu, IEnumerable<char> input)
        {
            var reader = (input ?? ConsoleChars()).GetEnumerator();
            int errorCount = 0;
            for (;;)
            {
                Console.WriteLine();
                Console.WriteLine("What do you want to do? (Esc to quit)");
                for (int i = 0; i < menu.Count; i++)
                    Console.WriteLine(PrintHelpers.HexDigitChar(i + 1) + ". " + menu[i].Key);
                Console.WriteLine("Space. Run all tests");
                if (!reader.MoveNext())
                    break;
                char c = reader.Current;
                if (c == ' ')
                {
                    // Run every suite in menu order, accumulating failures.
                    for (int i = 0; i < menu.Count; i++)
                    {
                        Console.WriteLine();
                        ConsoleMessageSink.WriteColoredMessage(ConsoleColor.White, i + 1, menu[i].Key);
                        errorCount += menu[i].Value();
                    }
                }
                else
                {
                    // Menu items are 1-based hex digits; ignore out-of-range keys.
                    int i = ParseHelpers.HexDigitValue(c);
                    if (i > 0 && i <= menu.Count)
                        errorCount += menu[i - 1].Value();
                }
            }
            return errorCount;
        }

        // Runs the libwasm-interpret suites; returns the failure count.
        public static int LibwasmInterpret()
        {
            return RunTests.RunMany(
                new DefaultInterpreterTests(),
                new LinearMemoryTests());
        }

        // Runs the libwasm-optimize suites; returns the failure count.
        public static int LibwasmOptimize()
        {
            return RunTests.RunMany(
                new FunctionBodyOptimizationTests());
        }

        // Runs the libwasm-text suites; returns the failure count.
        public static int LibwasmText()
        {
            return RunTests.RunMany(
                new AssemblerTests(),
                new LexerTests(),
                new ParserTests());
        }

        // Runs the WebAssembly spec script suites; returns the failure count.
        public static int SpecScripts()
        {
            return RunTests.RunMany(
                new ScriptTests());
        }
    }
}
<|start_filename|>libwasm/Optimize/WasmFileOptimizations.cs<|end_filename|>
using System;
namespace Wasm.Optimize
{
/// <summary>
/// Defines convenience methods for WebAssembly file optimization.
/// </summary>
public static class WasmFileOptimizations
{
/// <summary>
/// Applies all known optimizations to the given WebAssembly file.
/// </summary>
/// <param name="file">The file to optimize.</param>
public static void Optimize(this WasmFile file)
{
file.CompressFunctionTypes();
foreach (var section in file.Sections)
{
if (section is CodeSection)
{
((CodeSection)section).Optimize();
}
}
}
/// <summary>
/// Applies all known optimizations to the given code section.
/// </summary>
/// <param name="section">The code section to optimize.</param>
public static void Optimize(this CodeSection section)
{
var optimizer = PeepholeOptimizer.DefaultOptimizer;
foreach (var body in section.Bodies)
{
// Compress local entries.
body.CompressLocalEntries();
// Apply peephole optimizations.
var optInstructions = optimizer.Optimize(body.BodyInstructions);
body.BodyInstructions.Clear();
body.BodyInstructions.AddRange(optInstructions);
}
}
}
}
| jonathanvdc/cs-wasm |
<|start_filename|>ChartJSWrapper/ChartJSWrapper-IOS/ViewController.h<|end_filename|>
//
// ViewController.h
// ChartJSWrapper-IOS
//
// Created by <NAME> on 24/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "PulldownMenu.h"
// Root view controller of the iOS demo app; adopts the pull-down menu,
// scroll-view and navigation-bar delegate protocols.
@interface ViewController : UIViewController<PulldownMenuDelegate, UIScrollViewDelegate, UINavigationBarDelegate>
@end
<|start_filename|>ChartJSWrapper/CW/CWTypes.h<|end_filename|>
//
// CWTypes.h
// ChartJSWrapper
//
// Created by <NAME> on 24/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import <TargetConditionals.h>
#import <Foundation/Foundation.h>
// Platform abstraction: map CWColor/CWWebView onto the UIKit (iOS) or
// AppKit/WebKit (OS X) equivalents so the wrapper classes stay portable.
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#define CWColor UIColor
// NOTE(review): WKWebView is declared in WebKit, which is not imported here;
// consumers appear to import <WebKit/WebKit.h> themselves (see CWChart.h) — confirm.
#define CWWebView WKWebView
#else
#import <Cocoa/Cocoa.h>
#define CWColor NSColor
#define CWWebView WebView
#endif
<|start_filename|>ChartJSWrapper/CW/CWLineChartOptions.h<|end_filename|>
//
// CWLineChartOptions.h
// ChartJSWrapper
//
// Created by <NAME> on 21/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWGlobalOptions.h"
// Options for CWLineChart; mirrors the Chart.js line-chart options object.
// The trailing comments show the corresponding Chart.js default values.
// Unset (nil) properties fall back to the Chart.js defaults.
@interface CWLineChartOptions : CWGlobalOptions
///Boolean - Whether grid lines are shown across the chart
@property (nonatomic) CWBoolean* scaleShowGridLines;// : true,
//String - Colour of the grid lines
@property (nonatomic,strong) CWColor* scaleGridLineColor;// : "rgba(0,0,0,.05)",
//Number - Width of the grid lines
@property (nonatomic,strong) NSNumber* scaleGridLineWidth;// : 1,
//Boolean - Whether to show horizontal lines (except X axis)
@property (nonatomic) CWBoolean* scaleShowHorizontalLines;//: true,
//Boolean - Whether to show vertical lines (except Y axis)
@property (nonatomic) CWBoolean* scaleShowVerticalLines;//: true,
//Boolean - Whether the line is curved between points
@property (nonatomic) CWBoolean* bezierCurve;// : true,
//Number - Tension of the bezier curve between points
@property (nonatomic,strong) NSNumber* bezierCurveTension;// : 0.4,
//Boolean - Whether to show a dot for each point
@property (nonatomic) CWBoolean* pointDot;// : true,
//Number - Radius of each point dot in pixels
@property (nonatomic,strong) NSNumber* pointDotRadius;// : 4,
//Number - Pixel width of point dot stroke
@property (nonatomic,strong) NSNumber* pointDotStrokeWidth;// : 1,
//Number - amount extra to add to the radius to cater for hit detection outside the drawn point
@property (nonatomic,strong) NSNumber* pointHitDetectionRadius;// : 20,
//Boolean - Whether to show a stroke for datasets
@property (nonatomic) CWBoolean* datasetStroke;// : true,
//Number - Pixel width of dataset stroke
@property (nonatomic,strong) NSNumber* datasetStrokeWidth;// : 2,
//Boolean - Whether to fill the dataset with a colour
@property (nonatomic) CWBoolean* datasetFill;// : true,
//String - A legend template
@property (nonatomic,strong) NSString* legendTemplate;// : "<ul class=\"<%=name.toLowerCase()%>-legend\"><% for (var i=0; i<datasets.length; i++){%><li><span style=\"background-color:<%=datasets[i].strokeColor%>\"></span><%if(datasets[i].label){%><%=datasets[i].label%><%}%></li><%}%></ul>"
@end
<|start_filename|>ChartJSWrapper/CW/CWPolarAreaChart.h<|end_filename|>
//
// CWPolarAreaChart.h
// ChartJSWrapper
//
// Created by <NAME> on 22/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWChart.h"
#import "CWPolarAreaChartOptions.h"
#import "CWSegmentData.h"
// Wrapper around a Chart.js polar-area chart rendered in a web view.
@interface CWPolarAreaChart : CWChart
// `data` is an array of CWSegmentData describing the chart's segments;
// w/h are the canvas pixel dimensions.
- (instancetype) initWithWebView:(CWWebView*)webview name:(NSString*)name width:(NSInteger)w height:(NSInteger)h data:(NSArray*)data options:(CWPolarAreaChartOptions*) options;
// Overwrites the value of an existing segment; call -update to redraw.
- (void) setValue:(NSNumber*)val inSegment:(NSInteger)segment;
//.addData( segmentData, index )
// Inserts a new segment at the given index (mirrors Chart.js addData).
- (void) addData:(CWSegmentData*)data index:(NSNumber*)index;
// Removes the segment at the given index.
- (void) removeDataAt:(NSNumber*)index;
@end
<|start_filename|>ChartJSWrapper/CW/CWBoolean.h<|end_filename|>
//
// CWBoolean.h
// ChartJSWrapper
//
// Created by <NAME> on 21/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWObject.h"
// Boolean wrapper used by the option and data classes.
// NOTE(review): presumably exists so option values serialize as JSON
// true/false rather than 0/1 — confirm against CWObject's JSON conversion.
@interface CWBoolean : CWObject {
BOOL val;
}
// Shared true/false instances; use these instead of allocating.
+ (CWBoolean*) cwYES;
+ (CWBoolean*) cwNO;
@end
<|start_filename|>ChartJSWrapper/CW/CWPointDataSet.h<|end_filename|>
//
// CWPointDataSet.h
// ChartJSWrapper
//
// Created by <NAME> on 21/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWDataSet.h"
// Data set for line/radar charts: extends CWDataSet with the point
// styling colors. Trailing comments show the Chart.js example values.
@interface CWPointDataSet : CWDataSet
@property (nonatomic, strong) CWColor* pointColor;//: "rgba(220,220,220,1)",
@property (nonatomic, strong) CWColor* pointStrokeColor;//: "#fff",
@property (nonatomic, strong) CWColor* pointHighlightFill;//: "#fff",
@property (nonatomic, strong) CWColor* pointHighlightStroke;//: "rgba(220,220,220,1)",
//- (instancetype) initWithData:(NSArray*)data;
@end
<|start_filename|>ChartJSWrapper/CW/CWRadarChartData.h<|end_filename|>
//
// CWRadarChartData.h
// ChartJSWrapper
//
// Created by <NAME> on 22/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWLineChartData.h"
// Chart data for radar charts; identical in shape to line-chart data
// (labels plus point data sets), hence the empty subclass.
@interface CWRadarChartData : CWLineChartData
@end
<|start_filename|>ChartJSWrapper/CW/CWColors.h<|end_filename|>
//
// CWColors.h
// ChartJSWrapper
//
// Created by <NAME> on 23/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWTypes.h"
// Named color constants usable as keys into CWColors.colors.
// NOTE(review): the names match the "Flat UI" color palette — confirm.
extern NSString *const CWCTurquise;
extern NSString *const CWCEmerald;
extern NSString *const CWCPeterRiver;
extern NSString *const CWCAmethyst;
extern NSString *const CWCWetAsphalt;
extern NSString *const CWCGreenSea;
extern NSString *const CWCNephritis;
extern NSString *const CWCBelizeHole;
extern NSString *const CWCWisteria;
extern NSString *const CWCMidnightBlue;
extern NSString *const CWCSunFlower;
extern NSString *const CWCCarrot;
extern NSString *const CWCAlizarin;
extern NSString *const CWCClouds;
extern NSString *const CWCConcrete;
extern NSString *const CWCOrange;
extern NSString *const CWCPumpkin;
extern NSString *const CWCPomegrante;
extern NSString *const CWCSilver;
extern NSString *const CWCAsbestos;
// Shared palette of chart colors.
@interface CWColors : NSObject
// Maps the CWC* name constants to CWColor instances.
@property (nonatomic, strong) NSDictionary* colors;
// Returns a color from the palette; the selection strategy is an
// implementation detail not visible in this header.
- (CWColor*) pickColor;
// Process-wide shared instance.
+ (CWColors*) sharedColors;
@end
<|start_filename|>ChartJSWrapper/CW/CWBarChart.h<|end_filename|>
//
// CWBarChart.h
// ChartJSWrapper
//
// Created by <NAME> on 22/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWLineChart.h"
#import "CWBarChartData.h"
#import "CWBarChartOptions.h"
// Wrapper around a Chart.js bar chart; inherits the value-mutation API
// (setValue/addData/removeData) declared on CWLineChart.
@interface CWBarChart : CWLineChart
// `data` holds the labels and CWBarDataSet series; w/h are canvas pixels.
- (instancetype) initWithWebView:(CWWebView*)webview name:(NSString*)name width:(NSInteger)w height:(NSInteger)h data:(CWBarChartData*)data options:(CWBarChartOptions*) options;
@end
<|start_filename|>ChartJSWrapper/CW/CWLineChart.h<|end_filename|>
//
// CWLineChart.h
// ChartJSWrapper
//
// Created by <NAME> on 22/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWChart.h"
#import "CWLineChartData.h"
#import "CWLineChartOptions.h"
// Wrapper around a Chart.js line chart rendered in a web view.
@interface CWLineChart : CWChart
// `data` holds the labels and point data sets; w/h are canvas pixels.
- (instancetype) initWithWebView:(CWWebView*)webview name:(NSString*)name width:(NSInteger)w height:(NSInteger)h data:(CWLineChartData*)data options:(CWLineChartOptions*) options;
// Overwrites a single point value; call -update to redraw.
- (void) setValue:(NSNumber*)val inDataset:(NSInteger)dataset at:(NSInteger)data;
// Appends data under a new x-axis label (mirrors Chart.js addData:
// one value per data set).
- (void) addData:(NSArray*)values label:(NSString*)label;
// Removes data from the chart (mirrors Chart.js removeData, which drops
// the oldest point of every data set — confirm against Chart.js docs).
- (void) removeData;
@end
<|start_filename|>ChartJSWrapper/CW/CWDoughnutChart.h<|end_filename|>
//
// CWDonughtChart.h
// ChartJSWrapper
//
// Created by <NAME> on 23/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWPieChart.h"
// Doughnut chart: identical to a pie chart apart from the inner cutout
// (see CWPieChartOptions.percentageInnerCutout), hence the empty subclass.
@interface CWDoughnutChart : CWPieChart
@end
<|start_filename|>ChartJSWrapper/CW/CWBarChartData.h<|end_filename|>
//
// CWBarChartData.h
// ChartJSWrapper
//
// Created by <NAME> on 22/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWLabelledData.h"
// Chart data for bar charts: x-axis labels plus an array of bar data sets.
@interface CWBarChartData : CWLabelledData
// `labels` are the x-axis labels; `dataSet` is an array of CWBarDataSet.
- (instancetype) initWithLabels:(NSArray*)labels andBarDataSet:(NSArray*)dataSet;
@end
<|start_filename|>ChartJSWrapper/CW/CWSegmentData.h<|end_filename|>
//
// CWSegmentData.h
// ChartJSWrapper
//
// Created by <NAME> on 22/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWData.h"
// One segment of a polar-area, pie or doughnut chart. Trailing comments
// show the Chart.js example values.
@interface CWSegmentData : CWData
@property (nonatomic,strong) NSNumber* value;//: 300,
@property (nonatomic, strong) CWColor* color;//:"#F7464A",
@property (nonatomic, strong) CWColor* highlight;//: "#FF5A5E",
@property (nonatomic, strong) NSString* label;//: "Red"
@end
<|start_filename|>ChartJSWrapper/CW/CWRadarChartOptions.h<|end_filename|>
//
// CWRadarChartOptions.h
// ChartJSWrapper
//
// Created by <NAME> on 22/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWGlobalOptions.h"
// Options for CWRadarChart; mirrors the Chart.js radar-chart options object.
// The trailing comments show the corresponding Chart.js default values.
@interface CWRadarChartOptions : CWGlobalOptions
//Boolean - Whether to show lines for each scale point
@property (nonatomic) CWBoolean* scaleShowLine;// : true,
//Boolean - Whether we show the angle lines out of the radar
@property (nonatomic) CWBoolean* angleShowLineOut;// : true,
//Boolean - Whether to show labels on the scale
@property (nonatomic) CWBoolean* scaleShowLabels;// : false,
// Boolean - Whether the scale should begin at zero
@property (nonatomic) CWBoolean* scaleBeginAtZero;// : true,
//String - Colour of the angle line
@property (nonatomic,strong) CWColor* angleLineColor;// : "rgba(0,0,0,.1)",
//Number - Pixel width of the angle line
@property (nonatomic,strong) NSNumber* angleLineWidth;// : 1,
//String - Point label font declaration
@property (nonatomic,strong) NSString* pointLabelFontFamily;// : "'Arial'",
//String - Point label font weight
@property (nonatomic,strong) NSString* pointLabelFontStyle;// : "normal",
//Number - Point label font size in pixels
@property (nonatomic,strong) NSNumber* pointLabelFontSize;// : 10,
//String - Point label font colour
@property (nonatomic,strong) CWColor* pointLabelFontColor;// : "#666",
//Boolean - Whether to show a dot for each point
@property (nonatomic) CWBoolean* pointDot;// : true,
//Number - Radius of each point dot in pixels
@property (nonatomic,strong) NSNumber* pointDotRadius;// : 3,
//Number - Pixel width of point dot stroke
@property (nonatomic,strong) NSNumber* pointDotStrokeWidth;// : 1,
//Number - amount extra to add to the radius to cater for hit detection outside the drawn point
@property (nonatomic,strong) NSNumber* pointHitDetectionRadius;// : 20,
//Boolean - Whether to show a stroke for datasets
@property (nonatomic) CWBoolean* datasetStroke;// : true,
//Number - Pixel width of dataset stroke
@property (nonatomic,strong) NSNumber* datasetStrokeWidth;// : 2,
//Boolean - Whether to fill the dataset with a colour
@property (nonatomic) CWBoolean* datasetFill;// : true,
//String - A legend template
@property (nonatomic,strong) NSString* legendTemplate;// : "<ul class=\"<%=name.toLowerCase()%>-legend\"><% for (var i=0; i<datasets.length; i++){%><li><span style=\"background-color:<%=datasets[i].strokeColor%>\"></span><%if(datasets[i].label){%><%=datasets[i].label%><%}%></li><%}%></ul>"
@end
<|start_filename|>ChartJSWrapper/CW/CWChart.h<|end_filename|>
//
// CWChart.h
// ChartJSWrapper
//
// Created by <NAME> on 22/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWObject.h"
#import <WebKit/WebKit.h>
// Abstract base for all chart wrappers: holds the hosting web view, the
// JavaScript-side chart identifier (`name`) and the canvas dimensions,
// and bridges chart operations into the page's JavaScript (see cw.js).
@interface CWChart : CWObject
// The web view the chart is rendered in (weak: the view owns its charts' lifetime).
@property (nonatomic, weak, readonly) CWWebView* webview;
// Identifier under which the chart object is registered on the JS side.
@property (nonatomic, strong, readonly) NSString* name;
// Canvas pixel dimensions.
@property (nonatomic, readonly) NSInteger width;
@property (nonatomic, readonly) NSInteger height;
- (instancetype) initWithWebView:(CWWebView*)webview name:(NSString*)name width:(NSInteger)w height:(NSInteger)h;
// Inserts the chart into the web view's document.
- (void) addChart;
// Redraws the chart after data mutations (cw.js updateChart).
- (void) update;
// Destroys the chart and removes its DOM nodes (cw.js deleteChart).
- (void) removeChart;
// Low-level bridge: invokes the named global JS function with the given arguments.
- (void) callJavaScriptMethod:(NSString*)method withArguments:(NSArray*)args;
@end
<|start_filename|>ChartJSWrapper/CW/CWBarChartOptions.h<|end_filename|>
//
// CWBarChartOptions.h
// ChartJSWrapper
//
// Created by <NAME> on 22/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWGlobalOptions.h"
// Options for CWBarChart; mirrors the Chart.js bar-chart options object.
// The trailing comments show the corresponding Chart.js default values.
@interface CWBarChartOptions : CWGlobalOptions
//Boolean - Whether the scale should start at zero, or an order of magnitude down from the lowest value
@property (nonatomic) CWBoolean* scaleBeginAtZero;// : true,
//Boolean - Whether grid lines are shown across the chart
@property (nonatomic) CWBoolean* scaleShowGridLines;// : true,
//String - Colour of the grid lines
@property (nonatomic,strong) CWColor* scaleGridLineColor;// : "rgba(0,0,0,.05)",
//Number - Width of the grid lines
@property (nonatomic,strong) NSNumber* scaleGridLineWidth;// : 1,
//Boolean - Whether to show horizontal lines (except X axis)
@property (nonatomic) CWBoolean* scaleShowHorizontalLines;//: true,
//Boolean - Whether to show vertical lines (except Y axis)
@property (nonatomic) CWBoolean* scaleShowVerticalLines;//: true,
//Boolean - If there is a stroke on each bar
@property (nonatomic) CWBoolean* barShowStroke;// : true,
//Number - Pixel width of the bar stroke
@property (nonatomic,strong) NSNumber* barStrokeWidth;// : 2,
//Number - Spacing between each of the X value sets
@property (nonatomic,strong) NSNumber* barValueSpacing;// : 5,
//Number - Spacing between data sets within X values
@property (nonatomic,strong) NSNumber* barDatasetSpacing;// : 1,
//String - A legend template
@property (nonatomic,strong) NSString* legendTemplate;// r : "<ul class=\"<%=name.toLowerCase()%>-legend\"><% for (var i=0; i<datasets.length; i++){%><li><span style=\"background-color:<%=datasets[i].fillColor%>\"></span><%if(datasets[i].label){%><%=datasets[i].label%><%}%></li><%}%></ul>"
@end
<|start_filename|>ChartJSWrapper/CW/CWPieChart.h<|end_filename|>
//
// CWPieChart.h
// ChartJSWrapper
//
// Created by <NAME> on 22/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWPolarAreaChart.h"
#import "CWPieChartOptions.h"
// Wrapper around a Chart.js pie chart. Segment mutation methods are
// inherited from CWPolarAreaChart, which uses the same segment model.
@interface CWPieChart : CWPolarAreaChart
@property (nonatomic, strong, readonly) CWPieChartOptions* options;
// Array of CWSegmentData describing the pie's segments.
@property (nonatomic, strong,readonly) NSArray* data;
- (instancetype) initWithWebView:(CWWebView*)webview name:(NSString*)name width:(NSInteger)w height:(NSInteger)h data:(NSArray*)data options:(CWPieChartOptions*) options;
//- (void) setValue:(NSNumber*)val inSegment:(NSInteger)segment;
@end
<|start_filename|>ChartJSWrapper/CW/CWPieChartOptions.h<|end_filename|>
//
// CWPieChartOption.h
// ChartJSWrapper
//
// Created by <NAME> on 22/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWGlobalOptions.h"
// Options for CWPieChart/CWDoughnutChart; mirrors the Chart.js options
// object. The trailing comments show the Chart.js default values.
@interface CWPieChartOptions : CWGlobalOptions
//Boolean - Whether we should show a stroke on each segment
@property (nonatomic) CWBoolean* segmentShowStroke;// : true,
//String - The colour of each segment stroke
@property (nonatomic) CWColor* segmentStrokeColor;// : "#fff",
//Number - The width of each segment stroke
@property (nonatomic) NSNumber* segmentStrokeWidth;// : 2,
//Number - The percentage of the chart that we cut out of the middle
@property (nonatomic) NSNumber* percentageInnerCutout;// : 50, // This is 0 for Pie charts
//Number - Amount of animation steps
@property (nonatomic) NSNumber* animationSteps;// : 100,
//String - Animation easing effect
@property (nonatomic) NSString* animationEasing;// : "easeOutBounce",
//Boolean - Whether we animate the rotation of the Doughnut
@property (nonatomic) CWBoolean* animateRotate;// : true,
//Boolean - Whether we animate scaling the Doughnut from the centre
@property (nonatomic) CWBoolean* animateScale;// : false,
//String - A legend template
@property (nonatomic) NSString* legendTemplate;// : "<ul class=\"<%=name.toLowerCase()%>-legend\"><% for (var i=0; i<segments.length; i++){%><li><span style=\"background-color:<%=segments[i].fillColor%>\"></span><%if(segments[i].label){%><%=segments[i].label%><%}%></li><%}%></ul>"
@end
<|start_filename|>ChartJSWrapper/CW/CWObject.h<|end_filename|>
//
// CWObject.h
// ChartJSWrapper
//
// Created by <NAME> on 20/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWTypes.h"
// Base class for all wrapper types: provides conversion to JSON so
// charts/options can be injected into the web view as Chart.js config.
@interface CWObject : NSObject
// Converts an arbitrary value to its JSON-representable form.
- (id) convertToJSON:(id)val;
// Returns this object as a JSON-representable object graph.
- (id) asJSONObject;
// Returns this object serialized as a JSON string.
- (NSString*) JSON;
// Serializes an arbitrary value to a JSON string.
+ (NSString*) toJSONString:(id)val;
@end
<|start_filename|>ChartJSWrapper/ChartJSWrapper-IOS/PullDownMenu/PulldownMenu.h<|end_filename|>
//
// PulldownMenu.h
//
// Created by <NAME>
//
#import <UIKit/UIKit.h>
// Callbacks for menu selection and open/close animation completion.
@protocol PulldownMenuDelegate
-(void)menuItemSelected:(NSIndexPath *)indexPath;
-(void)pullDownAnimated:(BOOL)open;
@end
// A pull-down menu overlay: a table of items revealed by dragging a
// handle (or the navigation bar) downwards.
@interface PulldownMenu : UIView<UITableViewDataSource, UITableViewDelegate> {
UITableView *menuList;
NSMutableArray *menuItems;
UIView *handle;
UIView *masterView;
UIPanGestureRecognizer *navigationDragGestureRecognizer;
UIPanGestureRecognizer *handleDragGestureRecognizer;
UINavigationController *masterNavigationController;
UIDeviceOrientation currentOrientation;
float topMargin;
float tableHeight;
}
@property (nonatomic, assign) id<PulldownMenuDelegate> delegate;
@property (nonatomic, retain) UITableView *menuList;
@property (nonatomic, retain) UIView *handle;
/* Appearance Properties */
@property (nonatomic) float handleHeight;
@property (nonatomic) float animationDuration;
@property (nonatomic) float topMarginPortrait;
@property (nonatomic) float topMarginLandscape;
@property (nonatomic) UIColor *cellColor;
@property (nonatomic) UIColor *cellSelectedColor;
@property (nonatomic) UIColor *cellTextColor;
@property (nonatomic) UITableViewCellSelectionStyle cellSelectionStyle;
@property (nonatomic) UIFont *cellFont;
@property (nonatomic) float cellHeight;
// Whether the menu is currently fully open.
@property (nonatomic) BOOL fullyOpen;
// Attach to a navigation controller (drag on the nav bar opens the menu).
- (id)initWithNavigationController:(UINavigationController *)navigationController;
// Attach to an arbitrary container view.
- (id)initWithView:(UIView *)view;
// Appends a menu item with the given title; call before -loadMenu.
- (void)insertButton:(NSString *)title;
// Builds the table and handle; call after all items are inserted.
- (void)loadMenu;
// Programmatically toggles the open/close animation.
- (void)animateDropDown;
@end
<|start_filename|>ChartJSWrapper/CW/CWDataSet.h<|end_filename|>
//
// CWDataSet.h
// ChartJSWrapper
//
// Created by <NAME> on 20/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWTypes.h"
#import "CWObject.h"
#import "CWBoolean.h"
// One data series of a line/bar/radar chart: a label, fill/stroke colors
// and an ordered array of numeric values.
@interface CWDataSet : CWObject
@property (nonatomic, strong) NSString* label;
@property (nonatomic, strong) CWColor* fillColor;
@property (nonatomic, strong) CWColor* strokeColor;
// The series values, e.g. [65, 59, 80, 81, 56, 55, 40].
@property (nonatomic, strong,readonly) NSArray* data;//: [65, 59, 80, 81, 56, 55, 40]
- (instancetype) initWithData:(NSArray*)data;
// Replaces the value at index `data` in the data array.
- (void) setValue:(NSNumber*)val at:(NSInteger)data;
@end
<|start_filename|>ChartJSWrapper/CW/CW.h<|end_filename|>
//
// CWDataSets.h
// ChartJSWrapper
//
// Created by <NAME> on 20/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWDataSet.h"
#import "CWObject.h"
#import "CWBoolean.h"
#import "CWPointDataSet.h"
#import "CWLineChartData.h"
#import "CWLineChartOptions.h"
#import "CWLineChart.h"
#import "CWRadarChartData.h"
#import "CWRadarChartOptions.h"
#import "CWRadarChart.h"
#import "CWBarChartData.h"
#import "CWBarChartOptions.h"
#import "CWBarChart.h"
#import "CWBarDataSet.h"
#import "CWSegmentData.h"
#import "CWPolarAreaChartOptions.h"
#import "CWPolarAreaChart.h"
#import "CWPieChartOptions.h"
#import "CWPieChart.h"
#import "CWDoughnutChart.h"
#import "CWColors.h"
//defaults write hu.gyand.ChartJSWrapper WebKitDeveloperExtras -bool true
<|start_filename|>ChartJSWrapper/CW/Assets/cw.js<|end_filename|>
"use strict";
// Appends a new data point to the chart registered under chartId.
// dataStr is a JSON-encoded array of values (one per data set); label is
// the new x-axis label. Returns 'OK' so the native bridge can confirm.
function addPointData(chartId, dataStr, label) {
    var parsed = JSON.parse(dataStr);
    window[chartId].addData(parsed, label);
    return 'OK';
}
// Overwrites the value of point `index` in dataset `dataset` of the chart
// registered under chartId (redraw separately via updateChart).
function setPointValue(chartId, dataset, index, value) {
    window[chartId].datasets[dataset].points[index].value = value;
    return 'OK';
}
// Removes data from the chart registered under chartId by delegating to
// Chart.js removeData().
function removePointData(chartId) {
    window[chartId].removeData();
    return 'OK';
}
// Overwrites the value of bar `index` in dataset `dataset` of the chart
// registered under chartId (redraw separately via updateChart).
function setBarValue(chartId, dataset, index, value) {
    window[chartId].datasets[dataset].bars[index].value = value;
    return 'OK';
}
// Overwrites the value of segment `segment` of the chart registered
// under chartId (redraw separately via updateChart).
function setSegmentValue(chartId, segment, value) {
    window[chartId].segments[segment].value = value;
    return 'OK';
}
// Inserts a new segment (JSON-encoded in `segment`) at position `index`
// of the chart registered under chartId.
function addSegmentData(chartId, segment, index) {
    var parsed = JSON.parse(segment);
    window[chartId].addData(parsed, index);
    return 'OK';
}
// Removes the segment at `index` from the chart registered under chartId.
function removeSegmentData(chartId, index) {
    window[chartId].removeData(index);
    return 'OK';
}
// Redraws the chart registered under chartId after data mutations.
function updateChart(chartId) {
    window[chartId].update();
    return 'OK';
}
function deleteChart(chartId) {
    // Tear the chart down: destroy the Chart.js instance, remove the DOM
    // nodes created by createContext, and delete the global handle.
    var chart = window[chartId];
    chart.destroy();
    var canvas = document.getElementById(chartId + '_canvas');
    var wrapper = document.getElementById(chartId + '_div');
    wrapper.removeChild(canvas);
    document.body.removeChild(wrapper);
    delete window[chartId];
    return 'OK';
}
function createContext(chartId, chartWidth, chartHeight) {
    // Build a full-size wrapper <div> holding the chart's <canvas>, insert it
    // at the top of <body>, and return the canvas 2d drawing context.
    var wrapper = document.createElement('div');
    document.body.insertBefore(wrapper, document.body.firstChild);
    wrapper.id = chartId + '_div';
    wrapper.style.height = '100%';
    wrapper.style.width = '100%';
    var surface = document.createElement('canvas');
    wrapper.appendChild(surface);
    surface.id = chartId + '_canvas';
    surface.width = chartWidth;
    surface.height = chartHeight;
    return surface.getContext('2d');
}
function addLineChart(chartId, chartWidth, chartHeight, chartData, chartOptions) {
    // Decode the JSON payloads first (so a parse error leaves the DOM untouched),
    // then create the canvas and register the Line chart under window[chartId].
    var parsedData = JSON.parse(chartData);
    var parsedOptions = JSON.parse(chartOptions);
    var context = createContext(chartId, chartWidth, chartHeight);
    window[chartId] = new Chart(context).Line(parsedData, parsedOptions);
    return 'OK';
}
function addRadarChart(chartId, chartWidth, chartHeight, chartData, chartOptions) {
    // Decode the JSON payloads, create the canvas, and register the Radar chart.
    var parsedData = JSON.parse(chartData);
    var parsedOptions = JSON.parse(chartOptions);
    var context = createContext(chartId, chartWidth, chartHeight);
    window[chartId] = new Chart(context).Radar(parsedData, parsedOptions);
    return 'OK';
}
function addBarChart(chartId, chartWidth, chartHeight, chartData, chartOptions) {
    // Decode the JSON payloads, create the canvas, and register the Bar chart.
    var parsedData = JSON.parse(chartData);
    var parsedOptions = JSON.parse(chartOptions);
    var context = createContext(chartId, chartWidth, chartHeight);
    window[chartId] = new Chart(context).Bar(parsedData, parsedOptions);
    return 'OK';
}
function addPolarAreaChart(chartId, chartWidth, chartHeight, chartData, chartOptions) {
    // Decode the JSON payloads, create the canvas, and register the PolarArea chart.
    var parsedData = JSON.parse(chartData);
    var parsedOptions = JSON.parse(chartOptions);
    var context = createContext(chartId, chartWidth, chartHeight);
    window[chartId] = new Chart(context).PolarArea(parsedData, parsedOptions);
    return 'OK';
}
function addPieChart(chartId, chartWidth, chartHeight, chartData, chartOptions) {
    // Decode the JSON payloads, create the canvas, and register the Pie chart.
    var parsedData = JSON.parse(chartData);
    var parsedOptions = JSON.parse(chartOptions);
    var context = createContext(chartId, chartWidth, chartHeight);
    window[chartId] = new Chart(context).Pie(parsedData, parsedOptions);
    return 'OK';
}
function addDoughnutChart(chartId, chartWidth, chartHeight, chartData, chartOptions) {
    // Decode the JSON payloads, create the canvas, and register the Doughnut chart.
    var parsedData = JSON.parse(chartData);
    var parsedOptions = JSON.parse(chartOptions);
    var context = createContext(chartId, chartWidth, chartHeight);
    window[chartId] = new Chart(context).Doughnut(parsedData, parsedOptions);
    return 'OK';
}
<|start_filename|>ChartJSWrapper/ChartJSWrapper/AppDelegate.h<|end_filename|>
//
// AppDelegate.h
// ChartJSWrapper
//
// Created by <NAME> on 20/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import <Cocoa/Cocoa.h>
#import <WebKit/WebKit.h>
// Application delegate for the OS X demo app.
// Lifecycle hooks (didFinishLaunching etc.) live in the implementation file.
@interface AppDelegate : NSObject <NSApplicationDelegate>
@end
<|start_filename|>ChartJSWrapper/CW/CWData.h<|end_filename|>
//
// CWData.h
// ChartJSWrapper
//
// Created by <NAME> on 20/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "CWObject.h"
// Marker base class for chart data payloads (line/radar/bar/segment data).
// Adds no members of its own; presumably JSON serialization is inherited
// from CWObject — TODO confirm against CWObject.h.
@interface CWData : CWObject
//- (NSString*) JSON;
@end
<|start_filename|>ChartJSWrapper/CW/CWGlobalOptions.h<|end_filename|>
//
// CWGlobalOptions.h
// ChartJSWrapper
//
// Created by <NAME> on 21/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWObject.h"
#import "CWBoolean.h"
// Chart.js global option bag shared by all chart types. Each property maps 1:1
// to a Chart.js 1.x option of the same name; the trailing comment on each line
// shows the Chart.js default. Unset (nil) properties are omitted so Chart.js
// falls back to its own defaults.
// NOTE(review): CWColor is referenced here but not imported by this header —
// presumably pulled in via CWObject.h; confirm.
@interface CWGlobalOptions : CWObject
// Boolean - Whether to animate the chart
@property (nonatomic) CWBoolean* animation;//: true,
// Number - Number of animation steps
@property (nonatomic,strong) NSNumber* animationSteps;//: 60,
// String - Animation easing effect
@property (nonatomic,strong) NSString* animationEasing;//: "easeOutQuart",
// Boolean - If we should show the scale at all
@property (nonatomic) CWBoolean* showScale;//: true,
// Boolean - If we want to override with a hard coded scale
@property (nonatomic) CWBoolean* scaleOverride;//: false,
// ** Required if scaleOverride is true **
// Number - The number of steps in a hard coded scale
@property (nonatomic,strong) NSNumber* scaleSteps;//: null,
// Number - The value jump in the hard coded scale
@property (nonatomic,strong) NSNumber* scaleStepWidth;//: null,
// Number - The scale starting value
@property (nonatomic,strong) NSNumber* scaleStartValue;//: null,
// String - Colour of the scale line
@property (nonatomic,strong) CWColor* scaleLineColor;//@property (nonatomic,strong) CWColor* : "rgba(0,0,0,.1)",
// Number - Pixel width of the scale line
@property (nonatomic,strong) NSNumber* scaleLineWidth;//: 1,
// Boolean - Whether to show labels on the scale
@property (nonatomic) CWBoolean* scaleShowLabels;//: true,
// Interpolated JS string - can access value
@property (nonatomic,strong) NSString* scaleLabel;//: "<%=value%>",
// Boolean - Whether the scale should stick to integers, not floats even if drawing space is there
@property (nonatomic) CWBoolean* scaleIntegersOnly;//: true,
// Boolean - Whether the scale should start at zero, or an order of magnitude down from the lowest value
@property (nonatomic) CWBoolean* scaleBeginAtZero;//: false,
// String - Scale label font declaration for the scale label
@property (nonatomic,strong) NSString* scaleFontFamily;//: "'Helvetica Neue', 'Helvetica', 'Arial', sans-serif",
// Number - Scale label font size in pixels
@property (nonatomic,strong) NSNumber* scaleFontSize;//: 12,
// String - Scale label font weight style
@property (nonatomic,strong) NSString* scaleFontStyle;//: "normal",
// String - Scale label font colour
@property (nonatomic,strong) CWColor* scaleFontColor;//: "#666",
// Boolean - whether or not the chart should be responsive and resize when the browser does.
@property (nonatomic) CWBoolean* responsive;//: false,
// Boolean - whether to maintain the starting aspect ratio or not when responsive, if set to false, will take up entire container
@property (nonatomic) CWBoolean* maintainAspectRatio;//: true,
// Boolean - Determines whether to draw tooltips on the canvas or not
@property (nonatomic) CWBoolean* showTooltips;//: true,
// Function - Determines whether to execute the customTooltips function instead of drawing the built in tooltips (See [Advanced - External Tooltips](#advanced-usage-custom-tooltips))
@property (nonatomic) CWBoolean* customTooltips;//: false,
// Array - Array of string names to attach tooltip events
@property (nonatomic,strong) NSArray* tooltipEvents;//: ["mousemove", "touchstart", "touchmove"],
// String - Tooltip background colour
@property (nonatomic,strong) CWColor* tooltipFillColor;//: "rgba(0,0,0,0.8)",
// String - Tooltip label font declaration for the scale label
@property (nonatomic,strong) NSString* tooltipFontFamily;//: "'Helvetica Neue', 'Helvetica', 'Arial', sans-serif",
// Number - Tooltip label font size in pixels
@property (nonatomic,strong) NSNumber* tooltipFontSize;//: 14,
// String - Tooltip font weight style
@property (nonatomic,strong) NSString* tooltipFontStyle;//: "normal",
// String - Tooltip label font colour
@property (nonatomic,strong) CWColor* tooltipFontColor;//: "#fff",
// String - Tooltip title font declaration for the scale label
@property (nonatomic,strong) NSString* tooltipTitleFontFamily;//: "'Helvetica Neue', 'Helvetica', 'Arial', sans-serif",
// Number - Tooltip title font size in pixels
@property (nonatomic,strong) NSNumber* tooltipTitleFontSize;//: 14,
// String - Tooltip title font weight style
@property (nonatomic,strong) NSString* tooltipTitleFontStyle;//: "bold",
// String - Tooltip title font colour
@property (nonatomic,strong) CWColor* tooltipTitleFontColor;//: "#fff",
// Number - pixel width of padding around tooltip text
@property (nonatomic,strong) NSNumber* tooltipYPadding;//: 6,
// Number - pixel width of padding around tooltip text
@property (nonatomic,strong) NSNumber* tooltipXPadding;//: 6,
// Number - Size of the caret on the tooltip
@property (nonatomic,strong) NSNumber* tooltipCaretSize;//: 8,
// Number - Pixel radius of the tooltip border
@property (nonatomic,strong) NSNumber* tooltipCornerRadius;//: 6,
// Number - Pixel offset from point x to tooltip edge
@property (nonatomic,strong) NSNumber* tooltipXOffset;//: 10,
// String - Template string for single tooltips
@property (nonatomic,strong) NSString* tooltipTemplate;//: "<%if (label){%><%=label%>: <%}%><%= value %>",
// String - Template string for multiple tooltips
@property (nonatomic,strong) NSString* multiTooltipTemplate;//: "<%= value %>",
// Function - Will fire on animation progression.
//onAnimationProgress: function(){},
// Function - Will fire on animation completion.
//onAnimationComplete: function(){}
//}
@end
<|start_filename|>ChartJSWrapper/CW/CWPolarAreaChartOptions.h<|end_filename|>
//
// CWPolarAreaChartOptions.h
// ChartJSWrapper
//
// Created by <NAME> on 22/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWGlobalOptions.h"
// Options specific to polar-area charts; each property maps to the Chart.js 1.x
// option of the same name (the trailing comment shows the Chart.js default).
// NOTE(review): scaleBeginAtZero, animationSteps and animationEasing redeclare
// properties already present on CWGlobalOptions (with different Chart.js
// defaults for this chart type) — confirm the redeclaration is intentional.
@interface CWPolarAreaChartOptions : CWGlobalOptions
//Boolean - Show a backdrop to the scale label
@property (nonatomic) CWBoolean* scaleShowLabelBackdrop;// : true,
//String - The colour of the label backdrop
@property (nonatomic,strong) CWColor* scaleBackdropColor;// : "rgba(255,255,255,0.75)",
// Boolean - Whether the scale should begin at zero
@property (nonatomic) CWBoolean* scaleBeginAtZero;// : true,
//Number - The backdrop padding above & below the label in pixels
@property (nonatomic,strong) NSNumber* scaleBackdropPaddingY;// : 2,
//Number - The backdrop padding to the side of the label in pixels
@property (nonatomic,strong) NSNumber* scaleBackdropPaddingX;// : 2,
//Boolean - Show line for each value in the scale
@property (nonatomic) CWBoolean* scaleShowLine;// : true,
//Boolean - Stroke a line around each segment in the chart
@property (nonatomic) CWBoolean* segmentShowStroke;// : true,
//String - The colour of the stroke on each segement.
@property (nonatomic,strong) CWColor* segmentStrokeColor;// : "#fff",
//Number - The width of the stroke value in pixels
@property (nonatomic,strong) NSNumber* segmentStrokeWidth;// : 2,
//Number - Amount of animation steps
@property (nonatomic,strong) NSNumber* animationSteps;// : 100,
//String - Animation easing effect.
@property (nonatomic,strong) NSString* animationEasing;//@property (nonatomic,strong) NSString* : "easeOutBounce",
//Boolean - Whether to animate the rotation of the chart
@property (nonatomic) CWBoolean* animateRotate;// : true,
//Boolean - Whether to animate scaling the chart from the centre
@property (nonatomic) CWBoolean* animateScale;// : false,
//String - A legend template
@property (nonatomic,strong) NSString* legendTemplate;// : "<ul class=\"<%=name.toLowerCase()%>-legend\"><% for (var i=0; i<segments.length; i++){%><li><span style=\"background-color:<%=segments[i].fillColor%>\"></span><%if(segments[i].label){%><%=segments[i].label%><%}%></li><%}%></ul
@end
<|start_filename|>ChartJSWrapper/ChartJSWrapper-IOS/AppDelegate.h<|end_filename|>
//
// AppDelegate.h
// ChartJSWrapper-IOS
//
// Created by <NAME> on 24/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import <UIKit/UIKit.h>
// Application delegate for the iOS demo app.
@interface AppDelegate : UIResponder <UIApplicationDelegate>
// Main application window, created at launch.
@property (strong, nonatomic) UIWindow *window;
@end
<|start_filename|>ChartJSWrapper/CW/CWBarDataSet.h<|end_filename|>
//
// CWBarDataSet.h
// ChartJSWrapper
//
// Created by <NAME> on 22/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWDataSet.h"
// Data set for bar charts. Adds the bar-specific highlight colours on top of
// CWDataSet; trailing comments show the Chart.js 1.x defaults.
@interface CWBarDataSet : CWDataSet
@property (nonatomic, strong) CWColor* highlightFill;//: "rgba(220,220,220,0.75)",
@property (nonatomic, strong) CWColor* highlightStroke;//: "rgba(220,220,220,1)",
@end
<|start_filename|>ChartJSWrapper/CW/CWLineChartData.h<|end_filename|>
//
// CWLineData.h
// ChartJSWrapper
//
// Created by <NAME> on 21/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import "CWLabelledData.h"
// Labelled data for line charts.
@interface CWLineChartData : CWLabelledData
// Designated initializer: labels is an array of axis labels,
// dataSet an array of CWPointDataSet instances (one per plotted series).
- (instancetype) initWithLabels:(NSArray*)labels andPointDataSet:(NSArray*)dataSet;
@end
<|start_filename|>ChartJSWrapper/CW/CWLabelledData.h<|end_filename|>
//
// CWLabelledData.h
// ChartJSWrapper
//
// Created by <NAME> on 20/03/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "CWData.h"
// Data with per-column labels plus one or more data sets (line/radar/bar charts).
@interface CWLabelledData : CWData
// Axis labels, one per data column.
@property (nonatomic, strong,readonly) NSArray* labels;
// The chart's data sets (CWDataSet subclasses), one per series.
@property (nonatomic, strong,readonly) NSArray* datasets;
// Initialize with the axis labels and the array of data sets.
- (instancetype) initWithLabels:(NSArray*)labels andDataSet:(NSArray*)dataSet;
// Replace a single value: dataset selects the series, data the column index.
- (void) setValue:(NSNumber*)val inDataset:(NSInteger)dataset at:(NSInteger)data;
@end
| gyetvan-andras/Chart.js-ObjC-Wrapper |
<|start_filename|>Json.c<|end_filename|>
/*
* Copyright (c) scott.cgi All Rights Reserved.
*
* This source code belongs to project MojoJson, which is hosted on GitHub, and licensed under the MIT License.
*
* License: https://github.com/scottcgi/MojoJson/blob/master/LICENSE
* GitHub : https://github.com/scottcgi/MojoJson
*
* Since : 2013-5-29
* Update : 2021-2-6
* Author : scott.cgi
* Version: 1.2.3
*/
#include <string.h>
#include <stdbool.h>
#include <stdlib.h>
#include <assert.h>
#include <stdio.h>
#include "Json.h"
// Assert macro: when the condition fails, print the formatted message plus a
// newline and abort via assert.
// fix: the previous expansion `e ? (void) 0 : printf(...), printf("\n"), assert(e);`
// bound the comma operators OUTSIDE the ?:, so printf("\n") and assert(e) ran
// unconditionally (a stray newline on every check), and the trailing ';' made
// the macro unusable as a single statement inside if/else. The do-while form
// fixes both; the failure actions now run only when the condition is false.
#define ALog_A(e, ...) \
    do { if (!(e)) { printf(__VA_ARGS__); printf("\n"); assert(e); } } while (0)

// Debug log: plain printf pass-through.
#define ALog_D(...) printf(__VA_ARGS__)
// ArrayList tool for JsonArray
//----------------------------------------------------------------------------------------------------------------------
/**
 * The list can dynamically increase memory capacity .
 */
typedef struct
{
    /**
     * Increase memory space when needed, default 20 (set in ArrayListInit).
     */
    int increase;

    /**
     * The sizeof element type.
     */
    int elementTypeSize;

    /**
     * Elements count.
     */
    int size;

    /**
     * Store memory data, the length is memory capacity.
     * if increase capacity, memory data will realloc.
     */
    struct
    {
        /**
         * Elements memory space ptr.
         */
        void* data;

        /**
         * Capacity in elements (grown by ArrayListAddCapacity), not the
         * element count — that is `size` above.
         */
        int length;
    }
    elementArr[1];
}
ArrayList;
/**
 * Free the backing storage and reset the list to its empty state.
 * The ArrayList struct itself is not freed (it is usually embedded).
 */
static void ArrayListRelease(ArrayList* arrayList)
{
    free(arrayList->elementArr->data);
    arrayList->size               = 0;
    arrayList->elementArr->data   = NULL;
    arrayList->elementArr->length = 0;
}
/**
 * Initialize an empty list with no backing storage.
 * Capacity is allocated lazily, growing by `increase` (20) elements at a time.
 */
static void ArrayListInit(int elementTypeSize, ArrayList* arrayList)
{
    arrayList->elementTypeSize    = elementTypeSize;
    arrayList->increase           = 20;
    arrayList->size               = 0;
    arrayList->elementArr->data   = NULL;
    arrayList->elementArr->length = 0;
}
/**
 * Grow the backing array by `increase` elements via realloc.
 * Aborts (ALog_A) when increase is not positive or realloc fails.
 */
static void ArrayListAddCapacity(ArrayList* arrayList, int increase)
{
    ALog_A(increase > 0, "Json ArrayListAddCapacity failed, increase = %d cannot <= 0", increase);

    size_t newLength = (size_t) (increase + arrayList->elementArr->length);
    void*  newData   = realloc(arrayList->elementArr->data, newLength * arrayList->elementTypeSize);

    ALog_A
    (
        newData != NULL,
        "Json ArrayListAddCapacity failed, unable to realloc memory, size = %d, length = %d, increase = %d",
        arrayList->size, arrayList->elementArr->length, increase
    );

    arrayList->elementArr->data    = newData;
    arrayList->elementArr->length += increase;
}
/**
 * Append one element, copied from elementPtr; grows storage when full.
 * Returns the address of the stored copy.
 */
static void* ArrayListAdd(ArrayList* arrayList, void* elementPtr)
{
    if (arrayList->elementArr->length == arrayList->size)
    {
        ArrayListAddCapacity(arrayList, arrayList->increase);
    }

    char* slot = (char*) arrayList->elementArr->data + arrayList->elementTypeSize * arrayList->size;
    ++arrayList->size;

    return memcpy(slot, elementPtr, (size_t) arrayList->elementTypeSize);
}
/**
 * Insert one element (copied from elementPtr) at index,
 * shifting the tail [index, size) up by one slot.
 * Returns the address of the stored copy.
 */
static void* ArrayListInsert(ArrayList* arrayList, int index, void* elementPtr)
{
    if (arrayList->elementArr->length == arrayList->size)
    {
        ArrayListAddCapacity(arrayList, arrayList->increase);
    }

    char* from = (char*) arrayList->elementArr->data + arrayList->elementTypeSize * index;
    char* to   = from + arrayList->elementTypeSize;
    int   tail = arrayList->size - index;

    // source and destination overlap, so memmove rather than memcpy
    memmove(to, from, (size_t) arrayList->elementTypeSize * tail);
    ++arrayList->size;

    return memcpy(from, elementPtr, (size_t) arrayList->elementTypeSize);
}
/**
 * Get the element with type. No bounds checking is performed.
 */
#define AArrayList_Get(arrayList, index, ElementType) \
    (((ElementType*) ((arrayList)->elementArr->data)))[index]

/**
 * Shortcut of ArrayListAdd.
 * `element` must be an lvalue (its address is taken).
 */
#define AArrayList_Add(arrayList, element) \
    ArrayListAdd(arrayList, &(element))

/**
 * Shortcut of ArrayListInsert.
 * `element` must be an lvalue (its address is taken).
 */
#define AArrayList_Insert(arrayList, index, element) \
    ArrayListInsert(arrayList, index, &(element))

/**
 * Marked ArrayList element type — documentation only,
 * expands to the untyped ArrayList.
 */
#define ArrayList(ElementType) ArrayList
// ArrayStrMap tool for JsonObject
//----------------------------------------------------------------------------------------------------------------------
/**
 * The actual element store in ArrayStrMap.
 * One malloc block holds [element header][value bytes][key bytes]
 * (see ArrayStrMapTryPut).
 */
typedef struct
{
    /**
     * ArrayStrMap value's key.
     * the key data will copy into ArrayStrMapElement malloc space.
     */
    const char* key;

    /**
     * The length of key, include '\0'.
     */
    int keyLength;

    /**
     * ArrayStrMap value pointer.
     * the value data copy into ArrayStrMapElement malloc space.
     */
    void* valuePtr;
}
ArrayStrMapElement;
/**
 * A list of elements each of which is a k-v pair.
 * The list is kept sorted by key length, then key bytes
 * (see BinarySearch), so lookups are O(log n).
 */
typedef struct
{
    /**
     * The sizeof ArrayStrMap value type.
     */
    int valueTypeSize;

    /**
     * Store all ArrayStrMapElements.
     */
    ArrayList(ArrayStrMapElement*) elementList[1];
}
ArrayStrMap;
/**
 * Free every element (key and value share one malloc block with the
 * element header), then release the element list itself.
 */
static void ArrayStrMapRelease(ArrayStrMap* arrayStrMap)
{
    int count = arrayStrMap->elementList->size;

    for (int i = 0; i < count; ++i)
    {
        free(AArrayList_Get(arrayStrMap->elementList, i, ArrayStrMapElement*));
    }

    ArrayListRelease(arrayStrMap->elementList);
}
/**
 * Initialize an empty map whose values are valueTypeSize bytes each.
 * The k-v pairs live in a sorted ArrayList of element pointers.
 */
static void ArrayStrMapInit(int valueTypeSize, ArrayStrMap* outArrayStrMap)
{
    outArrayStrMap->valueTypeSize = valueTypeSize;
    ArrayListInit(sizeof(ArrayStrMapElement*), outArrayStrMap->elementList);
}
/**
 * Key string of the element stored at the given list position.
 */
static const char* ArrayStrMapGetKey(ArrayStrMap* arrayStrMap, int index)
{
    ArrayStrMapElement* element = AArrayList_Get(arrayStrMap->elementList, index, ArrayStrMapElement*);
    return element->key;
}
/**
 * Search index of key, if negative not found then return "-insertIndex - 1",
 * so insert index is "-BinarySearch() - 1".
 *
 * Elements are ordered first by keyLength, then by memcmp of the key bytes;
 * ArrayStrMapTryPut inserts at the derived position to keep this invariant.
 */
static int BinarySearch(ArrayList(ArrayStrMapElement)* elementList, const char* key, int keyLength)
{
    int high  = elementList->size;
    int low   = -1;
    int guess = -1;

    while (high - low > 1) // prevent infinite loops
    {
        // (high + low) always positive, so convert to unsigned
        // then the '>>' is unsigned move right
        // so the overflow will be handled correctly
        // because sign bit shift to right and 0 will be added
        guess = (unsigned int) (high + low) >> 1;

        ArrayStrMapElement* element = AArrayList_Get(elementList, guess, ArrayStrMapElement*);

        // order by key length first, key bytes second
        if (element->keyLength < keyLength)
        {
            low = guess;
        }
        else if (element->keyLength > keyLength)
        {
            high = guess;
        }
        else if (element->keyLength == keyLength)
        {
            int cmp = memcmp(element->key, key, (size_t) keyLength);

            if (cmp < 0)
            {
                low = guess;
            }
            else if (cmp > 0)
            {
                high = guess;
            }
            else if (cmp == 0)
            {
                // find the key, the guess is positive value
                return guess;
            }
        }
    }

    // if guess == high
    // the guess is bigger than key index and insert value at guess

    if (guess == low)
    {
        // the guess is smaller than key index and insert value behind,
        // or if list empty then the guess is -1, also do this make guess at 0
        ++guess;
    }

    // when list empty the guess is 0, so we -1 make sure return negative value
    return -guess - 1;
}
/**
 * Look up key (compared including its '\0' terminator);
 * returns the stored value pointer, or defaultValuePtr when absent.
 */
static void* ArrayStrMapGet(ArrayStrMap* arrayStrMap, const char* key, void* defaultValuePtr)
{
    int found = BinarySearch(arrayStrMap->elementList, key, (int) strlen(key) + 1);

    if (found < 0)
    {
        return defaultValuePtr;
    }

    return AArrayList_Get(arrayStrMap->elementList, found, ArrayStrMapElement*)->valuePtr;
}
/**
 * Put the k-v pair only if the key is absent.
 * Returns a pointer to the stored value copy, or NULL if the key already exists.
 * One malloc block holds [ArrayStrMapElement][value bytes][key bytes].
 */
static void* ArrayStrMapTryPut(ArrayStrMap* arrayStrMap, const char* key, void* valuePtr)
{
    int keyLength = (int) strlen(key) + 1;
    int guess     = BinarySearch(arrayStrMap->elementList, key, keyLength);

    if (guess >= 0)
    {
        // key already present
        return NULL;
    }

    int                 valueTypeSize = arrayStrMap->valueTypeSize;
    ArrayStrMapElement* element       = malloc(sizeof(ArrayStrMapElement) + valueTypeSize + keyLength);
    // fix: the malloc result was previously used without a NULL check
    ALog_A(element != NULL, "Json ArrayStrMapTryPut failed, unable to malloc memory, keyLength = %d", keyLength);

    element->keyLength = keyLength;
    element->valuePtr  = (char*) element + sizeof(ArrayStrMapElement);
    element->key       = (char*) element->valuePtr + valueTypeSize;
    memcpy((void*) element->key, key, (size_t) keyLength);

    // BinarySearch encodes the insert position as "-index - 1"
    AArrayList_Insert(arrayStrMap->elementList, -guess - 1, element);

    return memcpy(element->valuePtr, valuePtr, (size_t) valueTypeSize);
}
/**
 * Marked ArrayStrMap key and value — documentation only,
 * expands to the untyped ArrayStrMap.
 */
#define ArrayStrMap(keyName, ValueType) ArrayStrMap

/**
 * Get the value at a list position, typed.
 * return value (dereferenced, not a pointer).
 */
#define AArrayStrMap_GetAt(arrayStrMap, index, ValueType) \
    (*(ValueType*) AArrayList_Get(arrayStrMap->elementList, index, ArrayStrMapElement*)->valuePtr)

/**
 * Shortcut of ArrayStrMapGet.
 * return value; yields NULL (via the inline one-slot default array) when absent.
 */
#define AArrayStrMap_Get(arrayStrMap, key, ValueType) \
    (*(ValueType*) ArrayStrMapGet(arrayStrMap, key, (void*[1]) {NULL}))

/**
 * Shortcut of ArrayStrMapTryPut.
 * `value` must be an lvalue (its address is taken).
 */
#define AArrayStrMap_TryPut(arrayStrMap, key, value) \
    ArrayStrMapTryPut(arrayStrMap, key, &(value))
// Define struct of JsonObject and JsonArray
//----------------------------------------------------------------------------------------------------------------------
/**
 * For json object that contains a set of k-v pairs.
 * Keys map to JsonValue pointers owned by this object (freed in Destroy).
 */
struct JsonObject
{
    ArrayStrMap(objectKey, JsonValue*) valueMap[1];
};
/**
 * For json array that contains a list of json value.
 * The JsonValue pointers are owned by this array (freed in Destroy).
 */
struct JsonArray
{
    ArrayList(JsonValue*) valueList[1];
};
// Json value create and destory
//----------------------------------------------------------------------------------------------------------------------
/**
 * If the JsonValue is JsonType_Array, then free each items and do recursively.
 * if the JsonValue is JsonType_Object, then free each k-v and do recursively.
 */
static void Destroy(JsonValue* value)
{
    // JsonValue hold the whole memory
    // so free JsonValue will be release JsonValue's memory
    switch (value->type)
    {
        case JsonType_Array:
        {
            ArrayList* list = value->jsonArray->valueList;
            for (int i = 0; i < list->size; ++i)
            {
                Destroy(AArrayList_Get(list, i, JsonValue*));
            }

            ArrayListRelease(list);
            break;
        }

        case JsonType_Object:
        {
            ArrayStrMap* map = value->jsonObject->valueMap;
            for (int i = 0; i < map->elementList->size; ++i)
            {
                Destroy(AArrayStrMap_GetAt(map, i, JsonValue*));
            }

            ArrayStrMapRelease(map);
            break;
        }

        // Float, String and Null need no extra cleanup: their payload is
        // stored inline in the JsonValue allocation (see CreateJsonValue)
        case JsonType_Float:
            break;

        case JsonType_String:
            break;

        case JsonType_Null:
            break;
    }

    free(value);
}
/**
 * Allocate a JsonValue plus valueSize extra payload bytes in ONE block.
 * For String the payload bytes are copied from data; for Array/Object the
 * container struct is placed and initialized in the payload; for Float the
 * caller fills jsonFloat afterwards (data/valueSize are unused).
 * Aborts on unknown type or allocation failure.
 */
static JsonValue* CreateJsonValue(void* data, size_t valueSize, JsonType type)
{
    JsonValue* value = malloc(sizeof(JsonValue) + valueSize);
    // fix: the malloc result was previously used without a NULL check
    ALog_A(value != NULL, "Json CreateJsonValue failed, unable to malloc memory");

    switch (type)
    {
        case JsonType_Float:
            break;

        case JsonType_String:
            value->jsonString = memcpy((char*) value + sizeof(JsonValue), data, valueSize);
            break;

        case JsonType_Array:
            value->jsonArray = (JsonArray*) ((char*) value + sizeof(JsonValue));
            ArrayListInit(sizeof(JsonValue*), value->jsonArray->valueList);
            break;

        case JsonType_Object:
            value->jsonObject = (JsonObject*) ((char*) value + sizeof(JsonValue));
            ArrayStrMapInit(sizeof(JsonValue*), value->jsonObject->valueMap);
            break;

        default:
            ALog_A(false, "Json CreateJsonValue unknown JsonType = %d", type);
    }

    value->type = type;

    return value;
}
// JsonObject API
//----------------------------------------------------------------------------------------------------------------------
/**
 * true only when the stored literal string equals "true";
 * defaultValue when the key is absent.
 */
static bool ObjectGetBool(JsonObject* object, const char* key, bool defaultValue)
{
    JsonValue* jsonValue = AArrayStrMap_Get(object->valueMap, key, JsonValue*);

    if (jsonValue == NULL)
    {
        return defaultValue;
    }

    return strcmp(jsonValue->jsonString, "true") == 0;
}
/**
 * Truncate the stored float to int; defaultValue when the key is absent.
 */
static int ObjectGetInt(JsonObject* object, const char* key, int defaultValue)
{
    JsonValue* jsonValue = AArrayStrMap_Get(object->valueMap, key, JsonValue*);
    return jsonValue == NULL ? defaultValue : (int) jsonValue->jsonFloat;
}
/**
 * Stored float value; defaultValue when the key is absent.
 */
static float ObjectGetFloat(JsonObject* object, const char* key, float defaultValue)
{
    JsonValue* jsonValue = AArrayStrMap_Get(object->valueMap, key, JsonValue*);
    return jsonValue == NULL ? defaultValue : jsonValue->jsonFloat;
}
/**
 * Stored string value; defaultValue (cast to non-const) when the key is absent.
 */
static char* ObjectGetString(JsonObject* object, const char* key, const char* defaultValue)
{
    JsonValue* jsonValue = AArrayStrMap_Get(object->valueMap, key, JsonValue*);

    if (jsonValue == NULL)
    {
        return (char*) defaultValue;
    }

    return jsonValue->jsonString;
}
/**
 * Nested object under key; NULL when the key is absent.
 */
static JsonObject* ObjectGetObject(JsonObject* object, const char* key)
{
    JsonValue* jsonValue = AArrayStrMap_Get(object->valueMap, key, JsonValue*);

    if (jsonValue == NULL)
    {
        return NULL;
    }

    return jsonValue->jsonObject;
}
/**
 * Nested array under key; NULL when the key is absent.
 */
static JsonArray* ObjectGetArray(JsonObject* object, const char* key)
{
    JsonValue* jsonValue = AArrayStrMap_Get(object->valueMap, key, JsonValue*);

    if (jsonValue == NULL)
    {
        return NULL;
    }

    return jsonValue->jsonArray;
}
/**
 * JsonType stored under key; JsonType_Null when the key is absent.
 */
static JsonType ObjectGetType(JsonObject* object, const char* key)
{
    JsonValue* jsonValue = AArrayStrMap_Get(object->valueMap, key, JsonValue*);
    return jsonValue == NULL ? JsonType_Null : jsonValue->type;
}
/**
 * Key string at the given position (keys are ordered by length, then bytes —
 * see BinarySearch — not by insertion order).
 */
static const char* ObjectGetKey(JsonObject* object, int index)
{
    const char* key = ArrayStrMapGetKey(object->valueMap, index);
    return key;
}
/**
 * Nested object stored at the given position (no type or bounds check).
 */
static JsonObject* ObjectGetObjectByIndex(JsonObject* object, int index)
{
    JsonValue* jsonValue = AArrayStrMap_GetAt(object->valueMap, index, JsonValue*);
    return jsonValue->jsonObject;
}
/**
 * Nested array stored at the given position (no type or bounds check).
 */
static JsonArray* ObjectGetArrayByIndex(JsonObject* object, int index)
{
    JsonValue* jsonValue = AArrayStrMap_GetAt(object->valueMap, index, JsonValue*);
    return jsonValue->jsonArray;
}
// Public JsonObject API singleton; member order must match struct AJsonObject
// (declared in Json.h, outside this view).
struct AJsonObject AJsonObject[1] =
{{
    ObjectGetBool,
    ObjectGetInt,
    ObjectGetFloat,
    ObjectGetType,
    ObjectGetString,
    ObjectGetObject,
    ObjectGetArray,
    ObjectGetKey,
    ObjectGetObjectByIndex,
    ObjectGetArrayByIndex,
}};
// JsonArray API
//----------------------------------------------------------------------------------------------------------------------
/**
 * true only when the element's literal string equals "true".
 */
static bool ArrayGetBool(JsonArray* array, int index)
{
    JsonValue* element = AArrayList_Get(array->valueList, index, JsonValue*);
    return strcmp(element->jsonString, "true") == 0;
}
/**
 * Element's float value truncated to int (no bounds check).
 */
static int ArrayGetInt(JsonArray* array, int index)
{
    JsonValue* element = AArrayList_Get(array->valueList, index, JsonValue*);
    return (int) element->jsonFloat;
}
/**
 * Element's float value (no bounds check).
 */
static float ArrayGetFloat(JsonArray* array, int index)
{
    JsonValue* element = AArrayList_Get(array->valueList, index, JsonValue*);
    return element->jsonFloat;
}
/**
 * Element's string value (no bounds check).
 */
static char* ArrayGetString(JsonArray* array, int index)
{
    JsonValue* element = AArrayList_Get(array->valueList, index, JsonValue*);
    return element->jsonString;
}
/**
 * Element as a nested object (no type or bounds check).
 */
static JsonObject* ArrayGetObject(JsonArray* array, int index)
{
    JsonValue* element = AArrayList_Get(array->valueList, index, JsonValue*);
    return element->jsonObject;
}
/**
 * Element as a nested array (no type or bounds check).
 */
static JsonArray* ArrayGetArray(JsonArray* array, int index)
{
    JsonValue* element = AArrayList_Get(array->valueList, index, JsonValue*);
    return element->jsonArray;
}
/**
 * JsonType of the element at index; JsonType_Null when out of range.
 */
static JsonType ArrayGetType(JsonArray* array, int index)
{
    bool inRange = index >= 0 && index < array->valueList->size;
    return inRange ? AArrayList_Get(array->valueList, index, JsonValue*)->type : JsonType_Null;
}
// Public JsonArray API singleton; member order must match struct AJsonArray
// (declared in Json.h, outside this view).
struct AJsonArray AJsonArray[1] =
{{
    ArrayGetBool,
    ArrayGetInt,
    ArrayGetFloat,
    ArrayGetType,
    ArrayGetString,
    ArrayGetObject,
    ArrayGetArray,
}};
// Json parser
//----------------------------------------------------------------------------------------------------------------------
/**
 * Advance *jsonPtr past spaces, tabs and line breaks.
 * Every caller expects more input to follow, so hitting the end of the
 * string here means the JSON is truncated — abort with a message.
 */
static void SkipWhiteSpace(const char** jsonPtr)
{
    const char* json = *jsonPtr;

    while (*json == ' ' || *json == '\t' || *json == '\n' || *json == '\r')
    {
        ++json;
    }

    // fix: the previous check was (json != NULL), which can never fail —
    // json is advanced from a valid pointer, never reassigned to NULL.
    // End-of-input is signalled by the terminator, so test *json instead.
    ALog_A(*json != '\0', "The Json parse error on NULL, json is incomplete.");

    *jsonPtr = json;
}
/**
 * Parse a float literal at *jsonPtr into a JsonType_Float value
 * and advance *jsonPtr past the number (strtof decides its extent).
 * fix: the return type was void*, which silently bypassed type checking
 * at the call site; all callers expect a JsonValue*.
 */
static JsonValue* ParseNumber(const char** jsonPtr)
{
    char*      endPtr;
    JsonValue* value = CreateJsonValue(NULL, 0, JsonType_Float);

    value->jsonFloat = strtof(*jsonPtr, &endPtr);
    ALog_D("Json number = %.*s", (int) (endPtr - *jsonPtr), *jsonPtr);

    *jsonPtr = endPtr;

    return value;
}
/**
 * Skip a double-quoted string starting at *jsonPtr (which must point at the
 * opening '"'). On return, *outStrStart points at the first character after
 * the opening quote, *jsonPtr at the character after the closing quote, and
 * the returned value is the string length excluding the quotes.
 *
 * NOTE(review): an unterminated string (missing closing '"') makes this loop
 * read past the end of the buffer — callers must supply well-formed input;
 * confirm this is acceptable for the library's trust model.
 */
static int SkipString(const char** jsonPtr, const char** outStrStart)
{
    // skip '"'
    const char* json  = ++(*jsonPtr);
    int         count = 0;
    char        c;

    // check end '"'
    while ((c = json[count++]) != '"')
    {
        if (c == '\\')
        {
            // skip escaped quotes
            // the escape char may be '\"',which will break while
            ++count;
        }
    }

    *outStrStart = json;

    // already skipped the string end '"'
    *jsonPtr += count;

    // how many char skipped
    // count contains the string end '"', so -1
    return count - 1;
}
static JsonValue* ParseString(const char** jsonPtr)
{
const char* strStart;
int length = SkipString(jsonPtr, &strStart);
JsonValue* value = CreateJsonValue((void*) strStart, (length + 1) * sizeof(char), JsonType_String);
value->jsonString[length] = '\0';
ALog_D("Json string = %s", value->jsonString);
return value;
}
// predefine
static JsonValue* ParseValue(const char** jsonPtr);
/**
 * Parse a '[...]' at *jsonPtr into a JsonType_Array value and advance
 * *jsonPtr past the closing ']'. Elements are comma-separated JsonValues;
 * an empty array is accepted. Aborts (ALog_A) on a malformed separator.
 */
static JsonValue* ParseArray(const char** jsonPtr)
{
    JsonValue* jsonValue = CreateJsonValue(NULL, sizeof(JsonArray), JsonType_Array);
    ArrayList* list      = jsonValue->jsonArray->valueList;

    ALog_D("Json Array: [");

    // skip '['
    ++(*jsonPtr);

    do
    {
        SkipWhiteSpace(jsonPtr);

        // empty array, or trailing position after the last element
        if (**jsonPtr == ']')
        {
            break;
        }

        JsonValue* value = ParseValue(jsonPtr);
        // add Array element
        AArrayList_Add(list, value);

        SkipWhiteSpace(jsonPtr);

        if (**jsonPtr == ',')
        {
            ++(*jsonPtr);
        }
        else
        {
            // anything other than ',' must be the closing bracket
            ALog_A(**jsonPtr == ']', "Json Array not has ']', error char = %c ", **jsonPtr);
            break;
        }
    }
    while (true);

    // skip ']'
    ++(*jsonPtr);
    ALog_D("] JsonArray element count = %d", list->size);

    return jsonValue;
}
/**
 * Parse a '{...}' at *jsonPtr into a JsonType_Object value and advance
 * *jsonPtr past the closing '}'. Members are `"key": value` pairs separated
 * by commas; an empty object is accepted. Aborts (ALog_A) on malformed input.
 */
static JsonValue* ParseObject(const char** jsonPtr)
{
    JsonValue*   jsonValue = CreateJsonValue(NULL, sizeof(JsonObject), JsonType_Object);
    ArrayStrMap* map       = jsonValue->jsonObject->valueMap;

    ALog_D("Json Object: {");

    // skip '{'
    ++(*jsonPtr);

    do
    {
        SkipWhiteSpace(jsonPtr);

        // empty object, or trailing position after the last member
        if (**jsonPtr == '}')
        {
            break;
        }

        ALog_A(**jsonPtr == '"', "Json object parse error, char = %c, should be '\"' ", **jsonPtr);

        const char* strStart;
        int         keyLen = SkipString(jsonPtr, &strStart);

        // VLA: the key is copied to the stack so AArrayStrMap_TryPut can read
        // a '\0'-terminated string. NOTE(review): a pathologically long key
        // could overflow the stack — confirm input sizes are bounded.
        char key[keyLen + 1];
        // make string end
        key[keyLen] = '\0';
        memcpy(key, strStart, (size_t) keyLen);
        ALog_D("Json key = %s", key);

        SkipWhiteSpace(jsonPtr);
        ALog_A((**jsonPtr) == ':', "Json object parse error, char = %c, should be ':' ", **jsonPtr);

        // skip ':'
        ++(*jsonPtr);
        JsonValue* value = ParseValue(jsonPtr);

        // set object element
        AArrayStrMap_TryPut(map, key, value);

        SkipWhiteSpace(jsonPtr);

        if (**jsonPtr == ',')
        {
            ++(*jsonPtr);
        }
        else
        {
            // anything other than ',' must be the closing brace
            ALog_A(**jsonPtr == '}', "Json Object not has '}', error char = %c ", **jsonPtr);
            break;
        }
    }
    while (true);

    // skip '}'
    ++(*jsonPtr);
    ALog_D("} JsonObject elements count = %d", map->elementList->size);

    return jsonValue;
}
/**
 * ParseValue changed the *jsonPtr, so if *jsonPtr is direct malloc will cause error.
 *
 * Dispatches on the first non-whitespace character to the matching parser.
 * Note: the literals false/true/null are stored as JsonType_String values
 * holding the literal text (see the CreateJsonValue calls below), not as a
 * dedicated boolean/null type.
 */
static JsonValue* ParseValue(const char** jsonPtr)
{
    SkipWhiteSpace(jsonPtr);

    char c = **jsonPtr;

    switch (c)
    {
        case '{':
            return ParseObject(jsonPtr);

        case '[':
            return ParseArray(jsonPtr);

        case '"':
            return ParseString(jsonPtr);

        case '0':
        case '1':
        case '2':
        case '3':
        case '4':
        case '5':
        case '6':
        case '7':
        case '8':
        case '9':
        case '-':
            return ParseNumber(jsonPtr);

        case 'f':
        {
            const char* json = *jsonPtr;

            if
            (
                json[1] == 'a' &&
                json[2] == 'l' &&
                json[3] == 's' &&
                json[4] == 'e'
            )
            {
                ALog_D("Json false");

                (*jsonPtr) += 5;

                // copy with '\0'
                return CreateJsonValue("false", 6, JsonType_String);
            }
            break;
        }

        case 't':
        {
            const char* json = *jsonPtr;

            if
            (
                json[1] == 'r' &&
                json[2] == 'u' &&
                json[3] == 'e'
            )
            {
                ALog_D("Json true");

                (*jsonPtr) += 4;

                // copy with '\0'
                return CreateJsonValue("true", 5, JsonType_String);
            }
            break;
        }

        case 'n':
        {
            const char* json = *jsonPtr;

            if
            (
                json[1] == 'u' &&
                json[2] == 'l' &&
                json[3] == 'l'
            )
            {
                ALog_D("Json null");

                (*jsonPtr) += 4;

                // copy with '\0'
                return CreateJsonValue("null", 5, JsonType_String);
            }
            break;
        }

        default:
            break;
    }

    // fall through: unrecognized leading character — abort
    ALog_A(false, "Invalid json value type, error char = %c", c);

    return NULL;
}
/**
 * Entry point: parse a whole JSON document.
 * ParseValue advances the pointer it is given, so pass the address of a
 * local cursor rather than the caller's pointer.
 */
static JsonValue* Parse(const char* jsonString)
{
    const char* cursor = jsonString;
    return ParseValue(&cursor);
}
// Public entry-point singleton; member order must match struct AJson
// (declared in Json.h, outside this view).
struct AJson AJson[1] =
{{
    Parse,
    Destroy,
}};
#undef ALog_A
#undef ALog_D
<|start_filename|>MojoJson.cs<|end_filename|>
/*
* Copyright (c) scott.cgi All Rights Reserved.
*
* This source code belongs to project MojoJson, which is hosted on GitHub, and licensed under the MIT License.
*
* License: https://github.com/scottcgi/MojoJson/blob/master/LICENSE
* GitHub : https://github.com/scottcgi/MojoJson
*
* Since : 2017-9-6
* Update : 2020-2-28
* Author : scott.cgi
* Version: 1.2.3
*/
using System.Collections.Generic;
using System.Text;
using System;
namespace MojoJson
{
public static class Json
{
// Initial capacity of the Dictionary backing each parsed JSON object.
private const int ObjectInitCapacity = 8;
// Initial capacity of the List backing each parsed JSON array.
private const int ArrayInitCapacity  = 8;
// Toggled via SetEscapeString; presumably controls whether escape
// sequences in string values are decoded during parsing — the consuming
// code is outside this view, confirm.
private static bool isEscapeString;
#region Parse Json API
/// <summary>
/// Parse the given json string into a JsonValue tree.
/// </summary>
public static JsonValue Parse(string json)
{
    var state = new Data(json);
    return ParseValue(ref state);
}
/// <summary>
/// Whether the string value need to be escaped ?
/// When true, ParseString resolves escape sequences (\n, \uXXXX, ...);
/// when false, string values are returned verbatim (faster).
/// </summary>
public static void SetEscapeString(bool isEscapeString)
{
    Json.isEscapeString = isEscapeString;
}
#endregion
#region Parse Json
/// <summary>
/// Parse the JsonValue.
/// Dispatches on the first non-whitespace char to the specific parser.
/// </summary>
private static JsonValue ParseValue(ref Data data)
{
    SkipWhiteSpace(ref data);

    switch (data.json[data.index])
    {
        case '{':
            return ParseObject(ref data);

        case '[':
            return ParseArray (ref data);

        case '"':
            return ParseString(ref data);

        case '0':
        case '1':
        case '2':
        case '3':
        case '4':
        case '5':
        case '6':
        case '7':
        case '8':
        case '9':
        case '-':
            return ParseNumber(ref data);

        case 'f':
            // literal "false" -> Bool stored as numberValue 0.0f
            if
            (
                data.json[data.index + 1] == 'a' &&
                data.json[data.index + 2] == 'l' &&
                data.json[data.index + 3] == 's' &&
                data.json[data.index + 4] == 'e'
            )
            {
                data.index += 5;
                return new JsonValue(JsonType.Bool, 0.0f);
            }
            break;

        case 't':
            // literal "true" -> Bool stored as numberValue 1.0f
            if
            (
                data.json[data.index + 1] == 'r' &&
                data.json[data.index + 2] == 'u' &&
                data.json[data.index + 3] == 'e'
            )
            {
                data.index += 4;
                return new JsonValue(JsonType.Bool, 1.0f);
            }
            break;

        case 'n':
            // literal "null" -> Null value (objectValue is null)
            if
            (
                data.json[data.index + 1] == 'u' &&
                data.json[data.index + 2] == 'l' &&
                data.json[data.index + 3] == 'l'
            )
            {
                data.index += 4;
                return new JsonValue(JsonType.Null, null);
            }
            break;
    }

    // Unrecognized leading char or malformed true/false/null literal.
    throw new Exception
    (
        string.Format
        (
            "Json ParseValue error on char '{0}' index at '{1}' ",
            data.json[data.index],
            data.index
        )
    );
}
/// <summary>
/// Parse JsonObject ("{...}") into a Dictionary of JsonValues.
/// </summary>
private static JsonValue ParseObject(ref Data data)
{
    var jsonObject = new Dictionary<string, JsonValue>(Json.ObjectInitCapacity);

    // skip '{'
    ++data.index;

    do
    {
        SkipWhiteSpace(ref data);

        // empty object, or closing brace reached
        if (data.json[data.index] == '}')
        {
            break;
        }

        DebugTool.Assert
        (
            data.json[data.index] == '"',
            "Json ParseObject error, char '{0}' should be '\"' ",
            data.json[data.index]
        );

        // get object key string
        var key = GetString(ref data);

        SkipWhiteSpace(ref data);

        DebugTool.Assert
        (
            data.json[data.index] == ':',
            "Json ParseObject error, after key = {0}, char '{1}' should be ':' ",
            key,
            data.json[data.index]
        );

        // skip ':'
        ++data.index;

        // set JsonObject key and value
        jsonObject.Add(key, ParseValue(ref data));

        SkipWhiteSpace(ref data);

        if (data.json[data.index] == ',')
        {
            ++data.index;
        }
        else
        {
            // BUGFIX: a literal '}' inside a string.Format pattern must be
            // escaped as '}}', otherwise DebugTool.Assert throws
            // FormatException instead of the intended diagnostic message
            // whenever this assertion fails on malformed input.
            DebugTool.Assert
            (
                data.json[data.index] == '}',
                "Json ParseObject error, after key = {0}, char '{1}' should be '}}' ",
                key,
                data.json[data.index]
            );

            break;
        }
    }
    while (true);

    // skip '}'
    ++data.index;

    return new JsonValue(JsonType.Object, jsonObject);
}
/// <summary>
/// Parse JsonArray.
/// </summary>
private static JsonValue ParseArray(ref Data data)
{
    var jsonArray = new List<JsonValue>(Json.ArrayInitCapacity);

    // skip '['
    ++data.index;

    do
    {
        SkipWhiteSpace(ref data);

        // empty array, or closing bracket reached
        if (data.json[data.index] == ']')
        {
            break;
        }

        // add JsonArray item
        jsonArray.Add(ParseValue(ref data));

        SkipWhiteSpace(ref data);

        if (data.json[data.index] == ',')
        {
            ++data.index;
        }
        else
        {
            DebugTool.Assert
            (
                data.json[data.index] == ']',
                "Json ParseArray error, char '{0}' should be ']' ",
                data.json[data.index]
            );

            break;
        }
    }
    while (true);

    // skip ']'
    ++data.index;

    return new JsonValue(JsonType.Array, jsonArray);
}
/// <summary>
/// Parses the JsonString.
/// Picks the escaped or raw reader depending on the global escape flag.
/// </summary>
private static JsonValue ParseString(ref Data data)
{
    var str = Json.isEscapeString ? GetEscapedString(ref data) : GetString(ref data);
    return new JsonValue(JsonType.String, str);
}
/// <summary>
/// Parses the JsonNumber.
/// </summary>
private static JsonValue ParseNumber(ref Data data)
{
    var start = data.index;

    // BUGFIX: guard the scan against running past the end of the input —
    // the original read data.json[++data.index] unconditionally and threw
    // IndexOutOfRangeException when a number was the last token (e.g. "123").
    while (++data.index < data.json.Length)
    {
        switch (data.json[data.index])
        {
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7':
            case '8':
            case '9':
            case '-':
            case '+':
            case '.':
            case 'e':
            case 'E':
                continue;
        }

        // first non-number char: stop scanning
        break;
    }

    var strNum = data.json.Substring(start, data.index - start);

    if (float.TryParse(strNum, out float num))
    {
        return new JsonValue(JsonType.Number, num);
    }

    throw new Exception(string.Format("Json ParseNumber error, cannot parse string [{0}]", strNum));
}
/// <summary>
/// Skip the white space.
/// Leaves data.index on the first non-whitespace char.
/// </summary>
private static void SkipWhiteSpace(ref Data data)
{
    while (true)
    {
        char c = data.json[data.index];

        if (c == ' ' || c == '\t' || c == '\n' || c == '\r')
        {
            ++data.index;
            continue;
        }

        // index point to non-whitespace
        break;
    }
}
/// <summary>
/// Get the original string value includes escape char.
/// Scans to the closing '"' without unescaping and returns the raw slice.
/// </summary>
private static string GetString(ref Data data)
{
    // skip '"'
    var start = ++data.index;

    while (true)
    {
        // note: index is advanced past the char being inspected
        switch (data.json[data.index++])
        {
            // check end '"'
            case '"':
                break;

            case '\\':
                // skip escaped quotes
                // the escape char may be '\"',which will break while
                ++data.index;
                continue;

            default:
                continue;
        }

        break;
    }

    // index after the string end '"' so -1
    return data.json.Substring(start, data.index - start - 1);
}
/// <summary>
/// Get the escaped string value.
/// Accumulates decoded segments in data.sb; unsupported escapes are kept
/// verbatim (backslash included) in the surrounding segment.
/// </summary>
private static string GetEscapedString(ref Data data)
{
    // skip '"'
    var start = ++data.index;
    string str;

    while (true)
    {
        switch (data.json[data.index++])
        {
            // check string end '"'
            case '"':
                if (data.sb.Length == 0)
                {
                    // no escaped char just Substring
                    str = data.json.Substring(start, data.index - start - 1);
                }
                else
                {
                    str = data.sb.Append(data.json, start, data.index - start - 1).ToString();
                    // clear for next string
                    data.sb.Length = 0;
                }
                break;

            // check escaped char
            case '\\':
            {
                // escapedIndex points just after the backslash
                var escapedIndex = data.index;
                char c;

                switch (data.json[data.index++])
                {
                    case '"':
                        c = '"';
                        break;

                    case '\\':
                        c = '\\';
                        break;

                    case '/':
                        c = '/';
                        break;

                    case '\'':
                        c = '\'';
                        break;

                    case 'b':
                        c = '\b';
                        break;

                    case 'f':
                        c = '\f';
                        break;

                    case 'n':
                        c = '\n';
                        break;

                    case 'r':
                        c = '\r';
                        break;

                    case 't':
                        c = '\t';
                        break;

                    case 'u':
                        c = GetUnicodeCodePoint(ref data);
                        break;

                    default:
                        // not support just add in pre string
                        continue;
                }

                // add pre string and escaped char
                data.sb.Append(data.json, start, escapedIndex - start - 1).Append(c);

                // update pre string start index
                start = data.index;

                continue;
            }

            default:
                continue;
        }

        // index skipped the string end '"'
        break;
    }

    return str;
}
/// <summary>
/// Get the unicode code point.
/// Decodes the four hex digits following "\u" into a single char.
/// </summary>
private static char GetUnicodeCodePoint(ref Data data)
{
    var index = data.index;

    for (var i = 0; i < 4; ++i)
    {
        char c = data.json[index + i];
        int digit;

        // hex digit -> numeric value (case-insensitive), same accepted
        // set as the original per-char switch
        if (c >= '0' && c <= '9')
        {
            digit = c - '0';
        }
        else if (c >= 'a' && c <= 'f')
        {
            digit = c - 'a' + 10;
        }
        else if (c >= 'A' && c <= 'F')
        {
            digit = c - 'A' + 10;
        }
        else
        {
            throw new Exception(string.Format("Json Unicode char '{0}' error", c));
        }

        data.unicode[i] = digit;
    }

    // skip code point
    data.index += 4;

    return (char) (
                      (data.unicode[0] << 12) +
                      (data.unicode[1] <<  8) +
                      (data.unicode[2] <<  4) +
                      (data.unicode[3]      )
                  );
}
#endregion
/// <summary>
/// Mutable parsing cursor shared by all parse methods via ref.
/// </summary>
private struct Data
{
    public readonly string json;        // raw json text being parsed
    public int index;                   // current read position in json
    public readonly StringBuilder sb;   // scratch buffer reused while decoding escaped strings
    public readonly int[] unicode;      // scratch digits for \uXXXX decoding

    public Data(string json)
    {
        this.json = json;
        this.index = 0;
        this.sb = new StringBuilder();
        this.unicode = new int[4];
    }
}
}
/// <summary>
/// The category of value a JsonValue node holds.
/// </summary>
public enum JsonType
{
    Object,
    Array,
    String,
    Number,
    Bool,
    Null,
}
/// <summary>
/// Parsed json node: a tagged union keyed by <see cref="type"/>.
/// Object/Array/String payloads live in objectValue; Number and Bool
/// payloads live in numberValue (Bool is 0.0f / 1.0f).
/// </summary>
public class JsonValue
{
    public readonly JsonType type;          // which payload field is valid
    private readonly object objectValue;    // Dictionary / List / string payload
    private readonly float numberValue;     // Number payload, or Bool as 0/1

    public JsonValue(JsonType type, object value)
    {
        this.type = type;
        this.objectValue = value;
    }

    public JsonValue(JsonType type, float value)
    {
        this.type = type;
        this.numberValue = value;
    }

    #region JsonObject API

    /// <summary>
    /// Use JsonValue as JsonObject.
    /// </summary>
    public Dictionary<string, JsonValue> AsObject()
    {
        DebugTool.Assert(this.type == JsonType.Object, "JsonValue type is not Object !");
        return this.objectValue as Dictionary<string, JsonValue>;
    }

    /// <summary>
    /// Use JsonValue as JsonObject and get JsonValue item by key.
    /// return null if not found key.
    /// </summary>
    public JsonValue AsObjectGet(string key)
    {
        DebugTool.Assert(this.type == JsonType.Object, "JsonValue type is not Object !");
        var dict = this.objectValue as Dictionary<string, JsonValue>;

        if (dict.TryGetValue(key, out JsonValue jsonValue))
        {
            return jsonValue;
        }

        return null;
    }

    /// <summary>
    /// Use JsonValue as JsonObject and get JsonObject item by key.
    /// return null if not found key.
    /// </summary>
    public Dictionary<string, JsonValue> AsObjectGetObject(string key)
    {
        var jsonValue = this.AsObjectGet(key);

        if (jsonValue != null)
        {
            return jsonValue.AsObject();
        }

        return null;
    }

    /// <summary>
    /// Use JsonValue as JsonObject and get JsonArray item by key.
    /// return null if not found key.
    /// </summary>
    public List<JsonValue> AsObjectGetArray(string key)
    {
        var jsonValue = this.AsObjectGet(key);

        if (jsonValue != null)
        {
            return jsonValue.AsArray();
        }

        return null;
    }

    /// <summary>
    /// Use JsonValue as JsonObject and get string item by key.
    /// return null if not found key.
    /// </summary>
    public string AsObjectGetString(string key)
    {
        var jsonValue = this.AsObjectGet(key);

        if (jsonValue != null)
        {
            return jsonValue.AsString();
        }

        return null;
    }

    /// <summary>
    /// Use JsonValue as JsonObject and get float item by key.
    /// return defaultValue if not found key.
    /// </summary>
    public float AsObjectGetFloat(string key, float defaultValue)
    {
        var jsonValue = this.AsObjectGet(key);

        if (jsonValue != null)
        {
            return jsonValue.AsFloat();
        }

        return defaultValue;
    }

    /// <summary>
    /// Use JsonValue as JsonObject and get float item by key.
    /// </summary>
    public float AsObjectGetFloat(string key)
    {
        return this.AsObjectGet(key).AsFloat();
    }

    /// <summary>
    /// Use JsonValue as JsonObject and get int item by key.
    /// return defaultValue if not found key.
    /// </summary>
    public int AsObjectGetInt(string key, int defaultValue)
    {
        var jsonValue = this.AsObjectGet(key);

        if (jsonValue != null)
        {
            return jsonValue.AsInt();
        }

        return defaultValue;
    }

    /// <summary>
    /// Use JsonValue as JsonObject and get int item by key.
    /// </summary>
    public int AsObjectGetInt(string key)
    {
        return this.AsObjectGet(key).AsInt();
    }

    /// <summary>
    /// Use JsonValue as JsonObject and get bool item by key.
    /// return defaultValue if not found key.
    /// </summary>
    public bool AsObjectGetBool(string key, bool defaultValue)
    {
        var jsonValue = this.AsObjectGet(key);

        if (jsonValue != null)
        {
            return jsonValue.AsBool();
        }

        return defaultValue;
    }

    /// <summary>
    /// Use JsonValue as JsonObject and get bool item by key.
    /// </summary>
    public bool AsObjectGetBool(string key)
    {
        return this.AsObjectGet(key).AsBool();
    }

    /// <summary>
    /// Use JsonValue as JsonObject and check null item by key.
    /// </summary>
    public bool AsObjectGetIsNull(string key)
    {
        var jsonValue = this.AsObjectGet(key);

        if (jsonValue != null)
        {
            return jsonValue.IsNull();
        }

        return false;
    }

    #endregion

    #region JsonArray API

    /// <summary>
    /// Use JsonValue as JsonArray.
    /// </summary>
    public List<JsonValue> AsArray()
    {
        DebugTool.Assert(this.type == JsonType.Array, "JsonValue type is not Array !");
        return this.objectValue as List<JsonValue>;
    }

    /// <summary>
    /// Use JsonValue as JsonArray and get JsonValue item by index.
    /// </summary>
    public JsonValue AsArrayGet(int index)
    {
        DebugTool.Assert(this.type == JsonType.Array, "JsonValue type is not Array !");
        return (this.objectValue as List<JsonValue>)[index];
    }

    /// <summary>
    /// Use JsonValue as JsonArray and get JsonObject item by index.
    /// </summary>
    public Dictionary<string, JsonValue> AsArrayGetObject(int index)
    {
        return this.AsArrayGet(index).AsObject();
    }

    /// <summary>
    /// Use JsonValue as JsonArray and get JsonArray item by index.
    /// </summary>
    public List<JsonValue> AsArrayGetArray(int index)
    {
        return this.AsArrayGet(index).AsArray();
    }

    /// <summary>
    /// Use JsonValue as JsonArray and get string item by index.
    /// </summary>
    public string AsArrayGetString(int index)
    {
        return this.AsArrayGet(index).AsString();
    }

    /// <summary>
    /// Use JsonValue as JsonArray and get float item by index.
    /// </summary>
    public float AsArrayGetFloat(int index)
    {
        return this.AsArrayGet(index).AsFloat();
    }

    /// <summary>
    /// Use JsonValue as JsonArray and get int item by index.
    /// </summary>
    public int AsArrayGetInt(int index)
    {
        return this.AsArrayGet(index).AsInt();
    }

    /// <summary>
    /// Use JsonValue as JsonArray and get bool item by index.
    /// </summary>
    public bool AsArrayGetBool(int index)
    {
        return this.AsArrayGet(index).AsBool();
    }

    /// <summary>
    /// Use JsonValue as JsonArray and check null item by index.
    /// </summary>
    public bool AsArrayGetIsNull(int index)
    {
        return this.AsArrayGet(index).IsNull();
    }

    #endregion

    #region Other Json Value API

    /// <summary>
    /// Get JsonValue as string.
    /// </summary>
    public string AsString()
    {
        DebugTool.Assert(this.type == JsonType.String, "JsonValue type is not String !");
        return this.objectValue as string;
    }

    /// <summary>
    /// Get JsonValue as float.
    /// </summary>
    public float AsFloat()
    {
        DebugTool.Assert(this.type == JsonType.Number, "JsonValue type is not Number !");
        return this.numberValue;
    }

    /// <summary>
    /// Get JsonValue as int.
    /// </summary>
    public int AsInt()
    {
        DebugTool.Assert(this.type == JsonType.Number, "JsonValue type is not Number !");
        return (int) this.numberValue;
    }

    /// <summary>
    /// Get JsonValue as bool.
    /// </summary>
    public bool AsBool()
    {
        DebugTool.Assert(this.type == JsonType.Bool, "JsonValue type is not Bool !");
        return this.numberValue > 0.0f;
    }

    /// <summary>
    /// Whether JsonValue is null ?
    /// </summary>
    public bool IsNull()
    {
        return this.type == JsonType.Null;
    }

    #endregion
}
/// <summary>
/// Minimal assertion helper: throws an Exception with a formatted
/// message when the condition does not hold.
/// </summary>
internal static class DebugTool
{
    public static void Assert(bool condition, string msg, params object[] args)
    {
        if (!condition)
        {
            throw new Exception(string.Format(msg, args));
        }
    }
}
}
| scottcgi/MojoJson |
<|start_filename|>css/main.css<|end_filename|>
*,
*::before,
*::after {
box-sizing: border-box; }
body {
height: 100vh;
margin: 0;
font-family: "Mulish", sans-serif;
color: #131336;
background-color: #fff;
display: grid;
grid-template-rows: 6.25em 1fr;
grid-template-columns: 11.75em 1fr 19.5em;
grid-template-areas: "header_nav header_main header_aside" "nav main aside" "nav main aside";
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
text-rendering: optimizeLegibility; }
/* NOTE(review): presumably a design-reference overlay used during development
   (raise opacity to compare the layout against art_reference/web.png);
   opacity 0.0 keeps it invisible — confirm it can stay in production CSS. */
body::after {
  content: "";
  background: url(../art_reference/web.png);
  background-size: auto 100%;
  background-repeat: no-repeat;
  opacity: 0.0;
  top: 0;
  left: 0;
  bottom: 0;
  right: 0;
  position: absolute;
  z-index: 1; }
input {
font-family: "Mulish", sans-serif; }
input:focus,
select:focus,
textarea:focus,
button:focus {
outline: none; }
ul {
list-style: none;
padding: 0;
margin: 0; }
textarea:focus,
input:focus {
outline: none; }
a {
text-decoration: none;
color: #131336; }
nav,
main,
aside {
display: grid;
grid-template-rows: 1fr min-content; }
header {
padding: 0.7em 1.75em 0 1.75em;
align-items: center;
z-index: 2; }
.header_nav {
grid-area: header_nav;
display: grid; }
.header_main {
grid-area: header_main;
background-color: #f5f5fb;
display: grid;
grid-template-columns: 1fr max-content min-content;
gap: 1em;
border-left: 1px solid #e3e3eb;
border-right: 1px solid #e3e3eb; }
.header_aside {
grid-area: header_aside;
display: grid;
grid-template-columns: 1fr min-content; }
aside {
grid-area: aside; }
.logo {
display: grid;
grid-template-columns: min-content 1fr;
align-items: center;
gap: 1em;
font-size: 1.1rem;
font-family: "Ubuntu", Helvetica, sans-serif; }
.logo img {
width: 1.3em; }
.logo span {
font-size: 1.1rem;
font-weight: 600; }
.header_main .title {
font-weight: 700;
font-size: 0.9rem; }
.header_main .date {
font-size: 0.8rem;
color: #9a9abe; }
.header_main i {
font-size: 0.9rem;
color: #5e81f4;
background-color: #e5e9f9;
padding: 0.75em;
border-radius: 0.5em; }
.header_aside .title {
font-size: 0.8rem;
font-weight: 600; }
.header_aside i {
color: #b1b6d1; }
nav {
grid-area: nav;
grid-template-rows: 1fr min-content;
align-items: center;
margin-bottom: 1em; }
nav ul {
display: grid;
z-index: 2;
gap: 3em;
font-size: 0.8rem;
margin-bottom: 3em;
padding: 0 0 0 2.5em; }
nav ul li {
display: grid; }
nav ul a {
display: grid;
grid-template-columns: min-content 1fr;
gap: 2em;
font-weight: 600;
align-items: center;
color: #b1b6d1; }
nav ul li.active a {
color: #131336; }
nav ul li.active::after {
content: '';
border-right: 3px solid #5e81f4;
position: absolute;
height: 1.5em;
left: 14.6em; }
nav ul li.active i {
color: #5e81f4; }
nav ul li .messageCount {
display: flex;
justify-content: center;
align-items: center;
width: 1.5em;
height: 1.5em;
border-radius: 2em;
position: absolute;
font-size: 0.5rem;
background-color: #ff606d;
color: #fff;
margin-left: 1em;
margin-top: -.5em; }
nav ul i {
font-size: 1rem; }
.upgradeContainer {
z-index: 2;
display: grid;
grid-template-rows: min-content min-content min-content;
gap: 2em;
justify-content: center;
text-align: center;
align-items: center;
background-color: #f5f5fb;
border-radius: 1em;
margin: 1em;
padding: 1em 2em;
font-size: 0.75rem;
color: #b1b6d1; }
.upgradeContainer b {
color: #131336;
font-weight: 700; }
.upgradeContainer svg {
width: 4.5em;
height: auto;
justify-self: center;
position: relative;
margin-top: -3em; }
.upgradeContainer a {
background-color: #5e81f4;
padding: 0.75em 1em;
border-radius: 0.75em;
color: #fff; }
main {
grid-area: main;
background-color: #f5f5fb;
border-left: 1px solid #e3e3eb;
border-right: 1px solid #e3e3eb;
display: grid;
grid-template-rows: min-content min-content min-content min-content;
grid-template-columns: 1fr 1fr;
padding: 0.3em 2em;
gap: 1.5em 1em; }
main .welcomeBack {
grid-column: auto / 2 span;
display: grid;
grid-template-columns: 1fr 1fr;
align-items: center;
background-color: #f7e5e9;
border-radius: 1em;
z-index: 2;
padding: 0 3em; }
main .welcomeBack svg {
position: relative;
justify-self: end;
width: 17.5em;
margin-top: -5em;
margin-bottom: -2em; }
main .welcomeBack .textContainer {
display: grid;
grid-template-rows: min-content min-content;
gap: 1em; }
main .welcomeBack .title {
font-size: 1.25rem;
font-weight: 600;
color: #ff606d; }
main .welcomeBack .subtitle {
font-size: 0.8rem;
color: #131336; }
main .latestResults,
main .timeSpent {
display: grid;
grid-template-rows: min-content 1fr;
gap: 2em;
background-color: #fff;
border-radius: 0.75em;
padding: 1.5em;
z-index: 2 !important; }
.header {
display: flex;
justify-content: space-between;
align-items: center; }
.headerText {
font-size: 0.8rem;
font-weight: 800; }
main .rightElement,
main .rightElement {
font-size: 0.7rem; }
.latestResults .container ul {
display: grid;
gap: 0.75em; }
.latestResults .container li {
display: grid;
grid-template-columns: max-content min-content max-content 1fr max-content;
align-items: center;
gap: 0.75em;
font-size: 0.7rem; }
.latestResults .container hr {
width: 100%;
height: 1px;
border: none;
background-color: #e1e2eb; }
.latestResults .unitText,
.latestResults .unitSeperator {
font-weight: 600; }
.latestResults .field {
color: #b1b6d1; }
.progressContainer {
width: 100%;
height: 0.4em;
background-color: #e1e2eb;
border-radius: 0.4em; }
.latestResults .progressContainer .progress,
.languages .progressContainer .progress {
background-color: #5e81f4;
border-radius: 0.4em;
height: 100%; }
.latestResults .progressContainer .progress.red {
background-color: #ff606d; }
.latestResults .progressText {
font-weight: 600;
color: #5e81f4; }
.latestResults .progressText.red {
color: #ff606d; }
.timeSpent select {
background: none;
border: none; }
.timeSpent .container {
display: grid;
grid-template-columns: repeat(7, 1fr);
color: #b1b6d1; }
.timeSpent .day {
font-size: 0.7rem;
display: grid;
grid-template-rows: min-content 1fr;
gap: 2em;
height: 100%;
text-align: center; }
.timeSpent .progressContainer {
width: 0.4em;
height: 100%;
border-radius: 0.4em;
background-color: #5e81f4;
justify-self: center; }
/* NOTE(review): CSS :nth-child() is 1-based, so these :nth-child(0) rules can
   never match an element — they are dead code. The seven visible day columns
   are already covered by the :nth-child(1) through :nth-child(7) rules below. */
.day:nth-child(0) .progressContainer .vocabulary {
  background-color: #ff808b;
  height: 15%; }

.day:nth-child(0) .progressContainer .grammar {
  background-color: #4d4cac;
  height: 4%; }

.day:nth-child(0) .progressContainer .listening {
  background-color: #5e81f4;
  height: 0%; }

.day:nth-child(0) .progressContainer .writing {
  background-color: #c8c9e9;
  height: 81%; }
.day:nth-child(1) .progressContainer .vocabulary {
background-color: #ff808b;
height: 2%; }
.day:nth-child(1) .progressContainer .grammar {
background-color: #4d4cac;
height: 4%; }
.day:nth-child(1) .progressContainer .listening {
background-color: #5e81f4;
height: 16%; }
.day:nth-child(1) .progressContainer .writing {
background-color: #c8c9e9;
height: 78%; }
.day:nth-child(2) .progressContainer .vocabulary {
background-color: #ff808b;
height: 14%; }
.day:nth-child(2) .progressContainer .grammar {
background-color: #4d4cac;
height: 2%; }
.day:nth-child(2) .progressContainer .listening {
background-color: #5e81f4;
height: 9%; }
.day:nth-child(2) .progressContainer .writing {
background-color: #c8c9e9;
height: 75%; }
.day:nth-child(3) .progressContainer .vocabulary {
background-color: #ff808b;
height: 46%; }
.day:nth-child(3) .progressContainer .grammar {
background-color: #4d4cac;
height: 5%; }
.day:nth-child(3) .progressContainer .listening {
background-color: #5e81f4;
height: 8%; }
.day:nth-child(3) .progressContainer .writing {
background-color: #c8c9e9;
height: 41%; }
.day:nth-child(4) .progressContainer .vocabulary {
background-color: #ff808b;
height: 79%; }
.day:nth-child(4) .progressContainer .grammar {
background-color: #4d4cac;
height: 1%; }
.day:nth-child(4) .progressContainer .listening {
background-color: #5e81f4;
height: 3%; }
.day:nth-child(4) .progressContainer .writing {
background-color: #c8c9e9;
height: 17%; }
.day:nth-child(5) .progressContainer .vocabulary {
background-color: #ff808b;
height: 7%; }
.day:nth-child(5) .progressContainer .grammar {
background-color: #4d4cac;
height: 44%; }
.day:nth-child(5) .progressContainer .listening {
background-color: #5e81f4;
height: 30%; }
.day:nth-child(5) .progressContainer .writing {
background-color: #c8c9e9;
height: 19%; }
.day:nth-child(6) .progressContainer .vocabulary {
background-color: #ff808b;
height: 95%; }
.day:nth-child(6) .progressContainer .grammar {
background-color: #4d4cac;
height: 2%; }
.day:nth-child(6) .progressContainer .listening {
background-color: #5e81f4;
height: 1%; }
.day:nth-child(6) .progressContainer .writing {
background-color: #c8c9e9;
height: 2%; }
.day:nth-child(7) .progressContainer .vocabulary {
background-color: #ff808b;
height: 5%; }
.day:nth-child(7) .progressContainer .grammar {
background-color: #4d4cac;
height: 11%; }
.day:nth-child(7) .progressContainer .listening {
background-color: #5e81f4;
height: 9%; }
.day:nth-child(7) .progressContainer .writing {
background-color: #c8c9e9;
height: 75%; }
.timeSpent .legendContainer {
display: grid;
grid-template-columns: repeat(4, 1fr);
font-size: 0.7rem;
justify-content: center;
gap: 0.75em;
color: #b1b6d1; }
.circle {
width: 0.7em;
height: 0.7em;
border-radius: 0.7em;
background-color: #ff808b; }
.vocabulary .circle {
background-color: #ff808b; }
.grammar .circle {
background-color: #4d4cac; }
.listening .circle {
background-color: #5e81f4; }
.writing .circle {
background-color: #c8c9e9; }
.timeSpent .legendContainer .legend {
display: grid;
grid-template-columns: min-content 1fr;
align-items: center;
gap: 0.75em; }
main .courses {
z-index: 2;
grid-column: auto / 2 span; }
main .coursesContainer {
grid-column: auto / 2 span;
display: grid;
grid-template-columns: 1fr 1fr 1fr;
z-index: 2;
gap: 0.75em; }
main .course {
display: grid;
grid-template-areas: "level field field" "level type arrow";
grid-template-columns: min-content 1fr min-content;
grid-template-rows: min-content min-content;
background-color: #4d4cac;
color: #fff;
gap: 0.75em;
padding: 1em;
align-items: center;
border-radius: 0.75em;
font-size: 0.8rem; }
main .course:nth-child(2) {
background-color: #9698d6; }
main .course:nth-child(3) {
background-color: #ff808b; }
.level {
grid-area: level;
background-color: rgba(255, 255, 255, 0.1);
padding: 1em;
border-radius: 0.75em;
font-weight: 700; }
.course .field {
grid-area: field;
font-size: 0.7rem;
opacity: .5; }
.course .type {
grid-area: type; }
.course .arrow {
grid-area: arrow;
justify-self: end; }
aside {
display: grid;
grid-template-rows: min-content min-content min-content;
gap: 1em; }
aside .profile {
display: grid;
grid-template-rows: repeat(3, min-content);
gap: 0.75em;
justify-content: center;
text-align: center;
align-items: center;
z-index: 2; }
aside .profile svg {
width: 8em;
height: auto; }
aside .profile .add {
width: 2em;
height: 2em;
border-radius: 2em;
border: 3px solid #fff;
background-color: #5e81f4;
display: flex;
align-items: center;
justify-content: center;
color: #fff;
position: relative;
margin-top: -5em;
justify-self: end; }
aside .profile .name {
font-weight: 800;
z-index: 2; }
aside .profile .title {
font-size: 0.9rem;
color: #b1b6d1; }
aside .languages {
z-index: 2;
padding: 1.5em;
display: grid;
gap: 0.75em; }
aside .language {
display: grid;
grid-template-areas: "level text progressContainer" "level levelDescription progressContainer";
grid-template-columns: min-content 1fr .5fr;
grid-template-rows: min-content min-content;
gap: 0.75em;
align-items: center;
border-radius: 0.75em;
font-size: 0.8rem; }
aside .reminders .language {
grid-template-areas: "level text text" "level levelDescription levelDescription"; }
.language .level {
grid-area: level;
background-color: #eef2fd;
border-radius: 0.75em; }
.language .progressContainer {
grid-row: auto/ span 2; }
.language .text {
grid-area: text;
font-weight: 700; }
.language .levelDescription {
grid-area: levelDescription;
font-size: 0.7rem; }
.reminders {
display: grid;
grid-template-rows: min-content 1fr;
gap: 1em;
padding: 1em;
z-index: 2; }
.reminders .notifications {
color: #b1b6d1;
font-size: 1.25rem; }
.reminders .circle {
width: 0.5em;
height: 0.5em;
border: 2px solid #fff;
margin-top: -1em;
margin-left: 0.5em;
position: relative; }
.level.message {
background-color: #eef2fd; }
.level.danger {
background-color: #f7e5e9; }
.level.message i {
color: #5e81f4; }
.level.danger i {
color: #ff606d; }
@media screen and (max-width: 1210px) {
/* Laptop (Smaller) */
body {
height: 100%; }
main .latestResults,
main .timeSpent {
grid-column: auto /2 span; }
nav {
grid-template-rows: min-content min-content; }
.timeSpent .progressContainer {
height: 12em; }
.coursesContainer .course {
grid-column: auto / 3 span; } }
@media screen and (max-width: 1125px) {
/* Laptop (Smaller) */
body {
grid-template-columns: 11.75em 1fr min-content;
grid-template-areas: "header_nav header_main" "nav main" "nav main"; }
main {
grid-column: auto / 2 span;
border-right: none;
margin-bottom: 6.25em; }
.header_main {
border-right: none; }
aside {
display: none; }
.header_aside {
display: none; } }
@media screen and (max-width: 768px) {
/* Tablet (768px) */
body {
grid-template-columns: 11.75em 1fr;
grid-template-areas: "header_main header_main" "main main" "nav nav";
background-color: #f5f5fb; }
.header_nav,
nav .upgradeContainer,
nav ul li.active::after,
nav ul li.learningPlan {
display: none; }
nav {
grid-column: auto / 3 span;
position: fixed;
bottom: 0;
z-index: 3;
background-color: #fff;
width: 100%;
margin: 0;
height: auto;
border-top: 1px solid #e3e3eb; }
nav ul {
display: grid;
grid-template-columns: repeat(5, 1fr);
padding: 1em;
margin-bottom: 0;
gap: 0.75em; }
nav ul a {
grid-template-rows: min-content min-content;
grid-template-columns: 1fr;
place-items: center;
gap: 0.75em; }
nav ul li .messageCount {
position: relative;
margin-top: -12em; } }
@media screen and (max-width: 530px) {
body {
grid-template-rows: 5em 1fr; }
main,
.header_main {
padding: 0.75em; }
.header_main {
grid-template-columns: min-content 1fr min-content; }
.header_main .date {
text-align: right; }
main .welcomeBack {
padding: 0 0.75em; }
main .welcomeBack .textContainer {
padding: 0.75em;
grid-column: auto / 2 span;
gap: 0.5em; }
.welcomeBack svg {
display: none; }
.header_aside,
.header_nav {
display: none; } }
<|start_filename|>js/main.js<|end_filename|>
// JS
/**
 * Format a date as e.g. "15 Jan 2020, Wednesday" using the 'en' locale.
 * @param {Date} [date=new Date()] - the date to format (defaults to now).
 * @returns {string} formatted date string.
 */
function formatTitleDate(date = new Date()) {
  // Collect the named parts (day/month/year/weekday) from formatToParts,
  // skipping the literal separators the formatter emits between them.
  const { day, month, year, weekday } = new Intl.DateTimeFormat('en', {
    day: '2-digit',
    month: 'short',
    year: 'numeric',
    weekday: 'long',
  }).formatToParts(date).reduce((acc, part) => {
    // strict equality — avoids type-coercing comparison
    if (part.type !== 'literal') {
      acc[part.type] = part.value;
    }
    return acc;
  }, Object.create(null));

  return `${day} ${month} ${year}, ${weekday}`;
}
document.getElementsByClassName('date')[0].textContent = formatTitleDate(); | ozcanzaferayan/langlearn-dashboard-css-grid |
<|start_filename|>src/crudeditor-lib/common/workerSagas/redirect.js<|end_filename|>
import { call, put } from 'redux-saga/effects';
import {
VIEW_SEARCH,
VIEW_CREATE,
VIEW_EDIT,
VIEW_SHOW,
VIEW_ERROR
} from '../constants';
import {
VIEW_REDIRECT_REQUEST as SEARCH_VIEW_REDIRECT_REQUEST,
VIEW_REDIRECT_FAIL as SEARCH_VIEW_REDIRECT_FAIL
} from '../../views/search/constants';
import {
VIEW_REDIRECT_REQUEST as CREATE_VIEW_REDIRECT_REQUEST,
VIEW_REDIRECT_FAIL as CREATE_VIEW_REDIRECT_FAIL
} from '../../views/create/constants';
import {
VIEW_REDIRECT_REQUEST as EDIT_VIEW_REDIRECT_REQUEST,
VIEW_REDIRECT_FAIL as EDIT_VIEW_REDIRECT_FAIL
} from '../../views/edit/constants';
import {
VIEW_REDIRECT_REQUEST as SHOW_VIEW_REDIRECT_REQUEST,
VIEW_REDIRECT_FAIL as SHOW_VIEW_REDIRECT_FAIL
} from '../../views/show/constants';
import {
VIEW_REDIRECT_REQUEST as ERROR_VIEW_REDIRECT_REQUEST,
VIEW_REDIRECT_FAIL as ERROR_VIEW_REDIRECT_FAIL
} from '../../views/error/constants';
// Per-view action-type lookup: the worker saga dispatches the REQUEST/FAIL
// constants belonging to whichever view spawned it (meta.spawner).
const VIEW_REDIRECT_REQUEST = {
  [VIEW_SEARCH]: SEARCH_VIEW_REDIRECT_REQUEST,
  [VIEW_CREATE]: CREATE_VIEW_REDIRECT_REQUEST,
  [VIEW_EDIT]: EDIT_VIEW_REDIRECT_REQUEST,
  [VIEW_SHOW]: SHOW_VIEW_REDIRECT_REQUEST,
  [VIEW_ERROR]: ERROR_VIEW_REDIRECT_REQUEST
};

// Failure counterpart of the map above.
const VIEW_REDIRECT_FAIL = {
  [VIEW_SEARCH]: SEARCH_VIEW_REDIRECT_FAIL,
  [VIEW_CREATE]: CREATE_VIEW_REDIRECT_FAIL,
  [VIEW_EDIT]: EDIT_VIEW_REDIRECT_FAIL,
  [VIEW_SHOW]: SHOW_VIEW_REDIRECT_FAIL,
  [VIEW_ERROR]: ERROR_VIEW_REDIRECT_FAIL
};
/*
 * XXX: in case of failure, a worker saga must dispatch an appropriate action and exit by throwing error(s).
 */
// Worker saga: performs a soft redirect from the spawning view to the
// requested view, bracketing the attempt with that view's
// VIEW_REDIRECT_REQUEST / VIEW_REDIRECT_FAIL actions.
export default function*({
  modelDefinition,
  softRedirectSaga,
  action: {
    payload: {
      view: {
        name: redirectViewName,
        state: redirectViewState
      },
      ...additionalArgs
    },
    meta
  }
}) {
  // The spawning view determines which action-type constants to dispatch.
  const currentViewName = meta.spawner;

  yield put({
    type: VIEW_REDIRECT_REQUEST[currentViewName],
    meta
  });

  try {
    yield call(softRedirectSaga, {
      viewName: redirectViewName,
      viewState: redirectViewState,
      ...additionalArgs
    });
  } catch (err) {
    // Report the failure to the spawning view, then rethrow per the contract above.
    yield put({
      type: VIEW_REDIRECT_FAIL[currentViewName],
      payload: err,
      error: true,
      meta
    });

    throw err;
  }
}
<|start_filename|>src/components/SearchForm/index.js<|end_filename|>
import React from 'react';
import PropTypes from 'prop-types';
// import isEqual from 'lodash/isEqual';
import Button from 'react-bootstrap/lib/Button';
import Form from 'react-bootstrap/lib/Form';
import FormGroup from 'react-bootstrap/lib/FormGroup';
import ControlLabel from 'react-bootstrap/lib/ControlLabel';
import { getFieldLabel } from '../lib';
import FieldErrorLabel from '../FieldErrors/FieldErrorLabel';
import WithFieldErrors from '../FieldErrors/WithFieldErrorsHOC';
import './SearchForm.less';
// Search filter form: renders one control per searchable field and submits
// the accumulated form filter. Per-field validation errors are supplied and
// toggled by the WithFieldErrors HOC (see export at the bottom of the file).
class SearchForm extends React.Component {
  static propTypes = {
    model: PropTypes.shape({
      data: PropTypes.shape({
        formFilter: PropTypes.object.isRequired,
        formattedFilter: PropTypes.object.isRequired,
        searchableFields: PropTypes.arrayOf(PropTypes.object),
        resultFilter: PropTypes.object.isRequired
      }).isRequired,
      actions: PropTypes.objectOf(PropTypes.func)
    }).isRequired,
    toggledFieldErrors: PropTypes.object.isRequired,
    toggleFieldErrors: PropTypes.func.isRequired
  }

  static contextTypes = {
    i18n: PropTypes.object
  };

  // Submit the current form filter as a search request.
  handleSubmit = e => {
    e.preventDefault();

    this.props.model.actions.searchInstances({
      filter: this.props.model.data.formFilter
    });
  }

  // Hide the field's errors while the user edits it, then propagate the value.
  handleFormFilterUpdate = fieldName => newFieldValue => {
    this.props.toggleFieldErrors(false, fieldName);

    this.props.model.actions.updateFormFilter({
      name: fieldName,
      value: newFieldValue
    });
  }

  // Reveal the field's errors once it loses focus.
  handleFormFilterBlur = fieldName => _ => this.props.toggleFieldErrors(true, fieldName);

  // With a name: that field's toggled errors (array, possibly empty).
  // Without a name: whether ANY field currently has toggled errors (boolean).
  fieldErrors = name => name ?
    (this.props.toggledFieldErrors[name] || []) :
    !!Object.keys(this.props.toggledFieldErrors).length

  render() {
    const {
      model: {
        data: {
          formattedFilter,
          searchableFields,
          // formFilter,
          // resultFilter
        },
        actions: {
          resetFormFilter
        }
      }
    } = this.props;

    const { i18n } = this.context;

    return (
      <Form horizontal={true} onSubmit={this.handleSubmit} className="clearfix crud--search-form">
        <div className="crud--search-form__controls">
          {
            searchableFields.map(({ name, component: Component, valuePropName }) => (
              <FormGroup
                key={`form-group-${name}`}
                controlId={`fg-${name}`}
                validationState={this.fieldErrors(name).length ? 'error' : null}
                className="crud--search-form__form-group"
              >
                <ControlLabel>
                  {
                    getFieldLabel({ i18n, name })
                  }
                </ControlLabel>
                <Component
                  {...{ [valuePropName]: formattedFilter[name] }}
                  onChange={this.handleFormFilterUpdate(name)}
                  onBlur={this.handleFormFilterBlur(name)}
                />
                <FieldErrorLabel errors={this.fieldErrors(name)} fieldName={name}/>
              </FormGroup>
            ))
          }
        </div>
        <div className="crud--search-form__submit-group">
          <Button
            bsStyle='link'
            onClick={resetFormFilter}
          >
            {i18n.getMessage('common.CrudEditor.reset.button')}
          </Button>
          <Button
            bsStyle="primary"
            type="submit"
            ref={ref => (this.submitBtn = ref)}
            disabled={this.fieldErrors()}
          >
            {i18n.getMessage('common.CrudEditor.search.button')}
          </Button>
        </div>
      </Form>
    );
  }
}
export default WithFieldErrors(SearchForm);
<|start_filename|>src/crudeditor-lib/views/search/constants.js<|end_filename|>
import { VIEW_SEARCH } from '../../common/constants';
const namespace = VIEW_SEARCH;

// View identity and default search parameters.
export const VIEW_NAME = VIEW_SEARCH;
export const DEFAULT_OFFSET = 0;
export const DEFAULT_ORDER = 'asc';

/* ████████████████████████████████████████████
 * ███ ACTION TYPES (in alphabetical order) ███
 * ████████████████████████████████████████████
 */
export const ALL_INSTANCES_SELECT = namespace + '/ALL_INSTANCES_SELECT';
export const ALL_INSTANCES_DESELECT = namespace + '/ALL_INSTANCES_DESELECT';

export const FORM_FILTER_RESET = namespace + '/FORM_FILTER_RESET';
export const FORM_FILTER_UPDATE = namespace + '/FORM_FILTER_UPDATE';

export const GOTO_PAGE_UPDATE = namespace + '/GOTO_PAGE_UPDATE';

export const INSTANCES_SEARCH = namespace + '/INSTANCES_SEARCH';
export const INSTANCES_SEARCH_FAIL = namespace + '/INSTANCES_SEARCH_FAIL';
export const INSTANCES_SEARCH_REQUEST = namespace + '/INSTANCES_SEARCH_REQUEST';
export const INSTANCES_SEARCH_SUCCESS = namespace + '/INSTANCES_SEARCH_SUCCESS';

export const INSTANCE_SELECT = namespace + '/INSTANCE_SELECT';
export const INSTANCE_DESELECT = namespace + '/INSTANCE_DESELECT';

export const VIEW_INITIALIZE_REQUEST = namespace + '/VIEW_INITIALIZE_REQUEST';
export const VIEW_INITIALIZE_FAIL = namespace + '/VIEW_INITIALIZE_FAIL';
export const VIEW_INITIALIZE_SUCCESS = namespace + '/VIEW_INITIALIZE_SUCCESS';

export const VIEW_REDIRECT_REQUEST = namespace + '/VIEW_REDIRECT_REQUEST';
export const VIEW_REDIRECT_FAIL = namespace + '/VIEW_REDIRECT_FAIL';
export const VIEW_REDIRECT_SUCCESS = namespace + '/VIEW_REDIRECT_SUCCESS';

export const SEARCH_FORM_TOGGLE = namespace + '/SEARCH_FORM_TOGGLE';
<|start_filename|>src/crudeditor-lib/lib.js<|end_filename|>
import cloneDeep from 'lodash/cloneDeep';
import { checkModelDefinition } from './check-model';
import { getViewState as getSearchViewState, getUi as getSearchUi } from './views/search';
import { getViewState as getCreateViewState, getUi as getCreateUi } from './views/create';
import { getViewState as getEditViewState, getUi as getEditUi } from './views/edit';
import { getViewState as getShowViewState, getUi as getShowUi } from './views/show';
import { getViewState as getErrorViewState } from './views/error';
import {
DEFAULT_FIELD_TYPE,
VIEW_SEARCH,
VIEW_CREATE,
VIEW_EDIT,
VIEW_SHOW,
VIEW_ERROR,
PERMISSION_CREATE,
PERMISSION_EDIT,
PERMISSION_VIEW,
PERMISSION_DELETE
} from './common/constants';
// Dispatch table: maps the active view's name to the selector extracting
// that view's serializable state from the redux store.
const getViewState = {
  [VIEW_SEARCH]: getSearchViewState,
  [VIEW_CREATE]: getCreateViewState,
  [VIEW_EDIT]: getEditViewState,
  [VIEW_SHOW]: getShowViewState,
  [VIEW_ERROR]: getErrorViewState
};
// True when err is one of the native JS error types which indicate a
// programming bug rather than an application-level failure.
// see npm "bounce" module for details: https://github.com/hapijs/bounce
export const isSystemError = err => {
  const nativeErrorTypes = [
    EvalError,
    RangeError,
    ReferenceError,
    SyntaxError,
    TypeError,
    URIError
  ];

  for (const errorType of nativeErrorTypes) {
    if (err instanceof errorType) {
      return true;
    }
  }

  return false;
};
// Convert internal redux store state to the public "app state"
// ({ name, state }) handed to the owner application.
// Returns undefined while no view is active (e.g. before initialization).
export const storeState2appState = (storeState, modelDefinition) => {
  const { activeViewName } = storeState.common;

  if (!activeViewName) {
    return undefined;
  }

  return {
    name: activeViewName,
    // Deep-clone so the owner app cannot mutate the store through the result.
    state: cloneDeep(getViewState[activeViewName](storeState, modelDefinition))
  };
};
// Prepend `prefix` (plus a dot) to every message key of every language in a
// translations dictionary of shape { lang: { msgKey: text, ... }, ... }.
// Returns a new object; the input is not mutated.
export function getPrefixedTranslations(translations, prefix) {
  const prefixed = {};

  Object.keys(translations).forEach(lang => {
    const messages = translations[lang];
    prefixed[lang] = {};

    Object.keys(messages).forEach(msgKey => {
      prefixed[lang][`${prefix}.${msgKey}`] = messages[msgKey];
    });
  });

  return prefixed;
}
/**
 * Function 'isAllowed' returns permission for certain operation.
 * @param {{ view: <bool|func>, create: <bool|func>, delete: <bool|func>, edit: <bool|func> }} permissions
 * @param {string} operation - one of 'view', 'create', `edit`, 'delete'
 * @param {undefined|object} data - arg for permissions function, e.g. { instance } for per-instance permissions
 * @returns {boolean}
 *
 * NOTE: when the permission is a function and `data` is passed (even if
 * explicitly `undefined` — the check uses arguments.length, not a truthiness
 * test), the function is invoked twice: once with no argument (the "global"
 * permission) and once with `data` (the per-instance permission); both must
 * be truthy. With two arguments only the global check runs. When the
 * permission is a plain boolean it is returned as-is.
 */
export function isAllowed(permissions, operation, data) { // eslint-disable-line consistent-return
  if (permissions[operation] instanceof Function) {
    return arguments.length === 3 ?
      // global permission is enforced before checking data
      permissions[operation]() && permissions[operation](data) :
      permissions[operation]()
  }

  return permissions[operation]
}
// Filling modelDefinition with default values where necessary.
// Works on a deep copy so the caller's definition object stays untouched;
// returns the completed copy.
export function fillDefaults({ baseModelDefinition, i18n }) {
  const modelDefinition = cloneDeep(baseModelDefinition);

  // validate modelDefinition using 'prop-types'
  checkModelDefinition(modelDefinition);

  const fieldsMeta = modelDefinition.model.fields;

  Object.keys(fieldsMeta).forEach(fieldName => {
    const fieldMeta = fieldsMeta[fieldName];

    if (!fieldMeta.type) {
      fieldMeta.type = DEFAULT_FIELD_TYPE;
    }

    if (!fieldMeta.constraints) {
      fieldMeta.constraints = {};
    }
  });

  if (!modelDefinition.model.validate) {
    modelDefinition.model.validate = _ => true;
  }

  if (!modelDefinition.ui) {
    modelDefinition.ui = {};
  }

  const ui = modelDefinition.ui;

  if (!ui.instanceLabel) {
    ui.instanceLabel = ({ _objectLabel }) => _objectLabel;
  }

  if (!ui.customOperations) {
    ui.customOperations = _ => [];
  }

  // Any CRUD permission left undefined is treated as denied.
  const { crudOperations } = modelDefinition.permissions;

  [PERMISSION_CREATE, PERMISSION_EDIT, PERMISSION_VIEW, PERMISSION_DELETE].forEach(permission => {
    if (!Object.prototype.hasOwnProperty.call(crudOperations, permission)) {
      crudOperations[permission] = false;
    }
  });

  // Build the UI only for views the (global) permissions allow:
  // [view name, ui builder, permission gating that view].
  [
    [VIEW_SEARCH, getSearchUi, PERMISSION_VIEW],
    [VIEW_CREATE, getCreateUi, PERMISSION_CREATE],
    [VIEW_EDIT, getEditUi, PERMISSION_EDIT],
    [VIEW_SHOW, getShowUi, PERMISSION_VIEW]
  ].forEach(([viewName, buildUi, permission]) => {
    if (isAllowed(crudOperations, permission)) {
      ui[viewName] = buildUi({ modelDefinition, i18n });
    }
  });

  return modelDefinition;
}
<|start_filename|>src/crudeditor-lib/views/create/container.js<|end_filename|>
import React from 'react';
import { connect } from 'react-redux';
import Main from '../../../components/CreateMain';
import { softRedirectView } from '../../common/actions';
import { expandExternalOperation, expandCustomOperation } from '../lib';
import { VIEW_NAME } from './constants';
import { VIEW_SEARCH, PERMISSION_VIEW } from '../../common/constants';
import { isAllowed } from '../../lib';
import {
getViewModelData,
getViewState
} from './selectors';
import {
saveInstance,
selectTab,
validateInstanceField,
changeInstanceField,
saveAndNewInstance
} from './actions';
/**
 * react-redux mergeProps for the Create view container: combines state props,
 * dispatch props and ownProps ({ i18n }) into the single `viewModel` prop
 * consumed by the CreateMain component.
 */
const mergeProps = /* istanbul ignore next */ (
  {
    viewModelData: {
      unsavedChanges,
      ...restData
    },
    viewState,
    permissions: { crudOperations },
    customOperations,
    externalOperations,
    uiConfig
  },
  {
    softRedirectView,
    exitView,
    saveInstance,
    saveAndNewInstance,
    ...dispatchProps
  },
  { i18n }
) => ({
  viewModel: {
    uiConfig,

    data: {
      unsavedChanges,
      ...restData
    },

    actions: {
      ...dispatchProps,
      // Object spread of `false` is a harmless no-op (unlike array spread).
      ...(isAllowed(crudOperations, PERMISSION_VIEW) && { exitView })
    },

    /*
     * Operations requiering confirmation in case of unsaved changes
     * are supplied with "confirm" property containing an object with translation texts for Confirm Dialog.
     *
     * "show" property is removed from each custom/external operation
     * since operations with "show" set to "false" are not included in the result array.
     */
    operations: viewState ? [
      // BUG FIX: this spread previously used `cond && [{...}]`, which
      // evaluates to `false` when the "view" permission is denied — and
      // spreading `false` inside an ARRAY literal throws
      // "TypeError: false is not iterable". A ternary yielding [] is safe.
      ...(isAllowed(crudOperations, PERMISSION_VIEW) ? [{
        title: i18n.getMessage('common.CrudEditor.cancel.button'),
        handler: exitView,
        style: 'link',
        ...(!!unsavedChanges && {
          confirm: {
            message: i18n.getMessage('common.CrudEditor.unsaved.confirmation'),
            textConfirm: i18n.getMessage('common.CrudEditor.confirm.action'),
            textCancel: i18n.getMessage('common.CrudEditor.cancel.button')
          }
        })
      }] : []),
      ...[
        ...customOperations().map(expandCustomOperation({
          viewName: VIEW_NAME,
          viewState,
          softRedirectView
        })),
        ...externalOperations().map(expandExternalOperation({
          viewName: VIEW_NAME,
          viewState
        }))
      ].
        // expand* helpers return falsy values for hidden operations.
        filter(operation => operation).
        map(operation => unsavedChanges ?
          ({
            ...operation,
            confirm: {
              message: i18n.getMessage('common.CrudEditor.unsaved.confirmation'),
              textConfirm: i18n.getMessage('common.CrudEditor.confirm.action'),
              textCancel: i18n.getMessage('common.CrudEditor.cancel.button')
            }
          }) :
          operation
        ),
      {
        title: i18n.getMessage('common.CrudEditor.saveAndNew.button'),
        disabled: !unsavedChanges,
        handler: saveAndNewInstance
      },
      {
        title: i18n.getMessage('common.CrudEditor.save.button'),
        disabled: !unsavedChanges,
        handler: saveInstance,
        style: 'primary'
      }
    ] :
      [] // viewState is undefined when view is not initialized yet (ex. during Hard Redirect).
  }
});
// Connect the Create view: map store state + action creators into the
// `viewModel` prop (via mergeProps above) and render the CreateMain component.
export default connect(
  /* istanbul ignore next */
  (storeState, { modelDefinition, externalOperations, uiConfig }) => ({
    viewModelData: getViewModelData(storeState, modelDefinition),
    viewState: getViewState(storeState, modelDefinition),
    permissions: modelDefinition.permissions,
    customOperations: modelDefinition.ui.customOperations,
    externalOperations,
    uiConfig
  }), {
    // "Cancel" navigates back to the Search view via soft redirect.
    exitView: /* istanbul ignore next */ _ => softRedirectView({ name: VIEW_SEARCH }),
    saveInstance,
    selectTab,
    validateInstanceField,
    changeInstanceField,
    saveAndNewInstance,
    softRedirectView
  },
  mergeProps
)(
  /* istanbul ignore next */
  ({ viewModel }) => <Main model={viewModel} />
);
<|start_filename|>src/crudeditor-lib/i18n/fi.js<|end_filename|>
import exceptions from './exceptions/fi';
/* eslint-disable max-len */
// Finnish (fi) translations for the generic CRUD editor UI messages.
// Keys must mirror those of the other locale files (en/de/ru/...).
const common = {
  "common.CrudEditor.new.title": "Uusi",
  "common.CrudEditor.actions.tableHeader": "Toimenpiteitä",
  "common.CrudEditor.cancel.button": "Peruuta",
  "common.CrudEditor.close.button": "Sulje",
  "common.CrudEditor.create.button": "Luo",
  "common.CrudEditor.create.header": "Luo {modelName}",
  "common.CrudEditor.delete.button": "Poista",
  "common.CrudEditor.delete.confirmation": "Haluatko varmasti poistaa tämän rivin?",
  "common.CrudEditor.deleteSelected.button": "Poista valitut",
  "common.CrudEditor.deleteSelected.confirmation": "Haluatko varmasti poistaa valitut rivit?",
  "common.CrudEditor.duplicate.button": "Kaksoiskappale",
  "common.CrudEditor.duplicate.header": "Kaksoiskappale {modelName}",
  "common.CrudEditor.edit.button": "Muokkaa",
  "common.CrudEditor.edit.header": "Muokkaa {modelName}",
  "common.CrudEditor.message.ajax.loading": "Ole hyvä ja odota…",
  "common.CrudEditor.noAssociationEntriesFound.message": "Syötteet puuttuvat. Voit {1} uuden syötteen.",
  "common.CrudEditor.noItemsSelected.alert": "Rivejä ei ole valittu!",
  "common.CrudEditor.objectDeleteFailed.message": "Rivin poistaminen epäonnistui, se voi olla jo käytössä.",
  "common.CrudEditor.objectDeleted.message": "Rivi poistettu.",
  "common.CrudEditor.objectDuplicated.message": "Rivi kopioitu.",
  "common.CrudEditor.objectSaveFailed.message": "Rivin tallentaminen epäonnistui.",
  "common.CrudEditor.objectSaved.message": "Rivi luotu.",
  "common.CrudEditor.objectUpdated.message": "Rivi päivitetty.",
  "common.CrudEditor.objectsDeleteFailed.message": "Rivien {count} poistaminen epäonnistui, koska ne ovat jo käytössä.",
  "common.CrudEditor.objectsDeleteIsNoAllowed.message": "Joitakin rivejä ei voi poistaa turvallisuusrajoitteista johtuen.",
  "common.CrudEditor.objectsDeleted.message": "Rivit {labels} poistettu.",
  "common.CrudEditor.refresh.button": "Päivitä",
  "common.CrudEditor.reset.button": "Tyhjennä",
  "common.CrudEditor.revisions.button": "Revisiot",
  "common.CrudEditor.save.button": "Tallenna",
  "common.CrudEditor.saveAndNew.button": "Tallenna ja luo uusi",
  "common.CrudEditor.saveAndNext.button": "Tallenna ja siirry seuraavaan",
  "common.CrudEditor.search.all": "Kaikki",
  "common.CrudEditor.search.button": "Hae",
  "common.CrudEditor.search.header": "Hae {payload}",
  "common.CrudEditor.search.result.label": "Hakutulos",
  "common.CrudEditor.search.resultsPerPage": "Tuloksia sivulla",
  "common.CrudEditor.select.button": "Valitse",
  "common.CrudEditor.unsaved.confirmation": "Olet tehnyt muutoksia. Jos poistut sivulta, menetät muutokset.",
  "common.CrudEditor.show.button": "Tarkastele",
  "common.CrudEditor.show.header": "Tarkastele mallia {modelName}",
  "common.CrudEditor.export.button": "Vie",
  "common.CrudEditor.found.items.message": "{count} hakutulosta l\u00F6ytyi",
  "common.CrudEditor.range.from": "l\u00E4htien",
  "common.CrudEditor.range.to": "asti",
  "common.CrudEditor.confirm.action": "Vahvista",
  "common.CrudEditor.search.showSearchForm": "Näytä hakukentät",
  "common.CrudEditor.search.hideSearchForm": "Piilota hakukentät",
  "common.CrudEditor.pagination.goToPage": "Siirry"
}
/* eslint-enable max-len */

// Merge generic UI messages with exception translations; due to spread
// order, exception keys would override common ones on collision.
export default {
  ...common,
  ...exceptions
}
<|start_filename|>src/demo/models/contracts/index.js<|end_filename|>
import api from './api';
import DateRangeCellRender from './components/DateRangeCellRender';
import StatusField from './components/StatusField';
import translations from './i18n';
import CustomSpinner from './components/CustomSpinner';
import ContractReferenceSearch from './components/ContractReferenceSearch';
import CustomTabComponent from './components/CustomTabComponent';
import {
FIELD_TYPE_BOOLEAN,
FIELD_TYPE_DECIMAL,
FIELD_TYPE_INTEGER,
FIELD_TYPE_STRING,
FIELD_TYPE_STRING_DATE,
// BUILTIN_INPUT,
// BUILTIN_RANGE_INPUT,
VIEW_CREATE,
VIEW_EDIT,
VIEW_SHOW,
VIEW_SEARCH
} from '../../../crudeditor-lib';
// Field metadata of the demo "Contracts" model: field name -> { type,
// constraints, ... }. Fields without a `type` fall back to the editor's
// default field type; `parentContract` relies entirely on those defaults
// (presumably filled in by the library's fillDefaults — confirm there).
export const fields = {
  'contractId': {
    // Marks the instance-identifying field of the model.
    unique: true,
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 100,
      'required': true
    }
  },
  'description': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 100,
      'required': false,
      // Custom field-level validator: rejects values containing "booo"
      // (case-insensitive) by throwing an array of error descriptors.
      validate: /* istanbul ignore next */ (value, instance) => {
        if ((value || '').toLowerCase().indexOf('booo') !== -1) {
          const err = [{
            code: 400,
            // `id` is used to find translations for this particular error
            // define translations with the following key structure:
            // model.field.FIELD_NAME.error.ERROR_ID, where ERROR_ID is `id` defined below
            id: 'forbiddenWord',
            // `message` is a default message in case translation is not found
            message: 'Description cannot contain `booo`!',
            // optional `payload` for error translations
            // here you can define props which you use in i18n messages
            // example: for i18n message `Hello {name}! This field cannot exceed {maxValue}`
            // define `name` and `maxValue` props
            args: {
              forbiddenWord: 'BOOO'
            }
          }];
          throw err;
        }
        return true;
      }
    }
  },
  'testNumberTypeField': {
    'type': FIELD_TYPE_DECIMAL,
    'constraints': {
      'required': false,
      'max': 9999999
    }
  },
  'email': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      email: true
    }
  },
  'url': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      url: true
    }
  },
  'testRegexp': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      matches: /^hello/i
    }
  },
  // 'contractBoilerplates': {
  //   'type': 'collection',
  //   'constraints': {
  //     'required': false
  //   }
  // },
  'hierarchyCode': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 100,
      'required': false
    }
  },
  'termsOfPaymentId': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 20,
      'required': false
    }
  },
  'termsOfDeliveryId': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 20,
      'required': false
    }
  },
  'freeShippingBoundary': {
    'type': FIELD_TYPE_INTEGER,
    'constraints': {
      'min': 0,
      'max': 999999999,
      'required': false
    }
  },
  'createdOn': {
    'type': FIELD_TYPE_STRING_DATE,
    'constraints': {
      'required': true
    }
  },
  'changedOn': {
    'type': FIELD_TYPE_STRING_DATE,
    'constraints': {
      'required': true
    }
  },
  // 'contractedCatalogs': {
  //   'type': 'collection',
  //   'constraints': {
  //     'required': false
  //   }
  // },
  'minOrderValueRequired': {
    'type': FIELD_TYPE_BOOLEAN,
    'constraints': {
      'required': false
    }
  },
  // 'contractedClassificationGroups': {
  //   'type': 'collection',
  //   'constraints': {
  //     'required': false
  //   }
  // },
  'extContractId': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 10,
      'required': false
    }
  },
  // 'children': {
  //   'type': 'collection',
  //   'constraints': {
  //     'required': false
  //   }
  // },
  'changedBy': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'required': true
    }
  },
  // 'translations': {
  //   'type': 'collection',
  //   'constraints': {
  //     'required': false
  //   }
  // },
  // 'usages': {
  //   'type': 'collection',
  //   'constraints': {
  //     'required': false
  //   }
  // },
  'currencyId': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 3,
      'required': false
    }
  },
  'isFrameContract': {
    'type': FIELD_TYPE_BOOLEAN,
    'constraints': {
      'required': false
    }
  },
  'totalContractedAmount': {
    'type': FIELD_TYPE_INTEGER,
    'constraints': {
      'min': 0,
      'max': 999999999,
      'required': false
    }
  },
  'smallVolumeSurcharge': {
    'type': FIELD_TYPE_DECIMAL,
    'constraints': {
      'min': 0,
      'max': 999999999,
      'required': false
    }
  },
  // 'provisionings': {
  //   'type': 'collection',
  //   'constraints': {
  //     'required': false
  //   }
  // },
  'isOffer': {
    'type': FIELD_TYPE_BOOLEAN,
    'constraints': {
      'required': false
    }
  },
  'maxOrderValue': {
    'type': FIELD_TYPE_INTEGER,
    'constraints': {
      'min': 0,
      'max': 999999999,
      'required': false
    }
  },
  // 'validRange': {
  //   'type': 'com.jcatalog.core.DateRange',
  //   'constraints': {
  //     'required': false
  //   }
  // },
  'isPreferred': {
    'type': FIELD_TYPE_BOOLEAN,
    'constraints': {
      'required': false
    }
  },
  'isInternal': {
    'type': FIELD_TYPE_BOOLEAN,
    'constraints': {
      'required': false
    }
  },
  // 'contractCategory': {
  //   'type': 'com.jcatalog.contract.ContractCategory',
  //   'constraints': {
  //     'required': false
  //   }
  // },
  'freightSurcharge': {
    'type': FIELD_TYPE_DECIMAL,
    'constraints': {
      'min': 0,
      'max': 999999999,
      'required': false
    }
  },
  'isStandard': {
    'type': FIELD_TYPE_BOOLEAN,
    'constraints': {
      'required': false
    }
  },
  'statusId': {
    // Custom (application-defined) field type; values are hex strings
    // (see the parse/format converters in the form layouts below).
    'type': 'com.opuscapita.hexadecimal',
    'constraints': {
      'min': 0,
      'max': "800",
      'required': false
    }
  },
  'createdBy': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'required': true
    }
  },
  'extContractLineId': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 10,
      'required': false
    }
  },
  // No metadata: type/constraints come from editor defaults.
  'parentContract': {},
  'minOrderValue': {
    'type': FIELD_TYPE_INTEGER,
    'constraints': {
      'min': 0,
      'max': 99,
      'required': false
    }
  }
};
// Builds the tab/section/field form layout shared by the Create, Edit and
// Show views. `viewName` tweaks per-view details (read-only fields, disabled
// tabs); the returned function receives the layout DSL helpers and then the
// instance being rendered.
const buildFormLayout = /* istanbul ignore next */ viewName => ({ tab, section, field }) => instance => [
  tab({ name: 'general', columns: 2 }, // Best look with N = 2, 3, 4 (default is 1)
    // Contract ID is only editable while creating a new instance.
    field({ name: 'contractId', readOnly: viewName !== VIEW_CREATE }),
    field({ name: 'description' }),
    // field({ name: 'translations', render: { component: TranslatableTextEditor }}),
    // statusId is stored as a hex string but edited as an integer.
    field({ name: 'statusId', render: { component: StatusField, value: { converter: {
      format: value => value || value === 0 ? parseInt(value, 16) : null,
      parse: value => value || value === 0 ? value.toString(16) : null
    } } } }),
    field({ name: 'parentContract', render: { component: ContractReferenceSearch } }),
    // field({ name: 'currencyId', render: { component: CurrencyField }}),
    // Audit fields do not exist yet on the Create view.
    viewName !== VIEW_CREATE && section({ name: 'auditable', columns: 2 },
      field({ name: 'createdBy', readOnly: true }),
      field({ name: 'createdOn', readOnly: true }),
      field({ name: 'changedOn', readOnly: true }),
      field({ name: 'changedBy', readOnly: true })
    )
  ),
  tab({ name: 'catalogs' }),
  tab({ name: 'customer' }),
  tab({ name: 'boilerplates' }),
  tab({ name: 'supplier' }),
  tab({ name: 'groups' }),
  tab({ name: 'additional', disabled: viewName === VIEW_CREATE },
    section({ name: 'test' },
      field({ name: 'testNumberTypeField' }),
      field({ name: 'email' }),
      field({ name: 'url' }),
      field({ name: 'testRegexp' }),
    ),
    section({ name: 'order', columns: 3 },
      field({ name: 'minOrderValue' }),
      field({ name: 'maxOrderValue' }),
      field({ name: 'freeShippingBoundary' }),
      field({ name: 'freightSurcharge' }),
      field({ name: 'smallVolumeSurcharge' }),
      field({ name: 'totalContractedAmount' }),
      field({ name: 'minOrderValueRequired' })
    ),
    section({ name: 'type', columns: 4 },
      field({ name: 'isStandard' }),
      field({ name: 'isPreferred' }),
      field({ name: 'isFrameContract' }),
      field({ name: 'isInternal' }),
      field({ name: 'isOffer' })
    )
  ),
  // Fully custom tab content rendered by the application component.
  tab({ name: 'custom', component: CustomTabComponent, disabled: viewName === VIEW_CREATE })
];
// Complete model definition of the demo "Contracts" editor:
// model metadata + permissions + api adapter + ui configuration.
export default {
  model: {
    name: 'Contracts', // unique for each model used in your app; used to distinguish translations
    translations,
    fields,
    // Instance-level (cross-field) validation, run on save.
    validate: /* istanbul ignore next */ ({ formInstance }) => {
      if (formInstance.minOrderValueRequired && formInstance.minOrderValue === null) {
        const err = [{
          code: 400,
          id: 'requiredFieldMissing',
          message: 'minOrderValue must be set when minOrderValueRequired is true',
          args: {
            contractId: formInstance.contractId
          }
        }];
        throw err;
      }
      return true;
    }
  },
  permissions: {
    crudOperations: {
      create: _ => true,
      // Per-instance permission demo: editable only when the description
      // contains "of"; without an instance (global check) always allowed.
      edit: ({ instance } = {}) => {
        if (instance) {
          return (instance.description || '').indexOf('of') > -1;
        }
        return true;
      },
      // Per-instance permission demo: IDs containing "abd" cannot be deleted.
      delete: ({ instance } = {}) => {
        if (instance) {
          return (instance.contractId || '').toLowerCase().indexOf('abd') === -1;
        }
        return true;
      },
      view: true
    }
  },
  api,
  ui: {
    search: /* istanbul ignore next */ _ => ({
      searchableFields: [
        { name: 'contractId' },
        { name: 'description' },
        { name: 'extContractId' },
        { name: 'extContractLineId' },
        // Hex string in the model <-> integer in the search input.
        { name: 'statusId', render: { component: StatusField, value: { converter: {
          format: value => value || value === 0 ? parseInt(value, 16) : null,
          parse: value => value || value === 0 ? value.toString(16) : null
        } } } },
        { name: 'maxOrderValue' },
        // THE SAME CAN BE ACHIEVED WITH THE FOLLOWING
        // EXAMPLE OF USING BUILT-IN RANGE INPUT COMPONENT:
        // { name: 'maxOrderValue', render: { component: BUILTIN_RANGE_INPUT, props: { type: 'integer' } } },
        { name: 'createdOn' },
        { name: 'parentContract', render: { component: ContractReferenceSearch } }
      ],
      resultFields: [
        { name: 'contractId', sortable: true },
        { name: 'description', sortable: true },
        { name: 'extContractId', sortable: true },
        { name: 'extContractLineId', sortable: true },
        { name: 'validRange', component: DateRangeCellRender }
      ]
    }),
    instanceLabel: /* istanbul ignore next */ instance => instance._objectLabel || instance.contractId || '',
    create: {
      // Pre-populate a new instance from the current search filter,
      // skipping range filters (they don't map to a single field value).
      defaultNewInstance: /* istanbul ignore next */ ({ filter } = {}) => Object.keys(filter || {}).reduce(
        (rez, fieldName) => {
          const isRange = ['maxOrderValue', 'createdOn'].indexOf(fieldName) !== -1;
          return isRange || filter[fieldName] === null ?
            rez :
            {
              ...rez,
              [fieldName]: filter[fieldName]
            };
        },
        {}
      ),
      formLayout: buildFormLayout(VIEW_CREATE)
    },
    edit: {
      formLayout: buildFormLayout(VIEW_EDIT)
    },
    show: {
      formLayout: buildFormLayout(VIEW_SHOW)
    },
    spinner: CustomSpinner,
    // Two demo custom operations: "createChild" and "duplicate", both
    // redirecting to the Create view with predefined field values.
    customOperations: /* istanbul ignore next */ instance => [{
      handler: _ => ({
        name: VIEW_CREATE,
        state: {
          predefinedFields: {
            parentContract: instance.contractId
          }
        }
      }),
      ui: ({
        name: viewName,
        state: viewState
      }) => ({
        title: _ => 'createChild',
        show: viewName !== VIEW_CREATE,
        dropdown: viewName === VIEW_SEARCH
      })
    }, {
      handler: _ => ({
        name: VIEW_CREATE,
        state: {
          // Copy everything except the identity and audit fields.
          predefinedFields: Object.keys(instance).
            filter(key => [
              'contractId',
              'createdBy',
              'createdOn',
              'changedBy',
              'changedOn'
            ].indexOf(key) === -1).
            reduce((obj, key) => ({ ...obj, [key]: instance[key] }), {})
        }
      }),
      ui: ({
        name: viewName,
        state: viewState
      }) => ({
        title: _ => 'duplicate',
        show: viewName !== VIEW_CREATE,
        dropdown: true
      })
    }]
  }
};
<|start_filename|>src/demo/models/second-model/api/index.js<|end_filename|>
import {
get,
create,
update,
search,
deleteOne
} from './api';
const FAKE_RESPONSE_TIMEOUT = 300; // In milliseconds. 0 for no timeout.

// Fake async API adapter for the demo "second model": wraps the synchronous
// in-memory CRUD helpers in Promises resolved after an artificial delay,
// mimicking server latency and server-side error responses.
export default {
  get({ instance }) {
    return new Promise((resolve, reject) => {
      setTimeout(_ => {
        try {
          const item = get({ instance });
          resolve(item)
          //
          // comment 'resolve' and uncomment 'reject' if you need to test for errors alerts on Search view
          //
          // reject({ code: 404, message: `Server error: Contract ${JSON.stringify(instance.contractId)} not found` })
        } catch (e) {
          reject({ code: 404, message: `Server error: Contract ${JSON.stringify(instance.contractId)} not found` })
        }
      }, FAKE_RESPONSE_TIMEOUT)
    })
  },
  search({ filter = {}, sort, order, offset, max }) {
    return new Promise((resolve, reject) => {
      setTimeout(_ => resolve(search({ filter, sort, order, offset, max })), FAKE_RESPONSE_TIMEOUT)
    })
  },
  // One-by-one deletion.
  // errors array length does correspond to number of instances failed to be deleted.
  async delete({ instances }) {
    // Each deletion resolves to a deleted-count (number) or an error object;
    // rejections are converted to values so one failure doesn't abort the batch.
    const deletionResults = await Promise.all(instances.map(async instance => {
      try {
        // Demo rule: IDs containing "seminal" or "emphysema" refuse deletion.
        const deleteInstance = instance => new Promise((resolve, reject) => setTimeout(
          _ => instance.contractId.toLowerCase().indexOf('seminal') === -1 ?
            (
              instance.contractId.toLowerCase().indexOf('emphysema') === -1 ?
                resolve(deleteOne({ instance })) :
                reject({
                  code: 400,
                  id: 'emphysemaDeletionAttempt',
                  message: 'Contracts with IDs containing "emphysema" must not be deleted'
                })
            ) :
            reject({
              code: 400,
              id: 'seminalDeletionAttempt',
              message: 'Contracts with IDs containing "seminal" must not be deleted'
            }),
          FAKE_RESPONSE_TIMEOUT
        ));

        const deletedCount = await deleteInstance(instance);
        return deletedCount;
      } catch (err) {
        return err;
      }
    }));

    const errors = [];
    let deletedCount = 0;

    deletionResults.forEach(rez => {
      if (typeof rez === 'number') {
        deletedCount += rez;
      } else {
        // rez is an error object.
        errors.push(rez);
      }
    })

    return {
      count: deletedCount,
      ...(errors.length && { errors })
    };
  },
  create({ instance }) {
    return new Promise((resolve, reject) => {
      setTimeout(_ => {
        try {
          const item = create({ instance });
          resolve(item)
        } catch (e) {
          reject({
            code: 400,
            message: `Server error: Instance with contractId "${instance.contractId}" already exists in the database`
          })
        }
      }, FAKE_RESPONSE_TIMEOUT)
    })
  },
  update({ instance }) {
    return new Promise((resolve, reject) => {
      setTimeout(_ => {
        try {
          const result = update({ instance });
          resolve(result)
        } catch (e) {
          reject({ code: 400, message: `Server error: Contract ${JSON.stringify(instance.contractId)} not found` })
        }
      }, FAKE_RESPONSE_TIMEOUT)
    })
  }
}
<|start_filename|>src/crudeditor-lib/i18n/ru.js<|end_filename|>
import exceptions from './exceptions/ru';
/* eslint-disable max-len */
// Russian (ru) translations for the generic CRUD editor UI messages.
// Keys must mirror those of the other locale files (en/de/fi/...).
const common = {
  "common.CrudEditor.new.title": "Новый",
  "common.CrudEditor.actions.tableHeader": "Действия",
  "common.CrudEditor.cancel.button": "Отменить",
  "common.CrudEditor.close.button": "Закрыть",
  "common.CrudEditor.create.button": "Создать",
  "common.CrudEditor.create.header": "Создать {modelName}",
  "common.CrudEditor.delete.button": "Удалить",
  "common.CrudEditor.delete.confirmation": "Вы действительно хотите удалить эту запись?",
  "common.CrudEditor.deleteSelected.button": "Удалить выбранное",
  "common.CrudEditor.deleteSelected.confirmation": "Вы действительно хотите удалить выбранные позиции?",
  "common.CrudEditor.duplicate.button": "Копировать",
  "common.CrudEditor.duplicate.header": "Копировать {modelName}",
  "common.CrudEditor.edit.button": "Редактировать",
  "common.CrudEditor.edit.header": "Редактировать: {modelName}",
  "common.CrudEditor.message.ajax.loading": "Пожалуйста, подождите...",
  "common.CrudEditor.noAssociationEntriesFound.message": "Записей не найдено. Вы можете {1} новую запись.",
  "common.CrudEditor.noItemsSelected.alert": "Нет выбранных позиций!",
  "common.CrudEditor.objectDeleteFailed.message": "Не удалось удалить объект, вероятно, он уже используется.",
  "common.CrudEditor.objectDeleted.message": "Объект удален.",
  "common.CrudEditor.objectDuplicated.message": "Объект скопирован.",
  "common.CrudEditor.objectSaveFailed.message": "Не удалось сохранить объект.",
  "common.CrudEditor.objectSaved.message": "Объект создан.",
  "common.CrudEditor.objectUpdated.message": "Объект обновлен.",
  "common.CrudEditor.objectsDeleteFailed.message": "Не удалось удалить объекты {count}, вероятно, они уже используются.",
  "common.CrudEditor.objectsDeleteIsNoAllowed.message": "Вы не можете удалить некоторые объекты из-за ограничений безопасности.",
  "common.CrudEditor.objectsDeleted.message": "Объекты {labels} удалены.",
  "common.CrudEditor.refresh.button": "Обновить",
  "common.CrudEditor.reset.button": "Сбросить",
  "common.CrudEditor.revisions.button": "Ревизии",
  "common.CrudEditor.save.button": "Сохранить",
  "common.CrudEditor.saveAndNew.button": "Сохранить и создать новое",
  "common.CrudEditor.saveAndNext.button": "Сохранить и далее",
  "common.CrudEditor.search.all": "Все",
  "common.CrudEditor.search.button": "Поиск",
  "common.CrudEditor.search.header": "Поиск: {payload}",
  "common.CrudEditor.search.result.label": "Результаты поиска",
  "common.CrudEditor.search.resultsPerPage": "Число результатов на страницу",
  "common.CrudEditor.select.button": "Выбрать",
  "common.CrudEditor.unsaved.confirmation": "Вы внесли изменения. Если Вы покинете данную страницу, эти изменения будут утеряны.",
  "common.CrudEditor.show.button": "Просмотреть",
  "common.CrudEditor.show.header": "Просмотр: {modelName}",
  "common.CrudEditor.export.button": "Экспорт",
  "common.CrudEditor.found.items.message": "\u041D\u0430\u0439\u0434\u0435\u043D\u043E \u043F\u043E\u0437\u0438\u0446\u0438\u0439\: {count}",
  "common.CrudEditor.range.from": "\u0441",
  "common.CrudEditor.range.to": "\u043F\u043E",
  "common.CrudEditor.search.showSearchForm": "Показать форму поиска",
  "common.CrudEditor.search.hideSearchForm": "Скрыть форму поиска",
  "common.CrudEditor.confirm.action": "Подтвердить",
  "common.CrudEditor.pagination.goToPage": "Перейти"
}
/* eslint-enable max-len */

// Merge generic UI messages with exception translations; due to spread
// order, exception keys would override common ones on collision.
export default {
  ...common,
  ...exceptions
}
<|start_filename|>src/crudeditor-lib/views/search/scenario.js<|end_filename|>
import { call, put, spawn } from 'redux-saga/effects';
import deleteSaga from './workerSagas/delete';
import searchSaga from './workerSagas/search';
import redirectSaga from '../../common/workerSagas/redirect';
import scenarioSaga from '../../common/scenario';
import {
INSTANCES_DELETE,
VIEW_SOFT_REDIRECT
} from '../../common/constants';
import {
INSTANCES_SEARCH,
VIEW_INITIALIZE_REQUEST,
VIEW_INITIALIZE_FAIL,
VIEW_INITIALIZE_SUCCESS,
VIEW_NAME
} from './constants';
// Action-type -> worker-saga transition table for the Search view scenario.
// "blocking" workers must finish before further actions are processed;
// "nonBlocking" workers may run while new actions keep arriving.
const transitions = {
  blocking: {
    [INSTANCES_DELETE]: deleteSaga,
  },
  nonBlocking: {
    [INSTANCES_SEARCH]: searchSaga,
    [VIEW_SOFT_REDIRECT]: redirectSaga
  }
};
/*
 * The saga initializes the view and
 * -- returns its life cycle scenario-saga in case of successful initialization
 * or
 * -- throws error(s) otherwise.
 *
 * source is relevant only for initialization but not for life cycle.
 * It is because initialization process and its result must not be reported to owner app.
 */
export default function*({
  modelDefinition,
  softRedirectSaga,
  // Previously saved Search view state (if any) used to restore the view.
  viewState: /* istanbul ignore next */ {
    filter,
    sort,
    order,
    max,
    offset,
    hideSearchForm
  } = {},
  source
}) {
  yield put({
    type: VIEW_INITIALIZE_REQUEST,
    payload: {
      hideSearchForm
    },
    meta: { source }
  });

  try {
    // Initialization == running an initial search with the restored params.
    yield call(searchSaga, {
      modelDefinition,
      action: {
        payload: {
          filter,
          sort,
          order,
          max,
          offset
        },
        meta: { source }
      }
    });
  } catch (err) {
    yield put({
      type: VIEW_INITIALIZE_FAIL,
      payload: err,
      error: true,
      meta: { source }
    });

    throw err; // Initialization error(s) are forwarded to the parent saga.
  }

  yield put({
    type: VIEW_INITIALIZE_SUCCESS,
    meta: { source }
  });

  // Detach the life-cycle scenario so it outlives this initialization saga;
  // the spawned task is returned to the caller.
  return (yield spawn(scenarioSaga, {
    modelDefinition,
    softRedirectSaga,
    transitions,
    viewName: VIEW_NAME
  }));
}
<|start_filename|>src/crudeditor-lib/common/workerSagas/redirect.spec.js<|end_filename|>
import { expect } from 'chai';
import { runSaga } from 'redux-saga';
import { call } from 'redux-saga/effects';
import sinon from 'sinon';
import redirectSaga from './redirect';
import { VIEW_SEARCH, VIEW_EDIT } from '../constants';
import {
VIEW_REDIRECT_REQUEST as SEARCH_VIEW_REDIRECT_REQUEST,
VIEW_REDIRECT_FAIL as SEARCH_VIEW_REDIRECT_FAIL
} from '../../views/search/constants';
// Unit tests for the common redirect worker saga.
// "good case" steps through the generator manually; "bad case" executes it
// with runSaga to capture dispatched actions when the redirect throws.
describe('common / workerSagas / redirect saga', () => {
  const softRedirectSaga = _ => null;

  const arg = {
    modelDefinition: {},
    softRedirectSaga,
    action: {
      payload: {
        view: {
          name: VIEW_EDIT,
          state: { a: 'b' }
        }
      },
      meta: {
        // spawner view determines which view's REDIRECT_* constants are used.
        spawner: VIEW_SEARCH
      }
    }
  }

  describe('good case', () => {
    const gen = redirectSaga(arg);

    it('should put VIEW_REDIRECT_REQUEST', () => {
      const { value, done } = gen.next();
      expect(value).to.have.ownProperty('PUT');
      expect(value.PUT.action.type).to.equal(SEARCH_VIEW_REDIRECT_REQUEST);
      expect(value.PUT.action.meta).to.deep.equal(arg.action.meta)
      expect(done).to.be.false; // eslint-disable-line no-unused-expressions
    });

    it('should call softRedirectSaga', () => {
      const { value, done } = gen.next();
      expect(value).to.have.ownProperty('CALL');
      expect(value.CALL).to.have.ownProperty('fn');
      expect(value.CALL.fn).to.equal(softRedirectSaga);
      expect(value.CALL.args).to.deep.equal([{
        viewName: arg.action.payload.view.name,
        viewState: arg.action.payload.view.state
      }]);
      expect(done).to.be.false; // eslint-disable-line no-unused-expressions
    });

    it('should end iterator', () => {
      const { done } = gen.next();
      expect(done).to.be.true; // eslint-disable-line no-unused-expressions
    });
  });

  describe('bad case', () => {
    const dispatched = [];

    const err = {
      code: 345
    }

    // Wrapper swallows the rethrown error so runSaga completes,
    // while still asserting the error identity.
    const wrapper = function*(...args) {
      try {
        yield call(redirectSaga, ...args)
      } catch (e) {
        expect(e).deep.equal(err)
      }
    }

    const badRedirect = sinon.stub().throws(err)

    it('should put VIEW_REDIRECT_FAIL', () => {
      runSaga({
        dispatch: (action) => dispatched.push(action)
      }, wrapper, {
        ...arg,
        softRedirectSaga: badRedirect
      });
      expect(dispatched.map(({ type }) => type)).deep.equal([
        SEARCH_VIEW_REDIRECT_REQUEST,
        SEARCH_VIEW_REDIRECT_FAIL
      ])
      expect(dispatched.find(({ type }) => type === SEARCH_VIEW_REDIRECT_FAIL).payload).to.be.deep.equal(err);
    });
  })
});
<|start_filename|>src/crudeditor-lib/rootReducer.js<|end_filename|>
import { combineReducers } from 'redux';
import common from './common/reducer';
import search from './views/search/reducer';
import create from './views/create/reducer';
import edit from './views/edit/reducer';
import show from './views/show/reducer';
import error from './views/error/reducer';
import { isAllowed } from './lib';
import {
VIEW_SEARCH,
VIEW_CREATE,
VIEW_EDIT,
VIEW_SHOW,
VIEW_ERROR,
PERMISSION_CREATE,
PERMISSION_EDIT,
PERMISSION_VIEW
} from './common/constants';
/**
 * Builds the root reducer for the CRUD editor store.
 * View reducers are registered only for views the current permissions allow;
 * the error view reducer is always present.
 */
export default /* istanbul ignore next */ (modelDefinition, i18n) => {
  const { crudOperations } = modelDefinition.permissions;

  const viewReducers = {};

  if (isAllowed(crudOperations, PERMISSION_VIEW)) {
    viewReducers[VIEW_SEARCH] = search(modelDefinition, i18n);
  }
  if (isAllowed(crudOperations, PERMISSION_CREATE)) {
    viewReducers[VIEW_CREATE] = create(modelDefinition, i18n);
  }
  if (isAllowed(crudOperations, PERMISSION_EDIT)) {
    viewReducers[VIEW_EDIT] = edit(modelDefinition, i18n);
  }
  if (isAllowed(crudOperations, PERMISSION_VIEW)) {
    viewReducers[VIEW_SHOW] = show(modelDefinition, i18n);
  }
  // The error view is unconditional so redirect-to-error always works.
  viewReducers[VIEW_ERROR] = error(modelDefinition, i18n);

  return combineReducers({
    common: common(modelDefinition, i18n),
    views: combineReducers(viewReducers)
  });
}
<|start_filename|>src/components/ConfirmDialog/ConfirmUnsavedChanges.js<|end_filename|>
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';
import ConfirmDialog from './';
export default class ConfirmUnsavedChanges extends PureComponent {
static propTypes = {
trigger: PropTypes.string,
showDialog: PropTypes.func
}
static contextTypes = {
i18n: PropTypes.object.isRequired
}
render() {
const { children, ...rest } = this.props;
const { i18n } = this.context;
return (
<ConfirmDialog
message={i18n.getMessage('common.CrudEditor.unsaved.confirmation')}
textConfirm={i18n.getMessage('common.CrudEditor.confirm.action')}
textCancel={i18n.getMessage('common.CrudEditor.cancel.button')}
{...rest}
>
{children}
</ConfirmDialog>
)
}
}
<|start_filename|>src/crudeditor-lib/views/edit/workerSagas/delete.spec.js<|end_filename|>
import { expect } from 'chai';
import { runSaga } from 'redux-saga';
import { call } from 'redux-saga/effects';
import sinon from 'sinon';
import deleteSaga from './delete';
import {
INSTANCES_DELETE_REQUEST,
INSTANCES_DELETE_SUCCESS,
INSTANCES_DELETE_FAIL
} from '../../../common/constants';
import { VIEW_REDIRECT_REQUEST } from '../constants';
import {
VIEW_ERROR,
VIEW_SEARCH
} from '../../../common/constants';
// Unit tests for the edit view delete worker saga.
// NOTE(review): two cases are it.skip-ped — presumably pending fixes to the
// saga or test harness; confirm before relying on their coverage.
describe('edit view / workerSagas / delete', () => {
  const instances = [{
    a: 'b'
  }]

  const deleteApi = sinon.spy();
  const softRedirectSaga = sinon.spy();

  const arg = {
    modelDefinition: {
      api: {
        delete: deleteApi
      },
      model: {
        fields: {
          a: {}
        }
      }
    },
    softRedirectSaga,
    action: {
      payload: { instances },
      meta: {}
    }
  }

  it.skip('should redirect to search view after successful delete', () => {
    const dispatched = [];
    runSaga({
      dispatch: (action) => dispatched.push(action)
    }, deleteSaga, arg);
    expect(dispatched.map(({ type }) => type)).deep.equal([
      INSTANCES_DELETE_REQUEST,
      INSTANCES_DELETE_SUCCESS,
      VIEW_REDIRECT_REQUEST
    ])
    expect(dispatched.find(({ type }) => type === INSTANCES_DELETE_SUCCESS).payload).deep.equal({ instances })
    expect(deleteApi.calledOnce).to.be.true; // eslint-disable-line no-unused-expressions
    expect(softRedirectSaga.calledOnce).to.be.true; // eslint-disable-line no-unused-expressions
    expect(softRedirectSaga.calledWith({ // eslint-disable-line no-unused-expressions
      viewName: VIEW_SEARCH
    })).to.be.true;
  })

  it('should redirect to error view if redirect to search view failed', () => {
    const dispatched = [];

    const err = {
      code: 400,
      message: 'Failed to redirect to search view'
    }

    // Wrapper catches the saga's rethrow so runSaga can finish.
    const wrapper = function*(...args) {
      try {
        yield call(deleteSaga, ...args)
      } catch (e) {
        expect(e).to.deep.equal(err)
      }
    }

    const softRedirectSaga = sinon.stub().throws(err);

    runSaga({
      dispatch: (action) => dispatched.push(action)
    }, wrapper, {
      ...arg,
      softRedirectSaga
    });

    // First call: redirect to search (throws); second call: fallback to error view.
    expect(softRedirectSaga.calledTwice).to.be.true; // eslint-disable-line no-unused-expressions
    expect(softRedirectSaga.firstCall.calledWith({ // eslint-disable-line no-unused-expressions
      viewName: VIEW_SEARCH
    })).to.be.true;
    expect(softRedirectSaga.secondCall.calledWith({ // eslint-disable-line no-unused-expressions
      viewName: VIEW_ERROR,
      viewState: err
    })).to.be.true;
  })

  it.skip('should throw if delete api fails', () => {
    const dispatched = [];

    const err = {
      code: 500,
      message: 'Delete api failed'
    }

    const wrapper = function*(...args) {
      try {
        yield call(deleteSaga, ...args)
      } catch (e) {
        expect(e).to.deep.equal(err)
      }
    }

    const badApi = sinon.stub().throws(err);

    runSaga({
      dispatch: (action) => dispatched.push(action)
    }, wrapper, {
      ...arg,
      modelDefinition: {
        ...arg.modelDefinition,
        api: {
          delete: badApi
        }
      }
    });

    expect(badApi.calledOnce).to.be.true; // eslint-disable-line no-unused-expressions
    expect(dispatched.map(({ type }) => type)).deep.equal([
      INSTANCES_DELETE_REQUEST,
      INSTANCES_DELETE_FAIL
    ])
    expect(dispatched.find(({ type }) => type === INSTANCES_DELETE_FAIL).payload).deep.equal(err)
  })
})
<|start_filename|>src/crudeditor-lib/views/search/reducer.js<|end_filename|>
import cloneDeep from 'lodash/cloneDeep';
import isEqual from 'lodash/isEqual';
import u from 'updeep';
import { getLogicalKeyBuilder } from '../lib';
import { isSystemError } from '../../lib';
import {
getDefaultSortField,
cleanFilter
} from './lib';
import {
DEFAULT_OFFSET,
DEFAULT_ORDER,
ALL_INSTANCES_SELECT,
ALL_INSTANCES_DESELECT,
FORM_FILTER_RESET,
FORM_FILTER_UPDATE,
GOTO_PAGE_UPDATE,
INSTANCES_SEARCH_FAIL,
INSTANCES_SEARCH_REQUEST,
INSTANCES_SEARCH_SUCCESS,
INSTANCE_SELECT,
INSTANCE_DESELECT,
VIEW_INITIALIZE_REQUEST,
VIEW_INITIALIZE_FAIL,
VIEW_INITIALIZE_SUCCESS,
VIEW_REDIRECT_REQUEST,
VIEW_REDIRECT_FAIL,
VIEW_REDIRECT_SUCCESS,
SEARCH_FORM_TOGGLE
} from './constants';
import {
STATUS_DELETING,
STATUS_INITIALIZING,
STATUS_READY,
STATUS_REDIRECTING,
STATUS_SEARCHING,
STATUS_UNINITIALIZED,
EMPTY_FIELD_VALUE,
INSTANCES_DELETE_FAIL,
INSTANCES_DELETE_REQUEST,
INSTANCES_DELETE_SUCCESS,
UNPARSABLE_FIELD_VALUE
} from '../../common/constants';
// Builds a parsed filter with every searchable field reset to the empty value.
const buildDefaultParsedFilter = searchableFields => {
  const parsedFilter = {};

  searchableFields.forEach(({ name }) => {
    parsedFilter[name] = EMPTY_FIELD_VALUE;
  });

  return parsedFilter;
};
// Builds a formatted filter: every searchable field holds the formatted
// representation of the empty value, as produced by its own converter.
const buildDefaultFormattedFilter = ({
  ui: {
    search: { searchableFields }
  }
}) => {
  const formattedFilter = {};

  searchableFields.forEach(({
    name,
    render: {
      value: {
        converter: { format }
      }
    }
  }) => {
    formattedFilter[name] = format(EMPTY_FIELD_VALUE);
  });

  return formattedFilter;
};
/**
 * Converts a parsed filter into its formatted counterpart by running each
 * field value through that field's configured `format` converter.
 *
 * @param modelDefinition - supplies ui.search.searchableFields metadata.
 * @param filter - parsed filter; every key is expected to be a searchable
 *   field name (a missing field meta would throw, same as before).
 * @param i18n - i18n context forwarded to each field's format().
 * @returns object with the same keys as `filter`, holding formatted values.
 */
const buildFormattedFilter = ({
  modelDefinition: {
    ui: {
      search: { searchableFields }
    }
  },
  filter,
  i18n
}) => Object.keys(filter).reduce(
  (rez, fieldName) => {
    // find() replaces the previous some()-with-side-effect lookup.
    const fieldMeta = searchableFields.find(({ name }) => name === fieldName);
    const { format } = fieldMeta.render.value.converter;

    return {
      ...rez,
      [fieldName]: format(filter[fieldName], i18n)
    };
  },
  {}
);
// Returns the initial store state for the Search view.
// Exported because other modules need a pristine state for this view.
export const buildDefaultStoreState = modelDefinition => ({
  // Active filter as displayed in Search Result.
  resultFilter: buildDefaultParsedFilter(modelDefinition.ui.search.searchableFields),

  // Raw filter as displayed in Search Criteria.
  formFilter: buildDefaultParsedFilter(modelDefinition.ui.search.searchableFields),

  // Raw filter as communicated to Search fields React Components.
  formattedFilter: buildDefaultFormattedFilter(modelDefinition),

  // Current sort column and direction.
  sortParams: {
    field: getDefaultSortField(modelDefinition.ui.search),
    order: DEFAULT_ORDER
  },

  // Current page size and offset.
  pageParams: {
    max: modelDefinition.ui.search.pagination.defaultMax,
    offset: DEFAULT_OFFSET
  },

  // Raw value of the "go to page" input.
  gotoPage: '',

  resultInstances: undefined, // XXX: must be undefined until first extraction.
  selectedInstances: [], // XXX: must be a sub-array of refs from resultInstances.
  totalCount: undefined,

  errors: {
    // object with keys as field names,
    // values as arrays of Parsing Errors, may not be empty
    // (the object does not have keys for successfully parsed values).
    fields: {}
  },

  status: STATUS_UNINITIALIZED,
  hideSearchForm: false
});
/*
* XXX:
* Only objects and arrays are allowed at branch nodes.
* Only primitive data types are allowed at leaf nodes.
*/
/*
 * Search view reducer factory.
 *
 * XXX:
 * Only objects and arrays are allowed at branch nodes.
 * Only primitive data types are allowed at leaf nodes.
 *
 * The returned reducer accumulates changes in newStoreStateSlice and applies
 * them with updeep, so untouched branches keep referential identity.
 */
export default /* istanbul ignore next */ (modelDefinition, i18n) => {
  const buildLogicalKey = getLogicalKeyBuilder(modelDefinition.model.fields);

  // Remove benchmarkInstances from sourceInstances by comparing their Logical Keys.
  const removeInstances = (sourceInstances, benchmarkInstances) =>
    sourceInstances.filter(sourceInstance =>
      !benchmarkInstances.some(benchmarkInstance =>
        isEqual(
          buildLogicalKey(sourceInstance),
          buildLogicalKey(benchmarkInstance)
        )
      )
    );

  return (storeState = buildDefaultStoreState(modelDefinition), { type, payload, error, meta }) => {
    // While uninitialized, only initialization requests and search results
    // (which may update totalCount from another view) are processed.
    if (
      storeState.status === STATUS_UNINITIALIZED &&
      [VIEW_INITIALIZE_REQUEST, INSTANCES_SEARCH_SUCCESS].indexOf(type) === -1
    ) {
      return storeState;
    }

    const newStoreStateSlice = {};

    /* eslint-disable padded-blocks */
    // ███████████████████████████████████████████████████████████████████████████████████████████████████████████
    // View initialization life cycle.
    if (type === VIEW_INITIALIZE_REQUEST) {
      const { hideSearchForm } = payload;
      if (typeof hideSearchForm === 'boolean') {
        newStoreStateSlice.hideSearchForm = hideSearchForm;
      }
      newStoreStateSlice.status = STATUS_INITIALIZING;

    } else if (type === VIEW_INITIALIZE_FAIL) {
      newStoreStateSlice.status = STATUS_UNINITIALIZED;

    } else if (type === VIEW_INITIALIZE_SUCCESS) {
      newStoreStateSlice.status = STATUS_READY;

    // ███████████████████████████████████████████████████████████████████████████████████████████████████████████
    // Redirect life cycle.
    } else if (type === VIEW_REDIRECT_REQUEST) {
      newStoreStateSlice.status = STATUS_REDIRECTING;

    } else if (type === VIEW_REDIRECT_FAIL) {
      newStoreStateSlice.status = STATUS_READY;

    } else if (type === VIEW_REDIRECT_SUCCESS) {
      // Do not reset store to initial uninitialized state because
      // totalCount, filter, order, sort, etc. must remain after returning from other Views.
      newStoreStateSlice.formFilter = u.constant(cloneDeep(storeState.resultFilter));
      newStoreStateSlice.gotoPage = '';
      newStoreStateSlice.selectedInstances = [];

      newStoreStateSlice.formattedFilter = u.constant(buildFormattedFilter({
        modelDefinition,
        filter: storeState.resultFilter,
        i18n
      }));

      newStoreStateSlice.errors = u.constant({
        fields: {}
      });

      newStoreStateSlice.status = STATUS_UNINITIALIZED;

    // ███████████████████████████████████████████████████████████████████████████████████████████████████████
    // Instance deletion life cycle.
    } else if (type === INSTANCES_DELETE_REQUEST) {
      newStoreStateSlice.status = STATUS_DELETING;

    } else if (type === INSTANCES_DELETE_SUCCESS) {
      const { instances, count } = payload;
      newStoreStateSlice.gotoPage = '';
      newStoreStateSlice.totalCount = storeState.totalCount - count;

      if (instances) { // Actually deleted instances are known.
        newStoreStateSlice.selectedInstances = removeInstances(storeState.selectedInstances, instances);
        newStoreStateSlice.resultInstances = removeInstances(storeState.resultInstances, instances);
      } else {
        // Unknown which instances were deleted => drop the selection entirely.
        newStoreStateSlice.selectedInstances = [];
      }

      newStoreStateSlice.status = STATUS_READY;

    } else if (type === INSTANCES_DELETE_FAIL) {
      newStoreStateSlice.status = STATUS_READY;

    // ███████████████████████████████████████████████████████████████████████████████████████████████████████████
    } else if (type === INSTANCES_SEARCH_REQUEST) {
      const { filter } = payload;

      if (storeState.status !== STATUS_INITIALIZING) {
        newStoreStateSlice.status = STATUS_SEARCHING;
      } else if (!isEqual(filter, storeState.formFilter)) {
        // During initialization the requested filter becomes the form filter.
        newStoreStateSlice.formFilter = u.constant(cloneDeep(filter));
        newStoreStateSlice.formattedFilter = u.constant(buildFormattedFilter({
          modelDefinition,
          filter,
          i18n
        }));
      }

    // ███████████████████████████████████████████████████████████████████████████████████████████████████████
    } else if (type === INSTANCES_SEARCH_SUCCESS) {
      const {
        filter,
        sort,
        order,
        max,
        offset,
        instances,
        totalCount
      } = payload;

      if (storeState.status === STATUS_UNINITIALIZED) {
        if (isEqual(
          cleanFilter(filter),
          cleanFilter(storeState.resultFilter)
        )) {
          // Updating totalCount since another View has made "search" API call with the same filter.
          // XXX: totalCount for current Search View filter is used by other Views => it must always be up-to-date.
          newStoreStateSlice.totalCount = totalCount;
        }
      } else {
        // filter is formFilter so there is no need to reassign formFilter and formattedFilter.
        newStoreStateSlice.gotoPage = '';

        if (!isEqual(storeState.resultFilter, storeState.formFilter)) {
          newStoreStateSlice.resultFilter = u.constant(cloneDeep(storeState.formFilter));
        }

        newStoreStateSlice.sortParams = {
          field: sort,
          order
        };

        newStoreStateSlice.pageParams = {
          max,
          offset
        };

        newStoreStateSlice.totalCount = totalCount;

        // XXX: updeep-package does not check arrays for equality.
        if (!isEqual(instances, storeState.resultInstances)) {
          newStoreStateSlice.resultInstances = instances;
          newStoreStateSlice.selectedInstances = [];
        }

        if (storeState.status !== STATUS_INITIALIZING) {
          newStoreStateSlice.status = STATUS_READY;
        }
      }

    // ███████████████████████████████████████████████████████████████████████████████████████████████████████
    } else if (type === INSTANCES_SEARCH_FAIL && storeState.status !== STATUS_INITIALIZING) {
      newStoreStateSlice.status = STATUS_READY;

    // ███████████████████████████████████████████████████████████████████████████████████████████████████████
    } else if (type === FORM_FILTER_RESET) {
      newStoreStateSlice.formattedFilter = u.constant(buildDefaultFormattedFilter(modelDefinition));
      newStoreStateSlice.formFilter = u.constant(buildDefaultParsedFilter(modelDefinition.ui.search.searchableFields));

    // ███████████████████████████████████████████████████████████████████████████████████████████████████████
    } else if (type === GOTO_PAGE_UPDATE) {
      const { page } = payload;
      newStoreStateSlice.gotoPage = page;

    // ███████████████████████████████████████████████████████████████████████████████████████████████████████
    // A single filter field edit: parse the raw input, track parsing errors.
    } else if (type === FORM_FILTER_UPDATE) {
      const {
        name: fieldName,
        value: fieldValue
      } = payload;

      let converter;

      modelDefinition.ui.search.searchableFields.some(fieldMeta => {
        if (fieldMeta.name === fieldName) {
          ({ converter } = fieldMeta.render.value);
          return true;
        }
        return false;
      });

      // Labelled block: `break PARSE_LABEL` skips the success path below
      // once a parsing error has been recorded.
      PARSE_LABEL: {
        let newFormValue;

        try {
          newFormValue = converter.parse(fieldValue, i18n);
        } catch (err) {
          // Rethrow system errors.
          if (isSystemError(err)) {
            throw err;
          }

          const errors = Array.isArray(err) ? err : [err];

          newStoreStateSlice.formFilter = {
            [fieldName]: UNPARSABLE_FIELD_VALUE
          };

          if (!isEqual(fieldValue, storeState.formattedFilter[fieldName])) {
            newStoreStateSlice.formattedFilter = {
              [fieldName]: u.constant(fieldValue)
            };
          }

          if (!isEqual(errors, storeState.errors.fields[fieldName])) {
            newStoreStateSlice.errors = {
              fields: {
                [fieldName]: errors
              }
            };
          }

          break PARSE_LABEL;
        }

        if (!isEqual(newFormValue, storeState.formFilter[fieldName])) {
          newStoreStateSlice.formFilter = {
            [fieldName]: u.constant(newFormValue)
          };
        }

        const newFormattedValue = converter.format(newFormValue, i18n);

        if (!isEqual(newFormattedValue, storeState.formattedFilter[fieldName])) {
          newStoreStateSlice.formattedFilter = {
            [fieldName]: u.constant(newFormattedValue)
          };
        }

        if (storeState.errors.fields[fieldName]) {
          newStoreStateSlice.errors = {
            // u.omit() argument must be an array, since lodash v. 4.17.4 no longer supports a string.
            fields: u.omit([fieldName])
          };
        }
      }

    // ███████████████████████████████████████████████████████████████████████████████████████████████████████
    // Selection handling.
    } else if (type === ALL_INSTANCES_SELECT) {
      newStoreStateSlice.selectedInstances = storeState.resultInstances;

    // ███████████████████████████████████████████████████████████████████████████████████████████████████████
    } else if (type === ALL_INSTANCES_DESELECT) {
      newStoreStateSlice.selectedInstances = [];

    // ███████████████████████████████████████████████████████████████████████████████████████████████████████
    } else if (type === INSTANCE_SELECT) {
      let { instance } = payload;

      newStoreStateSlice.selectedInstances = [
        ...storeState.selectedInstances,
        instance
      ];

    // ███████████████████████████████████████████████████████████████████████████████████████████████████████
    } else if (type === INSTANCE_DESELECT) {
      let { instance } = payload;

      newStoreStateSlice.selectedInstances = storeState.selectedInstances.filter(ins => ins !== instance);

    // ███████████████████████████████████████████████████████████████████████████████████████████████████████
    } else if (type === SEARCH_FORM_TOGGLE) {
      newStoreStateSlice.hideSearchForm = !storeState.hideSearchForm

    // ███████████████████████████████████████████████████████████████████████████████████████████████████████
    /* eslint-enable padded-blocks */
    }

    return u(newStoreStateSlice, storeState); // returned object is frozen for NODE_ENV === 'development'
  };
};
<|start_filename|>src/crudeditor-lib/i18n/index.js<|end_filename|>
import en from './en';
import de from './de';
import fi from './fi';
import no from './no';
import ru from './ru';
import sv from './sv';
import da from './da';
/* eslint-disable max-len */
// Registry of all supported locales, keyed by ISO 639-1 language code.
export default {
  en,
  de,
  fi,
  no,
  ru,
  sv,
  da
}
/* eslint-enable max-len */
<|start_filename|>src/crudeditor-lib/views/error/container.js<|end_filename|>
import React from 'react';
import { connect } from 'react-redux';
import Main from '../../../components/ErrorMain';
import { getViewModelData } from './selectors';
import { softRedirectView } from '../../common/actions';
import { VIEW_SEARCH, PERMISSION_VIEW } from '../../common/constants';
import { isAllowed } from '../../lib';
// Shapes connect()'s state and dispatch props into the single `viewModel`
// prop expected by the error Main component. `goHome` is exposed only when
// the user is permitted to view the search screen it navigates to.
const mergeProps = /* istanbul ignore next */ (stateProps, dispatchProps) => {
  const {
    viewModelData,
    permissions: { crudOperations },
    uiConfig
  } = stateProps;

  const { goHome, ...restActions } = dispatchProps;

  const actions = isAllowed(crudOperations, PERMISSION_VIEW) ?
    { goHome, ...restActions } :
    { ...restActions };

  return {
    viewModel: {
      uiConfig,
      data: viewModelData,
      actions
    }
  };
};
// Container for the error view: maps store state to view-model data and
// wires the goHome action (soft redirect to the search view).
export default connect(
  /* istanbul ignore next */
  (storeState, { modelDefinition, uiConfig }) => ({
    viewModelData: getViewModelData(storeState, modelDefinition),
    permissions: modelDefinition.permissions,
    uiConfig
  }),
  {
    goHome: /* istanbul ignore next */ _ => softRedirectView({
      name: VIEW_SEARCH
    })
  },
  mergeProps
)(
  /* istanbul ignore next */
  ({ viewModel }) => <Main model={viewModel} />
);
<|start_filename|>src/crudeditor-lib/check-model/modelDefinition.js<|end_filename|>
import PropTypes from 'prop-types';
import { allowedSome, allPropTypes } from './lib';
import {
PERMISSION_CREATE,
PERMISSION_DELETE,
PERMISSION_EDIT,
PERMISSION_VIEW
} from '../common/constants';
// PropTypes schema for a model definition. api.* requirements depend on the
// declared permissions, hence the factory takes the modelDefinition itself.
const modelPropTypes = /* istanbul ignore next */ modelDefinition => ({
  model: PropTypes.shape({
    name: PropTypes.string.isRequired,
    translationsKeyPrefix: PropTypes.string,
    fields: allPropTypes(
      PropTypes.objectOf(PropTypes.shape({
        unique: PropTypes.bool,
        type: PropTypes.string,
        constraints: PropTypes.shape({
          max: PropTypes.oneOfType([
            PropTypes.number,
            PropTypes.instanceOf(Date),
            PropTypes.string
          ]),
          min: PropTypes.oneOfType([
            PropTypes.number,
            PropTypes.instanceOf(Date),
            PropTypes.string
          ]),
          required: PropTypes.bool,
          email: PropTypes.bool,
          matches: PropTypes.instanceOf(RegExp),
          url: PropTypes.bool,
          validate: PropTypes.func
        })
      })).isRequired,
      // Custom validator: the logical key requires at least one unique field.
      (props, propName, componentName) => {
        if (!props[propName]) {
          return; // don't duplicate an Error because it'll be returned by 'isRequired' above
        }

        const noUniqueFields = Object.keys(props[propName]).
          filter(fieldName => props[propName][fieldName].unique).length === 0;

        if (noUniqueFields) {
          // eslint-disable-next-line consistent-return
          return new Error(`${componentName}: At least one field should have property 'unique: true'.`);
        }
      }
    ),
    validate: PropTypes.func
  }).isRequired,
  permissions: PropTypes.shape({
    crudOperations: allPropTypes(
      PropTypes.shape({
        [PERMISSION_CREATE]: PropTypes.oneOfType([PropTypes.bool, PropTypes.func]),
        [PERMISSION_EDIT]: PropTypes.oneOfType([PropTypes.bool, PropTypes.func]),
        [PERMISSION_DELETE]: PropTypes.oneOfType([PropTypes.bool, PropTypes.func]),
        [PERMISSION_VIEW]: PropTypes.oneOfType([PropTypes.bool, PropTypes.func])
      }).isRequired,
      // Custom validator: an all-false permission set would disable the editor.
      (props, propName, componentName) => {
        if (!props || !props[propName]) {
          return; // don't duplicate an Error because it'll be returned by 'isRequired' above
        }

        if (
          !Object.keys(props[propName]).some(
            p => props[propName][p] === true || props[propName][p] instanceof Function
          )
        ) {
          // eslint-disable-next-line consistent-return,max-len
          return new Error(`${componentName}: At least one field in permissions.crudOperations must be defined as boolean 'true' OR function, otherwise all operations are forbidden`);
        }
      }
    )
  }).isRequired,
  // api handlers become required only when the permission using them is granted.
  api: PropTypes.shape({
    get: allowedSome([PERMISSION_VIEW, PERMISSION_EDIT], modelDefinition) ?
      PropTypes.func.isRequired : PropTypes.func,
    search: allowedSome([PERMISSION_VIEW, PERMISSION_EDIT], modelDefinition) ?
      PropTypes.func.isRequired : PropTypes.func,
    delete: allowedSome([PERMISSION_DELETE], modelDefinition) ? PropTypes.func.isRequired : PropTypes.func,
    create: allowedSome([PERMISSION_CREATE], modelDefinition) ? PropTypes.func.isRequired : PropTypes.func,
    update: allowedSome([PERMISSION_EDIT], modelDefinition) ? PropTypes.func.isRequired : PropTypes.func,
  }),
  ui: PropTypes.shape({
    spinner: PropTypes.func,
    search: PropTypes.func,
    instanceLabel: PropTypes.func,
    create: PropTypes.shape({
      defaultNewInstance: PropTypes.func,
      formLayout: PropTypes.func
    }),
    edit: PropTypes.shape({
      formLayout: PropTypes.func
    }),
    show: PropTypes.shape({
      formLayout: PropTypes.func
    }),
    customViews: PropTypes.objectOf(PropTypes.func),
    customOperations: PropTypes.func
  })
})
// Validates a model definition against the schema above; violations are
// reported through React's standard checkPropTypes warning channel.
export default /* istanbul ignore next */ modelDefinition => PropTypes.checkPropTypes(
  modelPropTypes(modelDefinition),
  modelDefinition,
  'property',
  'React-CrudEditor Model'
)
<|start_filename|>src/demo/models/second-model/index.js<|end_filename|>
import api from './api';
import translations from './i18n';
import CustomTabComponent from './components/CustomTabComponent';
import {
FIELD_TYPE_BOOLEAN,
FIELD_TYPE_DECIMAL,
FIELD_TYPE_INTEGER,
FIELD_TYPE_STRING,
FIELD_TYPE_STRING_INTEGER,
FIELD_TYPE_STRING_DATE,
VIEW_CREATE,
VIEW_EDIT,
VIEW_SHOW
} from '../../../crudeditor-lib';
// Field metadata for the demo "Contracts" model: type and validation
// constraints per field. 'collection' and the com.jcatalog.* types are
// custom (non-built-in) type identifiers handled by the demo app.
export const fields = {
  'contractId': {
    unique: true, // part of the logical key
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 100,
      'required': true
    }
  },
  'description': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 100,
      'required': false,
      // Demo of a custom field-level validator throwing a translatable error.
      validate: /* istanbul ignore next */ (value, instance) => {
        if ((value || '').toLowerCase().indexOf('booo') !== -1) {
          const err = [{
            code: 400,
            // `id` is used to find translations for this particular error
            // define translations with the following key structure:
            // model.field.FIELD_NAME.error.ERROR_ID, where ERROR_ID is `id` defined below
            id: 'forbiddenWord',
            // `message` is a default message in case translation is not found
            message: 'Description cannot contain `booo`!',
            // optional `payload` for error translations
            // here you can define props which you use in i18n messages
            // example: for i18n message `Hello {name}! This field cannot exceed {maxValue}`
            // define `name` and `maxValue` props
            payload: {
              forbiddenWord: 'BOOO'
            }
          }];
          throw err;
        }
        return true;
      }
    }
  },
  'testNumberTypeField': {
    'type': FIELD_TYPE_DECIMAL,
    'constraints': {
      'required': false,
      'max': Number.MAX_SAFE_INTEGER
    }
  },
  'contractBoilerplates': {
    'type': 'collection',
    'constraints': {
      'required': false
    }
  },
  'hierarchyCode': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 100,
      'required': false
    }
  },
  'termsOfPaymentId': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 20,
      'required': false
    }
  },
  'termsOfDeliveryId': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 20,
      'required': false
    }
  },
  'freeShippingBoundary': {
    'type': FIELD_TYPE_INTEGER,
    'constraints': {
      'min': 0,
      'max': 999999999,
      'required': false
    }
  },
  'createdOn': {
    'type': FIELD_TYPE_STRING_DATE,
    'constraints': {
      'required': true
    }
  },
  'changedOn': {
    'type': FIELD_TYPE_STRING_DATE,
    'constraints': {
      'required': true
    }
  },
  'contractedCatalogs': {
    'type': 'collection',
    'constraints': {
      'required': false
    }
  },
  'minOrderValueRequired': {
    'type': FIELD_TYPE_BOOLEAN,
    'constraints': {
      'required': false
    }
  },
  'contractedClassificationGroups': {
    'type': 'collection',
    'constraints': {
      'required': false
    }
  },
  'extContractId': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 10,
      'required': false
    }
  },
  'children': {
    'type': 'collection',
    'constraints': {
      'required': false
    }
  },
  'changedBy': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'required': true
    }
  },
  'translations': {
    'type': 'collection',
    'constraints': {
      'required': false
    }
  },
  'usages': {
    'type': 'collection',
    'constraints': {
      'required': false
    }
  },
  'currencyId': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 3, // ISO 4217 currency codes are 3 characters
      'required': false
    }
  },
  'isFrameContract': {
    'type': FIELD_TYPE_BOOLEAN,
    'constraints': {
      'required': false
    }
  },
  'totalContractedAmount': {
    'type': FIELD_TYPE_INTEGER,
    'constraints': {
      'min': 0,
      'max': 999999999,
      'required': false
    }
  },
  'smallVolumeSurcharge': {
    'type': FIELD_TYPE_DECIMAL,
    'constraints': {
      'min': 0,
      'max': 999999999,
      'required': false
    }
  },
  'provisionings': {
    'type': 'collection',
    'constraints': {
      'required': false
    }
  },
  'isOffer': {
    'type': FIELD_TYPE_BOOLEAN,
    'constraints': {
      'required': false
    }
  },
  'maxOrderValue': {
    'type': FIELD_TYPE_INTEGER,
    'constraints': {
      'min': 0,
      'max': 999999999,
      'required': false
    }
  },
  'validRange': {
    'type': 'com.jcatalog.core.DateRange',
    'constraints': {
      'required': false
    }
  },
  'isPreferred': {
    'type': FIELD_TYPE_BOOLEAN,
    'constraints': {
      'required': false
    }
  },
  'isInternal': {
    'type': FIELD_TYPE_BOOLEAN,
    'constraints': {
      'required': false
    }
  },
  'contractCategory': {
    'type': 'com.jcatalog.contract.ContractCategory',
    'constraints': {
      'required': false
    }
  },
  'freightSurcharge': {
    'type': FIELD_TYPE_DECIMAL,
    'constraints': {
      'min': 0,
      'max': 999999999,
      'required': false
    }
  },
  'isStandard': {
    'type': FIELD_TYPE_BOOLEAN,
    'constraints': {
      'required': false
    }
  },
  'statusId': {
    'type': FIELD_TYPE_STRING_INTEGER,
    'constraints': {
      'min': 0,
      // NOTE(review): string "800" vs numeric min 0 — presumably intentional
      // for the string-integer type, but confirm the constraint checker
      // accepts a string bound here.
      'max': "800",
      'required': false
    }
  },
  'createdBy': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'required': true
    }
  },
  'extContractLineId': {
    'type': FIELD_TYPE_STRING,
    'constraints': {
      'max': 10,
      'required': false
    }
  },
  // No type/constraints: field is declared but left to default handling.
  'parentContract': {},
  'minOrderValue': {
    'type': FIELD_TYPE_INTEGER,
    'constraints': {
      'min': 0,
      'max': 999999999,
      'required': false
    }
  }
};
// Builds the tab/section/field form layout for a given view.
// The returned function receives the DSL helpers ({ tab, section, field })
// and then the instance; falsy entries (e.g. the auditable section on the
// create view) are skipped by the layout consumer.
const buildFormLayout = /* istanbul ignore next */ viewName => ({ tab, section, field }) => instance => [
  tab({ name: 'general', columns: 2 }, // Best look with N = 2, 3, 4 (default is 1)
    // contractId is the logical key => editable only while creating.
    field({ name: 'contractId', readOnly: viewName !== VIEW_CREATE }),
    field({ name: 'description' }),
    viewName !== VIEW_CREATE && section({ name: 'auditable', columns: 2 },
      field({ name: 'createdBy', readOnly: true }),
      field({ name: 'createdOn', readOnly: true }),
      field({ name: 'changedOn', readOnly: true }),
      field({ name: 'changedBy', readOnly: true })
    )
  ),
  // Empty tabs: placeholders rendered without fields.
  tab({ name: 'catalogs' }),
  tab({ name: 'customer' }),
  tab({ name: 'boilerplates' }),
  tab({ name: 'supplier' }),
  tab({ name: 'groups' }),
  tab({ name: 'additional', disabled: viewName === VIEW_CREATE },
    section({ name: 'test' },
      field({ name: 'testNumberTypeField' })
    ),
    section({ name: 'order', columns: 3 },
      field({ name: 'minOrderValue' }),
      field({ name: 'maxOrderValue' }),
      field({ name: 'freeShippingBoundary' }),
      field({ name: 'freightSurcharge' }),
      field({ name: 'smallVolumeSurcharge' }),
      field({ name: 'totalContractedAmount' }),
      field({ name: 'minOrderValueRequired' })
    ),
    section({ name: 'type', columns: 4 },
      field({ name: 'isStandard' }),
      field({ name: 'isPreferred' }),
      field({ name: 'isFrameContract' }),
      field({ name: 'isInternal' }),
      field({ name: 'isOffer' })
    )
  ),
  // Fully custom tab rendered by a user-supplied React component.
  tab({ name: 'custom', component: CustomTabComponent, disabled: viewName === VIEW_CREATE })
];
// Demo model definition for the "Contracts" entity: model metadata,
// permissions, API handlers and UI configuration for every CRUD view.
export default {
  model: {
    name: 'Contracts', // unique for each model used in your app; used to distinguish translations
    translations,
    fields,
    // Instance-level (cross-field) validation, run on save.
    validate: /* istanbul ignore next */ ({ formInstance }) => {
      if (formInstance.minOrderValueRequired && formInstance.minOrderValue === null) {
        const err = [{
          code: 400,
          id: 'requiredFieldMissing',
          message: 'minOrderValue must be set when minOrderValueRequired is true',
          args: {
            contractId: formInstance.contractId
          }
        }];
        throw err;
      }
      return true;
    }
  },
  permissions: {
    crudOperations: {
      create: true,
      edit: true,
      delete: true,
      view: true
    }
  },
  api,
  ui: {
    search: /* istanbul ignore next */ _ => ({
      searchableFields: [
        { name: 'contractId' },
        { name: 'description' },
        { name: 'extContractId' },
        { name: 'extContractLineId' },
        { name: 'maxOrderValue' },
        // THE SAME CAN BE ACHIEVED WITH THE FOLLOWING
        // EXAMPLE OF USING BUILT-IN RANGE INPUT COMPONENT:
        // { name: 'maxOrderValue', render: { component: BUILTIN_RANGE_INPUT, props: { type: 'integer' } } },
        { name: 'createdOn' }
      ],
      resultFields: [
        { name: 'contractId', sortable: true },
        { name: 'description', sortable: true, sortByDefault: true },
        { name: 'extContractId', sortable: true },
        { name: 'extContractLineId', sortable: true },
        { name: 'testNumberTypeField', textAlignment: 'right' }
      ],
      /**
       * custom pagination settings can be defined like this.
       * 'pagination' should be either missing or fully defined
       * (no partial definitions are allowed, e.g. only 'defaultMax' gonna break)
       */
      pagination: {
        defaultMax: 10,
        options: [
          { max: 10, label: '10' },
          { max: 20, label: '20' },
          { max: 30, label: '30' },
        ]
      }
    }),
    instanceLabel: /* istanbul ignore next */ instance => instance._objectLabel || instance.contractId || '',
    create: {
      // Seed a new instance from the current search filter,
      // skipping range fields and empty (null) filter values.
      defaultNewInstance: /* istanbul ignore next */ ({ filter }) => Object.keys(filter).reduce(
        (rez, fieldName) => {
          const isRange = ['maxOrderValue', 'createdOn'].indexOf(fieldName) !== -1;
          return isRange || filter[fieldName] === null ?
            rez :
            {
              ...rez,
              [fieldName]: filter[fieldName]
            };
        },
        {}
      ),
      formLayout: buildFormLayout(VIEW_CREATE)
    },
    edit: {
      formLayout: buildFormLayout(VIEW_EDIT)
    },
    show: {
      formLayout: buildFormLayout(VIEW_SHOW)
    }
  }
};
<|start_filename|>src/crudeditor-lib/views/create/reducer.js<|end_filename|>
import cloneDeep from 'lodash/cloneDeep';
import isEqual from 'lodash/isEqual';
import u from 'updeep';
import { checkFormLayout } from '../../check-model';
import { isSystemError } from '../../lib';
import {
findFieldLayout,
getTab
} from '../lib';
import {
ALL_INSTANCE_FIELDS_VALIDATE_REQUEST,
INSTANCE_FIELD_CHANGE,
INSTANCE_FIELD_VALIDATE,
INSTANCE_SAVE_FAIL,
INSTANCE_SAVE_REQUEST,
INSTANCE_SAVE_SUCCESS,
VIEW_INITIALIZE,
VIEW_REDIRECT_REQUEST,
VIEW_REDIRECT_SUCCESS,
VIEW_REDIRECT_FAIL,
TAB_SELECT
} from './constants';
import {
STATUS_READY,
STATUS_CREATING,
STATUS_UNINITIALIZED,
EMPTY_FIELD_VALUE,
STATUS_REDIRECTING,
UNPARSABLE_FIELD_VALUE
} from '../../common/constants';
// Pristine shape of the Create View store slice; cloned on reducer creation
// and again whenever the view is reset (VIEW_REDIRECT_SUCCESS).
const defaultStoreStateTemplate = {
  // predefinedFields: <object, an entity instance with predefined field values>
  predefinedFields: {},

  /* Parsed instance as displayed in the form.
   * {
   *   <string, field name>: <serializable, field value for communication with the server>,
   * }
   */
  formInstance: undefined,

  /* Formatted instance as displayed in the form.
   * {
   *   <string, field name>: <any, field value for communication with rendering React Component>,
   * }
   * NOTE: formInstance values and formattedInstance values represent different values in case of parsing error
   * (i.e. rendered value cannot be parsed into its string representation).
   */
  formattedInstance: undefined,

  // Must always be an array, may be empty.
  formLayout: [],

  // A ref to one of tabs element => it is undefined when and only when formLayout does not consist of tabs.
  activeTab: undefined,

  // Human-readable label of the instance being created (shown in the view header).
  instanceLabel: undefined,

  errors: {
    // object with keys as field names,
    // values as arrays of Parsing Errors and Field Validation Errors, may not be empty.
    // (the object does not have keys for fields with successfully parsed/validated values).
    fields: {}
  },

  status: STATUS_UNINITIALIZED
};
/*
 * Reducer factory for the Create View store slice.
 * "modelDefinition" and "i18n" are closed over once; the returned function is
 * a standard Redux reducer which builds an updeep change-slice per action.
 *
 * XXX:
 * Only objects and arrays are allowed at branch nodes.
 * Only primitive data types are allowed at leaf nodes.
 */
export default /* istanbul ignore next */ (modelDefinition, i18n) => (
  storeState = cloneDeep(defaultStoreStateTemplate),
  { type, payload, error, meta }
) => {
  // Every action other than VIEW_INITIALIZE is ignored until the view is initialized.
  if (storeState.status === STATUS_UNINITIALIZED && type !== VIEW_INITIALIZE) {
    return storeState;
  }

  // Accumulates only the parts of the state that change; merged via updeep at the end.
  let newStoreStateSlice = {};

  /* eslint-disable padded-blocks */
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████

  if (type === VIEW_INITIALIZE) {
    const { predefinedFields } = payload;

    if (!isEqual(predefinedFields, storeState.predefinedFields)) {
      newStoreStateSlice.predefinedFields = u.constant(predefinedFields);
    }

    // create form instance using all existing fields
    // then rewrite predefined values coming from search view
    const formInstance = {
      ...Object.keys(modelDefinition.model.fields).reduce(
        (rez, fieldName) => ({
          ...rez,
          [fieldName]: EMPTY_FIELD_VALUE
        }),
        {}
      ),
      ...cloneDeep(predefinedFields)
    };

    newStoreStateSlice.formInstance = u.constant(formInstance);

    const formLayout = modelDefinition.ui.create.formLayout(formInstance).
      filter(entry => !!entry); // Removing empty tabs/sections and null tabs/sections/fields.

    checkFormLayout(formLayout);

    let hasTabs;
    let hasSectionsOrFields;

    // Top level of a layout must be homogeneous: either all tabs, or sections/fields.
    formLayout.forEach(entry => {
      hasTabs = hasTabs || entry.tab;
      hasSectionsOrFields = hasSectionsOrFields || entry.section || entry.field;

      if (hasTabs && hasSectionsOrFields) {
        throw new TypeError('formLayout must not have tabs together with sections/fields at top level');
      }
    });

    newStoreStateSlice.formLayout = u.constant(formLayout);

    // Build the formatted (display) instance for every field present in the layout.
    const formattedInstance = Object.keys(formInstance).reduce(
      (rez, fieldName) => {
        const fieldLayout = findFieldLayout(fieldName)(formLayout);
        return fieldLayout ? {
          ...rez,
          [fieldName]: fieldLayout.render.value.converter.format(formInstance[fieldName], i18n)
        } : rez; // Field from the modelDefinition.model.fields is not in formLayout => it isn't displayed.
      },
      {}
    );

    newStoreStateSlice.formattedInstance = u.constant(formattedInstance);

    newStoreStateSlice.activeTab = u.constant(
      getTab(formLayout)
    );

    newStoreStateSlice.instanceLabel = modelDefinition.ui.instanceLabel(formInstance);

    newStoreStateSlice.errors = u.constant({
      fields: {}
    });

    newStoreStateSlice.status = STATUS_READY;

  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████

  } else if (type === VIEW_REDIRECT_REQUEST) {
    newStoreStateSlice.status = STATUS_REDIRECTING;
  } else if (type === INSTANCE_SAVE_SUCCESS) {
    newStoreStateSlice.status = STATUS_READY;
  } else if (type === VIEW_REDIRECT_FAIL) {
    newStoreStateSlice.status = STATUS_READY;
  } else if (type === VIEW_REDIRECT_SUCCESS) {
    // Resetting the store to initial uninitialized state.
    newStoreStateSlice = u.constant(cloneDeep(defaultStoreStateTemplate));

  // ███████████████████████████████████████████████████████████████████████████████████████████████████████

  } else if (type === INSTANCE_SAVE_REQUEST) {
    newStoreStateSlice.status = STATUS_CREATING;

  // ███████████████████████████████████████████████████████████████████████████████████████████████████████

  } else if (type === INSTANCE_SAVE_FAIL) {
    newStoreStateSlice.status = STATUS_READY;

  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████

  } else if (type === TAB_SELECT) {
    const { tabName } = payload; // may be not specified (i.e. falsy).

    newStoreStateSlice.activeTab = u.constant(
      getTab(storeState.formLayout, tabName)
    );

  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████

  } else if (type === INSTANCE_FIELD_CHANGE) {
    const {
      name: fieldName,
      value: fieldValue
    } = payload;

    const {
      validate,
      render: {
        value: {
          converter
        }
      }
    } = findFieldLayout(fieldName)(storeState.formLayout);

    // Labeled block: "break PARSE_LABEL" short-circuits the parse→format→validate pipeline.
    PARSE_LABEL: {
      let newFormValue;

      try {
        newFormValue = converter.parse(fieldValue, i18n);
      } catch (err) {
        // Rethrow system errors.
        if (isSystemError(err)) {
          throw err;
        }

        const errors = Array.isArray(err) ? err : [err];

        // Parsing failed: keep the raw rendered value, mark the form value unparsable.
        newStoreStateSlice.formInstance = {
          [fieldName]: UNPARSABLE_FIELD_VALUE
        };

        if (!isEqual(fieldValue, storeState.formattedInstance[fieldName])) {
          newStoreStateSlice.formattedInstance = {
            [fieldName]: u.constant(fieldValue)
          };
        }

        if (!isEqual(errors, storeState.errors.fields[fieldName])) {
          newStoreStateSlice.errors = {
            fields: {
              [fieldName]: errors
            }
          };
        }

        break PARSE_LABEL;
      }

      if (!isEqual(newFormValue, storeState.formInstance[fieldName])) {
        newStoreStateSlice.formInstance = {
          [fieldName]: u.constant(newFormValue)
        };
      }

      // Round-trip the parsed value through the formatter so the UI shows the canonical form.
      const newFormattedValue = converter.format(newFormValue, i18n);

      if (!isEqual(newFormattedValue, storeState.formattedInstance[fieldName])) {
        newStoreStateSlice.formattedInstance = {
          [fieldName]: u.constant(newFormattedValue)
        };
      }

      try {
        // Validate against the would-be instance (current instance + this new value).
        validate(newFormValue, {
          ...storeState.formInstance,
          [fieldName]: newFormValue
        });
      } catch (err) {
        // Rethrow system errors.
        if (isSystemError(err)) {
          throw err;
        }

        const errors = Array.isArray(err) ? err : [err];

        if (!isEqual(errors, storeState.errors.fields[fieldName])) {
          newStoreStateSlice.errors = {
            fields: {
              [fieldName]: errors
            }
          };
        }

        break PARSE_LABEL;
      }

      // Value parsed and validated => clear any previously recorded errors for the field.
      if (storeState.errors.fields[fieldName]) {
        newStoreStateSlice.errors = {
          // u.omit() argument must be an array, since lodash v. 4.17.4 no longer supports a string.
          fields: u.omit([fieldName])
        };
      }
    }

  // ███████████████████████████████████████████████████████████████████████████████████████████████████████

  } else if (type === INSTANCE_FIELD_VALIDATE) {
    const fieldName = payload.name;
    const fieldValue = storeState.formInstance[fieldName];

    // Unparsable values already carry a parsing error; validation would be meaningless.
    if (fieldValue !== UNPARSABLE_FIELD_VALUE) {
      PARSE_LABEL: {
        try {
          findFieldLayout(fieldName)(storeState.formLayout).validate(fieldValue, storeState.formInstance);
        } catch (err) {
          // Rethrow system errors.
          if (isSystemError(err)) {
            throw err;
          }

          const errors = Array.isArray(err) ? err : [err];

          if (!isEqual(errors, storeState.errors.fields[fieldName])) {
            newStoreStateSlice.errors = {
              fields: {
                [fieldName]: errors
              }
            };
          }

          break PARSE_LABEL;
        }

        if (storeState.errors.fields[fieldName]) {
          newStoreStateSlice.errors = {
            // u.omit() argument must be an array, since lodash v. 4.17.4 no longer supports a string.
            fields: u.omit([fieldName])
          };
        }
      }
    }

  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████

  } else if (type === ALL_INSTANCE_FIELDS_VALIDATE_REQUEST) {
    newStoreStateSlice.errors = {
      fields: {
      }
    };

    Object.keys(modelDefinition.model.fields).forEach(fieldName => {
      const fieldValue = storeState.formInstance[fieldName];
      const fieldLayout = findFieldLayout(fieldName)(storeState.formLayout);

      if (
        // Field from the modelDefinition.model.fields is not in formLayout => it isn't displayed in Create View
        !fieldLayout ||
        // Field is read-only => no validation needed
        fieldLayout.readOnly ||
        fieldValue === UNPARSABLE_FIELD_VALUE
      ) {
        return;
      }

      try {
        fieldLayout.validate(fieldValue, storeState.formInstance);
      } catch (err) {
        // Rethrow system errors.
        if (isSystemError(err)) {
          throw err;
        }

        const errors = Array.isArray(err) ? err : [err];

        if (!isEqual(errors, storeState.errors.fields[fieldName])) {
          newStoreStateSlice.errors.fields[fieldName] = errors;
        }

        return;
      }

      if (storeState.errors.fields[fieldName]) {
        // u.omit() argument must be an array, since lodash v. 4.17.4 no longer supports a string.
        // NOTE(review): this assignment replaces the whole "fields" object accumulated in this
        // loop (including errors recorded for earlier fields) with an omit-transform function.
        // Looks like fields[fieldName]-level omission may have been intended — verify.
        newStoreStateSlice.errors.fields = u.omit([fieldName]);
      }
    });

  // ███████████████████████████████████████████████████████████████████████████████████████████████████████
  /* eslint-enable padded-blocks */
  }

  return u(newStoreStateSlice, storeState); // returned object is frozen for NODE_ENV === 'development'
};
<|start_filename|>src/crudeditor-lib/views/create/index.js<|end_filename|>
import { VIEW_NAME } from './constants';
import { buildFormLayout } from '../lib';
export { getViewState } from './selectors';
/**
 * Normalizes the "create" section of a model definition's UI config:
 * guarantees defaultNewInstance exists and turns the (optional) custom
 * formLayout builder into a ready-to-use layout function.
 * NOTE: intentionally mutates and returns modelDefinition.ui.create so
 * later readers of the model definition see the normalized meta.
 */
export const getUi = ({ modelDefinition }) => {
  const createMeta = modelDefinition.ui.create || {};

  // Fall back to "no predefined values" when the model does not supply a factory.
  createMeta.defaultNewInstance = createMeta.defaultNewInstance || (_ => ({}));

  createMeta.formLayout = buildFormLayout({
    customBuilder: createMeta.formLayout,
    viewName: VIEW_NAME,
    fieldsMeta: modelDefinition.model.fields
  });

  return createMeta;
}
<|start_filename|>src/components/SearchResultListing/index.js<|end_filename|>
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';
import Table from 'react-bootstrap/lib/Table';
import Glyphicon from 'react-bootstrap/lib/Glyphicon';
import Checkbox from 'react-bootstrap/lib/Checkbox';
import { getFieldLabel } from '../lib';
import SearchResultButtons from './SearchResultButtons';
import './styles.less';
/**
 * Table of search results with sortable column headers, per-row operation
 * buttons and (when bulk operations exist) selection checkboxes.
 * Pure presentational component: all data and callbacks arrive via
 * props.model; i18n is taken from the legacy React context.
 */
class SearchResultListing extends PureComponent {
  static propTypes = {
    model: PropTypes.shape({
      data: PropTypes.shape({
        resultInstances: PropTypes.arrayOf(PropTypes.object),
        selectedInstances: PropTypes.arrayOf(PropTypes.object),
        resultFields: PropTypes.arrayOf(PropTypes.object),
        sortParams: PropTypes.object,
        isLoading: PropTypes.bool,
        pageParams: PropTypes.shape({
          offset: PropTypes.number.isRequired
        })
      }),
      permissions: PropTypes.shape({
        delete: PropTypes.func.isRequired
      }).isRequired,
      actions: PropTypes.objectOf(PropTypes.func).isRequired,
      instanceOperations: PropTypes.func.isRequired,
      bulkOperations: PropTypes.object.isRequired
    }).isRequired
  }

  static contextTypes = {
    i18n: PropTypes.object
  };

  constructor(props) {
    super(props);
    // Ref to the scrollable table container; passed down so operation
    // dropdowns can position themselves relative to it.
    this._tableContainerRef = React.createRef();
  }

  // Clicking a sortable header re-sorts by that field; a second click on the
  // currently ascending sort field flips the order to descending.
  handleResort = fieldName => _ => this.props.model.actions.searchInstances({
    sort: fieldName,
    // XXX: sortField and sortOrder must be accessed with this.props.model.data for up to date values!
    order: fieldName === this.props.model.data.sortParams.field && this.props.model.data.sortParams.order === 'asc' ?
      'desc' :
      'asc'
  })

  // Toggles selection of a single row from its checkbox change event.
  handleToggleSelected = instance => ({
    target: {
      checked: selected
    }
  }) => this.props.model.actions.toggleSelected({ selected, instance })

  // Header checkbox: select/deselect all result instances at once.
  handleToggleSelectedAll = ({ target: { checked } }) => this.props.model.actions.toggleSelectedAll(checked)

  render() {
    const {
      data: {
        selectedInstances,
        resultInstances: instances,
        resultFields,
        pageParams: { offset },
        sortParams: {
          field: sortField,
          order: sortOrder
        }
      },
      instanceOperations,
      bulkOperations
    } = this.props.model;

    const { i18n } = this.context;

    // Selection checkboxes are rendered only when at least one bulk operation exists.
    const bulkOperationsExist = Object.keys(bulkOperations).length > 0;

    return (
      <div className="crud--search-result-listing__table-container" ref={this._tableContainerRef}>
        <Table condensed={true} className="crud--search-result-listing__table">
          <thead>
            <tr>
              {
                bulkOperationsExist &&
                <th>
                  <Checkbox
                    checked={selectedInstances.length === instances.length && instances.length !== 0}
                    disabled={instances.length === 0}
                    onChange={this.handleToggleSelectedAll}
                  />
                </th>
              }
              {
                resultFields.map(({ name, sortable }) => (
                  <th key={`th-${name}`}>
                    {
                      sortable ?
                        <a
                          className="crud--search-result-listing__sort-button"
                          style={{ cursor: "pointer", whiteSpace: "nowrap" }}
                          onClick={this.handleResort(name)}
                        >
                          {
                            getFieldLabel({ i18n, name })
                          }
                          {
                            sortField === name &&
                            <Glyphicon
                              className="crud--search-result-listing__sort-icon"
                              glyph={`arrow-${sortOrder === 'asc' ? 'up' : 'down'}`}
                            />
                          }
                        </a> :
                        getFieldLabel({ i18n, name })
                    }
                  </th>
                ))
              }
              <th> </th>
            </tr>
          </thead>
          <tbody>
            {
              instances.map((instance, index) => (
                <tr key={`tr-${JSON.stringify(instance)}`}>
                  {
                    bulkOperationsExist &&
                    <td>
                      <Checkbox
                        checked={selectedInstances.indexOf(instance) > -1}
                        onChange={this.handleToggleSelected(instance)}
                      />
                    </td>
                  }
                  {
                    resultFields.map(({ name, component: Component, textAlignment, format }) => (
                      <td
                        key={`td-${name}`}
                        className={
                          textAlignment === 'right' && 'text-right' ||
                          textAlignment === 'center' && 'text-center' ||
                          'text-left'
                        }
                      >
                        {
                          Component ?
                            <Component name={name} instance={instance} /> :
                            (instance.hasOwnProperty(name) ? format(instance[name], i18n) : '')
                        }
                      </td>
                    ))
                  }
                  <td className="text-right">
                    <SearchResultButtons
                      operations={instanceOperations({
                        instance,
                        offset: offset + index
                      })}
                      parentRef={this._tableContainerRef}
                    />
                  </td>
                </tr>
              ))
            }
          </tbody>
        </Table>
      </div>
    );
  }
}
<|start_filename|>src/components/EditSection/index.js<|end_filename|>
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import Collapse from 'react-bootstrap/lib/Collapse';
import { getModelMessage, titleCase } from '../lib';
import './styles.less';
export default class EditSelection extends Component {
static propTypes = {
title: PropTypes.string.isRequired
}
static contextTypes = {
i18n: PropTypes.object.isRequired
}
state = {
collapsed: false
}
handleSelect = _ => this.setState({
collapsed: !this.state.collapsed
})
render() {
const {
title,
children: fields
} = this.props;
const { collapsed } = this.state;
return (
<div>
<h4
onClick={this.handleSelect}
style={{ cursor: "pointer" }}
>
<a>
<span
className={`fa fa-angle-${collapsed ? 'down' : 'up'}`}
style={{ marginRight: "0.2em", textDecoration: 'none' }}
></span>
{getModelMessage({
i18n: this.context.i18n,
key: `model.section.${title}.label`,
defaultMessage: titleCase(title)
})}
</a>
</h4>
<Collapse in={!collapsed}>
<div>
{fields}
</div>
</Collapse>
</div>
);
}
}
<|start_filename|>src/demo/models/contracts/components/CustomTabComponent/index.js<|end_filename|>
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';
import createCrud from '../../../../../crudeditor-lib';
import secondModel from '../../../second-model';
export default class CustomTabComponent extends PureComponent {
static propTypes = {
viewName: PropTypes.string.isRequired,
instance: PropTypes.object.isRequired
}
constructor(...args) {
super(...args);
this._secondCrud = createCrud(secondModel)
}
handleTransition = state => {
this._lastState = state
};
render() {
const SecondCrud = this._secondCrud;
return (
<SecondCrud
view={this._lastState || {
name: 'search',
state: {
hideSearchForm: true,
max: 10
}
}}
uiConfig={{
headerLevel: 3
}}
onTransition={this.handleTransition}
/>
)
}
}
<|start_filename|>src/crudeditor-lib/views/create/scenario.spec.js<|end_filename|>
import { expect } from 'chai';
import { put, spawn } from 'redux-saga/effects';
import scenario from './scenario';
import commonScenario from '../../common/scenario';
import saveSaga from './workerSagas/save';
import redirectSaga from '../../common/workerSagas/redirect';
import { VIEW_SOFT_REDIRECT } from '../../common/constants';
import {
INSTANCE_SAVE,
VIEW_INITIALIZE,
VIEW_NAME
} from './constants';
// Transition table the scenario is expected to hand to the common scenario:
// blocking sagas suspend further view actions until they finish,
// non-blocking ones may run concurrently.
const transitions = {
  blocking: {
    [INSTANCE_SAVE]: saveSaga
  },
  nonBlocking: {
    [VIEW_SOFT_REDIRECT]: redirectSaga
  }
}

// Minimal scenario argument: empty viewState makes predefinedFields fall back to {}.
const arg = {
  modelDefinition: {},
  softRedirectSaga: _ => null,
  viewState: {}
}

describe('create view / scenario', () => {
  const gen = scenario(arg);

  it('should put VIEW_INITIALIZE', () => {
    const { value, done } = gen.next();
    expect(value).to.deep.equal(put({
      type: VIEW_INITIALIZE,
      payload: { predefinedFields: arg.viewState.predefinedFields || {} },
      // NOTE(review): "arg" defines no "source" property, so meta.source is
      // undefined here — presumably mirroring scenario.js; verify against it.
      meta: { source: arg.source }
    }));
    expect(done).to.be.false; // eslint-disable-line no-unused-expressions
  })

  it('should fork scenario saga', () => {
    const { value, done } = gen.next();
    expect(value).to.deep.equal(spawn(commonScenario, {
      viewName: VIEW_NAME,
      modelDefinition: arg.modelDefinition,
      softRedirectSaga: arg.softRedirectSaga,
      transitions
    }))
    expect(done).to.be.false; // eslint-disable-line no-unused-expressions
  })

  it('should end iterator', () => {
    const { done } = gen.next();
    expect(done).to.be.true; // eslint-disable-line no-unused-expressions
  })
})
<|start_filename|>src/crudeditor-lib/i18n/exceptions/da.js<|end_filename|>
// Danish (da) translations of the CRUD editor's validation/exception messages.
// Keys are shared across all locale files and must not be translated;
// placeholders like {pattern}, {max}, {min} are substituted at runtime.
export default {
  "common.CrudEditor.default.doesnt.match.message": "Værdien svarer ikke til det krævede mønster ''{pattern}''",
  "common.CrudEditor.default.invalid.email.message": "Ikke et gyldigt e-mailadresseformat",
  "common.CrudEditor.default.invalid.max.message": "Værdien overskrider den maksimale værdi ''{max}''",
  "common.CrudEditor.default.invalid.min.message": "Værdien er mindre end minimumsværdien ''{min}''",
  "common.CrudEditor.default.invalid.max.size.message": "Værdien overskrider den maksimale størrelse på ''{max}''",
  "common.CrudEditor.default.invalid.min.size.message": "Værdien er mindre end minimumsstørrelsen på ''{min}''",
  "common.CrudEditor.default.invalid.validator.message":
    "Værdien kunne ikke gennemføre den brugerdefinerede validering",
  "common.CrudEditor.default.blank.message": "Feltet må ikke være tomt",
  "common.CrudEditor.default.null.message": "Egenskaben må ikke være nul",
  "common.CrudEditor.default.not.unique.message": "Værdien skal være entydig",
  "common.CrudEditor.default.invalid.url.message": "Værdien skal være en gyldig URL-adresse",
  "common.CrudEditor.default.invalid.date.message": "Værdien skal være en gyldig dato",
  "common.CrudEditor.default.invalid.decimal.message": "Værdien skal være et gyldigt tal",
  "common.CrudEditor.default.invalid.integer.message": "Værdien skal være et gyldigt tal",
  "common.CrudEditor.default.errorOccurred.message": "Der opstod en fejl"
}
<|start_filename|>src/crudeditor-lib/views/error/constants.js<|end_filename|>
import { VIEW_ERROR } from '../../common/constants';
// Action types are prefixed with the view name so each per-view reducer
// only reacts to its own actions.
const namespace = VIEW_ERROR;

export const
  VIEW_NAME = VIEW_ERROR,

  /* ████████████████████████████████████████████
   * ███ ACTION TYPES (in alphabetical order) ███
   * ████████████████████████████████████████████
   */

  VIEW_INITIALIZE = namespace + '/VIEW_INITIALIZE',
  VIEW_REDIRECT_REQUEST = namespace + '/VIEW_REDIRECT_REQUEST',
  VIEW_REDIRECT_FAIL = namespace + '/VIEW_REDIRECT_FAIL',
  VIEW_REDIRECT_SUCCESS = namespace + '/VIEW_REDIRECT_SUCCESS';
<|start_filename|>src/crudeditor-lib/views/error/reducer.js<|end_filename|>
import cloneDeep from 'lodash/cloneDeep';
import u from 'updeep';
import {
STATUS_READY,
STATUS_REDIRECTING,
STATUS_UNINITIALIZED,
ERROR_CODE_INTERNAL
} from '../../common/constants';
import {
VIEW_INITIALIZE,
VIEW_REDIRECT_REQUEST,
VIEW_REDIRECT_FAIL,
VIEW_REDIRECT_SUCCESS
} from './constants';
// Pristine shape of the Error View store slice; cloned on reducer creation
// and again whenever the view is reset (VIEW_REDIRECT_SUCCESS).
const defaultStoreStateTemplate = {
  /*
   * An array of errors, may be empty. Each error has the following structure:
   * {
   *   code: <natural number, error code>,
   *   ?payload: <any, structure is defined by error code>
   * }
   */
  errors: [],

  status: STATUS_UNINITIALIZED
};
/*
 * Reducer factory for the Error View store slice.
 *
 * XXX:
 * Only objects and arrays are allowed at branch nodes.
 * Only primitive data types are allowed at leaf nodes.
 */
export default /* istanbul ignore next */ (modelDefinition, i18n) => (
  storeState = cloneDeep(defaultStoreStateTemplate),
  { type, payload, error, meta }
) => {
  // Until initialization, every action except VIEW_INITIALIZE is a no-op.
  if (storeState.status === STATUS_UNINITIALIZED && type !== VIEW_INITIALIZE) {
    return storeState;
  }

  // Slice of changes merged into the state via updeep at the end.
  let stateUpdate = {};

  switch (type) {
    case VIEW_INITIALIZE:
    case VIEW_REDIRECT_FAIL: {
      // Normalize payload into an array of { code, ?payload } descriptors.
      const rawErrors = Array.isArray(payload) ? payload : [payload];

      stateUpdate.errors = rawErrors.map(({ code, ...rest }) => ({
        code: code || ERROR_CODE_INTERNAL,
        ...(Object.keys(rest).length ? { payload: rest.payload || rest } : {})
      }));

      stateUpdate.status = STATUS_READY;
      break;
    }

    case VIEW_REDIRECT_REQUEST:
      stateUpdate.status = STATUS_REDIRECTING;
      break;

    case VIEW_REDIRECT_SUCCESS:
      // Resetting the store to initial uninitialized state.
      stateUpdate = u.constant(cloneDeep(defaultStoreStateTemplate));
      break;

    default:
      break;
  }

  return u(stateUpdate, storeState); // returned object is frozen for NODE_ENV === 'development'
};
<|start_filename|>src/crudeditor-lib/views/lib.js<|end_filename|>
import React from 'react';
import cloneDeep from 'lodash/cloneDeep';
import GenericInput from '../../components/GenericInput';
import RangeInput from '../../components/RangeInput';
import deferValueSync from '../../components/DeferValueSyncHOC';
import {
converter,
validate as standardFieldValidate
} from '../../data-types-lib';
import {
DEFAULT_TAB_COLUMNS,
VIEW_EDIT,
VIEW_SHOW
} from '../common/constants';
import {
FIELD_TYPE_BOOLEAN,
FIELD_TYPE_INTEGER,
FIELD_TYPE_DECIMAL,
FIELD_TYPE_STRING,
FIELD_TYPE_STRING_DATE,
FIELD_TYPE_STRING_DATE_ONLY,
FIELD_TYPE_STRING_INTEGER,
FIELD_TYPE_STRING_DECIMAL,
FIELD_TYPE_STRING_DATE_RANGE,
FIELD_TYPE_INTEGER_RANGE,
FIELD_TYPE_DECIMAL_RANGE,
FIELD_TYPE_STRING_INTEGER_RANGE,
FIELD_TYPE_STRING_DECIMAL_RANGE,
UI_TYPE_BOOLEAN,
UI_TYPE_DATE,
UI_TYPE_STRING,
UI_TYPE_DATE_RANGE_OBJECT,
UI_TYPE_STRING_RANGE_OBJECT
} from '../../data-types-lib/constants';
// Names of the built-in render components that model definitions may
// reference by string instead of supplying a custom React component.
export const
  COMPONENT_NAME_INPUT = 'input',
  COMPONENT_NAME_RANGE_INPUT = 'rangeInput';
/*
* The function receives render object with component name in "component" property.
* It returns React Component with the name and UI Type corrresponding to the Component.
* As side effect, it also assigns default "type" to render.props, if not specified.
*/
/*
 * Resolves a built-in render-component name ("input"/"rangeInput") into:
 *   component     - the React component wrapped in DeferValueSyncHOC,
 *   uiType        - the UI Type the component's value prop speaks,
 *   valuePropName - the prop through which the value is passed.
 * Side effect: assigns the default "type" ('string') to render.props when missing.
 * Throws TypeError for unknown component names or unsupported prop types.
 */
const namedComponentInfo = ({
  component: name,
  props
}) => {
  let component, uiType, valuePropName;

  switch (name) {
    case COMPONENT_NAME_INPUT:
      component = deferValueSync(GenericInput);
      valuePropName = 'value';

      if (!props.hasOwnProperty('type')) {
        props.type = 'string'; // eslint-disable-line no-param-reassign
      }

      // Map the input's prop type onto the UI Type of its value.
      switch (props.type) {
        case 'checkbox':
          uiType = UI_TYPE_BOOLEAN;
          break;
        case 'date':
          uiType = UI_TYPE_DATE;
          break;
        case 'string':
          uiType = UI_TYPE_STRING;
          break;
        default:
          throw new TypeError(`Unknown type "${props.type}" of "${COMPONENT_NAME_INPUT}" render component`);
      }

      break;
    case COMPONENT_NAME_RANGE_INPUT:
      component = deferValueSync(RangeInput);
      valuePropName = 'value';

      if (!props.hasOwnProperty('type')) {
        props.type = 'string'; // eslint-disable-line no-param-reassign
      }

      // Range inputs produce {from, to} objects, hence the *_RANGE_OBJECT UI types.
      switch (props.type) {
        case 'date':
          uiType = UI_TYPE_DATE_RANGE_OBJECT;
          break;
        case 'string':
          uiType = UI_TYPE_STRING_RANGE_OBJECT;
          break;
        default:
          throw new TypeError(`Unknown type "${props.type}" of "${COMPONENT_NAME_RANGE_INPUT}" render component`);
      }

      break;
    default:
      throw new TypeError(`Unknown render component "${name}"`);
  }

  return {
    component,
    uiType,
    valuePropName
  };
}
// Default render definition per field type, used when a field's layout does
// not supply a custom render. Booleans get a checkbox, date-like types a date
// input, everything else a plain string input; *_RANGE types use rangeInput.
// Field types absent from this map require an explicit custom component.
const defaultFieldRenders = {
  [FIELD_TYPE_BOOLEAN]: {
    component: 'input',
    props: {
      type: 'checkbox'
    }
  },
  [FIELD_TYPE_INTEGER]: {
    component: 'input',
    props: {
      type: 'string'
    }
  },
  [FIELD_TYPE_DECIMAL]: {
    component: 'input',
    props: {
      type: 'string'
    }
  },
  [FIELD_TYPE_STRING]: {
    component: 'input',
    props: {
      type: 'string'
    }
  },
  [FIELD_TYPE_STRING_DATE]: {
    component: 'input',
    props: {
      type: 'date'
    }
  },
  [FIELD_TYPE_STRING_DATE_ONLY]: {
    component: 'input',
    props: {
      type: 'date'
    }
  },
  [FIELD_TYPE_STRING_INTEGER]: {
    component: 'input',
    props: {
      type: 'string'
    }
  },
  [FIELD_TYPE_STRING_DECIMAL]: {
    component: 'input',
    props: {
      type: 'string'
    }
  },
  [FIELD_TYPE_INTEGER_RANGE]: {
    component: 'rangeInput',
    props: {
      type: 'string'
    }
  },
  [FIELD_TYPE_DECIMAL_RANGE]: {
    component: 'rangeInput',
    props: {
      type: 'string'
    }
  },
  [FIELD_TYPE_STRING_DATE_RANGE]: {
    component: 'rangeInput',
    props: {
      type: 'date'
    }
  },
  [FIELD_TYPE_STRING_INTEGER_RANGE]: {
    component: 'rangeInput',
    props: {
      type: 'string'
    }
  },
  [FIELD_TYPE_STRING_DECIMAL_RANGE]: {
    component: 'rangeInput',
    props: {
      type: 'string'
    }
  }
};
// █████████████████████████████████████████████████████████████████████████████████████████████████████████
/**
 * Normalizes a field's render definition (custom or the default for its field
 * type) into the canonical shape:
 *   { component: <React component>, props, value: { propName, ?type, converter } }
 * Throws TypeError when the field type has no default render and no custom
 * render is supplied, or when a custom render contradicts a built-in component.
 */
export const buildFieldRender = ({
  render: customRender,
  type: fieldType
}) => {
  // Clone the custom render because it gets mutated during normalization.
  // For unknown field types the IIFE fallback throws (|| short-circuits otherwise).
  const render = customRender ?
    cloneDeep(customRender) :
    defaultFieldRenders[fieldType] || (_ => {
      throw new TypeError(
        `Field type ${fieldType} is unknown or does not have an assigned render component. Define custom component`
      );
    })();

  if (!render.hasOwnProperty('component')) {
    throw new TypeError('render.component must be defined');
  }

  if (!render.hasOwnProperty('props')) {
    render.props = {};
  }

  if (!render.hasOwnProperty('value')) {
    render.value = {};
  }

  let Component;

  if (typeof render.component === 'string') {
    // Built-in component referenced by name: resolve it and cross-check
    // any explicitly given value.type / value.propName against its own.
    const { component, uiType, valuePropName } = namedComponentInfo(render);

    if (!render.value.hasOwnProperty('type')) {
      render.value.type = uiType;
    } else if (render.value.type !== uiType) {
      throw new TypeError(`Invalid "${render.value.type}" value.type for "${render.component}" component`);
    }

    if (!render.value.hasOwnProperty('propName')) {
      render.value.propName = valuePropName;
    } else if (render.value.propName !== valuePropName) {
      throw new TypeError(`Invalid "${render.value.propName}" value.propName for "${render.component}" component`);
    }

    Component = component;
  } else {
    Component = render.component;
  }

  if (!render.value.hasOwnProperty('propName')) {
    render.value.propName = 'value';
  }

  if (render.value.hasOwnProperty('type')) {
    if (!render.value.hasOwnProperty('converter')) {
      // Look up a standard converter between the field type and the UI type.
      const defaultConverter = converter({
        fieldType,
        uiType: render.value.type
      });

      if (defaultConverter) {
        render.value.converter = defaultConverter;
      }
    }

    // Removing "type" because it was only needed to get default converter, if any.
    // delete render.value.type;
  }

  if (!render.value.hasOwnProperty('converter')) {
    // Identity converter: the rendered value is the stored value.
    render.value.converter = {
      format: value => value,
      parse: value => value
    };
  }

  return {
    ...render,
    component: ({ children, ...props }) => <Component {...props} {...render.props}>{children}</Component>
  };
};
// █████████████████████████████████████████████████████████████████████████████████████████████████████████
// Fallback layout builder used when the model defines no custom formLayout:
// one flat field entry per model field, in model declaration order.
const buildDefaultFormLayout = ({ viewName, fieldsMeta }) => _ => Object.entries(fieldsMeta).map(
  ([name, meta]) => ({
    field: name,
    // Logical Key (unique) fields are read-only in Edit View.
    readOnly: viewName === VIEW_EDIT && meta.unique,
    render: buildFieldRender({
      type: meta.type
    }),
    // Standard constraint-based validation, or a pass-through when none applies.
    validate: standardFieldValidate({
      type: meta.type,
      constraints: meta.constraints
    }) ||
      (value => true)
  })
);
// █████████████████████████████████████████████████████████████████████████████████████████████████████████
// Produces the per-field layout factory handed to custom formLayout builders:
// turns a field descriptor into a canonical { field, readOnly, validate, render } entry.
const buildFieldLayout = (viewName, fieldsMeta) => fieldDescriptor => {
  const {
    name: fieldName,
    readOnly,
    render,
    validate: customValidate
  } = fieldDescriptor;

  const meta = fieldsMeta[fieldName];

  return {
    field: fieldName,
    // Every field is read-only in Show View; otherwise honor the explicit flag.
    readOnly: viewName === VIEW_SHOW || !!readOnly,
    // Custom validator wins; fall back to standard validation, then to a pass-through.
    validate: customValidate ||
      standardFieldValidate({
        type: meta.type,
        constraints: meta.constraints
      }) ||
      (value => true),
    // Assign the default component to fields without a custom one.
    render: buildFieldRender({
      render,
      type: meta.type
    })
  };
};
// █████████████████████████████████████████████████████████████████████████████████████████████████████████
// Section factory for custom formLayout builders.
// Returns an array of the non-null child entries, tagged with a "section" id
// and any extra props as array properties; null when the section ends up empty.
const sectionLayout = ({ name: sectionId, ...props }, ...allEntries) => {
  const entries = allEntries.filter(Boolean);

  if (entries.length === 0) {
    return null;
  }

  entries.section = sectionId;

  Object.entries(props).forEach(([key, value]) => {
    entries[key] = value;
  });

  return entries;
};
// █████████████████████████████████████████████████████████████████████████████████████████████████████████
// Tab factory for custom formLayout builders.
// Returns an array of the non-null child entries, tagged with "tab" id and
// "columns" (sections without their own columns inherit the tab's); extra
// props become array properties. Returns null for a tab with neither entries
// nor a custom component.
const tabLayout = ({ name: tabId, columns, ...props }, ...allEntries) => {
  const entries = allEntries.filter(Boolean);

  entries.tab = tabId;
  entries.columns = columns || DEFAULT_TAB_COLUMNS;

  entries.forEach(entry => {
    if (entry.section && !entry.columns) {
      entry.columns = entries.columns; // eslint-disable-line no-param-reassign
    }
  });

  Object.entries(props).forEach(([key, value]) => {
    entries[key] = value;
  });

  return (entries.length || entries.component) ? entries : null;
};
// █████████████████████████████████████████████████████████████████████████████████████████████████████████
// Builds the layout function for a view: either hands the tab/section/field
// factories to the model's custom builder, or falls back to the flat default.
export const buildFormLayout = ({ customBuilder, viewName, fieldsMeta }) => {
  if (!customBuilder) {
    return buildDefaultFormLayout({ viewName, fieldsMeta });
  }

  return customBuilder({
    tab: tabLayout,
    section: sectionLayout,
    field: buildFieldLayout(viewName, fieldsMeta)
  });
};
// █████████████████████████████████████████████████████████████████████████████████████████████████████████
// Returns a function projecting an instance onto its Logical Key:
// the subset of fields flagged "unique" in the model's fields meta.
export const getLogicalKeyBuilder = fieldsMeta => {
  const keyFields = Object.keys(fieldsMeta).filter(name => fieldsMeta[name].unique);

  return instance => {
    const logicalKey = {};

    keyFields.forEach(name => {
      logicalKey[name] = instance[name];
    });

    return logicalKey;
  };
};
// Returns a walker that depth-first searches a (possibly nested) form layout
// for the layout entry of the given field.
// The walker returns the matching entry, or false when the field is absent.
export const findFieldLayout = fieldName => {
  const walk = node => {
    if (node.field === fieldName) {
      return node;
    }

    if (Array.isArray(node)) {
      for (const child of node) {
        const match = walk(child);

        if (match) {
          return match;
        }
      }
    }

    return false;
  };

  return walk;
};
// Returns the tab entry with the given name from a form layout,
// falling back to the first tab when tabName is falsy or unknown;
// undefined when the layout has no tabs at all.
export const getTab = (formLayout, tabName) => {
  const tabs = formLayout.filter(entry => !!entry.tab); // [] in case of no tabs.

  if (!tabName) {
    return tabs[0]; // default tab, undefined in case of no tabs.
  }

  const match = tabs.find(tab => tab.tab === tabName);

  return match || tabs[0];
}
/**
 * Evaluates an operation's ui() callback for the current view and returns the
 * normalized operation descriptor { title, disabled, dropdown, ?icon },
 * or null when the operation is hidden (show === false).
 * Throws TypeError when ui() returns unknown properties.
 */
const expandOperationUi = ({ viewName, viewState, ui }) => {
  const {
    title,
    icon,
    show = true,
    disabled = false,
    dropdown = true,
    ...rest
  } = ui({ name: viewName, state: viewState });

  if (Object.keys(rest).length) {
    // BUG FIX: TypeError takes a single message argument; the property list
    // previously passed as a second argument was silently discarded, so the
    // error never named the offending properties. Build one message instead.
    throw new TypeError(
      `Forbidden custom/external operation properties: ${Object.keys(rest).join(', ')}`
    );
  }

  if (!show) {
    return null;
  }

  return {
    title: title(), // title is a function so i18n lookup happens lazily, at expansion time.
    disabled,
    dropdown,
    ...(!!icon && { icon }), // spread of false adds nothing when icon is absent.
  };
};
// Unfolds a custom operation: evaluates its ui() descriptor and wires its
// handler() result (a view description) to softRedirectView. Operations whose
// handler yields no valid view are disabled so they cannot be clicked.
export const expandCustomOperation = ({ viewName, viewState, softRedirectView }) => ({ handler, ui }) => {
  const operation = expandOperationUi({ viewName, viewState, ui });

  if (!operation) {
    return null; // hidden operation.
  }

  if (!operation.disabled) {
    // handler is a pure function => calling it eagerly is harmless.
    const targetView = handler();
    const isValidView = typeof targetView === 'object' && !!targetView && !!targetView.name;

    if (isValidView) {
      operation.handler = _ => softRedirectView(targetView);
    } else {
      // handler() returned no view => disable the button to prevent clicks.
      operation.disabled = true;
    }
  }

  return operation;
}
// Unfolds an external operation: evaluates its "ui()" descriptor and attaches
// the externally supplied "handler" as-is.
export const expandExternalOperation = ({ viewName, viewState }) => ({ handler, ui }) => {
  const operationUi = expandOperationUi({ viewName, viewState, ui });

  if (!operationUi) {
    return null; // The operation is hidden => nothing to render.
  }

  // An enabled external operation is useless without a click handler.
  if (typeof handler !== 'function' && !operationUi.disabled) {
    throw new TypeError(`External operation "${operationUi.title}" must have a handler`);
  }

  return {
    ...operationUi,
    handler
  };
}
<|start_filename|>src/crudeditor-lib/views/search/selectors.js<|end_filename|>
import cloneDeep from 'lodash/cloneDeep';
import { buildViewSelectorWrapper } from '../../selectorWrapper';
import {
cleanFilter,
getDefaultSortField
} from './lib';
import {
DEFAULT_OFFSET,
DEFAULT_ORDER,
VIEW_NAME
} from './constants';
import {
STATUS_DELETING,
STATUS_INITIALIZING,
STATUS_REDIRECTING,
STATUS_SEARCHING,
PERMISSION_CREATE
} from '../../common/constants';
import { isAllowed } from '../../lib';
// Scopes every selector exported below to the Search View slice of the store.
const wrapper = buildViewSelectorWrapper(VIEW_NAME);
// Extracts the total number of found instances from the view store state.
const _getTotalCount = storeState => storeState.totalCount;
// Builds the canonical Search View state: only the parameters which differ
// from their defaults are included, so the resulting object is minimal
// (e.g. suitable for URL serialization).
const _getViewState = (storeState, modelDefinition) => {
  const {
    resultFilter,
    sortParams: { field: sort, order },
    pageParams: { max, offset }
  } = storeState;

  const searchMeta = modelDefinition.ui.search;
  const filter = cleanFilter(resultFilter);
  const viewState = {};

  if (filter) {
    viewState.filter = filter;
  }

  if (sort !== getDefaultSortField(searchMeta)) {
    viewState.sort = sort;
  }

  if (order !== DEFAULT_ORDER) {
    viewState.order = order;
  }

  if (max !== searchMeta.pagination.defaultMax) {
    viewState.max = max;
  }

  if (offset !== DEFAULT_OFFSET) {
    viewState.offset = offset;
  }

  return viewState;
};
export const
  // █████████████████████████████████████████████████████████████████████████████████████████████████████████
  // Minimal view state (only the non-default search parameters); see _getViewState.
  getViewState = wrapper(_getViewState),
  // █████████████████████████████████████████████████████████████████████████████████████████████████████████
  // Total number of instances matching the current search.
  getTotalCount = wrapper(_getTotalCount),
  // █████████████████████████████████████████████████████████████████████████████████████████████████████████
  // Instance pre-filled from the current filter for the Create View;
  // {} when the user has no "create" permission.
  getDefaultNewInstance = wrapper((storeState, modelDefinition) => isAllowed(
    modelDefinition.permissions.crudOperations,
    PERMISSION_CREATE
  ) &&
    cloneDeep(modelDefinition.ui.create.defaultNewInstance({
      filter: {}, // Setting filter to empty object if it is not specified in view state.
      ..._getViewState(storeState, modelDefinition)
    })) ||
    {}
  ),
  // █████████████████████████████████████████████████████████████████████████████████████████████████████████
  // Aggregated props consumed by the Search View React components.
  getViewModelData = wrapper(/* istanbul ignore next */ (storeState, {
    model: modelMeta,
    ui: {
      spinner,
      search: {
        resultFields,
        searchableFields,
        pagination
      }
    }
  }) => ({
    spinner,
    entityName: modelMeta.name,
    fieldErrors: storeState.errors.fields,
    formFilter: storeState.formFilter,
    formattedFilter: storeState.formattedFilter,
    // The view shows a spinner while any of these transient statuses is active.
    isLoading: [
      STATUS_DELETING,
      STATUS_INITIALIZING,
      STATUS_REDIRECTING,
      STATUS_SEARCHING
    ].indexOf(storeState.status) > -1,
    pageParams: {
      max: storeState.pageParams.max,
      offset: storeState.pageParams.offset
    },
    resultFields,
    resultFilter: storeState.resultFilter,
    resultInstances: storeState.resultInstances,
    // Flatten each searchable-field meta into { name, component, valuePropName }.
    searchableFields: searchableFields.map(({
      name,
      render: {
        component,
        value: {
          propName: valuePropName
        }
      }
    }) => ({
      name,
      component,
      valuePropName
    })),
    selectedInstances: storeState.selectedInstances,
    sortParams: {
      field: storeState.sortParams.field,
      order: storeState.sortParams.order
    },
    status: storeState.status,
    totalCount: _getTotalCount(storeState),
    hideSearchForm: storeState.hideSearchForm,
    gotoPage: storeState.gotoPage,
    pagination
  }));
<|start_filename|>src/components/SearchResultListing/SearchResultButtons.js<|end_filename|>
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';
import ButtonGroup from 'react-bootstrap/lib/ButtonGroup';
import OperationsBar from '../OperationsBar';
// Action buttons (and dropdown menus) for a single row of the search result
// listing. `parentRef` points to the scrollable container of the result table
// and is mutated to work around CSS overflow clipping of open dropdown menus.
export default class SearchResultButtons extends PureComponent {
  static propTypes = {
    parentRef: PropTypes.object,
    operations: PropTypes.any
  }

  // previousSource remembers how the last dropdown toggle was triggered
  // ('click' vs 'rootClose') to handle dropdowns closing each other.
  state = {
    previousSource: null
  }

  // handleToggleDropdown is a workaround for weird CSS overflow behavior
  // details: https://stackoverflow.com/a/6433475
  handleToggleDropdown = (dropdownOpened, event, { source }) => {
    const { parentRef } = this.props;
    const parentWidth = parentRef.current.clientWidth;
    const tableWidth = parentRef.current.firstChild.scrollWidth
    // table is wider than visible div -> show scroll
    if (parentWidth < tableWidth) {
      parentRef.current.style.overflow = 'auto';
      return;
    }
    // handle multiple dropdowns closing each other
    // don't rewrite styles if one DD is closed by opening another DD
    if (this.state.previousSource === 'click' && source === 'rootClose') {
      return;
    }
    // 'visible' lets an open dropdown escape the container; 'auto' restores scrolling.
    parentRef.current.style.overflow = dropdownOpened ? 'visible' : 'auto';
    this.setState({ previousSource: source });
  }

  render() {
    const { operations } = this.props;
    return (
      <OperationsBar operations={operations} onToggleDropdown={this.handleToggleDropdown} size="small">
        {
          buttons => buttons.length ? (
            <ButtonGroup bsSize="sm" className="crud--search-result-listing__action-buttons">
              {buttons}
            </ButtonGroup>
          ) :
            null
        }
      </OperationsBar>
    );
  }
}
<|start_filename|>src/data-types-lib/fieldTypes/stringDateOnly/stringUiType.spec.js<|end_filename|>
import { expect } from 'chai';
import assert from 'assert';
import {
ERROR_CODE_PARSING,
EMPTY_FIELD_VALUE,
ERROR_INVALID_DATE
} from '../../constants';
import converter from './stringUiType';
// Unit tests for the stringDateOnly <-> string UI-type converter
// (format: field value -> rendered string; parse: rendered string -> field value).
describe('fieldTypes :: stringDateOnly <-> string', () => {
  describe('format', () => {
    it('should convert stringified date to string', () => {
      const value = new Date().toISOString().slice(0, 10);
      const result = converter.format(value);
      expect(result).to.equal(new Date(value).toString())
    });
  });
  describe('parse', () => {
    it('should convert empty string into null', () => {
      const value = '';
      const result = converter.parse(value);
      expect(result).to.equal(EMPTY_FIELD_VALUE)
    });
    it('should convert stringified date into stringDateOnly', () => {
      const date = new Date('1995-02-17T03:24:00')
      const value = date.toString();
      const result = converter.parse(value);
      expect(result).to.equal('1995-02-17')
    });
    it('should throw for not date-like string', () => {
      const value = 'ewqrwerew';
      try {
        converter.parse(value);
        // parse() must throw before this line is reached.
        assert(false)
      } catch (e) {
        assert.deepEqual(
          e, {
            code: ERROR_CODE_PARSING,
            id: ERROR_INVALID_DATE,
            message: 'Invalid date'
          }
        )
      }
    })
  })
});
<|start_filename|>src/crudeditor-lib/i18n/sv.js<|end_filename|>
import exceptions from './exceptions/sv';
/* eslint-disable max-len */
// Swedish (sv) translations of the common CRUD Editor UI messages.
// The keys are message ids => they must stay in sync with the other locales.
const common = {
  "common.CrudEditor.new.title": "Ny",
  "common.CrudEditor.create.header": "Skapa {modelName}",
  "common.CrudEditor.edit.header": "Redigera {modelName}",
  // NOTE(review): "{modelName0}" differs from the "{modelName}" placeholder
  // used by the other locales — confirm which placeholder the formatter passes.
  "common.CrudEditor.show.header": "Visa {modelName0}",
  "common.CrudEditor.duplicate.header": "Kopiera {modelName}",
  "common.CrudEditor.cancel.button": "Avbryt",
  "common.CrudEditor.save.button": "Spara",
  "common.CrudEditor.saveAndNew.button": "Spara och ny",
  "common.CrudEditor.saveAndNext.button": "Spara och nästa",
  "common.CrudEditor.search.header": "Sök {payload}",
  "common.CrudEditor.search.button": "Sök",
  "common.CrudEditor.reset.button": "Återställ",
  "common.CrudEditor.create.button": "Skapa",
  "common.CrudEditor.select.button": "Välj",
  "common.CrudEditor.close.button": "Stäng",
  "common.CrudEditor.actions.tableHeader": "Åtgärder",
  "common.CrudEditor.show.button": "Vy",
  "common.CrudEditor.edit.button": "Redigera",
  "common.CrudEditor.delete.button": "Ta bort",
  "common.CrudEditor.deleteSelected.button": "Ta bort markerade",
  "common.CrudEditor.duplicate.button": "Kopia",
  "common.CrudEditor.refresh.button": "Uppdatera",
  "common.CrudEditor.revisions.button": "Revisioner",
  "common.CrudEditor.delete.confirmation": "Vill du ta bort posten?",
  "common.CrudEditor.deleteSelected.confirmation": "Vill du ta bort de markerade artiklarna?",
  "common.CrudEditor.noItemsSelected.alert": "Inga poster har valts!",
  "common.CrudEditor.objectSaved.message": "Objekt skapat.",
  "common.CrudEditor.objectUpdated.message": "Objekt uppdaterat.",
  "common.CrudEditor.objectSaveFailed.message": "Objektet kunde inte sparas.",
  "common.CrudEditor.objectDeleted.message": "Objekt borttaget.",
  "common.CrudEditor.objectsDeleted.message": "Objekt {labels} borttagna.",
  "common.CrudEditor.objectsDeleteIsNoAllowed.message": "Du kan inte ta bort vissa objekt på grund av säkerhetsbegränsningar.",
  "common.CrudEditor.objectDeleteFailed.message": "Det gick inte att ta bort objektet, eventuell används det redan.",
  "common.CrudEditor.objectsDeleteFailed.message": "Det gick inte att ta bort objekten {count}, eventuellt används de redan.",
  "common.CrudEditor.objectDuplicated.message": "Objektet kopieras.",
  "common.CrudEditor.noAssociationEntriesFound.message": "Inga poster hittades. Du kan {1} en ny post.",
  "common.CrudEditor.message.ajax.loading": "Vänta...",
  "common.CrudEditor.search.result.label": "Sökresultat",
  "common.CrudEditor.unsaved.confirmation": "Du har gjort ändringar. Om du lämnar platsen försvinner ändringarna.",
  "common.CrudEditor.search.resultsPerPage": "Resultat per sida",
  "common.CrudEditor.search.all": "Alla",
  "common.CrudEditor.export.button": "Exportera",
  "common.CrudEditor.found.items.message": "{count} artikel/artiklar hittades",
  "common.CrudEditor.range.from": "fr\u00e5n",
  "common.CrudEditor.range.to": "till",
  "common.CrudEditor.confirm.action": "Bekräfta",
  "common.CrudEditor.search.showSearchForm": "Visa sökformulär",
  "common.CrudEditor.search.hideSearchForm": "Dölj sökformulär",
  "common.CrudEditor.pagination.goToPage": "Go"
}
/* eslint-enable max-len */
// Common UI messages merged with the Swedish exception/validation messages.
export default {
  ...common,
  ...exceptions
}
<|start_filename|>src/crudeditor-lib/i18n/en.js<|end_filename|>
import exceptions from './exceptions/en';
/* eslint-disable max-len */
// English (en) translations of the common CRUD Editor UI messages.
// The keys are message ids => they must stay in sync with the other locales.
const common = {
  "common.CrudEditor.new.title": "New",
  "common.CrudEditor.create.header": "Create {modelName}",
  "common.CrudEditor.edit.header": "Edit {modelName}",
  "common.CrudEditor.show.header": "View {modelName}",
  "common.CrudEditor.duplicate.header": "Duplicate {modelName}",
  "common.CrudEditor.cancel.button": "Cancel",
  "common.CrudEditor.save.button": "Save",
  "common.CrudEditor.saveAndNew.button": "Save and New",
  "common.CrudEditor.saveAndNext.button": "Save and Next",
  "common.CrudEditor.search.header": "Search {payload}",
  "common.CrudEditor.search.button": "Search",
  "common.CrudEditor.reset.button": "Reset",
  "common.CrudEditor.create.button": "Create",
  "common.CrudEditor.select.button": "Select",
  "common.CrudEditor.export.button": "Export",
  "common.CrudEditor.close.button": "Close",
  "common.CrudEditor.actions.tableHeader": "Actions",
  "common.CrudEditor.show.button": "View",
  "common.CrudEditor.edit.button": "Edit",
  "common.CrudEditor.delete.button": "Delete",
  "common.CrudEditor.deleteSelected.button": "Delete selected",
  "common.CrudEditor.duplicate.button": "Duplicate",
  "common.CrudEditor.refresh.button": "Refresh",
  "common.CrudEditor.revisions.button": "Revisions",
  "common.CrudEditor.delete.confirmation": "Do you really want to delete this entry?",
  "common.CrudEditor.deleteSelected.confirmation": "Do you really want to delete the selected items?",
  "common.CrudEditor.noItemsSelected.alert": "No items selected!",
  "common.CrudEditor.objectSaved.message": "Object created.",
  "common.CrudEditor.objectUpdated.message": "Object updated.",
  "common.CrudEditor.objectSaveFailed.message": "Object save failed.",
  "common.CrudEditor.objectDeleted.message": "Object deleted.",
  "common.CrudEditor.objectsDeleted.message": "Objects {labels} deleted.",
  "common.CrudEditor.objectsDeleteIsNoAllowed.message": "You cannot delete some objects because of security restrictions.",
  "common.CrudEditor.objectDeleteFailed.message": "Failed to delete the object, perhaps it is already in use.",
  "common.CrudEditor.objectsDeleteFailed.message": "Failed to delete {count} objects, perhaps they are already in use.",
  "common.CrudEditor.objectDuplicated.message": "Object copied.",
  "common.CrudEditor.noAssociationEntriesFound.message": "No entries found. You can {1} a new entry.",
  "common.CrudEditor.message.ajax.loading": "Please wait...",
  "common.CrudEditor.search.result.label": "Search results",
  "common.CrudEditor.unsaved.confirmation": "You have made changes. If you leave this page these changes are discarded.",
  "common.CrudEditor.search.resultsPerPage": "Results per page",
  "common.CrudEditor.search.all": "All",
  "common.CrudEditor.found.items.message": "{count} item(s) found",
  "common.CrudEditor.range.from": "from",
  "common.CrudEditor.range.to": "to",
  "common.CrudEditor.confirm.action": "Confirm",
  "common.CrudEditor.search.showSearchForm": "Show search form",
  "common.CrudEditor.search.hideSearchForm": "Hide search form",
  "common.CrudEditor.pagination.goToPage": "Go"
}
/* eslint-enable max-len */
// Common UI messages merged with the English exception/validation messages.
export default {
  ...common,
  ...exceptions
}
<|start_filename|>src/crudeditor-lib/views/show/workerSagas/show.js<|end_filename|>
import { call, put } from 'redux-saga/effects';
import { getLogicalKeyBuilder } from '../../lib';
import {
INSTANCE_SHOW_FAIL,
INSTANCE_SHOW_REQUEST,
INSTANCE_SHOW_SUCCESS
} from '../constants';
/*
 * Worker saga of the Show View: extracts a single instance from the server.
 * XXX: in case of failure, a worker saga must dispatch an appropriate action and exit by throwing error(s).
 */
export default function*({ modelDefinition, action }) {
  const {
    payload: {
      instance,
      offset
    },
    meta
  } = action;

  // Signal that the instance extraction has started.
  yield put({
    type: INSTANCE_SHOW_REQUEST,
    meta
  });

  let persistentInstance;

  try {
    // Only the logical key (unique fields) is sent to identify the instance.
    persistentInstance = yield call(modelDefinition.api.get, {
      instance: getLogicalKeyBuilder(modelDefinition.model.fields)(instance)
    });
  } catch (err) {
    yield put({
      type: INSTANCE_SHOW_FAIL,
      payload: err,
      error: true,
      meta
    });

    throw err; // Forward the failure to the caller saga.
  }

  yield put({
    type: INSTANCE_SHOW_SUCCESS,
    payload: {
      instance: persistentInstance,
      offset
    },
    meta
  });
}
<|start_filename|>src/crudeditor-lib/views/show/constants.js<|end_filename|>
import { VIEW_SHOW } from '../../common/constants'
// All Show View action types are prefixed with the view name
// to keep them unique across the editor's combined store.
const namespace = VIEW_SHOW;

export const
  VIEW_NAME = VIEW_SHOW,

  /* ████████████████████████████████████████████
   * ███ ACTION TYPES (in alphabetical order) ███
   * ████████████████████████████████████████████
   */
  ADJACENT_INSTANCE_SHOW = namespace + '/ADJACENT_INSTANCE_SHOW',
  ADJACENT_INSTANCE_SHOW_FAIL = namespace + '/ADJACENT_INSTANCE_SHOW_FAIL',
  INSTANCE_SHOW_REQUEST = namespace + '/INSTANCE_SHOW_REQUEST',
  INSTANCE_SHOW_FAIL = namespace + '/INSTANCE_SHOW_FAIL',
  INSTANCE_SHOW_SUCCESS = namespace + '/INSTANCE_SHOW_SUCCESS',
  TAB_SELECT = namespace + '/TAB_SELECT',
  VIEW_REDIRECT_REQUEST = namespace + '/VIEW_REDIRECT_REQUEST',
  VIEW_REDIRECT_FAIL = namespace + '/VIEW_REDIRECT_FAIL',
  VIEW_REDIRECT_SUCCESS = namespace + '/VIEW_REDIRECT_SUCCESS',
  VIEW_INITIALIZE_REQUEST = namespace + '/VIEW_INITIALIZE_REQUEST',
  VIEW_INITIALIZE_FAIL = namespace + '/VIEW_INITIALIZE_FAIL',
  VIEW_INITIALIZE_SUCCESS = namespace + '/VIEW_INITIALIZE_SUCCESS';
<|start_filename|>src/crudeditor-lib/i18n/exceptions/fi.js<|end_filename|>
// Finnish (fi) translations of the default validation/exception messages.
// The keys are message ids => they must stay in sync with the other locales.
export default {
  "common.CrudEditor.default.doesnt.match.message": "Arvo ei vastaa vaadittua kuviota {pattern}",
  "common.CrudEditor.default.invalid.email.message": "Sähköpostiosoite on väärän muotoinen",
  "common.CrudEditor.default.invalid.max.message": "Arvo ylittää enimmäisarvon {max}",
  "common.CrudEditor.default.invalid.min.message": "Arvo alittaa vähimmäisarvon {min}",
  "common.CrudEditor.default.invalid.max.size.message": "Arvo ylittää enimmäiskoon {max}",
  "common.CrudEditor.default.invalid.min.size.message": "Arvo alittaa vähimmäiskoon {min}",
  "common.CrudEditor.default.invalid.validator.message": "Arvo ei läpäise mukautettua tarkistusta",
  "common.CrudEditor.default.blank.message": "Kenttä ei voi olla tyhjä",
  "common.CrudEditor.default.null.message": "Ominaisuus ei voi olla tyhjä",
  "common.CrudEditor.default.not.unique.message": "Arvon on oltava yksilöivä",
  "common.CrudEditor.default.invalid.url.message": "URL on väärän muotoinen",
  "common.CrudEditor.default.invalid.date.message": "Päivämäärä on väärän muotoinen",
  "common.CrudEditor.default.invalid.decimal.message": "Numero on väärän muotoinen",
  "common.CrudEditor.default.invalid.integer.message": "Numero on väärän muotoinen",
  "common.CrudEditor.default.errorOccurred.message": "Virhetila"
}
<|start_filename|>src/components/EditMain/index.js<|end_filename|>
import React from 'react';
import PropTypes from 'prop-types';
import Heading from '../EditHeading';
import Tab from '../EditTab';
import WithFieldErrors from '../FieldErrors/WithFieldErrorsHOC';
import WithSpinner from '../Spinner/SpinnerOverlayHOC';
import { VIEW_NAME } from '../../crudeditor-lib/views/edit/constants';
// Edit View main pane: renders the view heading plus either a custom tab
// component (when the active tab defines one) or the standard form tab.
const EditMain = ({ model, toggledFieldErrors, toggleFieldErrors }) => {
  // activeTab is an array with an optional "component" prop (see edit reducer).
  const ActiveTabComponent = model.data.activeTab && model.data.activeTab.component;
  return (<div>
    <Heading model={model} />
    {ActiveTabComponent ?
      <ActiveTabComponent viewName={model.data.viewName} instance={model.data.persistentInstance} /> :
      <Tab model={model} toggledFieldErrors={toggledFieldErrors} toggleFieldErrors={toggleFieldErrors}/>
    }
  </div>);
};

EditMain.propTypes = {
  model: PropTypes.shape({
    data: PropTypes.shape({
      activeTab: PropTypes.array,
      viewName: PropTypes.oneOf([VIEW_NAME]).isRequired,
      persistentInstance: PropTypes.object
    }).isRequired
  }).isRequired,
  toggledFieldErrors: PropTypes.object.isRequired,
  toggleFieldErrors: PropTypes.func.isRequired
}

// Wrap with spinner overlay and field-error toggling behavior.
export default WithSpinner(WithFieldErrors(EditMain));
<|start_filename|>src/crudeditor-lib/views/edit/reducer.js<|end_filename|>
import cloneDeep from 'lodash/cloneDeep';
import isEqual from 'lodash/isEqual';
import u from 'updeep';
import { FIELD_TYPE_BOOLEAN } from '../../../data-types-lib/constants';
import { checkFormLayout } from '../../check-model';
import { isSystemError } from '../../lib';
import {
findFieldLayout,
getTab
} from '../lib';
import {
ALL_INSTANCE_FIELDS_VALIDATE_REQUEST,
INSTANCE_EDIT_REQUEST,
INSTANCE_EDIT_SUCCESS,
INSTANCE_EDIT_FAIL,
INSTANCE_FIELD_CHANGE,
INSTANCE_FIELD_VALIDATE,
INSTANCE_SAVE_FAIL,
INSTANCE_SAVE_REQUEST,
INSTANCE_SAVE_SUCCESS,
TAB_SELECT,
VIEW_INITIALIZE_REQUEST,
VIEW_INITIALIZE_FAIL,
VIEW_INITIALIZE_SUCCESS,
VIEW_REDIRECT_REQUEST,
VIEW_REDIRECT_FAIL,
VIEW_REDIRECT_SUCCESS
} from './constants';
import {
STATUS_EXTRACTING,
STATUS_DELETING,
STATUS_INITIALIZING,
STATUS_READY,
STATUS_REDIRECTING,
STATUS_SEARCHING,
STATUS_UNINITIALIZED,
STATUS_UPDATING,
INSTANCES_DELETE_FAIL,
INSTANCES_DELETE_REQUEST,
UNPARSABLE_FIELD_VALUE
} from '../../common/constants';
import {
INSTANCES_SEARCH_REQUEST,
INSTANCES_SEARCH_FAIL,
INSTANCES_SEARCH_SUCCESS
} from '../search/constants';
// Synchronize formInstance and formattedInstance with instance (which is a persistentInstance),
// and clear all field errors. Returns a store-state slice for updeep.
const synchronizeInstances = /* istanbul ignore next */ ({ instance, formLayout, i18n }) => {
  const formattedInstance = {};

  Object.keys(instance).forEach(fieldName => {
    const fieldLayout = findFieldLayout(fieldName)(formLayout);

    // Field from the modelDefinition.model.fields is not in formLayout => it isn't displayed in Edit View.
    if (fieldLayout) {
      formattedInstance[fieldName] = fieldLayout.render.value.converter.format(instance[fieldName], i18n);
    }
  });

  return {
    formInstance: u.constant(cloneDeep(instance)),
    formattedInstance: u.constant(formattedInstance),
    errors: u.constant({
      fields: {}
    })
  };
};
// Initial/default store state of the Edit View (cloned on reducer creation and on full reset).
const defaultStoreStateTemplate = {
  // Instance as saved on server-side.
  persistentInstance: undefined,
  /* Parsed instance as displayed in the form.
   * {
   *   <string, field name>: <serializable, field value for communication with the server>,
   * }
   */
  formInstance: undefined,
  /* Formatted instance as displayed in the form.
   * {
   *   <sting, field name>: <any, field value for cummunication with rendering React Component>,
   * }
   * NOTE: formInstance values and formattedInstance values represent different values in case of parsing error
   * (i.e. rendered value cannot be parsed into its Field Type representation).
   */
  formattedInstance: undefined,
  /*
   * Must always be an array, may be empty.
   *
   * Either an array of arrays (representing tabs) -- for tabbed layout,
   * or an array of arrays (representing sections) and objects (representing fields) -- otherwise.
   *
   * A tab is represented by an array which elements are
   * - arrays (representing sections)
   * and/or
   * - objects (representing fields).
   *
   * XXX: an array representing a tab has props (since in JavaScript an array is also an object):
   * - "tab", string with tab name,
   * - "disabled", boolean.
   * - "component", React Component.
   *
   * A section is represented by an array which elements are objects (representing fields).
   *
   * XXX: an array representing a section has props (since in JavaScript an array is also an object):
   * - "section", string with section name.
   *
   * A field is represented by an object with props:
   * - "field", string with tab name,
   * - "readOnly", boolean.
   * - "component", React Component.
   */
  formLayout: [],
  // A ref to one of tabs element => it is undefined when and only when formLayout does not consist of tabs.
  activeTab: undefined,
  instanceLabel: undefined,
  errors: {
    /* object with keys as field names,
     * values as arrays of Parsing Errors and Field Validation Errors, may not be empty.
     * (the object does not have keys for fields with successfully parsed/validated values).
     */
    fields: {}
  },
  /* instance's absolute offset in search result (0 <= offset < totalCount),
   * or
   * undefined if absolute offset is unknown (in cases of hard redirect to Edit View or soft redirect from Create View).
   */
  offset: undefined,
  status: STATUS_UNINITIALIZED
};
/*
* XXX:
* Only objects and arrays are allowed at branch nodes.
* Only primitive data types are allowed at leaf nodes.
*/
// Edit View store reducer: handles view initialization, instance extraction/saving,
// field-level parsing/validation and tab switching. It accumulates only the changed
// properties in newStoreStateSlice and merges them into the current state with updeep.
export default /* istanbul ignore next */ (modelDefinition, i18n) => (
  storeState = cloneDeep(defaultStoreStateTemplate),
  { type, payload, error, meta }
) => {
  // Ignore every action but VIEW_INITIALIZE_REQUEST until the view is initialized.
  if (storeState.status === STATUS_UNINITIALIZED && type !== VIEW_INITIALIZE_REQUEST) {
    return storeState;
  }
  let newStoreStateSlice = {};
  /* eslint-disable padded-blocks */
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████
  if (type === VIEW_INITIALIZE_REQUEST) {
    newStoreStateSlice.status = STATUS_INITIALIZING;
  } else if (type === VIEW_INITIALIZE_FAIL) {
    newStoreStateSlice.status = STATUS_UNINITIALIZED;
  } else if (type === VIEW_INITIALIZE_SUCCESS) {
    newStoreStateSlice.status = STATUS_READY;
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████
  } else if (type === INSTANCES_SEARCH_REQUEST) {
    newStoreStateSlice.status = STATUS_SEARCHING;
  } else if ([INSTANCES_SEARCH_FAIL, INSTANCES_SEARCH_SUCCESS].indexOf(type) > -1) {
    newStoreStateSlice.status = STATUS_READY;
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████
  } else if (type === VIEW_REDIRECT_REQUEST) {
    newStoreStateSlice.status = STATUS_REDIRECTING;
  } else if (type === VIEW_REDIRECT_FAIL) {
    newStoreStateSlice.status = STATUS_READY;
  } else if (type === VIEW_REDIRECT_SUCCESS) {
    // Reseting the store to initial uninitialized state.
    newStoreStateSlice = u.constant(cloneDeep(defaultStoreStateTemplate));
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████
  } else if (type === INSTANCES_DELETE_REQUEST) {
    newStoreStateSlice.status = STATUS_DELETING;
  } else if (type === INSTANCES_DELETE_FAIL) {
    newStoreStateSlice.status = STATUS_READY;
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████
  } else if (type === INSTANCE_EDIT_REQUEST && storeState.status !== STATUS_INITIALIZING) {
    newStoreStateSlice.status = STATUS_EXTRACTING;
  } else if (type === INSTANCE_SAVE_REQUEST) {
    newStoreStateSlice.status = STATUS_UPDATING;
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████
  } else if ([INSTANCE_EDIT_SUCCESS, INSTANCE_SAVE_SUCCESS].indexOf(type) > -1) {
    const { instance } = payload;
    if (type === INSTANCE_EDIT_SUCCESS) {
      newStoreStateSlice.offset = payload.offset;
    }
    const formLayout = modelDefinition.ui.edit.formLayout(instance).
      filter(entry => !!entry); // Removing empty tabs/sections and null tabs/sections/fields.
    checkFormLayout(formLayout);
    let hasTabs;
    let hasSectionsOrFields;
    // The top level of formLayout must be either all-tabs or entirely tabs-free.
    formLayout.forEach(entry => {
      hasTabs = hasTabs || entry.tab;
      hasSectionsOrFields = hasSectionsOrFields || entry.section || entry.field;
      if (hasTabs && hasSectionsOrFields) {
        throw new TypeError('formLayout must not have tabs together with sections/fields at top level');
      }
    });
    newStoreStateSlice.formLayout = u.constant(formLayout);
    // Keep the currently selected tab if it still exists in the new layout.
    newStoreStateSlice.activeTab = u.constant(
      getTab(formLayout, (storeState.activeTab || {}).tab)
    );
    newStoreStateSlice.persistentInstance = u.constant(instance);
    newStoreStateSlice.instanceLabel = modelDefinition.ui.instanceLabel(instance);
    newStoreStateSlice = {
      ...newStoreStateSlice,
      ...synchronizeInstances({ instance, formLayout, i18n })
    };
    if (storeState.status !== STATUS_INITIALIZING) {
      newStoreStateSlice.status = STATUS_READY;
    }
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████
  } else if ([INSTANCE_EDIT_FAIL, INSTANCE_SAVE_FAIL].indexOf(type) > -1 && storeState.status !== STATUS_INITIALIZING) {
    newStoreStateSlice.status = STATUS_READY;
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████
  } else if (type === INSTANCE_FIELD_CHANGE) {
    const {
      name: fieldName,
      value: fieldValue
    } = payload;
    const {
      validate,
      render: {
        value: {
          converter
        }
      }
    } = findFieldLayout(fieldName)(storeState.formLayout);
    // Labeled block: "break PARSE_LABEL" short-circuits the parse/validate pipeline.
    PARSE_LABEL: {
      let newFormValue;
      try {
        newFormValue = converter.parse(fieldValue, i18n);
      } catch (err) {
        // Rethrow system errors.
        if (isSystemError(err)) {
          throw err;
        }
        const errors = Array.isArray(err) ? err : [err];
        newStoreStateSlice.formInstance = {
          [fieldName]: UNPARSABLE_FIELD_VALUE
        };
        if (!isEqual(fieldValue, storeState.formattedInstance[fieldName])) {
          newStoreStateSlice.formattedInstance = {
            [fieldName]: u.constant(fieldValue)
          };
        }
        if (!isEqual(errors, storeState.errors.fields[fieldName])) {
          newStoreStateSlice.errors = {
            fields: {
              [fieldName]: errors
            }
          };
        }
        break PARSE_LABEL;
      }
      const persistentValue = storeState.persistentInstance[fieldName];
      if (modelDefinition.model.fields[fieldName].type === FIELD_TYPE_BOOLEAN && !persistentValue && !newFormValue) {
        newFormValue = persistentValue; // null and false are considered the same.
      }
      if (!isEqual(newFormValue, storeState.formInstance[fieldName])) {
        newStoreStateSlice.formInstance = {
          [fieldName]: u.constant(newFormValue)
        };
      }
      const newFormattedValue = converter.format(newFormValue, i18n);
      if (!isEqual(newFormattedValue, storeState.formattedInstance[fieldName])) {
        newStoreStateSlice.formattedInstance = {
          [fieldName]: u.constant(newFormattedValue)
        };
      }
      try {
        // Validate against the form instance with the new value already applied.
        validate(newFormValue, {
          ...storeState.formInstance,
          [fieldName]: newFormValue
        });
      } catch (err) {
        // Rethrow system errors.
        if (isSystemError(err)) {
          throw err;
        }
        const errors = Array.isArray(err) ? err : [err];
        if (!isEqual(errors, storeState.errors.fields[fieldName])) {
          newStoreStateSlice.errors = {
            fields: {
              [fieldName]: errors
            }
          };
        }
        break PARSE_LABEL;
      }
      if (storeState.errors.fields[fieldName]) {
        newStoreStateSlice.errors = {
          // u.omit() argument must be an array, since lodash v. 4.17.4 no longer supports a string.
          fields: u.omit([fieldName])
        };
      }
    }
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████
  } else if (type === INSTANCE_FIELD_VALIDATE) {
    const fieldName = payload.name;
    const fieldValue = storeState.formInstance[fieldName];
    // An unparsable value already carries a parsing error => skip validation.
    if (fieldValue !== UNPARSABLE_FIELD_VALUE) {
      PARSE_LABEL: {
        try {
          findFieldLayout(fieldName)(storeState.formLayout).validate(fieldValue, storeState.formInstance);
        } catch (err) {
          // Rethrow system errors.
          if (isSystemError(err)) {
            throw err;
          }
          const errors = Array.isArray(err) ? err : [err];
          if (!isEqual(errors, storeState.errors.fields[fieldName])) {
            newStoreStateSlice.errors = {
              fields: {
                [fieldName]: errors
              }
            };
          }
          break PARSE_LABEL;
        }
        if (storeState.errors.fields[fieldName]) {
          newStoreStateSlice.errors = {
            // u.omit() argument must be an array, since lodash v. 4.17.4 no longer supports a string.
            fields: u.omit([fieldName])
          };
        }
      }
    }
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████
  } else if (type === ALL_INSTANCE_FIELDS_VALIDATE_REQUEST) {
    newStoreStateSlice.errors = {
      fields: {
      }
    };
    Object.keys(modelDefinition.model.fields).forEach(fieldName => {
      const fieldValue = storeState.formInstance[fieldName];
      const fieldLayout = findFieldLayout(fieldName)(storeState.formLayout);
      if (
        // Field from the modelDefinition.model.fields is not in formLayout => it isn't displayed in Edit View
        !fieldLayout ||
        // Field is read-only => no validation needed
        fieldLayout.readOnly ||
        fieldValue === UNPARSABLE_FIELD_VALUE
      ) {
        return;
      }
      try {
        fieldLayout.validate(fieldValue, storeState.formInstance);
      } catch (err) {
        // Rethrow system errors.
        if (isSystemError(err)) {
          throw err;
        }
        const errors = Array.isArray(err) ? err : [err];
        if (!isEqual(errors, storeState.errors.fields[fieldName])) {
          newStoreStateSlice.errors.fields[fieldName] = errors;
        }
        return;
      }
      if (storeState.errors.fields[fieldName]) {
        // u.omit() argument must be an array, since lodash v. 4.17.4 no longer supports a string.
        newStoreStateSlice.errors.fields = u.omit([fieldName]);
      }
    });
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████
  } else if (type === TAB_SELECT) {
    const { tabName } = payload; // may be falsy, i.e. not specified.
    // reset to persistentInstance
    if (!isEqual(storeState.formInstance, storeState.persistentInstance)) {
      newStoreStateSlice = {
        ...newStoreStateSlice,
        ...synchronizeInstances({
          instance: storeState.persistentInstance,
          formLayout: storeState.formLayout,
          i18n
        })
      };
    }
    newStoreStateSlice.activeTab = u.constant(
      getTab(storeState.formLayout, tabName)
    );
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████
  /* eslint-enable padded-blocks */
  }
  return u(newStoreStateSlice, storeState); // returned object is frozen for NODE_ENV === 'development'
};
<|start_filename|>src/crudeditor-lib/views/create/scenario.js<|end_filename|>
import { put, spawn } from 'redux-saga/effects';
import saveSaga from './workerSagas/save';
import redirectSaga from '../../common/workerSagas/redirect';
import { VIEW_SOFT_REDIRECT } from '../../common/constants';
import scenarioSaga from '../../common/scenario';
import {
INSTANCE_SAVE,
VIEW_INITIALIZE,
VIEW_NAME
} from './constants';
// Worker sagas for the Create View scenario:
// "blocking" transitions must finish before another one may start,
// "nonBlocking" ones may replace the view at any time.
const transitions = {
  blocking: {
    [INSTANCE_SAVE]: saveSaga
  },
  nonBlocking: {
    [VIEW_SOFT_REDIRECT]: redirectSaga
  }
}
// Create View scenario saga.
// See Search View scenario for detailed description of the saga.
export default function*({ modelDefinition, softRedirectSaga, viewState, source }) {
  const { predefinedFields = {} } = viewState;

  // Put the view into its initial state with any predefined field values.
  yield put({
    type: VIEW_INITIALIZE,
    payload: { predefinedFields },
    meta: { source }
  });

  // Spawn the generic scenario loop and hand its task back to the caller.
  return (yield spawn(scenarioSaga, {
    modelDefinition,
    softRedirectSaga,
    transitions,
    viewName: VIEW_NAME
  }));
}
<|start_filename|>src/crudeditor-lib/rootSaga.js<|end_filename|>
import { call, put, cancel, takeLatest } from 'redux-saga/effects';
import searchViewScenario from './views/search/scenario';
import createViewScenario from './views/create/scenario';
import editViewScenario from './views/edit/scenario';
import showViewScenario from './views/show/scenario';
import errorViewScenario from './views/error/scenario';
import { isAllowed } from './lib';
import {
ACTIVE_VIEW_CHANGE,
DEFAULT_VIEW,
ERROR_UNKNOWN_VIEW,
ERROR_FORBIDDEN_VIEW,
VIEW_HARD_REDIRECT,
VIEW_SEARCH,
VIEW_CREATE,
VIEW_EDIT,
VIEW_SHOW,
VIEW_ERROR,
PERMISSION_CREATE,
PERMISSION_EDIT,
PERMISSION_VIEW
} from './common/constants';
// True for the standard CRUD views which are gated by crudOperations permissions.
const isStandardView = viewName => [VIEW_CREATE, VIEW_EDIT, VIEW_SHOW, VIEW_SEARCH].includes(viewName);
export default function*(modelDefinition) {
  const { crudOperations } = modelDefinition.permissions;
  // Only views allowed by the CRUD-operation permissions get an initialization
  // saga; the Error View is always available as a fallback.
  const initializeViewSagas = {
    ...(isAllowed(crudOperations, PERMISSION_VIEW) ? { [VIEW_SEARCH]: searchViewScenario } : null),
    ...(isAllowed(crudOperations, PERMISSION_CREATE) ? { [VIEW_CREATE]: createViewScenario } : null),
    ...(isAllowed(crudOperations, PERMISSION_EDIT) ? { [VIEW_EDIT]: editViewScenario } : null),
    ...(isAllowed(crudOperations, PERMISSION_VIEW) ? { [VIEW_SHOW]: showViewScenario } : null),
    [VIEW_ERROR]: errorViewScenario
  };
  // Task handle of the currently displayed view's scenario saga.
  let activeViewScenarioTask;
  /*
   * The saga handles an active view's request for replacement with another view.
   *
   * The saga attempts to initialize the requested view without displaying it.
   * When successful, it replaces the currently active view with the requested one
   * (by canceling the active activeViewScenarioTask and displaying the requested view).
   * When view initialization fails, the saga throws error(s).
   */
  function* softRedirectSaga({ viewName, viewState, ...additionalArgs }) {
    const initializeViewSaga = initializeViewSagas[viewName];
    if (!initializeViewSaga) {
      // Distinguish a known-but-forbidden view from a completely unknown one.
      if (isStandardView(viewName)) {
        throw ERROR_FORBIDDEN_VIEW(viewName);
      }
      throw ERROR_UNKNOWN_VIEW(viewName);
    }
    const oldViewScenarioTask = activeViewScenarioTask;
    // Initialization error(s) are forwarded to the parent saga.
    activeViewScenarioTask = yield call(initializeViewSaga, {
      modelDefinition,
      softRedirectSaga,
      viewState,
      ...additionalArgs
    });
    yield put({
      type: ACTIVE_VIEW_CHANGE,
      payload: { viewName }
    });
    // It must be the very last statement because it cancels this saga also,
    // since this saga is a descendant of oldViewScenarioTask.
    yield cancel(oldViewScenarioTask);
  }
  /*
   * The saga handles cases when the currently active view, if any, must not remain:
   * -- initial CRUD Editor loading
   * and
   * -- CRUD Editor reloading when forced by the owner application.
   *
   * The requested view gets displayed immediately even if uninitialized.
   *
   * The view either remains or gets replaced with the error view (if initialization failed).
   */
  function* hardRedirectSaga({
    payload,
    meta: { source } = {}
  }) {
    let {
      viewName = DEFAULT_VIEW,
      viewState = {}
    } = payload;
    let initializeViewSaga = initializeViewSagas[viewName];
    if (!initializeViewSaga) {
      // Unknown/forbidden view => show the error view with an appropriate error.
      initializeViewSaga = errorViewScenario;
      viewState = isStandardView(viewName) ?
        ERROR_FORBIDDEN_VIEW(viewName) :
        ERROR_UNKNOWN_VIEW(viewName);
      viewName = VIEW_ERROR;
    }
    // Display the requested view right away, before it is initialized.
    yield put({
      type: ACTIVE_VIEW_CHANGE,
      payload: { viewName },
      meta: { source }
    });
    if (activeViewScenarioTask) {
      yield cancel(activeViewScenarioTask);
    }
    try {
      activeViewScenarioTask = yield call(initializeViewSaga, {
        modelDefinition,
        softRedirectSaga,
        viewState,
        source
      });
    } catch (err) {
      // Initialization failed => fall back to the error view with the error as state.
      viewName = VIEW_ERROR;
      yield put({
        type: ACTIVE_VIEW_CHANGE,
        payload: { viewName },
        meta: { source }
      });
      activeViewScenarioTask = yield call(errorViewScenario, {
        modelDefinition,
        softRedirectSaga,
        viewState: err,
        source
      });
    }
  }
  yield takeLatest(VIEW_HARD_REDIRECT, hardRedirectSaga);
}
<|start_filename|>src/demo/models/second-model/i18n/ru.js<|end_filename|>
// Russian (ru) UI labels for the second demo model.
export default {
  "model.name": "Контракты",
  "model.tab.general.label": "Главное",
  "model.tab.additional.label": "Дополнительно",
  "model.section.order.label": "Параметры заказа",
  "model.section.test.label": "Тестовое поле",
  "model.section.auditable.label": "Проверяемые поля",
  "model.field.contractId.label": "Номер контракта",
  "model.field.description.label": "Описание",
  "model.field.validRange.label": "Период действия",
  "model.field.testNumberTypeField.label": "Тестовое числовое поле",
  "model.field.createdOn.label": "Время создания",
  "model.field.changedOn.label": "Время изменения",
  "model.field.changedBy.label": "Кем изменен",
  "model.field.createdBy.label": "Кто создал"
}
<|start_filename|>src/crudeditor-lib/views/show/reducer.js<|end_filename|>
import cloneDeep from 'lodash/cloneDeep';
import u from 'updeep';
import {
INSTANCE_SHOW_SUCCESS,
INSTANCE_SHOW_REQUEST,
VIEW_INITIALIZE_REQUEST,
VIEW_INITIALIZE_FAIL,
VIEW_INITIALIZE_SUCCESS,
VIEW_REDIRECT_REQUEST,
VIEW_REDIRECT_FAIL,
VIEW_REDIRECT_SUCCESS,
TAB_SELECT
} from './constants';
import {
STATUS_INITIALIZING,
STATUS_READY,
STATUS_REDIRECTING,
STATUS_SEARCHING,
STATUS_UNINITIALIZED,
STATUS_EXTRACTING
} from '../../common/constants';
import {
INSTANCES_SEARCH_REQUEST,
INSTANCES_SEARCH_FAIL,
INSTANCES_SEARCH_SUCCESS
} from '../search/constants';
import { checkFormLayout } from '../../check-model';
import { findFieldLayout, getTab } from '../lib';
// Initial shape of the Show View's store slice; cloned for every fresh reducer.
const defaultStoreStateTemplate = {
  // Instance as saved on server-side.
  persistentInstance: undefined,
  /* Formatted instance as displayed in the form.
   * {
   *   <string, field name>: <any, field value for communication with rendering React Component>,
   * }
   * NOTE: formInstance values and formattedInstance values represent different values in case of parsing error
   * (i.e. rendered value cannot be parsed into its string representation).
   */
  formattedInstance: undefined,
  // Must always be an array, may be empty.
  formLayout: [],
  // A ref to one of tabs element => it is undefined when and only when formLayout does not consist of tabs.
  activeTab: undefined,
  instanceLabel: undefined,
  /* instance's absolute offset in search result (0 <= offset < totalCount),
   * or
   * undefined if absolute offset is unknown (in cases of hard redirect to Search View).
   */
  offset: undefined,
  status: STATUS_UNINITIALIZED
};
/*
* XXX:
* Only objects and arrays are allowed at branch nodes.
* Only primitive data types are allowed at leaf nodes.
*/
// Show View reducer factory: builds a reducer over the view's store slice,
// expressed as an updeep patch (newStoreStateSlice) applied immutably at the end.
export default /* istanbul ignore next */ (modelDefinition, i18n) => (
  storeState = cloneDeep(defaultStoreStateTemplate),
  { type, payload, error, meta }
) => {
  // Ignore every action until the view has been explicitly initialized.
  if (storeState.status === STATUS_UNINITIALIZED && type !== VIEW_INITIALIZE_REQUEST) {
    return storeState;
  }
  let newStoreStateSlice = {};
  /* eslint-disable padded-blocks */
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████
  if (type === VIEW_INITIALIZE_REQUEST) {
    newStoreStateSlice.status = STATUS_INITIALIZING;
  } else if (type === VIEW_INITIALIZE_FAIL) {
    newStoreStateSlice.status = STATUS_UNINITIALIZED;
  } else if (type === VIEW_INITIALIZE_SUCCESS) {
    newStoreStateSlice.status = STATUS_READY;
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████
  } else if (type === INSTANCES_SEARCH_REQUEST) {
    newStoreStateSlice.status = STATUS_SEARCHING;
  } else if ([INSTANCES_SEARCH_FAIL, INSTANCES_SEARCH_SUCCESS].indexOf(type) > -1) {
    newStoreStateSlice.status = STATUS_READY;
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████
  } else if (type === VIEW_REDIRECT_REQUEST) {
    newStoreStateSlice.status = STATUS_REDIRECTING;
  } else if (type === VIEW_REDIRECT_FAIL) {
    newStoreStateSlice.status = STATUS_READY;
  } else if (type === VIEW_REDIRECT_SUCCESS) {
    // Resetting the store to initial uninitialized state.
    newStoreStateSlice = u.constant(cloneDeep(defaultStoreStateTemplate));
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████
  } else if (type === INSTANCE_SHOW_REQUEST) {
    newStoreStateSlice.status = STATUS_EXTRACTING;
  } else if (type === INSTANCE_SHOW_SUCCESS) {
    const { instance, offset } = payload;
    newStoreStateSlice.offset = offset;
    const formLayout = modelDefinition.ui.show.formLayout(instance).
      filter(entry => !!entry); // Removing empty tabs/sections and null tabs/sections/fields.
    checkFormLayout(formLayout);
    let hasTabs;
    let hasSectionsOrFields;
    // The top level of the layout must be either all tabs or all sections/fields, never a mix.
    formLayout.forEach(entry => {
      hasTabs = hasTabs || entry.tab;
      hasSectionsOrFields = hasSectionsOrFields || entry.section || entry.field;
      if (hasTabs && hasSectionsOrFields) {
        throw new TypeError('formLayout must not have tabs together with sections/fields at top level');
      }
    });
    newStoreStateSlice.formLayout = u.constant(formLayout);
    // Keep the previously selected tab if it still exists in the new layout.
    newStoreStateSlice.activeTab = u.constant(
      getTab(formLayout, (storeState.activeTab || {}).tab)
    );
    newStoreStateSlice.persistentInstance = u.constant(instance);
    newStoreStateSlice.instanceLabel = modelDefinition.ui.instanceLabel(instance);
    // Format every field that has a layout entry for display.
    newStoreStateSlice.formattedInstance = u.constant(Object.keys(instance).reduce(
      (rez, fieldName) => {
        const fieldLayout = findFieldLayout(fieldName)(formLayout);
        return fieldLayout ? {
          ...rez,
          [fieldName]: fieldLayout.render.value.converter.format(instance[fieldName], i18n)
        } : rez; // Field from the modelDefinition.model.fields is not in formLayout => it isn't displayed in Show View.
      },
      {}
    ));
    if (storeState.status !== STATUS_INITIALIZING) {
      newStoreStateSlice.status = STATUS_READY;
    }
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████
  } else if (type === TAB_SELECT) {
    const { tabName } = payload; // may be not specified (i.e. falsy).
    newStoreStateSlice.activeTab = u.constant(
      getTab(storeState.formLayout, tabName)
    );
  // ███████████████████████████████████████████████████████████████████████████████████████████████████████████
  /* eslint-enable padded-blocks */
  }
  return u(newStoreStateSlice, storeState); // returned object is frozen for NODE_ENV === 'development'
};
<|start_filename|>src/demo/models/second-model/api/api.js<|end_filename|>
import find from 'lodash/find';
import Big from 'big.js';
import { exists } from '../../../../components/lib';
import initialData from './data';
import { DEFAULT_FIELD_TYPE } from '../../../../crudeditor-lib/common/constants.js';
import {
FIELD_TYPE_BOOLEAN,
FIELD_TYPE_STRING_DATE,
FIELD_TYPE_STRING,
FIELD_TYPE_STRING_INTEGER,
FIELD_TYPE_STRING_DECIMAL,
FIELD_TYPE_DECIMAL,
FIELD_TYPE_INTEGER
} from '../../../../data-types-lib/constants';
import { fields } from '../'
// Name of the synthetic numeric field added to every contract below.
export const testNumberFieldType = "testNumberTypeField";
// In-memory data set: deduplicates initialData.contracts by contractId and
// augments each contract with random demo fields. The randomization makes the
// data set nondeterministic across module loads.
const data = { // remove doubles
  contracts: Object.keys(
    initialData.contracts.
      map(({ contractId }) => contractId).
      reduce((obj, id) => ({ ...obj, [id]: '' }), {})
  ).
    map(id => find(initialData.contracts, ({ contractId }) => contractId === id)).
    map(c => ({
      ...c,
      nonExisting: Math.random(),
      [testNumberFieldType]: Math.random() * 1000000,
      // ~80% of contracts get a random parent; the rest get null.
      parentContract: Math.random() > 0.8 ?
        null :
        initialData.contracts.map(({ contractId }) => contractId)[
          Math.floor(Math.random() * initialData.contracts.length)
        ]
    }))
}
// Stamps audit fields createdOn/createdBy on the stored contract whose
// contractId matches the given instance; all other contracts are untouched.
const setCreatedFields = instance => {
  data.contracts = data.contracts.map(contract => {
    if (contract.contractId !== instance.contractId) {
      return contract;
    }
    return {
      ...contract,
      createdOn: (new Date()).toISOString(),
      createdBy: '<NAME>'
    };
  });
}
// Stamps audit fields changedOn/changedBy on the stored contract whose
// contractId matches the given instance; all other contracts are untouched.
const setChangedFields = instance => {
  data.contracts = data.contracts.map(contract => {
    if (contract.contractId !== instance.contractId) {
      return contract;
    }
    return {
      ...contract,
      changedOn: (new Date()).toISOString(),
      changedBy: '<NAME>'
    };
  });
}
/*
 * Returns true when obj is a search-range object: a non-null object whose
 * only keys are "from" and/or "to", with at least one of them present.
 * Fix: the previous check only verified the key count and the presence of
 * one range key, so objects with a stray extra key (e.g. { from: 1, foo: 2 })
 * were wrongly classified as ranges; now every key must be "from" or "to".
 */
const isRangeObject = obj =>
  typeof obj === 'object' &&
  obj !== null &&
  (keys =>
    keys.length > 0 &&
    keys.length <= 2 &&
    keys.every(key => key === 'from' || key === 'to')
  )(Object.keys(obj));
// In-memory CRUD + search API over the demo data set. Every function operates
// on the module-level data.contracts array and mimics a remote service,
// signalling failures by throwing Error("<http status code>").
export const
  getNumberOfInstances = _ => data.contracts.length,
  getContracts = _ => data.contracts,
  // Returns the stored contract matching instance.contractId, or throws "404".
  get = ({ instance }) => {
    const item = find(data.contracts, ({ contractId }) => {
      return contractId === instance.contractId
    });
    if (item) {
      return item
    }
    throw new Error("404")
  },
  // Adds a new contract; throws "400" when the contractId already exists.
  create = ({ instance }) => {
    if (find(data.contracts, ({ contractId }) => contractId === instance.contractId)) {
      throw new Error("400")
    }
    return ((data, instance) => {
      data.contracts.push(instance);
      setCreatedFields(instance);
      return get({ instance })
    })(data, instance)
  },
  // Replaces an existing contract (matched by contractId); throws "404" if absent.
  update = ({ instance }) => (
    (data, instance) => {
      if (find(data.contracts, ({ contractId }) => contractId === instance.contractId)) {
        data.contracts = data.contracts.map( // eslint-disable-line no-param-reassign
          contract => contract.contractId === instance.contractId ?
            instance :
            contract
        );
        setChangedFields(instance);
        return get({ instance })
      }
      throw new Error("404")
    }
  )(data, instance),
  // Removes all contracts whose contractId appears in instances.
  // NOTE(review): returns instances.length even for ids that were not found.
  deleteMany = ({ instances }) => (
    (data, instances) => {
      const idsToDelete = instances.map(({ contractId }) => contractId);
      data.contracts = data.contracts.filter( // eslint-disable-line no-param-reassign
        ({ contractId }) => !idsToDelete.includes(contractId)
      );
      return instances.length;
    }
  )(data, instances),
  // Removes a single contract by contractId.
  // NOTE(review): returns 1 even when no matching contract existed.
  deleteOne = ({ instance }) => (
    (data, instance) => {
      let deleteIndex;
      if (data.contracts.some(({ contractId }, index) => { // eslint-disable-line consistent-return
        if (instance.contractId === contractId) {
          deleteIndex = index;
          return true;
        }
      })) {
        data.contracts.splice(deleteIndex, 1);
      }
      return 1;
    }
  )(data, instance),
  // Filters, sorts and paginates the contracts. Returns
  // { totalCount: <matches before pagination>, instances: <page of results> }.
  search = ({ filter, sort, order, offset, max }) => {
    const searchableData = data.contracts;
    let result = searchableData.slice();
    if (filter) {
      // An item matches when it satisfies ALL filter fields (reduce over &&).
      const filteredData = searchableData.filter(
        item => Object.keys(filter).reduce(
          (rez, fieldName) => {
            if (item[fieldName] === undefined) {
              const err = {
                code: 500,
                message: `Fatal error: field ${fieldName} does not exist on instance contractId=${item.contractId}`
              }
              throw err;
            }
            const
              fieldValue = filter[fieldName],
              fieldType = fields[fieldName].type || DEFAULT_FIELD_TYPE,
              itemValue = item[fieldName];
            // Handle range from..to fields
            // If not object - we should check strict equality to handle search by
            // 'statusId' field
            if (isRangeObject(fieldValue)) {
              let match = true;
              if (item[fieldName] !== null) {
                // gte/lte comparators are chosen per field type below.
                let gte, lte;
                switch (fieldType) {
                  // Number and stringNumber fieldTypes are treated and compared as Numbers
                  case FIELD_TYPE_DECIMAL:
                  case FIELD_TYPE_INTEGER:
                    gte = (itemValue, filterValue) => Number(itemValue) >= Number(filterValue);
                    lte = (itemValue, filterValue) => Number(itemValue) <= Number(filterValue);
                    break;
                  case FIELD_TYPE_STRING_INTEGER:
                  case FIELD_TYPE_STRING_DECIMAL:
                    // Big.js avoids precision loss on large/decimal string numbers.
                    gte = (itemValue, filterValue) => Big(itemValue).gte(Big(filterValue));
                    lte = (itemValue, filterValue) => Big(itemValue).lte(Big(filterValue));
                    break;
                  case FIELD_TYPE_STRING_DATE:
                    gte = (itemValue, filterValue) => new Date(itemValue) >= new Date(filterValue);
                    lte = (itemValue, filterValue) => {
                      // "to" is inclusive: shift the upper bound by one day.
                      const filterDate = new Date(filterValue);
                      filterDate.setDate(filterDate.getDate() + 1);
                      return new Date(itemValue) <= filterDate;
                    }
                    break;
                  default:
                    console.log("Search api switch: Unknown RANGE field type: " + fieldType);
                    return false;
                }
                if (exists(fieldValue.from)) {
                  match = match && gte(itemValue, String(fieldValue.from).trim())
                }
                if (exists(fieldValue.to)) {
                  match = match && lte(itemValue, String(fieldValue.to).trim())
                }
              } else {
                // null returns false for any range
                match = false;
              }
              return rez && match
              // Now handle non-range fields
            } else if (fieldType === FIELD_TYPE_BOOLEAN) {
              // Boolean() converts incoming null -> false and keeps true -> true or false -> false
              const match = Boolean(itemValue) === fieldValue;
              return rez && match
            } else if (fieldType === FIELD_TYPE_STRING) {
              // Case-insensitive substring match; null never matches.
              const match = itemValue !== null ?
                itemValue.toLowerCase().indexOf(fieldValue.trim().toLowerCase()) > -1 :
                false;
              return rez && match
              // TODO add [] search
            } else if ([
              FIELD_TYPE_STRING_INTEGER,
              FIELD_TYPE_STRING_DECIMAL,
              FIELD_TYPE_DECIMAL,
              FIELD_TYPE_INTEGER
            ].indexOf(fieldType) > -1
            ) {
              // Exact numeric equality after trimming/coercing both sides.
              const match = itemValue !== null && Number(String(fieldValue).trim()) === Number(itemValue);
              return rez && match
            } else if (fieldType === FIELD_TYPE_STRING_DATE) {
              const match = new Date(String(fieldValue).trim()).valueOf() === new Date(itemValue).valueOf();
              return rez && match
            }
            return false
          }, true
        )
      );
      result = filteredData.slice()
    }
    // totalCount is the match count BEFORE pagination is applied.
    const totalCount = result.length;
    if (sort) {
      const fieldType = fields[sort].type || DEFAULT_FIELD_TYPE;
      if (fieldType === FIELD_TYPE_STRING) {
        // Locale-aware, case-insensitive string sort; null/undefined sort as ''.
        result = result.sort((a, b) => (a[sort] || '').localeCompare(b[sort] || '', { sensitivity: 'base' }));
      } else {
        result = result.sort((a, b) => (a[sort] < b[sort]) ? -1 : 1);
      }
      if (order && order === 'desc') {
        result.reverse();
      }
    }
    if (Number(offset) === parseInt(offset, 10)) {
      const offsetNum = parseInt(offset, 10);
      const offsetResult = totalCount > offsetNum ?
        result.slice(offsetNum) :
        []
      // handle search for the last page in case that previous last page was completely deleted
      if (offsetResult.length === 0 &&
        max !== undefined &&
        offsetNum >= max &&
        totalCount > 0
      ) {
        const totalPages = Math.ceil(totalCount / max);
        const newOffset = totalPages * max - max;
        result = result.slice(newOffset)
      } else {
        result = offsetResult
      }
    }
    if (Number(max) === parseInt(max, 10)) {
      const maxItems = parseInt(max, 10);
      // max = -1 for all items
      if (maxItems > 0) {
        result = result.slice(0, maxItems)
      }
    }
    return {
      totalCount,
      instances: result
    }
  }
<|start_filename|>src/components/EditHeading/index.js<|end_filename|>
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';
import Nav from 'react-bootstrap/lib/Nav';
import NavItem from 'react-bootstrap/lib/NavItem';
import Col from 'react-bootstrap/lib/Col';
import Row from 'react-bootstrap/lib/Row';
import ButtonGroup from 'react-bootstrap/lib/ButtonGroup';
import Button from 'react-bootstrap/lib/Button';
import Glyphicon from 'react-bootstrap/lib/Glyphicon';
import { getKeyWithPrefix, getModelMessage, getTabLabel } from '../lib';
import { VIEW_CREATE } from '../../crudeditor-lib/common/constants';
import ConfirmUnsavedChanges from '../ConfirmDialog/ConfirmUnsavedChanges';
// Heading of the Create/Edit/Show views: model name, instance label,
// previous/next navigation arrows and (when the form has several tabs) the
// tab strip. Navigation away from unsaved changes is guarded by
// ConfirmUnsavedChanges wrappers.
export default class EditHeading extends PureComponent {
  static propTypes = {
    model: PropTypes.shape({
      data: PropTypes.shape({
        activeTab: PropTypes.array,
        instanceLabel: PropTypes.string,
        tabs: PropTypes.array,
        viewName: PropTypes.string.isRequired,
        persistentInstance: PropTypes.object,
        unsavedChanges: PropTypes.bool
      }),
      actions: PropTypes.objectOf(PropTypes.func),
      uiConfig: PropTypes.object.isRequired
    }).isRequired
  }
  static contextTypes = {
    i18n: PropTypes.object.isRequired
  };
  // The confirmation dialog is shown only while there are unsaved changes.
  showConfirmDialog = _ => this.props.model.data.unsavedChanges;
  render() {
    const {
      model: {
        data: {
          activeTab: {
            tab: activeTabName
          } = {},
          instanceLabel,
          tabs,
          viewName
        },
        actions: {
          selectTab,
          exitView,
          gotoPreviousInstance,
          gotoNextInstance
        },
        uiConfig: {
          headerLevel = 1
        }
      }
    } = this.props;
    const { i18n } = this.context;
    const modelName = getModelMessage({ i18n, key: getKeyWithPrefix(i18n, 'model.name') });
    // The model name is a link back to the parent view only when exitView is available.
    const title = exitView ?
      (
        <ConfirmUnsavedChanges showDialog={this.showConfirmDialog}>
          <a style={{ cursor: 'pointer' }} onClick={exitView}>
            {modelName}
          </a>
        </ConfirmUnsavedChanges>
      ) :
      modelName;
    // Previous/next instance arrows; each is disabled when its action is unavailable.
    const arrows = [
      <ConfirmUnsavedChanges showDialog={this.showConfirmDialog} key='arrow-left'>
        <Button
          disabled={!gotoPreviousInstance}
          onClick={gotoPreviousInstance}
        >
          <Glyphicon glyph="arrow-left"/>
        </Button>
      </ConfirmUnsavedChanges>,
      <ConfirmUnsavedChanges showDialog={this.showConfirmDialog} key='arrow-right'>
        <Button
          disabled={!gotoNextInstance}
          onClick={gotoNextInstance}
        >
          <Glyphicon glyph="arrow-right"/>
        </Button>
      </ConfirmUnsavedChanges>
    ]
    // Header tag level (h1..h6) is configurable by the owner application.
    const H = 'h' + headerLevel;
    return (<div style={{ marginBottom: '10px' }}>
      <H>
        <Row>
          <Col xs={8}>
            {title}
            {
              (instanceLabel || viewName === VIEW_CREATE) &&
              <small>
                {' / ' + (viewName === VIEW_CREATE ? i18n.getMessage('common.CrudEditor.new.title') : instanceLabel)}
              </small>
            }
          </Col>
          <Col xs={4}>
            <div style={{ float: "right" }}>
              <ButtonGroup>
                {arrows}
              </ButtonGroup>
            </div>
          </Col>
        </Row>
      </H>
      {
        tabs.length > 1 &&
        <ConfirmUnsavedChanges
          trigger='select'
          showDialog={this.showConfirmDialog}
        >
          <Nav bsStyle='tabs' activeKey={activeTabName} onSelect={selectTab}>
            {
              tabs.map(({ tab: name, disabled }, index) =>
                (<NavItem
                  eventKey={name}
                  disabled={!!disabled || name === activeTabName}
                  key={index}
                >
                  <h4>
                    {
                      getTabLabel({ i18n, name })
                    }
                  </h4>
                </NavItem>)
              )
            }
          </Nav>
        </ConfirmUnsavedChanges>
      }
    </div>);
  }
}
<|start_filename|>src/components/ConfirmDialog/ConfirmDialog.spec.js<|end_filename|>
import React from "react";
import Enzyme from "enzyme";
import Adapter from 'enzyme-adapter-react-16';
import { expect } from 'chai';
import sinon from 'sinon';
import ConfirmDialog from './';
import { I18nManager } from '@opuscapita/i18n';
Enzyme.configure({ adapter: new Adapter() });
// Minimal legacy-context payload required by ConfirmDialog.
const context = {
  i18n: new I18nManager()
}
describe("ConfirmDialog", _ => {
  it("should properly render", () => {
    const onClick = sinon.spy();
    const showDialogInner = sinon.spy();
    // showDialog wraps a spy so invocations can be tracked while returning true.
    const showDialog = _ => {
      showDialogInner();
      return true
    }
    const child = _ => (<button onClick={onClick}>Hi I'm a child</button>);
    const wrapper = Enzyme.mount(<ConfirmDialog showDialog={showDialog}>{child}</ConfirmDialog>, {
      context
    });
    expect(wrapper).to.exist; // eslint-disable-line no-unused-expressions
  });
  it("should render array of children into a span", () => {
    const onClick = sinon.spy();
    const showDialogInner = sinon.spy();
    const showDialog = _ => {
      showDialogInner();
      return true
    }
    const child = _ => (<button onClick={onClick}>Hi I'm a child</button>);
    const children = [child, child, child];
    const wrapper = Enzyme.mount(<ConfirmDialog showDialog={showDialog}>{children}</ConfirmDialog>, {
      context
    });
    /* eslint-disable no-unused-expressions */
    // Multiple children are expected to be wrapped in a single span container.
    expect(wrapper.getDOMNode()).to.have.property('className').equal('confirm-dialog-span');
    /* eslint-enable no-unused-expressions */
  });
});
<|start_filename|>src/demo/models/contracts/components/DateRangeCellRender/DateRangeCellRender.spec.js<|end_filename|>
import React from "react";
import Enzyme from "enzyme";
import Adapter from 'enzyme-adapter-react-16';
import { expect } from 'chai';
import DateRangeCellRender from "./";
import { I18nManager } from '@opuscapita/i18n';
Enzyme.configure({ adapter: new Adapter() });
describe("DateRangeCellRender", _ => {
  const i18n = new I18nManager();
  const context = { i18n }
  it("should properly display the Date range", () => {
    const props = {
      name: "dateRange",
      instance: {
        dateRange: {
          from: '2008-09-01',
          to: '2010-09-19'
        }
      }
    };
    const wrapper = Enzyme.mount(<DateRangeCellRender {...props}/>, {
      context
    });
    expect(wrapper.exists()).to.be.true; // eslint-disable-line no-unused-expressions
    // Both bounds are formatted via i18n and joined with " - ".
    expect(wrapper.find('span').text()).to.equal(
      i18n.formatDate(new Date(props.instance.dateRange.from)) +
      ' - ' +
      i18n.formatDate(new Date(props.instance.dateRange.to))
    );
  });
  it("should properly display the Date range if passed Date objects instead of strings", () => {
    const props = {
      name: "dateRange",
      instance: {
        dateRange: {
          from: new Date('2008-09-01'),
          to: new Date('2010-09-19')
        }
      }
    };
    const wrapper = Enzyme.mount(<DateRangeCellRender {...props} />, {
      context
    });
    expect(wrapper.exists()).to.be.true; // eslint-disable-line no-unused-expressions
    expect(wrapper.find('span').text()).to.equal(
      i18n.formatDate(props.instance.dateRange.from) + ' - ' + i18n.formatDate(props.instance.dateRange.to)
    );
  });
  it("should return null in render function for unmatched props", () => {
    // No dateRange field on the instance => the component renders nothing.
    const props = {
      name: "date",
      instance: {}
    };
    const wrapper = Enzyme.mount(<DateRangeCellRender {...props} />, {
      context
    });
    expect(wrapper.isEmptyRender()).to.be.true; // eslint-disable-line no-unused-expressions
  });
  it("should render 'from' if 'to' is missing", () => {
    const props = {
      name: "dateRange",
      instance: {
        dateRange: {
          from: '2008-09-01'
        }
      }
    };
    const wrapper = Enzyme.mount(<DateRangeCellRender {...props} />, {
      context
    });
    expect(wrapper.exists()).to.be.true; // eslint-disable-line no-unused-expressions
    // A missing bound is rendered as "...".
    expect(wrapper.find('span').text()).to.equal(i18n.formatDate(new Date(props.instance.dateRange.from)) + ' - ...');
  });
  it("should render 'to' if 'from' is missing", () => {
    const props = {
      name: "dateRange",
      instance: {
        dateRange: {
          to: '2008-09-01'
        }
      }
    };
    const wrapper = Enzyme.mount(<DateRangeCellRender {...props} />, {
      context
    });
    expect(wrapper.exists()).to.be.true; // eslint-disable-line no-unused-expressions
    expect(wrapper.find('span').text()).to.equal('... - ' + i18n.formatDate(new Date(props.instance.dateRange.to)));
  });
});
<|start_filename|>src/crudeditor-lib/i18n/de.js<|end_filename|>
import exceptions from './exceptions/de';
/* eslint-disable max-len */
// German (de) translations for common CRUD Editor UI messages.
const common = {
  "common.CrudEditor.new.title": "Neu",
  "common.CrudEditor.create.header": "{modelName} anlegen",
  "common.CrudEditor.edit.header": "{modelName} bearbeiten",
  "common.CrudEditor.show.header": "{modelName} ansehen",
  "common.CrudEditor.duplicate.header": "{modelName} duplizieren",
  "common.CrudEditor.cancel.button": "Abbrechen",
  "common.CrudEditor.save.button": "Speichern",
  "common.CrudEditor.saveAndNew.button": "Speichern und Neu",
  "common.CrudEditor.saveAndNext.button": "Speichern und Weiter",
  "common.CrudEditor.search.header": "{payload} suchen",
  "common.CrudEditor.search.button": "Suchen",
  "common.CrudEditor.reset.button": "Zurücksetzen",
  "common.CrudEditor.create.button": "Hinzufügen",
  "common.CrudEditor.select.button": "Auswählen",
  "common.CrudEditor.export.button": "Export",
  "common.CrudEditor.close.button": "Schließen",
  "common.CrudEditor.actions.tableHeader": "Aktionen",
  "common.CrudEditor.show.button": "Ansehen",
  "common.CrudEditor.edit.button": "Bearbeiten",
  "common.CrudEditor.delete.button": "Löschen",
  "common.CrudEditor.deleteSelected.button": "Ausgewählte löschen",
  "common.CrudEditor.duplicate.button": "Duplizieren",
  "common.CrudEditor.refresh.button": "Aktualisieren",
  "common.CrudEditor.revisions.button": "Revisionen",
  "common.CrudEditor.delete.confirmation": "Möchten Sie diesen Eintrag wirklich löschen?",
  "common.CrudEditor.deleteSelected.confirmation": "Möchten Sie wirklich alle markierten Einträge löschen?",
  "common.CrudEditor.noItemsSelected.alert": "Keine Elemente ausgewählt!",
  "common.CrudEditor.objectSaved.message": "Objekt angelegt.",
  "common.CrudEditor.objectUpdated.message": "Objekt aktualisiert.",
  "common.CrudEditor.objectSaveFailed.message": "Speichern von Objekten fehlgeschlagen.",
  "common.CrudEditor.objectDeleted.message": "Objekt gelöscht.",
  "common.CrudEditor.objectsDeleted.message": "Objekte {labels} gelöscht.",
  "common.CrudEditor.objectsDeleteIsNoAllowed.message": "Aus Sicherheitsgründen lassen sich einige Objekte nicht löschen.",
  "common.CrudEditor.objectDeleteFailed.message": "Das Objekt kann nicht gelöscht werden, vielleicht ist es bereits im Einsatz.",
  "common.CrudEditor.objectsDeleteFailed.message": "Objekte {count} konnten nicht gelöscht werden, vielleicht sind sie bereits im Einsatz.",
  "common.CrudEditor.objectDuplicated.message": "Das Objekt wird kopiert.",
  "common.CrudEditor.noAssociationEntriesFound.message": "Keine Einträge gefunden. Sie können einen neuen Eintrag {1}",
  "common.CrudEditor.message.ajax.loading": "Bitte warten...",
  "common.CrudEditor.search.result.label": "Suchergebnis",
  "common.CrudEditor.unsaved.confirmation": "Sie haben Änderungen vorgenommen. Wenn Sie diese Seite verlassen, werden die Änderungen nicht gespeichert.",
  "common.CrudEditor.search.resultsPerPage": "Ergebnisse pro Seite",
  "common.CrudEditor.search.all": "Alles",
  "common.CrudEditor.found.items.message": "{count} Datens\u00E4tze gefunden",
  "common.CrudEditor.range.from": "von",
  "common.CrudEditor.range.to": "bis",
  "common.CrudEditor.confirm.action": "Bestätigen",
  "common.CrudEditor.search.showSearchForm": "Suchmaske einblenden",
  "common.CrudEditor.search.hideSearchForm": "Suchmaske ausblenden",
  "common.CrudEditor.pagination.goToPage": "Öffnen"
}
/* eslint-enable max-len */
// Exception messages are merged last so they can override common keys.
export default {
  ...common,
  ...exceptions
}
<|start_filename|>src/crudeditor-lib/i18n/no.js<|end_filename|>
import exceptions from './exceptions/no';
/* eslint-disable max-len */
// Norwegian (no) translations for common CRUD Editor UI messages.
const common = {
  "common.CrudEditor.new.title": "Ny",
  "common.CrudEditor.create.header": "Opprett {modelName}",
  "common.CrudEditor.edit.header": "Rediger {modelName}",
  "common.CrudEditor.show.header": "Vis {modelName}",
  "common.CrudEditor.duplicate.header": "Lag kopi av {modelName}",
  "common.CrudEditor.cancel.button": "Avbryt",
  "common.CrudEditor.save.button": "Lagre",
  "common.CrudEditor.saveAndNew.button": "Lagre og ny",
  "common.CrudEditor.saveAndNext.button": "Lagre og neste",
  "common.CrudEditor.search.header": "Søk {payload}",
  "common.CrudEditor.search.button": "Søk",
  "common.CrudEditor.reset.button": "Tilbakestill",
  "common.CrudEditor.create.button": "Opprett",
  "common.CrudEditor.select.button": "Velg",
  "common.CrudEditor.close.button": "Lukk",
  "common.CrudEditor.actions.tableHeader": "Handlinger",
  "common.CrudEditor.show.button": "Visning",
  "common.CrudEditor.edit.button": "Rediger",
  "common.CrudEditor.delete.button": "Slett",
  "common.CrudEditor.deleteSelected.button": "Slett valgt",
  "common.CrudEditor.duplicate.button": "Duplikat",
  "common.CrudEditor.refresh.button": "Oppdater",
  "common.CrudEditor.revisions.button": "Revisjoner",
  "common.CrudEditor.delete.confirmation": "Vil du slette denne posten?",
  "common.CrudEditor.deleteSelected.confirmation": "Vil du slette de valgte elementene?",
  "common.CrudEditor.noItemsSelected.alert": "Elementer er ikke valgt!",
  "common.CrudEditor.objectSaved.message": "Objekt opprettet.",
  "common.CrudEditor.objectUpdated.message": "Objekt oppdatert.",
  "common.CrudEditor.objectSaveFailed.message": "Objektet kunne ikke lagres.",
  "common.CrudEditor.objectDeleted.message": "Objekt slettet.",
  "common.CrudEditor.objectsDeleted.message": "Objekter {labels} slettet.",
  "common.CrudEditor.objectsDeleteIsNoAllowed.message": "Noen av objektene kan ikke slettes på grunn av sikkerhetsbegrensninger.",
  "common.CrudEditor.objectDeleteFailed.message": "Kunne ikke slette objektet. Kanskje det er i bruk allerede.",
  "common.CrudEditor.objectsDeleteFailed.message": "Kunne ikke slette objektene {count}. Kanskje de er i bruk allerede.",
  "common.CrudEditor.objectDuplicated.message": "Objektet er kopiert.",
  "common.CrudEditor.noAssociationEntriesFound.message": "Fant ingen poster. Du kan {1} en ny post.",
  "common.CrudEditor.message.ajax.loading": "Vent litt ...",
  "common.CrudEditor.search.result.label": "Søkeresultat",
  "common.CrudEditor.unsaved.confirmation": "Du har foretatt endringer. Endringene går tapt hvis du forlater denne siden.",
  "common.CrudEditor.search.resultsPerPage": "Resultater per side",
  "common.CrudEditor.search.all": "Alle",
  "common.CrudEditor.export.button": "Eksporter",
  "common.CrudEditor.found.items.message": "{count} elementer funnet",
  "common.CrudEditor.range.from": "fra",
  "common.CrudEditor.range.to": "til",
  "common.CrudEditor.confirm.action": "Bekreft",
  "common.CrudEditor.search.showSearchForm": "Vis søkeskjema",
  "common.CrudEditor.search.hideSearchForm": "Skjul søkeskjema",
  "common.CrudEditor.pagination.goToPage": "Go"
}
/* eslint-enable max-len */
// Exception messages are merged last so they can override common keys.
export default {
  ...common,
  ...exceptions
}
<|start_filename|>src/crudeditor-lib/components/ViewSwitcher/index.js<|end_filename|>
import React from 'react';
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import SearchView from '../../views/search/container';
import CreateView from '../../views/create/container';
import EditView from '../../views/edit/container';
import ShowView from '../../views/show/container';
import ErrorView from '../../views/error/container';
import {
VIEW_SEARCH,
VIEW_CREATE,
VIEW_EDIT,
VIEW_SHOW,
VIEW_ERROR
} from '../../common/constants';
import WithAlerts from '../WithAlertsHOC';
const ViewSwitcher = ({ activeViewName, modelDefinition, externalOperations, uiConfig }, { i18n }) => {
if (!activeViewName) {
return null;
}
const ViewContainer = ({
[VIEW_SEARCH]: SearchView,
[VIEW_CREATE]: CreateView,
[VIEW_EDIT]: EditView,
[VIEW_SHOW]: ShowView,
[VIEW_ERROR]: ErrorView
})[activeViewName];
return (
<div>
{
ViewContainer ?
<ViewContainer
modelDefinition={modelDefinition}
externalOperations={externalOperations}
uiConfig={uiConfig}
i18n={i18n}
/> :
<div>Unknown view <i>{activeViewName}</i></div>
}
</div>
);
}
ViewSwitcher.propTypes = {
  activeViewName: PropTypes.string,
  modelDefinition: PropTypes.object.isRequired,
  externalOperations: PropTypes.func.isRequired,
  uiConfig: PropTypes.object.isRequired
};
// i18n is provided via legacy React context by the editor root.
ViewSwitcher.contextTypes = {
  i18n: PropTypes.object
};
// Connect the active view name from the store and wrap with the alerts HOC.
export default connect(
  storeState => ({ activeViewName: storeState.common.activeViewName })
)(WithAlerts(ViewSwitcher));
<|start_filename|>src/components/SearchPaginationPanel/index.js<|end_filename|>
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';
import Dropdown from 'react-bootstrap/lib/Dropdown';
import MenuItem from 'react-bootstrap/lib/MenuItem';
import find from 'lodash/find';
import PaginationPanel from './PaginationPanel';
import './SearchPaginationPanel.less';
export default class SearchResultPaginationPanel extends PureComponent {
static propTypes = {
model: PropTypes.shape({
data: PropTypes.shape({
pageParams: PropTypes.shape({
max: PropTypes.number,
offset: PropTypes.number
}),
totalCount: PropTypes.number,
gotoPage: PropTypes.string
}).isRequired,
actions: PropTypes.objectOf(PropTypes.func)
}).isRequired
}
static contextTypes = {
i18n: PropTypes.object.isRequired
};
handlePaginate = activePage => this.props.model.actions.searchInstances({
offset: (activePage - 1) * this.props.model.data.pageParams.max
})
handleMaxChange = pageMax => this.props.model.actions.searchInstances({ max: pageMax })
render() {
const {
data: {
gotoPage,
totalCount,
pageParams: {
max,
offset
},
pagination
},
actions: { updateGotoPage }
} = this.props.model;
const { i18n } = this.context;
return (
<div className="crud--search-pagination-panel clearfix">
<div className='paginate'>
<Dropdown
id='max-dropdown'
onSelect={this.handleMaxChange}
dropup={true}
className="crud--search-pagination-panel__per-page-dropdown"
>
<Dropdown.Toggle>
{i18n.getMessage('common.CrudEditor.search.resultsPerPage')}
{':\u0020'}
<b>{find(pagination.options, opt => opt.max === max).label}</b>
</Dropdown.Toggle>
<Dropdown.Menu>
{
pagination.options.map(({ max: value, label }) => (
<MenuItem key={value} eventKey={value} active={max === value}>{label}</MenuItem>
))
}
</Dropdown.Menu>
</Dropdown>
</div>
{
totalCount > max && max > 0 &&
<div className="crud--search-pagination-panel__paginate">
<PaginationPanel
totalCount={totalCount}
max={max}
offset={offset}
onPaginate={this.handlePaginate}
gotoPage={gotoPage}
onGotoPageChange={updateGotoPage}
/>
</div>
}
<div>
<span>{this.context.i18n.getMessage('common.CrudEditor.found.items.message', { count: totalCount })}</span>
</div>
</div>
);
}
}
<|start_filename|>src/crudeditor-lib/views/edit/index.js<|end_filename|>
import { VIEW_NAME } from './constants';
import { buildFormLayout } from '../lib';
export { getViewState } from './selectors';
/*
 * Builds the Edit view UI meta for the given model definition:
 * compiles the (optional) custom form layout into the final layout
 * via buildFormLayout.
 * NOTE: when modelDefinition.ui.edit exists, it is mutated in place —
 * its formLayout property is replaced with the compiled layout.
 */
export const getUi = ({ modelDefinition }) => {
  const { ui, model } = modelDefinition;
  const editMeta = ui.edit || {};
  const customBuilder = editMeta.formLayout;

  editMeta.formLayout = buildFormLayout({
    customBuilder,
    viewName: VIEW_NAME,
    fieldsMeta: model.fields
  });

  return editMeta;
}
<|start_filename|>src/components/FormGrid/index.js<|end_filename|>
import React from 'react';
import PropTypes from 'prop-types';
import Row from 'react-bootstrap/lib/Row';
import Col from 'react-bootstrap/lib/Col';
import Section from '../EditSection';
import Field from '../EditField';
// Number of columns used when activeEntries does not specify one.
const DEFAULT_COLUMNS_COUNT = 1;
/*
 * Renders the edit/create form as a Bootstrap grid.
 * Fields are laid out row by row with `columnsCnt` columns per row;
 * section entries get their own <Section> wrapper with their own column count.
 * `uniqueKey` is a module-level-per-render counter used only to produce
 * distinct React keys across recursive buildRow/buildGrid invocations.
 */
const formGrid = ({ model, toggledFieldErrors, toggleFieldErrors }) => {
  let uniqueKey = 1;
  // Renders `fields` as rows of `columnsCnt` Bootstrap columns each.
  const buildRow = (fields, columnsCnt) => {
    const rowsCnt = Math.ceil(fields.length / columnsCnt);
    const colSize = Math.floor(12 / columnsCnt);
    // list element id in matrix with 'columnsCnt' columns
    // indexes start from 0
    const elIdx = ({ rowIdx, columnIdx }) => columnsCnt * rowIdx + columnIdx;
    ++uniqueKey;
    return [...Array(rowsCnt).keys()].map(rowIdx => (
      <Row key={'row-' + uniqueKey + '-' + rowIdx}>
        {
          [...Array(columnsCnt).keys()].map(columnIdx => {
            const fieldIdx = elIdx({ rowIdx, columnIdx });
            // if fieldIdx exceeds fields length then field gonna be undefined
            const field = fields[fieldIdx];
            return (
              <Col
                sm={colSize}
                key={'column-' + uniqueKey + '-' + rowIdx + '-' + columnIdx}
              >
                {
                  field ? (
                    <Field
                      model={model}
                      toggledFieldErrors={toggledFieldErrors}
                      toggleFieldErrors={toggleFieldErrors}
                      columns={columnsCnt}
                      entry={{
                        name: field.field,
                        readOnly: field.readOnly,
                        component: field.render.component,
                        valuePropName: field.render.value.propName
                      }}
                    />
                  ) : null // grid may have empty elements in the end if fields.length < rowCnt * columnsCnt
                }
              </Col>
            )
          })
        }
      </Row>
    ))
  };
  // Recursively renders entries: a leading section entry is rendered as a
  // <Section> (the entry appears to be an array of fields that also carries
  // `section`/`columns` properties — TODO confirm against activeEntries shape);
  // otherwise the longest run of plain field entries becomes a set of rows.
  const buildGrid = (entries, tabColumns) => {
    if (!entries.length) {
      return [];
    }
    if (entries[0].section) {
      const [section, ...rest] = entries;
      return [
        <Section title={section.section} model={model} key={'section-' + ++uniqueKey}>
          {
            buildRow(section, section.columns)
          }
        </Section>,
        ...buildGrid(rest, tabColumns)
      ];
    }
    let nextIndex = 1; // Next index after last sequential field.
    while (nextIndex < entries.length && entries[nextIndex].field) {
      nextIndex++;
    }
    return [
      buildRow(entries.slice(0, nextIndex), tabColumns),
      ...buildGrid(entries.slice(nextIndex), tabColumns)
    ]
  }
  return (
    <div>
      {
        buildGrid(model.data.activeEntries, model.data.activeEntries.columns || DEFAULT_COLUMNS_COUNT)
      }
    </div>
  );
}
// Prop contract: activeEntries mixes plain field descriptors with section
// arrays, hence the oneOfType([shape, array]) declaration.
formGrid.propTypes = {
  model: PropTypes.shape({
    data: PropTypes.shape({
      activeEntries: PropTypes.arrayOf(PropTypes.oneOfType([
        PropTypes.shape({
          field: PropTypes.string,
          readOnly: PropTypes.bool,
          validate: PropTypes.func,
          render: PropTypes.shape({
            component: PropTypes.func,
            props: PropTypes.object,
            value: PropTypes.shape({
              type: PropTypes.string,
              propName: PropTypes.string,
              converter: PropTypes.shape({
                format: PropTypes.func,
                parse: PropTypes.func
              })
            })
          })
        }),
        PropTypes.array
      ]))
    })
  }),
  toggledFieldErrors: PropTypes.object,
  toggleFieldErrors: PropTypes.func
}
export default formGrid;
<|start_filename|>src/crudeditor-lib/common/scenario.js<|end_filename|>
import { take, cancel, call, fork, cancelled, put } from 'redux-saga/effects';
import { isSystemError } from '../lib';
import {
VIEW_CREATE,
VIEW_EDIT,
VIEW_SHOW,
VIEW_SEARCH,
VIEW_ERROR
} from './constants';
import { VIEW_REDIRECT_SUCCESS as CREATE_VIEW_REDIRECT_SUCCESS } from '../views/create/constants';
import { VIEW_REDIRECT_SUCCESS as EDIT_VIEW_REDIRECT_SUCCESS } from '../views/edit/constants';
import { VIEW_REDIRECT_SUCCESS as SHOW_VIEW_REDIRECT_SUCCESS } from '../views/show/constants';
import { VIEW_REDIRECT_SUCCESS as SEARCH_VIEW_REDIRECT_SUCCESS } from '../views/search/constants';
import { VIEW_REDIRECT_SUCCESS as ERROR_VIEW_REDIRECT_SUCCESS } from '../views/error/constants';
// Maps each view name to the action type dispatched when the view's scenario
// saga is cancelled, i.e. when a redirect away from the view has succeeded.
const VIEW_REDIRECT_SUCCESS = {
  [VIEW_CREATE]: CREATE_VIEW_REDIRECT_SUCCESS,
  [VIEW_EDIT]: EDIT_VIEW_REDIRECT_SUCCESS,
  [VIEW_SHOW]: SHOW_VIEW_REDIRECT_SUCCESS,
  [VIEW_SEARCH]: SEARCH_VIEW_REDIRECT_SUCCESS,
  [VIEW_ERROR]: ERROR_VIEW_REDIRECT_SUCCESS
}
/*
 * View life cycle scenario saga.
 * It must handle all errors and do clean-up on cancelation (happens on soft/hard redirect).
 *
 * When the view wants to exit during its life cycle, it must call softRedirectSaga
 * which cancels life cycle scenario-saga in case of successful redirect,
 * or throws error(s) otherwise
 * => softRedirectSaga must be passed to all worker sagas.
 */
const scenarioSaga = /* istanbul ignore next */ function*({
  modelDefinition,
  softRedirectSaga,
  transitions,
  viewName
}) {
  let lastTask;
  while (true) {
    // Wait for any registered transition action, blocking or non-blocking.
    const action = yield take([
      ...Object.keys(transitions.blocking),
      ...Object.keys(transitions.nonBlocking)
    ]);
    // Automatically cancel any task started previously if it's still running.
    if (lastTask) {
      yield cancel(lastTask);
    }
    if (Object.keys(transitions.blocking).indexOf(action.type) > -1) {
      // Blocking transition: run synchronously; further actions are not
      // taken until the worker saga completes.
      try {
        yield call(transitions.blocking[action.type], {
          modelDefinition,
          softRedirectSaga,
          action: {
            ...action,
            meta: {
              ...action.meta,
              // Tag the action with the view that spawned its handling.
              spawner: viewName
            }
          }
        });
      } catch (err) {
        // Swallow custom errors.
        if (isSystemError(err)) {
          throw err;
        }
      }
    } else if (Object.keys(transitions.nonBlocking).indexOf(action.type) > -1) {
      // Non-blocking transition: fork so the loop keeps taking actions;
      // the forked task is cancelled by the next incoming transition.
      lastTask = yield fork(function*() {
        try {
          yield call(transitions.nonBlocking[action.type], {
            modelDefinition,
            softRedirectSaga,
            action: {
              ...action,
              meta: {
                ...action.meta,
                spawner: viewName
              }
            }
          });
        } catch (err) {
          // Swallow custom errors.
          if (isSystemError(err)) {
            throw err;
          }
        }
      });
    }
  }
};
// Entry point: runs the scenario loop and, when cancelled (successful
// soft/hard redirect away from the view), dispatches the view-specific
// VIEW_REDIRECT_SUCCESS action as clean-up.
export default /* istanbul ignore next */ function*({
  modelDefinition,
  softRedirectSaga,
  transitions,
  viewName
}) {
  try {
    yield call(scenarioSaga, {
      modelDefinition,
      softRedirectSaga,
      transitions,
      viewName
    });
  } finally {
    if (yield cancelled()) {
      yield put({
        type: VIEW_REDIRECT_SUCCESS[viewName]
      });
    }
  }
}
<|start_filename|>src/components/FieldString/FieldString.spec.js<|end_filename|>
import React from "react";
import Enzyme from "enzyme";
import Adapter from 'enzyme-adapter-react-16';
import { expect } from 'chai';
import sinon from 'sinon';
import FieldString from "./";
import FormControl from 'react-bootstrap/lib/FormControl';
Enzyme.configure({ adapter: new Adapter() });
// Unit tests for the FieldString component: value rendering, null/undefined
// normalization to an empty string, and handler propagation to FormControl.
describe("FieldString", _ => {
  it("should properly render a FormControl", () => {
    const props = {
      value: 'some string'
    };
    const wrapper = Enzyme.mount(<FieldString {...props} />);
    expect(wrapper.find(FormControl).prop('value')).to.equal(props.value); // eslint-disable-line no-unused-expressions
  });
  it("should pass an empty string value for null/undefined value prop", () => {
    const props = {
      value: null
    };
    const wrapper = Enzyme.mount(<FieldString {...props} />);
    expect(wrapper.find(FormControl).prop('value')).to.equal(''); // eslint-disable-line no-unused-expressions
  });
  it("should render a FormControl and pass handlers", () => {
    const onChange = sinon.spy();
    const onBlur = sinon.spy();
    const props = {
      readOnly: false,
      value: 'some string',
      onChange,
      onBlur
    };
    const wrapper = Enzyme.mount(<FieldString {...props} />);
    const fc = wrapper.find(FormControl)
    // Invoke the handlers the component wired into FormControl; onChange is
    // expected to receive the unwrapped value, not the raw event.
    fc.prop('onChange')({ target: { value: 'new string' } })
    fc.prop('onBlur')()
    expect(onChange.calledOnce).to.be.true; // eslint-disable-line no-unused-expressions
    expect(onBlur.calledOnce).to.be.true; // eslint-disable-line no-unused-expressions
    expect(onChange.calledWith('new string')).to.be.true; // eslint-disable-line no-unused-expressions
  });
});
<|start_filename|>src/crudeditor-lib/views/search/reducer.spec.js<|end_filename|>
import { assert } from 'chai';
import { buildDefaultStoreState } from './reducer';
import {
DEFAULT_OFFSET,
DEFAULT_ORDER
} from './constants';
// Tests for buildDefaultStoreState of the search view reducer: verifies the
// initial filters, sort/page params and status derived from a model definition.
describe('search view reducer', () => {
  describe('buildDefaultStoreState', () => {
    const fields = [
      { name: 'one' },
      { name: 'two' },
      { name: 'three' },
      { name: 'four' }
    ];
    // Give every field an identity format converter, as the reducer expects
    // fields pre-processed by the search UI builder.
    fields.forEach(field => {
      field.render = { // eslint-disable-line no-param-reassign
        value: {
          converter: {
            format: value => value
          }
        }
      }
    });
    const modelDefinition = {
      ui: {
        search: {
          searchableFields: [
            ...fields
          ],
          resultFields: [
            ...fields
          ],
          pagination: {
            defaultMax: 30
          }
        }
      }
    }
    it('should return default state', () => {
      const result = buildDefaultStoreState(modelDefinition);
      assert.deepEqual(
        result, {
          resultFilter: {
            one: null,
            two: null,
            three: null,
            four: null
          },
          formFilter: {
            one: null,
            two: null,
            three: null,
            four: null
          },
          formattedFilter: {
            one: null,
            two: null,
            three: null,
            four: null
          },
          // Default sort: first searchable field, default order.
          sortParams: {
            field: 'one',
            order: DEFAULT_ORDER
          },
          pageParams: {
            max: modelDefinition.ui.search.pagination.defaultMax,
            offset: DEFAULT_OFFSET
          },
          selectedInstances: [],
          errors: {
            fields: {}
          },
          status: 'uninitialized',
          hideSearchForm: false,
          resultInstances: undefined,
          totalCount: undefined,
          gotoPage: ''
        }
      )
    });
  });
});
<|start_filename|>src/demo/models/second-model/i18n/en.js<|end_filename|>
// English UI labels for the demo "second-model" (Contracts) editor.
// Keys follow the "model.<scope>.<id>.label" convention consumed by the
// CRUD editor's i18n service.
export default {
  "model.name": "Contracts",
  "model.tab.general.label": "General",
  "model.tab.additional.label": "Additional",
  "model.section.test.label": "My test section",
  "model.field.testNumberTypeField.label": "Test Number Type Field",
  "model.field.contractBoilerplates.label": "Contract Boilerplates",
  "model.field.hierarchyCode.label": "Hierarchy Code",
  "model.field.termsOfPaymentId.label": "Terms Of Payment Id",
  "model.field.description.label": "Description",
  "model.field.description.error.forbiddenWord": "Description may not contain `{forbiddenWord}`",
  "model.field.termsOfDeliveryId.label": "Terms Of Delivery Id",
  "model.field.freeShippingBoundary.label": "Free Shipping Boundary",
  "model.field.createdOn.label": "Created On",
  "model.field.changedOn.label": "Changed On",
  "model.field.contractedCatalogs.label": "Contracted Catalogs",
  "model.field.minOrderValueRequired.label": "Min Order Value Required",
  "model.field.contractedClassificationGroups.label": "contractedClassificationGroups",
  "model.field.extContractId.label": "Ext Contract Id",
  "model.field.children.label": "children",
  "model.field.changedBy.label": "Changed By",
  "model.field.usages.label": "usages",
  "model.field.currencyId.label": "currencyId",
  "model.label.createChild": "Create child"
}
<|start_filename|>src/crudeditor-lib/views/search/index.js<|end_filename|>
import cloneDeep from 'lodash/cloneDeep';
import { buildFieldRender } from '../lib';
export { getViewState } from './selectors';
import { converter } from '../../../data-types-lib';
import { checkSearchUi } from '../../check-model';
import {
FIELD_TYPE_DECIMAL,
FIELD_TYPE_DECIMAL_RANGE,
FIELD_TYPE_INTEGER,
FIELD_TYPE_INTEGER_RANGE,
FIELD_TYPE_STRING_DATE,
FIELD_TYPE_STRING_DATE_ONLY,
FIELD_TYPE_STRING_DATE_RANGE,
FIELD_TYPE_STRING_INTEGER,
FIELD_TYPE_STRING_INTEGER_RANGE,
FIELD_TYPE_STRING_DECIMAL,
FIELD_TYPE_STRING_DECIMAL_RANGE,
UI_TYPE_STRING
} from '../../../data-types-lib/constants';
// Maps a scalar field type to its "range" counterpart; searchable fields
// without a custom render get from/to range inputs for these types.
const rangeFieldType = {
  [FIELD_TYPE_INTEGER]: FIELD_TYPE_INTEGER_RANGE,
  [FIELD_TYPE_DECIMAL]: FIELD_TYPE_DECIMAL_RANGE,
  [FIELD_TYPE_STRING_DATE]: FIELD_TYPE_STRING_DATE_RANGE,
  [FIELD_TYPE_STRING_DATE_ONLY]: FIELD_TYPE_STRING_DATE_RANGE,
  [FIELD_TYPE_STRING_INTEGER]: FIELD_TYPE_STRING_INTEGER_RANGE,
  [FIELD_TYPE_STRING_DECIMAL]: FIELD_TYPE_STRING_DECIMAL_RANGE
};
/*
 * Builds the Search view UI meta: fills in default resultFields,
 * searchableFields and pagination options, validates the result with
 * checkSearchUi, then attaches format converters to result fields and
 * render descriptors to searchable fields.
 * Note: ui.search is a function here and its result is deep-cloned, so the
 * caller-supplied meta is never mutated.
 */
export const getUi = ({ modelDefinition, i18n }) => {
  const fieldsMeta = modelDefinition.model.fields;
  const searchMeta = modelDefinition.ui.search ?
    cloneDeep(modelDefinition.ui.search()) :
    {};
  if (!searchMeta.resultFields) {
    searchMeta.resultFields = Object.keys(fieldsMeta).map(name => ({ name }));
  }
  if (!searchMeta.searchableFields) {
    searchMeta.searchableFields = Object.keys(fieldsMeta).map(name => ({ name }));
  }
  if (!searchMeta.pagination) {
    // max === -1 means "show all results" (label is localized).
    searchMeta.pagination = {
      defaultMax: 30,
      options: [
        { max: -1, label: i18n.getMessage('common.CrudEditor.search.all') },
        { max: 1000, label: '1000' },
        { max: 100, label: '100' },
        { max: 50, label: '50' },
        { max: 30, label: '30' },
        { max: 10, label: '10' }
      ]
    }
  }
  checkSearchUi({ searchMeta, fieldsMeta });
  // Result fields without a custom component get a string formatter derived
  // from the field type (identity when no converter exists).
  searchMeta.resultFields.
    filter(({ component }) => !component).
    forEach(field => {
      const defaultConverter = converter({
        fieldType: fieldsMeta[field.name].type,
        uiType: UI_TYPE_STRING
      });
      // eslint-disable-next-line no-param-reassign
      field.format = defaultConverter ?
        defaultConverter.format :
        (value => value);
    });
  // Searchable fields without a custom render are widened to the range
  // variant of their field type, when one exists.
  searchMeta.searchableFields.forEach(field => {
    const fieldType = fieldsMeta[field.name].type;
    field.render = buildFieldRender({ // eslint-disable-line no-param-reassign
      render: field.render,
      type: !field.render && rangeFieldType[fieldType] || fieldType
    });
  });
  return searchMeta;
}
<|start_filename|>src/crudeditor-lib/check-model/searchUi.js<|end_filename|>
import PropTypes from 'prop-types';
import find from 'lodash/find';
import { uiTypes, allPropTypes } from './lib';
// PropTypes-based validation of the Search view UI meta. Each entry combines
// a structural shape check with a custom cross-field validator (allPropTypes
// runs all of them and reports the first failure).
const searchUiPropTypes = /* istanbul ignore next */ fieldsMeta => ({
  searchableFields: allPropTypes(
    PropTypes.arrayOf(PropTypes.shape({
      name: PropTypes.string.isRequired,
      render: PropTypes.shape({
        component: PropTypes.oneOfType([PropTypes.func, PropTypes.string]),
        props: PropTypes.object,
        value: PropTypes.shape({
          propName: PropTypes.string,
          type: PropTypes.oneOf(uiTypes),
          converter: PropTypes.shape({
            format: PropTypes.func,
            parse: PropTypes.func
          })
        })
      })
    })).isRequired,
    // A custom render descriptor is meaningless without a component.
    (props, propName, componentName) => { // eslint-disable-line consistent-return
      const brokenField = find(props[propName], field => field.render && !field.render.component);
      if (brokenField) {
        return new Error(`${componentName}:
        searchableField "${brokenField.name}" must have render.component since custom render is specified
        `);
      }
    }
  ),
  resultFields: allPropTypes(
    PropTypes.arrayOf(PropTypes.shape({
      name: PropTypes.string.isRequired,
      sortable: PropTypes.bool,
      sortByDefault: PropTypes.bool,
      textAlignment: PropTypes.oneOf(['left', 'center', 'right']),
      component: PropTypes.func
    })).isRequired,
    // A result field not present in the model's fields meta must supply its
    // own component (it is a composite/computed field).
    (props, propName, componentName) => { // eslint-disable-line consistent-return
      const brokenField = find(props[propName], ({ name, component }) => !component && !fieldsMeta[name])
      if (brokenField) {
        return new Error(`${componentName}:
        Composite field "${brokenField.name}" in resultFields must have "component" property
        `);
      }
    }
  ),
  pagination: allPropTypes(
    PropTypes.shape({
      defaultMax: PropTypes.number,
      options: PropTypes.arrayOf(PropTypes.shape({
        max: PropTypes.number,
        label: PropTypes.string
      }))
    }),
    // defaultMax must correspond to one of the selectable page sizes.
    (props, propName, componentName) => { // eslint-disable-line consistent-return
      const pagination = props[propName];
      if (pagination && pagination.options.findIndex(({ max }) => max === pagination.defaultMax) === -1) {
        return new Error(`${componentName}:
        pagination.defaultMax "${props[propName].defaultMax}" should be equal to one of "max" in pagination.options
        `);
      }
    }
  )
})
// Validates searchMeta against the contract above; failures are reported via
// React's checkPropTypes console warnings.
export default /* istanbul ignore next */ ({ searchMeta, fieldsMeta }) => PropTypes.checkPropTypes(
  searchUiPropTypes(fieldsMeta),
  searchMeta,
  'property',
  'React-CrudEditor Search UI'
)
<|start_filename|>src/components/SearchPaginationPanel/PaginationPanel.js<|end_filename|>
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';
import Pagination from 'react-bootstrap/lib/Pagination';
/*
 * Page navigation for the search result: a bootstrap Pagination control
 * plus a free-form "go to page" input with its own submit button.
 * Page numbers are 1-based; the parent translates them into offsets.
 */
export default class PaginationPanel extends PureComponent {
  static propTypes = {
    max: PropTypes.number.isRequired,
    totalCount: PropTypes.number.isRequired,
    offset: PropTypes.number.isRequired,
    onPaginate: PropTypes.func.isRequired,
    gotoPage: PropTypes.string.isRequired,
    onGotoPageChange: PropTypes.func.isRequired
  };

  static contextTypes = {
    i18n: PropTypes.object.isRequired
  };

  // Propagate raw input text to the parent (controlled input).
  handleGotoPageChange = ({
    target: { value }
  }) => this.props.onGotoPageChange(value);

  handleGoToPage = event => {
    event.preventDefault(); // prevent reload on submit
    event.stopPropagation(); // in case we use component in outer 'form' element
    const { totalCount, max, onPaginate, gotoPage, onGotoPageChange } = this.props;
    let page = parseInt(gotoPage, 10); // 10 is a radix.
    // Reject NaN/zero, negatives and non-integer input (e.g. "2.5" or "2abc",
    // where parseInt and Number disagree) by clearing the input.
    if (!page || page < 0 || page !== Number(gotoPage)) {
      onGotoPageChange('');
      return;
    }
    // Clamp to the last available page.
    let maxPage = Math.ceil(totalCount / max);
    if (page > maxPage) {
      page = maxPage;
    }
    onPaginate(page);
  };

  render() {
    const { totalCount, max, offset, onPaginate, gotoPage } = this.props;

    return (
      <div>
        <div className="pull-left" style={{ marginRight: '1em' }}>
          <Pagination
            activePage={offset / max + 1}
            onSelect={onPaginate}
            items={Math.ceil(totalCount / max)}
            prev={<span className="glyphicon glyphicon-backward" />}
            next={<span className="glyphicon glyphicon-forward" />}
            className="crud--search-pagination-panel__pagination"
            maxButtons={5}
            boundaryLinks={true}
          />
        </div>
        <form
          className="pull-left"
          onSubmit={this.handleGoToPage}
        >
          <div className="pull-left" style={{ marginRight: '1em' }}>
            <input
              className='form-control'
              name='gotoPage'
              style={{ width: '50px', textAlign: 'center' }}
              onChange={this.handleGotoPageChange}
              value={gotoPage}
            />
          </div>
          <div className="pull-left">
            <button className='btn btn-default' type='submit'>
              {this.context.i18n.getMessage('common.CrudEditor.pagination.goToPage')}
            </button>
          </div>
        </form>
      </div>
    );
  }
}
<|start_filename|>src/crudeditor-lib/views/create/workerSagas/save.js<|end_filename|>
import { call, put, select } from 'redux-saga/effects';
import redirectSaga from '../../../common/workerSagas/redirect';
import validateSaga from '../../../common/workerSagas/validate';
import saveSaga from '../../../common/workerSagas/save';
import { getDefaultNewInstance } from '../../search/selectors';
import { isAllowed } from '../../../lib';
import { VIEW_ERROR, VIEW_EDIT, VIEW_SHOW, PERMISSION_EDIT } from '../../../common/constants';
import { AFTER_ACTION_NEW, VIEW_INITIALIZE, VIEW_NAME } from '../constants';
/*
* XXX: in case of failure, a worker saga must dispatch an appropriate action and exit by throwing error(s).
*/
/*
 * Worker saga for saving a new instance from the Create view.
 * Validates, saves, then either re-initializes the Create view for another
 * instance (AFTER_ACTION_NEW) or redirects to the Edit/Show view of the
 * saved instance, depending on edit permission.
 */
export default function*({
  modelDefinition,
  softRedirectSaga,
  action: {
    payload: /* istanbul ignore next */ { afterAction } = {},
    meta
  }
}) {
  // Forwarding thrown error(s) to the parent saga.
  yield call(validateSaga, { modelDefinition, meta });
  const savedInstance = yield call(saveSaga, { modelDefinition, meta });
  if (afterAction === AFTER_ACTION_NEW) {
    // create another instance
    yield put({
      type: VIEW_INITIALIZE,
      payload: {
        // Pre-populate the fresh form with the search view's default values.
        predefinedFields: yield select(storeState => getDefaultNewInstance(storeState, modelDefinition))
      },
      meta
    });
  } else {
    try {
      // Preserve the currently active tab across the redirect.
      const tab = yield select(storeState => storeState.views[VIEW_NAME].activeTab);
      yield call(redirectSaga, {
        modelDefinition,
        softRedirectSaga,
        action: {
          payload: {
            view: {
              // Redirect to Edit when permitted for this instance, else Show.
              name: /* istanbul ignore next */ isAllowed(
                modelDefinition.permissions.crudOperations,
                PERMISSION_EDIT,
                { instance: savedInstance }
              ) ?
                VIEW_EDIT :
                VIEW_SHOW,
              state: {
                instance: savedInstance,
                tab: tab && tab.tab
              }
            }
          },
          meta
        }
      })
    } catch (err) {
      // Redirect failure falls back to the Error view.
      /* istanbul ignore next */
      yield call(softRedirectSaga, {
        viewName: VIEW_ERROR,
        viewState: err
      });
    }
  }
}
<|start_filename|>src/crudeditor-lib/i18n/exceptions/en.js<|end_filename|>
// English messages for validation/exception errors raised by the CRUD editor.
// ''{name}'' placeholders are substituted by the i18n message formatter.
export default {
  "common.CrudEditor.default.doesnt.match.message": "The value does not match the required pattern ''{pattern}''",
  "common.CrudEditor.default.invalid.email.message": "Not a valid e-mail address format",
  "common.CrudEditor.default.invalid.max.message": "The value exceeds the maximum value ''{max}''",
  "common.CrudEditor.default.invalid.min.message": "The value is less than the minimum value ''{min}''",
  "common.CrudEditor.default.invalid.max.size.message": "The value exceeds the maximum size of ''{max}''",
  "common.CrudEditor.default.invalid.min.size.message": "The value is less than the minimum size of ''{min}''",
  "common.CrudEditor.default.invalid.validator.message": "The value does not pass custom validation",
  "common.CrudEditor.default.blank.message": "The field cannot be blank",
  "common.CrudEditor.default.null.message": "The property cannot be null",
  "common.CrudEditor.default.not.unique.message": "The value must be unique",
  "common.CrudEditor.default.invalid.url.message": "The value must be a valid URL",
  "common.CrudEditor.default.invalid.date.message": "The value must be a valid Date",
  "common.CrudEditor.default.invalid.decimal.message": "The value must be a valid number",
  "common.CrudEditor.default.invalid.integer.message": "The value must be a valid number",
  "common.CrudEditor.default.errorOccurred.message": "Error occurred"
}
<|start_filename|>src/crudeditor-lib/views/error/scenario.spec.js<|end_filename|>
import { expect } from 'chai';
import { runSaga } from 'redux-saga';
import scenarioSaga from './scenario';
import {
VIEW_INITIALIZE
} from './constants';
// Fixture for the error-view scenario saga: minimal model definition,
// a no-op soft-redirect saga and a sample error view state.
const arg = {
  modelDefinition: {
    api: {
      get: _ => ({})
    },
    model: {
      fields: {}
    }
  },
  softRedirectSaga: _ => null,
  viewState: {
    code: 303,
    message: 'Some error'
  }
}
describe('error view / scenario', () => {
  it('should initialize view with error', () => {
    const dispatched = [];
    runSaga({
      dispatch: (action) => dispatched.push(action)
    }, scenarioSaga, arg);
    expect(dispatched[0]).to.deep.equal({
      type: VIEW_INITIALIZE,
      payload: arg.viewState,
      meta: {
        // NOTE(review): arg has no "source" key, so this asserts
        // meta.source === undefined — confirm this is the intended contract.
        source: arg.source
      }
    })
  });
})
<|start_filename|>src/crudeditor-lib/i18n/da.js<|end_filename|>
import exceptions from './exceptions/en';
/* eslint-disable max-len */
// Danish translations of the CRUD editor's common UI messages.
// NOTE(review): this bundle spreads exception messages imported from
// './exceptions/en' (English) — presumably a fallback because no Danish
// exceptions file exists; confirm this is intentional.
const common = {
  "common.CrudEditor.new.title": "Ny",
  "common.CrudEditor.create.header": "Opret {modelName}",
  "common.CrudEditor.edit.header": "Rediger {modelName}",
  "common.CrudEditor.show.header": "Vis {modelName}",
  "common.CrudEditor.duplicate.header": "Dupliker {modelName}",
  "common.CrudEditor.cancel.button": "Annuller",
  "common.CrudEditor.save.button": "Gem",
  "common.CrudEditor.saveAndNew.button": "Gem og Ny",
  "common.CrudEditor.saveAndNext.button": "Gem og Næste",
  "common.CrudEditor.search.header": "Søg {payload}",
  "common.CrudEditor.search.button": "Søg",
  "common.CrudEditor.reset.button": "Nulstil",
  "common.CrudEditor.create.button": "Opret",
  "common.CrudEditor.select.button": "Vælg",
  "common.CrudEditor.export.button": "Eksporter",
  "common.CrudEditor.close.button": "Luk",
  "common.CrudEditor.actions.tableHeader": "Handlinger",
  "common.CrudEditor.show.button": "Vis",
  "common.CrudEditor.edit.button": "Rediger",
  "common.CrudEditor.delete.button": "Slet",
  "common.CrudEditor.deleteSelected.button": "Slet valgt",
  "common.CrudEditor.duplicate.button": "Dupliker",
  "common.CrudEditor.refresh.button": "Opdater",
  "common.CrudEditor.revisions.button": "Revisioner",
  "common.CrudEditor.delete.confirmation": "Vil du virkelig slette denne post?",
  "common.CrudEditor.deleteSelected.confirmation": "Vil du virkelig slette de valgte poster?",
  "common.CrudEditor.noItemsSelected.alert": "Ingen poster valgt!",
  "common.CrudEditor.objectSaved.message": "Objekt oprettet.",
  "common.CrudEditor.objectUpdated.message": "Objekt opdateret.",
  "common.CrudEditor.objectSaveFailed.message": "Objekt kunne ikke gemmes.",
  "common.CrudEditor.objectDeleted.message": "Objekt slettet.",
  "common.CrudEditor.objectsDeleted.message": "Objekter {labels} slettet.",
  "common.CrudEditor.objectsDeleteIsNoAllowed.message": "Du kan ikke slette nogle objekter på grund af sikkerhedsbegrænsninger.",
  "common.CrudEditor.objectDeleteFailed.message": "Objektet kunne ikke slettes. Det er måske allerede i brug.",
  "common.CrudEditor.objectsDeleteFailed.message": "{count} objekter kunne ikke slettes. De er måske allerede i brug.",
  "common.CrudEditor.objectDuplicated.message": "Objekt kopieret.",
  "common.CrudEditor.noAssociationEntriesFound.message": "Der blev ikke fundet nogen poster. Du kan {1} en ny post.",
  "common.CrudEditor.message.ajax.loading": "Vent venligst...",
  "common.CrudEditor.search.result.label": "Søgeresultater",
  "common.CrudEditor.unsaved.confirmation": "Du har foretaget ændringer. Hvis du forlader denne side, kasseres disse ændringer.",
  "common.CrudEditor.search.resultsPerPage": "Resultater pr. side",
  "common.CrudEditor.search.all": "Alle",
  "common.CrudEditor.found.items.message": "{count} element(er) blev fundet",
  "common.CrudEditor.range.from": "fra",
  "common.CrudEditor.range.to": "til",
  "common.CrudEditor.confirm.action": "Bekræft",
  "common.CrudEditor.search.showSearchForm": "Vis søgning fra",
  "common.CrudEditor.search.hideSearchForm": "Skjul søgning fra",
  "common.CrudEditor.pagination.goToPage": "Gå"
}
/* eslint-enable max-len */
export default {
  ...common,
  ...exceptions
}
<|start_filename|>src/demo/models/second-model/components/CustomTabComponent/index.js<|end_filename|>
import React from 'react';
import PropTypes from 'prop-types';
// Demo component for a custom tab: dumps the active view name and the
// current instance's fields to illustrate the tabFormComponent API.
const CustomTabComponent = /* istanbul ignore next */ ({ viewName, instance }) => (
  <div>
    <h1>Custom Tab Component Example</h1>
    <h4><a href="https://github.com/OpusCapita/react-crudeditor#tabformcomponent" target="_blank">Click me for Documentation reference</a></h4>
    <h3>props.viewName: {viewName}</h3>
    <h3>props.instance:</h3>
    <ul>
      {
        Object.keys(instance).map(key => <li key={key}>{`${key}: ${JSON.stringify(instance[key])}`}</li>)
      }
    </ul>
  </div>
)
CustomTabComponent.propTypes = {
  viewName: PropTypes.string.isRequired,
  instance: PropTypes.object.isRequired
}
export default CustomTabComponent;
<|start_filename|>src/crudeditor-lib/i18n/exceptions/ru.js<|end_filename|>
// Russian messages for validation/exception errors raised by the CRUD editor.
// ''{name}'' placeholders are substituted by the i18n message formatter.
export default {
  "common.CrudEditor.default.blank.message": "Данное поле не может быть пустым",
  "common.CrudEditor.default.doesnt.match.message": "Значение не соответствует требуемому шаблону ''{pattern}''.",
  "common.CrudEditor.default.invalid.email.message": "Недействительный формат email.",
  "common.CrudEditor.default.invalid.max.message": "Значение превышает максимальное (''{max}'').",
  "common.CrudEditor.default.invalid.max.size.message": "Значение превышает максимальный размер (''{max}'').",
  "common.CrudEditor.default.invalid.min.message": "Значение меньше минимально допустимого (''{min}'').",
  "common.CrudEditor.default.invalid.min.size.message": "Значение меньше минимального допустимого размера (''{min}'').",
  "common.CrudEditor.default.invalid.validator.message": "Значение не проходит выборочную валидацию.",
  "common.CrudEditor.default.not.unique.message": "Значение должно быть уникальным.",
  "common.CrudEditor.default.null.message": "Свойство не может быть нулевым",
  "common.CrudEditor.default.invalid.integer.message": "Значение должно быть действительным числом.",
  "common.CrudEditor.default.invalid.url.message": "Значение должно быть действительным URL.",
  "common.CrudEditor.default.invalid.date.message": "Значение должно быть действительной датой.",
  "common.CrudEditor.default.errorOccurred.message": "Произошла ошибка"
}
<|start_filename|>src/crudeditor-lib/i18n/exceptions/sv.js<|end_filename|>
// Swedish messages for validation/exception errors raised by the CRUD editor.
// ''{name}'' placeholders are substituted by the i18n message formatter.
export default {
  "common.CrudEditor.default.doesnt.match.message": "Värdet matchar inte det obligatoriska mönstret \"{pattern}\"",
  "common.CrudEditor.default.invalid.email.message": "Inte ett giltigt e-postadressformat",
  "common.CrudEditor.default.invalid.max.message": "Värdet överskrider högsta värdet \"{max}\"",
  "common.CrudEditor.default.invalid.min.message": "Värdet underskrider lägsta värdet \"{min}\"",
  "common.CrudEditor.default.invalid.max.size.message": "Värdet överskrider den största storleken på \"{max}\"",
  "common.CrudEditor.default.invalid.min.size.message": "Värdet underskrider den minsta storleken på \"{min}\"",
  "common.CrudEditor.default.invalid.validator.message": "Värdet godkänns inte vid anpassad validering",
  "common.CrudEditor.default.blank.message": "Fältet får inte vara tomt",
  "common.CrudEditor.default.null.message": "Egenskapen kan inte vara null",
  "common.CrudEditor.default.not.unique.message": "Värdet måste vara unikt",
  "common.CrudEditor.default.invalid.url.message": "Värdet måste vara en giltig URL",
  "common.CrudEditor.default.invalid.date.message": "Värdet måste vara ett giltigt datum",
  "common.CrudEditor.default.invalid.integer.message": "Värdet måste vara ett giltigt tal",
  "common.CrudEditor.default.errorOccurred.message": "Fel har uppst\u00e5tt"
}
<|start_filename|>src/data-types-lib/fieldTypes/stringDateOnly/dateUiType.spec.js<|end_filename|>
import { expect } from 'chai';
import converter from './dateUiType';
// Round-trip tests for the stringDateOnly <-> Date converter:
// format() parses "YYYY-MM-DD" into a Date, parse() serializes it back.
describe('fieldTypes :: stringDateOnly <-> date', () => {
  it('should convert stringDateOnly to date', () => {
    const value = '2017-12-20';
    const result = converter.format(value);
    expect(result.valueOf()).to.equal(new Date(value).valueOf())
  });
  it('should convert date to stringDateOnly ("YYYY-MM-DD" String)', () => {
    const date = '2017-12-20';
    const value = new Date(date);
    const result = converter.parse(value);
    expect(result).to.equal(date)
  });
});
<|start_filename|>src/components/SearchResult/index.js<|end_filename|>
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';
import ResultListing from '../SearchResultListing';
import BulkOperationsPanel from '../SearchBulkOperationsPanel';
import PaginationPanel from '../SearchPaginationPanel';
import WithSpinner from '../Spinner/SpinnerOverlayHOC';
import './SearchResult.less';
/*
 * Search result section: the listing table with bulk-operations and
 * pagination panels in the footer, or a "nothing found" message when the
 * search returned no instances. Exported wrapped in a spinner overlay HOC.
 */
class SearchResult extends PureComponent {
  static propTypes = {
    model: PropTypes.shape({
      data: PropTypes.shape({
        totalCount: PropTypes.number
      }).isRequired
    }).isRequired
  }

  static contextTypes = {
    i18n: PropTypes.object
  };

  render() {
    const { model } = this.props;
    const { i18n } = this.context;

    return model.data.totalCount > 0 ? (
      <div className="crud--search-result">
        <div className="crud--search-result__table">
          <ResultListing model={model} />
        </div>
        <div className="crud--search-result__footer">
          <BulkOperationsPanel model={model} />
          <PaginationPanel model={model} />
        </div>
      </div>
    ) : (
      <div className="crud--search-result__no-items-found">
        <span>{i18n.getMessage('common.CrudEditor.found.items.message', { count: 0 })}</span>
      </div>
    );
  }
}

export default WithSpinner(SearchResult);
| OpusCapita/react-crudeditor |
<|start_filename|>index.js<|end_filename|>
'use strict';
const jsforce = require('jsforce');
const Downloader = require('./lib/downloader.js');
const ExcelBuilder = require('./lib/excelbuilder.js');
const Utils = require('./lib/utils.js');
module.exports = (config, logger) => {
// Check all mandatory config options
if (typeof config.username === 'undefined' || config.username === null ||
typeof config.password === 'undefined' || config.password === null) {
throw new Error('Not enough config options');
}
// Set default values
if (typeof config.loginUrl === 'undefined' || config.loginUrl === null) {
config.loginUrl = 'https://login.salesforce.com';
}
if (typeof config.apiVersion === 'undefined' || config.apiVersion === null) {
config.apiVersion = '48.0';
}
if (typeof config.output === 'undefined' || config.output === null) {
config.output = '.';
}
if (typeof config.debug === 'undefined' || config.debug === null) {
config.debug = false;
}
config.debug = (config.debug === "true" || config.debug === true);
if (typeof config.excludeManagedPackage === 'undefined' || config.excludeManagedPackage === null) {
config.excludeManagedPackage = true;
}
config.excludeManagedPackage = (config.excludeManagedPackage === "true" || config.excludeManagedPackage === true);
if (typeof config.projectName === 'undefined' || config.projectName === null) {
config.projectName = 'PROJECT';
}
if (typeof config.outputTime === 'undefined' || config.outputTime === null) {
config.outputTime = false;
}
if (typeof config.allCustomObjects === 'undefined' || config.allCustomObjects === null) {
config.allCustomObjects = true;
}
config.allCustomObjects = (config.allCustomObjects === "true" || config.allCustomObjects === true);
if (typeof config.lucidchart === 'undefined' || config.lucidchart === null) {
config.lucidchart = true;
}
config.lucidchart = (config.lucidchart === "true" || config.lucidchart === true);
if (typeof config.sobjects === 'undefined' || config.sobjects === null) {
config.objects = [
'Account',
'Contact',
'User'
];
} else {
// If an array is passed to the module
if (Array.isArray(config.sobjects)) {
config.objects = config.sobjects;
} else {
// Check and parse standObjects string for command-line
try {
config.objects = config.sobjects.split(',');
} catch (e) {
let errorMessage = 'Unable to parse sobjects parameter';
if (config.debug)
errorMessage += ' : ' + e;
throw new Error(errorMessage);
}
}
}
if (typeof config.techFieldPrefix === 'undefined' || config.techFieldPrefix === null) {
config.techFieldPrefix = 'TECH_';
}
if (typeof config.hideTechFields === 'undefined' || config.hideTechFields === null) {
config.hideTechFields = false;
}
if (typeof config.columns === 'undefined' || config.columns === null) {
config.columns = {
'ReadOnly': 5,
'Mandatory': 3,
'Name': 25,
'Description': 90,
'Helptext': 90,
'APIName': 25,
'Type': 27,
'Values': 45
};
}
var utils = new Utils();
// Clean folders that contain API files
if (config.cleanFolders) {
const statusRmDescribe = utils.rmDir(__dirname + '/files/describe', '.json', false);
const statusRmMetadata = utils.rmDir(__dirname + '/files/metadata', '.json', false);
logger('File folders cleaned');
}
// Main promise
const promise = new Promise((resolve, reject) => {
const conn = new jsforce.Connection({
loginUrl: config.loginUrl,
version: config.apiVersion
});
// Salesforce connection
conn.login(config.username, config.password).then(result => {
logger('Connected as ' + config.username);
if (config.debug) {
utils.log('Connected as ' + config.username, config);
}
if (config.allCustomObjects) {
conn.describeGlobal().then(res => {
for (let i = 0; i < res.sobjects.length; i++) {
let object = res.sobjects[i];
if (config.objects === undefined)
config.objects = [];
// If the sObject is a real custom object
if (object.custom && (object.name.indexOf('__c') !== -1)) {
if (config.debug)
utils.log('# excludeManagedPackage (' + config.excludeManagedPackage + '): ' + object.name, config);
if (config.excludeManagedPackage) {
if ((object.name.split('__').length - 1 < 2))
config.objects.push(object.name);
} else {
config.objects.push(object.name);
}
}
}
if (config.debug)
utils.log(JSON.stringify(config.objects), config);
const downloader = new Downloader(config, logger, conn);
const builder = new ExcelBuilder(config, logger);
// Download metadata files
downloader.execute().then(result => {
logger(result + ' downloaded');
// Generate the excel file
builder.generate().then(result => {
resolve();
});
})
});
} else {
if (config.objects.length > 0) {
const downloader = new Downloader(config, logger, conn);
const builder = new ExcelBuilder(config, logger);
// Download metadata files
downloader.execute().then(result => {
logger(result + ' downloaded');
// Generate the excel file
return builder.generate();
}).then(result => {
resolve();
});
}
}
}).catch(reject);
});
return promise;
};
<|start_filename|>lib/downloader.js<|end_filename|>
const fs = require('fs');
const path = require('path');
const bytes = require('bytes');
const Utils = require('./utils.js');
const FILE_DIR = '../files';
module.exports = class Downloader {
constructor(config, logger, conn) {
this.config = config;
this.logger = logger;
this.conn = conn;
this.utils = new Utils(logger);
}
downloadDescribe(sObject) {
const self = this;
return new Promise((resolve, reject) => {
self.conn.sobject(sObject).describe().then(meta => {
const filePath = path.join(__dirname, FILE_DIR, '/describe/', sObject + '.json');
fs.writeFileSync(filePath, JSON.stringify(meta.fields), 'utf-8');
const stats = fs.statSync(filePath);
resolve(stats.size);
}).catch(function(err) {
reject(sObject + ': ' + err);
if (self.config.debug) {
self.utils.log(err, self.config);
}
});
});
}
downloadMetadata(sobjectList) {
const self = this;
return new Promise((resolve, reject) => {
self.conn.metadata.read('CustomObject', sobjectList).then(metadata => {
let filePath = '';
if (sobjectList.length === 1) {
let fields = metadata.fields;
fields.sort(self.utils.sortByProperty('fullName'));
filePath = path.join(__dirname, FILE_DIR, '/metadata/', metadata.fullName + '.json');
fs.writeFileSync(filePath, JSON.stringify(metadata), 'utf-8');
} else {
for (let i = 0; i < metadata.length; i++) {
let fields = metadata[i].fields;
if ((!Array.isArray(fields) || (Array.isArray(fields) && (fields !== undefined || fields.length > 0)))) {
// Manage single object or an object array
if(fields != null && !Array.isArray(fields)){
let fieldsArray = new Array();
fieldsArray.push(fields);
fields = fieldsArray;
metadata[i].fields = fields;
}
filePath = path.join(__dirname, FILE_DIR, '/metadata/', metadata[i].fullName + '.json');
fs.writeFileSync(filePath, JSON.stringify(metadata[i]), 'utf-8');
} else {
self.config.objects.splice(self.config.objects.indexOf(metadata[i]), 1);
}
}
}
const stats = fs.statSync(filePath);
resolve(stats.size);
}).catch(function(err) {
reject(err);
if (self.config.debug) {
self.utils.log(err, self.config);
}
});
});
}
execute() {
const promise = new Promise((resolve, reject) => {
const self = this;
this.logger('Downloading...');
let downloadArray = new Array();
for (let object of self.config.objects) {
downloadArray.push(self.downloadDescribe(object));
}
let loop = ~~(self.config.objects.length / 10);
if (self.config.objects.length % 10 > 0)
loop++;
let j = 0;
for (let i = 0; i < loop; i++) {
let objectList = self.config.objects.slice(j, j + 10);
j += 10;
downloadArray.push(self.downloadMetadata(objectList));
}
Promise.all(
downloadArray
).then(results => {
let total = 0;
for (let fileSize of results) {
total += fileSize;
}
resolve(bytes.format(total, {
decimalPlaces: 2
}));
}).catch(err => {
if (self.config.debug) {
self.utils.log(err, self.config);
}
self.logger(err);
});
});
return promise;
}
}
<|start_filename|>lib/excelbuilder.js<|end_filename|>
const fs = require('fs');
const excel = require('excel4node');
const path = require('path');
const Utils = require('./utils.js');
const FILE_DIR = '../files';
const MAX_PICKLIST_VALUES = 2;
// Styles
// Module-level excel4node workbook shared by every generate() call; all
// worksheets and styles below hang off this single instance.
var workbook = new excel.Workbook();
var startGeneration;
// Base style applied to every data cell: 12pt, wrapped text, thin grey border.
var global = workbook.createStyle({
  font: {
    size: 12
  },
  alignment: {
    wrapText: true,
    vertical: 'center',
  },
  border: {
    left: {
      style: 'thin',
      color: 'b8b6b8'
    },
    right: {
      style: 'thin',
      color: 'b8b6b8'
    },
    top: {
      style: 'thin',
      color: 'b8b6b8'
    },
    bottom: {
      style: 'thin',
      color: 'b8b6b8'
    }
  }
});
// Row 1 banner: white bold text on blue.
var header = workbook.createStyle({
  font: {
    bold: true,
    color: 'FFFFFF'
  },
  alignment: {
    horizontal: 'center'
  },
  fill: {
    type: 'pattern',
    patternType: 'solid',
    fgColor: '019cdd'
  }
});
// Row 2 column titles: bold on light grey.
var subHeader = workbook.createStyle({
  font: {
    bold: true
  },
  fill: {
    type: 'pattern',
    patternType: 'solid',
    fgColor: 'F5F4F2' // HTML style hex value. optional. defaults to black
  }
});
// Section separators ("Standard Fields" / "Custom Fields"): blue on light blue.
var category = workbook.createStyle({
  font: {
    // bold: true,
    color: '60809f'
  },
  fill: {
    type: 'pattern',
    patternType: 'solid',
    fgColor: 'dbeaf7'
  }
});
// "Validation Rules" separator: dark red on salmon.
var validationCategory = workbook.createStyle({
  font: {
    // bold: true,
    color: '703026'
  },
  fill: {
    type: 'pattern',
    patternType: 'solid',
    fgColor: 'ffa293'
  }
});
var indentLeft = workbook.createStyle({
  alignment: {
    indent: 1
  }
});
var centerAlign = workbook.createStyle({
  alignment: {
    horizontal: 'center'
  }
});
var bold = workbook.createStyle({
  font: {
    bold: true
  }
});
var italic = workbook.createStyle({
  font: {
    italics: true
  }
});
// Used for the mandatory-field asterisk.
var redColor = workbook.createStyle({
  font: {
    color: 'FF0000'
  }
});
// Alternating row backgrounds (zebra striping).
var rowColor = workbook.createStyle({
  fill: {
    type: 'pattern',
    patternType: 'solid',
    fgColor: 'ffffff'
  }
});
var alternateRowColor = workbook.createStyle({
  fill: {
    type: 'pattern',
    patternType: 'solid',
    fgColor: 'f2f1f3'
  }
});
module.exports = class Downloader {
constructor(config, logger) {
this.config = config;
this.logger = logger;
this.utils = new Utils();
}
createHeader(worksheet) {
var columns = this.config.columns;
var columnsKeys = Object.keys(this.config.columns);
// Global sizes
worksheet.row(1).setHeight(40);
worksheet.row(2).setHeight(20);
if (columnsKeys.indexOf('ReadOnly') > -1)
worksheet.column(columnsKeys.indexOf('ReadOnly') + 1).setWidth(columns.ReadOnly);
if (columnsKeys.indexOf('Mandatory') > -1)
worksheet.column(columnsKeys.indexOf('Mandatory') + 1).setWidth(columns.Mandatory);
if (columnsKeys.indexOf('Name') > -1)
worksheet.column(columnsKeys.indexOf('Name') + 1).setWidth(columns.Name);
if (columnsKeys.indexOf('Description') > -1)
worksheet.column(columnsKeys.indexOf('Description') + 1).setWidth(columns.Description);
if (columnsKeys.indexOf('Helptext') > -1)
worksheet.column(columnsKeys.indexOf('Helptext') + 1).setWidth(columns.Helptext);
if (columnsKeys.indexOf('APIName') > -1)
worksheet.column(columnsKeys.indexOf('APIName') + 1).setWidth(columns.APIName);
if (columnsKeys.indexOf('Type') > -1)
worksheet.column(columnsKeys.indexOf('Type') + 1).setWidth(columns.Type);
if (columnsKeys.indexOf('Values') > -1)
worksheet.column(columnsKeys.indexOf('Values') + 1).setWidth(columns.Values);
// Build header and subheader
worksheet.cell(1, 1, 1, columnsKeys.length, true).string('SALESFORCE').style(global).style(header);
if (columnsKeys.indexOf('ReadOnly') > -1)
worksheet.cell(2, columnsKeys.indexOf('ReadOnly') + 1).string('R/O').style(global).style(subHeader).style(centerAlign);
if (columnsKeys.indexOf('Mandatory') > -1)
worksheet.cell(2, columnsKeys.indexOf('Mandatory') + 1).string('M').style(global).style(subHeader).style(centerAlign);
if (columnsKeys.indexOf('Name') > -1)
worksheet.cell(2, columnsKeys.indexOf('Name') + 1).string('Field Name').style(global).style(subHeader).style(indentLeft);
if (columnsKeys.indexOf('Description') > -1)
worksheet.cell(2, columnsKeys.indexOf('Description') + 1).string('Description').style(global).style(subHeader).style(indentLeft);
if (columnsKeys.indexOf('Helptext') > -1)
worksheet.cell(2, columnsKeys.indexOf('Helptext') + 1).string('Helptext').style(global).style(subHeader).style(indentLeft);
if (columnsKeys.indexOf('APIName') > -1)
worksheet.cell(2, columnsKeys.indexOf('APIName') + 1).string('API Name').style(global).style(subHeader).style(indentLeft);
if (columnsKeys.indexOf('Type') > -1)
worksheet.cell(2, columnsKeys.indexOf('Type') + 1).string('Type').style(global).style(subHeader).style(centerAlign);
if (columnsKeys.indexOf('Values') > -1)
worksheet.cell(2, columnsKeys.indexOf('Values') + 1).string('Values / Formula').style(global).style(subHeader).style(indentLeft);
return 3;
}
mapFields(fields) {
var fieldMap = {};
for (var i = 0; i < fields.length; i++) {
var field = fields[i];
fieldMap[field.fullName] = field;
}
return fieldMap;
}
writeFields(worksheet, fields, line, validationRules) {
var columns = this.config.columns;
var columnsKeys = Object.keys(this.config.columns);
var indexRow = 1;
// Foreach field
for (var j = 0; j < fields.length; j++) {
var field = fields[j];
if (!(this.config.hideTechFields && field.name.startsWith(this.config.techFieldPrefix))) {
var isCustom = field.custom;
if (!isCustom && j == 0) {
worksheet.cell(line, 1, line, columnsKeys.length, true).string('Standard Fields').style(global).style(category).style(indentLeft);
// Row height
worksheet.row(line).setHeight(25);
line++;
indexRow = 1;
}
var rowStyle = rowColor;
if (indexRow % 2 == 0) {
rowStyle = alternateRowColor;
}
if (columnsKeys.indexOf('ReadOnly') > -1)
worksheet.cell(line, columnsKeys.indexOf('ReadOnly') + 1).string(!field.updateable ? "✓" : '☐').style(global).style(centerAlign).style(rowStyle);
if (columnsKeys.indexOf('Mandatory') > -1)
worksheet.cell(line, columnsKeys.indexOf('Mandatory') + 1).string(!field.nillable && field.updateable && field.type != 'boolean' ? "*" : '').style(global).style(centerAlign).style(rowStyle).style(redColor);
if (columnsKeys.indexOf('Name') > -1)
worksheet.cell(line, columnsKeys.indexOf('Name') + 1).string(field.label != null ? field.label : field.name).style(global).style(bold).style(rowStyle).style(indentLeft);
if (columnsKeys.indexOf('Description') > -1)
worksheet.cell(line, columnsKeys.indexOf('Description') + 1).string(field.description != null ? field.description : '').style(global).style(rowStyle).style(indentLeft);
if (columnsKeys.indexOf('Helptext') > -1)
worksheet.cell(line, columnsKeys.indexOf('Helptext') + 1).string(field.inlineHelpText != null ? field.inlineHelpText : '').style(global).style(rowStyle).style(indentLeft);
if (columnsKeys.indexOf('APIName') > -1)
worksheet.cell(line, columnsKeys.indexOf('APIName') + 1).string(field.name).style(global).style(rowStyle).style(indentLeft);
// tooling
// worksheet.cell(line, columnsKeys.indexOf('APIName') + 4).string(field.LastModifiedDate != null ? field.LastModifiedDate : '').style(global).style(rowStyle).style(indentLeft);
// Type property
var type = this.utils.capitalize(field.type);
if (type == 'Int' || type == 'Double') {
type = 'Number';
}
if (type == 'Number' || type == 'Currency') {
var precision = parseInt(field.precision);
var scale = parseInt(field.scale);
var finalPrecision = precision - scale;
type = type + '(' + finalPrecision + ',' + field.scale + ')';
}
if (type == 'Boolean') {
type = 'Checkbox';
}
if (type == 'Reference' && field.referenceTo != null) {
type = 'Lookup(' + field.referenceTo + ')';
}
if (type == 'MasterDetail') {
type = 'Master-Detail(' + field.referenceTo + ')';
}
if ((type == 'Text' || type == 'Textarea' || type == 'String') && field.length != null) {
type = 'Text(' + field.length + ')';
}
if (field.calculatedFormula != null) {
type = 'Formula(' + field.type + ')';
}
if (!field.nillable) {
type += ' (Unique)';
}
if (field.externalId) {
type += '(External ID)';
}
if (columnsKeys.indexOf('Type') > -1)
worksheet.cell(line, columnsKeys.indexOf('Type') + 1).string(type).style(centerAlign).style(global).style(italic).style(rowStyle).style(indentLeft);
// Values property
var value = '';
if (type == 'Picklist' || type == 'MultiselectPicklist') {
if (field.globalPicklist != null) {
value = 'globalPicklist(' + field.globalPicklist + ')';
} else {
var valuesArray = field.picklistValues;
var k = 0;
while (k < valuesArray.length && k < MAX_PICKLIST_VALUES) {
value += valuesArray[k].value + '\n';
k++;
}
if (valuesArray.length > MAX_PICKLIST_VALUES * 2) {
value += '...\n';
}
if (valuesArray.length - MAX_PICKLIST_VALUES >= MAX_PICKLIST_VALUES) {
k = valuesArray.length - 1
while (k >= valuesArray.length - MAX_PICKLIST_VALUES) {
value += valuesArray[k].value + '\n';
k--;
}
}
if (valuesArray.length > MAX_PICKLIST_VALUES * 2) {
value += '(Total: ' + valuesArray.length + ' values)';
}
}
}
if (field.calculatedFormula != null) {
value = field.calculatedFormula;
}
if (columnsKeys.indexOf('Values') > -1)
worksheet.cell(line, columnsKeys.indexOf('Values') + 1).string(value).style(global).style(rowStyle).style(indentLeft);
if (((!field.label.length < 24) || (!field.name.length < 24)) && !value.includes('\n'))
worksheet.row(line).setHeight(25);
line++;
indexRow++;
if (!isCustom && j + 1 < fields.length && fields[j + 1].custom) {
worksheet.cell(line, 1, line, columnsKeys.length, true).string('Custom Fields').style(global).style(category).style(indentLeft);
// Row height
worksheet.row(line).setHeight(25);
line++;
indexRow = 1;
}
}
}
if (validationRules !== undefined) {
worksheet.cell(line, 1, line, columnsKeys.length, true).string('Validation Rules').style(global).style(validationCategory).style(indentLeft);
// Row height
worksheet.row(line).setHeight(25);
line++;
worksheet.cell(line, 1, line, 2, true).string('Active').style(global).style(rowStyle).style(subHeader).style(centerAlign);
worksheet.cell(line, 3).string('Name').style(global).style(rowStyle).style(subHeader).style(indentLeft);
worksheet.cell(line, 4).string('Description').style(global).style(rowStyle).style(subHeader).style(indentLeft);
worksheet.cell(line, 5).string('Error display field').style(global).style(rowStyle).style(subHeader).style(centerAlign);
worksheet.cell(line, 6).string('Error message').style(global).style(rowStyle).style(subHeader).style(indentLeft);
if (columnsKeys.indexOf('Helptext') > -1){
worksheet.cell(line, 7, line, 8, true).string('Condition formula').style(global).style(rowStyle).style(subHeader).style(indentLeft);
}else{
worksheet.cell(line, 7).string('Condition formula').style(global).style(rowStyle).style(subHeader).style(indentLeft);
}
worksheet.row(line).setHeight(20);
line++;
indexRow = 1;
if (Array.isArray(validationRules)) {
for (var k = 0; k < validationRules.length; k++) {
rowStyle = rowColor;
if (indexRow % 2 == 0) {
rowStyle = alternateRowColor;
}
worksheet.cell(line, 1, line, 2, true).string(validationRules[k].active === "true" ? "✓" : '☐').style(global).style(rowStyle).style(centerAlign);
worksheet.cell(line, 3).string(validationRules[k].fullName != null ? validationRules[k].fullName : '').style(global).style(rowStyle).style(indentLeft);
worksheet.cell(line, 4).string(validationRules[k].description != null ? validationRules[k].description : '').style(global).style(rowStyle).style(indentLeft);
worksheet.cell(line, 5).string(validationRules[k].errorDisplayField != null ? validationRules[k].errorDisplayField : '').style(global).style(rowStyle).style(centerAlign);
worksheet.cell(line, 6).string(validationRules[k].errorMessage != null ? validationRules[k].errorMessage : '').style(global).style(rowStyle).style(indentLeft);
if (columnsKeys.indexOf('Helptext') > -1){
worksheet.cell(line, 7, line, 8, true).string(validationRules[k].errorConditionFormula != null ? validationRules[k].errorConditionFormula : '').style(global).style(rowStyle).style(indentLeft);
}else{
worksheet.cell(line, 7).string(validationRules[k].errorConditionFormula != null ? validationRules[k].errorConditionFormula : '').style(global).style(rowStyle).style(indentLeft);
}
line++;
indexRow++;
}
} else {
rowStyle = rowColor;
if (indexRow % 2 == 0) {
rowStyle = alternateRowColor;
}
worksheet.cell(line, 1, line, 2, true).string(validationRules.active === "true" ? "✓" : '☐').style(global).style(rowStyle).style(centerAlign);
worksheet.cell(line, 3).string(validationRules.fullName != null ? validationRules.fullName : '').style(global).style(rowStyle).style(indentLeft);
worksheet.cell(line, 4).string(validationRules.description != null ? validationRules.description : '').style(global).style(rowStyle).style(indentLeft);
worksheet.cell(line, 5).string(validationRules.errorDisplayField != null ? validationRules.errorDisplayField : '').style(global).style(rowStyle).style(centerAlign);
worksheet.cell(line, 6).string(validationRules.errorMessage != null ? validationRules.errorMessage : '').style(global).style(rowStyle).style(indentLeft);
worksheet.cell(line, 7).string(validationRules.errorConditionFormula != null ? validationRules.errorConditionFormula : '').style(global).style(rowStyle).style(indentLeft);
line++;
indexRow++;
}
}
}
generateChart(objectName, fields){
var chart = '<html>' + '\n' + '<div>';
var cpt = 0;
// Foreach field
for (var j = 0; j < fields.length; j++) {
var field = fields[j];
// Type property
var type = this.utils.capitalize(field.type);
var add = false;
var attribute = null;
var fieldLength = field.length != null ? field.length : '';
var relationObject = '';
var attributeKey = '';
var attributeType = '';
if (type == 'Reference' && field.referenceTo != null) {
add = true;
attributeKey = 'FOREIGN KEY';
attributeType = 'LOOKUP';
relationObject = field.referenceTo;
}
if (type == 'MasterDetail') {
add = true;
attributeKey = 'FOREIGN KEY';
attributeType = 'MASTER DETAIL';
relationObject = field.referenceTo;
}
if (type === 'Id'){
add = true;
attributeKey = 'PRIMARY KEY';
attributeType = 'ID';
}
if(add){
var fieldLabel = field.label != null ? field.label : field.name;
var fieldName = field.name;
if(type === 'Id'){
chart += 'postgresql;ELSA;Salesforce;"' + objectName + ' (' + objectName + ')";"' + objectName + ' ID (' + fieldName + ')";' + cpt + ';"' + attributeType + '";' + fieldLength + ';"' + attributeKey + '";;' + '\n';
}else{
chart += 'postgresql;ELSA;Salesforce;"' + objectName + ' (' + objectName + ')";"' + fieldLabel + ' (' + fieldName + ')";' + cpt + ';"' + attributeType + '";' + fieldLength + ';"' + attributeKey + '";"Salesforce";"' + relationObject + ' (' + relationObject + ')";"' + relationObject + ' ID (Id)"' + '\n';
}
cpt++;
}
}
chart += '</div>' + '\n' + '</html>'
return chart;
}
generate() {
const promise = new Promise((resolve, reject) => {
this.logger('Generating...');
let sObjects = this.config.objects;
var chart = '';
for (var i = 0; i < sObjects.length; i++) {
var cur = i + 1;
var worksheet = workbook.addWorksheet(sObjects[i]);
var line = this.createHeader(worksheet);
var describePath = path.join(__dirname, FILE_DIR, '/describe/' + sObjects[i] + '.json');
var metadataPath = path.join(__dirname, FILE_DIR, '/metadata/' + sObjects[i] + '.json');
if (fs.existsSync(describePath)) {
var currentObjectFieldsDescribe = JSON.parse(fs.readFileSync(describePath));
if (fs.existsSync(metadataPath)) {
var currentObjectFieldsMetadata = JSON.parse(fs.readFileSync(metadataPath));
if(currentObjectFieldsMetadata.fields != null){
var fieldsMap = this.mapFields(currentObjectFieldsMetadata.fields);
}
}
for (var j = 0; j < currentObjectFieldsDescribe.length; j++) {
var field = currentObjectFieldsDescribe[j];
var fieldName = currentObjectFieldsDescribe[j].name;
if (fieldsMap[fieldName] != null) {
var correspondingField = fieldsMap[fieldName];
if (correspondingField.description != null)
currentObjectFieldsDescribe[j].description = correspondingField.description;
if (correspondingField.type === 'MasterDetail')
currentObjectFieldsDescribe[j].type = correspondingField.type;
}
}
}
currentObjectFieldsDescribe.sort(this.utils.sortByTwoProperty('custom', 'name'));
if (this.config.debug) {
this.utils.log('#' + sObjects[i] + '\n#Validation RULES ' + JSON.stringify(currentObjectFieldsMetadata.validationRules), this.config);
}
this.writeFields(worksheet, currentObjectFieldsDescribe, line, currentObjectFieldsMetadata.validationRules);
if(this.config.lucidchart)
chart += this.generateChart(sObjects[i], currentObjectFieldsDescribe);
}
if(this.config.lucidchart){
// Generate chart file (Lucidchart)
this.logger('Saving lucidchart file...');
const filePath = path.join(this.config.output, 'lucidchart.txt');
fs.writeFileSync(filePath, chart, 'utf-8');
this.logger('Lucidchart.txt file successfully saved!');
}
// Generate output Excel file
var currentDate = new Date(Date.now());
var currentDateString = currentDate.toISOString();
if(this.config.outputTime){
currentDateString = currentDateString.replace('T', '_').replace('Z', '').replace(/:/g,'_').replace('.','_');
}else{
currentDateString = currentDateString.substring(0, currentDateString.indexOf('T'));
}
var fileName = this.config.projectName + '_Data_Dictionary_' + currentDateString + '.xlsx'
var outputFile = path.join(this.config.output, fileName);
this.logger('Saving ' + fileName + '...');
workbook.write(outputFile);
this.logger(fileName + ' successfully saved!');
resolve();
});
return promise;
}
}
| gavignon/sfdc-generate-data-dictionary |
<|start_filename|>Syn.Speech/Decoder/Search/PartitionActiveListFactory.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Search
{
/// <summary>
/// A factory for PartitionActiveList instances, configured with an absolute
/// beam width (max tokens kept after purge) and a relative beam width.
/// </summary>
public class PartitionActiveListFactory : ActiveListFactory
{
    /// <summary>
    /// Creates the factory with explicit beam settings.
    /// </summary>
    /// <param name="absoluteBeamWidth">maximum tokens retained by purge(); 0 or less disables absolute pruning</param>
    /// <param name="relativeBeamWidth">relative beam width (converted to log domain by the base class)</param>
    public PartitionActiveListFactory(int absoluteBeamWidth, double relativeBeamWidth)
        :base(absoluteBeamWidth, relativeBeamWidth)
    {
    }

    /// <summary>Default constructor for property-sheet based configuration.</summary>
    public PartitionActiveListFactory()
    {
    }

    /// <summary>
    /// Re-reads configuration from the property sheet (delegates to the base class).
    /// NOTE(review): 'new' hides rather than overrides the base method, so calls
    /// through a base-class reference bypass this — confirm this is intended.
    /// </summary>
    public new void newProperties(PropertySheet ps)
    {
        base.newProperties(ps);
    }

    /// <summary>Creates a new, empty PartitionActiveList using this factory's beam settings.</summary>
    public override ActiveList newInstance()
    {
        return new PartitionActiveList(absoluteBeamWidth, logRelativeBeamWidth,this);
    }
}
/**
/// An active list that does absolute beam with pruning by partitioning the
/// token list based on absolute beam width, instead of sorting the token
/// list, and then chopping the list up with the absolute beam width. The
/// expected run time of this partitioning algorithm is O(n), instead of O(n log n)
/// for merge sort.
/// <p/>
/// This class is not thread safe and should only be used by a single thread.
/// <p/>
/// Note that all scores are maintained in the LogMath log base.
*/
class PartitionActiveList : ActiveList
{
    // Number of valid entries in tokenList; slots beyond _size are unused.
    private int _size;
    private int absoluteBeamWidth;
    private float logRelativeBeamWidth;
    private Token bestToken;
    // Backing store; grown on demand by doubleCapacity().
    private Token[] tokenList;
    private Partitioner partitioner = new Partitioner();
    PartitionActiveListFactory parent = null;

    /// <summary>
    /// Creates an empty active list.
    /// </summary>
    /// <param name="absoluteBeamWidth">max tokens kept by purge(); 0 or less means unbounded</param>
    /// <param name="logRelativeBeamWidth">relative beam width in the LogMath log domain</param>
    /// <param name="_parent">factory used by newInstance()</param>
    public PartitionActiveList(int absoluteBeamWidth,
            float logRelativeBeamWidth, PartitionActiveListFactory _parent)
    {
        this.absoluteBeamWidth = absoluteBeamWidth;
        this.logRelativeBeamWidth = logRelativeBeamWidth;
        int listSize = 2000;
        if (absoluteBeamWidth > 0)
        {
            // At least one slot, otherwise doubleCapacity() could never grow
            // a zero-length array (absoluteBeamWidth < 3 yielded listSize 0).
            listSize = Math.Max(1, absoluteBeamWidth / 3);
        }
        this.tokenList = new Token[listSize];
        parent = _parent;
    }

    /// <summary>
    /// Adds the given token to the list, tracking the best-scoring token.
    /// </summary>
    /// <param name="token">the token to add</param>
    public override void add(Token token)
    {
        if (_size < tokenList.Length)
        {
            tokenList[_size] = token;
            _size++;
        }
        else
        {
            // token array too small, double the capacity and retry
            doubleCapacity();
            add(token);
        }
        if (bestToken == null || token.getScore() > bestToken.getScore())
        {
            bestToken = token;
        }
    }

    /// <summary>Doubles the capacity of the Token array.</summary>
    private void doubleCapacity()
    {
        // BUG FIX: the old code copied the array onto itself with twice its
        // length (Array.Copy(tokenList, tokenList, Length * 2)), which throws
        // and never resizes — allocate a larger array and copy into it.
        var expanded = new Token[tokenList.Length * 2];
        Array.Copy(tokenList, expanded, tokenList.Length);
        tokenList = expanded;
    }

    /// <summary>
    /// Purges excess members: applies the absolute beam by partitioning the
    /// token array (expected O(n)) instead of sorting it.
    /// </summary>
    /// <returns>this list, possibly truncated to absoluteBeamWidth tokens</returns>
    public override ActiveList purge()
    {
        // An absolute beam of zero (or less) means no constraint on the list
        // size; only relative-beam pruning applies, so no partitioning needed.
        if (absoluteBeamWidth > 0)
        {
            if (_size > absoluteBeamWidth)
            {
                _size = partitioner.partition(tokenList, _size,
                        absoluteBeamWidth) + 1;
            }
        }
        return this;
    }

    /// <summary>
    /// Gets the beam threshold based upon the best scoring token.
    /// </summary>
    public override float getBeamThreshold()
    {
        return getBestScore() + logRelativeBeamWidth;
    }

    /// <summary>
    /// Gets the best score in the list (-float.MaxValue when empty).
    /// </summary>
    public override float getBestScore()
    {
        float bestScore = -float.MaxValue;
        if (bestToken != null)
        {
            bestScore = bestToken.getScore();
        }
        return bestScore;
    }

    /// <summary>Sets the best scoring token for this active list.</summary>
    public override void setBestToken(Token token)
    {
        bestToken = token;
    }

    /// <summary>Gets the best scoring token for this active list.</summary>
    public override Token getBestToken()
    {
        return bestToken;
    }

    /// <summary>
    /// Retrieves an enumerator over the tokens in this list.
    /// </summary>
    public IEnumerator<Token> iterator()
    {
        // Only the filled portion; trailing slots are unused (null) capacity.
        return tokenList.Take(_size).ToList().GetEnumerator();
    }

    /// <summary>
    /// Gets the list of all tokens.
    /// </summary>
    public override List<Token> getTokens()
    {
        // BUG FIX: previously returned the raw backing array including unused
        // null slots beyond _size.
        return tokenList.Take(_size).ToList();
    }

    /// <summary>Returns the number of tokens on this active list.</summary>
    public override int size()
    {
        return _size;
    }

    /// <summary>Creates a fresh empty list via the owning factory.</summary>
    public override ActiveList newInstance()
    {
        return parent.newInstance();
    }
}
}
<|start_filename|>Syn.Speech/Helper/AbstractMap.cs<|end_filename|>
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
namespace Syn.Speech.Helper
{
/// <summary>
/// Java-style AbstractMap adapter: subclasses supply EntrySet() and all
/// dictionary operations are implemented on top of it. Mutating operations
/// require EntrySet() to return a mutable collection.
/// </summary>
public abstract class AbstractMap<T, U> : IEnumerable, ICollection<KeyValuePair<T, U>>, IEnumerable<KeyValuePair<T, U>>, IDictionary<T, U>
{
    protected AbstractMap()
    {
    }

    public virtual void Clear()
    {
        EntrySet().Clear();
    }

    /// <summary>True if some entry's key equals <paramref name="name"/> (cast to T).</summary>
    public virtual bool ContainsKey(object name)
    {
        return EntrySet().Any(p => p.Key.Equals((T)name));
    }

    /// <summary>The backing entry collection; the single method subclasses must provide.</summary>
    public abstract ICollection<KeyValuePair<T, U>> EntrySet();

    /// <summary>Returns the value mapped to <paramref name="key"/>, or default(U) when absent.</summary>
    public virtual U Get(object key)
    {
        return EntrySet().Where(p => p.Key.Equals(key)).Select(p => p.Value).FirstOrDefault();
    }

    protected virtual IEnumerator<KeyValuePair<T, U>> InternalGetEnumerator()
    {
        return EntrySet().GetEnumerator();
    }

    public virtual bool IsEmpty()
    {
        return !EntrySet().Any();
    }

    /// <summary>Associates key with value; optional operation (throws by default).</summary>
    public virtual U Put(T key, U value)
    {
        throw new NotSupportedException();
    }

    /// <summary>
    /// Removes the entry whose key equals <paramref name="key"/> and returns its
    /// value, or default(U) when no such entry exists.
    /// </summary>
    public virtual U Remove(object key)
    {
        // BUG FIX: the old implementation cast EntrySet() to Iterator&lt;U&gt;
        // (throwing NotSupportedException for ordinary collections) and then
        // compared stored *values* against the key, so it never removed by key.
        var entries = EntrySet();
        foreach (var pair in entries)
        {
            if (pair.Key.Equals(key))
            {
                // Safe: we stop enumerating immediately after the removal.
                entries.Remove(pair);
                return pair.Value;
            }
        }
        return default(U);
    }

    void ICollection<KeyValuePair<T, U>>.Add(KeyValuePair<T, U> item)
    {
        Put(item.Key, item.Value);
    }

    bool ICollection<KeyValuePair<T, U>>.Contains(KeyValuePair<T, U> item)
    {
        throw new NotImplementedException();
    }

    void ICollection<KeyValuePair<T, U>>.CopyTo(KeyValuePair<T, U>[] array, int arrayIndex)
    {
        EntrySet().CopyTo(array, arrayIndex);
    }

    bool ICollection<KeyValuePair<T, U>>.Remove(KeyValuePair<T, U> item)
    {
        Remove(item.Key);
        return true;
    }

    void IDictionary<T, U>.Add(T key, U value)
    {
        Put(key, value);
    }

    bool IDictionary<T, U>.ContainsKey(T key)
    {
        return ContainsKey(key);
    }

    bool IDictionary<T, U>.Remove(T key)
    {
        if (ContainsKey(key))
        {
            Remove(key);
            return true;
        }
        return false;
    }

    bool IDictionary<T, U>.TryGetValue(T key, out U value)
    {
        if (ContainsKey(key))
        {
            value = Get(key);
            return true;
        }
        value = default(U);
        return false;
    }

    IEnumerator<KeyValuePair<T, U>> IEnumerable<KeyValuePair<T, U>>.GetEnumerator()
    {
        return InternalGetEnumerator();
    }

    IEnumerator IEnumerable.GetEnumerator()
    {
        return InternalGetEnumerator();
    }

    public virtual int Count
    {
        get { return EntrySet().Count; }
    }

    public U this[T key]
    {
        get { return Get(key); }
        set { Put(key, value); }
    }

    public virtual IEnumerable<T> Keys
    {
        get { return EntrySet().Select(p => p.Key); }
    }

    int ICollection<KeyValuePair<T, U>>.Count
    {
        get { return Count; }
    }

    bool ICollection<KeyValuePair<T, U>>.IsReadOnly
    {
        get { return false; }
    }

    ICollection<T> IDictionary<T, U>.Keys
    {
        get { return Keys.ToList<T>(); }
    }

    ICollection<U> IDictionary<T, U>.Values
    {
        get { return (ICollection<U>)Values.ToList<U>(); }
    }

    public virtual IEnumerable<U> Values
    {
        get { return EntrySet().Select(p => p.Value); }
    }
}
}
<|start_filename|>Syn.Speech/Alignment/Utterance.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Runtime.InteropServices;
//PATROLLED
namespace Syn.Speech.Alignment
{
/// <summary>
/// Holds all the data for an utterance to be spoken. It is incrementally
/// modified by various UtteranceProcessor implementations. An utterance
/// contains a set of Features (essential a set of properties) and a set of
/// Relations. A Relation is an ordered set of Item graphs. The utterance
/// contains a set of features and implements FeatureSet so that applications
/// can set/get features directly from the utterance. If a feature query is not
/// found in the utterance feature set, the query is forwarded to the FeatureSet
/// of the voice associated with the utterance.
/// </summary>
public class Utterance
{
    // Per-utterance features (set/get via the convenience methods below).
    private readonly FeatureSet features;
    // Named Relations attached to this utterance, stored as features.
    private readonly FeatureSet relations;

    /// <summary>
    /// Creates an utterance with the given set of tokenized text.
    /// </summary>
    /// <param name="tokenizer">The list of tokens for this utterance.</param>
    public Utterance(CharTokenizer tokenizer)
    {
        features = new FeatureSet();
        relations = new FeatureSet();
        setTokenList(tokenizer);
    }

    /// <summary>
    /// Creates a new relation with the given name and adds it to this utterance.
    /// </summary>
    /// <param name="name">The name of the new relation.</param>
    /// <returns>The newly created relation.</returns>
    public virtual Relation createRelation(string name)
    {
        Relation relation = new Relation(name, this);
        relations.setObject(name, relation);
        return relation;
    }

    /// <summary>
    /// Retrieves a relation from this utterance.
    /// </summary>
    /// <param name="name">The name of the Relation.</param>
    /// <returns>The relation, or null if the relation is not found.</returns>
    public virtual Relation getRelation(string name)
    {
        return (Relation)relations.getObject(name);
    }

    /// <summary>
    /// Determines if this utterance contains a relation with the given name.
    /// </summary>
    /// <param name="name">The name of the relation of interest.</param>
    public virtual bool hasRelation(string name)
    {
        return relations.isPresent(name);
    }

    /// <summary>
    /// Removes the named feature from this set of features.
    /// </summary>
    /// <param name="name">The name of the feature of interest.</param>
    public virtual void remove(string name)
    {
        features.remove(name);
    }

    /// <summary>
    /// Convenience method that sets the named feature as an int.
    /// </summary>
    public virtual void setInt(string name, int value)
    {
        features.setInt(name, value);
    }

    /// <summary>
    /// Convenience method that sets the named feature as a float.
    /// </summary>
    public virtual void setFloat(string name, float value)
    {
        features.setFloat(name, value);
    }

    /// <summary>
    /// Convenience method that sets the named feature as a String.
    /// </summary>
    public virtual void setString(string name, string value)
    {
        features.setString(name, value);
    }

    /// <summary>
    /// Sets the named feature.
    /// </summary>
    public virtual void setObject(string name, object value)
    {
        features.setObject(name, value);
    }

    /// <summary>
    /// Returns the Item in the given Relation associated with the given time.
    /// </summary>
    /// <param name="relation">The name of the relation (Relation.WORD or Relation.TOKEN).</param>
    /// <param name="time">The time.</param>
    /// <exception cref="ArgumentException">If the relation is neither WORD nor TOKEN.</exception>
    /// <exception cref="InvalidOperationException">If the utterance has no Segment relation
    /// (e.g. segment durations have not been calculated).</exception>
    public virtual Item getItem(string relation, float time)
    {
        string pathName = null;
        if (relation.Equals(Relation.WORD))
        {
            pathName = "R:SylStructure.parent.parent.R:Word";
        }
        else if (relation.Equals(Relation.TOKEN))
        {
            pathName = "R:SylStructure.parent.parent.R:Token.parent";
        }
        else
        {
            throw new ArgumentException(
                "Utterance.getItem(): relation cannot be " + relation);
        }

        // BUGFIX: segmentRelation was previously declared null and never
        // assigned, so the static getItem() below always dereferenced null.
        // Mirror the FreeTTS original: look up the Segment relation first.
        Relation segmentRelation = getRelation(Relation.SEGMENT);
        if (segmentRelation == null)
        {
            throw new InvalidOperationException(
                "Utterance has no Segment relation");
        }

        PathExtractor path = new PathExtractor(pathName, false);
        // get the Item in the Segment Relation with the given time
        Item segmentItem = getItem(segmentRelation, time);
        if (segmentItem != null)
        {
            return path.findItem(segmentItem);
        }
        else
        {
            return null;
        }
    }

    /// <summary>
    /// Finds the segment Item covering the given time, searching from
    /// whichever end of the relation is closer.
    /// </summary>
    private static Item getItem(Relation segmentRelation, float time)
    {
        Item lastSegment = segmentRelation.getTail();
        // If given time is closer to the front than the end, search from
        // the front; otherwise, start search from end
        // this might not be the best strategy though.
        float lastSegmentEndTime = getSegmentEnd(lastSegment);
        if (time < 0 || lastSegmentEndTime < time)
        {
            return null;
        }
        else if (lastSegmentEndTime - time > time)
        {
            return findFromFront(segmentRelation, time);
        }
        else
        {
            return findFromEnd(segmentRelation, time);
        }
    }

    // Walk backwards past all segments ending after 'time', then step one
    // forward (unless we never moved off the tail).
    private static Item findFromEnd(Relation segmentRelation, float time)
    {
        Item item = segmentRelation.getTail();
        while (item != null && getSegmentEnd(item) > time)
        {
            item = item.getPrevious();
        }
        if (item != segmentRelation.getTail())
        {
            item = item.getNext();
        }
        return item;
    }

    // Walk forwards until the first segment whose end time covers 'time'.
    private static Item findFromFront([In] Relation segmentRelation, [In] float time)
    {
        Item item = segmentRelation.getHead();
        while (item != null && time > getSegmentEnd(item))
        {
            item = item.getNext();
        }
        return item;
    }

    // Reads the "end" feature of a segment item (set when durations are
    // calculated).
    private static float getSegmentEnd(Item segment)
    {
        FeatureSet segmentFeatureSet = segment.getFeatures();
        return segmentFeatureSet.getFloat("end");
    }

    /// <summary>
    /// Sets the token list for this utterance. Note that this could be
    /// optimized by turning the token list directly into the token relation.
    /// </summary>
    /// <param name="tokenizer">The tokenList.</param>
    private void setTokenList(IEnumerator<Token> tokenizer)
    {
        Relation relation = createRelation(Relation.TOKEN);
        while (tokenizer.MoveNext())
        {
            Token token = tokenizer.Current;
            string tokenWord = token.getWord();
            if (!string.IsNullOrEmpty(tokenWord))
            {
                Item item = relation.appendItem();
                FeatureSet featureSet = item.getFeatures();
                featureSet.setString("name", tokenWord);
                featureSet.setString("whitespace", token.getWhitespace());
                featureSet.setString("prepunctuation",
                    token.getPrepunctuation());
                featureSet.setString("punc", token.getPostpunctuation());
                featureSet.setString("file_pos", token.getPosition().ToString(CultureInfo.InvariantCulture));
                featureSet.setString("line_number", token.getLineNumber().ToString(CultureInfo.InvariantCulture));
            }
        }
    }
}
}
<|start_filename|>Syn.Speech/Decoder/Search/ActiveList.cs<|end_filename|>
using System;
using System.Collections.Generic;
using Syn.Speech.Common;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Search
{
/// <summary>
/// An active list is maintained as a sorted list
/// <p/>
/// Note that all scores are represented in LogMath logbase
/// </summary>
///
public abstract class ActiveList : SortedSet<Token>//,IActiveList
{
    /// <summary>
    /// Property that sets the desired (or target) size for this active list.
    /// This is sometimes referred to as the beam size.
    /// </summary>
    [S4Integer(defaultValue = 2000)]
    public static String PROP_ABSOLUTE_BEAM_WIDTH = "absoluteBeamWidth";

    /// <summary>
    /// Property that sets the minimum score relative to the maximum score in
    /// the list for pruning. Tokens with a score less than
    /// relativeBeamWidth * maximumScore will be pruned from the list.
    /// </summary>
    [S4Double(defaultValue = 0.0)]
    public static String PROP_RELATIVE_BEAM_WIDTH = "relativeBeamWidth";

    /// <summary>
    /// Property that indicates whether or not the active list will implement
    /// 'strict pruning'. When strict pruning is enabled, the active list will
    /// not remove tokens from the active list until they have been completely
    /// scored. If strict pruning is not enabled, tokens can be removed from
    /// the active list based upon their entry scores.
    /// NOTE(review): the prose said the default is false, but the attribute
    /// declares defaultValue = true — confirm which is intended.
    /// </summary>
    [S4Boolean(defaultValue = true)]
    public static String PROP_STRICT_PRUNING = "strictPruning";

    /// <summary>
    /// Adds the given token to the list, keeping track of the lowest scoring token.
    /// </summary>
    /// <param name="token">token the token to add</param>
    abstract public void add(Token token);

    /// <summary>
    /// Replaces an old token with a new token.
    /// </summary>
    /// <param name="oldToken">the token to replace (or null in which case, replace works like add).</param>
    /// <param name="newToken">the new token to be placed in the list.</param>
    // abstract public void replace(Token oldToken, Token newToken);

    /// <summary>
    /// Purges the active list of excess members returning a (potentially new) active list.
    /// </summary>
    /// <returns>a purged active list</returns>
    abstract public ActiveList purge();

    /// <summary>
    /// Returns the size of this list.
    /// </summary>
    /// <returns>the size</returns>
    abstract public int size();

    /// <summary>
    /// Gets the list of all tokens.
    /// </summary>
    /// <returns>set of tokens</returns>
    abstract public List<Token> getTokens();

    /// <summary>
    /// Gets the beam threshold based upon the best scoring token.
    /// </summary>
    /// <returns>the beam threshold</returns>
    abstract public float getBeamThreshold();

    /// <summary>
    /// Gets the best score in the list.
    /// </summary>
    /// <returns>the best score</returns>
    abstract public float getBestScore();

    /// <summary>
    /// Sets the best scoring token for this active list.
    /// </summary>
    /// <param name="token">token the best scoring token</param>
    abstract public void setBestToken(Token token);

    /// <summary>
    /// Gets the best scoring token for this active list.
    /// </summary>
    /// <returns>the best scoring token</returns>
    abstract public Token getBestToken();

    /// <summary>
    /// Creates a new empty version of this active list with the same general properties.
    /// </summary>
    /// <returns>a new active list.</returns>
    abstract public ActiveList newInstance();
}
}
<|start_filename|>Syn.Speech/Alignment/UsEnglishWordExpander.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Runtime.InteropServices;
using Syn.Speech.Helper;
//PATROLLED
using Syn.Speech.Properties;
using Syn.Speech.Recognizers;
namespace Syn.Speech.Alignment
{
/// <summary>
/// Converts the Tokens (in US English words) in an Utterance into a list of
/// words. It puts the produced list back into the Utterance. Usually, the
/// tokens that gets expanded are numbers like "23" (to "twenty" "three").
/// <p/>
/// /// It translates the following code from flite: <br/>
/// <code>
/// lang/usenglish/us_text.c
/// </code>
/// </summary>
public class UsEnglishWordExpander : Object, IWordExpander
{
// Regular expressions used to classify token strings. All pattern fields are
// assigned exactly once, in the static constructor, from the canonical
// UsEnglish regex constants. (The previous inline initializers duplicated —
// and in some cases diverged from — those assignments; since readonly static
// fields initialized inline are overwritten by the static constructor body,
// the inline values were dead and have been removed.)
private static readonly Pattern alphabetPattern;
private static readonly Pattern commaIntPattern;
private static readonly Pattern digits2DashPattern;
private static readonly Pattern digitsPattern;
private static readonly Pattern digitsSlashDigitsPattern;
private static readonly Pattern dottedAbbrevPattern;
private static readonly Pattern doublePattern;
private static readonly Pattern drStPattern;
private static readonly Pattern fourDigitsPattern;
private static readonly Pattern illionPattern;
private static readonly Pattern numberTimePattern;
private static readonly Pattern numessPattern;
private static readonly Pattern ordinalPattern;
private static readonly Pattern romanNumbersPattern;
private static readonly Pattern sevenPhoneNumberPattern;
private static readonly Pattern threeDigitsPattern;
private static readonly Pattern usMoneyPattern;

// Maps a lowercase word to one of the category constants below; used to
// decide whether a Roman numeral is king-like ("Louis XIV") or section-like
// ("Chapter IV").
private static readonly HashMap<string, string> kingSectionLikeMap = new HashMap<string, string>();
private const string KING_NAMES = "kingNames";
private const string KING_TITLES = "kingTitles";
private const string SECTION_TYPES = "sectionTypes";

// Maps a US state abbreviation to its full expansion entry (see usStates).
private static readonly HashMap<string, string[]> usStatesMap = new HashMap<string, string[]>();

// Mutable per-expansion state: the word relation being built and the token
// currently being processed.
private WordRelation wordRelation;
private Item tokenItem;
// CART decision tree that classifies digit strings (ordinal/digits/year/number).
private readonly DecisionTree cart;

// King-like words
private static readonly string[] kingNames = {"louis", "henry", "charles",
        "philip", "george", "edward", "pius", "william", "richard",
        "ptolemy", "john", "paul", "peter", "nicholas", "frederick",
        "james", "alfonso", "ivan", "napoleon", "leo", "gregory",
        "catherine", "alexandria", "pierre", "elizabeth", "mary", "elmo",
        "erasmus"};

private static readonly string[] kingTitles = {"king", "queen", "pope",
        "duke", "tsar", "emperor", "shah", "caesar", "duchess", "tsarina",
        "empress", "baron", "baroness", "sultan", "count", "countess"};

// Section-like words
private static readonly string[] sectionTypes = {"section", "chapter",
        "part", "phrase", "verse", "scene", "act", "book", "volume",
        "chap", "war", "apollo", "trek", "fortran"};

// Finite-state machines deciding whether a letter string is pronounceable.
private readonly PronounceableFSM prefixFSM;
private readonly PronounceableFSM suffixFSM;

// List of US states abbreviations and their full names.
// Entry layout: { abbreviation, ""|"ambiguous", word... }
private static readonly string[][] usStates = {
        new[] {"AL", "ambiguous", "alabama"}, new[] {"Al", "ambiguous", "alabama"},
        new[] {"Ala", "", "alabama"}, new[] {"AK", "", "alaska"}, new[] {"Ak", "", "alaska"},
        new[] {"AZ", "", "arizona"}, new[] {"Az", "", "arizona"}, new[] {"CA", "", "california"},
        new[] {"Ca", "", "california"}, new[] {"Cal", "ambiguous", "california"},
        new[] {"Calif", "", "california"}, new[] {"CO", "ambiguous", "colorado"}, new[] {"Co", "ambiguous", "colorado"},
        new[] {"Colo", "", "colorado"}, new[] {"DC", "", "d", "c"}, new[] {"DE", "", "delaware"},
        new[] {"De", "ambiguous", "delaware"}, new[] {"Del", "ambiguous", "delaware"},
        new[] {"FL", "", "florida"}, new[] {"Fl", "ambiguous", "florida"}, new[] {"Fla", "", "florida"},
        new[] {"GA", "", "georgia"}, new[] {"Ga", "", "georgia"}, new[] {"HI", "ambiguous", "hawaii"},
        new[] {"Hi", "ambiguous", "hawaii"}, new[] {"IA", "", "iowa"}, new[] {"Ia", "ambiguous", "iowa"},
        new[] {"IN", "ambiguous", "indiana"}, new[] {"In", "ambiguous", "indiana"}, new[] {"Ind", "ambiguous", "indiana"},
        new[] {"ID", "ambiguous", "idaho"}, new[] {"IL", "ambiguous", "illinois"},
        new[] {"Il", "ambiguous", "illinois"}, new[] {"ILL", "ambiguous", "illinois"},
        new[] {"KS", "", "kansas"}, new[] {"Ks", "", "kansas"},
        new[] {"Kans", "", "kansas"}, new[] {"KY", "ambiguous", "kentucky"},
        new[] {"Ky", "ambiguous", "kentucky"}, new[] {"LA", "ambiguous", "louisiana"},
        new[] {"La", "ambiguous", "louisiana"},
        new[] {"Lou", "ambiguous", "louisiana"},
        new[] {"Lous", "ambiguous", "louisiana"},
        new[] {"MA", "ambiguous", "massachusetts"},
        new[] {"Mass", "ambiguous", "massachusetts"},
        new[] {"Ma", "ambiguous", "massachusetts"}, new[] {"MD", "ambiguous", "maryland"},
        new[] {"Md", "ambiguous", "maryland"}, new[] {"ME", "ambiguous", "maine"},
        new[] {"Me", "ambiguous", "maine"}, new[] {"MI", "", "michigan"}, new[] {"Mi", "ambiguous", "michigan"},
        new[] {"Mich", "ambiguous", "michigan"}, new[] {"MN", "ambiguous", "minnestota"},
        new[] {"Minn", "ambiguous", "minnestota"}, new[] {"MS", "ambiguous", "mississippi"},
        new[] {"Miss", "ambiguous", "mississippi"}, new[] {"MT", "ambiguous", "montanna"},
        new[] {"Mt", "ambiguous", "montanna"}, new[] {"MO", "ambiguous", "missouri"},
        new[] {"Mo", "ambiguous", "missouri"}, new[] {"NC", "ambiguous", "north", "carolina"},
        new[] {"ND", "ambiguous", "north", "dakota"}, new[] {"NE", "ambiguous", "nebraska"},
        new[] {"Ne", "ambiguous", "nebraska"}, new[] {"Neb", "ambiguous", "nebraska"},
        new[] {"NH", "ambiguous", "new", "hampshire"}, new[] {"NV", "", "nevada"},
        new[] {"Nev", "", "nevada"}, new[] {"NY", "", "new", "york"},
        new[] {"OH", "ambiguous", "ohio"}, new[] {"OK", "ambiguous", "oklahoma"},
        new[] {"Okla", "", "oklahoma"}, new[] {"OR", "ambiguous", "oregon"},
        new[] {"Or", "ambiguous", "oregon"}, new[] {"Ore", "ambiguous", "oregon"},
        new[] {"PA", "ambiguous", "pennsylvania"}, new[] {"Pa", "ambiguous", "pennsylvania"},
        new[] {"Penn", "ambiguous", "pennsylvania"}, new[] {"RI", "ambiguous", "rhode", "island"},
        new[] {"SC", "ambiguous", "south", "carlolina"}, new[] {"SD", "ambiguous", "south", "dakota"},
        new[] {"TN", "ambiguous", "tennesee"}, new[] {"Tn", "ambiguous", "tennesee"},
        new[] {"Tenn", "ambiguous", "tennesee"}, new[] {"TX", "ambiguous", "texas"},
        new[] {"Tx", "ambiguous", "texas"}, new[] {"Tex", "ambiguous", "texas"},
        new[] {"UT", "ambiguous", "utah"}, new[] {"VA", "ambiguous", "virginia"},
        new[] {"WA", "ambiguous", "washington"}, new[] {"Wa", "ambiguous", "washington"},
        new[] {"Wash", "ambiguous", "washington"}, new[] {"WI", "ambiguous", "wisconsin"},
        new[] {"Wi", "ambiguous", "wisconsin"}, new[] {"WV", "ambiguous", "west", "virginia"},
        new[] {"WY", "ambiguous", "wyoming"}, new[] {"Wy", "ambiguous", "wyoming"},
        new[] {"Wyo", "", "wyoming"}, new[] {"PR", "ambiguous", "puerto", "rico"}
};
/// <summary>
/// Compiles every classification regex from the UsEnglish constant set and
/// builds the constant-time lookup maps for king/section-like words and
/// US state abbreviations.
/// </summary>
static UsEnglishWordExpander()
{
    alphabetPattern = Pattern.Compile(UsEnglish.RX_ALPHABET);
    commaIntPattern = Pattern.Compile(UsEnglish.RX_COMMAINT);
    digits2DashPattern = Pattern.Compile(UsEnglish.RX_DIGITS2DASH);
    digitsPattern = Pattern.Compile(UsEnglish.RX_DIGITS);
    digitsSlashDigitsPattern =
            Pattern.Compile(UsEnglish.RX_DIGITSSLASHDIGITS);
    dottedAbbrevPattern = Pattern.Compile(UsEnglish.RX_DOTTED_ABBREV);
    doublePattern = Pattern.Compile(UsEnglish.RX_DOUBLE);
    drStPattern = Pattern.Compile(UsEnglish.RX_DRST);
    fourDigitsPattern = Pattern.Compile(UsEnglish.RX_FOUR_DIGIT);
    Pattern.Compile(UsEnglish.RX_HAS_VOWEL);
    illionPattern = Pattern.Compile(UsEnglish.RX_ILLION);
    numberTimePattern = Pattern.Compile(UsEnglish.RX_NUMBER_TIME);
    numessPattern = Pattern.Compile(UsEnglish.RX_NUMESS);
    ordinalPattern = Pattern.Compile(UsEnglish.RX_ORDINAL_NUMBER);
    romanNumbersPattern = Pattern.Compile(UsEnglish.RX_ROMAN_NUMBER);
    sevenPhoneNumberPattern =
            Pattern.Compile(UsEnglish.RX_SEVEN_DIGIT_PHONE_NUMBER);
    threeDigitsPattern = Pattern.Compile(UsEnglish.RX_THREE_DIGIT);
    usMoneyPattern = Pattern.Compile(UsEnglish.RX_US_MONEY);

    // Categorize king-like and section-like vocabulary for constant-time
    // lookup in inKingSectionLikeMap().
    foreach (string name in kingNames)
    {
        kingSectionLikeMap.put(name, KING_NAMES);
    }
    foreach (string title in kingTitles)
    {
        kingSectionLikeMap.put(title, KING_TITLES);
    }
    foreach (string sectionType in sectionTypes)
    {
        kingSectionLikeMap.put(sectionType, SECTION_TYPES);
    }

    // Index the state table by abbreviation, again for constant-time search.
    foreach (string[] stateEntry in usStates)
    {
        usStatesMap.put(stateEntry[0], stateEntry);
    }
}
/// <summary>
/// Constructs a default USTokenWordProcessor. It uses the USEnglish regular
/// expression set (USEngRegExp) by default. Loads the CART number classifier
/// and the prefix/suffix pronounceability FSMs from embedded resources.
/// </summary>
/// <exception cref="IllegalStateException">If any of the embedded resources
/// cannot be loaded.</exception>
public UsEnglishWordExpander()
{
    try
    {
        cart = new DecisionTree(Resources.nums_cart);
        prefixFSM = new PrefixFSM(Resources.prefix_fsm);
        suffixFSM = new SuffixFSM(Resources.suffix_fsm);
        //cart = new DecisionTree(getClass().getResource("nums_cart.txt"));
        //prefixFSM = new PrefixFSM(getClass().getResource("prefix_fsm.txt"));
        //suffixFSM = new SuffixFSM(getClass().getResource("suffix_fsm.txt"));
    }
    catch (IOException e)
    {
        // NOTE(review): the original IOException is discarded here; consider
        // wrapping it as an inner exception so the root cause isn't lost.
        throw new IllegalStateException("resources not found");
    }
}
/// <summary>
/// Returns the token Item currently being processed by expand()/tokenToWords().
/// </summary>
/// <returns>The current token Item; null if no item.</returns>
public virtual Item getTokenItem()
{
    return tokenItem;
}
/// <summary>
/// Expands the given text into a flat list of spoken words: the text is
/// normalized, tokenized, wrapped in an Utterance, and each token is pushed
/// through tokenToWords(); the resulting Word relation is flattened into a
/// list (break markers containing '#' are dropped).
/// </summary>
/// <param name="text">The text to expand.</param>
/// <exception cref="IllegalStateException">If the token relation is missing.</exception>
/// <returns>The list of expanded words.</returns>
public virtual List<string> expand(string text)
{
    var cleaned = simplifyChars(text);

    var tokenizer = new CharTokenizer();
    tokenizer.setWhitespaceSymbols(UsEnglish.WHITESPACE_SYMBOLS);
    tokenizer.setSingleCharSymbols(UsEnglish.SINGLE_CHAR_SYMBOLS);
    tokenizer.setPrepunctuationSymbols(UsEnglish.PREPUNCTUATION_SYMBOLS);
    tokenizer.setPostpunctuationSymbols(UsEnglish.PUNCTUATION_SYMBOLS);
    tokenizer.setInputText(cleaned);

    var utterance = new Utterance(tokenizer);
    var tokenRelation = utterance.getRelation(Relation.TOKEN);
    if (tokenRelation == null)
    {
        throw new IllegalStateException("token relation does not exist");
    }

    wordRelation = WordRelation.createWordRelation(utterance, this);

    // Convert each token into words; tokenItem is instance state consumed by
    // tokenToWords() and its helpers.
    tokenItem = tokenRelation.getHead();
    while (tokenItem != null)
    {
        tokenToWords(tokenItem.getFeatures().getString("name"));
        tokenItem = tokenItem.getNext();
    }

    // Collect the produced words, skipping empty items and break markers.
    var words = new List<string>();
    var wordItem = utterance.getRelation(Relation.WORD).getHead();
    while (wordItem != null)
    {
        var wordText = wordItem.ToString();
        if (!string.IsNullOrEmpty(wordText) && !wordText.Contains("#"))
        {
            words.Add(wordText);
        }
        wordItem = wordItem.getNext();
    }
    return words;
}
/// <summary>
/// Normalizes typographic characters to plain ASCII equivalents: curly
/// quotes to straight quotes, guillemets to double quotes, en-dash to
/// hyphen, and em-dash/ellipsis/form-feed to spaces.
/// </summary>
private string simplifyChars(string text)
{
    return text
        .Replace('’', '\'')
        .Replace('‘', '\'')
        .Replace('”', '"')
        .Replace('“', '"')
        .Replace('»', '"')
        .Replace('«', '"')
        .Replace('–', '-')
        .Replace('—', ' ')
        .Replace('…', ' ')
        .Replace((char)0xc, ' ');
}
/// <summary>
/// Returns true if the given token looks like part of a multi-token phone
/// number, judged from the digit-group shapes of the neighbouring tokens
/// (p.* = previous, n.* = next in the token relation).
/// </summary>
/// <param name="tokenVal">The token.</param>
/// <returns>true or false</returns>
private bool matchesPartPhoneNumber(string tokenVal)
{
    string n_name = (string)tokenItem.findFeature("n.name");
    string n_n_name = (string)tokenItem.findFeature("n.n.name");
    string p_name = (string)tokenItem.findFeature("p.name");
    string p_p_name = (string)tokenItem.findFeature("p.p.name");
    bool matches3DigitsP_name = matches(threeDigitsPattern, p_name);
    // A 3-digit token is phone-like when it is an area code (followed by
    // XXX XXXX), precedes a 7-digit block, or is the middle group of
    // XXX XXX XXXX; a 4-digit token is phone-like as the final group.
    // The clause order/grouping is ported verbatim from flite — keep as-is.
    return ((matches(threeDigitsPattern, tokenVal) && ((!matches(
            digitsPattern, p_name) && matches(threeDigitsPattern, n_name) && matches(
            fourDigitsPattern, n_n_name))
            || (matches(sevenPhoneNumberPattern, n_name)) || (!matches(
            digitsPattern, p_p_name) && matches3DigitsP_name && matches(
            fourDigitsPattern, n_name)))) || (matches(
            fourDigitsPattern, tokenVal) && (!matches(digitsPattern,
            n_name) && matches3DigitsP_name && matches(threeDigitsPattern,
            p_p_name))));
}
/// <summary>
/// Converts the given Token into (word) Items in the WordRelation. This is
/// the central token classifier ported from flite's us_text.c: one long
/// first-match-wins chain whose branch order is significant — do not reorder.
/// </summary>
/// <param name="tokenVal">the string value of the token, which may or may not be
/// same as the one in called "name" in flite</param>
private void tokenToWords(string tokenVal)
{
    FeatureSet tokenFeatures = tokenItem.getFeatures();
    string itemName = tokenFeatures.getString("name");
    int tokenLength = tokenVal.Length;

    if (tokenFeatures.isPresent("phones"))
    {
        // Token already has an explicit pronunciation attached.
        wordRelation.addWord(tokenVal);
    }
    else if ((tokenVal.Equals("a") || tokenVal.Equals("A"))
            && ((tokenItem.getNext() == null)
                    || !(tokenVal.Equals(itemName)) || !(((string)tokenItem
                        .findFeature("punc")).Equals(""))))
    {
        /* if A is a sub part of a token, then its ey not ah */
        wordRelation.addWord("_a");
    }
    else if (matches(alphabetPattern, tokenVal))
    {
        // Purely alphabetic token.
        if (matches(romanNumbersPattern, tokenVal))
        {
            /* XVIII */
            romanToWords(tokenVal);
        }
        else if (matches(illionPattern, tokenVal)
                && matches(usMoneyPattern,
                        (string)tokenItem.findFeature("p.name")))
        {
            /* $ X -illion */
            wordRelation.addWord(tokenVal);
            wordRelation.addWord("dollars");
        }
        else if (matches(drStPattern, tokenVal))
        {
            /* St Andrew's St, Dr King Dr */
            drStToWords(tokenVal);
        }
        else if (tokenVal.Equals("Mr"))
        {
            tokenItem.getFeatures().setString("punc", "");
            wordRelation.addWord("mister");
        }
        else if (tokenVal.Equals("Mrs"))
        {
            tokenItem.getFeatures().setString("punc", "");
            wordRelation.addWord("missus");
        }
        else if (tokenLength == 1
                && char.IsUpper(tokenVal[0])
                && ((string)tokenItem.findFeature("n.whitespace"))
                        .Equals(" ")
                && char.IsUpper(((string)tokenItem
                        .findFeature("n.name"))[0]))
        {
            // Single capital letter followed by another capitalized word:
            // treat as an initial and speak the letter itself.
            tokenFeatures.setString("punc", "");
            string aaa = tokenVal.ToLower();
            if (aaa.Equals("a"))
            {
                wordRelation.addWord("_a");
            }
            else
            {
                wordRelation.addWord(aaa);
            }
        }
        else if (isStateName(tokenVal))
        {
            /*
             * The name of a US state isStateName() has already added the
             * full name of the state, so we're all set.
             */
        }
        else if (tokenLength > 1 && !isPronounceable(tokenVal))
        {
            /* Need common exception list */
            /* unpronouncable list of alphas */
            NumberExpander.expandLetters(tokenVal, wordRelation);
        }
        else
        {
            /* just a word */
            wordRelation.addWord(tokenVal.ToLower());
        }
    }
    else if (matches(dottedAbbrevPattern, tokenVal))
    {
        /* U.S.A. */
        // remove all dots
        NumberExpander.expandLetters(tokenVal.Replace(".", ""),
                wordRelation);
    }
    else if (matches(commaIntPattern, tokenVal))
    {
        /* 99,999,999 */
        NumberExpander.expandReal(tokenVal.Replace(",", "").Replace("'", ""), wordRelation);
    }
    else if (matches(sevenPhoneNumberPattern, tokenVal))
    {
        /* 234-3434 telephone numbers */
        int dashIndex = tokenVal.IndexOf('-');
        string aaa = tokenVal.Substring(0, dashIndex);
        string bbb = tokenVal.Substring(dashIndex + 1);
        NumberExpander.expandDigits(aaa, wordRelation);
        wordRelation.addBreak();
        NumberExpander.expandDigits(bbb, wordRelation);
    }
    else if (matchesPartPhoneNumber(tokenVal))
    {
        /* part of a telephone number */
        string punctuation = (string)tokenItem.findFeature("punc");
        if (punctuation.Equals(""))
        {
            tokenItem.getFeatures().setString("punc", ",");
        }
        NumberExpander.expandDigits(tokenVal, wordRelation);
        wordRelation.addBreak();
    }
    else if (matches(numberTimePattern, tokenVal))
    {
        /* 12:35 — hours, then minutes unless ":00" */
        int colonIndex = tokenVal.IndexOf(':');
        string aaa = tokenVal.Substring(0, colonIndex);
        string bbb = tokenVal.Substring(colonIndex + 1);
        NumberExpander.expandNumber(aaa, wordRelation);
        if (!(bbb.Equals("00")))
        {
            NumberExpander.expandID(bbb, wordRelation);
        }
    }
    else if (matches(digits2DashPattern, tokenVal))
    {
        /* 999-999-999 */
        digitsDashToWords(tokenVal);
    }
    else if (matches(digitsPattern, tokenVal))
    {
        digitsToWords(tokenVal);
    }
    else if (tokenLength == 1
            && char.IsUpper(tokenVal[0])
            && ((string)tokenItem.findFeature("n.whitespace"))
                    .Equals(" ")
            && char.IsUpper(((string)tokenItem
                    .findFeature("n.name"))[0]))
    {
        // NOTE(review): duplicate of the single-capital branch inside the
        // alphabetic case above; a single uppercase letter presumably always
        // matches alphabetPattern first, which would make this branch dead —
        // kept verbatim from the flite port; confirm before removing.
        tokenFeatures.setString("punc", "");
        string aaa = tokenVal.ToLower();
        if (aaa.Equals("a"))
        {
            wordRelation.addWord("_a");
        }
        else
        {
            wordRelation.addWord(aaa);
        }
    }
    else if (matches(doublePattern, tokenVal))
    {
        // Floating point number.
        NumberExpander.expandReal(tokenVal, wordRelation);
    }
    else if (matches(ordinalPattern, tokenVal))
    {
        /* explicit ordinals — strip the "st"/"nd"/"rd"/"th" suffix */
        string aaa = tokenVal.Substring(0, tokenLength - 2);
        NumberExpander.expandOrdinal(aaa, wordRelation);
    }
    else if (matches(usMoneyPattern, tokenVal))
    {
        /* US money */
        usMoneyToWords(tokenVal);
    }
    else if (tokenLength > 0 && tokenVal[tokenLength - 1] == '%')
    {
        /* Y% — recurse on the number, then append "percent" */
        tokenToWords(tokenVal.Substring(0, tokenLength - 1));
        wordRelation.addWord("percent");
    }
    else if (matches(numessPattern, tokenVal))
    {
        // Numbers like "60s" — expand without the trailing 's'.
        NumberExpander.expandNumess(tokenVal.Substring(0, tokenLength - 1), wordRelation);
    }
    else if (matches(digitsSlashDigitsPattern, tokenVal)
            && tokenVal.Equals(itemName))
    {
        // Fractions / dates like 1/2.
        digitsSlashDigitsToWords(tokenVal);
    }
    else if (tokenVal.IndexOf('-') != -1)
    {
        // Hyphenated token — split and recurse per part.
        dashToWords(tokenVal);
    }
    else if (tokenLength > 1 && !matches(alphabetPattern, tokenVal))
    {
        // Mixed alphanumerics/symbols.
        notJustAlphasToWords(tokenVal);
    }
    else if (tokenVal.Equals("&"))
    {
        // &
        wordRelation.addWord("and");
    }
    else if (tokenVal.Equals("-"))
    {
        // Skip it
    }
    else
    {
        // Just a word.
        wordRelation.addWord(tokenVal.ToLower());
    }
}
/// <summary>
/// Convert the given digit token with dashes (e.g. 999-999-999) into (word)
/// Items in the WordRelation, inserting a break after each digit group.
/// </summary>
/// <param name="tokenVal">The digit string.</param>
private void digitsDashToWords([In] string tokenVal)
{
    int tokenLength = tokenVal.Length;
    int a = 0;
    for (int p = 0; p <= tokenLength; p++)
    {
        if (p == tokenLength || tokenVal[p] == '-')
        {
            // BUGFIX: C#'s Substring takes (startIndex, length), not
            // (startIndex, endIndex) as Java's substring does. The original
            // 'Substring(a, p)' yielded wrong segments for every group after
            // the first and threw ArgumentOutOfRangeException on the last one.
            string aaa = tokenVal.Substring(a, p - a);
            NumberExpander.expandDigits(aaa, wordRelation);
            wordRelation.addBreak();
            a = p + 1;
        }
    }
}
/// <summary>
/// Convert the given digit token into (word) Items in the WordRelation.
/// Uses the "nsw" feature when present, otherwise consults the CART
/// decision tree to pick ordinal / digit-by-digit / year / cardinal style.
/// </summary>
/// <param name="tokenVal">The digit string.</param>
private void digitsToWords(string tokenVal)
{
    FeatureSet featureSet = tokenItem.getFeatures();
    string nsw = "";
    if (featureSet.isPresent("nsw"))
    {
        nsw = featureSet.getString("nsw");
    }

    if (nsw.Equals("nide"))
    {
        NumberExpander.expandID(tokenVal, wordRelation);
    }
    else
    {
        string rName = featureSet.getString("name");
        string digitsType = null;

        if (tokenVal.Equals(rName))
        {
            digitsType = (string)cart.interpret(tokenItem);
        }
        else
        {
            // The CART classifies by the item's "name" feature, so
            // temporarily substitute tokenVal, then restore the original.
            featureSet.setString("name", tokenVal);
            digitsType = (string)cart.interpret(tokenItem);
            featureSet.setString("name", rName);
        }

        if (digitsType.Equals("ordinal"))
        {
            NumberExpander.expandOrdinal(tokenVal, wordRelation);
        }
        else if (digitsType.Equals("digits"))
        {
            NumberExpander.expandDigits(tokenVal, wordRelation);
        }
        else if (digitsType.Equals("year"))
        {
            NumberExpander.expandID(tokenVal, wordRelation);
        }
        else
        {
            NumberExpander.expandNumber(tokenVal, wordRelation);
        }
    }
}
/// <summary>
/// Converts the given Roman numeral string into (word) Items in the
/// WordRelation. Context decides the reading: after a king-like word it
/// becomes an ordinal ("the fourteenth"), after a section-like word a
/// cardinal; otherwise the letters are spelled out.
/// </summary>
/// <param name="romanString">The roman numeral string.</param>
private void romanToWords(string romanString)
{
    string punctuation = (string)tokenItem.findFeature("p.punc");

    if (punctuation.Equals(""))
    {
        /* no preceeding punctuation */
        //string n = String.valueOf(NumberExpander.expandRoman(romanString));
        var n = NumberExpander.expandRoman(romanString).ToString(CultureInfo.InvariantCulture);

        if (kingLike(tokenItem))
        {
            wordRelation.addWord("the");
            NumberExpander.expandOrdinal(n, wordRelation);
        }
        else if (sectionLike(tokenItem))
        {
            NumberExpander.expandNumber(n, wordRelation);
        }
        else
        {
            NumberExpander.expandLetters(romanString, wordRelation);
        }
    }
    else
    {
        NumberExpander.expandLetters(romanString, wordRelation);
    }
}
/// <summary>
/// Tests whether the key is present in <see cref="kingSectionLikeMap"/> AND
/// maps to the given category value.
/// </summary>
/// <param name="key">key to look for in the map.</param>
/// <param name="value">the category value to match.</param>
/// <returns>true only if the key is mapped to exactly this value.</returns>
private static bool inKingSectionLikeMap(string key, string value)
{
    return kingSectionLikeMap.ContainsKey(key)
           && kingSectionLikeMap.get(key).Equals(value);
}
/// <summary>
/// Returns true if the given token item sits in a king-like context: the
/// previous token is a known royal name ("Louis"), or the one before that
/// is a royal title ("King").
/// </summary>
/// <param name="tokenItem">the token item to check.</param>
/// <returns>true or false</returns>
public static bool kingLike(Item tokenItem)
{
    var previousWord = ((string)tokenItem.findFeature("p.name")).ToLower();
    if (inKingSectionLikeMap(previousWord, KING_NAMES))
    {
        return true;
    }

    var titleWord = ((string)tokenItem.findFeature("p.p.name")).ToLower();
    return inKingSectionLikeMap(titleWord, KING_TITLES);
}
/// <summary>
/// Returns true if the given token item contains a token that is in a
/// section-like context, e.g., "chapter" or "act".
/// </summary>
/// <param name="tokenItem">the token item to check.</param>
/// <returns>true or false</returns>
public static bool sectionLike(Item tokenItem)
{
    var previousName = ((string)tokenItem.findFeature("p.name")).ToLower();
    return inKingSectionLikeMap(previousName, SECTION_TYPES);
}
/// <summary>
/// Converts the given string containing "St" and "Dr" to (word) Items in the WordRelation.
/// Disambiguates "St" between "street"/"saint" and "Dr" between "drive"/"doctor"
/// using the surrounding punctuation, capitalization and whitespace.
/// </summary>
/// <param name="drStString">The string with "St" and "Dr".</param>
private void drStToWords(string drStString)
{
    string street = null;
    string saint = null;
    char c0 = drStString[0];
    if (c0 == 's' || c0 == 'S')
    {
        street = "street";
        saint = "saint";
    }
    else
    {
        street = "drive";
        saint = "doctor";
    }
    FeatureSet featureSet = tokenItem.getFeatures();
    string punctuation = featureSet.getString("punc");
    string featPunctuation = (string)tokenItem.findFeature("punc");
    // BUG FIX: "punc" may be unset (null) — the original dereferenced it here
    // unconditionally while null-checking the same variable at the end of the
    // method, so a missing feature caused a NullReferenceException.
    if (tokenItem.getNext() == null
        || (punctuation != null && punctuation.IndexOf(',') != -1))
    {
        // Last token, or followed by a comma: read as street/drive.
        wordRelation.addWord(street);
    }
    else if (featPunctuation.Equals(","))
    {
        wordRelation.addWord(saint);
    }
    else
    {
        // Disambiguate from the case of the neighboring tokens.
        string pName = (string)tokenItem.findFeature("p.name");
        string nName = (string)tokenItem.findFeature("n.name");
        char p0 = pName[0];
        char n0 = nName[0];
        if (char.IsUpper(p0) && char.IsLower(n0))
        {
            wordRelation.addWord(street);
        }
        else if (char.IsDigit(p0) && char.IsLower(n0))
        {
            wordRelation.addWord(street);
        }
        else if (char.IsLower(p0) && char.IsUpper(n0))
        {
            wordRelation.addWord(saint);
        }
        else
        {
            string whitespace = (string)tokenItem.findFeature("n.whitespace");
            if (whitespace.Equals(" "))
            {
                wordRelation.addWord(saint);
            }
            else
            {
                wordRelation.addWord(street);
            }
        }
    }
    // Swallow a trailing period so it is not spoken as sentence-final punctuation.
    if (punctuation != null && punctuation.Equals("."))
    {
        featureSet.setString("punc", "");
    }
}
/// <summary>
/// Converts US money string (e.g. "$13.50") into (word) Items in the WordRelation.
/// </summary>
/// <param name="tokenVal">The US money string, starting with '$'.</param>
private void usMoneyToWords(string tokenVal)
{
    int dotIndex = tokenVal.IndexOf('.');
    if (matches(illionPattern, (string)tokenItem.findFeature("n.name")))
    {
        // e.g. "$1.2 million" — the next token supplies the magnitude word.
        NumberExpander.expandReal(tokenVal.Substring(1), wordRelation);
    }
    else if (dotIndex == -1)
    {
        // Whole-dollar amount with no cents.
        string aaa = tokenVal.Substring(1);
        tokenToWords(aaa);
        if (aaa.Equals("1"))
        {
            wordRelation.addWord("dollar");
        }
        else
        {
            wordRelation.addWord("dollars");
        }
    }
    else if (dotIndex == (tokenVal.Length - 1)
             || (tokenVal.Length - dotIndex) > 3)
    {
        // Trailing dot or more than two decimals:
        // simply read as mumble point mumble.
        NumberExpander.expandReal(tokenVal.Substring(1), wordRelation);
        wordRelation.addWord("dollars");
    }
    else
    {
        // BUG FIX: the Java substring(1, dotIndex) port used Substring(1, dotIndex),
        // which in C# is (start, length) and therefore included the '.' in the
        // dollar part; the dollar part must stop one character before the dot.
        string aaa = tokenVal.Substring(1, dotIndex - 1).Replace(",", "");
        string bbb = tokenVal.Substring(dotIndex + 1);
        NumberExpander.expandNumber(aaa, wordRelation);
        if (aaa.Equals("1"))
        {
            wordRelation.addWord("dollar");
        }
        else
        {
            wordRelation.addWord("dollars");
        }
        if (bbb.Equals("00"))
        {
            // Exactly zero cents: add nothing to the word list.
        }
        else
        {
            NumberExpander.expandNumber(bbb, wordRelation);
            if (bbb.Equals("01"))
            {
                wordRelation.addWord("cent");
            }
            else
            {
                wordRelation.addWord("cents");
            }
        }
    }
}
/// <summary>
/// Convert the given digits/digits string into word (Items) in the WordRelation.
/// The string may be spoken as a fraction ("two thirds"), as "a half",
/// or literally with "slash".
/// </summary>
/// <param name="tokenVal">The digits/digits string.</param>
private void digitsSlashDigitsToWords([In] string tokenVal)
{
    int slashPos = tokenVal.IndexOf('/');
    string numerator = tokenVal.Substring(0, slashPos);
    string denominator = tokenVal.Substring(slashPos + 1);

    // If the previous token is a number, join with "and" ("two and a half").
    if (matches(digitsPattern, (string)tokenItem.findFeature("p.name"))
        && tokenItem.getPrevious() != null)
    {
        wordRelation.addWord("and");
    }

    if (numerator.Equals("1") && denominator.Equals("2"))
    {
        wordRelation.addWord("a");
        wordRelation.addWord("half");
        return;
    }

    int parsedNumerator = int.Parse(numerator);
    if (parsedNumerator < int.Parse(denominator))
    {
        // Proper fraction: cardinal numerator, ordinal denominator,
        // pluralized when the numerator exceeds one.
        NumberExpander.expandNumber(numerator, wordRelation);
        NumberExpander.expandOrdinal(denominator, wordRelation);
        if (parsedNumerator > 1)
        {
            wordRelation.addWord("'s");
        }
        return;
    }

    // Not a fraction: read literally, e.g. "three slash two".
    NumberExpander.expandNumber(numerator, wordRelation);
    wordRelation.addWord("slash");
    NumberExpander.expandNumber(denominator, wordRelation);
}
/// <summary>
/// Convert the given dashed string (e.g. "aaa-bbb") into (word) Items in the WordRelation.
/// Numeric ranges ("10-20") are read as "ten to twenty"; anything else is
/// expanded as its two halves.
/// </summary>
/// <param name="tokenVal">The dashed string.</param>
private void dashToWords([In] string tokenVal)
{
    int index = tokenVal.IndexOf('-');
    string aaa = tokenVal.Substring(0, index);
    // BUG FIX: Substring(index + 1, tokenVal.Length) is an invalid
    // (start, length) pair in C# and always threw ArgumentOutOfRangeException;
    // the Java substring(begin, end) port needs the single-argument form
    // to take the remainder of the string.
    string bbb = tokenVal.Substring(index + 1);
    if (matches(digitsPattern, aaa) && matches(digitsPattern, bbb))
    {
        // Numeric range: temporarily rewrite the token name so the halves
        // are expanded as numbers, joined by "to".
        FeatureSet featureSet = tokenItem.getFeatures();
        featureSet.setString("name", aaa);
        tokenToWords(aaa);
        wordRelation.addWord("to");
        featureSet.setString("name", bbb);
        tokenToWords(bbb);
        featureSet.setString("name", "");
    }
    else
    {
        tokenToWords(aaa);
        tokenToWords(bbb);
    }
}
/// <summary>
/// Convert the given string (which does not only consist of alphabet) into
/// (word) Items in the WordRelation, by splitting it at the first splittable
/// boundary and expanding the two halves recursively.
/// </summary>
/// <param name="tokenVal">The string.</param>
private void notJustAlphasToWords(string tokenVal)
{
    /* its not just alphas */
    // Find the first position where the token can be split in two.
    int index = 0;
    int tokenLength = tokenVal.Length;
    for (; index < tokenLength - 1; index++)
    {
        if (isTextSplitable(tokenVal, index))
        {
            break;
        }
    }
    if (index == tokenLength - 1)
    {
        // No split point found: emit the whole token as one lower-cased word.
        wordRelation.addWord(tokenVal.ToLower());
        return;
    }
    string aaa = tokenVal.Substring(0, index + 1);
    // BUG FIX: Substring(index + 1, tokenLength) is an invalid (start, length)
    // pair in C# and always threw ArgumentOutOfRangeException; the Java
    // substring(begin, end) port needs the single-argument form to take the
    // remainder of the string.
    string bbb = tokenVal.Substring(index + 1);
    FeatureSet featureSet = tokenItem.getFeatures();
    featureSet.setString("nsw", "nide");
    tokenToWords(aaa);
    tokenToWords(bbb);
}
/// <summary>
/// Returns true if the given word is pronounceable. This method is
/// originally called us_aswd() in Flite 1.1.
/// </summary>
/// <param name="word">The word to test.</param>
/// <returns>true if the word is pronounceable, false otherwise</returns>
public virtual bool isPronounceable(string word)
{
    var lowered = word.ToLower();
    // Pronounceable only when accepted by both the prefix and suffix machines.
    if (!prefixFSM.accept(lowered))
    {
        return false;
    }
    return suffixFSM.accept(lowered);
}
/// <summary>
/// Returns true if the given token is the name of a US state. If it is, it
/// will add the name of the state to (word) Items in the WordRelation.
/// </summary>
/// <param name="tokenVal">The token string.</param>
/// <returns>true if the token was expanded as a state name; false otherwise.</returns>
private bool isStateName([In] string tokenVal)
{
    // usStatesMap: token -> [abbrev, ambiguity-flag, word1, word2, ...]
    string[] state = usStatesMap.get(tokenVal);
    if (state != null)
    {
        bool expandState = false;
        // check to see if the state initials are ambiguous
        // in the English language
        if (state[1].Equals("ambiguous"))
        {
            string previous = (string)tokenItem.findFeature("p.name");
            string next = (string)tokenItem.findFeature("n.name");
            // NOTE(review): previous/next are assumed non-empty here —
            // previous[0]/next[0] would throw on an empty feature string; confirm.
            int nextLength = next.Length;
            FeatureSet featureSet = tokenItem.getFeatures();
            // check if the previous word starts with a capital letter,
            // is at least 3 letters long, is an alphabet sequence,
            // and has a comma.
            bool previousIsCity =
                (char.IsUpper(previous[0])
                && previous.Length > 2
                && matches(alphabetPattern, previous) && tokenItem
                .findFeature("p.punc").Equals(","));
            // check if next token starts with a lower case, or
            // this is the end of sentence, or if next token
            // is a period (".") or a zip code (5 or 10 digits).
            bool nextIsGood =
                (char.IsLower(next[0]))
                || tokenItem.getNext() == null
                || featureSet.getString("punc").Equals(".") || ((nextLength == 5 || nextLength == 10) && matches(
                digitsPattern, next));
            // Only expand an ambiguous abbreviation in a "City, ST"-shaped context.
            if (previousIsCity && nextIsGood)
            {
                expandState = true;
            }
            else
            {
                expandState = false;
            }
        }
        else
        {
            expandState = true;
        }
        if (expandState)
        {
            // state[2..] hold the spoken words for the state name.
            for (int j = 2; j < state.Length; j++)
            {
                if (state[j] != null)
                {
                    wordRelation.addWord(state[j]);
                }
            }
            return true;
        }
    }
    return false;
}
/// <summary>
/// Determines if the given input matches the given Pattern.
/// </summary>
/// <param name="pattern">The pattern to match.</param>
/// <param name="input">the string to test.</param>
/// <returns><code>true</code> if the input string matches the given Pattern;
/// <code>false</code> otherwise</returns>
private static bool matches(Pattern pattern, string input)
{
    return pattern.Matcher(input).Matches();
}
/// <summary>
/// Determines if the character at the given position of the given input
/// text is splittable. The position is splittable only when neither the
/// character nor its successor is an English letter or an apostrophe,
/// and the two characters are not both digits.
/// </summary>
/// <param name="text">The text containing the character of interest.</param>
/// <param name="index">The index of the character of interest.</param>
/// <returns>true if the position of the given text is splittable, false otherwise</returns>
private static bool isTextSplitable(string text, int index)
{
    char first = text[index];
    char second = text[index + 1];
    // Any letter on either side keeps the pair together.
    if (char.IsLetter(first) || char.IsLetter(second))
    {
        return false;
    }
    // A run of digits stays together.
    if (char.IsDigit(first) && char.IsDigit(second))
    {
        return false;
    }
    // Apostrophes bind to their neighbors.
    if (first == '\'' || second == '\'')
    {
        return false;
    }
    return true;
}
}
}
<|start_filename|>Syn.Speech/Alignment/FeatureSet.cs<|end_filename|>
using Syn.Speech.Helper;
//PATROLLED
namespace Syn.Speech.Alignment
{
/// <summary>
/// A mapping from feature names to arbitrary feature values, with typed
/// convenience accessors. Insertion order of names is preserved by the
/// underlying LinkedHashMap.
/// </summary>
public class FeatureSet
{
    // Ordered backing store for the feature name/value pairs.
    private readonly LinkedHashMap<string, object> map;
    //internal static DecimalFormat formatter;

    public FeatureSet()
    {
        map = new LinkedHashMap<string, object>();
    }

    /// <summary>Checks whether a feature with the given name is present.</summary>
    public virtual bool isPresent(string name)
    {
        return map.ContainsKey(name);
    }

    /// <summary>Removes the named feature, if present.</summary>
    public virtual void remove(string name)
    {
        map.Remove(name);
    }

    /// <summary>Returns the named feature value cast to a string.</summary>
    public virtual string getString(string name)
    {
        return (string)getObject(name);
    }

    /// <summary>Returns the named feature value cast to an int.</summary>
    public virtual int getInt(string name)
    {
        return (int)getObject(name);
    }

    /// <summary>Returns the named feature value cast to a float.</summary>
    public virtual float getFloat(string name)
    {
        return (float)getObject(name);
    }

    /// <summary>Returns the named feature value as a raw object.</summary>
    public virtual object getObject(string name)
    {
        return map.Get(name);
    }

    /// <summary>Stores an int feature value under the given name.</summary>
    public virtual void setInt(string name, int value)
    {
        setObject(name, value);
    }

    /// <summary>Stores a float feature value under the given name.</summary>
    public virtual void setFloat(string name, float value)
    {
        setObject(name, value);
    }

    /// <summary>Stores a string feature value under the given name.</summary>
    public virtual void setString(string name, string value)
    {
        setObject(name, value);
    }

    /// <summary>Stores an arbitrary feature value under the given name.</summary>
    public virtual void setObject(string name, object value)
    {
        map.Put(name, value);
    }
}
}
<|start_filename|>Syn.Speech/Decoder/Search/ActiveListFactory.cs<|end_filename|>
using System;
using Syn.Speech.Common;
using Syn.Speech.Util;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Search
{
/// <summary>
/// Creates new active lists.
/// </summary>
public abstract class ActiveListFactory:IConfigurable
{
    /// <summary>
    /// property that sets the desired (or target) size for this active list. This is sometimes referred to as the beam
    /// size
    /// </summary>
    [S4Integer(defaultValue = -1)]
    public static String PROP_ABSOLUTE_BEAM_WIDTH = "absoluteBeamWidth";

    /// <summary>
    /// Property that sets the minimum score relative to the maximum score in the list for pruning. Tokens with a score
    /// less than relativeBeamWidth/// maximumScore will be pruned from the list
    /// </summary>
    [S4Double(defaultValue = 1E-80)]
    public static String PROP_RELATIVE_BEAM_WIDTH = "relativeBeamWidth";

    /// <summary>
    /// Property that indicates whether or not the active list will implement 'strict pruning'. When strict pruning is
    /// enabled, the active list will not remove tokens from the active list until they have been completely scored. If
    /// strict pruning is not enabled, tokens can be removed from the active list based upon their entry scores. The
    /// default setting is false (disabled).
    /// </summary>
    [S4Boolean(defaultValue = true)]
    public static String PROP_STRICT_PRUNING = "strictPruning";

    // Shared log-math helper used to convert the relative beam width to log domain.
    protected LogMath logMath;
    // Target size of the produced active lists (-1 means unbounded).
    protected int absoluteBeamWidth;
    // Relative beam width in LogMath log domain.
    protected float logRelativeBeamWidth;

    /// <summary>
    /// Initializes a new instance of the <see cref="ActiveListFactory"/> class.
    /// </summary>
    /// <param name="absoluteBeamWidth">Width of the absolute beam.</param>
    /// <param name="relativeBeamWidth">Width of the relative beam (linear domain; converted to log).</param>
    protected ActiveListFactory(int absoluteBeamWidth,double relativeBeamWidth)
    {
        logMath = LogMath.getLogMath();
        this.absoluteBeamWidth = absoluteBeamWidth;
        this.logRelativeBeamWidth = logMath.linearToLog(relativeBeamWidth);
    }

    /// <summary>
    /// Parameterless constructor for configuration-driven instantiation;
    /// fields are populated later via newProperties().
    /// </summary>
    protected ActiveListFactory()
    {
    }

    /// <summary>
    /// Reconfigures this factory from the given property sheet, reading the
    /// absolute and relative beam widths.
    /// </summary>
    /// <param name="ps">the property sheet to read from.</param>
    public void newProperties(PropertySheet ps)
    {
        logMath = LogMath.getLogMath();
        absoluteBeamWidth = ps.getInt(PROP_ABSOLUTE_BEAM_WIDTH);
        double relativeBeamWidth = ps.getDouble(PROP_RELATIVE_BEAM_WIDTH);
        logRelativeBeamWidth = logMath.linearToLog(relativeBeamWidth);
    }

    /// <summary>
    /// Creates a new active list of a particular type
    /// </summary>
    /// <returns></returns>
    public abstract ActiveList newInstance();
}
}
<|start_filename|>Syn.Speech/Decoder/Search/Token.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Runtime.Serialization;
using System.Text;
using Syn.Speech.Common;
using Syn.Speech.Common.FrontEnd;
using Syn.Speech.Decoder.Scorer;
using Syn.Speech.Linguist;
//PATROLLED
namespace Syn.Speech.Decoder.Search
{
/// <summary>
/// Represents a single state in the recognition trellis. Subclasses of a token are used to represent the various
/// emitting state.
/// All scores are maintained in LogMath log base
/// </summary>
public class Token: IScoreable
{
    // Global counters used by showCount() to report token allocation activity.
    private static int curCount;
    private static int lastCount;
    // .NET numeric format strings used by ToString(): scientific notation for
    // scores, zero-padded four digits for the frame number.
    private static String scoreFmt = "0.0000000E00";
    private static String numFmt = "0000";

    // Previous token on this path (null for the initial token).
    private Token predecessor;
    // NOTE(review): this field is never read or assigned within this class —
    // presumably a leftover from the Java port; confirm before removing.
    CultureInfo culture;
    private float logLanguageScore;
    private float logTotalScore;
    private float logInsertionScore;
    private float logAcousticScore;
    private float logWorkingScore;
    private ISearchState searchState;
    //private int location;
    private int frameNumber;
    // Feature vector most recently scored against this token.
    private IData myData;

    /// <summary>
    /// A collection of arbitrary properties assigned to this token. This field becomes lazy initialized to reduce
    /// memory footprint.
    /// </summary>
    private Dictionary<String, Object> tokenProps;

    /// <summary>
    /// Internal constructor for a token. Used by classes Token, CombineToken, ParallelToken
    /// </summary>
    /// <param name="predecessor">the predecessor for this token</param>
    /// <param name="state">the SentenceHMMState associated with this token</param>
    /// <param name="logTotalScore">the total entry score for this token (in LogMath log base)</param>
    /// <param name="logInsertionScore">the insertion score for this token (in LogMath log base)</param>
    /// <param name="logLanguageScore">the language score associated with this token (in LogMath log base)</param>
    /// <param name="frameNumber">the frame number associated with this token</param>
    public Token(Token predecessor,
                ISearchState state,
                float logTotalScore,
                float logInsertionScore,
                float logLanguageScore,
                int frameNumber)
    {
        this.predecessor = predecessor;
        this.searchState = state;
        this.logTotalScore = logTotalScore;
        this.logInsertionScore = logInsertionScore;
        this.logLanguageScore = logLanguageScore;
        this.frameNumber = frameNumber;
        //this.location = -1;
        curCount++;
    }

    /// <summary>
    /// Creates the initial token with the given word history depth
    /// </summary>
    /// <param name="state">the SearchState associated with this token</param>
    /// <param name="frameNumber">the frame number for this token</param>
    public Token(ISearchState state, int frameNumber)
        :this(null, state, 0.0f, 0.0f, 0.0f, frameNumber)
    {
    }

    /// <summary>
    /// Creates a Token with the given acoustic and language scores and predecessor.
    /// </summary>
    /// <param name="predecessor">the predecessor Token</param>
    /// <param name="logTotalScore">the log total score</param>
    /// <param name="logAcousticScore">the log acoustic score</param>
    /// <param name="logInsertionScore">the log insertion score</param>
    /// <param name="logLanguageScore">the log language score</param>
    public Token(Token predecessor,
                float logTotalScore,
                float logAcousticScore,
                float logInsertionScore,
                float logLanguageScore)
        : this(predecessor, null, logTotalScore, logInsertionScore, logLanguageScore, 0)
    {
        this.logAcousticScore = logAcousticScore;
    }

    /// <summary>
    /// Returns the predecessor for this token, or null if this token has no predecessors
    /// </summary>
    /// <returns></returns>
    public Token getPredecessor()
    {
        return predecessor;
    }

    /// <summary>
    /// Returns the frame number for this token. Note that for tokens that are associated with non-emitting states, the
    /// frame number represents the next frame number. For emitting states, the frame number represents the current
    /// frame number.
    /// </summary>
    /// <returns></returns>
    public int getFrameNumber()
    {
        return frameNumber;
    }

    /// <summary>
    /// Sets the feature for this Token.
    /// </summary>
    /// <param name="data"></param>
    public void setData(IData data)
    {
        myData = data;
    }

    /// <summary>
    /// Returns the feature for this Token.
    /// </summary>
    /// <returns></returns>
    public IData getData()
    {
        return myData;
    }

    /// <summary>
    /// Returns the score for the token. The score is a combination of language and acoustic scores
    /// </summary>
    /// <returns></returns>
    public float getScore()
    {
        return logTotalScore;
    }

    /// <summary>
    /// Calculates a score against the given feature. The score can be retrieved
    /// with get score. The token will keep a reference to the scored feature-vector.
    /// </summary>
    /// <param name="feature">the feature to be scored</param>
    /// <returns>the score for the feature</returns>
    public float calculateScore(IData feature)
    {
        // The search state is expected to be an IScoreProvider for scoring.
        logAcousticScore = ((IScoreProvider) searchState).getScore(feature);
        logTotalScore += logAcousticScore;
        setData(feature);
        return logTotalScore;
    }

    // Delegates per-component (e.g. per-GMM-mixture) scoring to the search state.
    public float[] calculateComponentScore(IData feature)
    {
        return ((IScoreProvider) searchState).getComponentScore(feature);
    }

    /// <summary>
    /// Normalizes a previously calculated score
    /// </summary>
    /// <param name="maxLogScore">the score to normalize this score with</param>
    /// <returns>the normalized score</returns>
    public float normalizeScore(float maxLogScore)
    {
        logTotalScore -= maxLogScore;
        logAcousticScore -= maxLogScore;
        return logTotalScore;
    }

    /// <summary>
    /// Gets the working score. The working score is used to maintain non-final
    /// scores during the search. Some search algorithms such as bushderby use
    /// the working score
    /// </summary>
    /// <returns>the working score (in logMath log base)</returns>
    public float getWorkingScore()
    {
        return logWorkingScore;
    }

    /// <summary>
    /// Sets the working score for this token
    /// </summary>
    /// <param name="logScore">the working score (in logMath log base)</param>
    public void setWorkingScore(float logScore)
    {
        logWorkingScore = logScore;
    }

    /// <summary>
    /// Sets the score for this token
    /// </summary>
    /// <param name="logScore">the new score for the token (in logMath log base)</param>
    public void setScore(float logScore)
    {
        this.logTotalScore = logScore;
    }

    /// <summary>
    /// Returns the language score associated with this token
    /// </summary>
    /// <returns></returns>
    public float getLanguageScore()
    {
        return logLanguageScore;
    }

    /// <summary>
    /// Returns the insertion score associated with this token.
    /// Insertion score is the score of the transition between
    /// states. It might be transition score from the acoustic model,
    /// phone insertion score or word insertion probability from
    /// the linguist.
    /// </summary>
    /// <returns></returns>
    public float getInsertionScore()
    {
        return logInsertionScore;
    }

    /// <summary>
    /// Returns the acoustic score for this token (in logMath log base).
    /// Acoustic score is a sum of frame GMM.
    /// </summary>
    /// <returns></returns>
    public float getAcousticScore()
    {
        return logAcousticScore;
    }

    /// <summary>
    /// Returns the SearchState associated with this token
    /// </summary>
    /// <returns></returns>
    public ISearchState getSearchState()
    {
        return searchState;
    }

    /// <summary>
    /// Determines if this token is associated with an emitting state. An emitting state is a state that can be scored
    /// acoustically.
    /// </summary>
    /// <returns></returns>
    public bool isEmitting()
    {
        return searchState.isEmitting();
    }

    /// <summary>
    /// Determines if this token is associated with a final SentenceHMM state.
    /// </summary>
    /// <returns></returns>
    public bool isFinal()
    {
        return searchState.isFinal();
    }

    /// <summary>
    /// Determines if this token marks the end of a word
    /// </summary>
    /// <returns></returns>
    public bool isWord()
    {
        return searchState is IWordSearchState;
    }

    /// <summary>
    /// Retrieves the string representation of this object
    /// </summary>
    /// <returns></returns>
    public override String ToString()
    {
        return
            getFrameNumber().ToString(numFmt) + ' ' +
            getScore().ToString(scoreFmt) + ' ' +
            getAcousticScore().ToString(scoreFmt) + ' ' +
            getLanguageScore().ToString(scoreFmt) + ' ' +
            getSearchState() + (tokenProps == null ? "" : " " + tokenProps);
    }

    // Serialization is intentionally unsupported; this always throws.
    public void GetObjectData(SerializationInfo info, StreamingContext context)
    {
        throw new Exception("dummy Token serializer");
    }

    /// <summary>
    /// dumps a branch of tokens
    /// </summary>
    public void dumpTokenPath()
    {
        dumpTokenPath(true);
    }

    /// <summary>
    /// dumps a branch of tokens
    /// </summary>
    /// <param name="includeHMMStates">if true include all sentence hmm states</param>
    public void dumpTokenPath(Boolean includeHMMStates)
    {
        Token token = this;
        List<Token> list = new List<Token>();
        while (token != null)
        {
            list.Add(token);
            token = token.getPredecessor();
        }
        // Print from the start of the path (oldest predecessor) forward.
        for (int i = list.Count - 1; i >= 0; i--)
        {
            token = list[i];
            if (includeHMMStates ||
                (!(token.getSearchState() is IHMMSearchState))) {
                Console.Out.WriteLine("  " + token);
            }
        }
        Console.Out.WriteLine();
    }

    /// <summary>
    /// Returns the string of words leading up to this token.
    /// </summary>
    /// <param name="wantFiller">if true, filler words are added</param>
    /// <param name="wantPronunciations">if true append [ phoneme phoneme ... ] after each word</param>
    /// <returns></returns>
    public String getWordPath(Boolean wantFiller, Boolean wantPronunciations)
    {
        StringBuilder sb = new StringBuilder();
        Token token = this;
        // Walk back along predecessors, prepending words so the result reads forward.
        while (token != null)
        {
            if (token.isWord())
            {
                IWordSearchState wordState =(IWordSearchState) token.getSearchState();
                IPronunciation pron = wordState.getPronunciation();
                IWord word = wordState.getPronunciation().getWord();
                //Console.Out.WriteLine(token.getFrameNumber() + " " + word + " " + token.logLanguageScore + " " + token.logAcousticScore);
                if (wantFiller || !word.isFiller())
                {
                    if (wantPronunciations)
                    {
                        sb.Insert(0, ']');
                        IUnit[] u = pron.getUnits();
                        for (int i = u.Length - 1; i >= 0; i--)
                        {
                            if (i < u.Length - 1)
                                sb.Insert(0, ',');
                            sb.Insert(0, u[i].getName());
                        }
                        sb.Insert(0, '[');
                    }
                    sb.Insert(0, word.getSpelling());
                    sb.Insert(0, ' ');
                }
            }
            token = token.getPredecessor();
        }
        return sb.ToString().Trim();
    }

    /// <summary>
    /// Returns the string of words for this token, with no embedded filler words
    /// </summary>
    /// <returns>the string of words</returns>
    public String getWordPathNoFiller()
    {
        return getWordPath(false, false);
    }

    /// <summary>
    /// Returns the string of words for this token, with embedded silences
    /// </summary>
    /// <returns>the string of words</returns>
    public String getWordPath()
    {
        return getWordPath(true, false);
    }

    /// <summary>
    /// Returns the string of words and units for this token, with embedded silences.
    /// </summary>
    /// <returns>the string of words and units</returns>
    public String getWordUnitPath()
    {
        StringBuilder sb = new StringBuilder();
        Token token = this;
        while (token != null)
        {
            ISearchState searchState = token.getSearchState();
            if (searchState is IWordSearchState)
            {
                IWordSearchState wordState = (IWordSearchState) searchState;
                IWord word = wordState.getPronunciation().getWord();
                sb.Insert(0, ' ' + word.getSpelling());
            }
            else if (searchState is IUnitSearchState)
            {
                IUnitSearchState unitState = (IUnitSearchState) searchState;
                IUnit unit = unitState.getUnit();
                sb.Insert(0, ' ' + unit.getName());
            }
            token = token.getPredecessor();
        }
        return sb.ToString().Trim();
    }

    /// <summary>
    /// Returns the word of this Token, the search state is a WordSearchState. If the search state is not a
    /// WordSearchState, return null.
    /// </summary>
    /// <returns>the word of this Token, or null if this is not a word token</returns>
    public IWord getWord()
    {
        if (isWord())
        {
            IWordSearchState wordState = (IWordSearchState) searchState;
            return wordState.getPronunciation().getWord();
        }
        else {
            return null;
        }
    }

    /// <summary>
    /// Shows the token count
    /// </summary>
    public static void showCount()
    {
        Console.Out.WriteLine("Cur count: " + curCount + " new " +
                (curCount - lastCount));
        lastCount = curCount;
    }

    /// <summary>
    /// Returns the location of this Token in the ActiveList. In the HeapActiveList implementation, it is the index of
    /// the Token in the array backing the heap.
    /// </summary>
    /// <returns></returns>
    //public int getLocation()
    //{
    //    return location;
    //}

    ///// <summary>
    ///// Sets the location of this Token in the ActiveList.
    ///// </summary>
    ///// <param name="location"></param>
    //public void setLocation(int location)
    //{
    //    this.location = location;
    //}

    /// <summary>
    /// Determines if this branch is valid
    /// </summary>
    /// <returns>true if the token and its predecessors are valid</returns>
    public Boolean validate()
    {
        return true;
    }

    /// <summary>
    /// Return the DecimalFormat object for formatting the print out of scores.
    /// </summary>
    /// <returns>the DecimalFormat object for formatting score print outs</returns>
    protected static String getScoreFormat()
    {
        return scoreFmt;
    }

    /// <summary>
    /// Return the DecimalFormat object for formatting the print out of numbers
    /// </summary>
    /// <returns></returns>
    protected static String getNumberFormat()
    {
        return numFmt;
    }

    /// <summary>
    /// Returns the application object
    /// </summary>
    /// <returns></returns>
    //public Dictionary<String, Object> getTokenProps()
    //{
    //    if (tokenProps == null)
    //        tokenProps = new Dictionary<String, Object>();

    //    return tokenProps;
    //}

    /// <summary>
    ///
    ///A {@code Scoreable} comparator that is used to order scoreables according to their score,
    /// in descending order.
    ///<p>Note: since a higher score results in a lower natural order,
    /// statements such as {@code Collections.min(list, Scoreable.COMPARATOR)}
    /// actually return the Scoreable with the <b>highest</b> score,
    /// in contrast to the natural meaning of the word "min".
    /// </summary>
    /// <param name="t1"></param>
    /// <param name="t2"></param>
    /// <returns></returns>
    int IComparer<IScoreable>.Compare(IScoreable t1, IScoreable t2)
    {
        if (t1.getScore() > t2.getScore())
        {
            return -1;
        }
        else if (t1.getScore() == t2.getScore())
        {
            return 0;
        }
        else
        {
            return 1;
        }
    }

    //public void GetObjectData(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context)
    //{
    //    throw new NotImplementedException();
    //}

    /// <summary>
    /// Re-initializes this token in place (token pooling/reuse), overwriting
    /// the predecessor, state, scores and frame number.
    /// </summary>
    public void update(Token predecessor, ISearchState nextState,
            float logEntryScore, float insertionProbability,
            float languageProbability, int currentFrameNumber)
    {
        this.predecessor = predecessor;
        this.searchState = nextState;
        this.logTotalScore = logEntryScore;
        this.logInsertionScore = insertionProbability;
        this.logLanguageScore = languageProbability;
        this.frameNumber = currentFrameNumber;
    }
}
}
<|start_filename|>Syn.Speech/Decoder/IResultListener.cs<|end_filename|>
using Syn.Speech.Common;
//PATROLLED
namespace Syn.Speech.Decoder
{
/// <summary>
/// Listener notified by the decoder whenever a new recognition result is generated.
/// </summary>
public interface IResultListener: Util.Props.IConfigurable
{
    /// <summary>
    /// Method called when a new result is generated
    /// </summary>
    /// <param name="result">The new result.</param>
    void newResult(Results.Result result);
}
}
<|start_filename|>Syn.Speech/Alignment/ItemContents.cs<|end_filename|>
//PATROLLED
namespace Syn.Speech.Alignment
{
/// <summary>
/// Holds an item's feature set together with, for each relation name, the
/// Item that represents these contents in that relation.
/// </summary>
public class ItemContents
{
    // Plain features attached to the item.
    private readonly FeatureSet featureSet;
    // Maps a relation name to the Item for these contents in that relation.
    private readonly FeatureSet relationSet;

    public ItemContents()
    {
        featureSet = new FeatureSet();
        relationSet = new FeatureSet();
    }

    /// <summary>Records the Item representing these contents in the named relation.</summary>
    public virtual void addItemRelation(string relationName, Item item)
    {
        relationSet.setObject(relationName, item);
    }

    /// <summary>Removes the entry for the named relation.</summary>
    public virtual void removeItemRelation(string relationName)
    {
        relationSet.remove(relationName);
    }

    /// <summary>Returns the Item for the named relation, if any.</summary>
    public virtual Item getItemRelation(string relationName)
    {
        return (Item)relationSet.getObject(relationName);
    }

    /// <summary>Returns the feature set of this item.</summary>
    public virtual FeatureSet getFeatures()
    {
        return featureSet;
    }
}
}
<|start_filename|>Syn.Speech/Decoder/Pruner/SimplePruner.cs<|end_filename|>
using System;
using Syn.Speech.Common;
using Syn.Speech.Decoder.Search;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Pruner
{
/// <summary>
/// Performs the default pruning behavior which is to invoke the purge on the active list
/// </summary>
public class SimplePruner: IPruner
{
    private String name;

    public SimplePruner()
    {
    }

    /// <summary>
    /// Reconfigures this pruner from the given property sheet (no properties are read).
    /// </summary>
    public void newProperties(PropertySheet ps)
    {
    }

    /// <summary>
    /// Returns the configurable name of this pruner.
    /// </summary>
    public string getName()
    {
        return name;
    }

    /// <summary>Starts the pruner (no-op).</summary>
    public void startRecognition()
    {
    }

    /// <summary>
    /// Prunes the given set of states by purging the active list.
    /// </summary>
    /// <param name="activeList">an active list of tokens</param>
    public ActiveList prune(ActiveList activeList)
    {
        return activeList.purge();
    }

    /// <summary>Performs post-recognition cleanup (no-op).</summary>
    public void stopRecognition()
    {
    }

    /// <summary>Allocates resources for this pruner (no-op).</summary>
    public void allocate()
    {
    }

    /// <summary>Releases resources held by this pruner (no-op).</summary>
    public void deallocate()
    {
    }
}
}
<|start_filename|>Syn.Speech/Results/SausageMaker.cs<|end_filename|>
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using Syn.Speech.Helper;
using Syn.Speech.Linguist.Dictionary;
using Syn.Speech.Util;
//PATROLLED
namespace Syn.Speech.Results
{
/**
* <p/>
* The SausageMaker takes word lattices as input and turns them into sausages (Confusion Networks) according to Mangu,
* Brill and Stolcke, "Finding Consensus in Speech Recognition: word error minimization and other applications of
* confusion networks", Computer Speech and Language, 2000. Note that the <code>getBestHypothesis</code> of the
* ConfidenceResult object returned by the {@link #score(Result) score} method returns the path where all the words have
* the highest posterior probability within its corresponding time slot. </p>
*
* @author pgorniak
*/
public class SausageMaker : AbstractSausageMaker {
/// <summary>Constructs an empty sausage maker.</summary>
public SausageMaker() {
}

/// <summary>Constructs a sausage maker with the given language weight.</summary>
/// <param name="languageWieght">the language weight
/// (parameter name, typo included, kept for named-argument compatibility).</param>
public SausageMaker(float languageWieght) {
    languageWeight = languageWieght;
}

/// <summary>Constructs a sausage maker over the given lattice.</summary>
/// <param name="l">the lattice to construct a sausage from</param>
public SausageMaker(Lattice l) {
    lattice = l;
}
/// <summary>
/// Performs the inter-word clustering stage of the algorithm: repeatedly
/// applies single merge steps until no further merge is possible.
/// </summary>
/// <param name="clusters">the current cluster set</param>
protected void interWordCluster(List<Cluster> clusters) {
    bool merged;
    do {
        merged = interWordClusterStep(clusters);
    } while (merged);
}
/// <summary>
/// Returns the latest begin time of all nodes in the given cluster.
/// </summary>
/// <param name="cluster">the cluster to examine</param>
/// <returns>the latest begin time, or -1 for an empty cluster</returns>
public int getLatestBeginTime(List<Node> cluster) {
    if (cluster.Count == 0) {
        return -1;
    }
    int latest = 0;
    foreach (Node node in cluster) {
        latest = Math.Max(latest, node.getBeginTime());
    }
    return latest;
}
/// <summary>
/// Returns the earliest end time of all nodes in the given cluster.
/// </summary>
/// <param name="cluster">the cluster to examine</param>
/// <returns>the earliest end time, or -1 for an empty cluster</returns>
public int getEarliestEndTime(List<Node> cluster) {
    if (cluster.Count == 0) {
        return -1;
    }
    int earliest = int.MaxValue;
    foreach (Node node in cluster) {
        earliest = Math.Min(earliest, node.getEndTime());
    }
    return earliest;
}
/// <summary>
/// Performs one inter-word clustering step: finds the most similar pair of
/// time-overlapping clusters and merges it.
/// </summary>
/// <param name="clusters">the current cluster set</param>
/// <returns>true if a pair was merged, false if no merge applied</returns>
protected bool interWordClusterStep(List<Cluster> clusters) {
    Cluster bestFirst = null;
    Cluster bestSecond = null;
    double bestSimilarity = Double.NegativeInfinity;
    // Examine every unordered pair (i, j), i < j, and remember the most
    // similar pair whose time ranges overlap.
    for (int i = 0; i + 1 < clusters.Count; i++) {
        for (int j = i + 1; j < clusters.Count; j++) {
            Cluster first = clusters[i];
            Cluster second = clusters[j];
            double similarity = interClusterDistance(first, second);
            if (similarity > bestSimilarity && hasOverlap(first, second)) {
                bestSimilarity = similarity;
                bestFirst = first;
                bestSecond = second;
            }
        }
    }
    if (bestFirst == null) {
        return false;
    }
    // Merge the runner-up into the winner and drop it from the set.
    clusters.Remove(bestSecond);
    bestFirst.add(bestSecond);
    return true;
}
/**
 * Find the string edit (Levenshtein) distance between two lists of objects.
 * Objects are compared using .Equals(). Implemented with two rolling rows
 * instead of a full DP matrix; the result is identical.
 * TODO: could be moved to a general utility class
 *
 * @param p1 the first list
 * @param p2 the second list
 * @return the string edit distance between the two lists
 */
protected static int stringEditDistance(IList p1, IList p2) {
    if (p1.Count == 0) {
        return p2.Count;
    }
    if (p2.Count == 0) {
        return p1.Count;
    }
    // previous[j] holds the distance for the prior row; current[j] is being filled.
    int[] previous = new int[p2.Count + 1];
    int[] current = new int[p2.Count + 1];
    for (int j = 0; j <= p2.Count; j++) {
        previous[j] = j;
    }
    for (int i = 1; i <= p1.Count; i++) {
        current[0] = i;
        for (int j = 1; j <= p2.Count; j++) {
            int substitution = previous[j - 1] + (p1[i - 1].Equals(p2[j - 1]) ? 0 : 1);
            int deletion = previous[j] + 1;
            int insertion = current[j - 1] + 1;
            current[j] = Math.Min(substitution, Math.Min(deletion, insertion));
        }
        int[] swap = previous;
        previous = current;
        current = swap;
    }
    return previous[p2.Count];
}
/**
 * Compute the phonetic similarity of two lattice nodes, based on the string edit
 * distance between their most likely pronunciations. TODO: maybe move to Node.java?
 *
 * @param n1 the first node
 * @param n2 the second node
 * @return the phonetic similarity, between 0 and 1 (1 == identical pronunciations)
 */
protected double computePhoneticSimilarity(Node n1, Node n2) {
Pronunciation p1 = n1.getWord().getMostLikelyPronunciation();
Pronunciation p2 = n2.getWord().getMostLikelyPronunciation();
// Normalize the edit distance by the combined unit count, then invert
// so that a larger value means more similar.
double sim = stringEditDistance(p1.getUnits().ToList(),p2.getUnits().ToList());
sim /= (p1.getUnits().Length + p2.getUnits().Length);
return 1 - sim;
}
/**
 * Calculate the distance between two clusters.
 *
 * @param c1 the first cluster
 * @param c2 the second cluster
 * @return the inter cluster similarity, or Double.NegativeInfinity if these clusters
 * should never be clustered together (i.e. they are ancestrally related).
 */
protected double interClusterDistance(Cluster c1, Cluster c2) {
// Clusters containing ancestrally related nodes must never merge.
if (areClustersInRelation(c1, c2)) {
return Double.NegativeInfinity;
}
float totalSim = LogMath.LOG_ZERO;
float wordPairCount = (float) 0.0;
HashSet<String> wordsSeen1 = new HashSet<String>();
LogMath logMath = LogMath.getLogMath();
// Accumulate, over each distinct word pair (one spelling from each cluster),
// the log-domain product of phonetic similarity and the two words'
// sub-cluster probabilities.
foreach (Node node1 in c1.getElements()) {
String word1 = node1.getWord().getSpelling();
if (wordsSeen1.Contains(word1)) {
continue;
}
wordsSeen1.Add(word1);
HashSet<String> wordsSeen2 = new HashSet<String>();
foreach (Node node2 in c2.getElements()) {
String word2 = node2.getWord().getSpelling();
if (wordsSeen2.Contains(word2)) {
continue;
}
wordsSeen2.Add(word2);
float sim = (float) computePhoneticSimilarity(node1, node2);
sim = logMath.linearToLog(sim);
sim += (float)wordSubClusterProbability(c1, word1);
sim += (float)wordSubClusterProbability(c2, word2);
totalSim = logMath.addAsLinear(totalSim, sim);
wordPairCount++;
}
}
// NOTE(review): subtracting logToLinear(wordPairCount) looks suspicious —
// averaging in the log domain would normally subtract
// linearToLog(wordPairCount). Verify against the upstream Sphinx-4
// SausageMaker before changing.
return totalSim - logMath.logToLinear(wordPairCount);
}
/**
 * Check whether these two clusters stand in a relation to each other. Two clusters
 * are related if a member of one is an ancestor of a member of the other cluster.
 *
 * @param cluster1 the first cluster
 * @param cluster2 the second cluster
 * @return true if the clusters are related
 */
protected bool areClustersInRelation(Cluster cluster1, Cluster cluster2) {
foreach (Node n1 in cluster1.getElements()) {
foreach (Node n2 in cluster2.getElements()) {
if (n1.hasAncestralRelationship(n2)) {
return true;
}
}
}
return false;
}
/**
 * Calculate the distance between two clusters, forcing them to have the same words
 * in them, and to not be related to each other.
 *
 * @param cluster1 the first cluster
 * @param cluster2 the second cluster
 * @return The intra-cluster distance, or Double.NegativeInfinity if the clusters
 * should never be clustered together.
 */
protected double intraClusterDistance(Cluster cluster1, Cluster cluster2) {
LogMath logMath = LogMath.getLogMath();
double maxSim = Double.NegativeInfinity;
foreach (Node node1 in cluster1.getElements()) {
foreach (Node node2 in cluster2.getElements()) {
// Intra-word clustering only merges clusters of the same spelling.
if (!node1.getWord().getSpelling().Equals(
node2.getWord().getSpelling()))
return Double.NegativeInfinity;
if (node1.hasAncestralRelationship(node2))
return Double.NegativeInfinity;
double overlap = getOverlap(node1, node2);
if (overlap > 0.0) {
// Weight the time overlap by both nodes' posteriors and keep the best pair.
overlap = logMath.logToLinear((float) overlap);
overlap += node1.getPosterior() + node2.getPosterior();
if (overlap > maxSim) {
maxSim = overlap;
}
}
}
}
return maxSim;
}
/**
 * Perform the intra word clustering stage of the algorithm.
 * Repeats single merge steps until no further same-word clusters can be merged.
 *
 * @param clusters the current list of clusters, modified in place
 */
protected void intraWordCluster(List<Cluster> clusters) {
    bool merged;
    do {
        merged = intraWordClusterStep(clusters);
    } while (merged);
}
/**
 * Perform a step of the intra word clustering stage: merge the most similar
 * pair of clusters that share the same word and are not ancestrally related.
 *
 * @param clusters the current list of clusters
 * @return did two clusters get merged?
 */
protected bool intraWordClusterStep(List<Cluster> clusters) {
Cluster toBeMerged1 = null;
Cluster toBeMerged2 = null;
double maxSim = Double.NegativeInfinity;
//TODO: Check Behaviour
// Scan every unordered pair (i, j) with i < j for the best merge candidate.
for (int i = 0; i < clusters.Count;i++)
{
Cluster c1 = clusters[i];
if (i + 1 >= clusters.Count) break;
for (int j = i + 1; j < clusters.Count; j++)
{
Cluster c2 = clusters[j];
double sim = intraClusterDistance(c1, c2);
if (sim > maxSim)
{
maxSim = sim;
toBeMerged1 = c1;
toBeMerged2 = c2;
}
}
}
if (toBeMerged1 != null) {
// Fold the second cluster into the first and drop it from the set.
clusters.Remove(toBeMerged2);
toBeMerged1.add(toBeMerged2);
return true;
}
return false;
}
/**
 * Turn the lattice contained in this sausage maker into a sausage object:
 * one cluster per node, then intra-word clustering, inter-word clustering,
 * and a topological sort of the resulting clusters.
 *
 * @return the sausage produced by collapsing the lattice.
 */
public Sausage makeSausage() {
List<Cluster> clusters = new List<Cluster>(lattice.nodes.size());
foreach (Node n in lattice.nodes.values()) {
// Descendant caching speeds up the repeated ancestry checks below.
n.cacheDescendants();
Cluster bucket = new Cluster(n);
clusters.Add(bucket);
}
intraWordCluster(clusters);
interWordCluster(clusters);
clusters = topologicalSort(clusters);
return sausageFromClusters(clusters);
}
/** @see edu.cmu.sphinx.result.ConfidenceScorer#score(edu.cmu.sphinx.result.Result) */
public override IConfidenceResult score(Result result) {
lattice = new Lattice(result);
LatticeOptimizer lop = new LatticeOptimizer(lattice);
lop.optimize();
lattice.computeNodePosteriors(languageWeight);
return makeSausage();
}
/**
 * Topologically sort the clusters. Note that this is a brute force sort by removing
 * the min cluster from the list of clusters, since Collections.sort() does not work
 * in all cases (the cluster ordering is only a partial order).
 *
 * @param clusters the list of clusters to be topologically sorted
 * @return a topologically sorted list of clusters
 */
private List<Cluster> topologicalSort(List<Cluster> clusters) {
var comparator = new ClusterComparator();
List<Cluster> sorted = new List<Cluster>(clusters.Count);
while (!clusters.IsEmpty())
{
// Repeatedly extract the minimum cluster (selection sort).
Cluster cluster = Java.Min(clusters, comparator);
clusters.Remove(cluster);
sorted.Add(cluster);
}
return sorted;
}
}
}
<|start_filename|>Syn.Speech/Result/IResultListener.cs<|end_filename|>
namespace Syn.Speech.Result
{
    /// <summary>
    /// A configurable component that is notified each time a new
    /// recognition result becomes available.
    /// </summary>
    public interface IResultListener: Util.Props.IConfigurable
    {
        /// <summary>
        /// Called when a new recognition result is produced.
        /// </summary>
        /// <param name="result">the newly available result</param>
        void newResult(Result result);
    }
}
<|start_filename|>Syn.Speech/Decoder/Scorer/MaxScoreNormalizer.cs<|end_filename|>
using System.Collections.Generic;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Scorer
{
/// <summary>
/// Performs a simple normalization of all token-scores by
/// </summary>
public class MaxScoreNormalizer : IScoreNormalizer
{
    public MaxScoreNormalizer()
    {
    }

    public void newProperties(PropertySheet ps)
    {
        // No configurable properties.
    }

    /// <summary>
    /// Normalizes every scoreable in the list by the best token's score
    /// and returns the best token unchanged.
    /// </summary>
    public IScoreable normalize(List<IScoreable> scoreableList, IScoreable bestToken)
    {
        for (int i = 0; i < scoreableList.Count; i++)
        {
            scoreableList[i].normalizeScore(bestToken.getScore());
        }
        return bestToken;
    }
}
}
<|start_filename|>Syn.Speech/Linguist/Language/NGram/ProbDepth.cs<|end_filename|>
namespace Syn.Speech.Linguist.Language.NGram
{
/// <summary>
/// Value class for returning results from {@link BackoffLanguageModel}:
/// a probability together with the n-gram depth at which it was found.
/// </summary>
public class ProbDepth
{
    // Log-domain probability returned by the language model.
    public float probability;
    // N-gram depth (order) at which the probability was found.
    public int depth;
    public ProbDepth(float probability, int depth)
    {
        this.probability = probability;
        this.depth = depth;
    }
}
}
<|start_filename|>Syn.Speech/Alignment/Alignment.cs<|end_filename|>
using System;
using System.Collections.Generic;
using Syn.Speech.Alignment.Comparer;
using Syn.Speech.Helper;
//PATROLLED + REFACTORED
namespace Syn.Speech.Alignment
{
public class Alignment
{
// Database positions (shifts) reachable from the query within the given range.
internal readonly List<Integer> Shifts;
// The word sequence being aligned.
internal readonly List<string> Query;
// Indices of query words that occur in the aligner's tuple index.
public readonly List<Integer> Indices;
// The resulting alignment path (matched, non-boundary nodes in order).
private readonly List<Node> _alignment;
private LongTextAligner _aligner;
/// <summary>
/// Builds an alignment of <paramref name="query"/> against the aligner's
/// database restricted to <paramref name="range"/>, using a best-first
/// (A*-style) search over alignment nodes.
/// </summary>
public Alignment(LongTextAligner longTextAligner, List<string> query, Range range)
{
_aligner = longTextAligner;
Query = query;
Indices = new List<Integer>();
var shiftSet = new SortedSet<Integer>();
// Collect query words known to the index and the in-range database shifts.
for (var i = 0; i < query.Count; i++)
{
if (_aligner.TupleIndex.ContainsKey(query[i]))
{
Indices.Add(i);
foreach (var shift in _aligner.TupleIndex.Get(query[i]))
{
if (range.contains(shift))
shiftSet.Add(shift);
}
}
}
Shifts = new List<Integer>(shiftSet);
// Best-first search: cost map, priority queue ordered by cost, closed set,
// and parent links for backtracing the winning path.
var cost = new HashMap<Node, Integer>();
var openSet = new PriorityQueue<Node>(1, new NodeComparer(cost));
var closedSet = new HashSet<Node>();
var parents = new HashMap<Node, Node>();
var startNode = new Node(_aligner, this, 0, 0);
cost.Put(startNode, 0);
openSet.Add(startNode);
while (openSet.Count !=0)
{
Node q = openSet.Dequeue();
if (closedSet.Contains(q))
continue;
if (q.IsTarget) {
// Reached the goal: reconstruct the path via parent links, keeping
// only matched, non-boundary nodes, then restore forward order.
var backtrace = new List<Node>();
while (parents.ContainsKey(q)) {
if (!q.IsBoundary && q.HasMatch)
backtrace.Add(q);
q = parents.Get(q);
}
_alignment = new List<Node>(backtrace);
_alignment.Reverse();
//Collections.reverse(alignment);
return;
}
closedSet.Add(q);
foreach (Node nb in q.Adjacent()) {
if (closedSet.Contains(nb))
continue;
// FIXME: move to appropriate location
// Heuristic adjustment: change in remaining diagonal distance
// between the current node and this neighbor.
int l = Math.Abs(Indices.Count - Shifts.Count - q.QueryIndex +
q.DatabaseIndex) -
Math.Abs(Indices.Count - Shifts.Count -
nb.QueryIndex +
nb.DatabaseIndex);
Integer oldScore = cost.Get(nb);
Integer qScore = cost.Get(q);
if (oldScore == null)
oldScore = Integer.MAX_VALUE;
if (qScore == null)
qScore = Integer.MAX_VALUE;
int newScore = qScore + nb.GetValue() - l;
if (newScore < oldScore) {
// Found a cheaper path to this neighbor; record and re-queue it.
cost.Put(nb, newScore);
openSet.Add(nb);
parents.Put(nb, q);
}
}
}
// No target reached: leave the alignment empty.
_alignment = new List<Node>();
}
/// <summary>
/// Returns the alignment path computed by the constructor.
/// NOTE(review): despite the name, this returns alignment nodes, not the
/// <c>Indices</c> field.
/// </summary>
public List<Node> GetIndices()
{
return _alignment;
}
}
}
<|start_filename|>Syn.Speech/Result/FrameStatistics.cs<|end_filename|>
using Syn.Speech.Common.FrontEnd;
using Syn.Speech.Linguist.Acoustic;
namespace Syn.Speech.Result
{
/// <summary>
/// Contains statistics about a single frame of the recognition run.
/// <p/>
/// Note that all scores are maintained in LogMath log base.
/// </summary>
public abstract class FrameStatistics
{
    /// <summary>
    /// Gets the frame number
    /// </summary>
    /// <returns>the index of this frame</returns>
    public abstract int getFrameNumber();
    /// <summary>
    /// Gets the feature associated with this frame
    /// </summary>
    /// <returns>the frame's feature data</returns>
    public abstract IData getData();
    /// <summary>
    /// Gets the best acoustic score for this frame (log domain)
    /// </summary>
    /// <returns>the best score</returns>
    public abstract float getBestScore();
    /// <summary>
    /// Gets the unit that had the best score for this frame
    /// </summary>
    /// <returns>the best scoring unit</returns>
    public abstract Unit getBestUnit();
    /// <summary>
    /// Gets the best scoring hmm state for this frame
    /// </summary>
    /// <returns>the best scoring HMM state index</returns>
    public abstract int getBestState();
}
}
<|start_filename|>Syn.Speech/Decoder/Pruner/NullPruner.cs<|end_filename|>
using Syn.Speech.Common;
using Syn.Speech.Decoder.Search;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Pruner
{
/// <summary>
/// A Null pruner. Does no actual pruning: the active list is returned unchanged.
/// </summary>
public class NullPruner : IPruner
{
    /* (non-Javadoc)
    * @see edu.cmu.sphinx.util.props.Configurable#newProperties(edu.cmu.sphinx.util.props.PropertySheet)
    */
    public void newProperties(PropertySheet ps)
    {
    }
    /** Creates a simple pruner */
    public NullPruner()
    {
    }
    /** starts the pruner */
    public void startRecognition()
    {
    }
    /// <summary>
    /// Returns the given active list untouched — no tokens are removed.
    /// </summary>
    public ActiveList prune(ActiveList activeList)
    {
        return activeList;
    }
    /** Performs post-recognition cleanup. */
    public void stopRecognition()
    {
    }
    /* (non-Javadoc)
    * @see edu.cmu.sphinx.decoder.pruner.Pruner#allocate()
    */
    public void allocate()
    {
    }
    /* (non-Javadoc)
    * @see edu.cmu.sphinx.decoder.pruner.Pruner#deallocate()
    */
    public void deallocate()
    {
    }
}
}
<|start_filename|>Syn.Speech/Decoder/Search/WordPruningBreadthFirstSearchManager.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using Syn.Speech.Common.FrontEnd;
using Syn.Speech.Decoder.Pruner;
using Syn.Speech.Decoder.Scorer;
using Syn.Speech.Linguist;
using Syn.Speech.Util;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Search
{
/// <summary>
/// Provides the breadth first search. To perform recognition an application should call initialize before recognition
/// begins, and repeatedly call <code> recognize </code> until Result.isFinal() returns true. Once a final result has
/// been obtained, <code> stopRecognition </code> should be called.
///
/// All scores and probabilities are maintained in the log math log domain.
/// </summary>
public class WordPruningBreadthFirstSearchManager : TokenSearchManager
{
/** The property that defines the name of the linguist to be used by this search manager. */
[S4Component(type = typeof(Linguist.Linguist))]
public static String PROP_LINGUIST = "linguist";
/** The property that defines the name of the pruner to be used by this search manager. */
[S4Component(type = typeof(IPruner))]
public static String PROP_PRUNER = "pruner";
/** The property that defines the name of the scorer to be used by this search manager. */
[S4Component(type = typeof(IAcousticScorer))]
public static String PROP_SCORER = "scorer";
/**
/// The property that, when set to <code>true</code> will cause the recognizer to count up all the tokens in the
/// active list after every frame.
*/
[S4Boolean(defaultValue = false)]
public static String PROP_SHOW_TOKEN_COUNT = "showTokenCount";
/**
/// The property that controls the number of frames processed for every time
/// the decode growth step is skipped. Setting this property to zero disables
/// grow skipping. Setting this number to a small integer will increase the
/// speed of the decoder but will also decrease its accuracy. The higher the
/// number, the less often the grow code is skipped. Values like 6-8 is known
/// to be the good enough for large vocabulary tasks. That means that one of
/// 6 frames will be skipped.
*/
[S4Integer(defaultValue = 0)]
public static String PROP_GROW_SKIP_INTERVAL = "growSkipInterval";
/** The property that defines the type of active list to use */
[S4Component(type = typeof(ActiveListManager))]
public static String PROP_ACTIVE_LIST_MANAGER = "activeListManager";
/** The property for checking if the order of states is valid. */
[S4Boolean(defaultValue = false)]
public static String PROP_CHECK_STATE_ORDER = "checkStateOrder";
/** The property that specifies the maximum lattice edges */
[S4Integer(defaultValue = 100)]
public static String PROP_MAX_LATTICE_EDGES = "maxLatticeEdges";
/**
/// The property that controls the amount of simple acoustic lookahead performed. Setting the property to zero
/// (the default) disables simple acoustic lookahead. The lookahead need not be an integer.
*/
[S4Double(defaultValue = 0)]
public static String PROP_ACOUSTIC_LOOKAHEAD_FRAMES = "acousticLookaheadFrames";
/** The property that specifies the relative beam width */
[S4Double(defaultValue = 0.0)]
// TODO: this should be a more meaningful default e.g. the common 1E-80
public static String PROP_RELATIVE_BEAM_WIDTH = "relativeBeamWidth";
// -----------------------------------
// Configured Subcomponents
// -----------------------------------
private Linguist.Linguist linguist; // Provides grammar/language info
private IPruner pruner; // used to prune the active list
private IAcousticScorer scorer; // used to score the active list
private ActiveListManager activeListManager; // supplies emitting/non-emitting active lists
private LogMath logMath; // shared log-domain math helper
// -----------------------------------
// Configuration data
// -----------------------------------
private Boolean _showTokenCount; // dump token counts after each frame (expensive)
private Boolean _checkStateOrder; // validate state-order monotonicity during expansion
private int growSkipInterval; // skip the grow step every N frames (0 = never skip)
private float relativeBeamWidth; // log-domain relative beam width
private float acousticLookaheadFrames; // frames of simple acoustic lookahead (0 = off)
private int maxLatticeEdges = 100; // cap on alternate-hypothesis lattice edges
// -----------------------------------
// Instrumentation
// -----------------------------------
private Timer scoreTimer; // time spent scoring
private Timer pruneTimer; // time spent pruning
private Timer growTimer; // time spent growing branches
private StatisticsVariable totalTokensScored;
private StatisticsVariable curTokensScored;
private StatisticsVariable tokensCreated;
private long tokenSum; // running sum of active-list sizes (for averaging)
private int tokenCount; // number of frames contributing to tokenSum
// -----------------------------------
// Working data
// -----------------------------------
private int currentFrameNumber; // the current frame number
protected ActiveList activeList; // the list of active tokens
private List<Token> resultList; // the current set of results
protected Dictionary<Object, Token> bestTokenMap; // best token per state key, per frame
private AlternateHypothesisManager loserManager; // tracks losing predecessors for the word lattice
private int numStateOrder; // number of distinct state orders in the search graph
// private TokenTracker tokenTracker;
// private TokenTypeTracker tokenTypeTracker;
private Boolean streamEnd; // set when the scorer reports end of input stream
/**
/// Creates a fully configured search manager (programmatic construction,
/// mirroring what newProperties does for configuration files).
///
/// @param linguist provides the search graph and grammar/language info
/// @param pruner used to prune the active list
/// @param scorer used to score the active list
/// @param activeListManager supplies the per-state-order active lists
/// @param showTokenCount whether to dump token counts each frame
/// @param relativeWordBeamWidth linear-domain relative beam width (converted to log)
/// @param growSkipInterval skip the grow step every N frames (0 = never)
/// @param checkStateOrder whether to validate state ordering during expansion
/// @param buildWordLattice whether to collect loser tokens for a word lattice
/// @param maxLatticeEdges cap on alternate-hypothesis lattice edges
/// @param acousticLookaheadFrames frames of simple acoustic lookahead (0 = off)
/// @param keepAllTokens whether to keep non-word tokens (needed for grammar-loop detection)
*/
public WordPruningBreadthFirstSearchManager(Linguist.Linguist linguist, IPruner pruner,
IAcousticScorer scorer, ActiveListManager activeListManager,
Boolean showTokenCount, double relativeWordBeamWidth,
int growSkipInterval,
Boolean checkStateOrder, Boolean buildWordLattice,
int maxLatticeEdges, float acousticLookaheadFrames,
Boolean keepAllTokens)
{
this.logMath = LogMath.getLogMath();
this.linguist = linguist;
this.pruner = pruner;
this.scorer = scorer;
this.activeListManager = activeListManager;
this._showTokenCount = showTokenCount;
this.growSkipInterval = growSkipInterval;
this._checkStateOrder = checkStateOrder;
// buildWordLattice and keepAllTokens are declared on the base
// TokenSearchManager class.
this.buildWordLattice = buildWordLattice;
this.maxLatticeEdges = maxLatticeEdges;
this.acousticLookaheadFrames = acousticLookaheadFrames;
this.keepAllTokens = keepAllTokens;
this.relativeBeamWidth = logMath.linearToLog(relativeWordBeamWidth);
}
// Parameterless constructor for configuration-driven instantiation;
// fields are populated later via newProperties.
public WordPruningBreadthFirstSearchManager()
{
}
/*
/// (non-Javadoc)
*
/// @see edu.cmu.sphinx.util.props.Configurable#newProperties(edu.cmu.sphinx.util.props.PropertySheet)
*/
public override void newProperties(PropertySheet ps)
{
// NOTE(review): this override does not call base.newProperties and does not
// read buildWordLattice/keepAllTokens here — confirm the base
// TokenSearchManager configures them.
logMath = LogMath.getLogMath();
linguist = (Linguist.Linguist)ps.getComponent(PROP_LINGUIST);
pruner = (IPruner)ps.getComponent(PROP_PRUNER);
scorer = (IAcousticScorer)ps.getComponent(PROP_SCORER);
activeListManager = (ActiveListManager)ps.getComponent(PROP_ACTIVE_LIST_MANAGER);
_showTokenCount = ps.getBoolean(PROP_SHOW_TOKEN_COUNT);
growSkipInterval = ps.getInt(PROP_GROW_SKIP_INTERVAL);
_checkStateOrder = ps.getBoolean(PROP_CHECK_STATE_ORDER);
maxLatticeEdges = ps.getInt(PROP_MAX_LATTICE_EDGES);
acousticLookaheadFrames = ps.getFloat(PROP_ACOUSTIC_LOOKAHEAD_FRAMES);
relativeBeamWidth = logMath.linearToLog(ps.getDouble(PROP_RELATIVE_BEAM_WIDTH));
}
/*
/// (non-Javadoc)
*
/// @see edu.cmu.sphinx.decoder.search.SearchManager#allocate()
*/
override public void allocate()
{
// tokenTracker = new TokenTracker();
// tokenTypeTracker = new TokenTypeTracker();
// Set up instrumentation, then allocate the configured subcomponents.
scoreTimer = TimerPool.getTimer(this, "Score");
pruneTimer = TimerPool.getTimer(this, "Prune");
growTimer = TimerPool.getTimer(this, "Grow");
totalTokensScored = StatisticsVariable.getStatisticsVariable("totalTokensScored");
curTokensScored = StatisticsVariable.getStatisticsVariable("curTokensScored");
tokensCreated = StatisticsVariable.getStatisticsVariable("tokensCreated");
try
{
linguist.allocate();
pruner.allocate();
scorer.allocate();
}
catch (IOException e)
{
throw new SystemException("Allocation of search manager resources failed", e);
}
}
/*
/// (non-Javadoc)
*
/// @see edu.cmu.sphinx.decoder.search.SearchManager#deallocate()
*/
// Releases subcomponents in reverse order of allocation.
// NOTE(review): unlike allocate(), this is not marked override — confirm
// the base class declaration this is meant to satisfy.
public void deallocate()
{
try
{
scorer.deallocate();
pruner.deallocate();
linguist.deallocate();
}
catch (IOException e)
{
throw new SystemException("Deallocation of search manager resources failed", e);
}
}
/// <summary>
/// Called at the start of recognition. Gets the search manager ready to recognize
/// by starting all subcomponents and initializing the local search state.
/// </summary>
override public void startRecognition()
{
linguist.startRecognition();
pruner.startRecognition();
scorer.startRecognition();
localStart();
}
/**
/// Performs the recognition for the given number of frames.
*
/// @param nFrames the number of frames to recognize
/// @return the current result, or null if the input stream ended
*/
override public Results.Result recognize(int nFrames)
{
Boolean done = false;
Results.Result result = null;
streamEnd = false;
// Process frames until the requested count is reached or a final
// result is produced.
for (int i = 0; i < nFrames && !done; i++)
{
done = recognize();
}
// No result is returned when the scorer signalled end-of-stream.
if (!streamEnd)
{
result = new Results.Result(loserManager, activeList,
resultList, currentFrameNumber, done, linguist.getSearchGraph().getWordTokenFirst());
}
// tokenTypeTracker.show();
if (_showTokenCount)
{
showTokenCount();
}
return result;
}
// Processes a single frame: score, prune, and (unless skipped this frame)
// grow emitting and non-emitting branches.
// Returns true when recognition is complete (no more frames to score).
private Boolean recognize()
{
activeList = activeListManager.getEmittingList();
Boolean more = scoreTokens();
if (more)
{
pruneBranches();
currentFrameNumber++;
// growSkipInterval == 0 disables skipping entirely.
if (growSkipInterval == 0 || (currentFrameNumber % growSkipInterval) != 0)
{
clearCollectors();
growEmittingBranches();
growNonEmittingBranches();
}
}
return !more;
}
/**
/// Clears lists and maps before next expansion stage
*/
private void clearCollectors()
{
resultList = new List<Token>();
createBestTokenMap();
activeListManager.clearEmittingList();
}
/**
/// Creates a new best-token map sized to the current active list.
/// Called once per frame (via clearCollectors) so that best-token lookups
/// never see stale entries from a previous frame.
*/
protected void createBestTokenMap()
{
    // ~10 hash slots per active token keeps the table sparse.
    int mapSize = activeList.size() * 10;
    if (mapSize == 0)
    {
        mapSize = 1;
    }
    bestTokenMap = new Dictionary<Object, Token>(mapSize);
    // NOTE(review): the original port iterated the freshly created (and
    // therefore empty) dictionary, setting each token's score to 0.3f.
    // That loop could never execute and has been removed as dead code.
}
/** Terminates a recognition: stops local state, then all subcomponents. */
override public void stopRecognition()
{
localStop();
scorer.stopRecognition();
pruner.stopRecognition();
linguist.stopRecognition();
}
/** Gets the initial grammar node from the linguist and creates a GrammarNodeToken,
 * then grows the initial non-emitting branches so the first frame can be scored. */
protected void localStart()
{
ISearchState state = null; // placeholder removed — see below
}
/**
/// Calculate the acoustic scores for the active list. The active list should contain only emitting tokens.
*
/// @return <code>true</code> if there are more frames to score, otherwise, false
*/
protected Boolean scoreTokens()
{
Boolean moreTokens;
scoreTimer.start();
IData data = scorer.calculateScores(activeList.getTokens().ConvertAll(x => (IScoreable)x));
scoreTimer.stop();
Token bestToken = null;
// The scorer returns the best token, or null at end of stream.
if (data is Token)
{
bestToken = (Token)data;
}
else if (data == null)
{
streamEnd = true;
}
moreTokens = (bestToken != null);
activeList.setBestToken(bestToken);
//monitorWords(activeList);
monitorStates(activeList);
// System.out.println("BEST " + bestToken);
curTokensScored.value += activeList.size();
totalTokensScored.value += activeList.size();
return moreTokens;
}
/**
/// Keeps track of and reports all of the active word histories for the given active list.
/// Currently fully disabled (body commented out in the original port).
*
/// @param activeList the active list to track
*/
private void monitorWords(ActiveList activeList)
{
// WordTracker tracker1 = new WordTracker(currentFrameNumber);
//
// for (Token t : activeList) {
// tracker1.add(t);
// }
// tracker1.dump();
//
// TokenTracker tracker2 = new TokenTracker();
//
// for (Token t : activeList) {
// tracker2.add(t);
// }
// tracker2.dumpSummary();
// tracker2.dumpDetails();
//
// TokenTypeTracker tracker3 = new TokenTypeTracker();
//
// for (Token t : activeList) {
// tracker3.add(t);
// }
// tracker3.dump();
// StateHistoryTracker tracker4 = new StateHistoryTracker(currentFrameNumber);
// for (Token t : activeList) {
// tracker4.add(t);
// }
// tracker4.dump();
}
/**
/// Keeps track of and reports statistics about the number of active states
*
/// @param activeList the active list of states
*/
private void monitorStates(ActiveList activeList)
{
tokenSum += activeList.size();
tokenCount++;
// Report a running average once every 1000 frames.
if ((tokenCount % 1000) == 0)
{
Trace.WriteLine("Average Tokens/State: " + (tokenSum / tokenCount));
}
}
/** Removes unpromising branches from the active list */
protected void pruneBranches()
{
pruneTimer.start();
activeList = pruner.prune(activeList);
pruneTimer.stop();
}
/**
/// Gets the best token for this state, or null when no token has yet been
/// recorded for the state's key in the current frame.
*
/// @param state the state of interest
/// @return the best token, or null if none
*/
protected Token getBestToken(ISearchState state)
{
    Object key = getStateKey(state);
    // The Java original used HashMap.get(), which returns null for a missing
    // key; the C# Dictionary indexer throws KeyNotFoundException instead,
    // breaking the null check in collectSuccessorTokens. Use TryGetValue to
    // restore the intended null-on-miss semantics.
    Token best;
    bestTokenMap.TryGetValue(key, out best);
    return best;
}
/**
/// Sets the best token for a given state (insert or replace).
*
/// @param token the best token
/// @param state the state
*/
protected void setBestToken(Token token, ISearchState state)
{
    Object key = getStateKey(state);
    // Use the indexer for Java HashMap.put semantics (insert-or-replace).
    // Dictionary.Add would throw ArgumentException if the key were already
    // present, which put never does.
    bestTokenMap[key] = token;
}
/**
/// Returns the state key for the given state. This key is used
/// to store bestToken into the bestToken map. All tokens with
/// the same key are basically shared. This method adds flexibility in
/// search.
///
/// For example this key will allow HMM states that have identical word
/// histories and are in the same HMM state to be treated equivalently.
/// When used as the best token key, only the best scoring token with a
/// given word history survives per HMM.
/// <pre>
/// Boolean equal = hmmSearchState.getLexState().equals(
/// other.hmmSearchState.getLexState())
/// && hmmSearchState.getWordHistory().equals(
/// other.hmmSearchState.getWordHistory());
/// </pre>
///
/// @param state
/// the state to get the key for
/// @return the key for the given state (the state itself in this base implementation)
*/
protected Object getStateKey(ISearchState state)
{
return state;
}
/** Checks that the given two states are in legitimate order; throws on violation.
/// Transitions out of the highest state order (wrap-around to the next frame)
/// are always allowed.
/// @param fromState
/// @param toState*/
private void checkStateOrder(ISearchState fromState, ISearchState toState)
{
if (fromState.getOrder() == numStateOrder - 1)
{
return;
}
if (fromState.getOrder() > toState.getOrder())
{
throw new Exception("IllegalState order: from "
+ fromState.GetType().Name + ' '
+ fromState.toPrettyString()
+ " order: " + fromState.getOrder()
+ " to "
+ toState.GetType().Name + ' '
+ toState.toPrettyString()
+ " order: " + toState.getOrder());
}
}
/**
/// Collects the next set of emitting tokens from a token and accumulates them in the active or result lists
*
/// @param token the token to collect successors from be immediately expanded are placed. Null if we should always
/// expand all nodes.
*/
protected void collectSuccessorTokens(Token token)
{
// tokenTracker.add(token);
// tokenTypeTracker.add(token);
// If this is a final state, add it to the final list
if (token.isFinal())
{
resultList.Add(getResultListPredecessor(token));
return;
}
// if this is a non-emitting token and we've already
// visited the same state during this frame, then we
// are in a grammar loop, so we don't continue to expand.
// This check only works properly if we have kept all of the
// tokens (instead of skipping the non-word tokens).
// Note that certain linguists will never generate grammar loops
// (lextree linguist for example). For these cases, it is perfectly
// fine to disable this check by setting keepAllTokens to false
if (!token.isEmitting() && (keepAllTokens && isVisited(token)))
{
return;
}
ISearchState state = token.getSearchState();
ISearchStateArc[] arcs = state.getSuccessors();
Token predecessor = getResultListPredecessor(token);
// For each successor
// calculate the entry score for the token based upon the
// predecessor token score and the transition probabilities
// if the score is better than the best score encountered for
// the SearchState and frame then create a new token, add
// it to the lattice and the SearchState.
// If the token is an emitting token add it to the list,
// otherwise recursively collect the new tokens successors.
foreach (ISearchStateArc arc in arcs)
{
ISearchState nextState = arc.getState();
if (_checkStateOrder)
{
checkStateOrder(state, nextState);
}
// We're actually multiplying the variables, but since
// these come in log(), multiply gets converted to add
float logEntryScore = token.getScore() + arc.getProbability();
Token bestToken = getBestToken(nextState);
//
if (bestToken == null)
{
Token newBestToken = new Token(predecessor, nextState, logEntryScore, arc.getInsertionProbability(),
arc.getLanguageProbability(), currentFrameNumber);
tokensCreated.value++;
setBestToken(newBestToken, nextState);
activeListAdd(newBestToken);
}
else if (bestToken.getScore() < logEntryScore)
{
// System.out.println("Updating " + bestToken + " with " +
// newBestToken);
Token oldPredecessor = bestToken.getPredecessor();
bestToken.update(predecessor, nextState, logEntryScore, arc.getInsertionProbability(),
arc.getLanguageProbability(), currentFrameNumber);
if (buildWordLattice && nextState is IWordSearchState)
{
loserManager.addAlternatePredecessor(bestToken, oldPredecessor);
}
}
else if (buildWordLattice && nextState is IWordSearchState)
{
if (predecessor != null)
{
loserManager.addAlternatePredecessor(bestToken, predecessor);
}
}
}
}
/**
/// Determines whether or not we've visited the state associated with this token since the previous frame.
*
/// @param t
/// @return true if we've visited the search state since the last frame
*/
private Boolean isVisited(Token t)
{
ISearchState curState = t.getSearchState();
t = t.getPredecessor();
while (t != null && !t.isEmitting())
{
if (curState.Equals(t.getSearchState()))
{
Trace.WriteLine("CS " + curState + " match " + t.getSearchState());
return true;
}
t = t.getPredecessor();
}
return false;
}
/// <summary>
/// Adds the given token to the active list by delegating to the active list manager.
/// </summary>
/// <param name="token">the token to add</param>
protected void activeListAdd(Token token)
{
activeListManager.add(token);
}
//protected void activeListReplace(Token old, Token newToken)
//{
// activeListManager.replace(old, newToken);
//}
/// <summary>
/// Determines if the given token should be expanded. Currently always
/// returns true, i.e. the filtering hook is disabled.
/// </summary>
/// <param name="t">the token to test</param>
/// <returns><code>true</code> if the token should be expanded</returns>
protected Boolean allowExpansion(Token t)
{
return true; // currently disabled
}
/// <summary>
/// Counts all the tokens reachable from the active list and from the result
/// list (following every predecessor chain) and displays the totals. This is
/// an expensive, debugging-only operation.
/// </summary>
private void showTokenCount()
{
    HashSet<Token> tokenSet = new HashSet<Token>();
    foreach (Token token in activeList.getTokens())
    {
        // A C# foreach iteration variable is read-only, so walk the
        // predecessor chain with a local cursor. (The original port left the
        // advance step commented out, which made this loop spin forever.)
        Token current = token;
        while (current != null)
        {
            tokenSet.Add(current);
            current = current.getPredecessor();
        }
    }
    Trace.WriteLine("Token Lattice size: " + tokenSet.Count);
    tokenSet = new HashSet<Token>();
    foreach (Token token in resultList)
    {
        Token current = token;
        while (current != null)
        {
            tokenSet.Add(current);
            current = current.getPredecessor();
        }
    }
    Trace.WriteLine("Result Lattice size: " + tokenSet.Count);
}
/// <summary>
/// Returns the ActiveList currently used by this search manager.
/// </summary>
/// <returns>the ActiveList</returns>
public ActiveList getActiveList()
{
return activeList;
}
/// <summary>
/// Sets the ActiveList to be used by this search manager.
/// </summary>
/// <param name="activeList">the new ActiveList</param>
public void setActiveList(ActiveList activeList)
{
this.activeList = activeList;
}
/// <summary>
/// Returns the list of result tokens collected so far.
/// </summary>
/// <returns>the result list</returns>
public List<Token> getResultList()
{
return resultList;
}
/// <summary>
/// Sets the list of result tokens.
/// </summary>
/// <param name="resultList">the new result list</param>
public void setResultList(List<Token> resultList)
{
this.resultList = resultList;
}
/// <summary>
/// Returns the frame number currently being processed.
/// </summary>
/// <returns>the current frame number</returns>
public int getCurrentFrameNumber()
{
return currentFrameNumber;
}
/// <summary>
/// Returns the Timer used to measure the grow (token expansion) step.
/// </summary>
/// <returns>the Timer for growing</returns>
public Timer getGrowTimer()
{
return growTimer;
}
/// <summary>
/// Returns the statistics variable that counts how many tokens have been created.
/// </summary>
/// <returns>the tokensCreated StatisticsVariable</returns>
public StatisticsVariable getTokensCreated()
{
return tokensCreated;
}
}
}
<|start_filename|>Syn.Speech/Decoder/Search/SimpleActiveList.cs<|end_filename|>
using System.Collections.Generic;
using Syn.Speech.Common;
//PATROLLED
namespace Syn.Speech.Decoder.Search
{
/// <summary>
/// An active list that tries to be simple and correct. This type of active
/// list will be slow, but should exhibit correct behavior; faster variants
/// exist elsewhere. Not thread safe — use from a single thread only. All
/// scores are maintained in the LogMath log domain.
/// </summary>
public class SimpleActiveList : ActiveList
{
    private int _absoluteBeamWidth = 2000;
    private float _logRelativeBeamWidth;
    private Token _bestToken;
    private List<Token> _tokens = new List<Token>();

    /// <summary>
    /// Creates an empty active list.
    /// </summary>
    /// <param name="absoluteBeamWidth">the absolute beam width</param>
    /// <param name="logRelativeBeamWidth">the relative beam width (in the log domain)</param>
    public SimpleActiveList(int absoluteBeamWidth,
        float logRelativeBeamWidth)
    {
        _absoluteBeamWidth = absoluteBeamWidth;
        _logRelativeBeamWidth = logRelativeBeamWidth;
    }

    /// <summary>
    /// Adds the given token to the list, tracking the best-scoring token seen so far.
    /// </summary>
    /// <param name="token">the token to add</param>
    public override void add(Token token)
    {
        _tokens.Add(token);
        bool isNewBest = _bestToken == null || token.getScore() > _bestToken.getScore();
        if (isNewBest)
        {
            _bestToken = token;
        }
    }

    /// <summary>
    /// Replaces an old token with a new token.
    /// </summary>
    /// <param name="oldToken">the token to replace (or null, in which case this behaves like add)</param>
    /// <param name="newToken">the new token to be placed in the list</param>
    public void replace(Token oldToken, Token newToken)
    {
        add(newToken);
        if (oldToken != null && !_tokens.Remove(oldToken))
        {
            // "Should never happen": the token we were asked to replace was
            // not present. Kept silent, matching the original behavior.
        }
    }

    /// <summary>
    /// Purges excess members: if the list has grown past the absolute beam
    /// width, keeps only the best-scoring tokens.
    /// </summary>
    /// <returns>a (possibly new) active list</returns>
    public override ActiveList purge()
    {
        bool overBeam = _absoluteBeamWidth > 0 && _tokens.Count > _absoluteBeamWidth;
        if (overBeam)
        {
            _tokens.Sort(new ScoreableComparator());
            _tokens = _tokens.GetRange(0, _absoluteBeamWidth);
        }
        return this;
    }

    /// <summary>
    /// Retrieves an enumerator over the tokens in this list.
    /// </summary>
    /// <returns>the enumerator for this token list</returns>
    public IEnumerator<Token> iterator()
    {
        return _tokens.GetEnumerator();
    }

    /// <summary>
    /// Gets the set of all tokens.
    /// </summary>
    /// <returns>the set of tokens</returns>
    public override List<Token> getTokens()
    {
        return _tokens;
    }

    /// <summary>
    /// Returns the number of tokens on this active list.
    /// </summary>
    /// <returns>the size of the active list</returns>
    public override int size()
    {
        return _tokens.Count;
    }

    /// <summary>
    /// Gets the beam threshold based upon the best scoring token.
    /// </summary>
    /// <returns>the beam threshold</returns>
    public override float getBeamThreshold()
    {
        return getBestScore() + _logRelativeBeamWidth;
    }

    /// <summary>
    /// Gets the best score in the list (or -float.MaxValue if the list has no best token).
    /// </summary>
    /// <returns>the best score</returns>
    public override float getBestScore()
    {
        return _bestToken != null ? _bestToken.getScore() : -float.MaxValue;
    }

    /// <summary>
    /// Sets the best scoring token for this active list.
    /// </summary>
    /// <param name="token">the best scoring token</param>
    public override void setBestToken(Token token)
    {
        _bestToken = token;
    }

    /// <summary>
    /// Gets the best scoring token for this active list.
    /// </summary>
    /// <returns>the best scoring token</returns>
    public override Token getBestToken()
    {
        return _bestToken;
    }

    /// <summary>
    /// Creates a fresh, empty list configured with the same beam widths.
    /// </summary>
    public override ActiveList newInstance()
    {
        return new SimpleActiveList(_absoluteBeamWidth, _logRelativeBeamWidth);
    }
}
}
<|start_filename|>Syn.Speech/Decoder/Search/ISearchManager.cs<|end_filename|>
using Syn.Speech.Common;
using Syn.Speech.Util.Props;
namespace Syn.Speech.Decoder.Search
{
public interface ISearchManager: IConfigurable
{
/// <summary>
/// Allocates the resources necessary for this search. This should be called once before any recognitions are
/// performed.
/// </summary>
void allocate();
/// <summary>
/// Deallocates resources necessary for this search. This should be called once after all recognitions are completed
/// and the search manager is no longer needed.
/// </summary>
void deallocate();
/// <summary>
/// Prepares the SearchManager for recognition. This method must be called before recognize
/// is called. Typically, startRecognition and stopRecognition are called bracketing an utterance.
/// </summary>
void startRecognition();
/// <summary>
/// Performs post-recognition cleanup. This method should be called after recognize returns a final result.
/// </summary>
void stopRecognition();
/// <summary>
/// Performs recognition. Processes no more than the given number of frames before returning. This method returns a
/// partial result after nFrames have been processed, or a final result if recognition completes while processing
/// frames. If a final result is returned, the actual number of frames processed can be retrieved from the result.
/// This method may block while waiting for frames to arrive.
/// </summary>
/// <param name="nFrames">the maximum number of frames to process. A final result may be returned before all nFrames are processed.</param>
/// <returns>the recognition result; the result may be a partial or a final result, or null if no frames have
/// arrived</returns>
Results.Result recognize(int nFrames);
}
}
<|start_filename|>Syn.Speech/Result/Nbest.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
namespace Syn.Speech.Result
{
/// <summary>
/// N-best list extraction over a word lattice using A* search. Partial paths
/// are expanded best-first, ordered by their forward score plus the reached
/// node's backward score (an estimate of the remaining cost to the terminal).
/// </summary>
public class Nbest
{
protected Lattice lattice;
public Nbest(Lattice lattice)
{
this.lattice = lattice;
}
/// <summary>
/// Returns up to n best word sequences found in the lattice.
/// NOTE(review): results are accumulated in a HashSet, so the order of the
/// returned list is not guaranteed to be best-first — confirm callers do
/// not rely on ordering.
/// </summary>
/// <param name="n">the maximum number of hypotheses to return</param>
/// <returns>the list of distinct hypotheses</returns>
public List<String> getNbest(int n)
{
lattice.computeNodePosteriors(1.0f);
HashSet<String> result = new HashSet<String>();
// Bounded queue keeps only the n best open paths at any time.
BoundedPriorityQueue<NBestPath> queue =
new BoundedPriorityQueue<NBestPath>(n);
queue.add(new NBestPath("<s>", lattice.getInitialNode(), 0, 0));
// Pop the best open path; if it reached the terminal node it is a
// complete hypothesis, otherwise expand it along all leaving edges.
while (result.Count < n && queue.size() > 0)
{
NBestPath path = queue.poll();
if (path.node.Equals(lattice.terminalNode)) {
result.Add(path.path);
continue;
}
foreach (Edge e in path.node.getLeavingEdges())
{
Node newNode = e.getToNode();
double newForwardScore = path.forwardScore
+ e.getAcousticScore() + e.getLMScore();
double newScore = newForwardScore + newNode.getBackwardScore();
String newPathString = getNewPathString(path, newNode);
NBestPath newPath = new NBestPath(newPathString, newNode, newScore, newForwardScore);
queue.add(newPath);
}
// printQueue(queue);
}
return result.ToList();
}
/// <summary>
/// Extends a path's word string with the given node's word: appends
/// "&lt;/s&gt;" for the sentence-end word, skips fillers, otherwise
/// appends the word itself.
/// </summary>
private String getNewPathString(NBestPath path, Node newNode) {
String newPathString;
if (newNode.getWord().isSentenceEndWord())
newPathString = path.path + " </s>";
else if (newNode.getWord().isFiller())
newPathString = path.path;
else
newPathString = path.path + " " + newNode.getWord();
return newPathString;
}
/// <summary>
/// Debugging helper: dumps the current queue contents to the trace log.
/// </summary>
private void printQueue(BoundedPriorityQueue<NBestPath> queue)
{
Trace.WriteLine("");
foreach (NBestPath p in queue)
{
Trace.WriteLine(p);
}
}
}
/// <summary>
/// A single partial path in the A* n-best search: the word string so far,
/// the lattice node reached, the total score used for queue ordering, and
/// the forward score alone.
/// </summary>
class NBestPath : IComparable<NBestPath>
{
    public String path;
    public Node node;
    double score;
    public double forwardScore;

    public NBestPath(String path, Node node, double score, double forwardScore)
        : base()
    {
        this.path = path;
        this.node = node;
        this.score = score;
        this.forwardScore = forwardScore;
    }

    /// <summary>
    /// Orders paths by total score, ascending.
    /// </summary>
    public int compareTo(NBestPath o)
    {
        return score.CompareTo(o.score);
    }

    // Explicit interface implementation delegates to the Java-style method.
    int IComparable<NBestPath>.CompareTo(NBestPath other)
    {
        return compareTo(other);
    }

    public override String ToString()
    {
        return path + " [" + score + ',' + forwardScore + ']';
    }
}
}
<|start_filename|>Syn.Speech/Api/SpeechResult.cs<|end_filename|>
using System;
using System.Collections.Generic;
using Syn.Speech.Common;
using Syn.Speech.Result;
//PATROLLED
namespace Syn.Speech.Api
{
/// <summary>
/// High-level wrapper for an <see cref="IResult"/> instance: exposes the best
/// hypothesis, timed words, n-best lists and an optimized word lattice.
/// </summary>
public sealed class SpeechResult : ISpeechResult
{
    private readonly IResult _result;
    private readonly Lattice _lattice;

    /// <summary>
    /// Initializes a new instance of the <see cref="SpeechResult"/> class,
    /// building and optimizing a lattice for the given result.
    /// </summary>
    /// <param name="result">Recognition result returned by <see cref="Recognizer.Recognizer"/>.</param>
    public SpeechResult(IResult result)
    {
        _result = result;
        _lattice = new Lattice(result);
        new LatticeOptimizer(_lattice).optimize();
    }

    /// <summary>
    /// Returns the words of the recognition result, ordered by time frame.
    /// </summary>
    /// <returns>words that form the result</returns>
    public List<WordResult> getWords()
    {
        return _result.getTimedBestResult(true);
    }

    /// <summary>
    /// Returns the string representation of the result, with fillers removed.
    /// </summary>
    public String getHypothesis()
    {
        return _result.getBestResultNoFiller();
    }

    /// <summary>
    /// Returns the N best hypotheses computed over the lattice.
    /// </summary>
    /// <param name="n">number of hypotheses to return</param>
    /// <returns>list of several best hypotheses</returns>
    public List<String> getNbest(int n)
    {
        return new Nbest(_lattice).getNbest(n);
    }

    /// <summary>
    /// Returns the lattice built for this recognition result.
    /// </summary>
    public Lattice getLattice()
    {
        return _lattice;
    }

    /// <summary>
    /// Returns the underlying Result object of this SpeechResult.
    /// </summary>
    public IResult getResult()
    {
        return _result;
    }
}
}
<|start_filename|>Syn.Speech/FrontEnds/Denoise/Denoise.cs<|end_filename|>
using System;
using Syn.Speech.Common.FrontEnd;
using Syn.Speech.Util.Props;
namespace Syn.Speech.FrontEnd.Denoise
{
/// <summary>
/// The noise filter, same as implemented in sphinxbase/sphinxtrain/pocketsphinx.
///
/// The noise removal algorithm is inspired by "Computationally Efficient
/// Speech Enhancement by Spectral Minima Tracking" and "Power-Normalized
/// Cepstral Coefficients (PNCC) for Robust Speech Recognition". For recent
/// research and state of the art, see papers about IMCRA and MMSE-based
/// noise reduction on mel-frequency cepstra.
/// </summary>
public class Denoise : BaseDataProcessor
{
    // Running spectral statistics; (re)initialized at each DataStartSignal.
    double[] power;
    double[] noise;
    double[] floor;
    double[] peak;

    /** Smoothing factor for the running power estimate. */
    [S4Double(defaultValue = 0.7)]
    public static String LAMBDA_POWER = "lambdaPower";
    double lambdaPower;

    /** Upward envelope-tracking factor. */
    [S4Double(defaultValue = 0.999)]
    public static String LAMBDA_A = "lambdaA";
    double lambdaA;

    /** Downward envelope-tracking factor. */
    [S4Double(defaultValue = 0.5)]
    public static String LAMBDA_B = "lambdaB";
    double lambdaB;

    /** Temporal-masking decay factor. */
    [S4Double(defaultValue = 0.85)]
    public static String LAMBDA_T = "lambdaT";
    double lambdaT;

    /** Temporal-masking attenuation factor. */
    [S4Double(defaultValue = 0.2)]
    public static String MU_T = "muT";
    double muT;

    /** Signal-to-noise ratio below which the signal is clamped to the floor. */
    [S4Double(defaultValue = 2.0)]
    public static String EXCITATION_THRESHOLD = "excitationThreshold";
    double excitationThreshold;

    /** Maximum (and reciprocal minimum) gain applied per channel. */
    [S4Double(defaultValue = 20.0)]
    public static String MAX_GAIN = "maxGain";
    double maxGain;

    /** Half-width of the gain-smoothing window, in channels. */
    [S4Integer(defaultValue = 4)]
    public static String SMOOTH_WINDOW = "smoothWindow";
    int smoothWindow;

    // Guard against division by zero when computing the gain.
    static double EPS = 1e-10;

    public Denoise(double lambdaPower, double lambdaA, double lambdaB,
        double lambdaT, double muT, double excitationThreshold,
        double maxGain, int smoothWindow)
    {
        this.lambdaPower = lambdaPower;
        this.lambdaA = lambdaA;
        this.lambdaB = lambdaB;
        this.lambdaT = lambdaT;
        this.muT = muT;
        this.excitationThreshold = excitationThreshold;
        this.maxGain = maxGain;
        this.smoothWindow = smoothWindow;
    }

    public Denoise()
    {
    }

    /// <summary>
    /// Reads all filter parameters from the property sheet.
    /// </summary>
    override
    public void newProperties(PropertySheet ps)
    {
        base.newProperties(ps);
        lambdaPower = ps.getDouble(LAMBDA_POWER);
        lambdaA = ps.getDouble(LAMBDA_A);
        lambdaB = ps.getDouble(LAMBDA_B);
        lambdaT = ps.getDouble(LAMBDA_T);
        muT = ps.getDouble(MU_T);
        excitationThreshold = ps.getDouble(EXCITATION_THRESHOLD);
        maxGain = ps.getDouble(MAX_GAIN);
        smoothWindow = ps.getInt(SMOOTH_WINDOW);
    }

    /// <summary>
    /// Pulls one frame from the predecessor and denoises it in place.
    /// Non-DoubleData frames pass through unchanged; a DataStartSignal
    /// resets the running statistics.
    /// </summary>
    public IData getData()
    {
        IData inputData = getPredecessor().getData();

        if (inputData is DataStartSignal)
        {
            // New utterance: drop all accumulated statistics.
            power = null;
            noise = null;
            floor = null;
            peak = null;
            return inputData;
        }
        if (!(inputData is DoubleData))
        {
            return inputData;
        }

        DoubleData inputDoubleData = (DoubleData)inputData;
        double[] input = inputDoubleData.getValues();
        int length = input.Length;

        if (power == null)
            initStatistics(input, length);

        updatePower(input);
        estimateEnvelope(power, noise);

        // Spectral subtraction: clamp negative differences to zero.
        double[] signal = new double[length];
        for (int i = 0; i < length; i++)
        {
            signal[i] = Math.Max(power[i] - noise[i], 0.0);
        }

        estimateEnvelope(signal, floor);
        tempMasking(signal);
        powerBoosting(signal);

        // Per-channel gain, limited to [1/maxGain, maxGain].
        double[] gain = new double[length];
        for (int i = 0; i < length; i++)
        {
            gain[i] = signal[i] / (power[i] + EPS);
            gain[i] = Math.Min(Math.Max(gain[i], 1.0 / maxGain), maxGain);
        }
        double[] smoothGain = smooth(gain);
        for (int i = 0; i < length; i++)
        {
            input[i] *= smoothGain[i];
        }

        return inputData;
    }

    /// <summary>
    /// Moving-average smoothing of the gain across neighboring channels.
    /// </summary>
    private double[] smooth(double[] gain)
    {
        double[] result = new double[gain.Length];
        for (int i = 0; i < gain.Length; i++)
        {
            int start = Math.Max(i - smoothWindow, 0);
            int end = Math.Min(i + smoothWindow + 1, gain.Length);
            double sum = 0.0;
            for (int j = start; j < end; j++)
            {
                sum += gain[j];
            }
            result[i] = sum / (end - start);
        }
        return result;
    }

    /// <summary>
    /// Clamps the signal to the floor envelope, also when it falls below the
    /// excitation threshold relative to the noise estimate.
    /// </summary>
    private void powerBoosting(double[] signal)
    {
        for (int i = 0; i < signal.Length; i++)
        {
            if (signal[i] < floor[i])
                signal[i] = floor[i];
            if (signal[i] < excitationThreshold * noise[i])
                signal[i] = floor[i];
        }
    }

    /// <summary>
    /// Applies temporal masking: a decaying per-channel peak suppresses
    /// values that drop quickly after a strong excitation.
    /// </summary>
    private void tempMasking(double[] signal)
    {
        for (int i = 0; i < signal.Length; i++)
        {
            double _in = signal[i];
            peak[i] *= lambdaT;
            if (signal[i] < lambdaT * peak[i])
                signal[i] = peak[i] * muT;
            if (_in > peak[i])
                peak[i] = _in;
        }
    }

    /// <summary>
    /// Exponentially smooths the running power estimate with the new frame.
    /// </summary>
    private void updatePower(double[] input)
    {
        for (int i = 0; i < input.Length; i++)
        {
            power[i] = lambdaPower * power[i] + (1 - lambdaPower) * input[i];
        }
    }

    /// <summary>
    /// Tracks an envelope of the signal: rises with factor lambdaA, falls
    /// with factor lambdaB.
    /// </summary>
    private void estimateEnvelope(double[] signal, double[] envelope)
    {
        for (int i = 0; i < signal.Length; i++)
        {
            if (signal[i] > envelope[i])
                envelope[i] = lambdaA * envelope[i] + (1 - lambdaA) * signal[i];
            else
                envelope[i] = lambdaB * envelope[i] + (1 - lambdaB) * signal[i];
        }
    }

    /// <summary>
    /// No previous data: initializes the statistics from the first frame.
    /// </summary>
    private void initStatistics(double[] input, int length)
    {
        // The arrays must be allocated before copying into them. The original
        // port called input.CopyTo(power, length) on still-null arrays (and
        // with the destination index wrong), which always threw. This mirrors
        // the upstream Java "power = input.clone(); noise = input.clone();".
        power = new double[length];
        noise = new double[length];
        input.CopyTo(power, 0);
        input.CopyTo(noise, 0);
        floor = new double[length];
        peak = new double[length];
        for (int i = 0; i < length; i++)
        {
            floor[i] = input[i] / maxGain;
        }
    }
}
}
<|start_filename|>Syn.Speech/Linguist/Language/NGram/ILanguageModel.cs<|end_filename|>
using System;
using System.Collections.Generic;
using Syn.Speech.Linguist.Dictionary;
using Syn.Speech.Util.Props;
namespace Syn.Speech.Linguist.Language.NGram
{
/// <summary>
/// Represents the generic interface to an N-Gram language model.
///
/// Note that all probabilities are in LogMath log base, except as otherwise
/// noted.
/// </summary>
public abstract class ILanguageModel : IConfigurable
{
/** The property specifying the location of the language model. */
[S4String(defaultValue = ".")]
public static String PROP_LOCATION = "location";
/** The property specifying the unigram weight. */
[S4Double(defaultValue = 1.0)]
public static String PROP_UNIGRAM_WEIGHT = "unigramWeight";
/**
/// The property specifying the maximum depth reported by the language model
/// (from a getMaxDepth()) call. If this property is set to (-1) (the
/// default) the language model reports the implicit depth of the model.
/// This property allows a deeper language model to be used. For instance, a
/// trigram language model could be used as a bigram model by setting this
/// property to 2. Note if this property is set to a value greater than the
/// implicit depth, the implicit depth is used. Legal values for this
/// property are 1..N and -1.
*/
[S4Integer(defaultValue = -1)]
public static String PROP_MAX_DEPTH = "maxDepth";
/** The property specifying the dictionary to use. */
[S4Component(type = typeof(IDictionary))]
public static String PROP_DICTIONARY = "dictionary";
/**
/// Creates (loads) the language model. Must be called before any queries.
*/
public abstract void allocate();
/**
/// Deallocates resources allocated to this language model.
*/
public abstract void deallocate();
/**
/// Gets the n-gram probability of the word sequence represented by the word
/// list.
*
/// @param wordSequence the wordSequence
/// @return the probability of the word sequence in LogMath log base
*/
public abstract float getProbability(WordSequence wordSequence);
/**
/// Gets the smear term for the given wordSequence. Used in
/// {@link LexTreeLinguist}. See
/// {@link LexTreeLinguist#PROP_WANT_UNIGRAM_SMEAR} for details.
*
/// @param wordSequence the word sequence
/// @return the smear term associated with this word sequence
*/
public abstract float getSmear(WordSequence wordSequence);
/**
/// Returns the set of words in the language model. The set is unmodifiable.
*
/// @return the unmodifiable set of words
*/
public abstract List<String> getVocabulary();
/**
/// Returns the maximum depth of the language model.
*
/// @return the maximum depth of the language model
*/
public abstract int getMaxDepth();
/** Configures this model from the given property sheet. */
public abstract void newProperties(PropertySheet ps);
}
}
<|start_filename|>Syn.Speech/Decoder/AbstractDecoder.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Diagnostics;
using Syn.Speech.Common;
using Syn.Speech.Decoder.Search;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder
{
/// <summary>
/// An abstract decoder which implements all functionality which is independent of the used decoding-paradigm (pull/push).
/// </summary>
public abstract class AbstractDecoder : IResultProducer,IConfigurable
{
/// <summary>
/// The property that defines the name of the search manager to use
/// </summary>
[S4Component(type=typeof(ISearchManager))]
public static String PROP_SEARCH_MANAGER = "searchManager";
protected ISearchManager searchManager=null;
/// <summary>
/// The property that defines the list of result listeners to register with this decoder
/// </summary>
[S4ComponentList(type = typeof(IResultListener))]
public static String PROP_RESULT_LISTENERS = "resultListeners";
protected List<IResultListener> resultListeners = new List<IResultListener>();
/// <summary>
/// If set to true the used search-manager will be automatically allocated
/// in <code>newProperties()</code>.
/// </summary>
[S4Boolean(defaultValue = false)]
public static String AUTO_ALLOCATE = "autoAllocate";
/// <summary>
/// If set to <code>false</code> the used search-manager all registered
/// result listeners will be notified only for final results. Per default
/// non-final results don't trigger notification, because in most
/// application the utterance final result will be sufficient.
/// </summary>
[S4Boolean(defaultValue = false)]
public static String FIRE_NON_FINAL_RESULTS = "fireNonFinalResults";
private Boolean fireNonFinalResults=false;
// Display name of this decoder; set from the property sheet instance name
// or from the runtime type name.
private String name;
public AbstractDecoder()
{
}
/// <summary>
/// Creates a decoder with the given configuration, bypassing the property sheet.
/// </summary>
/// <param name="searchManager">the search manager to use</param>
/// <param name="fireNonFinalResults">whether to notify listeners for non-final results too</param>
/// <param name="autoAllocate">if true, the search manager is allocated immediately</param>
/// <param name="resultListeners">listeners to register</param>
public AbstractDecoder(ISearchManager searchManager, Boolean fireNonFinalResults, Boolean autoAllocate, List<IResultListener> resultListeners)
{
String name = base.GetType().Name;
init( name, searchManager, fireNonFinalResults, autoAllocate, resultListeners);
}
/// <summary>
/// Decode frames until recognition is complete
/// </summary>
/// <param name="referenceText">the reference text (or null)</param>
/// <returns>a result</returns>
public abstract Results.Result decode(String referenceText);
// Explicit interface implementation: first initializes the shared decoder
// state from the property sheet, then forwards to the subclass's abstract
// newProperties so subclasses see already-initialized state.
void IConfigurable.newProperties(PropertySheet ps)
{
init(ps.InstanceName,
(ISearchManager)ps.getComponent(PROP_SEARCH_MANAGER),
ps.getBoolean(FIRE_NON_FINAL_RESULTS),
ps.getBoolean(AUTO_ALLOCATE),
ps.getComponentList<IResultListener>(PROP_RESULT_LISTENERS)
);
newProperties(ps);
}
/// <summary>
/// Subclass hook for additional property-sheet configuration.
/// </summary>
public abstract void newProperties(PropertySheet ps);
// Shared initialization used by both the configuration path and the
// programmatic constructor; optionally allocates the search manager.
private void init(String name, ISearchManager searchManager, Boolean fireNonFinalResults, Boolean autoAllocate, List<IResultListener> listeners)
{
this.name = name;
this.searchManager = searchManager;
this.fireNonFinalResults = fireNonFinalResults;
if (autoAllocate) {
searchManager.allocate();
}
foreach (IResultListener listener in listeners)
{
addResultListener(listener);
}
}
/// <summary>
/// Allocate resources necessary for decoding
/// </summary>
public void allocate()
{
searchManager.allocate();
}
/// <summary>
/// Deallocate resources
/// </summary>
public void deallocate()
{
searchManager.deallocate();
}
/// <summary>
/// Adds a result listener to this recognizer. A result listener is called whenever a new result is generated by the
/// recognizer. This method can be called in any state.
/// </summary>
/// <param name="resultListener">the listener to add</param>
public void addResultListener(IResultListener resultListener)
{
resultListeners.Add(resultListener);
}
/// <summary>
/// Removes a previously added result listener. This method can be called in any state.
/// </summary>
/// <param name="resultListener">the listener to remove</param>
public void removeResultListener(IResultListener resultListener)
{
resultListeners.Remove(resultListener);
}
/// <summary>
/// Fires new results as soon as they become available. Non-final results
/// are only forwarded when fireNonFinalResults is enabled.
/// </summary>
/// <param name="result">the new result</param>
protected void fireResultListeners(Results.Result result)
{
if (fireNonFinalResults || result.isFinal())
{
foreach (IResultListener resultListener in resultListeners)
{
resultListener.newResult(result);
}
}
else
{
Trace.WriteLine("skipping non-final result " + result);
}
}
/// <summary>
/// Returns the name of this decoder.
/// </summary>
public override String ToString()
{
return name;
}
}
}
<|start_filename|>Syn.Speech/Decoder/Search/SimpleActiveListFactory.cs<|end_filename|>
using System.Collections.Generic;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Search
{
/** A factory for simple active lists. */
public class SimpleActiveListFactory : ActiveListFactory
{
/**
/// Creates a factory with the given beam widths.
*
/// @param absoluteBeamWidth the maximum number of tokens kept per list
/// @param relativeBeamWidth the relative beam width (converted to the log domain by the base class)
*/
public SimpleActiveListFactory(int absoluteBeamWidth,
double relativeBeamWidth)
: base(absoluteBeamWidth, relativeBeamWidth)
{
;
}
public SimpleActiveListFactory()
{
}
/*
* (non-Javadoc)
*
* @see edu.cmu.sphinx.util.props.Configurable#newProperties(edu.cmu.sphinx.util.props.PropertySheet)
*/
public new void newProperties(PropertySheet ps)
{
base.newProperties(ps);
}
/*
* (non-Javadoc)
*
* @see edu.cmu.sphinx.decoder.search.ActiveListFactory#newInstance()
*/
public override ActiveList newInstance()
{
return new SimpleActiveList(absoluteBeamWidth, logRelativeBeamWidth);
}
/*
* NOTE(review): the comment below describes SimpleActiveList (see
* SimpleActiveList.cs), not this factory; it appears to have been
* misplaced during the port.
*
* An active list that tries to be simple and correct. This type of active list will be slow, but should exhibit
* correct behavior. Faster versions of the ActiveList exist (HeapActiveList, TreeActiveList).
* This class is not thread safe and should only be used by a single thread.
* Note that all scores are maintained in the LogMath log domain.
*/
}
}
<|start_filename|>Syn.Speech/Alignment/NumberExpander.cs<|end_filename|>
using System.Globalization;
using System.Runtime.InteropServices;
//PATROLLED
namespace Syn.Speech.Alignment
{
public class NumberExpander
{
private static readonly string[] digit2num = {"zero", "one", "two", "three",
"four", "five", "six", "seven", "eight", "nine"};
private static readonly string[] digit2teen = {"ten", /* shouldn't get called */
"eleven", "twelve", "thirteen", "fourteen", "fifteen", "sixteen",
"seventeen", "eighteen", "nineteen"};
private static readonly string[] digit2enty = {"zero", /* shouldn't get called */
"ten", "twenty", "thirty", "forty", "fifty", "sixty", "seventy", "eighty",
"ninety"};
private static readonly string[] ord2num = {"zeroth", "first", "second",
"third", "fourth", "fifth", "sixth", "seventh", "eighth", "ninth"};
private static readonly string[] ord2teen = {"tenth", /* shouldn't get called */
"eleventh", "twelfth", "thirteenth", "fourteenth", "fifteenth",
"sixteenth", "seventeenth", "eighteenth", "nineteenth"};
private static readonly string[] ord2enty = {"zeroth", /* shouldn't get called */
"tenth", "twentieth", "thirtieth", "fortieth", "fiftieth", "sixtieth",
"seventieth", "eightieth", "ninetieth"};
private static readonly string[] digit2Numness = {
"", "tens", "twenties", "thirties", "fourties", "fifties",
"sixties", "seventies", "eighties", "nineties"
};
//Unconstructable
private NumberExpander(){}
public static void expandNumber(string numberString, WordRelation wordRelation)
{
int numDigits = numberString.Length;
if (numDigits == 0) {
// wordRelation = null;
} else if (numDigits == 1) {
expandDigits(numberString, wordRelation);
} else if (numDigits == 2) {
expand2DigitNumber(numberString, wordRelation);
} else if (numDigits == 3) {
expand3DigitNumber(numberString, wordRelation);
} else if (numDigits < 7) {
expandBelow7DigitNumber(numberString, wordRelation);
} else if (numDigits < 10) {
expandBelow10DigitNumber(numberString, wordRelation);
} else if (numDigits < 13) {
expandBelow13DigitNumber(numberString, wordRelation);
} else {
expandDigits(numberString, wordRelation);
}
}
private static void expand2DigitNumber(string numberString, WordRelation wordRelation)
{
if (numberString[0] == '0') {
// numberString is "0X"
if (numberString[1] == '0') {
// numberString is "00", do nothing
} else {
// numberString is "01", "02" ...
string number = digit2num[numberString[1] - '0'];
wordRelation.addWord(number);
}
} else if (numberString[1] == '0') {
// numberString is "10", "20", ...
string number = digit2enty[numberString[0] - '0'];
wordRelation.addWord(number);
} else if (numberString[0] == '1') {
// numberString is "11", "12", ..., "19"
string number = digit2teen[numberString[1] - '0'];
wordRelation.addWord(number);
} else {
// numberString is "2X", "3X", ...
string enty = digit2enty[numberString[0] - '0'];
wordRelation.addWord(enty);
expandDigits(numberString.Substring(1, numberString.Length),
wordRelation);
}
}
private static void expand3DigitNumber(string numberString, WordRelation wordRelation)
{
if (numberString[0] == '0') {
expandNumberAt(numberString, 1, wordRelation);
} else {
string hundredDigit = digit2num[numberString[0] - '0'];
wordRelation.addWord(hundredDigit);
wordRelation.addWord("hundred");
expandNumberAt(numberString, 1, wordRelation);
}
}
private static void expandBelow7DigitNumber(string numberString, WordRelation wordRelation)
{
expandLargeNumber(numberString, "thousand", 3, wordRelation);
}
private static void expandBelow10DigitNumber(string numberString, WordRelation wordRelation)
{
expandLargeNumber(numberString, "million", 6, wordRelation);
}
private static void expandBelow13DigitNumber([In] string numberString, WordRelation wordRelation)
{
expandLargeNumber(numberString, "billion", 9, wordRelation);
}
private static void expandLargeNumber(string numberString, string order, int numberZeroes, WordRelation wordRelation)
{
int numberDigits = numberString.Length;
// parse out the prefix, e.g., "113" in "113,000"
int i = numberDigits - numberZeroes;
string part = numberString.Substring(0, i);
// get how many thousands/millions/billions
Item oldTail = wordRelation.getTail();
expandNumber(part, wordRelation);
if (wordRelation.getTail() != oldTail) {
wordRelation.addWord(order);
}
expandNumberAt(numberString, i, wordRelation);
}
private static void expandNumberAt(string numberString, int startIndex, WordRelation wordRelation)
{
expandNumber(numberString.Substring(startIndex, numberString.Length), wordRelation);
}
public static void expandDigits(string numberString, WordRelation wordRelation)
{
int numberDigits = numberString.Length;
for (int i = 0; i < numberDigits; i++) {
char digit = numberString[i];
if (char.IsDigit(digit)) {
wordRelation.addWord(digit2num[numberString[i] - '0']);
} else {
wordRelation.addWord("umpty");
}
}
}
public static void expandOrdinal(string rawNumberString, WordRelation wordRelation)
{
expandNumber(rawNumberString.Replace(",", ""), wordRelation);
// get the last in the list of number strings
Item lastItem = wordRelation.getTail();
if (lastItem != null)
{
FeatureSet featureSet = lastItem.getFeatures();
string lastNumber = featureSet.getString("name");
string ordinal = findMatchInArray(lastNumber, digit2num, ord2num);
if (ordinal == null)
{
ordinal = findMatchInArray(lastNumber, digit2teen, ord2teen);
}
if (ordinal == null)
{
ordinal = findMatchInArray(lastNumber, digit2enty, ord2enty);
}
if (lastNumber.Equals("hundred"))
{
ordinal = "hundredth";
}
else if (lastNumber.Equals("thousand"))
{
ordinal = "thousandth";
}
else if (lastNumber.Equals("billion"))
{
ordinal = "billionth";
}
// if there was an ordinal, set the last element of the list
// to that ordinal; otherwise, don't do anything
if (ordinal != null)
{
wordRelation.setLastWord(ordinal);
}
}
}
public static void expandNumess(string rawString, WordRelation wordRelation)
{
if (rawString.Length == 4)
{
expand2DigitNumber(rawString.Substring(0, 2), wordRelation);
expandNumess(rawString.Substring(2), wordRelation);
}
else
{
wordRelation.addWord(digit2Numness[rawString[0] - '0']);
}
}
private static string findMatchInArray( string strToMatch, string[] matchInArray, string[] returnInArray)
{
for (int i = 0; i < matchInArray.Length; i++)
{
if (strToMatch.Equals(matchInArray[i]))
{
if (i < returnInArray.Length)
{
return returnInArray[i];
}
else
{
return null;
}
}
}
return null;
}
public static void expandID(string numberString, WordRelation wordRelation)
{
int numberDigits = numberString.Length;
if ((numberDigits == 4) && (numberString[2] == '0')
&& (numberString[3] == '0'))
{
if (numberString[1] == '0')
{ // e.g. 2000, 3000
expandNumber(numberString, wordRelation);
}
else
{
expandNumber(numberString.Substring(0, 2), wordRelation);
wordRelation.addWord("hundred");
}
}
else if ((numberDigits == 2) && (numberString[0] == '0'))
{
wordRelation.addWord("oh");
expandDigits(numberString.Substring(1, 2), wordRelation);
}
else if ((numberDigits == 4 && numberString[1] == '0')
|| numberDigits < 3)
{
expandNumber(numberString, wordRelation);
}
else if (numberDigits % 2 == 1)
{
string firstDigit = digit2num[numberString[0] - '0'];
wordRelation.addWord(firstDigit);
expandID(numberString.Substring(1, numberDigits), wordRelation);
}
else
{
expandNumber(numberString.Substring(0, 2), wordRelation);
expandID(numberString.Substring(2, numberDigits), wordRelation);
}
}
/// <summary>
/// Expands a real-number string (sign prefixes, scientific notation with
/// 'e'/'E', and decimal points) into spoken words added to the relation.
/// </summary>
/// <param name="numberString">the number string to expand</param>
/// <param name="wordRelation">the relation that receives the expanded words</param>
public static void expandReal(string numberString, WordRelation wordRelation)
{
    int stringLength = numberString.Length;
    int position;

    // Guard: an empty string has nothing to speak (and would otherwise throw
    // on the numberString[0] accesses below).
    if (stringLength == 0)
    {
        return;
    }

    if (numberString[0] == '-')
    {
        // negative real numbers
        wordRelation.addWord("minus");
        // BUG FIX: Java's substring(1, stringLength) means "from index 1 to the
        // end"; .NET Substring(start, length) with length == stringLength would
        // overrun the string and throw. Substring(1) is the correct port.
        expandReal(numberString.Substring(1), wordRelation);
    }
    else if (numberString[0] == '+')
    {
        // prefixed with a '+'
        wordRelation.addWord("plus");
        // BUG FIX: same Java->C# substring conversion as the '-' branch.
        expandReal(numberString.Substring(1), wordRelation);
    }
    else if ((position = numberString.IndexOf('e')) != -1
        || (position = numberString.IndexOf('E')) != -1)
    {
        // scientific notation: mantissa, "e", exponent
        expandReal(numberString.Substring(0, position), wordRelation);
        wordRelation.addWord("e");
        expandReal(numberString.Substring(position + 1), wordRelation);
    }
    else if ((position = numberString.IndexOf('.')) != -1)
    {
        // decimal point: integer part, "point", then digit-by-digit fraction
        string beforeDot = numberString.Substring(0, position);
        if (beforeDot.Length > 0)
        {
            expandReal(beforeDot, wordRelation);
        }
        wordRelation.addWord("point");
        string afterDot = numberString.Substring(position + 1);
        if (afterDot.Length > 0)
        {
            expandDigits(afterDot, wordRelation);
        }
    }
    else
    {
        // everything else: a plain cardinal number
        expandNumber(numberString, wordRelation);
    }
}
/// <summary>
/// Spells out a token letter by letter (lower-cased), speaking embedded digits
/// as number words. The single-letter token "a" is emitted as "_a".
/// </summary>
/// <param name="letters">the token to spell out</param>
/// <param name="wordRelation">the relation that receives the spoken letters</param>
public static void expandLetters(string letters, WordRelation wordRelation)
{
    letters = letters.ToLower();

    foreach (char symbol in letters)
    {
        if (char.IsDigit(symbol))
        {
            // digits inside the token become their number words
            wordRelation.addWord(digit2num[symbol - '0']);
        }
        else if (letters.Equals("a"))
        {
            // the whole token is the lone letter "a"
            wordRelation.addWord("_a");
        }
        else
        {
            wordRelation.addWord(symbol.ToString(CultureInfo.InvariantCulture));
        }
    }
}
/// <summary>
/// Converts a roman numeral composed of 'X', 'V' and 'I' (including the
/// subtractive forms "IV" and "IX") to its integer value. Any other
/// characters are ignored.
/// </summary>
/// <param name="roman">the roman numeral string</param>
/// <returns>the integer value (0 for an empty string)</returns>
public static int expandRoman(string roman)
{
    int total = 0;
    int pos = 0;

    while (pos < roman.Length)
    {
        char symbol = roman[pos];
        switch (symbol)
        {
            case 'X':
                total += 10;
                break;
            case 'V':
                total += 5;
                break;
            case 'I':
                // 'I' may start a subtractive pair: IV = 4, IX = 9.
                char following = (pos + 1 < roman.Length) ? roman[pos + 1] : '\0';
                if (following == 'V')
                {
                    total += 4;
                    pos++; // consume the 'V' as part of the pair
                }
                else if (following == 'X')
                {
                    total += 9;
                    pos++; // consume the 'X' as part of the pair
                }
                else
                {
                    total += 1;
                }
                break;
        }
        pos++;
    }
    return total;
}
}
}
<|start_filename|>Syn.Speech/Decoder/Scorer/SimpleAcousticScorer.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Diagnostics;
using Syn.Speech.Common.FrontEnd;
using Syn.Speech.Decoder.Search;
using Syn.Speech.FrontEnd;
using Syn.Speech.FrontEnd.EndPoint;
using Syn.Speech.FrontEnd.Util;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Scorer
{
/// <summary>
/// Implements some basic scorer functionality, including a simple default
/// acoustic scoring implementation which scores within the current thread,
/// that can be changed by overriding the {@link #doScoring} method.
///
/// Note that all scores are maintained in LogMath log base.
///
/// @author <NAME>
/// </summary>
public class SimpleAcousticScorer : IAcousticScorer
{
    /// <summary>Property that defines the frontend to retrieve features from for scoring.</summary>
    [S4Component(type = typeof(BaseDataProcessor))]
    public static String FEATURE_FRONTEND = "frontend";

    /// <summary>The frontend used as the source of features to score.</summary>
    protected BaseDataProcessor frontEnd;

    /// <summary>
    /// An optional post-processor for computed scores that will normalize scores. If not set, no
    /// normalization will be applied and the token scores will be returned unchanged.
    /// </summary>
    [S4Component(type = typeof(IScoreNormalizer), mandatory = false)]
    public static String SCORE_NORMALIZER = "scoreNormalizer";

    private IScoreNormalizer scoreNormalizer;

    // True when the feature stream is tagged with speech start/end signals;
    // derived from the DataStartSignal properties (see handleDataStartSignal).
    private Boolean useSpeechSignals;

    void IConfigurable.newProperties(PropertySheet ps)
    {
        newProperties(ps);
    }

    virtual public void newProperties(PropertySheet ps)
    {
        // Neither component is mandatory; a null normalizer simply disables normalization.
        this.frontEnd = (BaseDataProcessor) ps.getComponent(FEATURE_FRONTEND);
        this.scoreNormalizer = (IScoreNormalizer) ps.getComponent(SCORE_NORMALIZER);
    }

    /// <summary>
    /// Creates the scorer with an explicit frontend and optional score normalizer.
    /// </summary>
    /// <param name="frontEnd">the frontend to retrieve features from for scoring</param>
    /// <param name="scoreNormalizer">optional post-processor for computed scores; if null,
    /// the token scores are returned unchanged</param>
    public SimpleAcousticScorer(BaseDataProcessor frontEnd, IScoreNormalizer scoreNormalizer)
    {
        this.frontEnd = frontEnd;
        this.scoreNormalizer = scoreNormalizer;
    }

    public SimpleAcousticScorer()
    {
    }

    /// <summary>
    /// Scores the given set of states.
    /// </summary>
    /// <param name="scoreableList">A list containing scoreable objects to be scored</param>
    /// <returns>The best scoring scoreable, the end-of-segment signal, or <code>null</code>
    /// if there are no more features to score</returns>
    public IData calculateScores(List<IScoreable> scoreableList)
    {
        try
        {
            IData data;
            // Skip intermediate signals; stop and report end-of-speech / end-of-data.
            while ((data = getNextData()) is Signal)
            {
                if (data is SpeechEndSignal || data is DataEndSignal)
                    return data;
            }

            if (data == null || scoreableList.Count == 0)
                return null;

            // convert the data to FloatData if not yet done
            if (data is DoubleData)
                data = DataUtil.DoubleData2FloatData((DoubleData) data);

            IScoreable bestToken = doScoring(scoreableList, data);

            // apply optional score normalization
            if (scoreNormalizer != null && bestToken is Token)
                bestToken = scoreNormalizer.normalize(scoreableList, bestToken);

            return bestToken;
        }
        catch (Exception e)
        {
            Trace.WriteLine(e.Message);
            return null;
        }
    }

    private IData getNextData()
    {
        IData data = frontEnd.getData();

        // reconfigure the scorer for the coming data stream
        if (data is DataStartSignal)
            handleDataStartSignal((DataStartSignal)data);
        if (data is DataEndSignal)
            handleDataEndSignal((DataEndSignal)data);

        return data;
    }

    /// <summary>Handles the first element in a feature-stream.</summary>
    /// <param name="dataStartSignal">the start-of-stream signal</param>
    protected void handleDataStartSignal(DataStartSignal dataStartSignal)
    {
        Dictionary<String, Object> dataProps = dataStartSignal.getProps();
        useSpeechSignals = dataProps.ContainsKey(DataStartSignal.SPEECH_TAGGED_FEATURE_STREAM)
            && (Boolean) dataProps[DataStartSignal.SPEECH_TAGGED_FEATURE_STREAM];
    }

    /// <summary>Handles the last element in a feature-stream.</summary>
    /// <param name="dataEndSignal">the end-of-stream signal</param>
    protected void handleDataEndSignal(DataEndSignal dataEndSignal)
    {
        // we don't treat the end-signal here, but extending classes might do
    }

    public void startRecognition()
    {
        if (!useSpeechSignals)
        {
            IData firstData = getNextData();
            if (firstData == null)
                return;

            Trace.Assert(firstData is DataStartSignal,
                "The first element in an sphinx4-feature stream must be a DataStartSignal but was a " + firstData.GetType().Name);
        }

        if (!useSpeechSignals)
            return;

        // Fast-forward the stream to the start of speech.
        IData data;
        while (!((data = getNextData()) is SpeechStartSignal))
        {
            if (data == null)
            {
                break;
            }
        }
    }

    public void stopRecognition()
    {
        // nothing needs to be done here
    }

    /// <summary>
    /// Scores a list of <code>IScoreable</code>s against a <code>IData</code> frame.
    /// </summary>
    /// <param name="scoreableList">The list of scoreables to be scored</param>
    /// <param name="data">The data object to be used for scoring</param>
    /// <returns>the best scoring scoreable, or <code>null</code> if the list was empty</returns>
    protected IScoreable doScoring(List<IScoreable> scoreableList, IData data)
    {
        // BUG FIX: the original read IEnumerator.Current before calling MoveNext().
        // Unlike a Java Iterator, a .NET enumerator starts *before* the first
        // element, so Current was null/undefined and the first calculateScore()
        // call crashed with a NullReferenceException.
        IScoreable best = null;
        foreach (IScoreable scoreable in scoreableList)
        {
            // calculateScore() also caches the score inside the scoreable (see getScore()).
            float score = scoreable.calculateScore(data);
            if (best == null || score > best.getScore())
                best = scoreable;
        }
        return best;
    }

    // Even if we don't do any meaningful allocation here, we implement the methods
    // because most extending scorers need them either way.
    public void allocate()
    {
    }

    public void deallocate()
    {
    }
}
}
<|start_filename|>Syn.Speech/Alignment/OpEnum.cs<|end_filename|>
using Syn.Speech.Helper;
//PATROLLED
namespace Syn.Speech.Alignment
{
/// <summary>
/// String-keyed set of traversal-operator constants ("n" = next, "p" = prev,
/// "parent", "daughter", ...). Each instance registers itself in a static map
/// so <see cref="getInstance"/> can resolve an operator from its textual code.
/// </summary>
internal class OpEnum
{
    // Registry of all instances, keyed by operator code.
    // NOTE: must stay declared before the public static instances below --
    // every OpEnum constructor writes into it, and static fields initialize
    // in declaration order.
    private static readonly HashMap<string, OpEnum> map = new HashMap<string, OpEnum>();

    public static OpEnum NEXT = new OpEnum("n");
    public static OpEnum PREV = new OpEnum("p");
    public static OpEnum NEXT_NEXT = new OpEnum("nn");
    public static OpEnum PREV_PREV = new OpEnum("pp");
    public static OpEnum PARENT = new OpEnum("parent");
    public static OpEnum DAUGHTER = new OpEnum("daughter");
    public static OpEnum LAST_DAUGHTER = new OpEnum("daughtern");
    public static OpEnum RELATION = new OpEnum("R");

    // The operator's textual code; also the registry key.
    private readonly string name;

    // Explicit (empty) static constructor: opts the type out of beforefieldinit,
    // making static-field initialization timing deterministic.
    static OpEnum(){}

    private OpEnum(string name)
    {
        this.name = name;
        map.put(this.name, this);
    }

    /// <summary>
    /// Looks up the operator for the given code; returns the map's default
    /// (presumably null for this reference type -- HashMap is a project helper)
    /// when the code is unknown.
    /// </summary>
    public static OpEnum getInstance(string name)
    {
        return map.get(name);
    }

    public override string ToString()
    {
        return name;
    }
}
}
<|start_filename|>Syn.Speech/Class1.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using java.lang;
using java.util;
namespace Syn.Speech
{
// NOTE(review): appears to be a scratch/placeholder class -- Test() has no body
// and nothing in this file references it. Candidate for removal.
class Class1
{
    public void Test()
    {
    }
}
}
<|start_filename|>Syn.Speech/Decoder/Scorer/IScoreNormalizer.cs<|end_filename|>
using System.Collections.Generic;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Scorer
{
/// <summary>
/// Describes all API-elements that are necessary to normalize token-scores after these have been computed by an
/// AcousticScorer.
///
/// @author <NAME>
/// @see Decoder.Scorer.AcousticScorer
/// @see Decoder.Search.Token
/// </summary>
public interface IScoreNormalizer:IConfigurable
{
    /// <summary>
    /// Normalizes the scores of a set of scoreables (tokens).
    /// </summary>
    /// <param name="scoreableList">The set of scoreables whose scores are to be normalized</param>
    /// <param name="bestToken">The best scoring scoreable of the above mentioned list. Although not
    /// strictly necessary, it is included for convenience and to reduce computational overhead.</param>
    /// <returns>The best scoreable after all scoreables have been normalized. In most cases
    /// normalization won't change the order, but returning it keeps the API open for approaches
    /// that do.</returns>
    IScoreable normalize(List<IScoreable> scoreableList, IScoreable bestToken);
}
}
<|start_filename|>Syn.Speech/Decoder/Scorer/BackgroundModelNormalizer.cs<|end_filename|>
using System.Collections.Generic;
using System.Diagnostics;
using Syn.Speech.Decoder.Search;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Scorer
{
/// <summary>
/// Normalizes a set of Tokens against the best scoring Token of a background model.
/// </summary>
/// <summary>
/// Normalizes a set of Tokens against the best scoring Token of a background model.
/// </summary>
public class BackgroundModelNormalizer : IScoreNormalizer
{
    /// <summary>
    /// The active list provider used to determine the best token for normalization. If this
    /// reference is not defined no normalization will be applied.
    /// </summary>
    [S4Component(type = typeof(SimpleBreadthFirstSearchManager), mandatory = false)]
    public const string ACTIVE_LIST_PROVIDER = "activeListProvider";

    private SimpleBreadthFirstSearchManager activeListProvider;

    public BackgroundModelNormalizer()
    {
    }

    public void newProperties(PropertySheet ps)
    {
        this.activeListProvider = (SimpleBreadthFirstSearchManager)ps.getComponent(ACTIVE_LIST_PROVIDER);
        // BUG FIX: warn only when the provider is actually missing; the original
        // logged "no active list set." unconditionally, even when one was configured.
        if (activeListProvider == null)
        {
            Trace.WriteLine("no active list set.");
        }
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="BackgroundModelNormalizer"/> class.
    /// </summary>
    /// <param name="activeListProvider">The active list provider used to determine the best token
    /// for normalization. If this reference is not defined no normalization will be applied.</param>
    public BackgroundModelNormalizer(SimpleBreadthFirstSearchManager activeListProvider)
    {
        this.activeListProvider = activeListProvider;
        // BUG FIX: same as newProperties -- only warn when the provider is null.
        if (activeListProvider == null)
        {
            Trace.WriteLine("no active list set.");
        }
    }

    public IScoreable normalize(List<IScoreable> scoreableList, IScoreable bestToken)
    {
        // Without a provider, normalization is a documented no-op.
        if (activeListProvider == null)
        {
            return bestToken;
        }

        var normToken = (Token)activeListProvider.getActiveList().getBestToken();

        // The normalization token is expected to come from the previous frame.
        Trace.Assert(bestToken.getFrameNumber() == normToken.getFrameNumber() - 1,
            "frame numbers should be equal for a meaningful normalization");

        float normScore = normToken.getScore();
        foreach (IScoreable scoreable in scoreableList)
        {
            // Only tokens participate in normalization; other scoreables pass through.
            if (scoreable is Token)
            {
                scoreable.normalizeScore(normScore);
            }
        }
        return bestToken;
    }
}
}
<|start_filename|>Syn.Speech/Decoder/Scorer/IScoreable.cs<|end_filename|>
using System.Collections.Generic;
using Syn.Speech.Common.FrontEnd;
namespace Syn.Speech.Decoder.Scorer
{
/// <summary>
/// An entity that can be scored against a data frame.
/// NOTE(review): this extends IComparer&lt;IScoreable&gt; (a pairwise comparator)
/// rather than IComparable -- confirm implementors are meant to supply
/// Compare(x, y) and that callers sort with the scoreable itself as comparer.
/// </summary>
public interface IScoreable:IData,IComparer<IScoreable>
{
    /// <summary>
    /// Calculates a score against the given data. The score can be retrieved with getScore().
    /// </summary>
    /// <param name="data">the data to be scored</param>
    /// <returns>the score for the data</returns>
    float calculateScore(IData data);

    /// <summary>
    /// Retrieves a previously calculated (and possibly normalized) score.
    /// </summary>
    /// <returns>the score</returns>
    float getScore();

    /// <summary>
    /// Normalizes a previously calculated score.
    /// </summary>
    /// <param name="maxScore">the score to normalize against</param>
    /// <returns>the normalized score</returns>
    float normalizeScore(float maxScore);

    /// <summary>
    /// Returns the frame number that this Scoreable should be scored against.
    /// </summary>
    /// <returns>the frame number that this Scoreable should be scored against.</returns>
    int getFrameNumber();
}
}
<|start_filename|>Syn.Speech/Decoder/Search/ActiveListManager.cs<|end_filename|>
using System;
using System.Collections.Generic;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Search
{
/// <summary>
/// An active list is maintained as a sorted list
/// </summary>
/// <summary>
/// Manages active lists partitioned by search-state order; an active list is
/// maintained as a sorted list.
/// </summary>
public abstract class ActiveListManager:IConfigurable
{
    /// <summary>The property that specifies the absolute word beam width.</summary>
    [S4Integer(defaultValue = 2000)]
    public static String PROP_ABSOLUTE_WORD_BEAM_WIDTH =
        "absoluteWordBeamWidth";

    /// <summary>The property that specifies the relative word beam width.</summary>
    [S4Double(defaultValue = 0.0)]
    public static String PROP_RELATIVE_WORD_BEAM_WIDTH =
        "relativeWordBeamWidth";

    /// <summary>
    /// Adds the given token to the list.
    /// </summary>
    /// <param name="token">the token to add</param>
    public abstract void add(Token token);

    // Replaces an old token with a new token (oldToken may be null, in which
    // case replace behaves like add). Kept commented out -- not part of the
    // current API.
    //public abstract void replace(Token oldToken, Token newToken);

    /// <summary>
    /// Returns an iterator over all the non-emitting ActiveLists. The iteration
    /// order is the same as the search state order.
    /// </summary>
    /// <returns>an iterator of non-emitting ActiveLists</returns>
    public abstract IEnumerator<ActiveList> getNonEmittingListIterator();

    /// <summary>
    /// Returns the emitting ActiveList from the manager.
    /// </summary>
    /// <returns>the emitting ActiveList</returns>
    public abstract ActiveList getEmittingList();

    /// <summary>
    /// Clears the emitting list in the manager.
    /// </summary>
    public abstract void clearEmittingList();

    /// <summary>Dumps out debug info for the active list manager.</summary>
    public abstract void dump();

    /// <summary>
    /// Sets the total number of state types to be managed.
    /// </summary>
    /// <param name="numStateOrder">the total number of state types</param>
    public abstract void setNumStateOrder(int numStateOrder);

    public abstract void newProperties(PropertySheet ps);
}
}
<|start_filename|>Syn.Speech/Result/WordResult.cs<|end_filename|>
using System;
using Syn.Speech.Common;
using Syn.Speech.Linguist.Dictionary;
using Syn.Speech.Util;
namespace Syn.Speech.Result
{
/// <summary>
/// Represents a word in a recognition result.
/// This is designed specifically for obtaining confidence scores.
/// All scores are maintained in LogMath log base.
/// </summary>
/// <summary>
/// Represents a word in a recognition result.
/// This is designed specifically for obtaining confidence scores.
/// All scores are maintained in LogMath log base.
/// </summary>
public class WordResult
{
    private IWord word;
    private TimeFrame timeFrame;

    private double score;
    private double confidence;

    /// <summary>
    /// Construct a word result from a string and a confidence score.
    /// </summary>
    /// <param name="w">the word</param>
    /// <param name="confidence">the confidence for this word</param>
    public WordResult(String w, double confidence)
    {
        Pronunciation[] pros = {Pronunciation.UNKNOWN};
        word = new Word(w, pros, false);
        timeFrame = TimeFrame.NULL;
        this.confidence = confidence;
        this.score = LogMath.LOG_ZERO;
    }

    /// <summary>
    /// Construct a word result with full information.
    /// </summary>
    /// <param name="w">the word object to store</param>
    /// <param name="timeFrame">the word's time frame</param>
    /// <param name="score">score of the word</param>
    /// <param name="confidence">confidence (posterior) of the word</param>
    public WordResult(IWord w, TimeFrame timeFrame,
                      double score, double confidence)
    {
        this.word = w;
        this.timeFrame = timeFrame;
        this.score = score;
        this.confidence = confidence;
    }

    /// <summary>
    /// Construct a WordResult using a Node object and a confidence (posterior).
    /// This does not use the posterior stored in the Node object, just its
    /// word, start and end.
    /// TODO: score is currently set to zero (LOG_ZERO).
    /// </summary>
    /// <param name="node">the node to extract information from</param>
    /// <param name="confidence">the confidence (posterior) to assign</param>
    public WordResult(Node node, double confidence):
        this(node.getWord(),
             new TimeFrame(node.getBeginTime(), node.getEndTime()),
             LogMath.LOG_ZERO, confidence)
    {
    }

    /// <summary>
    /// Gets the total score for this word.
    /// </summary>
    /// <returns>the score for the word (in LogMath log base)</returns>
    public double getScore()
    {
        return score;
    }

    /// <summary>
    /// Returns a log confidence score for this WordResult.
    /// Use the getLogMath().logToLinear() method to convert the log confidence
    /// score to linear. The linear value should be between 0.0 and 1.0
    /// (inclusive) for this word.
    /// </summary>
    /// <returns>a log confidence score whose linear value is in [0, 1]</returns>
    public double getConfidence()
    {
        // Clamp at log(1) so the linear confidence never exceeds 1.
        return Math.Min(confidence, LogMath.LOG_ONE);
    }

    /// <summary>
    /// Gets the pronunciation for this word.
    /// </summary>
    /// <returns>the pronunciation for the word</returns>
    public IPronunciation getPronunciation()
    {
        return word.getMostLikelyPronunciation();
    }

    /// <summary>
    /// Gets the word object associated with the given result.
    /// </summary>
    /// <returns>the word object</returns>
    public IWord getWord()
    {
        return word;
    }

    /// <summary>
    /// Gets the time frame for the word.
    /// </summary>
    public TimeFrame getTimeFrame()
    {
        return timeFrame;
    }

    /// <summary>
    /// Does this word result represent a filler token?
    /// </summary>
    /// <returns>true if this is a filler</returns>
    public Boolean isFiller()
    {
        return word.isFiller() || word.ToString().Equals("<skip>");
    }

    public override String ToString()
    {
        // BUG FIX: literal braces in .NET composite format strings must be
        // escaped as "{{" / "}}". The previous pattern ("{{0}, {1}, [{2}]}")
        // contained an unescaped '}' and threw a FormatException at runtime.
        return String.Format("{{{0}, {1}, [{2}]}}", word, confidence, timeFrame);
    }
}
}
<|start_filename|>Syn.Speech/Linguist/Language/NGram/IBackoffLanguageModel.cs<|end_filename|>
namespace Syn.Speech.Linguist.Language.NGram
{
/// <summary>
///
/// Represents the generic interface to an N-Gram language model
/// that uses backoff to estimate unseen probabilities. Backoff
/// depth is important in search space optimization, for example
/// it's used in LexTreeLinguist to collapse states which has
/// only unigram backoff. This ways unlikely sequences are penalized.
/// </summary>
public abstract class IBackoffLanguageModel : ILanguageModel
{
    /// <summary>
    /// Returns the probability of the given word sequence together with the
    /// backoff depth used to estimate it (see the class summary above for why
    /// the depth matters to search-space optimization).
    /// </summary>
    /// <param name="wordSequence">the word sequence to score</param>
    public abstract ProbDepth getProbDepth(WordSequence wordSequence);
}
}
<|start_filename|>Syn.Speech/Result/Node.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using Syn.Speech.Common;
using Syn.Speech.Util;
namespace Syn.Speech.Result
{
/// <summary>
///
/**
/// <p/>
/// A node is part of Lattices, representing the theory that a word was spoken over a given period of time. A node also
/// has a set of entering and leaving {@link edu.cmu.sphinx.result.Edge edges}, connecting it to other nodes. One can get
/// and set the beginning and end frames of the word via the getBeginTime and getEndTime methods. When setting these
/// times, the beginning time must be earlier or equal to the end time, otherwise an error will be thrown. </p>
/// <p/>
/// <p/>
/// The posterior probability of any word in a word lattice is the probability that the node representing that word
/// occurs on any path through the lattice. It is usually computed as the ratio of the total likelihood scores of all
/// paths through the lattice that pass through the node, to the total likelihood score of all paths through the lattice.
/// Path scores are usually computed using the acoustic likelihoods of the nodes, although language scores can also be
/// incorporated. The posterior probabilities of an entire lattice is usually computed efficiently using the
/// Forward-Backward Algorithm. Refer to the {@link edu.cmu.sphinx.result.Lattice#computeNodePosteriors
/// computeNodePosteriors} method in the Lattice class for details. </p>
*/
/// </summary>
public class Node
{
// used to generate unique IDs for new Nodes.
private static int nodeCount=0;
private String id;
private IWord word;
// TODO: use TimeFrame
private int beginTime = -1;
private int endTime = -1;
private List<Edge> enteringEdges= new List<Edge>();
private List<Edge> leavingEdges = new List<Edge>();
private double forwardScore;
private double backwardScore;
private double posterior;
private Node bestPredecessor;
private double viterbiScore;
private List<Node> descendants=null;
/**
/// Create a new Node
*
/// @param word the word of this node
/// @param beginTime the start time of the word
/// @param endTime the end time of the word
*/
public Node(IWord word, int beginTime, int endTime)
:this(getNextNodeId(), word, beginTime, endTime)
{
}
/**
/// Create a new Node with given ID. Used when creating a Lattice from a .LAT file
*
/// @param id
/// @param word
/// @param beginTime
/// @param endTime
*/
public Node(String id, IWord word, int beginTime, int endTime)
{
this.id = id;
this.word = word;
this.beginTime = beginTime;
this.endTime = endTime;
if (endTime != -1) {
if (beginTime > endTime) {
throw new Exception("Begin time (" + beginTime +
") later than end time (" + endTime + ')');
}
}
this.forwardScore = LogMath.LOG_ZERO;
this.backwardScore = LogMath.LOG_ZERO;
this.posterior = LogMath.LOG_ZERO;
}
/**
/// Get a unique ID for a new Node. Used when creating a Lattice from a .LAT file
*
/// @return the unique ID for a new node
*/
protected static String getNextNodeId()
{
return nodeCount.ToString();
}
/**
/// Test if a node has an Edge to a Node
*
/// @param n
/// @return unique Node ID
*/
public Boolean hasEdgeToNode(Node n)
{
return getEdgeToNode(n) != null;
}
/**
/// given a node find the edge to that node
*
/// @param n the node of interest
/// @return the edge to that node or <code> null</code> if no edge could be found.
*/
public Edge getEdgeToNode(Node n)
{
foreach (Edge e in leavingEdges)
{
if (e.getToNode() == n)
{
return e;
}
}
return null;
}
/**
/// Test is a Node has an Edge from a Node
*
/// @param n
/// @return true if this node has an Edge from n
*/
public Boolean hasEdgeFromNode(Node n)
{
return getEdgeFromNode(n) != null;
}
/**
/// given a node find the edge from that node
*
/// @param n the node of interest
/// @return the edge from that node or <code> null</code> if no edge could be found.
*/
public Edge getEdgeFromNode(Node n)
{
foreach (Edge e in enteringEdges)
{
if (e.getFromNode() == n)
{
return e;
}
}
return null;
}
/**
/// Test if a Node has all Edges from the same Nodes and another Node.
*
/// @param n
/// @return true if this Node has Edges from the same Nodes as n
*/
public Boolean hasEquivalentEnteringEdges(Node n)
{
if (enteringEdges.Count != n.getEnteringEdges().Count)
{
return false;
}
foreach (Edge e in enteringEdges)
{
Node fromNode = e.getFromNode();
if (!n.hasEdgeFromNode(fromNode))
{
return false;
}
}
return true;
}
/**
/// Test if a Node has all Edges to the same Nodes and another Node.
*
/// @param n the node of interest
/// @return true if this Node has all Edges to the sames Nodes as n
*/
public Boolean hasEquivalentLeavingEdges(Node n)
{
if (leavingEdges.Count != n.getLeavingEdges().Count)
{
return false;
}
foreach (Edge e in leavingEdges)
{
Node toNode = e.getToNode();
if (!n.hasEdgeToNode(toNode))
{
return false;
}
}
return true;
}
/**
/// Get the Edges to this Node
*
/// @return Edges to this Node
*/
public List<Edge> getEnteringEdges()
{
return enteringEdges;
}
/**
/// Get the Edges from this Node
*
/// @return Edges from this Node
*/
public List<Edge> getLeavingEdges()
{
return leavingEdges;
}
/**
/// Returns a copy of the Edges to this Node, so that the underlying data structure will not be modified.
*
/// @return a copy of the edges to this node
*/
public List<Edge> getCopyOfEnteringEdges()
{
return new List<Edge>(enteringEdges);
}
/**
/// Returns a copy of the Edges from this Node, so that the underlying data structure will not be modified.
*
/// @return a copy of the edges from this node
*/
public List<Edge> getCopyOfLeavingEdges()
{
return new List<Edge>(leavingEdges);
}
/**
/// Add an Edge from this Node
*
/// @param e
*/
public void addEnteringEdge(Edge e)
{
enteringEdges.Add(e);
}
/**
/// Add an Edge to this Node
*
/// @param e
*/
public void addLeavingEdge(Edge e)
{
leavingEdges.Add(e);
}
/**
/// Remove an Edge from this Node
*
/// @param e
*/
public void removeEnteringEdge(Edge e)
{
enteringEdges.Remove(e);
}
/**
/// Remove an Edge to this Node
*
/// @param e the edge to remove
*/
public void removeLeavingEdge(Edge e)
{
leavingEdges.Remove(e);
}
/**
/// Get the ID associated with this Node
*
/// @return the ID
*/
public String getId()
{
return id;
}
/**
/// Get the word associated with this Node
*
/// @return the word
*/
public IWord getWord()
{
return word;
}
/**
/// Get the frame number when the word began
*
/// @return the begin frame number, or -1 if the frame number is unknown
*/
public int getBeginTime()
{
if (beginTime == -1) {
calculateBeginTime();
}
return beginTime;
}
/**
/// Sets the frame number when the word began. The begin time must be not be later than the time returned by the
/// getEndTime() method, otherwise an error will be thrown.
*
/// @param beginTime the frame number when the word began
*/
public void setBeginTime(int beginTime)
{
if (beginTime > getEndTime())
{
throw new Exception("Attempting to set a begin time (" + beginTime +
") that is later than the end time (" +
getEndTime() + ").");
}
this.beginTime = beginTime;
}
/**
/// Get the frame number when the word ends
*
/// @return the end time, or -1 if the frame number if is unknown
*/
public int getEndTime()
{
return endTime;
}
/**
/// Sets the frame number when the words ended. The end time must not be earlier than the time returned by the
/// getEndTime() method, otherwise an error will be thrown.
*
/// @param endTime the frame number when the word ended
*/
public void setEndTime(int endTime)
{
if (getBeginTime() > endTime) {
throw new Exception("Attempting to set an end time (" + endTime +
") that is earlier than the start time (" +
getBeginTime() + ").");
}
this.endTime = endTime;
}
/**
/// Returns a description of this Node that contains the word, the start time, and the end time.
*
/// @return a description of this Node
*/
override
public String ToString()
{
return ("Node(" + word.getSpelling() + "," + getBeginTime() + "|"+
getEndTime() + ')');
}
/**
/// Internal routine when dumping Lattices as AiSee files
*
/// @param f
/// @throws IOException
*/
public void dumpAISee(StreamWriter f)
{
String posterior = getPosterior().ToString();
if (getPosterior() == LogMath.LOG_ZERO)
{
posterior = "log zero";
}
f.Write("node: { title: \"" + id + "\" label: \""
+ getWord() + '[' + getBeginTime() + ',' + getEndTime() +
" p:" + posterior + "]\" }\n");
}
/**
/// Internal routine when dumping Lattices as Graphviz files
///
/// @param f
/// @throws IOException
*/
public void dumpDot(StreamWriter f)
{
String posterior = getPosterior().ToString();
if (getPosterior() == LogMath.LOG_ZERO)
{
posterior = "log zero";
}
String label = getWord().ToString() + '[' + getBeginTime() + ',' + getEndTime() + " p:" + posterior + ']';
f.Write("\tnode" + id + " [ label=\"" + label + "\" ]\n");
}
/**
/// Internal routine used when dumping Lattices as .LAT files
*
/// @param f
/// @throws IOException
*/
public void dump(StreamWriter f)
{
f.WriteLine("node: " + id + ' ' + word.getSpelling() +
//" a:" + getForwardProb() + " b:" + getBackwardProb()
" p:" + getPosterior());
}
/**
/// Internal routine used when loading Lattices from .LAT files
*
/// @param lattice
/// @param tokens
*/
public static void load(Lattice lattice, StringTokenizer tokens) {
String id = tokens.nextToken();
String label = tokens.nextToken();
lattice.addNode(id, label, 0, 0);
}
/**
/// Returns the backward score, which is calculated during the computation of the posterior score for this node.
*
/// @return Returns the backwardScore.
*/
public double getBackwardScore()
{
return backwardScore;
}
/**
/// Sets the backward score for this node.
*
/// @param backwardScore The backwardScore to set.
*/
public void setBackwardScore(double backwardScore)
{
this.backwardScore = backwardScore;
}
/**
/// Returns the forward score, which is calculated during the computation of the posterior score for this node.
*
/// @return Returns the forwardScore.
*/
public double getForwardScore()
{
return forwardScore;
}
/**
/// Sets the backward score for this node.
*
/// @param forwardScore The forwardScore to set.
*/
public void setForwardScore(double forwardScore)
{
this.forwardScore = forwardScore;
}
/**
/// Returns the posterior probability of this node. Refer to the javadocs for this class for a description of
/// posterior probabilities.
*
/// @return Returns the posterior probability of this node.
*/
public double getPosterior()
{
return posterior;
}
/**
/// Sets the posterior probability of this node. Refer to the javadocs for this class for a description of posterior
/// probabilities.
*
/// @param posterior The node posterior probability to set.
*/
public void setPosterior(double posterior)
{
this.posterior = posterior;
}
/** @see java.lang.Object#hashCode() */
override
public int GetHashCode()
{
return id.GetHashCode();
}
/**
/// Assumes ids are unique node identifiers
*
/// @see java.lang.Object#equals(java.lang.Object)
*/
override
public Boolean Equals(Object obj)
{
return obj is Node && id.Equals(((Node) obj).getId());
}
/**
/// Calculates the begin time of this node, in the event that the begin time was not specified. The begin time is the
/// latest of the end times of its predecessor nodes.
*/
private void calculateBeginTime()
{
beginTime = 0;
foreach (Edge edge in enteringEdges)
{
if (edge.getFromNode().getEndTime() > beginTime)
{
beginTime = edge.getFromNode().getEndTime();
}
}
}
/**
/// Get the nodes at the other ends of outgoing edges of this node.
*
/// @return a list of child nodes
*/
public List<Node> getChildNodes()
{
List<Node> childNodes = new List<Node>();
foreach (Edge edge in leavingEdges)
{
childNodes.Add(edge.getToNode());
}
return childNodes;
}
protected void cacheDescendants()
{
descendants = new List<Node>();
cacheDescendantsHelper(this);
}
protected void cacheDescendantsHelper(Node n)
{
foreach (Node child in n.getChildNodes())
{
if (descendants.Contains(child))
{
continue;
}
descendants.Add(child);
cacheDescendantsHelper(child);
}
}
protected Boolean isAncestorHelper(List<Node> children, Node node, List<Node> seenNodes)
{
foreach (Node n in children)
{
if (seenNodes.Contains(n))
{
continue;
}
seenNodes.Add(n);
if (n.Equals(node))
{
return true;
}
if (isAncestorHelper(n.getChildNodes(), node, seenNodes))
{
return true;
}
}
return false;
}
/**
/// Check whether this node is an ancestor of another node.
*
/// @param node the Node to check
/// @return whether this node is an ancestor of the passed in node.
*/
public Boolean isAncestorOf(Node node)
{
if (descendants != null)
{
return descendants.Contains(node);
}
if (this.Equals(node))
{
return true; // node is its own ancestor
}
List<Node> seenNodes = new List<Node>();
seenNodes.Add(this);
return isAncestorHelper(this.getChildNodes(), node, seenNodes);
}
/**
/// Check whether this node has an ancestral relationship with another node (i.e. either this node is an ancestor of
/// the other node, or vice versa)
*
/// @param node the Node to check for a relationship
/// @return whether a relationship exists
*/
public Boolean hasAncestralRelationship(Node node)
{
return this.isAncestorOf(node) || node.isAncestorOf(this);
}
/// <summary>
/// Returns true if the given node is equivalent to this node. Two nodes are
/// equivalent only if they have the same word spelling, the same number of
/// entering and leaving edges, and the same begin and end times.
/// </summary>
/// <param name="other">The Node we're comparing to.</param>
/// <returns>true if the Node is equivalent; false otherwise.</returns>
public Boolean isEquivalent(Node other)
{
    // Flattened && chain: short-circuit order is identical to the original
    // nested parenthesized form.
    return word.getSpelling().Equals(other.getWord().getSpelling())
        && getEnteringEdges().Count == other.getEnteringEdges().Count
        && getLeavingEdges().Count == other.getLeavingEdges().Count
        && getBeginTime() == other.getBeginTime()
        && endTime == other.getEndTime();
}
/// <summary>
/// Returns a leaving edge that is equivalent to the given edge. Two edges are
/// equivalent if Edge.isEquivalent() returns true.
/// </summary>
/// <param name="edge">The Edge to compare the leaving edges of this node against.</param>
/// <returns>An equivalent edge, if any; or null if no equivalent edge exists.</returns>
public Edge findEquivalentLeavingEdge(Edge edge)
{
    foreach (Edge candidate in leavingEdges)
    {
        if (candidate.isEquivalent(edge))
        {
            return candidate;
        }
    }
    return null;
}
/// <summary>
/// Returns the best predecessor for this node.
/// </summary>
/// <returns>The best predecessor.</returns>
public Node getBestPredecessor()
{
    return bestPredecessor;
}
/// <summary>
/// Sets the best predecessor of this node.
/// </summary>
/// <param name="bestPredecessor">The best predecessor to set.</param>
public void setBestPredecessor(Node bestPredecessor)
{
    this.bestPredecessor = bestPredecessor;
}
/// <summary>
/// Returns the Viterbi score for this node. The Viterbi score is usually
/// computed during the speech recognition process.
/// </summary>
/// <returns>The Viterbi score.</returns>
public double getViterbiScore()
{
    return viterbiScore;
}
/// <summary>
/// Sets the Viterbi score for this node.
/// </summary>
/// <param name="viterbiScore">The Viterbi score to set.</param>
public void setViterbiScore(double viterbiScore)
{
    this.viterbiScore = viterbiScore;
}
}
}
<|start_filename|>Syn.Speech/Alignment/WordRelation.cs<|end_filename|>
//PATROLLED
namespace Syn.Speech.Alignment
{
/// <summary>
/// Helper class to add words and breaks into a Relation object.
/// </summary>
public class WordRelation
{
    private readonly Relation _relation;
    private readonly UsEnglishWordExpander _tokenToWords;

    private WordRelation(Relation parentRelation, UsEnglishWordExpander tokenToWords)
    {
        _relation = parentRelation;
        _tokenToWords = tokenToWords;
    }

    /// <summary>
    /// Creates a WordRelation object backed by a new Relation.WORD relation of
    /// the given utterance.
    /// </summary>
    /// <param name="utterance">The Utterance from which to create a Relation.</param>
    /// <param name="tokenToWords">The TokenToWords object to use.</param>
    /// <returns>A WordRelation object.</returns>
    public static WordRelation createWordRelation(Utterance utterance, UsEnglishWordExpander tokenToWords)
    {
        var wordRelation = utterance.createRelation(Relation.WORD);
        return new WordRelation(wordRelation, tokenToWords);
    }

    /// <summary>
    /// Adds a break as a feature to the last item in the relation; does nothing
    /// when the relation is still empty.
    /// </summary>
    public virtual void addBreak()
    {
        var tail = _relation.getTail();
        if (tail == null)
        {
            return;
        }
        tail.getFeatures().setString("break", "1");
    }

    /// <summary>
    /// Adds a word as an Item to this WordRelation object.
    /// </summary>
    /// <param name="word">The word to add.</param>
    public virtual void addWord(string word)
    {
        var daughter = _tokenToWords.getTokenItem().createDaughter();
        daughter.getFeatures().setString("name", word);
        _relation.appendItem(daughter);
    }

    /// <summary>
    /// Sets the last Item in this WordRelation to the given word.
    /// </summary>
    /// <param name="word">The word to set.</param>
    public virtual void setLastWord(string word)
    {
        _relation.getTail().getFeatures().setString("name", word);
    }

    /// <summary>
    /// Returns the last item in this WordRelation.
    /// </summary>
    /// <returns>The last item.</returns>
    public virtual Item getTail()
    {
        return _relation.getTail();
    }
}
}
<|start_filename|>Syn.Speech/Alignment/SuffixFSM.cs<|end_filename|>
//PATROLLED
using System.IO;
namespace Syn.Speech.Alignment
{
/// <summary>
/// Implements a finite state machine that checks if a given string is a suffix.
/// </summary>
public class SuffixFSM : PronounceableFSM
{
    /// <summary>
    /// Builds the suffix FSM from the given model file. The second base-class
    /// argument is false — presumably selecting reversed (suffix) scanning;
    /// confirm against PronounceableFSM.
    /// </summary>
    public SuffixFSM(FileInfo path)
        : base(path, false)
    {
    }
    /// <summary>
    /// Builds the suffix FSM from the model file at the given path string.
    /// </summary>
    public SuffixFSM(string path)
        : base(path, false)
    {
    }
}
}
<|start_filename|>Syn.Speech/Util/CustomThreadFactory.cs<|end_filename|>
using java.lang;
using java.util.concurrent;
using java.util.concurrent.atomic;
//PATROLLED
namespace Syn.Speech.Util
{
/// <summary>
/// A ThreadFactory (java.util.concurrent interop) that names created threads with
/// a "{prefix}-{pool#}-thread-{thread#}" pattern and applies a fixed daemon flag
/// and priority to every thread it creates.
/// </summary>
public class CustomThreadFactory : ThreadFactory
{
    // FIX: made readonly for consistency with the sibling fields — it is assigned
    // only at declaration and read once in the constructor.
    private readonly AtomicInteger poolNumber = new AtomicInteger(1);
    readonly ThreadGroup group;
    readonly AtomicInteger threadNumber = new AtomicInteger(1);
    readonly string namePrefix;
    readonly bool daemon;
    readonly int priority;
    /// <summary>
    /// Creates the factory.
    /// </summary>
    /// <param name="namePrefix">Prefix used when naming created threads.</param>
    /// <param name="daemon">Whether created threads are daemon threads.</param>
    /// <param name="priority">Thread priority; must lie within [Thread.MIN_PRIORITY, Thread.MAX_PRIORITY].</param>
    public CustomThreadFactory(string namePrefix, bool daemon, int priority)
    {
        if (priority > Thread.MAX_PRIORITY || priority < Thread.MIN_PRIORITY)
            throw new IllegalArgumentException("illegal thread priority");
        // Use the security manager's thread group when present, mirroring
        // java.util.concurrent's default factory behavior.
        SecurityManager s = java.lang.System.getSecurityManager();
        this.group = s != null ? s.getThreadGroup() : Thread.currentThread().getThreadGroup();
        this.namePrefix = namePrefix + "-" + poolNumber.getAndIncrement() + "-thread-";
        this.daemon = daemon;
        this.priority = priority;
    }
    /// <summary>
    /// Creates a new, not-yet-started thread for the given runnable, applying the
    /// configured name, daemon flag, and priority.
    /// </summary>
    public Thread newThread(Runnable r)
    {
        Thread t = new Thread(group, r, namePrefix + threadNumber.getAndIncrement(), 0);
        if (t.isDaemon() != daemon)
            t.setDaemon(daemon);
        if (t.getPriority() != priority)
            t.setPriority(priority);
        return t;
    }
}
}
<|start_filename|>Syn.Speech/Alignment/IWordExpander.cs<|end_filename|>
using System.Collections.Generic;
//PATROLLED
namespace Syn.Speech.Alignment
{
/// <summary>
/// Expands a piece of text into an ordered list of word strings.
/// </summary>
public interface IWordExpander
{
    /// <summary>Expands the given text; returns one entry per resulting word.</summary>
    List<string> expand(string text);
}
<|start_filename|>Syn.Speech/Util/NISTAlign.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Syn.Speech.Helper;
using Syn.Speech.Results;
//PATROLLED + REFACTORED
namespace Syn.Speech.Util
{
/// <summary>
/// Implements a portion of the NIST align/scoring algorithm to compare a reference string to a hypothesis string.
/// It only keeps track of substitutions, insertions, and deletions.
/// </summary>
public class NISTAlign
{
// Codes used in the backtrace table and backtrace list to label how a
// reference/hypothesis word pair was aligned.
const int Ok = 0;
const int Substitution = 1;
const int Insertion = 2;
const int Deletion = 3;
// Penalty weights used when building the alignment (penalty) table.
private const int MaxPenalty = 1000000;
private const int SubstitutionPenalty = 100;
private const int InsertionPenalty = 75;
private const int DeletionPenalty = 75;
/// <summary>
/// Used for padding out aligned strings (asterisks mark missing words,
/// spaces pad words to equal width).
/// </summary>
private const string Stars = "********************************************";
private const string Spaces = " ";
private const string HRule = "============================================================================";
// Totals over the life of this instance. Reset to 0 by ResetTotals().
private int _totalSentences;
private int _totalSentencesWithErrors;
private int _totalSentencesWithSubtitutions;
private int _totalSentencesWithInsertions;
private int _totalSentencesWithDeletions;
private int _totalReferenceWords;
private int _totalHypothesisWords;
private int _totalAlignedWords;
private int _totalWordsCorrect;
private int _totalSubstitutions;
private int _totalInsertions;
private int _totalDeletions;
// Error values for the most recent call to Align().
private int _substitutions;
private int _insertions;
private int _deletions;
private int _correct;
// The raw reference string. Updated with each call to Align().
private string _rawReference;
// The reference annotation; typically the name of the audio file for the
// reference string. This is an optional part of the rawReference string —
// if included, it is appended to the end of the string in parentheses.
// Updated with each call to Align().
private string _referenceAnnotation;
// Ordered list of words from rawReference after the annotation has been
// removed. Updated with each call to Align().
private LinkedList<Object> _referenceItems;
// Aligned list of words from rawReference. Created in AlignWords().
private LinkedList<string> _alignedReferenceWords;
// The raw hypothesis string. Updated with each call to Align().
private string _rawHypothesis;
// Ordered list of words from rawHypothesis after the annotation has been
// removed. Updated with each call to Align().
private LinkedList<Object> _hypothesisItems;
// Aligned list of words from rawHypothesis. Created in AlignWords().
private LinkedList<string> _alignedHypothesisWords;
// NOTE(review): appears unused within this class (callers pass explicit
// patterns to ToPercentage) — candidate for removal; verify before deleting.
static readonly string percentageFormat = "##0.0%";
private bool _showResults;
private bool _showAlignedResults;
/// <summary>
/// Creates a new NISTAlign object and zeroes all lifetime totals.
/// </summary>
/// <param name="showResults">true to print REF/HYP lines in PrintSentenceSummary.</param>
/// <param name="showAlignedResults">true to print the aligned (padded) word lists.</param>
public NISTAlign(bool showResults, bool showAlignedResults)
{
    _showResults = showResults;
    _showAlignedResults = showAlignedResults;
    ResetTotals();
}
/// <summary>
/// Sets whether per-sentence REF/HYP results are displayed.
/// </summary>
/// <param name="showResults">true if the results should be displayed.</param>
public void SetShowResults(bool showResults)
{
    _showResults = showResults;
}
/// <summary>
/// Sets whether aligned (padded) results are displayed.
/// </summary>
/// <param name="showAlignedResults">true if the aligned results should be displayed.</param>
public void SetShowAlignedResults(bool showAlignedResults)
{
    _showAlignedResults = showAlignedResults;
}
/// <summary>
/// Resets all lifetime totals (sentence, word, and error counts) to zero.
/// </summary>
public void ResetTotals()
{
    _totalSentences = 0;
    _totalSentencesWithErrors = 0;
    _totalSentencesWithSubtitutions = 0;
    _totalSentencesWithInsertions = 0;
    _totalSentencesWithDeletions = 0;
    _totalReferenceWords = 0;
    _totalHypothesisWords = 0;
    _totalAlignedWords = 0;
    _totalWordsCorrect = 0;
    _totalSubstitutions = 0;
    _totalInsertions = 0;
    _totalDeletions = 0;
}
/// <summary>
/// Performs the NIST alignment on the reference and hypothesis strings. This has
/// the side effect of updating nearly all the fields of this class (per-sentence
/// counts, aligned word lists, and lifetime totals).
/// </summary>
/// <param name="reference">The reference string.</param>
/// <param name="hypothesis">The hypothesis string.</param>
/// <returns>true if the reference and hypothesis match exactly.</returns>
public bool Align(string reference, string hypothesis)
{
    int annotationIndex;
    // Save the original strings for future reference.
    _rawReference = reference;
    _rawHypothesis = hypothesis;
    // Strip the optional trailing "(...)" annotation (typically the audio file
    // name) off the reference string and save it.
    annotationIndex = _rawReference.IndexOf('(');
    if (annotationIndex != -1)
    {
        _referenceAnnotation = _rawReference.Substring(annotationIndex);
        _referenceItems = ToList(_rawReference.Substring(0, annotationIndex));
    }
    else
    {
        _referenceAnnotation = null;
        _referenceItems = ToList(_rawReference);
    }
    // Strip the annotation off the hypothesis string. The hypothesis annotation
    // is discarded rather than compared against the reference's.
    annotationIndex = _rawHypothesis.IndexOf('(');
    if (annotationIndex != -1)
    {
        _hypothesisItems = ToList(
            _rawHypothesis.Substring(0, annotationIndex));
    }
    else
    {
        _hypothesisItems = ToList(_rawHypothesis);
    }
    // Reset the per-sentence counts.
    _substitutions = 0;
    _insertions = 0;
    _deletions = 0;
    // Turn the list of reference and hypothesis words into two aligned lists of
    // strings. This has the side effect of creating _alignedReferenceWords and
    // _alignedHypothesisWords (and incrementing the per-sentence error counts).
    AlignWords(Backtrace(CreateBacktraceTable(_referenceItems, _hypothesisItems, new CustomComparator1())), new CustomStringRenderer1());
    // Compute the number of correct words in the hypothesis.
    _correct = _alignedReferenceWords.Count
        - (_insertions + _deletions + _substitutions);
    // Update the totals that are kept over the lifetime of this class.
    UpdateTotals();
    return (_insertions + _deletions + _substitutions) == 0;
}
/// <summary>
/// Returns the reference string, rebuilt from the tokenized reference items
/// (spurious whitespace and the annotation removed).
/// </summary>
public string GetReference()
{
    return Tostring(_referenceItems);
}
/// <summary>
/// Returns the hypothesis string, rebuilt from the tokenized hypothesis items
/// (spurious whitespace and the annotation removed).
/// </summary>
public string GetHypothesis()
{
    return Tostring(_hypothesisItems);
}
/// <summary>
/// Returns the aligned reference string (padded/starred as in AlignWords).
/// </summary>
public string GetAlignedReference()
{
    return Tostring(_alignedReferenceWords);
}
/// <summary>
/// Returns the aligned hypothesis string (padded/starred as in AlignWords).
/// </summary>
public string GetAlignedHypothesis()
{
    return Tostring(_alignedHypothesisWords);
}
/// <summary>
/// Gets the total number of word errors (substitutions + insertions + deletions)
/// for all calls to Align.
/// </summary>
public int GetTotalWordErrors()
{
    return _totalSubstitutions + _totalInsertions + _totalDeletions;
}
/// <summary>
/// Returns the total word accuracy over all calls to Align.
/// </summary>
/// <returns>The accuracy between 0.0 and 1.0 (0 when no words have been seen).</returns>
public float GetTotalWordAccuracy()
{
    return _totalReferenceWords == 0
        ? 0
        : _totalWordsCorrect / ((float)_totalReferenceWords);
}
/// <summary>
/// Returns the total word error rate over all calls to Align.
/// </summary>
/// <returns>The error rate (0 when no words have been seen).</returns>
public float GetTotalWordErrorRate()
{
    return _totalReferenceWords == 0
        ? 0
        : GetTotalWordErrors() / ((float)_totalReferenceWords);
}
/// <summary>
/// Returns the total sentence accuracy over all calls to Align.
/// </summary>
/// <returns>The accuracy between 0.0 and 1.0 (0 when no sentences have been seen).</returns>
public float GetTotalSentenceAccuracy()
{
    if (_totalSentences == 0)
    {
        return 0;
    }
    var correctSentences = _totalSentences - _totalSentencesWithErrors;
    return correctSentences / (float)_totalSentences;
}
/// <summary>Gets the total number of reference words across all calls to Align.</summary>
public int GetTotalWords()
{
    return _totalReferenceWords;
}
/// <summary>Gets the total number of substitution errors across all calls to Align.</summary>
public int GetTotalSubstitutions()
{
    return _totalSubstitutions;
}
/// <summary>Gets the total number of insertion errors across all calls to Align.</summary>
public int GetTotalInsertions()
{
    return _totalInsertions;
}
/// <summary>Gets the total number of deletion errors across all calls to Align.</summary>
public int GetTotalDeletions()
{
    return _totalDeletions;
}
/// <summary>Gets the total number of sentences across all calls to Align.</summary>
public int GetTotalSentences()
{
    return _totalSentences;
}
/// <summary>
/// Gets the total number of sentences that contained at least one error
/// (substitution, insertion, or deletion) across all calls to Align.
/// </summary>
/// <returns>The total number of sentences with errors.</returns>
public int GetTotalSentencesWithErrors()
{
    // BUG FIX: previously returned _totalSentencesWithDeletions, which under-counts
    // sentences whose only errors were substitutions or insertions. UpdateTotals()
    // increments _totalSentencesWithErrors for any error kind, and the summary
    // printers already use that field directly.
    return _totalSentencesWithErrors;
}
/// <summary>
/// Prints the results for the most recent sentence to Console.Out, honoring the
/// show-results flags. For NIST-formatted output see PrintNistSentenceSummary.
/// </summary>
public void PrintSentenceSummary()
{
    if (_showResults)
    {
        Console.WriteLine("REF: " + Tostring(_referenceItems));
        Console.WriteLine("HYP: " + Tostring(_hypothesisItems));
    }
    if (_showAlignedResults)
    {
        Console.WriteLine("ALIGN_REF: " + Tostring(_alignedReferenceWords));
        Console.WriteLine("ALIGN_HYP: " + Tostring(_alignedHypothesisWords));
    }
}
/// <summary>
/// Prints the total summary for all calls to Console.Out (no-op before the first
/// sentence). For NIST-formatted output see PrintNistTotalSummary.
/// </summary>
public void PrintTotalSummary()
{
    if (_totalSentences > 0)
    {
        Console.Write(
            " Accuracy: " + ToPercentage("##0.000%",
            GetTotalWordAccuracy()));
        Console.WriteLine(
            " Errors: " + GetTotalWordErrors()
            + " (Sub: " + _totalSubstitutions
            + " Ins: " + _totalInsertions
            + " Del: " + _totalDeletions + ')');
        Console.WriteLine(
            " Words: " + _totalReferenceWords
            + " Matches: " + _totalWordsCorrect
            + " WER: " + ToPercentage("##0.000%",
            GetTotalWordErrorRate()));
        Console.WriteLine(
            " Sentences: " + _totalSentences
            + " Matches: " + (_totalSentences - _totalSentencesWithErrors)
            + " SentenceAcc: " + ToPercentage("##0.000%",
            GetTotalSentenceAccuracy()));
    }
}
/// <summary>
/// Prints the results for the most recent sentence to Console.Out in the format
/// produced by the NIST aligner.
/// </summary>
public void PrintNistSentenceSummary()
{
    var sentenceErrors = _substitutions + _insertions + _deletions;
    Console.WriteLine();
    Console.Write("REF: " + Tostring(_alignedReferenceWords));
    if (_referenceAnnotation != null)
    {
        Console.Write(' ' + _referenceAnnotation);
    }
    Console.WriteLine();
    Console.Write("HYP: " + Tostring(_alignedHypothesisWords));
    if (_referenceAnnotation != null)
    {
        Console.Write(' ' + _referenceAnnotation);
    }
    Console.WriteLine();
    Console.WriteLine();
    if (_referenceAnnotation != null)
    {
        Console.WriteLine("SENTENCE " + _totalSentences
            + " " + _referenceAnnotation);
    }
    else
    {
        Console.WriteLine("SENTENCE " + _totalSentences);
    }
    Console.WriteLine("Correct = "
        + ToPercentage("##0.0%",
        _correct,
        _referenceItems.Count)
        + PadLeft(5, _correct)
        + " ("
        + PadLeft(6, _totalWordsCorrect)
        + ')');
    Console.WriteLine("Errors = "
        + ToPercentage("##0.0%",
        sentenceErrors,
        _referenceItems.Count)
        + PadLeft(5, sentenceErrors)
        + " ("
        + PadLeft(6, _totalSentencesWithErrors)
        + ')');
    Console.WriteLine();
    Console.WriteLine(HRule);
}
/// <summary>
/// Prints the summary for all calls to Align to Console.Out in the format
/// produced by the NIST aligner.
/// </summary>
public void PrintNistTotalSummary()
{
    var totalSentencesCorrect = _totalSentences - _totalSentencesWithErrors;
    Console.WriteLine();
    Console.WriteLine("---------- SUMMARY ----------");
    Console.WriteLine();
    Console.WriteLine("SENTENCE RECOGNITION PERFORMANCE:");
    Console.WriteLine("sentences " + _totalSentences);
    Console.WriteLine(" correct " + ToPercentage("##0.0%", totalSentencesCorrect, _totalSentences) + " (" + PadLeft(4, totalSentencesCorrect) + ')');
    Console.WriteLine(" with error(s) "
        + ToPercentage("##0.0%", _totalSentencesWithErrors, _totalSentences)
        + " (" + PadLeft(4, _totalSentencesWithErrors) + ')');
    Console.WriteLine(" with substitutions(s) "
        + ToPercentage("##0.0%", _totalSentencesWithSubtitutions, _totalSentences)
        + " (" + PadLeft(4, _totalSentencesWithSubtitutions) + ')');
    Console.WriteLine(" with insertion(s) "
        + ToPercentage("##0.0%", _totalSentencesWithInsertions, _totalSentences)
        + " (" + PadLeft(4, _totalSentencesWithInsertions) + ')');
    Console.WriteLine(" with deletions(s) "
        + ToPercentage("##0.0%", _totalSentencesWithDeletions, _totalSentences)
        + " (" + PadLeft(4, _totalSentencesWithDeletions) + ')');
    Console.WriteLine();
    Console.WriteLine();
    Console.WriteLine();
    Console.WriteLine("WORD RECOGNITION PERFORMANCE:");
    Console.WriteLine("Correct = "
        + ToPercentage("##0.0%", _totalWordsCorrect, _totalReferenceWords)
        + " (" + PadLeft(6, _totalWordsCorrect) + ')');
    Console.WriteLine("Substitutions = "
        + ToPercentage("##0.0%", _totalSubstitutions, _totalReferenceWords)
        + " (" + PadLeft(6, _totalSubstitutions) + ')');
    Console.WriteLine("Deletions = "
        + ToPercentage("##0.0%", _totalDeletions, _totalReferenceWords)
        + " (" + PadLeft(6, _totalDeletions) + ')');
    Console.WriteLine("Insertions = "
        + ToPercentage("##0.0%", _totalInsertions, _totalReferenceWords)
        + " (" + PadLeft(6, _totalInsertions) + ')');
    Console.WriteLine("Errors = "
        + ToPercentage("##0.0%", GetTotalWordErrors(), _totalReferenceWords)
        + " (" + PadLeft(6, GetTotalWordErrors()) + ')');
    Console.WriteLine();
    Console.WriteLine("Ref. words = " + PadLeft(6, _totalReferenceWords));
    Console.WriteLine("Hyp. words = " + PadLeft(6, _totalHypothesisWords));
    Console.WriteLine("Aligned words = " + PadLeft(6, _totalAlignedWords));
    Console.WriteLine();
    Console.WriteLine(
        "WORD ACCURACY= "
        + ToPercentage("##0.000%", _totalWordsCorrect, _totalReferenceWords)
        + " ("
        + PadLeft(5, _totalWordsCorrect)
        + '/'
        + PadLeft(5, _totalReferenceWords)
        + ") ERRORS= "
        + ToPercentage("##0.000%",
        GetTotalWordErrors(),
        _totalReferenceWords)
        + " ("
        + PadLeft(5, GetTotalWordErrors())
        + '/'
        + PadLeft(5, _totalReferenceWords)
        + ')');
    Console.WriteLine();
}
/// <summary>
/// Creates the backtrace table via dynamic programming over a penalty table.
/// Rows represent reference words, columns hypothesis words; each cell records
/// whether the cheapest path treats the pair as OK, SUBSTITUTION, INSERTION,
/// or DELETION.
/// </summary>
/// <param name="referenceItems">The ordered list of reference words.</param>
/// <param name="hypothesisItems">The ordered list of hypothesis words.</param>
/// <param name="comparator">Decides whether a reference/hypothesis pair matches.</param>
/// <returns>The backtrace table.</returns>
int[,] CreateBacktraceTable<T>(LinkedList<T> referenceItems, LinkedList<T> hypothesisItems, IComparator comparator)
{
    int[,] penaltyTable;
    int[,] backtraceTable;
    int penalty;
    int minPenalty;
    penaltyTable = new int[referenceItems.Count + 1, hypothesisItems.Count + 1];
    backtraceTable = new int[referenceItems.Count + 1, hypothesisItems.Count + 1];
    // Initialize the penaltyTable and the backtraceTable. The rows of each table
    // represent the words in the reference string; the columns represent the
    // words in the hypothesis string.
    penaltyTable[0, 0] = 0;
    backtraceTable[0, 0] = Ok;
    // The first column represents deletions: a shorter hypothesis string has
    // deleted words from the reference string.
    for (var i = 1; i <= referenceItems.Count; i++)
    {
        penaltyTable[i, 0] = DeletionPenalty * i;
        backtraceTable[i, 0] = Deletion;
    }
    // The first row represents insertions: a longer hypothesis string has
    // inserted words.
    for (var j = 1; j <= hypothesisItems.Count; j++)
    {
        penaltyTable[0, j] = InsertionPenalty * j;
        backtraceTable[0, j] = Insertion;
    }
    // Row-by-row, column-by-column, fill out the tables, keeping the penalty
    // for each cell to a minimum.
    for (var i = 1; i <= referenceItems.Count; i++)
    {
        for (var j = 1; j <= hypothesisItems.Count; j++)
        {
            minPenalty = MaxPenalty;
            // First assume that this represents a deletion.
            penalty = penaltyTable[i - 1, j] + DeletionPenalty;
            if (penalty < minPenalty)
            {
                minPenalty = penalty;
                penaltyTable[i, j] = penalty;
                backtraceTable[i, j] = Deletion;
            }
            // If the words match, we'll assume it's OK.
            // Otherwise, we assume we have a substitution.
            if (comparator.IsSimilar(referenceItems.ElementAt(i - 1), (hypothesisItems.ElementAt(j - 1))))
            {
                penalty = penaltyTable[i - 1, j - 1];
                if (penalty < minPenalty)
                {
                    minPenalty = penalty;
                    penaltyTable[i, j] = penalty;
                    backtraceTable[i, j] = Ok;
                }
            }
            else
            {
                penalty = penaltyTable[i - 1, j - 1] + SubstitutionPenalty;
                if (penalty < minPenalty)
                {
                    minPenalty = penalty;
                    penaltyTable[i, j] = penalty;
                    backtraceTable[i, j] = Substitution;
                }
            }
            // Finally consider an insertion; the cheapest of the three
            // alternatives wins the cell.
            penalty = penaltyTable[i, j - 1] + InsertionPenalty;
            if (penalty < minPenalty)
            {
                minPenalty = penalty;
                penaltyTable[i, j] = penalty;
                backtraceTable[i, j] = Insertion;
            }
        }
    }
    return backtraceTable;
}
/// <summary>
/// Backtraces through the backtrace table, starting at the "lower right" corner
/// (the last word of each string) and working backwards to [0,0]. Also
/// increments the per-sentence substitution/insertion/deletion counters as a
/// side effect.
/// </summary>
/// <param name="backtraceTable">Created by a call to CreateBacktraceTable.</param>
/// <returns>A linked list of Integers representing the backtrace, newest-first.</returns>
LinkedList<Integer> Backtrace(int[,] backtraceTable)
{
    var list = new LinkedList<Integer>();
    var i = _referenceItems.Count;
    var j = _hypothesisItems.Count;
    while ((i >= 0) && (j >= 0))
    {
        Java.Add(list, backtraceTable[i, j]);
        switch (backtraceTable[i, j])
        {
            case Ok:
                i--;
                j--;
                break;
            case Substitution:
                i--;
                j--;
                _substitutions++;
                break;
            case Insertion:
                j--;
                _insertions++;
                break;
            case Deletion:
                i--;
                _deletions++;
                break;
        }
    }
    return list;
}
/// <summary>
/// Based on the backtrace information, aligns the reference and hypothesis word
/// lists: error words are upper-cased, missing words become runs of asterisks,
/// and each pair is space-padded to equal width. Generates
/// _alignedReferenceWords and _alignedHypothesisWords.
/// </summary>
/// <param name="backtrace">The backtrace list created by Backtrace.</param>
/// <param name="renderer">Converts reference/hypothesis items to display strings.</param>
void AlignWords(LinkedList<Integer> backtrace, IStringRenderer renderer)
{
    var referenceWordsIterator = _referenceItems.GetEnumerator();
    var hypothesisWordsIterator = _hypothesisItems.GetEnumerator();
    string referenceWord;
    string hypothesisWord;
    Object a = null;
    Object b = null;
    _alignedReferenceWords = new LinkedList<string>();
    _alignedHypothesisWords = new LinkedList<string>();
    // Walk the backtrace oldest-first; the last entry (the [0,0] cell) is skipped.
    for (var m = backtrace.Count - 2; m >= 0; m--)
    {
        int backtraceEntry = backtrace.ElementAt(m);
        // Every entry except an insertion consumes a reference word.
        if (backtraceEntry != Insertion)
        {
            referenceWordsIterator.MoveNext();
            a = referenceWordsIterator.Current;
            referenceWord = renderer.GetRef(a, b);
        }
        else
        {
            referenceWord = null;
        }
        // Every entry except a deletion consumes a hypothesis word.
        if (backtraceEntry != Deletion)
        {
            hypothesisWordsIterator.MoveNext();
            b = hypothesisWordsIterator.Current;
            hypothesisWord = renderer.GetHyp(a, b);
        }
        else
        {
            hypothesisWord = null;
        }
        // Upper-case whichever side participated in an error.
        switch (backtraceEntry)
        {
            case Substitution:
            {
                referenceWord = referenceWord.ToUpper();
                hypothesisWord = hypothesisWord.ToUpper();
                break;
            }
            case Insertion:
            {
                hypothesisWord = hypothesisWord.ToUpper();
                break;
            }
            case Deletion:
            {
                referenceWord = referenceWord.ToUpper();
                break;
            }
            case Ok:
                break;
        }
        // Expand the missing words out to be all *'s.
        if (referenceWord == null)
        {
            referenceWord = Stars.Substring(0, hypothesisWord.Length);
        }
        if (hypothesisWord == null)
        {
            hypothesisWord = Stars.Substring(0, referenceWord.Length);
        }
        // Fill the words up with spaces so they are the same length.
        if (referenceWord.Length > hypothesisWord.Length)
        {
            hypothesisWord = hypothesisWord + (Spaces.Substring(0, referenceWord.Length - hypothesisWord.Length));
        }
        else if (referenceWord.Length < hypothesisWord.Length)
        {
            referenceWord = referenceWord + (Spaces.Substring(0, hypothesisWord.Length - referenceWord.Length));
        }
        Java.Add(_alignedReferenceWords, referenceWord);
        Java.Add(_alignedHypothesisWords, hypothesisWord);
    }
}
/// <summary>
/// Folds the per-sentence counts from the most recent Align call into the
/// lifetime totals.
/// </summary>
void UpdateTotals()
{
    _totalSentences++;
    if ((_substitutions + _insertions + _deletions) != 0)
    {
        _totalSentencesWithErrors++;
    }
    if (_substitutions != 0)
    {
        _totalSentencesWithSubtitutions++;
    }
    if (_insertions != 0)
    {
        _totalSentencesWithInsertions++;
    }
    if (_deletions != 0)
    {
        _totalSentencesWithDeletions++;
    }
    _totalReferenceWords += _referenceItems.Count;
    _totalHypothesisWords += _hypothesisItems.Count;
    _totalAlignedWords += _alignedReferenceWords.Count;
    _totalWordsCorrect += _correct;
    _totalSubstitutions += _substitutions;
    _totalInsertions += _insertions;
    _totalDeletions += _deletions;
}
/// <summary>
/// Turns numerator/denominator into a percentage string formatted with the given
/// pattern and left-padded to a width of 6.
/// </summary>
/// <param name="pattern">Percentage format pattern (e.g. "##0.0%").</param>
/// <param name="numerator">The numerator.</param>
/// <param name="denominator">The denominator.</param>
/// <returns>A string that represents the percentage value.</returns>
string ToPercentage(string pattern, int numerator, int denominator)
{
    var toReturn = numerator / (double)denominator;
    return PadLeft(6, toReturn.ToString(pattern));
}
/// <summary>
/// Formats a float as a percentage string using the given pattern.
/// </summary>
/// <param name="pattern">Percentage format pattern.</param>
/// <param name="value">The floating point value.</param>
/// <returns>A string that represents the percentage value.</returns>
string ToPercentage(string pattern, float value)
{
    return value.ToString(pattern);
}
/// <summary>
/// Turns the integer into a left-padded string of at least the given width.
/// </summary>
/// <param name="width">The total width of the string, including spaces.</param>
/// <param name="i">The integer.</param>
/// <returns>A string padded left with spaces.</returns>
string PadLeft(int width, int i)
{
    return PadLeft(width, Integer.ToString(i));
}
/// <summary>
/// Pads a string on the left with spaces so it fills out the given width; the
/// value is returned unchanged when it is already at least that wide.
/// </summary>
/// <param name="width">The total width of the string, including spaces.</param>
/// <param name="value">The string to pad.</param>
/// <returns>A string padded left with spaces.</returns>
string PadLeft(int width, string value)
{
    // string.PadLeft is the stdlib equivalent of the old Spaces.Substring
    // concatenation, and — unlike it — cannot throw when the requested padding
    // exceeds the length of the Spaces constant.
    return value.PadLeft(width);
}
/// <summary>
/// Splits a string into whitespace-delimited tokens (via StringTokenizer) and
/// returns them as a LinkedList, one word per entry.
/// </summary>
/// <param name="s">The string of words to parse.</param>
/// <returns>A list, one word per item.</returns>
LinkedList<Object> ToList(string s)
{
    var list = new LinkedList<Object>();
    var st = new StringTokenizer(s.Trim());
    while (st.hasMoreTokens())
    {
        var token = st.nextToken();
        list.Add(token);
    }
    return list;
}
/// <summary>
/// Converts the list of words back to a single space-separated string.
/// </summary>
/// <param name="list">The list of words.</param>
/// <returns>A space-separated string; "" for a null or empty list.</returns>
private string Tostring<T>(LinkedList<T> list)
{
    if (list == null || list.Count == 0)
        return "";
    var sb = new StringBuilder();
    foreach (var item in list)
    {
        sb.Append(item).Append(' ');
    }
    sb.Length = sb.Length - 1; // drop the trailing separator
    return sb.ToString();
}
/// <summary>
/// Takes two filenames — the first contains a list of reference sentences, the
/// second a list of hypothesis sentences. Aligns each pair of sentences and
/// outputs the individual and total results.
/// </summary>
/// <param name="args">args[0] = reference file, args[1] = hypothesis file.</param>
public static void Main(string[] args)
{
    var align = new NISTAlign(true, true);
    try
    {
        // FIX: dispose the readers deterministically — they were previously
        // never closed, leaking file handles.
        using (var referenceFile = new StreamReader(args[0]))
        using (var hypothesisFile = new StreamReader(args[1]))
        {
            try
            {
                while (true)
                {
                    var reference = referenceFile.ReadLine();
                    var hypothesis = hypothesisFile.ReadLine();
                    if ((reference == null) || (hypothesis == null))
                    {
                        break;
                    }
                    align.Align(reference, hypothesis);
                    align.PrintNistSentenceSummary();
                }
            }
            catch (IOException)
            {
                // Best-effort: a mid-stream read error still falls through to the
                // total summary, matching the original behavior.
            }
            align.PrintNistTotalSummary();
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        Console.WriteLine();
        Console.WriteLine("Usage: align <reference file> <hypothesis file>");
        Console.WriteLine();
    }
}
/// <summary>
/// Decides whether a reference item matches a hypothesis item during alignment.
/// </summary>
interface IComparator
{
    bool IsSimilar(Object obj, Object hyp);
}
/// <summary>
/// Renders reference and hypothesis items as display strings for AlignWords.
/// </summary>
public interface IStringRenderer
{
    string GetRef(Object obj, Object hyp);
    string GetHyp(Object @ref, Object hyp);
}
/// <summary>
/// Comparator for plain-string tokens: similar means both operands are strings
/// and compare equal.
/// </summary>
public class CustomComparator1 : IComparator
{
    public bool IsSimilar(Object obj, Object hyp)
    {
        return (obj is string) && (hyp is string) && obj.Equals(hyp);
    }
}
/// <summary>
/// Comparator for sausage/confusion-set alignment: a reference word matches a
/// hypothesis ConfusionSet when the set contains that word.
/// </summary>
public class CustomComparator2 : IComparator
{
    public bool IsSimilar(object refObject, object hypObject)
    {
        var word = refObject as String;
        var set = hypObject as ConfusionSet;
        return word != null && set != null && set.ContainsWord(word);
    }
}
/// <summary>
/// Renderer for plain-string alignment: both sides are already strings, so each
/// is returned by a direct cast.
/// </summary>
public class CustomStringRenderer1 : IStringRenderer
{
    public string GetRef(Object obj, Object hyp)
    {
        return (string)obj;
    }
    public string GetHyp(Object @ref, Object hyp)
    {
        return (string)hyp;
    }
}
/// <summary>
/// Renderer for confusion-set alignment: the hypothesis side renders as the
/// reference word itself when the set contains it, otherwise as the set's best
/// hypothesis.
/// </summary>
public class CustomStringRenderer2 : IStringRenderer
{
    public string GetRef(object obj, object hyp)
    {
        return (string)obj;
    }
    public string GetHyp(object refObject, object hypObject)
    {
        var word = (String)refObject;
        var set = (ConfusionSet)hypObject;
        if (set.ContainsWord(word))
        {
            return word;
        }
        return set.GetBestHypothesis().ToString();
    }
}
}
}
<|start_filename|>Syn.Speech/Decoder/Search/SimpleBreadthFirstSearchManager.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using Syn.Speech.Common.FrontEnd;
using Syn.Speech.Decoder.Pruner;
using Syn.Speech.Decoder.Scorer;
using Syn.Speech.Helper;
using Syn.Speech.Linguist;
using Syn.Speech.Results;
using Syn.Speech.Util;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Search
{
/// <summary>
/// Provides the breadth first search. To perform recognition an application should call initialize before recognition
/// begins, and repeatedly call <code> recognize </code> until Result.isFinal() returns true. Once a final result has
/// been obtained, <code> terminate </code> should be called.
///
/// All scores and probabilities are maintained in the log math log domain.
///
/// For information about breadth first search please refer to "Spoken Language Processing", <NAME>, PTR
///
/// </summary>
public class SimpleBreadthFirstSearchManager : TokenSearchManager
{
        /// <summary>
        /// The property that defines the name of the linguist to be used by this search manager.
        /// </summary>
        [S4Component(type=typeof(Linguist.Linguist))]
        public static String PROP_LINGUIST = "linguist";
        /// <summary>
        /// The property that defines the name of the pruner to be used by this search manager.
        /// </summary>
        [S4Component(type=typeof(IPruner))]
        public static String PROP_PRUNER = "pruner";
        /// <summary>
        /// The property that defines the name of the scorer to be used by this search manager.
        /// </summary>
        [S4Component(type=typeof(IAcousticScorer))]
        public static String PROP_SCORER = "scorer";
        /// <summary>
        /// The property that defines the name of the active list factory to be used by this search manager.
        /// </summary>
        [S4Component(type=typeof(ActiveListFactory))]
        public static String PROP_ACTIVE_LIST_FACTORY = "activeListFactory";
        /// <summary>
        ///
        /// The property that when set to <code>true</code> will cause the recognizer to count up all the tokens in the
        /// active list after every frame.
        /// </summary>
        [S4Boolean(defaultValue = false)]
        public static String PROP_SHOW_TOKEN_COUNT = "showTokenCount";
        /// <summary>
        /// The property that sets the minimum score relative to the maximum score in the word list for pruning. Words with a
        /// score less than relativeBeamWidth * maximumScore will be pruned from the list.
        /// </summary>
        [S4Double(defaultValue = 0.0)]
        public static String PROP_RELATIVE_WORD_BEAM_WIDTH = "relativeWordBeamWidth";
        /// <summary>
        /// The property that controls whether or not relative beam pruning will be performed on the entry into a
        /// state.
        /// </summary>
        [S4Boolean(defaultValue = false)]
        public static String PROP_WANT_ENTRY_PRUNING = "wantEntryPruning";
        /// <summary>
        /// The property that controls the number of frames processed for every time the decode growth step is skipped.
        /// Setting this property to zero disables grow skipping. Setting this number to a small integer will increase the
        /// speed of the decoder but will also decrease its accuracy. The higher the number, the less often the grow code is
        /// skipped.
        /// </summary>
        [S4Integer(defaultValue = 0)]
        public static String PROP_GROW_SKIP_INTERVAL = "growSkipInterval";
        protected Linguist.Linguist linguist=null; // Provides grammar/language info
        private IPruner pruner=null; // used to prune the active list
        private IAcousticScorer scorer=null; // used to score the active list
        protected int currentFrameNumber; // the current frame number
        protected ActiveList activeList; // the list of active tokens
        protected List<Token> resultList; // the current set of results
        protected LogMath logMath; // shared log-domain math helper
        private String name; // configured instance name (returned by ToString)
        // ------------------------------------
        // monitoring data
        // ------------------------------------
        private Timer scoreTimer; // TODO move these timers out
        private Timer pruneTimer;
        protected Timer growTimer;
        private StatisticsVariable totalTokensScored;
        private StatisticsVariable tokensPerSecond;
        private StatisticsVariable curTokensScored;
        private StatisticsVariable tokensCreated;
        private StatisticsVariable viterbiPruned;
        private StatisticsVariable beamPruned;
        // ------------------------------------
        // Working data
        // ------------------------------------
        protected Boolean _showTokenCount=false; // dump token counts after each result when true
        private Boolean wantEntryPruning; // prune on state entry (off by default)
        protected Dictionary<ISearchState, Token> bestTokenMap; // best token per search state for the current frame
        private float logRelativeWordBeamWidth; // relative word beam, log domain
        private int totalHmms; // cumulative emitting-state count (diagnostics only)
        private double startTime; // wall-clock start of recognition, in ms
        private float threshold; // global beam threshold for the current frame
        private float wordThreshold; // word beam threshold for the current frame
        private int growSkipInterval; // 0 disables grow skipping
        protected ActiveListFactory activeListFactory;
        protected Boolean streamEnd; // set when the scorer reports no more data
        /// <summary>
        /// Default constructor for configuration-driven creation; fields are populated
        /// later via newProperties(PropertySheet).
        /// </summary>
        public SimpleBreadthFirstSearchManager()
        {
        }
/**
///
/// @param linguist
/// @param pruner
/// @param scorer
/// @param activeListFactory
/// @param showTokenCount
/// @param relativeWordBeamWidth
/// @param growSkipInterval
/// @param wantEntryPruning
*/
public SimpleBreadthFirstSearchManager(Linguist.Linguist linguist, IPruner pruner,
IAcousticScorer scorer, ActiveListFactory activeListFactory,
Boolean showTokenCount, double relativeWordBeamWidth,
int growSkipInterval, Boolean wantEntryPruning)
{
this.name = GetType().Name;
this.logMath = LogMath.getLogMath();
this.linguist = linguist;
this.pruner = pruner;
this.scorer = scorer;
this.activeListFactory = activeListFactory;
this._showTokenCount = showTokenCount;
this.growSkipInterval = growSkipInterval;
this.wantEntryPruning = wantEntryPruning;
this.logRelativeWordBeamWidth = logMath.linearToLog(relativeWordBeamWidth);
this.keepAllTokens = true;
}
override public void newProperties(PropertySheet ps)
{
logMath = LogMath.getLogMath();
name = ps.InstanceName;
linguist = (Linguist.Linguist)ps.getComponent(PROP_LINGUIST);
pruner = (IPruner) ps.getComponent(PROP_PRUNER);
scorer = (IAcousticScorer) ps.getComponent(PROP_SCORER);
activeListFactory = (ActiveListFactory) ps.getComponent(PROP_ACTIVE_LIST_FACTORY);
_showTokenCount = ps.getBoolean(PROP_SHOW_TOKEN_COUNT);
double relativeWordBeamWidth = ps.getDouble(PROP_RELATIVE_WORD_BEAM_WIDTH);
growSkipInterval = ps.getInt(PROP_GROW_SKIP_INTERVAL);
wantEntryPruning = ps.getBoolean(PROP_WANT_ENTRY_PRUNING);
logRelativeWordBeamWidth = logMath.linearToLog(relativeWordBeamWidth);
this.keepAllTokens = true;
}
        /** Called at the start of recognition. Gets the search manager ready to recognize */
        override public void startRecognition()
        {
            Trace.WriteLine("starting recognition");
            // Start order: linguist, pruner, scorer, then seed the search space.
            linguist.startRecognition();
            pruner.startRecognition();
            scorer.startRecognition();
            localStart();
            // Record wall-clock start only once, for tokens/second statistics.
            if (startTime == 0.0) {
                startTime = Extensions.currentTimeMillis();
            }
        }
        /**
        /// Performs the recognition for the given number of frames.
        *
        /// @param nFrames the number of frames to recognize
        /// @return the current result or null if there is no Result (due to the lack of frames to recognize)
        */
        override public Result recognize(int nFrames)
        {
            bool done = false;
            Result result = null;
            streamEnd = false;
            // Process up to nFrames frames, stopping early when recognition completes.
            for (int i = 0; i < nFrames && !done; i++)
            {
                done = recognize();
            }
            // generate a new temporary result if the current token is based on a final search state
            // remark: the first check for not null is necessary in cases that the search space does not contain scoreable tokens.
            if (activeList.getBestToken() != null)
            {
                // to make the current result as correct as possible we undo the last search graph expansion here
                ActiveList fixedList = undoLastGrowStep();
                // Now create the result using the fixed active-list.
                // streamEnd (set by scoreTokens) suppresses a result for the empty tail frame.
                if (!streamEnd)
                {
                    result = new Results.Result(fixedList,resultList, currentFrameNumber, done);
                }
            }
            if (_showTokenCount) {
                showTokenCount();
            }
            return result;
        }
        /**
        /// Because the growBranches() is called although no data is left after the last speech frame, the ordering of the
        /// active-list might depend on the transition probabilities and (penalty-scores) only. Therefore we need to undo the last
        /// grow-step up to final states or the last emitting state in order to fix the list.
        /// @return newly created list
        */
        protected ActiveList undoLastGrowStep()
        {
            var fixedList = activeList.newInstance();
            foreach (var token in activeList.getTokens())
            {
                // NOTE(review): assumes every active token has a predecessor — confirm
                // this holds for the very first frame.
                var curToken = token.getPredecessor();
                // remove the final states that are not the real final ones because they're just hide prior final tokens:
                // Walk back while the token is (a) a final token masking an earlier
                // non-final one, (b) an emitting token that was never scored (null data),
                // or (c) neither final nor emitting.
                while (curToken.getPredecessor() != null && (
                        (curToken.isFinal() && curToken.getPredecessor() != null && !curToken.getPredecessor().isFinal())
                        || (curToken.isEmitting() && curToken.getData() == null) // the so long not scored tokens
                        || (!curToken.isFinal() && !curToken.isEmitting()))) {
                    curToken = curToken.getPredecessor();
                }
                fixedList.add(curToken);
            }
            return fixedList;
        }
        /// <summary>
        /// Terminates a recognition. Components are stopped in the reverse of the
        /// startRecognition() order: scorer, pruner, then linguist.
        /// </summary>
        override public void stopRecognition()
        {
            localStop();
            scorer.stopRecognition();
            pruner.stopRecognition();
            linguist.stopRecognition();
            Trace.WriteLine("recognition stopped");
        }
/**
/// Performs recognition for one frame. Returns true if recognition has been completed.
*
/// @return <code>true</code> if recognition is completed.
*/
protected bool recognize()
{
bool more = scoreTokens(); // score emitting tokens
if (more)
{
pruneBranches(); // eliminate poor branches
currentFrameNumber++;
if (growSkipInterval == 0
|| (currentFrameNumber % growSkipInterval) != 0) {
growBranches(); // extend remaining branches
}
}
return !more;
}
/// <summary>
/// Gets the initial grammar node from the linguist and creates a GrammarNodeToken.
/// </summary>
protected void localStart()
{
currentFrameNumber = 0;
curTokensScored.value = 0;
ActiveList newActiveList = activeListFactory.newInstance();
ISearchState state = linguist.getSearchGraph().getInitialState();
newActiveList.add(new Token(state, currentFrameNumber));
activeList = newActiveList;
growBranches();
}
        /** Local cleanup for this search manager — an intentionally empty hook; subclasses may override behavior via this extension point. */
        protected void localStop()
        {
        }
        /**
        /// Goes through the active list of tokens and expands each token, finding the set of successor tokens until all the
        /// successor tokens are emitting tokens.
        */
        protected void growBranches()
        {
            // Size the best-token map generously to limit rehashing; minimum 1.
            int mapSize = activeList.size()*10;
            if (mapSize == 0) {
                mapSize = 1;
            }
            growTimer.start();
            bestTokenMap = new Dictionary<ISearchState, Token>(mapSize);
            ActiveList oldActiveList = activeList;
            resultList = new List<Token>();
            activeList = activeListFactory.newInstance();
            // Thresholds are derived from the list being expanded, not the new one.
            threshold = oldActiveList.getBeamThreshold();
            wordThreshold = oldActiveList.getBestScore() + logRelativeWordBeamWidth;
            foreach (Token token in oldActiveList.getTokens())
            {
                collectSuccessorTokens(token);
            }
            growTimer.stop();
            // NOTE(review): conditional-compilation symbols are case-sensitive; the
            // conventional symbol is DEBUG, so this block may never compile — confirm.
            #if Debug
            int hmms = activeList.size();
            totalHmms += hmms;
            Trace.WriteLine("Frame: " + currentFrameNumber + " Hmms: "
                    + hmms + " total " + totalHmms);
            #endif
        }
        /// <summary>
        /// Calculate the acoustic scores for the active list. The active list should contain only emitting tokens.
        /// </summary>
        /// <returns><code>true</code> if there are more frames to score, otherwise, false</returns>
        protected bool scoreTokens()
        {
            bool hasMoreFrames = false;
            scoreTimer.start();
            // The scorer returns the best-scoring token for the frame, or null when
            // the input stream is exhausted.
            IData data = scorer.calculateScores(activeList.getTokens().ConvertAll(x => (IScoreable)x));
            scoreTimer.stop();
            Token bestToken = null;
            if (data is Token)
            {
                bestToken = (Token)data;
            }
            else if (data == null)
            {
                // End of stream: recognize(int) uses this to suppress a final result.
                streamEnd = true;
            }
            if (bestToken != null) {
                hasMoreFrames = true;
                activeList.setBestToken(bestToken);
            }
            // update statistics
            curTokensScored.value += activeList.size();
            totalTokensScored.value += activeList.size();
            tokensPerSecond.value = totalTokensScored.value / getTotalTime();
            //            if (logger.isLoggable(Level.FINE)) {
            //                logger.fine(currentFrameNumber + " " + activeList.size()
            //                        + " " + curTokensScored.value + " "
            //                        + (int) tokensPerSecond.value);
            //            }
            return hasMoreFrames;
        }
/**
/// Returns the total time since we start4ed
*
/// @return the total time (in seconds)
*/
private double getTotalTime()
{
return (Extensions.currentTimeMillis() - startTime) / 1000.0;
}
/** Removes unpromising branches from the active list */
protected void pruneBranches()
{
int startSize = activeList.size();
pruneTimer.start();
activeList = pruner.prune(activeList);
beamPruned.value += startSize - activeList.size();
pruneTimer.stop();
}
/**
/// Gets the best token for this state
*
/// @param state the state of interest
/// @return the best token
*/
protected Token getBestToken(ISearchState state)
{
Token best = null;
if (bestTokenMap.ContainsKey(state))
{
best = bestTokenMap[state];
Trace.WriteLine("BT " + best + " for state " + state);
}
return best;
}
protected Token setBestToken(Token token, ISearchState state)
{
bestTokenMap.Add(state, token);
return token;
}
        /// <summary>Returns the list of tokens active in the current frame.</summary>
        public ActiveList getActiveList()
        {
            return activeList;
        }
        /**
        /// Collects the next set of emitting tokens from a token and accumulates them in the active or result lists
        *
        /// @param token the token to collect successors from
        */
        protected void collectSuccessorTokens(Token token)
        {
            ISearchState state = token.getSearchState();
            // If this is a final state, add it to the final list
            if (token.isFinal()) {
                resultList.Add(token);
            }
            // Global beam pruning: drop tokens below the frame threshold.
            if (token.getScore() < threshold)
            {
                return;
            }
            // Word states additionally face the (tighter) word beam threshold.
            if (state is IWordSearchState
                && token.getScore() < wordThreshold)
            {
                return;
            }
            ISearchStateArc[] arcs = state.getSuccessors();
            // For each successor
            // calculate the entry score for the token based upon the
            // predecessor token score and the transition probabilities
            // if the score is better than the best score encountered for
            // the SearchState and frame then create a new token, add
            // it to the lattice and the SearchState.
            // If the token is an emitting token add it to the list,
            // otherwise recursively collect the new tokens successors.
            foreach (ISearchStateArc arc in arcs)
            {
                ISearchState nextState = arc.getState();
                // We're actually multiplying the variables, but since
                // these come in log(), multiply gets converted to add
                float logEntryScore = token.getScore() + arc.getProbability();
                if (wantEntryPruning) { // false by default
                    if (logEntryScore < threshold) {
                        continue;
                    }
                    if (nextState is IWordSearchState
                        && logEntryScore < wordThreshold) {
                        continue;
                    }
                }
                Token predecessor = getResultListPredecessor(token);
                // if not emitting, check to see if we've already visited
                // this state during this frame. Expand the token only if we
                // haven't visited it already. This prevents the search
                // from getting stuck in a loop of states with no
                // intervening emitting nodes. This can happen with nasty
                // jsgf grammars such as ((foo*)*)*
                if (!nextState.isEmitting())
                {
                    Token newToken = new Token(predecessor, nextState, logEntryScore,
                        arc.getInsertionProbability(),
                        arc.getLanguageProbability(),
                        currentFrameNumber);
                    tokensCreated.value++;
                    if (!isVisited(newToken))
                    {
                        collectSuccessorTokens(newToken);
                    }
                    continue;
                }
                // Emitting state: keep only the best-scoring token entering each
                // search state this frame (Viterbi recombination).
                Token bestToken = getBestToken(nextState);
                if (bestToken == null)
                {
                    Token newToken = new Token(predecessor, nextState, logEntryScore,
                        arc.getInsertionProbability(),
                        arc.getLanguageProbability(),
                        currentFrameNumber);
                    tokensCreated.value++;
                    setBestToken(newToken, nextState);
                    activeList.add(newToken);
                }
                else
                {
                    if (bestToken.getScore() <= logEntryScore)
                    {
                        // The new path is at least as good: update the existing token in place.
                        bestToken.update(predecessor as Token, nextState, logEntryScore,
                            arc.getInsertionProbability(),
                            arc.getLanguageProbability(),
                            currentFrameNumber);
                        viterbiPruned.value++;
                    }
                    else
                    {
                        viterbiPruned.value++;
                    }
                }
                //Token bestToken = getBestToken(nextState);
                //Boolean firstToken = bestToken == null;
                //if (firstToken || bestToken.getScore() <= logEntryScore) {
                //    Token newToken = new Token(predecessor, nextState, logEntryScore,
                //            arc.getInsertionProbability(),
                //            arc.getLanguageProbability(),
                //            currentFrameNumber);
                //    tokensCreated.value++;
                //    setBestToken(newToken, nextState);
                //    if (!newToken.isEmitting()) {
                //        if (!isVisited(newToken)) {
                //            collectSuccessorTokens(newToken);
                //        }
                //    } else {
                //        if (firstToken) {
                //            activeList.add(newToken);
                //        } else {
                //            activeList.replace(bestToken, newToken);
                //            viterbiPruned.value++;
                //        }
                //    }
                //} else {
                //    viterbiPruned.value++;
                //}
            }
        }
/**
/// Determines whether or not we've visited the state associated with this token since the previous frame.
*
/// @param t the token to check
/// @return true if we've visited the search state since the last frame
*/
private Boolean isVisited(Token t)
{
ISearchState curState = t.getSearchState();
t = t.getPredecessor();
while (t != null && !t.isEmitting())
{
if (curState.Equals(t.getSearchState()))
{
return true;
}
t = t.getPredecessor();
}
return false;
}
/** Counts all the tokens in the active list (and displays them). This is an expensive operation. */
protected void showTokenCount()
{
List<Token> tokenSet = new List<Token>();
foreach (Token tk in activeList.getTokens())
{
Token token = tk;
while (token != null)
{
tokenSet.Add(token);
token = token.getPredecessor();
}
}
Trace.WriteLine("Token Lattice size: " + tokenSet.Count.ToString());
tokenSet = new List<Token>();
foreach (Token tk in resultList)
{
Token token = tk;
while (token != null)
{
tokenSet.Add(token);
token = token.getPredecessor();
}
}
Trace.WriteLine("Result Lattice size: " + tokenSet.Count.ToString());
}
        /**
        /// Returns the best token map (best token per search state for the current frame).
        *
        /// @return the best token map
        */
        protected Dictionary<ISearchState, Token> getBestTokenMap()
        {
            return bestTokenMap;
        }
        /**
        /// Sets the best token Map.
        *
        /// @param bestTokenMap the new best token Map
        */
        protected void setBestTokenMap(Dictionary<ISearchState, Token> bestTokenMap)
        {
            this.bestTokenMap = bestTokenMap;
        }
        /**
        /// Returns the result list (tokens that reached a final state this frame).
        *
        /// @return the result list
        */
        public List<Token> getResultList()
        {
            return resultList;
        }
        /**
        /// Returns the current frame number.
        *
        /// @return the current frame number
        */
        public int getCurrentFrameNumber()
        {
            return currentFrameNumber;
        }
        /**
        /// Returns the Timer for growing.
        *
        /// @return the Timer for growing
        */
        public Timer getGrowTimer()
        {
            return growTimer;
        }
        /**
        /// Returns the tokensCreated StatisticsVariable.
        *
        /// @return the tokensCreated StatisticsVariable.
        */
        public StatisticsVariable getTokensCreated()
        {
            return tokensCreated;
        }
        /// <summary>
        /// Allocates resources: registers shared statistics, allocates the linguist,
        /// pruner and scorer, then creates the timers.
        /// @see Search.SearchManager#allocate()
        /// </summary>
        override public void allocate()
        {
            totalTokensScored = StatisticsVariable.getStatisticsVariable("totalTokensScored");
            tokensPerSecond = StatisticsVariable.getStatisticsVariable("tokensScoredPerSecond");
            curTokensScored = StatisticsVariable.getStatisticsVariable("curTokensScored");
            tokensCreated = StatisticsVariable.getStatisticsVariable("tokensCreated");
            viterbiPruned = StatisticsVariable.getStatisticsVariable("viterbiPruned");
            beamPruned = StatisticsVariable.getStatisticsVariable("beamPruned");
            try
            {
                linguist.allocate();
                pruner.allocate();
                scorer.allocate();
            }
            catch (IOException e)
            {
                // Wrap the IO failure so callers see a single allocation failure type.
                throw new SystemException("Allocation of search manager resources failed", e);
            }
            scoreTimer = TimerPool.getTimer(this, "Score");
            pruneTimer = TimerPool.getTimer(this, "Prune");
            growTimer = TimerPool.getTimer(this, "Grow");
        }
        /*
        /// (non-Javadoc)
        *
        /// @see edu.cmu.sphinx.decoder.search.SearchManager#deallocate()
        */
        // NOTE(review): unlike allocate(), this member is not marked 'override' —
        // confirm which base-class member it is meant to implement, since as written
        // it may hide rather than override it.
        public void deallocate()
        {
            try
            {
                // Deallocate in reverse of the allocation order.
                scorer.deallocate();
                pruner.deallocate();
                linguist.deallocate();
            }
            catch (IOException e)
            {
                throw new SystemException("Deallocation of search manager resources failed", e);
            }
        }
        /// <summary>Returns the configured instance name of this search manager.</summary>
        public override String ToString()
        {
            return name;
        }
}
}
<|start_filename|>Syn.Speech/Linguist/Acoustic/IAcousticModel.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Reflection;
using Syn.Speech.Common;
using Syn.Speech.Util.Props;
namespace Syn.Speech.Linguist.Acoustic
{
/// <summary>
/// Represents the generic interface to the Acoustic Model for sphinx4
/// </summary>
    public abstract class IAcousticModel:IConfigurable
    {
        /** The directory where the acoustic model data can be found. */
        [S4String(defaultValue = ".")]
        public static String PROP_LOCATION = "location";
        /**
        /// Gets this acoustic model ready to use, allocating all necessary resources.
        *
        /// @throws IOException if the model could not be loaded
        */
        public abstract void allocate();
        /** Deallocates previously allocated resources */
        public abstract void deallocate();
        /**
        /// Returns the name of this AcousticModel, or null if it has no name.
        *
        /// @return the name of this AcousticModel, or null if it has no name
        */
        public abstract String getName();
        /**
        /// Given a unit, returns the HMM that best matches the given unit. If exactMatch is false and an exact match is not
        /// found, then different word positions are used. If any of the contexts are non-silence filler units. a silence
        /// filler unit is tried instead.
        *
        /// @param unit the unit of interest
        /// @param position the position of the unit of interest
        /// @param exactMatch if true, only an exact match is acceptable.
        /// @return the HMM that best matches, or null if no match could be found.
        */
        public abstract IHMM lookupNearestHMM(IUnit unit, HMMPosition position, Boolean exactMatch);
        /**
        /// Returns an iterator that can be used to iterate through all the HMMs of the acoustic model
        *
        /// @return an iterator that can be used to iterate through all HMMs in the model. The iterator returns objects of
        ///         type <code>HMM</code>.
        */
        public abstract IEnumerator<IHMM> getHMMIterator();
        /**
        /// Returns an iterator that can be used to iterate through all the CI units in the acoustic model
        *
        /// @return an iterator that can be used to iterate through all CI units. The iterator returns objects of type
        ///         <code>Unit</code>
        */
        public abstract IEnumerator<IUnit> getContextIndependentUnitIterator();
        /**
        /// Returns the size of the left context for context dependent units
        *
        /// @return the left context size
        */
        public abstract int getLeftContextSize();
        /**
        /// Returns the size of the right context for context dependent units
        *
        /// @return the right context size
        */
        public abstract int getRightContextSize();
        /**
        /// Returns the properties of this acoustic model.
        *
        /// @return the properties of this acoustic model
        */
        public abstract PropertyInfo[] getProperties();
        public abstract void newProperties(PropertySheet ps);
        // Explicit interface implementation forwards to the abstract hook above, so
        // subclasses only override newProperties(PropertySheet).
        void IConfigurable.newProperties(PropertySheet ps)
        {
            newProperties(ps);
        }
    }
}
<|start_filename|>Syn.Speech/Helper/TreeSet.cs<|end_filename|>
using System.Collections.Generic;
using System.Linq;
namespace Syn.Speech.Helper
{
public class TreeSet<T> : AbstractSet<T>
{
private SortedDictionary<T, int> dict;
public TreeSet()
{
this.dict = new SortedDictionary<T, int>();
}
public TreeSet(IEnumerable<T> items)
{
this.dict = new SortedDictionary<T, int>();
foreach (var i in items)
AddItem(i);
}
public override bool AddItem(T element)
{
if (!this.dict.ContainsKey(element))
{
this.dict[element] = 0;
return true;
}
return false;
}
public override void Clear()
{
this.dict.Clear();
}
private int Compare(T a, T b)
{
return Comparer<T>.Default.Compare(a, b);
}
public override bool Contains(object item)
{
return this.dict.ContainsKey((T)item);
}
public T First()
{
if (this.dict.Count == 0)
{
throw new NoSuchMethodException();
}
return this.dict.Keys.First<T>();
}
public ICollection<T> HeadSet(T toElement)
{
List<T> list = new List<T>();
foreach (T t in this)
{
if (this.Compare(t, toElement) >= 0)
return list;
list.Add(t);
}
return list;
}
public override Iterator<T> Iterator()
{
return new EnumeratorWrapper<T>(this.dict.Keys, this.dict.Keys.GetEnumerator());
}
public override bool Remove(object element)
{
return this.dict.Remove((T)element);
}
public override int Count
{
get { return this.dict.Count; }
}
public override string ToString()
{
return "[" + string.Join(", ", this.Select(d => d.ToString()).ToArray()) + "]";
}
}
}
<|start_filename|>Syn.Speech/Decoder/Search/SortingActiveListFactory.cs<|end_filename|>
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using Syn.Speech.Common;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Search
{
/// <summary>
/// @author plamere
/// </summary>
public class SortingActiveListFactory: ActiveListFactory
{
/**
/// @param absoluteBeamWidth
/// @param relativeBeamWidth
/// @param logMath
*/
public SortingActiveListFactory(int absoluteBeamWidth,
double relativeBeamWidth)
:base(absoluteBeamWidth, relativeBeamWidth)
{
}
public SortingActiveListFactory()
{
}
/*
/// (non-Javadoc)
*
/// @see edu.cmu.sphinx.util.props.Configurable#newProperties(edu.cmu.sphinx.util.props.PropertySheet)
*/
public new void newProperties(PropertySheet ps)
{
base.newProperties(ps);
}
/*
/// (non-Javadoc)
*
/// @see edu.cmu.sphinx.decoder.search.ActiveListFactory#newInstance()
*/
override public ActiveList newInstance()
{
SortingActiveList newObject = new SortingActiveList(absoluteBeamWidth, logRelativeBeamWidth);
newObject.activeListFactory = this;
return newObject;
}
}
/**
/// An active list that tries to be simple and correct. This type of active list will be slow, but should exhibit
/// correct behavior. Faster versions of the ActiveList exist (HeapActiveList, TreeActiveList).
/// <p/>
/// This class is not thread safe and should only be used by a single thread.
/// <p/>
/// Note that all scores are maintained in the LogMath log base.
*/
class SortingActiveList : ActiveList
{
private static int DEFAULT_SIZE = 1000;
private int absoluteBeamWidth;
private float logRelativeBeamWidth;
private Token bestToken;
// when the list is changed these things should be
// changed/updated as well
private List<Token> tokenList;
public SortingActiveListFactory activeListFactory=null;
/** Creates an empty active list
/// @param absoluteBeamWidth
/// @param logRelativeBeamWidth*/
public SortingActiveList(int absoluteBeamWidth, float logRelativeBeamWidth)
{
this.absoluteBeamWidth = absoluteBeamWidth;
this.logRelativeBeamWidth = logRelativeBeamWidth;
int initListSize = absoluteBeamWidth > 0 ? absoluteBeamWidth : DEFAULT_SIZE;
this.tokenList = new List<Token>(initListSize);
}
/**
/// Adds the given token to the list
*
/// @param token the token to add
*/
override public void add(Token token)
{
//token.setLocation(tokenList.Count);
tokenList.Add(token);
if (bestToken == null || token.getScore() > bestToken.getScore()) {
bestToken = token;
}
}
/**
/// Replaces an old token with a new token
*
/// @param oldToken the token to replace (or null in which case, replace works like add).
/// @param newToken the new token to be placed in the list.
*/
//TODO: EXTRA
//override public void replace(Token oldToken, Token newToken)
//{
// if (oldToken != null)
// {
// int location = oldToken.getLocation();
// // just a sanity check:
// if (tokenList[location] != oldToken)
// {
// Trace.WriteLine("SortingActiveList: replace " + oldToken
// + " not where it should have been. New "
// + newToken + " location is " + location + " found "
// + tokenList[location]);
// }
// tokenList[location]= newToken;
// newToken.setLocation(location);
// if (bestToken == null
// || newToken.getScore() > bestToken.getScore()) {
// bestToken = newToken;
// }
// }
// else
// {
// add(newToken);
// }
//}
/**
/// Purges excess members. Reduce the size of the token list to the absoluteBeamWidth
*
/// @return a (possible new) active list
*/
override public ActiveList purge()
{
// if the absolute beam is zero, this means there
// should be no constraint on the abs beam size at all
// so we will only be relative beam pruning, which means
// that we don't have to sort the list
if (absoluteBeamWidth > 0 && tokenList.Count > absoluteBeamWidth)
{
tokenList.Sort(new ScoreableComparatorToken());
tokenList = tokenList.Take(absoluteBeamWidth).ToList();
}
return this;
}
/**
/// gets the beam threshold best upon the best scoring token
*
/// @return the beam threshold
*/
override public float getBeamThreshold()
{
return getBestScore() + logRelativeBeamWidth;
}
/**
/// gets the best score in the list
*
/// @return the best score
*/
override public float getBestScore()
{
float bestScore = -float.MaxValue;
if (bestToken != null) {
bestScore = bestToken.getScore();
}
return bestScore;
}
/**
/// Sets the best scoring token for this active list
*
/// @param token the best scoring token
*/
override public void setBestToken(Token token)
{
bestToken = token;
}
/**
/// Gets the best scoring token for this active list
*
/// @return the best scoring token
*/
override public Token getBestToken()
{
return bestToken;
}
/**
/// Retrieves the iterator for this tree.
*
/// @return the iterator for this token list
*/
public IEnumerator<Token> iterator()
{
return tokenList.GetEnumerator();
}
/**
/// Gets the list of all tokens
*
/// @return the list of tokens
*/
override public List<Token> getTokens()
{
return tokenList;
}
/**
/// Returns the number of tokens on this active list
*
/// @return the size of the active list
*/
override public int size()
{
return tokenList.Count;
}
/* (non-Javadoc)
/// @see edu.cmu.sphinx.decoder.search.ActiveList#newInstance()
*/
override public ActiveList newInstance()
{
if(activeListFactory!=null)
return activeListFactory.newInstance();
return null;
}
}
}
<|start_filename|>Syn.Speech/Alignment/PrefixFSM.cs<|end_filename|>
//PATROLLED
using System.IO;
namespace Syn.Speech.Alignment
{
    public class PrefixFSM : PronounceableFSM
    {
        /// <summary>
        /// Loads a prefix finite-state machine from the given file
        /// (scanFromFront = true in the base constructor).
        /// </summary>
        public PrefixFSM(FileInfo path) : base(path, true)
        {
        }
        /// <summary>
        /// Loads a prefix finite-state machine from the given path string.
        /// </summary>
        public PrefixFSM(string path)
            : base(path, true)
        {
        }
    }
}
<|start_filename|>Syn.Speech/Alignment/CharTokenizer.cs<|end_filename|>
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
//PATROLLED
namespace Syn.Speech.Alignment
{
public sealed class CharTokenizer : IEnumerator<Token>
{
        // Sentinel stored in currentChar when the input is exhausted.
        public const int EOF = -1;
        // Default symbol classes used to split and trim tokens.
        public const string DEFAULT_WHITESPACE_SYMBOLS = " \t\n\r";
        public const string DEFAULT_SINGLE_CHAR_SYMBOLS = "(){}[]";
        public const string DEFAULT_PREPUNCTUATION_SYMBOLS = "\"'`({[";
        public const string DEFAULT_POSTPUNCTUATION_SYMBOLS = "\"'`.,:;!?(){}[]";
        private int lineNumber; // incremented each time a '\n' is consumed
        private string inputText; // string source (used only when _reader is null)
        private StringReader _reader; // reader source (takes precedence over inputText)
        private int currentChar; // one-character lookahead, or EOF
        private int currentPosition; // index one past the current character
        // Active symbol classes; initialized from the defaults above.
        private string whitespaceSymbols = DEFAULT_WHITESPACE_SYMBOLS;
        private string singleCharSymbols = DEFAULT_SINGLE_CHAR_SYMBOLS;
        private string prepunctuationSymbols = DEFAULT_PREPUNCTUATION_SYMBOLS;
        private string postpunctuationSymbols = DEFAULT_POSTPUNCTUATION_SYMBOLS;
        private string errorDescription; // message of the last read error, if any
        private Token token; // token most recently produced by Current
        private Token lastToken; // token produced before 'token'
        /// <summary>
        /// Initializes a new instance of the <see cref="CharTokenizer"/> class.
        /// </summary>
        public CharTokenizer() { }
        /// <summary>
        /// Initializes a tokenizer over the given string.
        /// </summary>
        public CharTokenizer(string _string)
        {
            setInputText(_string);
        }
        /// <summary>
        /// Initializes a tokenizer over the given reader.
        /// </summary>
        public CharTokenizer(StringReader file)
        {
            setInputReader(file);
        }
        /// <summary>Sets the characters treated as token-separating whitespace.</summary>
        public void setWhitespaceSymbols(string symbols)
        {
            whitespaceSymbols = symbols;
        }
        /// <summary>Sets the characters that always form single-character tokens.</summary>
        public void setSingleCharSymbols(string symbols)
        {
            singleCharSymbols = symbols;
        }
        /// <summary>Sets the characters stripped into a token's prepunctuation.</summary>
        public void setPrepunctuationSymbols(string symbols)
        {
            prepunctuationSymbols = symbols;
        }
        /// <summary>Sets the characters stripped into a token's postpunctuation.</summary>
        public void setPostpunctuationSymbols(string symbols)
        {
            postpunctuationSymbols = symbols;
        }
        /// <summary>
        /// Sets a string as the token source and primes the first character.
        /// NOTE(review): a previously set _reader is not cleared here, and
        /// getNextChar() prefers the reader — confirm callers never mix the
        /// two input modes on one instance.
        /// </summary>
        public void setInputText(string inputString)
        {
            inputText = inputString;
            currentPosition = 0;
            if (inputText == null)
                return;
            getNextChar();
        }
        /// <summary>
        /// Sets a reader as the token source (takes precedence over any input text
        /// in getNextChar) and primes the first character.
        /// </summary>
        public void setInputReader(StringReader reader)
        {
            _reader = reader;
            getNextChar();
        }
        /// <summary>
        /// Advances the one-character lookahead from the reader (preferred) or the
        /// input string, updating currentPosition and lineNumber. Read errors set
        /// errorDescription and yield EOF. Returns the new currentChar.
        /// NOTE(review): if neither source is set, currentChar is left unchanged —
        /// confirm callers always set an input first.
        /// </summary>
        private int getNextChar()
        {
            if (_reader != null)
            {
                try
                {
                    int readVal = _reader.Read();
                    if (readVal == -1)
                    {
                        currentChar = EOF;
                    }
                    else
                    {
                        currentChar = (char)readVal;
                    }
                }
                catch (IOException ioe)
                {
                    // Surface the failure via hasErrors()/getErrorDescription().
                    currentChar = EOF;
                    errorDescription = ioe.Message;
                }
            }
            else if (inputText != null)
            {
                if (currentPosition < inputText.Length)
                {
                    currentChar = inputText[currentPosition];
                }
                else
                {
                    currentChar = EOF;
                }
            }
            if (currentChar != EOF)
            {
                currentPosition++;
            }
            if (currentChar == '\n')
            {
                lineNumber++;
            }
            return currentChar;
        }
        /// <summary>
        /// Returns the run of consecutive characters that ARE members of charClass.
        /// </summary>
        private string getTokenOfCharClass(string charClass)
        {
            return getTokenByCharClass(charClass, true);
        }
        /// <summary>
        /// Returns the run of consecutive characters up to (not including) the first
        /// character that belongs to endingCharClass.
        /// </summary>
        private string getTokenNotOfCharClass(string endingCharClass)
        {
            return getTokenByCharClass(endingCharClass, false);
        }
private void removeTokenPostpunctuation()
{
if (token == null)
{
return;
}
var tokenWord = token.getWord();
int tokenLength = tokenWord.Length;
int position = tokenLength - 1;
//while (position > 0 && postpunctuationSymbols.IndexOf((int)tokenWord.charAt(position)) != -1)
while (position > 0 && postpunctuationSymbols.IndexOf(tokenWord[position]) != -1)
{
position--;
}
if (tokenLength - 1 != position)
{
// Copy postpunctuation from token
token.setPostpunctuation(tokenWord.Substring(position + 1));
// truncate token at postpunctuation
token.setWord(tokenWord.Substring(0, position + 1));
}
else
{
token.setPostpunctuation("");
}
}
private string getTokenByCharClass(string charClass, bool containThisCharClass)
{
var buffer = new StringBuilder();
// if we want the returned string to contain chars in charClass, then
// containThisCharClass is TRUE and
// (charClass.indexOf(currentChar) != 1) == containThisCharClass)
// returns true; if we want it to stop at characters of charClass,
// then containThisCharClass is FALSE, and the condition returns
// false.
while ((charClass.IndexOf((char)currentChar) != -1) == containThisCharClass
&& singleCharSymbols.IndexOf((char)currentChar) == -1
&& currentChar != EOF)
{
buffer.Append((char)currentChar);
getNextChar();
}
return buffer.ToString();
}
/// <summary>
/// Removal is not supported by this tokenizer (mirrors the Java
/// Iterator.remove contract of the original implementation).
/// </summary>
/// <exception cref="InvalidOperationException">always thrown</exception>
public void remove()
{
    throw new InvalidOperationException();
}
/// <summary>
/// Indicates whether an I/O error was recorded while reading input.
/// </summary>
/// <returns>true if an error description has been captured</returns>
public bool hasErrors()
{
    // An error is signalled solely by the presence of a stored description.
    return null != errorDescription;
}
/// <summary>
/// Returns the description of the last I/O error encountered while reading
/// input, or null when no error has occurred.
/// </summary>
public string getErrorDescription()
{
    return errorDescription;
}
/// <summary>
/// Heuristically decides whether the boundary between the previous token
/// and the current token separates two sentences: multiple newlines in the
/// intervening whitespace, strong punctuation (':', '?', '!') on the
/// previous token, or a period followed by whitespace and a capitalized
/// word that is not an abbreviation.
/// </summary>
/// <returns>true if the current token starts a new sentence</returns>
public bool isSentenceSeparator()
{
    // Guard BEFORE touching either token: the previous version read
    // token.getWhitespace() ahead of this check and could throw a
    // NullReferenceException at the start of a stream.
    if (lastToken == null || token == null)
    {
        return false;
    }
    string tokenWhiteSpace = token.getWhitespace();
    string lastTokenPostpunctuation = lastToken.getPostpunctuation();

    if (tokenWhiteSpace.IndexOf('\n') != tokenWhiteSpace
            .LastIndexOf('\n'))
    {
        // More than one newline between the tokens => paragraph-style break.
        return true;
    }
    else if (lastTokenPostpunctuation.IndexOf(':') != -1
            || lastTokenPostpunctuation.IndexOf('?') != -1
            || lastTokenPostpunctuation.IndexOf('!') != -1)
    {
        return true;
    }
    else if (lastTokenPostpunctuation.IndexOf('.') != -1
            && tokenWhiteSpace.Length > 1
            && char.IsUpper(token.getWord()[0]))
    {
        return true;
    }
    else
    {
        string lastWord = lastToken.getWord();
        int lastWordLength = lastWord.Length;
        if (lastTokenPostpunctuation.IndexOf('.') != -1
                &&
                /* next word starts with a capital */
                char.IsUpper(token.getWord()[0])
                &&
                /* last word isn't an abbreviation */
                !(char.IsUpper(lastWord
                        [lastWordLength - 1]) || (lastWordLength < 4 && char
                        .IsUpper(lastWord[0]))))
        {
            return true;
        }
    }
    return false;
}
/// <summary>
/// Releases resources held by the tokenizer. Intentionally a no-op: C#'s
/// foreach calls Dispose() on the enumerator when iteration finishes, so
/// throwing here (as the previous version did with NotImplementedException)
/// would crash every foreach over this tokenizer. The underlying reader is
/// owned by the caller and is not closed here.
/// </summary>
public void Dispose()
{
    // Nothing to release; matches the convention used by TokenArrayIterator.
}
/// <summary>
/// Reports whether another token can be produced, i.e. end of input has not
/// been reached. NOTE(review): unlike a conventional IEnumerator, this does
/// not advance — the Current getter performs the actual scan; confirm that
/// callers pair one MoveNext() with exactly one read of Current.
/// </summary>
public bool MoveNext()
{
    return currentChar != EOF;
}
/// <summary>
/// Resetting the tokenizer to the start of its input is not supported.
/// </summary>
/// <exception cref="NotImplementedException">always thrown</exception>
public void Reset()
{
    throw new NotImplementedException();
}
/// <summary>
/// Produces the next token from the input: leading whitespace, then
/// pre-punctuation, then the word itself (a single-character symbol forms a
/// complete one-character word), after which trailing punctuation is split
/// off into the token's post-punctuation.
/// NOTE(review): this getter advances the underlying input as a side
/// effect, which is unusual for an IEnumerator property — callers must read
/// Current exactly once per MoveNext() call; confirm against its users.
/// </summary>
public Token Current
{
    get
    {
        lastToken = token;
        token = new Token();
        // Whitespace and opening punctuation are captured separately
        // from the word itself.
        token.setWhitespace(getTokenOfCharClass(whitespaceSymbols));
        token.setPrepunctuation(getTokenOfCharClass(prepunctuationSymbols));
        if (singleCharSymbols.IndexOf((char)currentChar) != -1)
        {
            // A single-character symbol is a complete word on its own.
            token.setWord(((char)currentChar).ToString(CultureInfo.InvariantCulture));
            getNextChar();
        }
        else
        {
            // Otherwise the word runs until the next whitespace character.
            token.setWord(getTokenNotOfCharClass(whitespaceSymbols));
        }
        token.setPosition(currentPosition);
        token.setLineNumber(lineNumber);
        removeTokenPostpunctuation();
        return token;
    }
}
// Non-generic enumerator support; defers to the strongly-typed Current.
object IEnumerator.Current
{
    get { return Current; }
}
}
}
<|start_filename|>Syn.Speech/Decoder/Search/Partitioner.cs<|end_filename|>
using System;
//PATROLLED
namespace Syn.Speech.Decoder.Search
{
/// <summary>
/// Partitions a list of tokens according to the token score, used in
/// {@link PartitionActiveListFactory}. Provides expected O(n) performance,
/// so it is preferable to fully sorting the token list.
/// </summary>
public class Partitioner
{
    /** Max recursion depth before falling back to sort-based partitioning. **/
    private const int MAX_DEPTH = 50;

    /**
    /// Partitions sub-array of tokens around the end token.
    /// Put all elements less or equal then pivot to the start of the array,
    /// shifting new pivot position
    *
    /// @param tokens the token array to partition
    /// @param start the starting index of the subarray
    /// @param end the pivot and the ending index of the subarray, inclusive
    /// @return the index (after partitioning) of the element around which the array is partitioned
    */
    private int endPointPartition(Token[] tokens, int start, int end)
    {
        Token pivot = tokens[end];
        float pivotScore = pivot.getScore();

        int i = start;
        int j = end - 1;

        while (true)
        {
            // Advance i over tokens that already score at least the pivot.
            while (i < end && tokens[i].getScore() >= pivotScore)
                i++;
            // Retreat j over tokens scoring below the pivot.
            while (j > i && tokens[j].getScore() < pivotScore)
                j--;

            if (j <= i)
                break;

            Token current = tokens[j];
            setToken(tokens, j, tokens[i]);
            setToken(tokens, i, current);
        }

        setToken(tokens, end, tokens[i]);
        setToken(tokens, i, pivot);
        return i;
    }

    /**
    /// Partitions sub-array of tokens around the x-th token by selecting the midpoint of the token array as the pivot.
    /// Partially solves issues with slow performance on already sorted arrays.
    *
    /// @param tokens the token array to partition
    /// @param start the starting index of the subarray
    /// @param end the ending index of the subarray, inclusive
    /// @return the index of the element around which the array is partitioned
    */
    private int midPointPartition(Token[] tokens, int start, int end)
    {
        // Swap the middle element into the pivot slot before partitioning.
        int middle = (start + end) >> 1;
        Token temp = tokens[end];
        setToken(tokens, end, tokens[middle]);
        setToken(tokens, middle, temp);
        return endPointPartition(tokens, start, end);
    }

    /**
    /// Partitions the given array of tokens in place, so that the highest scoring n token will be at the beginning of
    /// the array, not in any order.
    *
    /// @param tokens the array of tokens to partition
    /// @param size the number of tokens to partition
    /// @param n the number of tokens in the final partition
    /// @return the index of the last element in the partition
    */
    public int partition(Token[] tokens, int size, int n)
    {
        if (tokens.Length > n)
        {
            return midPointSelect(tokens, 0, size - 1, n, 0);
        }
        else
        {
            return findBest(tokens, size);
        }
    }

    /**
    /// Simply find the best token and put it in the last slot
    ///
    /// @param tokens array of tokens
    /// @param size the number of tokens to partition
    /// @return index of the best token
    */
    private int findBest(Token[] tokens, int size)
    {
        int r = -1;
        float lowestScore = float.MaxValue;
        // Scans the full backing array (not just "size" entries), matching
        // the upstream sphinx4 implementation.
        for (int i = 0; i < tokens.Length; i++)
        {
            float currentScore = tokens[i].getScore();
            if (currentScore <= lowestScore)
            {
                lowestScore = currentScore;
                r = i; // "r" is the returned index
            }
        }

        // exchange tokens[r] <=> last token,
        // where tokens[r] has the lowest score
        int last = size - 1;
        if (last >= 0)
        {
            Token lastToken = tokens[last];
            setToken(tokens, last, tokens[r]);
            setToken(tokens, r, lastToken);
        }

        // return the last index
        return last;
    }

    // Writes a token into the given slot. The original also updated the
    // token's location; that call is retained (commented) from the port.
    private void setToken(Token[] list, int index, Token token)
    {
        list[index] = token;
        //TODO: CHECK SEMANTICS
        //token.setLocation(index);
    }

    /**
    /// Selects the token with the ith largest token score.
    *
    /// @param tokens the token array to partition
    /// @param start the starting index of the subarray
    /// @param end the ending index of the subarray, inclusive
    /// @param targetSize target size of the partition
    /// @param depth recursion depth to avoid stack overflow and fall back to simple partition.
    /// @return the index of the token with the ith largest score
    */
    private int midPointSelect(Token[] tokens, int start, int end, int targetSize, int depth)
    {
        if (depth > MAX_DEPTH)
        {
            return simplePointSelect(tokens, start, end, targetSize);
        }
        if (start == end)
        {
            return start;
        }
        int partitionToken = midPointPartition(tokens, start, end);
        int newSize = partitionToken - start + 1;
        if (targetSize == newSize)
        {
            return partitionToken;
        }
        else if (targetSize < newSize)
        {
            return midPointSelect(tokens, start, partitionToken - 1, targetSize, depth + 1);
        }
        else
        {
            return midPointSelect(tokens, partitionToken + 1, end, targetSize - newSize, depth + 1);
        }
    }

    /**
    /// Fallback method to get the partition
    *
    /// @param tokens the token array to partition
    /// @param start the starting index of the subarray
    /// @param end the ending index of the subarray, inclusive
    /// @param targetSize target size of the partition
    /// @return the index of the token with the ith largest score
    */
    private int simplePointSelect(Token[] tokens, int start, int end, int targetSize)
    {
        // Array.Sort's third argument is a LENGTH. The previous port passed
        // "end + 1" — Java's Arrays.sort(from, to) exclusive end index —
        // which sorted the wrong range (or threw) whenever start > 0.
        Array.Sort(tokens, start, end - start + 1);
        // NOTE(review): relies on Token's default comparison ordering tokens
        // by descending score — confirm Token's IComparable implementation
        // matches the upstream Scoreable comparator.
        return start + targetSize - 1;
    }
}
}
<|start_filename|>Syn.Speech/Decoder/Scorer/IAcousticScorer.cs<|end_filename|>
using System.Collections.Generic;
using Syn.Speech.Common.FrontEnd;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Scorer
{
/// <summary>
/// Provides a mechanism for scoring a set of HMM states.
/// Implementations are created and configured through the property system
/// (IConfigurable) and follow an allocate / start / score / stop /
/// deallocate lifecycle.
/// </summary>
public interface IAcousticScorer: IConfigurable
{
    /// <summary>
    /// Allocates resources for this scorer.
    /// </summary>
    void allocate();

    /// <summary>
    /// Deallocates resources for this scorer.
    /// </summary>
    void deallocate();

    /// <summary>
    /// Starts the scorer; called at the beginning of a recognition run.
    /// </summary>
    void startRecognition();

    /// <summary>
    /// Stops the scorer; called at the end of a recognition run.
    /// </summary>
    void stopRecognition();

    /// <summary>
    /// Scores the given set of states.
    /// </summary>
    /// <param name="scorableList">a list containing Scoreable objects to be scored</param>
    /// <returns>the best scoring scoreable, or null if there are no more frames to score</returns>
    IData calculateScores(List<IScoreable> scorableList);
}
}
<|start_filename|>Syn.Speech/FrontEnds/SpeechStartSignal.cs<|end_filename|>
using System;
namespace Syn.Speech.FrontEnd
{
/// <summary>
/// A signal that indicates the start of speech.
/// </summary>
public class SpeechStartSignal : Signal
{
    /// <summary>
    /// Constructs a SpeechStartSignal stamped with the current time.
    /// </summary>
    public SpeechStartSignal() : this(DateTime.Now.Ticks)
    {
    }

    /// <summary>
    /// Constructs a SpeechStartSignal at the given time.
    /// </summary>
    /// <param name="time">the time this SpeechStartSignal is created</param>
    public SpeechStartSignal(long time) : base(time)
    {
    }

    /// <summary>
    /// Returns the string "SpeechStartSignal".
    /// </summary>
    /// <returns>the string "SpeechStartSignal"</returns>
    public override String ToString()
    {
        return "SpeechStartSignal";
    }
}
}
<|start_filename|>Syn.Speech/FrontEnds/SpeechEndSignal.cs<|end_filename|>
using System;
namespace Syn.Speech.FrontEnd
{
/// <summary>
/// A signal that indicates the end of speech.
/// </summary>
public class SpeechEndSignal : Signal
{
    /// <summary>
    /// Constructs a SpeechEndSignal stamped with the current time.
    /// </summary>
    public SpeechEndSignal() : this(DateTime.Now.Ticks)
    {
    }

    /// <summary>
    /// Constructs a SpeechEndSignal with the given creation time.
    /// </summary>
    /// <param name="time">the creation time of the SpeechEndSignal</param>
    public SpeechEndSignal(long time) : base(time)
    {
    }

    /// <summary>
    /// Returns the string "SpeechEndSignal".
    /// </summary>
    /// <returns>the string "SpeechEndSignal"</returns>
    public override String ToString()
    {
        return "SpeechEndSignal";
    }
}
}
<|start_filename|>Syn.Speech/Decoder/Search/TokenArrayIterator.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using Syn.Speech.Helper;
//PATROLLED
namespace Syn.Speech.Decoder.Search
{
/// <summary>
/// Adapts a Token[] (of which only the first <c>size</c> entries are valid)
/// to IEnumerator&lt;Token&gt;.
/// NOTE(review): Current advances the cursor (it delegates to next()) while
/// MoveNext() only peeks — callers must read Current exactly once per
/// MoveNext() call; confirm against the enumerator's users.
/// </summary>
class TokenArrayIterator : IEnumerator<Token>
{
    private readonly Token[] tokenArray;
    private readonly int size;
    private int pos;

    /// <summary>
    /// Wraps the first <paramref name="size"/> entries of the given array.
    /// </summary>
    TokenArrayIterator(Token[] tokenArray, int size)
    {
        this.tokenArray = tokenArray;
        this.pos = 0;
        this.size = size;
    }

    /** Returns true if the iteration has more tokens. */
    public bool hasNext()
    {
        return pos < size;
    }

    /** Returns the next token in the iteration. */
    public Token next()
    {
        // Bound by the logical size rather than the backing array length:
        // the array may hold more slots than valid tokens, and the old check
        // (pos >= tokenArray.Length) let next() walk past the range that
        // hasNext() reported.
        if (pos >= size)
        {
            throw new NoSuchElementException();
        }
        return tokenArray[pos++];
    }

    /** Unimplemented, throws an Error if called. */
    public void remove()
    {
        throw new Error("TokenArrayIterator.remove() unimplemented");
    }

    public void Dispose()
    {
        // Nothing to release; the token array is owned by the caller.
    }

    public bool MoveNext()
    {
        return this.hasNext();
    }

    public void Reset()
    {
        // Rewind to the first token (previously a no-op stub, which silently
        // violated the IEnumerator.Reset contract).
        pos = 0;
    }

    public Token Current { get { return this.next(); } }

    object IEnumerator.Current
    {
        get { return Current; }
    }
}
}
<|start_filename|>Syn.Speech/Instrumentation/SausageAccuracyTracker.cs<|end_filename|>
using System;
using Syn.Speech.Decoders.Search;
using Syn.Speech.Recognizers;
using Syn.Speech.Results;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Instrumentation
{
/// <summary>
/// Tracks and reports recognition accuracy by turning each final Result's
/// lattice into a sausage (confusion network) and aligning it against the
/// reference text.
/// </summary>
public class SausageAccuracyTracker : AccuracyTracker
{
    /** The property that defines whether the full token path is displayed */
    [S4Boolean(defaultValue = false)]
    public const String PROP_SHOW_FULL_PATH = "showFullPath";

    /** The property with language model weight for posterior probability computation */
    [S4Double(defaultValue = 10.5f)]
    public const String PROP_LANGUAGE_WEIGHT = "languageWeight";

    // Whether to dump the full best-token path for each final result.
    private bool _showFullPath;
    // Language model weight applied when computing node posteriors.
    private float languageModelWeight;

    /// <summary>
    /// Fully-parameterized constructor for programmatic wiring.
    /// </summary>
    public SausageAccuracyTracker(Recognizer recognizer, bool showSummary, bool showDetails, bool showResults, bool showAlignedResults, bool showRawResults, bool showFullPath, float languageWeight)
        : base(recognizer, showSummary, showDetails, showResults, showAlignedResults, showRawResults)
    {
        _showFullPath = showFullPath;
        languageModelWeight = languageWeight;
    }

    // No-arg constructor — presumably required by the configuration system,
    // which populates the fields via newProperties; confirm.
    public SausageAccuracyTracker()
    {
    }

    /// <summary>
    /// Reads this tracker's settings from the property sheet.
    /// </summary>
    public override void newProperties(PropertySheet ps)
    {
        base.newProperties(ps);
        _showFullPath = ps.getBoolean(PROP_SHOW_FULL_PATH);
        languageModelWeight = ps.getFloat(PROP_LANGUAGE_WEIGHT);
    }

    /// <summary>
    /// Dumps the best token path of the result, when enabled.
    /// </summary>
    /// <param name="result">The result to dump.</param>
    private void showFullPath(Result result)
    {
        if (_showFullPath)
        {
            Console.WriteLine();
            Token bestToken = result.getBestToken();
            if (bestToken != null)
            {
                bestToken.dumpTokenPath();
            }
            else
            {
                Console.WriteLine("Null result");
            }
            Console.WriteLine();
        }
    }

    /// <summary>
    /// Processes a new result: for final results with a reference text,
    /// builds and optimizes the lattice, computes posteriors, converts it to
    /// a sausage (with fillers removed) and aligns it against the reference.
    /// </summary>
    public override void newResult(Result result)
    {
        String @ref = result.getReferenceText();
        if (result.isFinal() && @ref != null)
        {
            Lattice lattice = new Lattice(result);
            LatticeOptimizer optimizer = new LatticeOptimizer(lattice);
            optimizer.optimize();
            lattice.computeNodePosteriors(languageModelWeight);
            SausageMaker sausageMaker = new SausageMaker(lattice);
            Sausage sausage = sausageMaker.makeSausage();
            sausage.removeFillers();

            getAligner().alignSausage(@ref, sausage);
            showFullPath(result);
            showDetails(result.ToString());
        }
    }
}
}
<|start_filename|>Syn.Speech/Decoder/Adaptation/Stats.cs<|end_filename|>
using System;
using Syn.Speech.Api;
using Syn.Speech.Common;
using Syn.Speech.Decoder.Search;
using Syn.Speech.FrontEnd;
using Syn.Speech.Linguist;
using Syn.Speech.Linguist.Acoustic.Tiedstate;
using Syn.Speech.Util;
//PATROLLED
namespace Syn.Speech.Decoder.Adaptation
{
/// <summary>
/// Accumulates the regression statistics (Legetter's G matrices and the
/// corresponding right-hand sides) needed to estimate an MLLR-style
/// transform from decoded speech results.
/// </summary>
public class Stats
{
    // Gaussian means grouped into regression clusters.
    private ClusteredDensityFileData means;
    // regLs[cluster][stream][row][len+1][len+1]: the G matrices.
    private double[][][][][] regLs;
    // regRs[cluster][stream][row][len+1]: the right-hand sides.
    private double[][][][] regRs;
    private int nrOfClusters;
    private Sphinx3Loader loader;
    // Variance floor applied before inversion.
    private float varFlor;
    private LogMath logMath = LogMath.getLogMath();

    /// <summary>
    /// Creates a statistics accumulator over the given model and clustering.
    /// Variances are inverted (and floored) up front so collect() can
    /// multiply instead of divide.
    /// </summary>
    /// <param name="loader">the acoustic model loader (must be a Sphinx3Loader)</param>
    /// <param name="means">the clustered Gaussian means</param>
    public Stats(ILoader loader, ClusteredDensityFileData means)
    {
        this.loader = (Sphinx3Loader)loader;
        this.nrOfClusters = means.getNumberOfClusters();
        this.means = means;
        this.varFlor = (float)1e-5;
        this.invertVariances();
        this.init();
    }

    /// <summary>
    /// Allocates the accumulator arrays, mirroring the original Java
    /// "new double[len][len + 1][len + 1]" and "new double[len][len + 1]".
    /// </summary>
    private void init()
    {
        int len = loader.getVectorLength()[0];
        this.regLs = new double[nrOfClusters][][][][];
        this.regRs = new double[nrOfClusters][][][];

        for (int i = 0; i < nrOfClusters; i++)
        {
            this.regLs[i] = new double[loader.getNumStreams()][][][];
            this.regRs[i] = new double[loader.getNumStreams()][][];

            for (int j = 0; j < loader.getNumStreams(); j++)
            {
                len = loader.getVectorLength()[j];
                // Fully allocate the jagged arrays. The previous port left
                // the inner arrays null (flagged by its own TODO), so
                // collect() would fail with a NullReferenceException on the
                // first += into regLs/regRs.
                this.regLs[i][j] = new double[len][][];
                this.regRs[i][j] = new double[len][];
                for (int k = 0; k < len; k++)
                {
                    this.regLs[i][j][k] = new double[len + 1][];
                    for (int p = 0; p <= len; p++)
                    {
                        this.regLs[i][j][k][p] = new double[len + 1];
                    }
                    this.regRs[i][j][k] = new double[len + 1];
                }
            }
        }
    }

    /// <summary>
    /// Returns the clustered means used by this accumulator.
    /// </summary>
    public ClusteredDensityFileData getClusteredData()
    {
        return this.means;
    }

    /// <summary>
    /// Returns the accumulated G matrices.
    /// </summary>
    public double[][][][][] getRegLs()
    {
        return regLs;
    }

    /// <summary>
    /// Returns the accumulated right-hand sides.
    /// </summary>
    public double[][][][] getRegRs()
    {
        return regRs;
    }

    // Replaces every variance by its (floored) reciprocal, in place in the
    // loader's variance pool. Non-positive variances become 0.5; variances
    // below the floor become 1/varFlor.
    private void invertVariances()
    {
        for (int i = 0; i < loader.getNumStates(); i++)
        {
            for (int k = 0; k < loader.getNumGaussiansPerState(); k++)
            {
                for (int l = 0; l < loader.getVectorLength()[0]; l++)
                {
                    if (loader.getVariancePool().get(
                            i * loader.getNumGaussiansPerState() + k)[l] <= 0f)
                    {
                        this.loader.getVariancePool().get(
                            i * loader.getNumGaussiansPerState() + k)[l] = (float)0.5;
                    }
                    else if (loader.getVariancePool().get(
                        i * loader.getNumGaussiansPerState() + k)[l] < varFlor)
                    {
                        this.loader.getVariancePool().get(
                            i * loader.getNumGaussiansPerState() + k)[l] = (float)(1f / varFlor);
                    }
                    else
                    {
                        this.loader.getVariancePool().get(
                            i * loader.getNumGaussiansPerState() + k)[l] = (float)(1f / loader
                                .getVariancePool().get(
                                    i * loader.getNumGaussiansPerState()
                                            + k)[l]);
                    }
                }
            }
        }
    }

    // Converts per-component log scores into linear posteriors, normalizing
    // per stream by the stream's maximum. NOTE: modifies (and returns) the
    // componentScores array in place.
    private float[] computePosterios(float[] componentScores, int numStreams)
    {
        float[] posteriors = componentScores;
        int step = componentScores.Length / numStreams;
        int startIdx = 0;

        for (int i = 0; i < numStreams; i++)
        {
            float max = posteriors[startIdx];
            for (int j = startIdx + 1; j < startIdx + step; j++)
            {
                if (posteriors[j] > max)
                {
                    max = posteriors[j];
                }
            }
            for (int j = startIdx; j < startIdx + step; j++)
            {
                posteriors[j] = (float)logMath.logToLinear(posteriors[j] - max);
            }
            startIdx += step;
        }
        return posteriors;
    }

    /// <summary>
    /// Accumulates regression statistics along the best token path of the
    /// given result, walking predecessors from the best token back to the
    /// start and folding each emitting HMM state's posteriors into
    /// regLs/regRs.
    /// </summary>
    /// <param name="result">a decoded speech result with a best token</param>
    /// <exception cref="Exception">if the result has no best token</exception>
    public void collect(SpeechResult result)
    {
        Token token = result.getResult().getBestToken();
        float[] componentScore, featureVector, posteriors, tmean;
        int[] len;
        float dnom, wtMeanVar, wtDcountVar, wtDcountVarMean, mean;
        int mId, cluster;
        int numStreams, gauPerState;

        if (token == null)
            throw new Exception("Best token not found!");

        do
        {
            FloatData feature = (FloatData) token.getData();
            ISearchState ss = token.getSearchState();

            // Only emitting HMM states contribute statistics.
            if (!(ss is IHMMSearchState && ss.isEmitting()))
            {
                token = token.getPredecessor();
                continue;
            }

            componentScore = token.calculateComponentScore(feature);
            featureVector = FloatData.toFloatData(feature).getValues();
            mId = (int) ((IHMMSearchState) token.getSearchState()).getHMMState()
                    .getMixtureId();
            if (loader is Sphinx3Loader && ((Sphinx3Loader)loader).hasTiedMixtures())
                // use CI phone ID for tied mixture model
                mId = ((Sphinx3Loader)loader).getSenone2Ci()[mId];
            len = loader.getVectorLength();
            numStreams = loader.getNumStreams();
            gauPerState = loader.getNumGaussiansPerState();
            posteriors = this.computePosterios(componentScore, numStreams);
            int featVectorStartIdx = 0;

            for (int i = 0; i < numStreams; i++)
            {
                for (int j = 0; j < gauPerState; j++)
                {
                    cluster = means.getClassIndex(mId * numStreams
                            * gauPerState + i * gauPerState + j);
                    dnom = posteriors[i * gauPerState + j];
                    if (dnom > 0f)
                    {
                        tmean = loader.getMeansPool().get(
                                mId * numStreams * gauPerState + i
                                        * gauPerState + j);

                        for (int k = 0; k < len[i]; k++)
                        {
                            mean = posteriors[i * gauPerState + j]
                                    * featureVector[k + featVectorStartIdx];
                            wtMeanVar = mean
                                    * loader.getVariancePool().get(
                                            mId * numStreams * gauPerState + i
                                                    * gauPerState + j)[k];
                            wtDcountVar = dnom
                                    * loader.getVariancePool().get(
                                            mId * numStreams * gauPerState + i
                                                    * gauPerState + j)[k];

                            // Accumulate only the upper triangle here;
                            // fillRegLowerPart() mirrors it afterwards.
                            for (int p = 0; p < len[i]; p++)
                            {
                                wtDcountVarMean = wtDcountVar * tmean[p];

                                for (int q = p; q < len[i]; q++)
                                {
                                    regLs[cluster][i][k][p][q] += wtDcountVarMean
                                            * tmean[q];
                                }
                                regLs[cluster][i][k][p][len[i]] += wtDcountVarMean;
                                regRs[cluster][i][k][p] += wtMeanVar * tmean[p];
                            }
                            regLs[cluster][i][k][len[i]][len[i]] += wtDcountVar;
                            regRs[cluster][i][k][len[i]] += wtMeanVar;
                        }
                    }
                }
                featVectorStartIdx += len[i];
            }
            token = token.getPredecessor();
        } while (token != null);
    }

    /// <summary>
    /// Fill lower part of Legetter's set of G matrices (collect() only
    /// accumulates the upper triangle).
    /// </summary>
    public void fillRegLowerPart()
    {
        for (int i = 0; i < this.nrOfClusters; i++)
        {
            for (int j = 0; j < loader.getNumStreams(); j++)
            {
                for (int l = 0; l < loader.getVectorLength()[j]; l++)
                {
                    for (int p = 0; p <= loader.getVectorLength()[j]; p++)
                    {
                        for (int q = p + 1; q <= loader.getVectorLength()[j]; q++)
                        {
                            regLs[i][j][l][q][p] = regLs[i][j][l][p][q];
                        }
                    }
                }
            }
        }
    }

    /// <summary>
    /// Estimates and returns a transform from the accumulated statistics.
    /// </summary>
    public Transform createTransform()
    {
        Transform transform = new Transform(loader, nrOfClusters);
        transform.update(this);
        return transform;
    }
}
}
<|start_filename|>Syn.Speech/Api/SpeechAligner.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using Syn.Speech.Logging;
using Syn.Speech.Alignment;
using Syn.Speech.Helper;
using Syn.Speech.Linguist.Language.Grammar;
using Syn.Speech.Linguist.Language.NGram;
using Syn.Speech.Recognizers;
using Syn.Speech.Results;
using Syn.Speech.Util;
//PATROLLED + REFACTORED
namespace Syn.Speech.Api
{
/// <summary>
/// Aligns a long audio recording against its transcript. Performs up to
/// three language-model-driven passes over progressively smaller unaligned
/// gaps, followed by a final grammar-driven pass.
/// </summary>
public class SpeechAligner
{
    //private Logger logger = Logger.getLogger(typeof(SpeechAligner).Name);

    private const int TupleSize = 3;
    private const int MinLmAlignSize = 20;

    private readonly Context _context;
    private readonly Recognizer _recognizer;
    private readonly AlignerGrammar _grammar;
    private readonly DynamicTrigramModel _languageModel;

    /// <summary>
    /// Creates an aligner for the given acoustic model and dictionary, with
    /// an optional g2p model for out-of-vocabulary words.
    /// </summary>
    /// <param name="amPath">path to the acoustic model</param>
    /// <param name="dictPath">path to the pronunciation dictionary</param>
    /// <param name="g2PPath">path to the g2p model, or null to disallow missing words</param>
    public SpeechAligner(string amPath, string dictPath, string g2PPath)
    {
        var configuration = new Configuration();
        configuration.AcousticModelPath = amPath;
        configuration.DictionaryPath = dictPath;

        _context = new Context(configuration);
        if (g2PPath != null)
        {
            _context.SetLocalProperty("dictionary->allowMissingWords", "true");
            _context.SetLocalProperty("dictionary->createMissingWords", "true");
            _context.SetLocalProperty("dictionary->g2pModelPath", g2PPath);
            _context.SetLocalProperty("dictionary->g2pMaxPron", "2");
        }
        _context.SetLocalProperty("lexTreeLinguist->languageModel","dynamicTrigramModel");
        _recognizer = (Recognizer)_context.GetInstance(typeof(Recognizer));
        _grammar = (AlignerGrammar)_context.GetInstance(typeof(AlignerGrammar));
        _languageModel = (DynamicTrigramModel)_context.GetInstance(typeof(DynamicTrigramModel));
        Tokenizer = new UsEnglishTokenizer();
    }

    /// <summary>
    /// Aligns the audio against a raw transcript string, expanding it into
    /// words with the configured tokenizer first.
    /// </summary>
    public List<WordResult> Align(FileInfo audioUrl, string transScript)
    {
        return Align(audioUrl, Tokenizer.Expand(transScript));
    }

    /// <summary>
    /// Aligns the audio against a sentence-segmented transcript and returns
    /// the time-stamped word results for every transcript position that
    /// could be aligned.
    /// </summary>
    public List<WordResult> Align(FileInfo audioUrl, List<string> sentenceTranscript)
    {
        var transcript = SentenceToWords(sentenceTranscript);

        var aligner = new LongTextAligner(transcript, TupleSize);
        var alignedWords = new Dictionary<int, WordResult>();
        var ranges = new LinkedList<Range>();

        var texts = new LinkedList<List<string>>();
        var timeFrames = new LinkedList<TimeFrame>();

        ranges.AddLast(new Range(0, transcript.Count));
        texts.Offer(transcript);
        TimeFrame totalTimeFrame = TimeFrame.Infinite;
        timeFrames.Offer(totalTimeFrame);
        long lastFrame = TimeFrame.Infinite.End;

        for (int i = 0; i < 4; i++)
        {
            // Final pass uses the aligner search manager with a grammar.
            if (i == 3)
            {
                _context.SetLocalProperty("decoder->searchManager", "alignerSearchManager");
            }

            while (texts.Count != 0)
            {
                Debug.Assert(texts.Count == ranges.Count);
                Debug.Assert(texts.Count == timeFrames.Count);

                var text = texts.Poll();
                var frame = timeFrames.Poll();
                var range = ranges.Poll();

                // NOTE(review): this tests the queue length, not the chunk's
                // word count (text.Count) — verify against the upstream
                // sphinx4 SpeechAligner before changing.
                if (i < 3 && texts.Count < MinLmAlignSize)
                {
                    continue;
                }

                this.LogInfo("Aligning frame " + frame + " to text " + text + " range " + range);

                if (i < 3)
                {
                    _languageModel.SetText(text);
                }

                _recognizer.Allocate();

                if (i == 3)
                {
                    _grammar.SetWords(text);
                }

                _context.SetSpeechSource(audioUrl.OpenRead(), frame);

                var hypothesis = new List<WordResult>();
                Result speechResult;
                while (null != (speechResult = _recognizer.Recognize()))
                {
                    hypothesis.AddRange(speechResult.GetTimedBestResult(false));
                }

                if (i == 0)
                {
                    if (hypothesis.Count > 0)
                    {
                        lastFrame = hypothesis[hypothesis.Count - 1].TimeFrame.End;
                    }
                }

                var words = new List<string>();
                foreach (WordResult wr in hypothesis)
                {
                    words.Add(wr.Word.Spelling);
                }
                int[] alignment = aligner.Align(words, range);

                List<WordResult> results = hypothesis;

                this.LogInfo("Decoding result is " + results);

                // dumpAlignment(transcript, alignment, results);
                DumpAlignmentStats(transcript, alignment, results);

                for (int j = 0; j < alignment.Length; j++)
                {
                    if (alignment[j] != -1)
                    {
                        // Java Map.put semantics: overwrite on re-alignment.
                        // Dictionary.Add (used previously) throws
                        // ArgumentException when a later pass aligns the
                        // same transcript position again.
                        alignedWords[alignment[j]] = hypothesis[j];
                    }
                }

                _recognizer.Deallocate();
            }
            ScheduleNextAlignment(transcript, alignedWords, ranges, texts, timeFrames, lastFrame);
        }
        return new List<WordResult>(alignedWords.Values);
    }

    /// <summary>
    /// Splits each sentence on whitespace and returns the flat word list.
    /// </summary>
    public List<string> SentenceToWords(List<string> sentenceTranscript)
    {
        var transcript = new List<string>();
        foreach (var sentence in sentenceTranscript)
        {
            string[] words = sentence.Split("\\s+");
            foreach (var word in words)
            {
                if (word.Length > 0) { transcript.Add(word);}
            }
        }
        return transcript;
    }

    // Logs insertion/deletion counts and the combined error rate for one
    // alignment pass.
    private void DumpAlignmentStats(List<String> transcript, int[] alignment, List<WordResult> results)
    {
        int insertions = 0;
        int deletions = 0;
        int size = transcript.Count;
        int[] aid = alignment;
        int lastId = -1;
        for (int ij = 0; ij < aid.Length; ++ij)
        {
            if (aid[ij] == -1)
            {
                insertions++;
            }
            else
            {
                if (aid[ij] - lastId > 1)
                {
                    deletions += aid[ij] - lastId;
                }
                lastId = aid[ij];
            }
        }
        if (lastId >= 0 && transcript.Count - lastId > 1)
        {
            deletions += transcript.Count - lastId;
        }
        this.LogInfo(String.Format("Size {0} deletions {1} insertions {2} error rate {3}", size, insertions, deletions,(insertions + deletions) / ((float)size) * 100f));
    }

    // Queues the still-unaligned transcript gaps (with their time frames)
    // for the next alignment pass.
    private void ScheduleNextAlignment(List<string> transcript, Dictionary<int, WordResult> alignedWords, LinkedList<Range> ranges, LinkedList<List<string>> texts, LinkedList<TimeFrame> timeFrames, long lastFrame)
    {
        int prevKey = -1;
        long prevEnd = 0;
        foreach (var e in alignedWords)
        {
            if (e.Key - prevKey > 1)
            {
                CheckedOffer(transcript, texts, timeFrames, ranges,
                        prevKey + 1, e.Key, prevEnd, e.Value.TimeFrame.Start);
            }
            prevKey = e.Key;
            prevEnd = e.Value.TimeFrame.End;
        }
        if (transcript.Count - prevKey > 1)
        {
            CheckedOffer(transcript, texts, timeFrames, ranges,
                    prevKey + 1, transcript.Count, prevEnd, lastFrame);
        }
    }

    // Debug helper: prints the alignment of decoded words against the
    // transcript ('+' = inserted word, '-' = missed transcript word).
    private void DumpAlignment(List<string> transcript, int[] alignment, List<WordResult> results)
    {
        this.LogInfo("Alignment");

        int[] aid = alignment;
        int lastId = -1;
        for (int ij = 0; ij < aid.Length; ++ij)
        {
            if (aid[ij] == -1)
            {
                this.LogInfo(string.Format("+ {0}", results[ij]));
            }
            else
            {
                if (aid[ij] - lastId > 1)
                {
                    // GetRange takes (index, COUNT) — the previous port
                    // passed Java subList's exclusive end index, which threw
                    // or took the wrong slice.
                    foreach (string result1 in transcript.GetRange(lastId + 1, aid[ij] - lastId - 1))
                    {
                        // ".NET composite format; the Java-style "%-25s"
                        // placeholder previously printed literally and
                        // dropped the argument.
                        this.LogInfo(string.Format("- {0,-25}", result1));
                    }
                }
                else
                {
                    this.LogInfo(string.Format("  {0,-25}", transcript[aid[ij]]));
                }
                lastId = aid[ij];
            }
        }

        if (lastId >= 0 && transcript.Count - lastId > 1)
        {
            foreach (string result1 in transcript.GetRange(lastId + 1, transcript.Count - lastId - 1))
            {
                this.LogInfo(string.Format("- {0,-25}", result1));
            }
        }
    }

    // Queues a transcript chunk [start, end) with its time frame unless the
    // average time per word is implausibly small.
    private void CheckedOffer(List<string> transcript, LinkedList<List<string>> texts, LinkedList<TimeFrame> timeFrames, LinkedList<Range> ranges, int start, int end, long timeStart, long timeEnd)
    {
        var wordDensity = ((double)(timeEnd - timeStart)) / (end - start);

        // Skip range if it's too short, average word is less than 10
        // milliseconds
        if (wordDensity < 10.0)
        {
            // GetRange takes (index, COUNT), not an exclusive end index.
            this.LogInfo("Skipping text range due to a high density " + transcript.GetRange(start, end - start));
            return;
        }

        texts.Offer(transcript.GetRange(start, end - start));
        timeFrames.Offer(new TimeFrame(timeStart, timeEnd));
        ranges.Offer(new Range(start, end - 1));
    }

    // Tokenizer used to expand raw transcript strings into word sentences.
    public ITextTokenizer Tokenizer { get; set; }
}
}
<|start_filename|>Syn.Speech/Decoder/Search/AlternateHypothesisManager.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Diagnostics;
using Syn.Speech.Common;
//PATROLLED
namespace Syn.Speech.Decoder.Search
{
/// <summary>
/// Manager for pruned hypotheses: records, per token, the lower-scoring
/// Viterbi predecessors that would otherwise be lost, so they can be used
/// later (e.g. for lattice construction).
/// </summary>
public class AlternateHypothesisManager
{
    private Dictionary<Token, List<Token>> viterbiLoserMap = new Dictionary<Token, List<Token>>();
    private int maxEdges;

    /**
     * Creates an alternate hypotheses manager
     *
     * @param maxEdges the maximum edges allowed
     */
    public AlternateHypothesisManager(int maxEdges)
    {
        this.maxEdges = maxEdges;
    }

    /**
     * Collects adds alternate predecessors for a token that would have lost because of viterbi.
     *
     * @param token - a token that has an alternate lower scoring predecessor that still might be of interest
     * @param predecessor - a predecessor that scores lower than token.getPredecessor().
     */
    public void addAlternatePredecessor(Token token, Token predecessor)
    {
        Trace.Assert(predecessor != token.getPredecessor());
        // Dictionary's indexer throws KeyNotFoundException for a missing key
        // (unlike Java's Map.get, which the original port assumed), so use
        // TryGetValue to implement the get-or-create pattern.
        List<Token> list;
        if (!viterbiLoserMap.TryGetValue(token, out list))
        {
            list = new List<Token>();
            viterbiLoserMap.Add(token, list);
        }
        list.Add(predecessor);
    }

    /**
     * Returns a list of alternate predecessors for a token.
     *
     * @param token - a token that may have alternate lower scoring predecessor that still might be of interest
     * @return A list of predecessors that scores lower than token.getPredecessor(), or null if none were recorded.
     */
    public List<Token> getAlternatePredecessors(Token token)
    {
        // Mirror Java Map.get: return null for an absent key instead of
        // letting the indexer throw KeyNotFoundException.
        List<Token> result;
        viterbiLoserMap.TryGetValue(token, out result);
        return result;
    }

    /** Purge all but max number of alternate preceding token hypotheses. */
    public void purge()
    {
        int max = maxEdges - 1;

        // Snapshot the keys: replacing values while enumerating the
        // dictionary directly is an invalid mutation, and the previous
        // Add() on an existing key threw ArgumentException.
        foreach (Token key in new List<Token>(viterbiLoserMap.Keys))
        {
            List<Token> list = viterbiLoserMap[key];
            list.Sort(new ScoreableComparator());
            List<Token> newList = list.GetRange(0, list.Count > max ? max : list.Count);
            viterbiLoserMap[key] = newList;
        }
    }

    /// <summary>
    /// Returns true if any alternate predecessors were recorded for the token.
    /// </summary>
    public bool hasAlternatePredecessors(Token token)
    {
        return viterbiLoserMap.ContainsKey(token);
    }
}
}
<|start_filename|>Syn.Speech/Result/Result.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using Syn.Speech.Common;
using Syn.Speech.Common.FrontEnd;
using Syn.Speech.Decoder.Search;
using Syn.Speech.FrontEnd;
using Syn.Speech.Util;
namespace Syn.Speech.Result
{
/// <summary>
/// Provides recognition results. Results can be partial or final. A result should not be modified
/// before it is a final result. Note that a result may not contain all possible information.
///
/// The following methods are not yet defined but should be:
///
///
/// public Result getDAG(int compressionLevel);
///
/// </summary>
public class Result
{
private ActiveList activeList;
private List<Token> resultList;
private AlternateHypothesisManager alternateHypothesisManager;
private Boolean _isFinal=false;
private String reference =String.Empty;
private Boolean wordTokenFirst;
private int currentFrameNumber=-1;
private LogMath logMath = null;
/// <summary>
/// Determines if the result is a final result. A final result is guaranteed to no longer be
/// modified by the SearchManager that generated it. Non-final results can be modifed by a
/// <code>SearchManager.recognize</code> calls.
/// </summary>
/// <returns>true if the result is a final result</returns>
/// <summary>
///Creates a result
///@param activeList the active list associated with this result
///@param resultList the result list associated with this result
///@param frameNumber the frame number for this result.
///@param isFinal if true, the result is a final result
/// <summary>
/// <summary>
/// Creates a result that also carries the alternate-hypothesis manager used
/// later for lattice construction.
/// </summary>
/// <param name="alternateHypothesisManager">manager of pruned hypotheses</param>
/// <param name="activeList">the active list associated with this result</param>
/// <param name="resultList">the result (final token) list</param>
/// <param name="frameNumber">the frame number for this result</param>
/// <param name="isFinal">true if this is a final result</param>
/// <param name="wordTokenFirst">presumably whether word tokens precede unit tokens in the token chains — confirm against the linguist configuration</param>
public Result(AlternateHypothesisManager alternateHypothesisManager,
    ActiveList activeList, List<Token> resultList, int frameNumber,
    Boolean isFinal, Boolean wordTokenFirst)
    : this(activeList, resultList, frameNumber, isFinal)
{
    this.alternateHypothesisManager = alternateHypothesisManager;
    this.wordTokenFirst = wordTokenFirst;
}
/// <summary>
///Creates a result
///@param activeList the active list associated with this result
///@param resultList the result list associated with this result
///@param frameNumber the frame number for this result.
///@param isFinal if true, the result is a final result. This means that the last frame in the
/// speech segment has been decoded.
/// <summary>
/// <summary>
/// Creates a result.
/// </summary>
/// <param name="activeList">the active list associated with this result</param>
/// <param name="resultList">the result (final token) list</param>
/// <param name="frameNumber">the frame number for this result</param>
/// <param name="isFinal">true if the last frame in the speech segment has been decoded</param>
public Result(ActiveList activeList, List<Token> resultList,int frameNumber, Boolean isFinal)
{
    this.activeList = activeList;
    this.resultList = resultList;
    this.currentFrameNumber = frameNumber;
    this._isFinal = isFinal;
    // Cache the shared log math instance for score conversions.
    logMath = LogMath.getLogMath();
}
/// <summary>
///Determines if the result is a final result. A final result is guaranteed to no longer be
///modified by the SearchManager that generated it. Non-final results can be modifed by a
///<code>SearchManager.recognize</code> calls.
///@return true if the result is a final result
/// <summary>
/// <summary>
/// Determines if the result is final. A final result is guaranteed to no
/// longer be modified by the SearchManager that generated it.
/// </summary>
/// <returns>true if the result is final</returns>
public Boolean isFinal()
{
    return _isFinal;
}
/// <summary>
///Returns the log math used for this Result.
///
///@return the log math used
/// <summary>
/// <summary>
/// Returns the log math used for this Result.
/// </summary>
/// <returns>the log math used</returns>
public LogMath getLogMath()
{
    return logMath;
}
/// <summary>
///Returns a list of active tokens for this result. The list contains zero or active
///<code>Token</code> objects that represents the leaf nodes of all active branches in the
///result (sometimes referred to as the 'lattice').
///<p/>
///The lattice is live and may be modified by a SearchManager during a recognition. Once the
///Result is final, the lattice is fixed and will no longer be modified by the SearchManager.
///Applications can modify the lattice (to prepare for a re-recognition, for example) only after
///<code>isFinal</code> returns <code>true</code>
///
///@return a list containing the active tokens for this result
///@see Token
/// <summary>
/// <summary>
/// Returns the active tokens for this result — the leaf nodes of all active
/// branches in the lattice. The list is live while recognition is running
/// and fixed once isFinal() returns true.
/// </summary>
/// <returns>the active token list</returns>
public ActiveList getActiveTokens()
{
    return activeList;
}
/// <summary>
///Returns a list of result tokens for this result. The list contains zero or more result
///<code>Token</code> objects that represents the leaf nodes of all final branches in the result
///(sometimes referred to as the 'lattice').
///<p/>
///The lattice is live and may be modified by a SearchManager during a recognition. Once the
///Result is final, the lattice is fixed and will no longer be modified by the SearchManager.
///Applications can modify the lattice (to prepare for a re-recognition, for example) only after
///<code>isFinal</code> returns <code>true</code>
///
///return a list containing the final result tokens for this result
///see Token
/// <summary>
///
/// <summary>
/// Returns the result tokens for this result — the leaf nodes of all final
/// branches in the lattice. The list is live while recognition is running
/// and fixed once isFinal() returns true.
/// </summary>
/// <returns>the final result token list</returns>
public List<Token> getResultTokens()
{
    return resultList;
}
        /// <summary>
        /// Returns the AlternateHypothesisManager used to construct a Lattice.
        /// </summary>
        /// <returns>the AlternateHypothesisManager</returns>
        public AlternateHypothesisManager getAlternateHypothesisManager()
        {
            return alternateHypothesisManager;
        }
        /// <summary>
        /// Returns the current frame number.
        /// </summary>
        /// <returns>the frame number</returns>
        public int getFrameNumber()
        {
            return currentFrameNumber;
        }
/// <summary>
///Returns the best scoring final token in the result. A final token is a token that has reached
///a final state in the current frame.
///
///@return the best scoring final token or null
/// <summary>
public Token getBestFinalToken()
{
Token bestToken = null;
foreach (Token token in resultList)
{
if (bestToken == null || token.getScore() > bestToken.getScore())
{
bestToken = token;
}
}
return bestToken;
}
/// <summary>
///Returns the best scoring token in the result. First, the best final token is retrieved. A
///final token is one that has reached the final state in the search space. If no final tokens
///can be found, then the best, non-final token is returned.
///
///@return the best scoring token or null
/// <summary>
public Token getBestToken()
{
Token bestToken = getBestFinalToken();
if (bestToken == null) {
bestToken = getBestActiveToken();
}
return bestToken;
}
/// <summary>
///Returns the best scoring token in the active set
///
///@return the best scoring token or null
/// <summary>
public Token getBestActiveToken()
{
Token bestToken = null;
if (activeList != null) {
foreach (Token token in activeList.getTokens())
{
if (bestToken == null
|| token.getScore() > bestToken.getScore())
{
bestToken = token;
}
}
}
return bestToken;
}
/// <summary>
///Searches through the n-best list to find the the branch that matches the given string
///
///@param text the string to search for
///@return the token at the head of the branch or null
/// <summary>
public Token findToken(String text)
{
text = text.Trim();
foreach (Token token in resultList)
{
if (text.Equals(token.getWordPathNoFiller()))
{
return token;
}
}
return null;
}
/// <summary>
///Searches through the n-best list to find the the branch that matches the beginning of the
///given string
///
///@param text the string to search for
///@return the list token at the head of the branch
/// <summary>
public List<Token> findPartialMatchingTokens(String text)
{
List<Token> list = new List<Token>();
text = text.Trim();
foreach (Token token in activeList.getTokens())
{
if (text.StartsWith(token.getWordPathNoFiller()))
{
list.Add(token);
}
}
return list;
}
/// <summary>
///Returns the best scoring token that matches the beginning of the given text.
///
///@param text the text to match
/// <summary>
public Token getBestActiveParitalMatchingToken(String text)
{
List<Token> matchingList = findPartialMatchingTokens(text);
Token bestToken = null;
foreach (Token token in matchingList)
{
if (bestToken == null || token.getScore() > bestToken.getScore())
{
bestToken = token;
}
}
return bestToken;
}
        /// <summary>
        /// Returns detailed frame statistics for this result. Currently unimplemented: always
        /// returns null (see the TBD marker below).
        /// </summary>
        /// <returns>frame statistics for this result as an array, with one element per frame, or
        /// <code>null</code> if no frame statistics are available</returns>
        public IFrameStatistics[] getFrameStatistics()
        {
            return null; // [[[ TBD: write me ]]]
        }
        /// <summary>
        /// Gets the starting frame number for the result. Note that this method is currently not
        /// implemented, and always returns zero.
        /// </summary>
        /// <returns>the starting frame number for the result</returns>
        public int getStartFrame()
        {
            return 0;
        }
        /// <summary>
        /// Gets the ending frame number for the result. Note that this method is currently not
        /// implemented, and always returns zero.
        /// </summary>
        /// <returns>the ending frame number for the result</returns>
        public int getEndFrame()
        {
            return 0; // [[[ TBD: write me ]]]
        }
/// <summary>
///Gets the feature frames associated with this result
///
///@return the set of feature frames associated with this result, or null if the frames are not
/// available.
/// <summary>
public List<IData> getDataFrames()
{
// find the best token, and then trace back for all the features
Token token = getBestToken();
if (token == null)
return null;
LinkedList<IData> featureList = new LinkedList<IData>();
do {
IData feature = token.getData();
if (feature != null)
featureList.AddFirst(feature);
token = token.getPredecessor();
} while (token != null);
return featureList.ToList();
}
/// <summary>
///Returns the string of the best result, removing any filler words. This method first attempts
///to return the best final result, that is, the result that has reached the final state of the
///search space. If there are no best final results, then the best non-final result, that is,
///the one that did not reach the final state, is returned.
///
///@return the string of the best result, removing any filler words
/// <summary>
public String getBestResultNoFiller()
{
Token token = getBestToken();
if (token == null) {
return "";
} else {
return token.getWordPathNoFiller();
}
}
/// <summary>
///Returns the string of the best final result, removing any filler words. A final result is a
///path that has reached the final state. A Result object can also contain paths that did not
///reach the final state, and those paths are not returned by this method.
///
///@return the string of the best result, removing any filler words, or null if there are no
/// best results
/// <summary>
public String getBestFinalResultNoFiller()
{
Token token = getBestFinalToken();
if (token == null) {
return "";
} else {
return token.getWordPathNoFiller();
}
}
/// <summary>
///The method is used when the application wants the phonemes on the best final path. Note that
///words may have more than one pronunciation, so this is not equivalent to the word path e.g.
///one[HH,W,AH,N] to[T,UW] three[TH,R,IY]
///
///@return the String of words and associated phonemes on the best path
/// <summary>
public String getBestPronunciationResult()
{
Token token = getBestFinalToken();
if (token == null) {
return "";
}
else {
return token.getWordPath(false, true);
}
}
/// <summary>
///Returns the string of words (with timestamp) for this token.
///
///@param withFillers true if we want filler words included, false otherwise
///@param wordTokenFirst true if the word tokens come before other types of tokens
///@return the string of words
/// <summary>
public List<WordResult> getTimedBestResult(Boolean withFillers)
{
Token token = getBestToken();
if (token == null)
{
return (List<WordResult>)new List<WordResult> { };
}
else {
if (wordTokenFirst) {
return getTimedWordPath(token, withFillers);
} else {
return getTimedWordTokenLastPath(token, withFillers);
}
}
}
        /// <summary>
        /// Returns the words (with timestamps) for this token. This method assumes that the word
        /// tokens come before other types of token.
        /// </summary>
        /// <param name="token">the token to trace back from</param>
        /// <param name="withFillers">true if filler words should be included, false otherwise</param>
        /// <returns>list of words with timestamps, in chronological order</returns>
        public List<WordResult> getTimedWordPath(Token token, Boolean withFillers)
        {
            // Get to the first emitting token.
            while (token != null && !token.isEmitting())
            {
                token = token.getPredecessor();
            }
            List<WordResult> result = new List<WordResult>();
            if (token != null)
            {
                // prevWordFirstFeature: feature of the first frame of the word seen most recently
                // (later in time, since we walk backwards); prevFeature: most recently seen feature.
                // NOTE(review): both are cast to FloatData below — assumes the frontend produces
                // FloatData frames; confirm against the configured frontend.
                IData prevWordFirstFeature = token.getData();
                IData prevFeature = prevWordFirstFeature;
                token = token.getPredecessor();
                while (token != null)
                {
                    if (token.isWord()) {
                        IWord word = token.getWord();
                        if (withFillers || !word.isFiller())
                        {
                            TimeFrame timeFrame =
                                    new TimeFrame(
                                            ((FloatData) prevFeature)
                                                    .getCollectTime(),
                                            ((FloatData) prevWordFirstFeature)
                                                    .getCollectTime());
                            result.Add(new WordResult(word, timeFrame, token
                                    .getScore(), 1.0f));
                        }
                        // The frame after this word boundary starts the next (later) word.
                        prevWordFirstFeature = prevFeature;
                    }
                    IData feature = token.getData();
                    if (feature != null) {
                        prevFeature = feature;
                    }
                    token = token.getPredecessor();
                }
            }
            // We walked backwards in time, so restore chronological order.
            result.Reverse();
            return result;
        }
        /// <summary>
        /// Returns the words (with timestamps) for this token. This method assumes that the word
        /// tokens come after the unit and hmm tokens.
        /// </summary>
        /// <param name="token">the token to trace back from</param>
        /// <param name="withFillers">true if filler words should be included, false otherwise</param>
        /// <returns>list of words with timestamps, in chronological order</returns>
        public List<WordResult> getTimedWordTokenLastPath(Token token, Boolean withFillers)
        {
            IWord word = null;
            IData lastFeature = null;
            IData lastWordFirstFeature = null;
            List<WordResult> result = new List<WordResult>();
            // Walk backwards along the predecessor chain; a WordResult is emitted for the
            // previously seen word each time a new word boundary is crossed.
            while (token != null)
            {
                if (token.isWord())
                {
                    if (word != null && lastFeature != null) {
                        if (withFillers || !word.isFiller()) {
                            // NOTE(review): casts assume FloatData frames — confirm frontend.
                            TimeFrame timeFrame = new TimeFrame(((FloatData) lastFeature).getCollectTime(),
                                    ((FloatData) lastWordFirstFeature).getCollectTime());
                            result.Add(new WordResult(word, timeFrame, token.getScore(), 1.0f));
                        }
                        word = token.getWord();
                        lastWordFirstFeature = lastFeature;
                    }
                    // NOTE(review): this assignment repeats the one above when the inner branch was
                    // taken; it matches the upstream Sphinx-4 implementation — verify before changing.
                    word = token.getWord();
                }
                IData feature = token.getData();
                if (feature != null) {
                    lastFeature = feature;
                    if (lastWordFirstFeature == null) {
                        lastWordFirstFeature = lastFeature;
                    }
                }
                token = token.getPredecessor();
            }
            // Restore chronological order after the backwards walk.
            result.Reverse();
            return result;
        }
/// <summary> Returns a string representation of this object/// <summary>
override public String ToString()
{
Token token = getBestToken();
if (token == null) {
return "";
} else {
return token.getWordPath();
}
}
        /// <summary>
        /// Sets whether this result is final.
        /// </summary>
        /// <param name="finalResult">if true, the result should be made final</param>
        public void setFinal(Boolean finalResult)
        {
            this._isFinal = finalResult;
        }
/// <summary>
///Determines if the Result is valid. This is used for testing and debugging
///
/// @return true if the result is properly formed.
/// <summary>
public Boolean validate()
{
Boolean valid = true;
foreach (Token token in activeList.getTokens())
{
if (!token.validate())
{
valid = false;
token.dumpTokenPath();
}
}
return valid;
}
        /// <summary>
        /// Sets the reference text (a transcript of the text that was spoken).
        /// </summary>
        /// <param name="_ref">the reference text</param>
        public void setReferenceText(String _ref)
        {
            reference = _ref;
        }
        /// <summary>
        /// Retrieves the reference text. The reference text is a transcript of the text that was spoken.
        /// </summary>
        /// <returns>the reference text, or null if no reference text exists</returns>
        public String getReferenceText()
        {
            return reference;
        }
}
}
<|start_filename|>Syn.Speech/Decoder/Adaptation/ClusteredDensityFileData.cs<|end_filename|>
using System;
using System.Collections.Generic;
using Syn.Speech.Common;
//PATROLLED
namespace Syn.Speech.Decoder.Adaptation
{
public class ClusteredDensityFileData
{
private int numberOfClusters;
private int[] corespondingClass;
public ClusteredDensityFileData(ILoader loader, int numberOfClusters)
{
this.numberOfClusters = numberOfClusters;
kMeansClustering(loader, 30);
}
public int getNumberOfClusters()
{
return this.numberOfClusters;
}
public int getClassIndex(int gaussian)
{
return corespondingClass[gaussian];
}
private float euclidianDistance(float[] a, float[] b)
{
double s = 0, d;
for (int i = 0; i < a.Length; i++)
{
d = a[i] - b[i];
s += d * d;
}
return (float)Math.Sqrt(s);
}
private bool isEqual(float[] a, float[] b)
{
if (a.Length != b.Length)
{
return false;
}
for (int i = 0; i < a.Length; i++)
{
if (a[i] != b[i])
{
return false;
}
}
return true;
}
private void kMeansClustering(ILoader loader, int maxIterations) {
var initialData = loader.getMeansPool();
List<float[]> oldCentroids = new List<float[]>(numberOfClusters);
List<float[]> centroids = new List<float[]>(numberOfClusters);
int numberOfElements = initialData.size(), nrOfIterations = maxIterations, index;
int[] count = new int[numberOfClusters];
double distance, min;
float[] currentValue, centroid;
//var array = new float[numberOfClusters][numberOfElements][];
var array = new float[numberOfClusters][][];
bool converged = false;
Random randomGenerator = new Random();
for (int i = 0; i < numberOfClusters; i++) {
index = randomGenerator.Next(numberOfElements);
centroids.Add(initialData.get(index));
oldCentroids.Add(initialData.get(index));
count[i] = 0;
}
index = 0;
while (!converged && nrOfIterations > 0) {
corespondingClass = new int[initialData.size()];
//array = new float[numberOfClusters][numberOfElements][];
array = new float[numberOfClusters][][];
for (int i = 0; i < numberOfClusters; i++)
{
oldCentroids[i] = centroids[i];
//oldCentroids.set(i, centroids[i]);
count[i] = 0;
}
for (int i = 0; i < initialData.size(); i++) {
currentValue = initialData.get(i);
min = this.euclidianDistance(oldCentroids[0], currentValue);
index = 0;
for (int k = 1; k < numberOfClusters; k++) {
distance = this.euclidianDistance(oldCentroids[k],
currentValue);
if (distance < min) {
min = distance;
index = k;
}
}
array[index][count[index]] = currentValue;
corespondingClass[i] = index;
count[index]++;
}
for (int i = 0; i < numberOfClusters; i++) {
centroid = new float[initialData.get(0).Length];
if (count[i] > 0) {
for (int j = 0; j < count[i]; j++) {
for (int k = 0; k < initialData.get(0).Length; k++) {
centroid[k] += array[i][j][k];
}
}
for (int k = 0; k < initialData.get(0).Length; k++) {
centroid[k] /= count[i];
}
centroids[i] = centroid;
//centroids.set(i, centroid);
}
}
converged = true;
for (int i = 0; i < numberOfClusters; i++) {
converged = converged
&& (this.isEqual(centroids[i], oldCentroids[i]));
}
nrOfIterations--;
}
}
}
}
<|start_filename|>Syn.Speech/Result/LatticeOptimizer.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Diagnostics;
namespace Syn.Speech.Result
{
/// <summary>
/// Class used to collapse all equivalent paths in a Lattice. Results in a Lattices that is deterministic (no Node has
/// Edges to two or more equivalent Nodes), and minimal (no Node has Edge from two or more equivalent Nodes).
/// </summary>
public class LatticeOptimizer
{
protected Lattice lattice;
/**
/// Create a new Lattice optimizer
*
/// @param lattice
*/
public LatticeOptimizer(Lattice lattice) {
this.lattice = lattice;
}
/**
/// Code for optimizing Lattices. An optimal lattice has all the same paths as the original, but with fewer nodes
/// and edges
/// <p/>
/// Note that these methods are all in Lattice so that it is easy to change the definition of "equivalent" nodes and
/// edges. For example, an equivalent node might have the same word, but start or end at a different time.
/// <p/>
/// To experiment with other definitions of equivalent, just create a superclass of Lattice.
*/
public void optimize() {
//System.err.println("***");
//lattice.dumpAllPaths();
//System.err.println("***");
optimizeForward();
//System.err.println("***");
//lattice.dumpAllPaths();
//System.err.println("***");
optimizeBackward();
//System.err.println("***");
//lattice.dumpAllPaths();
//System.err.println("***");
}
/**
/// Make the Lattice deterministic, so that no node has multiple outgoing edges to equivalent nodes.
/// <p/>
/// Given two edges from the same node to two equivalent nodes, replace with one edge to one node with outgoing edges
/// that are a union of the outgoing edges of the old two nodes.
/// <p/>
/// A --> B --> C \--> B' --> Y
/// <p/>
/// where B and B' are equivalent.
/// <p/>
/// is replaced with
/// <p/>
/// A --> B" --> C \--> Y
/// <p/>
/// where B" is the merge of B and B'
/// <p/>
/// Note that equivalent nodes must have the same incomming edges. For example
/// <p/>
/// A --> B \ \ X --> B'
/// <p/>
/// B and B' would not be equivalent because the incomming edges are different
*/
protected void optimizeForward() {
//System.err.println("*** Optimizing forward ***");
Boolean moreChanges = true;
while (moreChanges) {
moreChanges = false;
// search for a node that can be optimized
// note that we use getCopyOfNodes to avoid concurrent changes to nodes
foreach (Node n in lattice.getCopyOfNodes())
{
// we are iterating down a list of node before optimization
// previous iterations may have removed nodes from the list
// therefore we have to check that the node stiff exists
if (lattice.hasNode(n))
{
moreChanges |= optimizeNodeForward(n);
}
}
}
}
/**
/// Look for 2 "to" edges to equivalent nodes. Replace the edges with one edge to one node that is a merge of the
/// equivalent nodes
/// <p/>
/// nodes are equivalent if they have equivalent from edges, and the same label
/// <p/>
/// merged nodes have a union of "from" and "to" edges
*
/// @param n
/// @return true if Node n required an optimize forward
*/
protected Boolean optimizeNodeForward(Node n)
{
Trace.Assert(lattice.hasNode(n));
List<Edge> leavingEdges = new List<Edge>(n.getLeavingEdges());
for (int j = 0; j < leavingEdges.Count; j++)
{
Edge e = leavingEdges[j];
for (int k = j + 1; k < leavingEdges.Count; k++)
{
Edge e2 = leavingEdges[k];
/*
/// If these are not the same edge, and they point to
/// equivalent nodes, we have a hit, return true
*/
Trace.Assert(e != e2);
if (equivalentNodesForward(e.getToNode(), e2.getToNode()))
{
mergeNodesAndEdgesForward(n, e, e2);
return true;
}
}
}
/*
/// return false if we did not get a hit
*/
return false;
}
/**
/// nodes are equivalent forward if they have "from" edges from the same nodes, and have equivalent labels (Token,
/// start/end times)
*
/// @param n1
/// @param n2
/// @return true if n1 and n2 are "equivalent forwards"
*/
protected Boolean equivalentNodesForward(Node n1, Node n2)
{
Trace.Assert(lattice.hasNode(n1));
Trace.Assert(lattice.hasNode(n2));
// do the labels match?
if (!equivalentNodeLabels(n1, n2)) return false;
// if they have different number of "from" edges they are not equivalent
// or if there is a "from" edge with no match then the nodes are not
// equivalent
return n1.hasEquivalentEnteringEdges(n2);
}
/**
/// given edges e1 and e2 from node n to nodes n1 and n2
/// <p/>
/// merge e1 and e2, that is, merge the scores of e1 and e2 create n' that is a merge of n1 and n2 add n' add edge e'
/// from n to n'
/// <p/>
/// remove n1 and n2 and all associated edges
*
/// @param n
/// @param e1
/// @param e2
*/
protected void mergeNodesAndEdgesForward(Node n, Edge e1, Edge e2)
{
Trace.Assert(lattice.hasNode(n));
Trace.Assert(lattice.hasEdge(e1));
Trace.Assert(lattice.hasEdge(e2));
Trace.Assert(e1.getFromNode() == n);
Trace.Assert(e2.getFromNode() == n);
Node n1 = e1.getToNode();
Node n2 = e2.getToNode();
Trace.Assert(n1.hasEquivalentEnteringEdges(n1));
Trace.Assert(n1.getWord().Equals(n2.getWord()));
// merge the scores of e1 and e2 into e1
e1.setAcousticScore(mergeAcousticScores
(e1.getAcousticScore(), e2.getAcousticScore()));
e1.setLMScore(mergeLanguageScores(e1.getLMScore(),
e2.getLMScore()));
// add n2's edges to n1
foreach (Edge e in n2.getLeavingEdges())
{
e2 = n1.getEdgeToNode(e.getToNode());
if (e2 == null) {
lattice.addEdge(n1, e.getToNode(),
e.getAcousticScore(), e.getLMScore());
} else {
// if we got here then n1 and n2 had edges to the same node
// choose the edge with best score
e2.setAcousticScore
(mergeAcousticScores
(e.getAcousticScore(), e2.getAcousticScore()));
e2.setLMScore(mergeLanguageScores(e.getLMScore(),
e2.getLMScore()));
}
}
// remove n2 and all associated edges
lattice.removeNodeAndEdges(n2);
}
/**
/// Minimize the Lattice deterministic, so that no node has multiple incoming edges from equivalent nodes.
/// <p/>
/// Given two edges from equivalent nodes to a single nodes, replace with one edge from one node with incoming edges
/// that are a union of the incoming edges of the old two nodes.
/// <p/>
/// A --> B --> C X --> B' --/
/// <p/>
/// where B and B' are equivalent.
/// <p/>
/// is replaced with
/// <p/>
/// A --> B" --> C X --/
/// <p/>
/// where B" is the merge of B and B'
/// <p/>
/// Note that equivalent nodes must have the same outgoing edges. For example
/// <p/>
/// A --> X \ \ \ A' --> B
/// <p/>
/// A and A' would not be equivalent because the outgoing edges are different
*/
protected void optimizeBackward() {
//System.err.println("*** Optimizing backward ***");
Boolean moreChanges = true;
while (moreChanges) {
moreChanges = false;
// search for a node that can be optimized
// note that we use getCopyOfNodes to avoid concurrent changes to nodes
foreach (Node n in lattice.getCopyOfNodes()) {
// we are iterating down a list of node before optimization
// previous iterations may have removed nodes from the list
// therefore we have to check that the node stiff exists
if (lattice.hasNode(n)) {
moreChanges |= optimizeNodeBackward(n);
}
}
}
}
/**
/// Look for 2 entering edges from equivalent nodes. Replace the edges with one edge to one new node that is a merge
/// of the equivalent nodes Nodes are equivalent if they have equivalent to edges, and the same label. Merged nodes
/// have a union of entering and leaving edges
*
/// @param n
/// @return true if Node n required optimizing backwards
*/
protected Boolean optimizeNodeBackward(Node n) {
List<Edge> enteringEdges = new List<Edge>(n.getEnteringEdges());
for (int j = 0; j < enteringEdges.Count; j++)
{
Edge e = enteringEdges[j];
for (int k = j + 1; k < n.getEnteringEdges().Count; k++)
{
Edge e2 = enteringEdges[k];
/*
/// If these are not the same edge, and they point to
/// equivalent nodes, we have a hit, return true
*/
Trace.Assert(e != e2);
if (equivalentNodesBackward(e.getFromNode(),
e2.getFromNode()))
{
mergeNodesAndEdgesBackward(n, e, e2);
return true;
}
}
}
/*
/// return false if we did not get a hit
*/
return false;
}
/**
/// nodes are equivalent backward if they have "to" edges to the same nodes, and have equivalent labels (Token,
/// start/end times)
*
/// @param n1
/// @param n2
/// @return true if n1 and n2 are "equivalent backwards"
*/
protected Boolean equivalentNodesBackward(Node n1, Node n2)
{
Trace.Assert(lattice.hasNode(n1));
Trace.Assert(lattice.hasNode(n2));
// do the labels match?
if (!equivalentNodeLabels(n1, n2)) return false;
// if they have different number of "to" edges they are not equivalent
// or if there is a "to" edge with no match then the nodes are not equiv
return n1.hasEquivalentLeavingEdges(n2);
}
/**
/// Is the contents of these Node equivalent?
*
/// @param n1
/// @param n2
/// @return true if n1 and n2 have "equivalent labels"
*/
protected Boolean equivalentNodeLabels(Node n1, Node n2)
{
return (n1.getWord().Equals(n2.getWord()) &&
(n1.getBeginTime() == n2.getBeginTime() &&
n1.getEndTime() == n2.getEndTime()));
}
/**
/// given edges e1 and e2 to node n from nodes n1 and n2
/// <p/>
/// merge e1 and e2, that is, merge the scores of e1 and e2 create n' that is a merge of n1 and n2 add n' add edge e'
/// from n' to n
/// <p/>
/// remove n1 and n2 and all associated edges
*
/// @param n
/// @param e1
/// @param e2
*/
protected void mergeNodesAndEdgesBackward(Node n, Edge e1, Edge e2) {
Trace.Assert(lattice.hasNode(n));
Trace.Assert(lattice.hasEdge(e1));
Trace.Assert(lattice.hasEdge(e2));
Trace.Assert(e1.getToNode() == n);
Trace.Assert(e2.getToNode() == n);
Node n1 = e1.getFromNode();
Node n2 = e2.getFromNode();
Trace.Assert(n1.hasEquivalentLeavingEdges(n2));
Trace.Assert(n1.getWord().Equals(n2.getWord()));
// merge the scores of e1 and e2 into e1
e1.setAcousticScore(mergeAcousticScores(e1.getAcousticScore(),
e2.getAcousticScore()));
e1.setLMScore(mergeLanguageScores(e1.getLMScore(),
e2.getLMScore()));
// add n2's "from" edges to n1
foreach (Edge e in n2.getEnteringEdges())
{
e2 = n1.getEdgeFromNode(e.getFromNode());
if (e2 == null) {
lattice.addEdge(e.getFromNode(), n1,
e.getAcousticScore(), e.getLMScore());
} else {
// if we got here then n1 and n2 had edges from the same node
// choose the edge with best score
e2.setAcousticScore
(mergeAcousticScores(e.getAcousticScore(),
e2.getAcousticScore()));
e2.setLMScore(mergeLanguageScores(e.getLMScore(),
e2.getLMScore()));
}
}
// remove n2 and all associated edges
lattice.removeNodeAndEdges(n2);
}
/** Remove all Nodes that have no Edges to them (but not <s>) */
protected void removeHangingNodes()
{
foreach (Node n in lattice.getCopyOfNodes())
{
if (lattice.hasNode(n)) {
if (n == lattice.getInitialNode()) {
} else if (n == lattice.getTerminalNode()) {
} else {
if (n.getLeavingEdges().Count==0
|| n.getEnteringEdges().Count==0)
{
lattice.removeNodeAndEdges(n);
removeHangingNodes();
return;
}
}
}
}
}
/**
/// Provides a single method to merge acoustic scores, so that changes to how acoustic score are merged can be made
/// at one point only.
*
/// @param score1 the first acoustic score
/// @param score2 the second acoustic score
/// @return the merged acoustic score
*/
private double mergeAcousticScores(double score1, double score2)
{
// return lattice.getLogMath().addAsLinear(score1, score2);
return Math.Max(score1, score2);
}
/**
/// Provides a single method to merge language scores, so that changes to how language score are merged can be made
/// at one point only.
*
/// @param score1 the first language score
/// @param score2 the second language score
/// @return the merged language score
*/
private double mergeLanguageScores(double score1, double score2)
{
// return lattice.getLogMath().addAsLinear(score1, score2);
return Math.Max(score1, score2);
}
/**
/// Self test for LatticeOptimizer
*
/// @param args
*/
public static void main(String[] args)
{
Lattice lattice = new Lattice(args[0]);
LatticeOptimizer optimizer = new LatticeOptimizer(lattice);
optimizer.optimize();
lattice.dump(args[1]);
}
}
}
<|start_filename|>Syn.Speech/Decoder/Search/TokenSearchManager.cs<|end_filename|>
using System;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Search
{
    /// <summary>
    /// Base class for token-based search managers. Reads the lattice/token-retention
    /// configuration and provides the shared result-list-predecessor logic.
    /// </summary>
    public abstract class TokenSearchManager:ISearchManager
    {
        /// <summary>
        /// The property that specifies whether to build a word lattice.
        /// </summary>
        [S4Boolean(defaultValue = true)]
        public static String PROP_BUILD_WORD_LATTICE = "buildWordLattice";
        /// <summary>
        /// The property that controls whether or not we keep all tokens. If this is
        /// set to false, only word tokens are retained, otherwise all tokens are
        /// retained.
        /// </summary>
        [S4Boolean(defaultValue = false)]
        public static String PROP_KEEP_ALL_TOKENS = "keepAllTokens";
        // Cached configuration values, populated in IConfigurable.newProperties.
        protected Boolean buildWordLattice=false;
        protected Boolean keepAllTokens=false;
        void IConfigurable.newProperties(PropertySheet ps)
        {
            buildWordLattice = ps.getBoolean(PROP_BUILD_WORD_LATTICE);
            keepAllTokens = ps.getBoolean(PROP_KEEP_ALL_TOKENS);
            //newProperties(ps);
        }
        /// <summary>
        /// Finds the token to use as a predecessor in resultList given a candidate
        /// predecessor. There are three cases here:
        ///
        /// <ul>
        /// <li>We want to store everything in resultList. In that case
        /// {@link #keepAllTokens} is set to true and we just store everything that
        /// was built before.
        /// <li>We are only interested in the sequence of words. In this case we just
        /// keep word tokens and ignore everything else. In this case timing and
        /// scoring information is lost since we keep scores in emitting tokens.
        /// <li>We want to keep words but we want to keep scores to build a lattice
        /// from the result list later and {@link #buildWordLattice} is set to true.
        /// In this case we want to insert an intermediate token to store the score, and
        /// this token will be used during lattice path collapse to get the score on an
        /// edge. See {@link edu.cmu.sphinx.result.Lattice} for details of resultList
        /// compression.
        /// </ul>
        /// </summary>
        /// <param name="token">the token of interest</param>
        /// <returns>the immediate predecessor word token (or a score-carrying
        /// intermediate token when building a word lattice)</returns>
        protected Token getResultListPredecessor(Token token)
        {
            if (keepAllTokens) {
                return token;
            }
            if(!buildWordLattice) {
                if (token.isWord())
                    return token;
                else
                    return token.getPredecessor();
            }
            // Accumulate the scores of the non-word tokens being skipped over, so the
            // inserted token preserves them for later lattice construction.
            float logAcousticScore = 0.0f;
            float logLanguageScore = 0.0f;
            float logInsertionScore = 0.0f;
            while (token != null && !token.isWord()) {
                logAcousticScore += token.getAcousticScore();
                logLanguageScore += token.getLanguageScore();
                logInsertionScore += token.getInsertionScore();
                token = token.getPredecessor();
            }
            // NOTE(review): if the predecessor chain has no word token, token is null
            // here and the call below would throw — confirm callers guarantee a word
            // token exists on every path.
            return new Token(token, token.getScore(), logInsertionScore, logAcousticScore, logLanguageScore);
        }
        //void allocate()
        //{
        //    allocate();
        //}
        void ISearchManager.deallocate()
        {
            throw new NotImplementedException();
        }
        virtual public void startRecognition()
        {
            throw new NotImplementedException("startRecognition not implemented in derived class!");
        }
        void ISearchManager.startRecognition()
        {
            startRecognition();
        }
        void ISearchManager.stopRecognition()
        {
            stopRecognition();
        }
        Results.Result ISearchManager.recognize(int nFrames)
        {
            return recognize(nFrames);
        }
        // Abstract members every concrete search manager must supply.
        public abstract void newProperties(PropertySheet ps);
        public abstract void allocate();
        public abstract Results.Result recognize(int nFrames);
        public abstract void stopRecognition();
    }
}
<|start_filename|>Syn.Speech/Decoder/Pruner/IPruner.cs<|end_filename|>
using Syn.Speech.Common;
using Syn.Speech.Decoder.Search;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Pruner
{
    /// <summary>
    /// Provides a mechanism for pruning a set of StateTokens.
    /// </summary>
    public interface IPruner: IConfigurable
    {
        /// <summary>
        /// Starts the pruner
        /// </summary>
        void startRecognition();
        /// <summary>
        /// Prunes the given set of states
        /// </summary>
        /// <param name="stateTokenList">a list containing StateToken objects to be scored</param>
        /// <returns>the pruned list (may be the same list as stateTokenList)</returns>
        ActiveList prune(ActiveList stateTokenList);
        /// <summary>
        /// Performs post-recognition cleanup.
        /// </summary>
        void stopRecognition();
        /// <summary>
        /// Allocates resources necessary for this pruner
        /// </summary>
        void allocate();
        /// <summary>
        /// Deallocates resources necessary for this pruner
        /// </summary>
        void deallocate();
    }
}
<|start_filename|>Syn.Speech/Alignment/PronounceableFSM.cs<|end_filename|>
//PATROLLED
using System.IO;
using Syn.Speech.Helper;
namespace Syn.Speech.Alignment
{
/// <summary>
/// Implements a finite state machine that checks if a given string is pronounceable.
/// If it is pronounceable, the method <code>accept()</code> will return true.
/// </summary>
public class PronounceableFSM
{
private const string VOCAB_SIZE = "VOCAB_SIZE";
private const string NUM_OF_TRANSITIONS = "NUM_OF_TRANSITIONS";
private const string TRANSITIONS = "TRANSITIONS";
/// <summary>
/// The vocabulary size
/// </summary>
protected internal int vocabularySize;
/// <summary>
/// The transitions of this FSM
/// </summary>
protected internal int[] transitions;
/// <summary>
/// Whether we should scan the input string from the front.
/// </summary>
protected internal bool scanFromFront;
/// <summary>
/// Constructs a PronounceableFSM with information in the given URL.
/// </summary>
/// <param name="fileInfo">path To File</param>
/// <param name="scanFromFront">Indicates whether this FSM should scan the input string from the front, or from the back.</param>
public PronounceableFSM(FileInfo fileInfo, bool scanFromFront)
{
this.scanFromFront = scanFromFront;
//InputStream inputStream = url.openStream();
loadText(File.ReadAllText(fileInfo.FullName));
}
public PronounceableFSM(string stringValue, bool scanFromFront)
{
this.scanFromFront = scanFromFront;
loadText(stringValue);
}
/// <summary>
/// Constructs a PronounceableFSM with the given attributes.
/// </summary>
/// <param name="vocabularySize">The vocabulary size of the FSM.</param>
/// <param name="transitions">The transitions of the FSM.</param>
/// <param name="scanFromFront">Indicates whether this FSM should scan the input string from the front, or from the back.</param>
public PronounceableFSM(int vocabularySize, int[] transitions, bool scanFromFront)
{
this.vocabularySize = vocabularySize;
this.transitions = transitions;
this.scanFromFront = scanFromFront;
}
/// <summary>
/// Loads the ASCII specification of this FSM from the given string.
/// Recognized directives are VOCAB_SIZE, NUM_OF_TRANSITIONS and TRANSITIONS;
/// lines starting with "***" are comments and are skipped.
/// </summary>
/// <param name="toRead">The text of the FSM specification to load.</param>
private void loadText(string toRead)
{
    // 'using' guarantees the reader is disposed even if parsing throws
    // (the original only called Close on the success path).
    using (var reader = new StringReader(toRead))
    {
        string line;
        while ((line = reader.ReadLine()) != null)
        {
            if (line.StartsWith("***"))
            {
                continue; // comment line
            }
            if (line.StartsWith(VOCAB_SIZE))
            {
                vocabularySize = parseLastInt(line);
            }
            else if (line.StartsWith(NUM_OF_TRANSITIONS))
            {
                // Sizes the transition table; expected to appear before TRANSITIONS.
                int transitionsSize = parseLastInt(line);
                transitions = new int[transitionsSize];
            }
            else if (line.StartsWith(TRANSITIONS))
            {
                var stringTokenizer = new StringTokenizer(line);
                // The first token is the TRANSITIONS keyword itself; skip it.
                var transition = stringTokenizer.nextToken();
                int i = 0;
                while (stringTokenizer.hasMoreTokens() && i < transitions.Length)
                {
                    transition = stringTokenizer.nextToken().Trim();
                    transitions[i++] = int.Parse(transition);
                }
            }
        }
    }
}
/// <summary>
/// Returns the integer value of the last whitespace-separated token in the given line.
/// </summary>
/// <param name="line">The line to parse the integer from.</param>
/// <returns>The trailing integer on the line.</returns>
private int parseLastInt(string line)
{
    // Bug fix: the original computed LastIndexOf on the UNTRIMMED line but
    // applied Substring to the TRIMMED copy, which throws (or returns the
    // wrong token) whenever the line carries trailing whitespace. Trim once
    // and use consistent indices.
    string trimmed = line.Trim();
    string lastInt = trimmed.Substring(trimmed.LastIndexOf(" "));
    return int.Parse(lastInt.Trim());
}
/// <summary>
/// Causes this FSM to transition to the next state given the current state and input symbol.
/// Each entry of <c>transitions</c> packs a (symbol, next-state) pair as
/// <c>nextState * vocabularySize + symbol</c>; the scan starts at the index of
/// the current state and returns the decoded next state of the first entry
/// whose symbol matches.
/// </summary>
/// <param name="state">The current state (an index into the transitions table).</param>
/// <param name="symbol">The input symbol.</param>
/// <returns>The next state, or -1 if no matching transition is found.</returns>
private int transition(int state, int symbol)
{
    for (int i = state; i < transitions.Length; i++)
    {
        if ((transitions[i] % vocabularySize) == symbol)
        {
            return (transitions[i] / vocabularySize);
        }
    }
    return -1;
}
/// <summary>
/// Checks to see if this finite state machine accepts the given input string.
/// Before being fed to the machine, 'n' and 'm' are mapped to the nasal class
/// symbol 'N' and the vowels "aeiouy" to the vowel class symbol 'V'.
/// </summary>
/// <param name="inputString">The input string to be tested.</param>
/// <returns>true if this FSM accepts, false if it rejects.</returns>
public virtual bool accept(string inputString)
{
    // Prime the machine with the start marker '#'.
    int state = transition(0, '#');
    int lastIndex = inputString.Length - 1;
    int step = scanFromFront ? 1 : -1;
    int pos = scanFromFront ? 0 : lastIndex;

    while (pos >= 0 && pos <= lastIndex)
    {
        char c = inputString[pos];
        int symbol;
        if (c == 'n' || c == 'm')
        {
            symbol = 'N';
        }
        else if ("aeiouy".IndexOf(c) != -1)
        {
            symbol = 'V';
        }
        else
        {
            symbol = c;
        }

        state = transition(state, symbol);
        if (state == -1)
        {
            // No transition for this symbol: reject.
            return false;
        }
        if (symbol == 'V')
        {
            // Reaching a vowel while still in a valid state: accept.
            return true;
        }
        pos += step;
    }
    return false;
}
}
}
<|start_filename|>Syn.Speech/Decoder/Search/NonEmittingListIterator.cs<|end_filename|>
using System;
using System.Collections;
using System.Collections.Generic;
using Syn.Speech.Common;
using Syn.Speech.Helper;
namespace Syn.Speech.Decoder.Search
{
/// <summary>
/// Iterates over the non-emitting ActiveLists of a SimpleActiveListManager.
/// The last entry of <c>currentActiveLists</c> holds the emitting list, so
/// MoveNext deliberately stops one short of it. Note this is a non-standard
/// IEnumerator: reading Current advances the internal pointer (a direct port
/// of the Java Iterator's next()).
/// </summary>
class NonEmittingListIterator : IEnumerator<ActiveList> {
    private ActiveList[] currentActiveLists;
    private bool _checkPriorListsFromParent;
    // Index of the list most recently returned by Current; -1 before the first access.
    private int listPtr;

    public NonEmittingListIterator() {
        listPtr = -1;
    }

    public NonEmittingListIterator(ActiveList[] currentActiveLists, bool checkPriorListsFromParent)
    {
        this.currentActiveLists = currentActiveLists;
        this._checkPriorListsFromParent = checkPriorListsFromParent;
        // Bug fix: listPtr must start at -1 (as in the parameterless
        // constructor). It previously defaulted to 0, so the first read of
        // Current returned index 1 and the list at index 0 was never visited.
        listPtr = -1;
    }

    public bool MoveNext()
    {
        // Length - 1 (not Length): the final list is the emitting list and is
        // intentionally excluded from this iteration.
        return listPtr + 1 < currentActiveLists.Length - 1;
    }

    public void Reset()
    {
        throw new NotImplementedException();
    }

    object IEnumerator.Current
    {
        get { return Current; }
    }

    public ActiveList Current {
        get
        {
            // Side effect: advances to (and returns) the next list.
            listPtr++;
            if (listPtr >= currentActiveLists.Length)
            {
                throw new Exception("NoSuchElementException");
            }
            if (_checkPriorListsFromParent)
            {
                checkPriorLists();
            }
            return currentActiveLists[listPtr];
        }
    }

    /** Check that all lists prior to listPtr is empty. */
    private void checkPriorLists() {
        for (int i = 0; i < listPtr; i++) {
            ActiveList activeList = currentActiveLists[i] as ActiveList;
            if (activeList.size() > 0) {
                throw new Exception("At while processing state order"
                        + listPtr + ", state order " + i + " not empty");
            }
        }
    }

    // Replaces the current list with a fresh (empty) instance of the same kind.
    public void remove() {
        currentActiveLists[listPtr] =
                currentActiveLists[listPtr].newInstance();
    }

    public void Dispose()
    {
        //throw new NotImplementedException();
    }
}
}
<|start_filename|>Syn.Speech/Instrumentation/BestConfidenceAccuracyTracker.cs<|end_filename|>
using System;
using System.Diagnostics;
using Syn.Speech.Recognizers;
using Syn.Speech.Results;
using Syn.Speech.Util;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Instrumentation
{
/// <summary>
/// Tracks and reports recognition accuracy using the "confidenceScorer" component specified in the ConfigurationManager.
/// The "confidenceScorer" component is typically configured to be edu.cmu.sphinx.result.SausageMaker.
/// </summary>
/// <summary>
/// Tracks and reports recognition accuracy using the "confidenceScorer" component specified
/// in the ConfigurationManager (typically edu.cmu.sphinx.result.SausageMaker).
/// </summary>
public class BestConfidenceAccuracyTracker : AccuracyTracker
{
    /** Defines the class to use for confidence scoring. */
    [S4Component(type = typeof(IConfidenceScorer))]
    public readonly static String PROP_CONFIDENCE_SCORER = "confidenceScorer";

    /// <summary>
    /// The confidence scorer
    /// </summary>
    protected IConfidenceScorer confidenceScorer;

    public BestConfidenceAccuracyTracker(IConfidenceScorer confidenceScorer, Recognizer recognizer,
        bool showSummary, bool showDetails, bool showResults, bool showAlignedResults, bool showRawResults)
        : base(recognizer, showSummary, showDetails, showResults, showAlignedResults, showRawResults)
    {
        this.confidenceScorer = confidenceScorer;
    }

    public BestConfidenceAccuracyTracker()
    {
    }

    /// <summary>
    /// Reads the confidence scorer from the property sheet, then lets the base
    /// class configure itself.
    /// </summary>
    public override void newProperties(PropertySheet ps)
    {
        base.newProperties(ps);
        confidenceScorer = (IConfidenceScorer)ps.getComponent(PROP_CONFIDENCE_SCORER);
    }

    /// <summary>
    /// Aligns the best-confidence hypothesis of each final result against its
    /// reference text. Scoring failures are logged and swallowed (best effort).
    /// </summary>
    public override void newResult(Result result)
    {
        NISTAlign aligner = getAligner();
        String referenceText = result.getReferenceText();
        if (!result.isFinal() || referenceText == null)
        {
            return;
        }
        try
        {
            IPath best = null;
            String hypothesis = "";
            if (result.getBestFinalToken() != null)
            {
                IConfidenceResult scored = confidenceScorer.score(result);
                best = scored.getBestHypothesis();
                hypothesis = best.getTranscriptionNoFiller();
            }
            aligner.align(referenceText, hypothesis);
            showDetails(best == null ? "" : best.getTranscription());
        }
        catch (Exception e)
        {
            // Deliberate best-effort: a scoring error must not abort recognition.
            Debug.WriteLine(e);
        }
    }
}
}
<|start_filename|>Syn.Speech/Result/BoundedPriorityQueue.cs<|end_filename|>
using System.Collections.Generic;
using System.Linq;
namespace Syn.Speech.Result
{
/// <summary>
/// // TODO: replace with MinMaxPriorityQueue
/// </summary>
/// <typeparam name="T"></typeparam>
public class BoundedPriorityQueue<T> : IEnumerable<T>
{
Queue<T> items;
int maxSize;
public BoundedPriorityQueue(int maxSize)
{
items = new Queue<T>();
this.maxSize = maxSize;
}
public void add(T item)
{
items.Enqueue(item);
if (items.Count > maxSize)
items.Dequeue();
}
public int size()
{
return items.Count;
}
public T poll()
{
return items.Last();
}
public IEnumerator<T> iterator()
{
return items.GetEnumerator();
}
IEnumerator<T> IEnumerable<T>.GetEnumerator()
{
return items.GetEnumerator();
}
System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
{
return items.GetEnumerator();
}
}
}
<|start_filename|>Syn.Speech/Decoder/Decoder.cs<|end_filename|>
using System;
using System.Collections.Generic;
using Syn.Speech.Common;
using Syn.Speech.Decoder.Search;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder
{
public class Decoder: AbstractDecoder
{
    /// <summary>
    /// The property for the number of features to recognize at once.
    /// </summary>
    [S4Integer(defaultValue = 100000)]
    public static String PROP_FEATURE_BLOCK_SIZE = "featureBlockSize";

    // Number of feature frames handed to the search manager per recognize() call.
    private int featureBlockSize;

    public Decoder()
    {
        // Keep this or else XML configuration fails.
    }

    /// <summary>
    /// Creates a decoder.
    /// </summary>
    /// <param name="searchManager">The search manager that drives recognition.</param>
    /// <param name="fireNonFinalResults">Whether to fire events for non-final results.</param>
    /// <param name="autoAllocate">Whether to allocate resources automatically.</param>
    /// <param name="resultListeners">Listeners notified of each new result.</param>
    /// <param name="featureBlockSize">Number of feature frames to decode per recognize() call.</param>
    public Decoder(ISearchManager searchManager, bool fireNonFinalResults, bool autoAllocate,
        List<IResultListener> resultListeners, int featureBlockSize):base(searchManager, fireNonFinalResults, autoAllocate,resultListeners)
    {
        this.featureBlockSize = featureBlockSize;
    }

    override public void newProperties(PropertySheet ps)
    {
        // Bug fix: the base class must also read its own properties (search
        // manager, listeners, ...) when this decoder is configured from XML,
        // mirroring Sphinx-4's Decoder.newProperties which calls
        // super.newProperties(ps). Without this, only featureBlockSize was set.
        base.newProperties(ps);
        featureBlockSize = ps.getInt(PROP_FEATURE_BLOCK_SIZE);
    }

    /// <summary>
    /// Decode frames until recognition is complete.
    /// </summary>
    /// <param name="referenceText">referenceText the reference text (or null)</param>
    /// <returns>a result</returns>
    public override Results.Result decode(String referenceText)
    {
        searchManager.startRecognition();
        Results.Result result;
        do
        {
            result = searchManager.recognize(featureBlockSize);
            if (result != null)
            {
                result.setReferenceText(referenceText);
                fireResultListeners(result);
            }
        } while (result != null && !result.isFinal());
        searchManager.stopRecognition();
        return result;
    }
}
}
<|start_filename|>Syn.Speech/Decoder/Search/SimpleActiveListManager.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Diagnostics;
using Syn.Speech.Common;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder.Search
{
/// <summary>
/// A list of ActiveLists. Different token types are placed in different lists.
///
/// </summary>
/// <summary>
/// A list of ActiveLists. Different token types are placed in different lists,
/// indexed by the search-state order of the token. The last list holds the
/// emitting states; the lists before it hold non-emitting states.
/// </summary>
public class SimpleActiveListManager : ActiveListManager
{
    /**
    /// This property is used in the Iterator returned by the getNonEmittingListIterator() method. When the
    /// Iterator.next() method is called, this property determines whether the lists prior to that returned by next() are
    /// empty (they should be empty). If they are not empty, an Error will be thrown.
    */
    [S4Boolean(defaultValue = false)]
    public static String PROP_CHECK_PRIOR_LISTS_EMPTY = "checkPriorListsEmpty";

    /** The property that defines the name of the active list factory to be used by this search manager. */
    [S4ComponentList(type = typeof(ActiveListFactory))]
    public static String PROP_ACTIVE_LIST_FACTORIES = "activeListFactories";

    // --------------------------------------
    // Configuration data
    // --------------------------------------
    private Boolean checkPriorLists;
    private List<ActiveListFactory> activeListFactories;
    // One active list per search-state order; rebuilt by setNumStateOrder().
    private ActiveList[] currentActiveLists;

    /// <summary>
    /// Creates a manager from an explicit list of factories.
    /// </summary>
    /// <param name="activeListFactories">One factory per state order (the last is reused if fewer are given).</param>
    /// <param name="checkPriorLists">Whether the non-emitting iterator verifies that prior lists are empty.</param>
    public SimpleActiveListManager(List<ActiveListFactory> activeListFactories, Boolean checkPriorLists)
    {
        this.activeListFactories = activeListFactories;
        this.checkPriorLists = checkPriorLists;
    }

    public SimpleActiveListManager() {

    }

    /*
    /// (non-Javadoc)
    *
    /// @see edu.cmu.sphinx.util.props.Configurable#newProperties(edu.cmu.sphinx.util.props.PropertySheet)
    */
    override public void newProperties(PropertySheet ps)
    {
        activeListFactories = ps.getComponentList<ActiveListFactory>(PROP_ACTIVE_LIST_FACTORIES);
        checkPriorLists = ps.getBoolean(PROP_CHECK_PRIOR_LISTS_EMPTY);
    }

    /*
    /// (non-Javadoc)
    *
    /// @see edu.cmu.sphinx.decoder.search.ActiveListManager#setNumStateOrder(java.lang.Class[])
    */
    override public void setNumStateOrder(int numStateOrder)
    {
        // check to make sure that we have the correct
        // number of active list factories for the given search states
        currentActiveLists = new ActiveList[numStateOrder];

        if (activeListFactories.Count==0)
        {
            Trace.WriteLine("No active list factories configured");
            throw new Exception("No active list factories configured");
        }
        if (activeListFactories.Count != currentActiveLists.Length)
        {
            // Mismatch is tolerated (warning only); createActiveLists reuses
            // the last factory for the remaining orders.
            Trace.WriteLine("Need " + currentActiveLists.Length +
                    " active list factories, found " +
                    activeListFactories.Count);
        }
        createActiveLists();
    }

    /**
    /// Creates the emitting and non-emitting active lists. When creating the non-emitting active lists, we will look at
    /// their respective beam widths (eg, word beam, unit beam, state beam).
    */
    private void createActiveLists()
    {
        int nlists = activeListFactories.Count;
        for (int i = 0; i < currentActiveLists.Length; i++)
        {
            int which = i;
            if (which >= nlists) {
                which = nlists - 1;
            }
            ActiveListFactory alf = activeListFactories[which];
            currentActiveLists[i] = alf.newInstance();
        }
    }

    /**
    /// Adds the given token to the list
    *
    /// @param token the token to add
    */
    override public void add(Token token)
    {
        ActiveList activeList = findListFor(token);
        if (activeList == null)
        {
            throw new Exception("Cannot find ActiveList for "
                    + token.getSearchState().GetType().Name);
        }
        activeList.add(token);
    }

    /**
    /// Given a token find the active list associated with the token type
    *
    /// @param token
    /// @return the active list
    */
    private ActiveList findListFor(Token token)
    {
        return currentActiveLists[token.getSearchState().getOrder()];
    }

    /**
    /// Replaces an old token with a new token
    *
    /// @param oldToken the token to replace (or null in which case, replace works like add).
    /// @param newToken the new token to be placed in the list.
    */
    //override public void replace(Token oldToken, Token newToken)
    //{
    //    ActiveList activeList = findListFor(oldToken);
    //    Trace.Assert(activeList != null);
    //    activeList.replace(oldToken, newToken);
    //}

    /**
    /// Returns the emitting ActiveList from the manager
    *
    /// @return the emitting ActiveList
    */
    override public ActiveList getEmittingList()
    {
        // By convention the emitting list is the last entry.
        ActiveList list = currentActiveLists[currentActiveLists.Length - 1];
        return list;
    }

    /**
    /// Clears emitting list in manager
    */
    override public void clearEmittingList()
    {
        ActiveList list = currentActiveLists[currentActiveLists.Length - 1];
        currentActiveLists[currentActiveLists.Length - 1] = list.newInstance();
    }

    /**
    /// Returns an Iterator of all the non-emitting ActiveLists. The iteration order is the same as the search state
    /// order.
    *
    /// @return an Iterator of non-emitting ActiveLists
    */

    /** Outputs debugging info for this list manager */
    override public void dump()
    {
        Trace.WriteLine("--------------------");
        foreach (ActiveList al in currentActiveLists)
        {
            dumpList(al);
        }
    }

    public override IEnumerator<ActiveList> getNonEmittingListIterator()
    {
        return (new NonEmittingListIterator(currentActiveLists, checkPriorLists));
    }

    /**
    /// Dumps out debugging info for the given active list
    *
    /// @param al the active list to dump
    */
    private void dumpList(ActiveList al)
    {
        Trace.WriteLine("Size: " + al.size() + " Best token: " + al.getBestToken());
    }
}
}
<|start_filename|>Syn.Speech/Alignment/Relation.cs<|end_filename|>
//PATROLLED
namespace Syn.Speech.Alignment
{
/// <summary>
/// Represents an ordered set of {@link Item}s and their associated children.
/// A relation has a name and a list of items, and is added to an {@link Utterance} via an {@link UsEnglishWordExpander}.
/// </summary>
/// <summary>
/// Represents an ordered set of {@link Item}s and their associated children.
/// A relation has a name and a doubly-anchored list of items (head/tail), and is
/// added to an {@link Utterance} via an {@link UsEnglishWordExpander}.
/// </summary>
public class Relation
{
    private readonly string name;
    private readonly Utterance owner;
    private Item head;
    private Item tail;

    /// <summary>
    /// Name of the relation that contains tokens from the original input text.
    /// This is the first thing to be added to the utterance.
    /// </summary>
    public const string TOKEN = "Token";

    /// <summary>
    /// Name of the relation that contains the normalized version of the original input text.
    /// </summary>
    public const string WORD = "Word";

    /// <summary>
    /// Creates an empty relation belonging to the given utterance.
    /// </summary>
    /// <param name="name">The name of the Relation.</param>
    /// <param name="owner">The utterance that contains this relation.</param>
    internal Relation( string name, Utterance owner)
    {
        this.name = name;
        this.owner = owner;
        head = null;
        tail = null;
    }

    /// <summary>
    /// Retrieves the name of this Relation.
    /// </summary>
    /// <returns>The name of this Relation</returns>
    public virtual string getName()
    {
        return name;
    }

    /// <summary>
    /// Gets the head of the item list.
    /// </summary>
    /// <returns>The head item</returns>
    public virtual Item getHead()
    {
        return head;
    }

    /// <summary>
    /// Sets the head of the item list.
    /// </summary>
    /// <param name="item">The new head item.</param>
    internal virtual void setHead(Item item)
    {
        head = item;
    }

    /// <summary>
    /// Gets the tail of the item list.
    /// </summary>
    /// <returns>The tail item.</returns>
    public virtual Item getTail()
    {
        return tail;
    }

    /// <summary>
    /// Sets the tail of the item list.
    /// </summary>
    /// <param name="item">The new tail item.</param>
    internal virtual void setTail(Item item)
    {
        tail = item;
    }

    /// <summary>
    /// Appends a fresh item with no shared contents to this relation.
    /// </summary>
    /// <returns>The newly added item.</returns>
    public virtual Item appendItem()
    {
        return appendItem(null);
    }

    /// <summary>
    /// Appends a new item to this relation. The new item shares its contents
    /// with <paramref name="originalItem"/> when one is supplied.
    /// </summary>
    /// <param name="originalItem">The item whose contents will be shared by the new item, or null.</param>
    /// <returns>The newly added item.</returns>
    public virtual Item appendItem(Item originalItem)
    {
        ItemContents sharedContents = (originalItem == null)
            ? null
            : originalItem.getSharedContents();
        var appended = new Item(this, sharedContents);
        if (head == null)
        {
            head = appended;
        }
        if (tail != null)
        {
            tail.attach(appended);
        }
        tail = appended;
        return appended;
    }

    /// <summary>
    /// Returns the utterance that contains this relation.
    /// </summary>
    /// <returns>The utterance that contains this relation.</returns>
    public virtual Utterance getUtterance()
    {
        return owner;
    }
}
}
<|start_filename|>Syn.Speech/Decoder/Search/DecoderResult.cs<|end_filename|>
using System;
using System.Collections.Generic;
using Syn.Speech.Common;
using Syn.Speech.Common.FrontEnd;
using Syn.Speech.Result;
using Syn.Speech.Util;
using Util;
namespace Syn.Speech.Decoder.Search
{
/// <summary>
/// Partial port of a recognition result. NOTE(review): every IResult member
/// below currently throws NotImplementedException; only the constructors store
/// state. Callers must not rely on this class yet.
/// </summary>
public class DecoderResult: IResult
{
    private IActiveList activeList;
    private List<IToken> resultList;
    private IAlternateHypothesisManager alternateHypothesisManager;
    private Boolean _isFinal = false;
    private String reference = String.Empty;
    private Boolean wordTokenFirst;
    private int currentFrameNumber = -1;
    private LogMath logMath = null;

    /// <summary>
    ///Creates a result
    ///@param activeList the active list associated with this result
    ///@param resultList the result list associated with this result
    ///@param frameNumber the frame number for this result.
    ///@param isFinal if true, the result is a final result. This means that the last frame in the
    /// speech segment has been decoded.
    /// </summary>
    public DecoderResult(IActiveList activeList, List<IToken> resultList, int frameNumber, Boolean isFinal)
    {
        this.activeList = activeList;
        this.resultList = resultList;
        this.currentFrameNumber = frameNumber;
        this._isFinal = isFinal;
        logMath = LogMath.getLogMath();
    }

    /// <summary>
    ///Creates a result
    ///@param activeList the active list associated with this result
    ///@param resultList the result list associated with this result
    ///@param frameNumber the frame number for this result.
    ///@param isFinal if true, the result is a final result
    /// </summary>
    public DecoderResult(IAlternateHypothesisManager alternateHypothesisManager,
        IActiveList activeList, List<IToken> resultList, int frameNumber,
        Boolean isFinal, Boolean wordTokenFirst)
        : this(activeList, resultList, frameNumber, isFinal)
    {
        this.alternateHypothesisManager = alternateHypothesisManager;
        this.wordTokenFirst = wordTokenFirst;
    }

    // ----------------------------------------------------------------------
    // IResult members: all unimplemented stubs (explicit interface impls).
    // ----------------------------------------------------------------------

    IToken IResult.getBestToken()
    {
        throw new NotImplementedException();
    }

    void IResult.setReferenceText(string _ref)
    {
        throw new NotImplementedException();
    }

    bool IResult.isFinal()
    {
        throw new NotImplementedException();
    }

    IAlternateHypothesisManager IResult.getAlternateHypothesisManager()
    {
        throw new NotImplementedException();
    }

    List<WordResult> IResult.getTimedBestResult(bool withFillers)
    {
        throw new NotImplementedException();
    }

    LogMath IResult.getLogMath()
    {
        throw new NotImplementedException();
    }

    IActiveList IResult.getActiveTokens()
    {
        throw new NotImplementedException();
    }

    List<IToken> IResult.getResultTokens()
    {
        throw new NotImplementedException();
    }

    int IResult.getFrameNumber()
    {
        throw new NotImplementedException();
    }

    IToken IResult.getBestFinalToken()
    {
        throw new NotImplementedException();
    }

    IToken IResult.getBestActiveToken()
    {
        throw new NotImplementedException();
    }

    IToken IResult.findToken(string text)
    {
        throw new NotImplementedException();
    }

    List<IToken> IResult.findPartialMatchingTokens(string text)
    {
        throw new NotImplementedException();
    }

    IToken IResult.getBestActiveParitalMatchingToken(string text)
    {
        throw new NotImplementedException();
    }

    IFrameStatistics[] IResult.getFrameStatistics()
    {
        throw new NotImplementedException();
    }

    int IResult.getStartFrame()
    {
        throw new NotImplementedException();
    }

    int IResult.getEndFrame()
    {
        throw new NotImplementedException();
    }

    List<IData> IResult.getDataFrames()
    {
        throw new NotImplementedException();
    }

    string IResult.getBestResultNoFiller()
    {
        throw new NotImplementedException();
    }

    string IResult.getBestFinalResultNoFiller()
    {
        throw new NotImplementedException();
    }

    string IResult.getBestPronunciationResult()
    {
        throw new NotImplementedException();
    }

    List<WordResult> IResult.getTimedWordPath(IToken token, bool withFillers)
    {
        throw new NotImplementedException();
    }

    List<WordResult> IResult.getTimedWordTokenLastPath(IToken token, bool withFillers)
    {
        throw new NotImplementedException();
    }

    string IResult.ToString()
    {
        throw new NotImplementedException();
    }

    void IResult.setFinal(bool finalResult)
    {
        throw new NotImplementedException();
    }

    bool IResult.validate()
    {
        throw new NotImplementedException();
    }

    string IResult.getReferenceText()
    {
        throw new NotImplementedException();
    }
}
}
<|start_filename|>Syn.Speech/Alignment/DecisionTree.cs<|end_filename|>
using System;
using System.IO;
using System.Runtime.InteropServices;
//PATROLLED
using System.Text;
using Syn.Speech.Helper;
namespace Syn.Speech.Alignment
{
/// <summary>
/// A classification and regression tree (CART) loaded from an ASCII
/// specification. Each line is either a TOTAL count, a decision NODE, or a
/// LEAF; <c>interpret</c> walks the tree for an {@link Item} until a leaf is
/// reached.
/// </summary>
public class DecisionTree
{
    internal const string TOTAL = "TOTAL";
    internal const string NODE = "NODE";
    internal const string LEAF = "LEAF";
    internal const string OPERAND_MATCHES = "MATCHES";
    // The flattened tree; indices are node ids.
    internal Node[] cart;
    // Index of the next node slot to fill while parsing.
    [NonSerialized]
    internal int curNode;

    /// <summary>
    /// Creates a CART by reading the given file; lines starting with "***" are skipped.
    /// </summary>
    public DecisionTree(FileInfo fileInfo)
    {
        using (var reader = new StreamReader(fileInfo.OpenRead()))
        {
            string line;
            line = reader.ReadLine();
            while (line != null)
            {
                if (!line.StartsWith("***"))
                {
                    parseAndAdd(line);
                }
                line = reader.ReadLine();
            }
            reader.Close();
        }
    }

    //EXTRA
    /// <summary>
    /// Creates a CART from an in-memory specification string.
    /// </summary>
    public DecisionTree(string stringValue)
    {
        using (var reader = new StringReader(stringValue))
        {
            string line;
            line = reader.ReadLine();
            while (line != null)
            {
                if (!line.StartsWith("***"))
                {
                    parseAndAdd(line);
                }
                line = reader.ReadLine();
            }
            reader.Close();
        }
    }

    /// <summary>
    /// Creates a new CART by reading from the given reader.
    /// </summary>
    /// <param name="reader">the source of the CART data.</param>
    /// <param name="nodes">the number of nodes to read for this cart.</param>
    public DecisionTree(StreamReader reader, int nodes): this(nodes)
    {
        for (int i = 0; i < nodes; i++)
        {
            var line = reader.ReadLine();
            if (!line.StartsWith("***"))
            {
                parseAndAdd(line);
            }
        }
    }

    private DecisionTree(int numNodes)
    {
        cart = new Node[numNodes];
    }

    /// <summary>
    /// Dumps this tree in Graphviz DOT format and closes the writer.
    /// </summary>
    public virtual void dumpDot(TextWriter printWriter)
    {
        printWriter.Write("digraph \"CART Tree\" {\n");
        printWriter.Write("rankdir = LR\n");

        foreach (Node n in cart)
        {
            printWriter.WriteLine("\tnode" + Math.Abs(n.GetHashCode()) + " [ label=\""
                    + n + "\", color=" + dumpDotNodeColor(n)
                    + ", shape=" + dumpDotNodeShape(n) + " ]\n");
            if (n is DecisionNode)
            {
                DecisionNode dn = (DecisionNode)n;
                if (dn.qtrue < cart.Length && cart[dn.qtrue] != null)
                {
                    printWriter.Write("\tnode" + Math.Abs(n.GetHashCode()) + " -> node"
                            + Math.Abs(cart[dn.qtrue].GetHashCode())
                            + " [ label=" + "TRUE" + " ]\n");
                }
                if (dn.qfalse < cart.Length && cart[dn.qfalse] != null)
                {
                    printWriter.Write("\tnode" + Math.Abs(n.GetHashCode()) + " -> node"
                            + Math.Abs(cart[dn.qfalse].GetHashCode())
                            + " [ label=" + "FALSE" + " ]\n");
                }
            }
        }
        printWriter.Write("}\n");
        printWriter.Close();
    }

    internal virtual string dumpDotNodeColor(Node n)
    {
        return n is LeafNode ? "green" : "red";
    }

    internal virtual string dumpDotNodeShape(Node n)
    {
        return "box";
    }

    /// <summary>
    /// Creates a node from the given input line and add it to the CART.
    /// It expects the TOTAL line to come before any of the nodes.
    /// </summary>
    /// <param name="line">The line of input to parse.</param>
    /// <exception cref="Error"></exception>
    protected internal virtual void parseAndAdd(string line)
    {
        var tokenizer = new StringTokenizer(line, " ");
        string type = tokenizer.nextToken();
        if (type.Equals(LEAF) || type.Equals(NODE))
        {
            cart[curNode] = getNode(type, tokenizer, curNode);
            cart[curNode].setCreationLine(line);
            curNode++;
        }
        else if (type.Equals(TOTAL))
        {
            // TOTAL resets the table to the declared node count.
            cart = new Node[int.Parse(tokenizer.nextToken())];
            curNode = 0;
        }
        else
        {
            throw new Error("Invalid CART type: " + type);
        }
    }

    /// <summary>
    /// Gets the node based upon the type and tokenizer.
    /// </summary>
    /// <param name="type"><code>NODE</code> or <code>LEAF</code></param>
    /// <param name="tokenizer">The StringTokenizer containing the data to get.</param>
    /// <param name="currentNode">The index of the current node we're looking at.</param>
    /// <returns>The node, or null for an unrecognized type.</returns>
    internal virtual Node getNode(string type, StringTokenizer tokenizer, int currentNode)
    {
        if (type.Equals(NODE))
        {
            string feature = tokenizer.nextToken();
            string operand = tokenizer.nextToken();
            object value = parseValue(tokenizer.nextToken());
            //int qfalse = Integer.parseInt(tokenizer.nextToken());
            int qfalse = int.Parse(tokenizer.nextToken());
            if (operand.Equals(OPERAND_MATCHES))
            {
                return new MatchingNode(feature, value.ToString(),
                        currentNode + 1, qfalse);
            }
            else
            {
                return new ComparisonNode(feature, value, operand,
                        currentNode + 1, qfalse);
            }
        }
        else if (type.Equals(LEAF))
        {
            return new LeafNode(parseValue(tokenizer.nextToken()));
        }

        return null;
    }

    /// <summary>
    /// Coerces a string into a value.
    /// </summary>
    /// <param name="_string">of the form "type(value)"; for example, "Float(2.3)"</param>
    /// <returns>The value.</returns>
    protected internal virtual object parseValue(string _string)
    {
        int openParen = _string.IndexOf("(");
        string type = _string.Substring(0, openParen);
        // Bug fix: C# Substring takes (startIndex, LENGTH), not Java's
        // (beginIndex, endIndex). The original passed _string.Length - 1 as a
        // length, which overruns the string and throws. Extract exactly the
        // text between the parentheses.
        string value = _string.Substring(openParen + 1, _string.Length - openParen - 2);
        if (type.Equals("String"))
        {
            return value;
        }
        else if (type.Equals("Float"))
        {
            //return new Float(Float.parseFloat(value));
            return float.Parse(value);
        }
        else if (type.Equals("Integer"))
        {
            //return new Integer(Integer.parseInt(value));
            return int.Parse(value);
        }
        else if (type.Equals("List"))
        {
            var tok = new StringTokenizer(value, ",");
            int size = tok.countTokens();

            int[] values = new int[size];
            for (int i = 0; i < size; i++)
            {
                //float fval = Float.parseFloat(tok.nextToken());
                // NOTE(review): indexes the tokenizer rather than consuming
                // tokens; assumes the project StringTokenizer supports this —
                // confirm against Syn.Speech.Helper.StringTokenizer.
                float fval = float.Parse(tok[i]);
                values[i] = (int)Math.Round(fval);
            }
            return values;
        }
        else
        {
            throw new Error("Unknown type: " + type);
        }
    }

    /// <summary>
    /// Passes the given item through this CART and returns the interpretation.
    /// </summary>
    /// <param name="item">The item to analyze</param>
    /// <returns>The interpretation.</returns>
    public virtual object interpret(Item item)
    {
        int nodeIndex = 0;
        DecisionNode decision;

        while (!(cart[nodeIndex] is LeafNode))
        {
            decision = (DecisionNode)cart[nodeIndex];
            nodeIndex = decision.getNextNode(item);
        }
        this.LoggerInfo("LEAF " + cart[nodeIndex].getValue());
        return cart[nodeIndex].getValue();
    }

    /// <summary>Base class for all CART nodes; holds the node's value.</summary>
    internal abstract class Node
    {
        protected internal object value;

        public Node(object value)
        {
            this.value = value;
        }

        public virtual object getValue()
        {
            return value;
        }

        /// <summary>Formats the value in the "Type(value)" notation used by the file format.</summary>
        public virtual string getValueString()
        {
            if (value == null)
            {
                return "NULL()";
            }
            else if (value is string)
            {
                return "String(" + value + ")";
            }
            else if (value is float)
            {
                return "Float(" + value + ")";
            }
            else if (value is int)
            {
                return "Integer(" + value + ")";
            }
            else
            {
                return value.GetType() + "(" + value
                        + ")";
            }
        }

        public virtual void setCreationLine([In] string obj0)
        {
        }
    }

    /// <summary>A node with true/false successor indices, keyed on an item feature.</summary>
    internal abstract class DecisionNode : Node
    {
        private readonly PathExtractor path;
        protected internal int qfalse;
        protected internal int qtrue;

        public virtual string getFeature()
        {
            return path.ToString();
        }

        public virtual object findFeature([In] Item obj0)
        {
            return path.findFeature(obj0);
        }

        public int getNextNode(Item item)
        {
            return getNextNode(findFeature(item));
        }

        protected DecisionNode(string feature, object value, int qtrue, int qfalse)
            : base(value)
        {
            path = new PathExtractor(feature, true);
            this.qtrue = qtrue;
            this.qfalse = qfalse;
        }

        public abstract int getNextNode([In] object objectValue);
    }

    /// <summary>A decision node comparing a feature with &lt;, = or &gt;.</summary>
    internal class ComparisonNode : DecisionNode
    {
        internal const string LESS_THAN = "<";
        internal const string EQUALS = "=";
        internal const string GREATER_THAN = ">";

        /// <summary>
        /// The comparison type. One of LESS_THAN, GREATER_THAN, or EQUAL_TO.
        /// </summary>
        internal string comparisonType;

        public ComparisonNode(string feature, object value, string comparisonType, int qtrue, int qfalse)
            : base(feature, value, qtrue, qfalse)
        {
            if (!comparisonType.Equals(LESS_THAN)
                    && !comparisonType.Equals(EQUALS)
                    && !comparisonType.Equals(GREATER_THAN))
            {
                throw new Error("Invalid comparison type: " + comparisonType);
            }
            else
            {
                this.comparisonType = comparisonType;
            }
        }

        public override int getNextNode(object objectValue)
        {
            bool yes = false;
            int ret;

            if (comparisonType.Equals(LESS_THAN)
                    || comparisonType.Equals(GREATER_THAN))
            {
                // Numeric comparison; coerce both sides to float if needed.
                float cart_fval;
                float fval;
                if (value is float)
                {
                    cart_fval = ((float)value);
                }
                else
                {
                    cart_fval = float.Parse(value.ToString());
                }
                if (objectValue is float)
                {
                    fval = ((float)objectValue);
                }
                else
                {
                    fval = float.Parse(objectValue.ToString());
                }
                if (comparisonType.Equals(LESS_THAN))
                {
                    yes = (fval < cart_fval);
                }
                else
                {
                    yes = (fval > cart_fval);
                }
            }
            else
            { // comparisonType = "="
                // String equality on the rendered values.
                string sval = objectValue.ToString();
                string cart_sval = value.ToString();
                yes = sval.Equals(cart_sval);
            }
            if (yes)
            {
                ret = qtrue;
            }
            else
            {
                ret = qfalse;
            }
            this.LoggerInfo(trace(objectValue, yes, ret));
            return ret;
        }

        private string trace(object objectValue, bool match, int next)
        {
            return "NODE " + getFeature() + " [" + objectValue + "] "
                    + comparisonType + " [" + getValue() + "] "
                    + (match ? "Yes" : "No") + " next " + next;
        }

        public override string ToString()
        {
            return "NODE " + getFeature() + " " + comparisonType + " "
                    + getValueString() + " " + qtrue + " "
                    + qfalse;
        }
    }

    /// <summary>A decision node testing a feature against a regular expression.</summary>
    internal class MatchingNode : DecisionNode
    {
        internal Pattern pattern;

        public MatchingNode(string feature, string regex, int qtrue, int qfalse)
            : base(feature, regex, qtrue, qfalse)
        {
            pattern = Pattern.Compile(regex);
        }

        public override int getNextNode(object objectValue)
        {
            return pattern.Matcher((string)objectValue).Matches() ? qtrue : qfalse;
        }

        public override string ToString()
        {
            var buf = new StringBuilder(NODE + " " + getFeature() + " " + OPERAND_MATCHES);
            buf.Append(getValueString() + " ");
            buf.Append(qtrue + " ");
            buf.Append(qfalse);
            return buf.ToString();
        }
    }

    /// <summary>A terminal node; interpret() returns its value.</summary>
    internal class LeafNode : Node
    {
        public LeafNode(object value)
            : base(value)
        {
        }

        public override string ToString()
        {
            return "LEAF " + getValueString();
        }
    }
}
}
<|start_filename|>Syn.Speech/Jsgf/JSGFGrammar.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using Syn.Speech.Logging;
using Syn.Speech.Helper;
using Syn.Speech.Jsgf.Parser;
using Syn.Speech.Jsgf.Rule;
using Syn.Speech.Linguist.Dictionary;
using Syn.Speech.Linguist.Language.Grammar;
using Syn.Speech.Util;
using Syn.Speech.Util.Props;
//PATROLLED + REFACTORED
namespace Syn.Speech.Jsgf
{
/**
* <h3>Defines a BNF-style grammar based on JSGF grammar rules in a file.</h3>
*
*
* The Java Speech Grammar Format (JSGF) is a BNF-style, platform-independent,
* and vendor-independent textual representation of grammars for use in speech
* recognition. It is used by the <a
* href="http://java.sun.com/products/java-media/speech/">Java Speech API
* (JSAPI) </a>.
*
* Here we only intend to give a couple of examples of grammars written in JSGF,
* so that you can quickly learn to write your own grammars. For more examples
* and a complete specification of JSGF, go to
*
* <a href="http://java.sun.com/products/java-media/speech/forDevelopers/JSGF/">
* http://java.sun.com/products/java-media/speech/forDevelopers/JSGF/ </a>.
*
*
* <h3>Example 1: "Hello World" in JSGF</h3>
*
* The example below shows how a JSGF grammar that generates the sentences
* "Hello World":
*
* <pre>
* #JSGF V1.0
* public <helloWorld> = Hello World;
* </pre>
*
* <i>Figure 1: Hello grammar that generates the sentences "Hello World". </i>
* <p/>
*
* The above grammar is saved in a file called "hello.gram". It defines a public
* grammar rule called "helloWorld". In order for this grammar rule to be
 * publicly accessible, it must be declared "public". Non-public grammar
* rules are not visible outside of the grammar file.
*
* The location of the grammar file(s) is(are) defined by the
* {@link #PROP_BASE_GRAMMAR_URL baseGrammarURL}property. Since all JSGF grammar
* files end with ".gram", it will automatically search all such files at the
* given URL for the grammar. The name of the grammar to search for is specified
* by {@link #PROP_GRAMMAR_NAME grammarName}. In this example, the grammar name
* is "helloWorld".
*
* <h3>Example 2: Command Grammar in JSGF</h3>
*
* This examples shows a grammar that generates basic control commands like
* "move a menu thanks please", "close file",
* "oh mighty computer please kindly delete menu thanks". It is the same as one
* of the command & control examples in the <a
* href="http://java.sun.com/products/java-media/speech/forDevelopers/JSGF/"
* >JSGF specification </a>. It is considerably more complex than the previous
* example. It defines the public grammar called "basicCmd".
*
* <pre>
* #JSGF V1.0
* public <basicCmd> = <startPolite> <command> <endPolite>;
* <command> = <action> <object>;
* <action> = /10/ open |/2/ close |/1/ delete |/1/ move;
* <object> = [the | a] (window | file | menu);
* <startPolite> = (please | kindly | could you | oh mighty computer) *;
* <endPolite> = [ please | thanks | thank you ];
* </pre>
*
* <i>Figure 2: Command grammar that generates simple control commands. </i>
* <p/>
*
* The features of JSGF that are shown in this example includes:
* <ul>
* <li>using other grammar rules within a grammar rule.
* <li>the OR "|" operator.
* <li>the grouping "(...)" operator.
* <li>the optional grouping "[...]" operator.
* <li>the zero-or-many "*" (called Kleene star) operator.
* <li>a probability (e.g., "open" is more likely than the others).
* </ul>
*
* <h3>From JSGF to Grammar Graph</h3>
*
* After the JSGF grammar is read in, it is converted to a graph of words
* representing the grammar. Lets call this the grammar graph. It is from this
* grammar graph that the eventual search structure used for speech recognition
* is built. Below, we show the grammar graphs created from the above JSGF
* grammars. The nodes <code>"<sil>"</code> means "silence".
*
* <p/>
* <img src="doc-files/helloWorld.jpg"> <br>
*
* <i>Figure 3: Grammar graph created from the Hello World grammar. </i>
* <p/>
* <img src="doc-files/commandGrammar.jpg"> <br>
*
* <i>Figure 4: Grammar graph created from the Command grammar. </i>
*
* <h3>Limitations</h3>
*
* There is a known limitation with the current JSGF support. Grammars that
* contain non-speech loops currently cause the recognizer to hang.
* <p/>
* For example, in the following grammar
*
* <pre>
* #JSGF V1.0
* grammar jsgf.nastygram;
* public <nasty> = I saw a ((cat* | dog* | mouse*)+)+;
* </pre>
*
* the production: ((cat* | dog* | mouse*)+)+ can result in a continuous loop,
* since (cat* | dog* | mouse*) can represent no speech (i.e. zero cats, dogs
* and mice), this is equivalent to ()+. To avoid this problem, the grammar
* writer should ensure that there are no rules that could possibly match no
* speech within a plus operator or kleene star operator.
*
* <h3>Dynamic grammar behavior</h3> It is possible to modify the grammar of a
* running application. Some rules and notes:
* <ul>
* <li>Unlike a JSAPI recognizer, the JSGF Grammar only maintains one Rule
* Grammar. This restriction may be relaxed in the future.
* <li>The grammar should not be modified while a recognition is in process
* <li>The call to JSGFGrammar.loadJSGF will load in a completely new grammar,
* tossing any old grammars or changes. No call to commitChanges is necessary
* (although such a call would be harmless in this situation).
* <li>RuleGrammars can be modified via calls to RuleGrammar.setEnabled and
* RuleGrammar.setRule). In order for these changes to take place,
* JSGFGrammar.commitChanges must be called after all grammar changes have been
* made.
* </ul>
*
* <h3>Implementation Notes</h3>
* <ol>
* <li>All internal probabilities are maintained in LogMath log base.
* </ol>
*/
public class JSGFGrammar : Grammar
{
    /// <summary>
    /// The property that defines the location of the JSGF grammar file.
    /// </summary>
    [S4String]
    public const String PropBaseGrammarUrl = "grammarLocation";

    /// <summary>
    /// The property that defines the name of the JSGF grammar to load.
    /// The name is resolved against the base URL and ".gram" is appended
    /// (see GrammarNameToUrl).
    /// </summary>
    [S4String(DefaultValue = "default.gram")]
    public const String PropGrammarName = "grammarName";

    // ---------------------
    // Configurable data
    // ---------------------
    // Rule grammar produced by the most recent load/CommitChanges.
    private JSGFRuleGrammar _ruleGrammar;
    // Holds every grammar loaded: the main grammar plus all of its imports.
    protected JSGFRuleGrammarManager Manager;
    // Stack of rules currently being expanded; lets ProcessRuleName detect a
    // recursive reference and reuse the graph already under construction.
    protected RuleStack RuleStack;
    private readonly LogMath _logMath;
    // When true, the next CommitChanges() reloads the grammar from its URL.
    protected bool LoadGrammar = true;
    // Initial "<sil>" node of the generated grammar graph.
    protected GrammarNode FirstNode;
    //protected Logger logger;

    /// <summary>
    /// Convenience constructor: resolves a resource location string to a URL
    /// and delegates to the URL-based constructor.
    /// </summary>
    public JSGFGrammar(String location, String grammarName, bool showGrammar, bool optimizeGrammar, bool addSilenceWords, bool addFillerWords, IDictionary dictionary)
        : this(ConfigurationManagerUtils.ResourceToUrl(location), grammarName, showGrammar, optimizeGrammar, addSilenceWords, addFillerWords, dictionary)
    {
    }

    /// <summary>
    /// Creates a JSGF grammar rooted at the given base URL. The grammar file
    /// itself is not read here; loading is deferred until CommitChanges().
    /// </summary>
    public JSGFGrammar(URL baseUrl, String grammarName, bool showGrammar, bool optimizeGrammar, bool addSilenceWords, bool addFillerWords, IDictionary dictionary)
        : base(showGrammar, optimizeGrammar, addSilenceWords, addFillerWords, dictionary)
    {
        _logMath = LogMath.GetLogMath();
        BaseUrl = baseUrl;
        GrammarName = grammarName;
        LoadGrammar = true;
        //logger = Logger.getLogger(getClass().getName());
    }

    // Parameterless constructor for configuration-manager instantiation;
    // NOTE(review): _logMath is not initialized on this path — presumably
    // NewProperties is always called before use. TODO confirm.
    public JSGFGrammar()
    {
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * edu.cmu.sphinx.util.props.Configurable#newProperties(edu.cmu.sphinx.util
     * .props.PropertySheet)
     */
    public override void NewProperties(PropertySheet ps)
    {
        base.NewProperties(ps);
        BaseUrl = ConfigurationManagerUtils.GetResource(PropBaseGrammarUrl, ps);
        //logger = ps.getLogger();
        GrammarName = ps.GetString(PropGrammarName);
        // Force a (re)load of the grammar on the next CommitChanges().
        LoadGrammar = true;
    }

    /**
     * Returns the RuleGrammar of this JSGFGrammar.
     *
     * @return the RuleGrammar
     */
    public JSGFRuleGrammar GetRuleGrammar()
    {
        return _ruleGrammar;
    }

    /**
     * Returns manager used to load grammars
     *
     * @return manager with loaded grammars
     */
    public JSGFRuleGrammarManager GetGrammarManager()
    {
        // Lazily created so a manager exists even before the first load.
        if (Manager == null)
            Manager = new JSGFRuleGrammarManager();
        return Manager;
    }

    /**
     * Sets the URL context of the JSGF grammars.
     *
     * @param url
     *            the URL context of the grammars
     */
    public URL BaseUrl { protected get; set; }

    /** Returns the name of this grammar. */
    public string GrammarName { get; private set; }

    /**
     * The JSGF grammar specified by grammarName will be loaded from the base
     * url (tossing out any previously loaded grammars)
     *
     * @param grammarName
     *            the name of the grammar
     * @throws IOException
     *             if an error occurs while loading or compiling the grammar
     * @throws JSGFGrammarException
     * @throws JSGFGrammarParseException
     */
    public void LoadJSGF(String grammarName)
    {
        GrammarName = grammarName;
        LoadGrammar = true;
        CommitChanges();
    }

    /**
     * Creates the grammar.
     *
     * @return the initial node of the Grammar
     */
    protected override GrammarNode CreateGrammar()
    {
        try
        {
            CommitChanges();
        }
        // Grammar/parse failures are surfaced to the base class as IOException
        // (only the message is preserved, not the original stack trace).
        catch (JSGFGrammarException e)
        {
            throw new IOException(e.Message);
        }
        catch (JSGFGrammarParseException e)
        {
            throw new IOException(((Exception) e).Message);
        }
        return FirstNode;
    }

    /**
     * Returns the initial node for the grammar
     *
     * @return the initial grammar node
     */
    public override GrammarNode InitialNode
    {
        get { return FirstNode; }
    }

    /**
     * Parses the given Rule into a network of GrammarNodes.
     *
     * @param rule
     *            the Rule to parse
     * @return a grammar graph
     */
    protected GrammarGraph ProcessRule(JSGFRule rule)
    {
        GrammarGraph result;

        if (rule != null)
        {
            this.LogInfo("parseRule: " + rule);
        }

        // Dispatch on the concrete JSGF rule type; each case builds a
        // sub-graph with a single start and end node.
        if (rule is JSGFRuleAlternatives)
        {
            result = ProcessRuleAlternatives((JSGFRuleAlternatives)rule);
        }
        else if (rule is JSGFRuleCount)
        {
            result = ProcessRuleCount((JSGFRuleCount)rule);
        }
        else if (rule is JSGFRuleName)
        {
            result = ProcessRuleName((JSGFRuleName)rule);
        }
        else if (rule is JSGFRuleSequence)
        {
            result = ProcessRuleSequence((JSGFRuleSequence)rule);
        }
        else if (rule is JSGFRuleTag)
        {
            result = ProcessRuleTag((JSGFRuleTag)rule);
        }
        else if (rule is JSGFRuleToken)
        {
            result = ProcessRuleToken((JSGFRuleToken)rule);
        }
        else
        {
            throw new ArgumentException("Unsupported Rule type: " + rule);
        }
        return result;
    }

    /**
     * Parses the given RuleName into a network of GrammarNodes.
     *
     * @param initialRuleName
     *            the RuleName rule to parse
     * @return a grammar graph
     */
    private GrammarGraph ProcessRuleName(JSGFRuleName initialRuleName)
    {
        this.LogInfo("parseRuleName: " + initialRuleName);
        GrammarGraph result = RuleStack.Contains(initialRuleName.GetRuleName());

        if (result != null)
        { // its a recursive call
            return result;
        }
        else
        {
            // Push a placeholder graph so any recursive reference below
            // resolves to this graph instead of recursing forever.
            result = new GrammarGraph(this);
            RuleStack.Push(initialRuleName.GetRuleName(), result);
        }
        JSGFRuleName ruleName = _ruleGrammar.Resolve(initialRuleName);

        if (ruleName == JSGFRuleName.Null)
        {
            // <NULL> matches empty speech: start connects directly to end.
            result.StartNode.Add(result.EndNode, 0.0f);
        }
        else if (ruleName == JSGFRuleName.Void)
        {
            // no connection for void
        }
        else
        {
            if (ruleName == null)
            {
                throw new JSGFGrammarException("Can't resolve "
                        + initialRuleName + " g "
                        + initialRuleName.GetFullGrammarName());
            }
            JSGFRuleGrammar rg = Manager.RetrieveGrammar(ruleName
                    .GetFullGrammarName());
            if (rg == null)
            {
                throw new JSGFGrammarException("Can't resolve grammar name "
                        + ruleName.GetFullGrammarName());
            }

            JSGFRule rule = rg.GetRule(ruleName.GetSimpleRuleName());
            if (rule == null)
            {
                throw new JSGFGrammarException("Can't resolve rule: "
                        + ruleName.GetRuleName());
            }
            GrammarGraph ruleResult = ProcessRule(rule);
            // Splice the referenced rule's graph between this rule's start
            // and end nodes (unless recursion already returned this graph).
            if (result != ruleResult)
            {
                result.StartNode.Add(ruleResult.StartNode, 0.0f);
                ruleResult.EndNode.Add(result.EndNode, 0.0f);
            }
        }
        RuleStack.Pop();
        return result;
    }

    /**
     * Parses the given RuleCount into a network of GrammarNodes.
     *
     * @param ruleCount
     *            the RuleCount object to parse
     * @return a grammar graph
     */
    private GrammarGraph ProcessRuleCount(JSGFRuleCount ruleCount)
    {
        this.LogInfo("parseRuleCount: " + ruleCount);
        GrammarGraph result = new GrammarGraph(this);
        int count = ruleCount.Count;
        GrammarGraph newNodes = ProcessRule(ruleCount.Rule);

        result.StartNode.Add(newNodes.StartNode, 0.0f);
        newNodes.EndNode.Add(result.EndNode, 0.0f);

        // if this is optional, add a bypass arc
        if (count == JSGFRuleCount.ZeroOrMore
                || count == JSGFRuleCount.Optional)
        {
            result.StartNode.Add(result.EndNode, 0.0f);
        }

        // if this can possibly occur more than once, add a loopback
        if (count == JSGFRuleCount.OnceOrMore
                || count == JSGFRuleCount.ZeroOrMore)
        {
            newNodes.EndNode.Add(newNodes.StartNode, 0.0f);
        }
        return result;
    }

    /**
     * Parses the given RuleAlternatives into a network of GrammarNodes.
     *
     * @param ruleAlternatives
     *            the RuleAlternatives to parse
     * @return a grammar graph
     */
    private GrammarGraph ProcessRuleAlternatives(JSGFRuleAlternatives ruleAlternatives)
    {
        this.LogInfo("parseRuleAlternatives: " + ruleAlternatives);
        GrammarGraph result = new GrammarGraph(this);
        List<JSGFRule> rules = ruleAlternatives.GetRules();
        List<Float> weights = GetNormalizedWeights(ruleAlternatives.GetWeights());

        // expand each alternative, and connect them in parallel
        for (int i = 0; i < rules.Count; i++)
        {
            JSGFRule rule = rules[i];
            float weight = 0.0f;
            if (weights != null)
            {
                weight = weights[i];
            }
            this.LogInfo("Alternative: " + rule);
            GrammarGraph newNodes = ProcessRule(rule);
            result.StartNode.Add(newNodes.StartNode, weight);
            newNodes.EndNode.Add(result.EndNode, 0.0f);
        }

        return result;
    }

    /**
     * Normalize the weights. The weights should always be zero or greater. We
     * need to convert the weights to a log probability.
     *
     * @param weights
     *            the weights to normalize
     */
    private List<Float> GetNormalizedWeights(List<Float> weights)
    {

        if (weights == null)
        {
            return null;
        }

        double sum = 0.0;
        foreach (float weight in weights)
        {
            if (weight < 0)
            {
                throw new ArgumentException("Negative weight " + weight);
            }
            sum += weight;
        }

        var normalized = new List<Float>(weights);

        for (int i = 0; i < weights.Count; i++)
        {
            if (sum == 0.0f)
            {
                // All-zero weights: every alternative gets log-zero.
                normalized.Set(i, LogMath.LogZero);
            }
            else
            {
                // Convert the linear fraction weight/sum to log domain.
                normalized.Set(i, _logMath.LinearToLog(weights[i] / sum));
            }
        }
        return normalized;
    }

    /**
     * Parses the given RuleSequence into a network of GrammarNodes.
     *
     * @param ruleSequence
     *            the RuleSequence to parse
     * @return the first and last GrammarNodes of the network
     */
    private GrammarGraph ProcessRuleSequence(JSGFRuleSequence ruleSequence)
    {

        GrammarNode startNode = null;
        GrammarNode endNode = null;
        this.LogInfo("parseRuleSequence: " + ruleSequence);

        List<JSGFRule> rules = ruleSequence.Rules;

        GrammarNode lastGrammarNode = null;

        // expand and connect each rule in the sequence serially
        for (int i = 0; i < rules.Count; i++)
        {
            JSGFRule rule = rules[i];
            GrammarGraph newNodes = ProcessRule(rule);

            // first node
            if (i == 0)
            {
                startNode = newNodes.StartNode;
            }

            // last node
            if (i == (rules.Count - 1))
            {
                endNode = newNodes.EndNode;
            }

            if (i > 0)
            {
                lastGrammarNode.Add(newNodes.StartNode, 0.0f);
            }
            lastGrammarNode = newNodes.EndNode;
        }

        return new GrammarGraph(startNode, endNode, this);
    }

    /**
     * Parses the given RuleTag into a network GrammarNodes.
     *
     * @param ruleTag
     *            the RuleTag to parse
     * @return the first and last GrammarNodes of the network
     */
    private GrammarGraph ProcessRuleTag(JSGFRuleTag ruleTag)
    {
        // Tags carry no graph structure; only the tagged rule is expanded.
        this.LogInfo("parseRuleTag: " + ruleTag);
        JSGFRule rule = ruleTag.Rule;
        return ProcessRule(rule);
    }

    /**
     * Creates a GrammarNode with the word in the given RuleToken.
     *
     * @param ruleToken
     *            the RuleToken that contains the word
     * @return a GrammarNode with the word in the given RuleToken
     */
    private GrammarGraph ProcessRuleToken(JSGFRuleToken ruleToken)
    {
        GrammarNode node = CreateGrammarNode(ruleToken.Text);
        return new GrammarGraph(node, node, this);
    }

    // ///////////////////////////////////////////////////////////////////
    // Loading part
    // //////////////////////////////////////////////////////////////////

    // Maps a dotted grammar name to the URL of its ".gram" file under baseUrl.
    private static URL GrammarNameToUrl(URL baseUrl, String grammarName)
    {

        // Convert each period in the grammar name to a slash "/"
        // Append a slash and the converted grammar name to the base URL
        // Append the ".gram" suffix
        grammarName = grammarName.Replace('.', '/');
        StringBuilder sb = new StringBuilder();
        if (baseUrl != null)
        {
            sb.Append(baseUrl);
            if (sb[sb.Length - 1] != '/')
                sb.Append('/');
        }
        sb.Append(grammarName).Append(".gram");
        String urlstr = sb.ToString();

        URL grammarUrl = null;
        try
        {
            grammarUrl = new URL(URLType.Path, urlstr);
        }
        catch (UriFormatException me)
        {
            //grammarURL = ClassLoader.getSystemResource(urlstr); //TODO: Check behaviour of comment
            // NOTE(review): grammarUrl is always null here (the classloader
            // fallback above is commented out), so this always rethrows.
            if (grammarUrl == null)
                throw new UriFormatException(urlstr);
        }

        return grammarUrl;
    }

    /**
     * Commit changes to all loaded grammars and all changes of grammar since
     * the last commitChange
     *
     * @throws JSGFGrammarParseException
     * @throws JSGFGrammarException
     */
    public virtual void CommitChanges()
    {
        try
        {
            // Step 1: (re)load the main grammar and its imports if needed.
            if (LoadGrammar)
            {
                if (Manager == null)
                    GetGrammarManager();
                _ruleGrammar = LoadNamedGrammar(GrammarName);
                LoadImports(_ruleGrammar);
                LoadGrammar = false;
            }

            // Step 2: resolve cross-grammar rule references.
            Manager.LinkGrammars();
            RuleStack = new RuleStack();
            NewGrammar();

            // Step 3: build the graph; every public rule is connected in
            // parallel between an initial and a final "<sil>" node.
            FirstNode = CreateGrammarNode("<sil>");
            GrammarNode finalNode = CreateGrammarNode("<sil>");
            finalNode.SetFinalNode(true);

            // go through each rule and create a network of GrammarNodes
            // for each of them
            foreach (String ruleName in _ruleGrammar.GetRuleNames())
            {
                if (_ruleGrammar.IsRulePublic(ruleName))
                {
                    String fullName = GetFullRuleName(ruleName);
                    GrammarGraph publicRuleGraph = new GrammarGraph(this);
                    RuleStack.Push(fullName, publicRuleGraph);
                    JSGFRule rule = _ruleGrammar.GetRule(ruleName);
                    GrammarGraph graph = ProcessRule(rule);
                    RuleStack.Pop();

                    FirstNode.Add(publicRuleGraph.StartNode, 0.0f);
                    publicRuleGraph.EndNode.Add(finalNode, 0.0f);
                    publicRuleGraph.StartNode.Add(graph.StartNode,
                            0.0f);
                    graph.EndNode.Add(publicRuleGraph.EndNode, 0.0f);
                }
            }
            PostProcessGrammar();
            if (Logger.Level == LogLevel.All)
            {
                DumpGrammar();
            }
        }
        catch (UriFormatException mue)
        {
            throw new IOException("bad base grammar URL " + BaseUrl + ' ' + mue);
        }
    }

    /**
     * Load grammars imported by the specified RuleGrammar if they are not
     * already loaded.
     *
     * @throws JSGFGrammarParseException
     */
    private void LoadImports(JSGFRuleGrammar grammar)
    {
        // Recursively load every declared import that is not yet known to
        // the manager, then scan rule bodies for fully-qualified references.
        foreach (JSGFRuleName ruleName in grammar.Imports)
        {
            // System.out.println ("Checking import " + ruleName);
            String grammarName = ruleName.GetFullGrammarName();
            JSGFRuleGrammar importedGrammar = GetNamedRuleGrammar(grammarName);

            if (importedGrammar == null)
            {
                // System.out.println ("Grammar " + grammarName +
                // " not found. Loading.");
                importedGrammar = LoadNamedGrammar(ruleName
                        .GetFullGrammarName());
            }
            if (importedGrammar != null)
            {
                LoadImports(importedGrammar);
            }
        }
        LoadFullQualifiedRules(grammar);
    }

    // Looks up an already-loaded grammar by name; null if not loaded yet.
    private JSGFRuleGrammar GetNamedRuleGrammar(String grammarName)
    {
        return Manager.RetrieveGrammar(grammarName);
    }

    /**
     * Load named grammar from import rule
     *
     * @param grammarName
     * @return already loaded grammar
     * @throws JSGFGrammarParseException
     * @throws IOException
     */
    private JSGFRuleGrammar LoadNamedGrammar(String grammarName)
    {
        var url = GrammarNameToUrl(BaseUrl, grammarName);
        JSGFRuleGrammar ruleGrammar = JSGFParser.NewGrammarFromJSGF(url, new JSGFRuleGrammarFactory(Manager));
        ruleGrammar.SetEnabled(true);

        return ruleGrammar;
    }

    /**
     * Load grammars imported by a fully qualified Rule Token if they are not
     * already loaded.
     *
     * @param grammar
     * @throws IOException
     * @throws GrammarException
     * @throws JSGFGrammarParseException
     */
    private void LoadFullQualifiedRules(JSGFRuleGrammar grammar)
    {

        // Go through every rule
        foreach (String ruleName in grammar.GetRuleNames())
        {
            String rule = grammar.GetRule(ruleName).ToString();
            // check for rule-Tokens
            // Scan the textual rule body for "<...>" references; any that
            // carry a grammar prefix trigger a load of that grammar.
            int index = 0;
            while (index < rule.Length)
            {
                index = rule.IndexOf('<', index);
                if (index < 0)
                {
                    break;
                }
                // Extract rule name
                var endIndex = rule.IndexOf('>', index + 1);
                JSGFRuleName extractedRuleName = new JSGFRuleName(rule
                        .Substring(index + 1, endIndex - (index + 1))
                        .Trim());
                index = endIndex + 1;

                // Check for full qualified rule name
                if (extractedRuleName.GetFullGrammarName() != null)
                {
                    String grammarName = extractedRuleName.GetFullGrammarName();
                    JSGFRuleGrammar importedGrammar = GetNamedRuleGrammar(grammarName);
                    if (importedGrammar == null)
                    {
                        importedGrammar = LoadNamedGrammar(grammarName);
                    }
                    if (importedGrammar != null)
                    {
                        LoadImports(importedGrammar);
                    }
                }
            }
        }
    }

    /**
     * Gets the fully resolved rule name
     *
     * @param ruleName
     *            the partial name
     * @return the fully resolved name
     * @throws JSGFGrammarException
     */
    private String GetFullRuleName(String ruleName)
    {
        JSGFRuleName rname = _ruleGrammar.Resolve(new JSGFRuleName(ruleName));
        return rname.GetRuleName();
    }

    /** Dumps interesting things about this grammar */
    protected void DumpGrammar()
    {
        Console.WriteLine("Imported rules { ");

        foreach (JSGFRuleName imp in _ruleGrammar.GetImports())
        {
            Console.WriteLine("  Import " + imp.GetRuleName());
        }
        Console.WriteLine("}");

        Console.WriteLine("Rulenames { ");
        foreach (String name in _ruleGrammar.GetRuleNames())
        {
            Console.WriteLine("  Name " + name);
        }
        Console.WriteLine("}");
    }
}
}
<|start_filename|>Syn.Speech/Decoder/Adaptation/Transform.cs<|end_filename|>
using System.Diagnostics;
using System.IO;
using System.Text;
using Syn.Speech.Helper;
using Syn.Speech.Helper.Math;
using Syn.Speech.Linguist.Acoustic.Tiedstate;
//PATROLLED
namespace Syn.Speech.Decoder.Adaptation
{
/// <summary>
/// MLLR transformation (A and B of A*x + B = C) computed per cluster and per
/// feature stream, with Sphinx3-compatible load/store serialization.
/// </summary>
public class Transform
{
    // As[cluster][stream] is a square matrix: A from A*x + B = C.
    private float[][][][] As;
    // Bs[cluster][stream] is an offset vector: B from A*x + B = C.
    private float[][][] Bs;
    private Sphinx3Loader loader;
    private int nrOfClusters;

    public Transform(Sphinx3Loader loader, int nrOfClusters)
    {
        this.loader = loader;
        this.nrOfClusters = nrOfClusters;
    }

    /// <summary>
    /// Used for access to A matrix.
    /// </summary>
    /// <returns>A matrix (representing A from A*x + B = C)</returns>
    public float[][][][] getAs()
    {
        return As;
    }

    /// <summary>
    /// Used for access to B matrix.
    /// </summary>
    /// <returns>B matrix (representing B from A*x + B = C)</returns>
    public float[][][] getBs()
    {
        return Bs;
    }

    /// <summary>
    /// Writes the transformation to file in a format that could further be used in Sphinx3 and Sphinx4.
    /// </summary>
    /// <param name="filePath">The file path.</param>
    /// <param name="index">The index of the cluster to write.</param>
    public void store(string filePath, int index)
    {
        // "using" guarantees the writer is flushed and closed even when a
        // write fails (the previous explicit Close() leaked on exceptions).
        using (var writer = new StreamWriter(filePath, false, Encoding.UTF8))
        {
            // nMllrClass
            writer.WriteLine("1");
            writer.WriteLine(loader.getNumStreams());

            for (var i = 0; i < loader.getNumStreams(); i++)
            {
                writer.WriteLine(loader.getVectorLength()[i]);

                // A matrix, one row per line.
                for (var j = 0; j < loader.getVectorLength()[i]; j++)
                {
                    for (var k = 0; k < loader.getVectorLength()[i]; ++k)
                    {
                        writer.Write(As[index][i][j][k]);
                        writer.Write(" ");
                    }
                    writer.WriteLine();
                }

                // B vector.
                for (var j = 0; j < loader.getVectorLength()[i]; j++)
                {
                    writer.Write(Bs[index][i][j]);
                    writer.Write(" ");
                }
                writer.WriteLine();

                // Trailing row of 1.0 scale factors expected by the format.
                for (var j = 0; j < loader.getVectorLength()[i]; j++)
                {
                    writer.Write("1.0 ");
                }
                writer.WriteLine();
            }
        }
    }

    /// <summary>
    /// Used for computing the actual transformations (A and B matrices). These are stored in As and Bs.
    /// </summary>
    /// <param name="regLs">Accumulated regression L statistics, per cluster/stream/row.</param>
    /// <param name="regRs">Accumulated regression R statistics, per cluster/stream/row.</param>
    private void computeMllrTransforms(double[][][][][] regLs,
            double[][][][] regRs)
    {
        int len;

        for (int c = 0; c < nrOfClusters; c++)
        {
            this.As[c] = new float[loader.getNumStreams()][][];
            this.Bs[c] = new float[loader.getNumStreams()][];

            for (int i = 0; i < loader.getNumStreams(); i++)
            {
                len = loader.getVectorLength()[i];
                this.As[c][i] = new float[len][];
                this.Bs[c][i] = new float[len];

                for (int j = 0; j < len; ++j)
                {
                    // BUG FIX: each row of the jagged A matrix must be
                    // allocated before it is written (was left null,
                    // causing a NullReferenceException below).
                    this.As[c][i][j] = new float[len];

                    // Solve the linear system L * [A_row | B_j] = R for
                    // row j; the first len entries are the A row, the
                    // last entry is the B offset.
                    var coef = new Array2DRowRealMatrix(regLs[c][i][j], false);
                    var solver = new LUDecomposition(coef).getSolver();
                    var vect = new ArrayRealVector(regRs[c][i][j], false);
                    var ABloc = solver.solve(vect);

                    for (int k = 0; k < len; ++k)
                    {
                        this.As[c][i][j][k] = (float)ABloc.getEntry(k);
                    }

                    this.Bs[c][i][j] = (float)ABloc.getEntry(len);
                }
            }
        }
    }

    /// <summary>
    /// Read the transformation from a file (Sphinx3 MLLR format, one class).
    /// </summary>
    /// <param name="filePath">The file path.</param>
    public void load(string filePath)
    {
        //TODO: IMPLEMENT A LESS MEMORY CONSUMING METHOD
        var input = new Scanner(File.ReadAllText(filePath));
        int nMllrClass = input.nextInt();

        // Only single-class transforms are supported by this reader.
        Trace.Assert(nMllrClass == 1);

        int numStreams = input.nextInt();

        // BUG FIX: size the length array by the number of streams (it was
        // "new int[1]", overflowing whenever numStreams > 1).
        int[] vectorLength = new int[numStreams];

        // BUG FIX: allocate the per-class arrays once, outside the loop
        // (they were re-allocated on every iteration), and allocate every
        // inner array before writing into it.
        this.As = new float[nMllrClass][][][];
        this.Bs = new float[nMllrClass][][];
        this.As[0] = new float[numStreams][][];
        this.Bs[0] = new float[numStreams][];

        for (int i = 0; i < numStreams; i++)
        {
            vectorLength[i] = input.nextInt();

            int length = vectorLength[i];
            this.As[0][i] = new float[length][];
            this.Bs[0][i] = new float[length];

            for (int j = 0; j < length; j++)
            {
                this.As[0][i][j] = new float[length];
                for (int k = 0; k < length; ++k)
                {
                    As[0][i][j][k] = input.nextFloat();
                }
            }

            for (int j = 0; j < length; j++)
            {
                Bs[0][i][j] = input.nextFloat();
            }
        }
        //input.close();
    }

    /// <summary>
    /// Stores in current object a transform generated on the provided stats.
    /// </summary>
    /// <param name="stats">Provided stats that were previously collected from Result objects.</param>
    public void update(Stats stats)
    {
        stats.fillRegLowerPart();
        As = new float[nrOfClusters][][][];
        Bs = new float[nrOfClusters][][];
        this.computeMllrTransforms(stats.getRegLs(), stats.getRegRs());
    }
}
}
<|start_filename|>Syn.Speech/SpeakerId/SpeakerIdentification.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using Syn.Speech.FrontEnds;
using Syn.Speech.FrontEnds.Util;
using Syn.Speech.Helper;
using Syn.Speech.Helper.Mathematics.Linear;
using Syn.Speech.Helper.Mathematics.Stat.Correlation;
using Syn.Speech.Properties;
using Syn.Speech.Util.Props;
//REFACTORED
namespace Syn.Speech.SpeakerId
{
/// <summary>
/// Provides method for detecting the number of speakers from a given input file
/// </summary>
public class SpeakerIdentification : Identification
{
public readonly String FrontendName = "plpFrontEnd";
private readonly FrontEnd _frontEnd;
private readonly StreamDataSource _audioSource;
private ConfigurationManager cm;
public SpeakerIdentification()
{
URL url = new URL(URLType.Resource, Resources.speakerid_frontend_config);
cm = new ConfigurationManager(url);
_audioSource = cm.Lookup("streamDataSource") as StreamDataSource;
_frontEnd = cm.Lookup(FrontendName) as FrontEnd;
}
/**
* @return The list of feature vectors from the fileStream used by
* audioSource
*/
private List<float[]> GetFeatures()
{
List<float[]> ret = new List<float[]>();
try
{
int featureLength = -1;
IData feature = _frontEnd.GetData();
while (!(feature is DataEndSignal))
{
if (feature is DoubleData)
{
double[] featureData = ((DoubleData)feature).Values;
if (featureLength < 0)
{
featureLength = featureData.Length;
}
float[] convertedData = new float[featureData.Length];
for (int i = 0; i < featureData.Length; i++)
{
convertedData[i] = (float)featureData[i];
}
ret.Add(convertedData);
}
else if (feature is FloatData)
{
float[] featureData = ((FloatData)feature).Values;
if (featureLength < 0)
{
featureLength = featureData.Length;
}
ret.Add(featureData);
}
feature = _frontEnd.GetData();
}
}
catch (Exception e)
{
e.PrintStackTrace();
}
return ret;
}
/**
*
* @param bicValue
* The bicValue of the model represented by only one Gaussian.
* This parameter it's useful when this function is called
* repeatedly for different frame values and the same features
* parameter
* @param frame
* the frame which is tested for being a change point
* @param features
* the feature vectors matrix
* @return the likelihood ratio
*/
static double GetLikelihoodRatio(double bicValue, int frame, Array2DRowRealMatrix features)
{
double bicValue1, bicValue2;
int d = Segment.FeaturesSize;
double penalty = 0.5 * (d + 0.5 * d * (d + 1)) * Math.Log(features.getRowDimension()) * 2;
int nrows = features.getRowDimension(), ncols = features.getColumnDimension();
Array2DRowRealMatrix sub1, sub2;
sub1 = (Array2DRowRealMatrix)features.getSubMatrix(0, frame - 1, 0, ncols - 1);
sub2 = (Array2DRowRealMatrix)features.getSubMatrix(frame, nrows - 1, 0, ncols - 1);
bicValue1 = GetBICValue(sub1);
bicValue2 = GetBICValue(sub2);
return (bicValue - bicValue1 - bicValue2 - penalty);
}
/**
* @param start
* The starting frame
* @param length
* The length of the interval, as numbers of frames
* @param features
* The matrix build with feature vectors as rows
* @return Returns the changing point in the input represented by features
*
*/
private static int GetPoint(int start, int length, int step, Array2DRowRealMatrix features)
{
double max = Double.NegativeInfinity;
int ncols = features.getColumnDimension(), point = 0;
var sub = (Array2DRowRealMatrix)features.getSubMatrix(start, start + length - 1, 0, ncols - 1);
double bicValue = GetBICValue(sub);
for (int i = Segment.FeaturesSize + 1; i < length - Segment.FeaturesSize; i += step)
{
double aux = GetLikelihoodRatio(bicValue, i, sub);
if (aux > max)
{
max = aux;
point = i;
}
}
if (max < 0)
point = Integer.MIN_VALUE;
return point + start;
}
/**
*
* @param features
* Matrix with feature vectors as rows
* @return A list with all changing points detected in the file
*/
private LinkedList<Integer> GetAllChangingPoints(Array2DRowRealMatrix features)
{
LinkedList<Integer> ret = new LinkedList<Integer>();
ret.Add(0);
int framesCount = features.getRowDimension(), step = 500;
int start = 0, end = step, cp;
while (end < framesCount)
{
cp = GetPoint(start, end - start + 1, step / 10, features);
if (cp > 0)
{
start = cp;
end = start + step;
ret.Add(cp);
}
else
end += step;
}
ret.Add(framesCount);
return ret;
}
/**
* @param mat
* A matrix with feature vectors as rows.
* @return Returns the BICValue of the Gaussian model that approximates the
* the feature vectors data samples
*/
public static double GetBICValue(Array2DRowRealMatrix mat)
{
double ret = 0;
EigenDecomposition ed = new EigenDecomposition(new Covariance(mat).getCovarianceMatrix());
double[] re = ed.getRealEigenvalues();
for (int i = 0; i < re.Length; i++)
ret += Math.Log(re[i]);
return ret * (mat.getRowDimension() / 2);
}
/**
* @param inputFileName The name of the file used for diarization
* @return A cluster for each speaker found in the input file
*/
public List<SpeakerCluster> Cluster(Stream stream)
{
_audioSource.SetInputStream(stream);
List<float[]> features = GetFeatures();
return Cluster(features);
}
/**
* @param features The feature vectors to be used for clustering
* @return A cluster for each speaker detected based on the feature vectors provided
*/
public List<SpeakerCluster> Cluster(List<float[]> features)
{
List<SpeakerCluster> ret = new List<SpeakerCluster>();
Array2DRowRealMatrix featuresMatrix = ArrayToRealMatrix(features, features.Count);
LinkedList<Integer> l = GetAllChangingPoints(featuresMatrix);
var it = l.GetEnumerator();
int curent;
it.MoveNext();
int previous = it.Current;
while (it.MoveNext())
{
curent = it.Current;
Segment s = new Segment(previous * Segment.FrameLength, (curent - previous)
* (Segment.FrameLength));
Array2DRowRealMatrix featuresSubset = (Array2DRowRealMatrix)featuresMatrix.getSubMatrix(
previous, curent - 1, 0, 12);
ret.Add(new SpeakerCluster(s, featuresSubset, GetBICValue(featuresSubset)));
previous = curent;
}
int clusterCount = ret.Count;
Array2DRowRealMatrix distance;
distance = new Array2DRowRealMatrix(clusterCount, clusterCount);
distance = UpdateDistances(ret);
while (true)
{
double distmin = 0;
int imin = -1, jmin = -1;
for (int i = 0; i < clusterCount; i++)
for (int j = 0; j < clusterCount; j++)
if (i != j)
distmin += distance.getEntry(i, j);
distmin /= (clusterCount * (clusterCount - 1) * 4);
for (int i = 0; i < clusterCount; i++)
{
for (int j = 0; j < clusterCount; j++)
{
if (distance.getEntry(i, j) < distmin && i != j)
{
distmin = distance.getEntry(i, j);
imin = i;
jmin = j;
}
}
}
if (imin == -1)
{
break;
}
ret[imin].MergeWith(ret[jmin]);
UpdateDistances(ret, imin, jmin, distance);
ret.Remove(jmin);
clusterCount--;
}
return ret;
}
/**
* @param Clustering
* The array of clusters
* @param posi
* The index of the merged cluster
* @param posj
* The index of the cluster that will be eliminated from the
* clustering
* @param distance
* The distance matrix that will be updated
*/
void UpdateDistances(List<SpeakerCluster> clustering, int posi, int posj, Array2DRowRealMatrix distance)
{
int clusterCount = clustering.Count;
for (int i = 0; i < clusterCount; i++)
{
distance.setEntry(i, posi, ComputeDistance(clustering[i], clustering[posi]));
distance.setEntry(posi, i, distance.getEntry(i, posi));
}
for (int i = posj; i < clusterCount - 1; i++)
for (int j = 0; j < clusterCount; j++)
distance.setEntry(i, j, distance.getEntry(i + 1, j));
for (int i = 0; i < clusterCount; i++)
for (int j = posj; j < clusterCount - 1; j++)
distance.setEntry(i, j, distance.getEntry(i, j + 1));
}
/**
* @param Clustering
* The array of clusters
*/
Array2DRowRealMatrix UpdateDistances(List<SpeakerCluster> clustering)
{
    // Build the full, symmetric distance matrix from scratch: compute each
    // lower-triangle entry once and mirror it into the upper triangle.
    var count = clustering.Count;
    var result = new Array2DRowRealMatrix(count, count);
    for (var row = 0; row < count; row++)
    {
        for (var col = 0; col <= row; col++)
        {
            var d = ComputeDistance(clustering[row], clustering[col]);
            result.setEntry(row, col, d);
            result.setEntry(col, row, d);
        }
    }
    return result;
}
/// <summary>
/// BIC-based distance between two clusters: the BIC value of their stacked
/// feature matrices minus each cluster's own BIC value and a model-size penalty.
/// </summary>
static double ComputeDistance(SpeakerCluster c1, SpeakerCluster c2)
{
    var rows1 = c1.FeatureMatrix.getRowDimension();
    var rows2 = c2.FeatureMatrix.getRowDimension();
    var cols = c1.FeatureMatrix.getColumnDimension();
    // Stack the two feature matrices vertically.
    var combined = new Array2DRowRealMatrix(rows1 + rows2, cols);
    combined.setSubMatrix(c1.FeatureMatrix.getData(), 0, 0);
    combined.setSubMatrix(c2.FeatureMatrix.getData(), rows1, 0);
    double bicValue = GetBICValue(combined);
    // Penalty grows with feature dimensionality and (log of) sample count.
    double d = Segment.FeaturesSize;
    double penalty = 0.5 * (d + 0.5 * d * (d + 1)) * Math.Log(combined.getRowDimension()) * 2;
    return bicValue - c1.GetBicValue() - c2.GetBicValue() - penalty;
}
/**
 * @param lst
 *            A list whose entries are all vectors of the same dimension
 * @param size
 *            The number of vectors to copy from the list
 * @return The RealMatrix with the vectors from the list as its rows
 */
static Array2DRowRealMatrix ArrayToRealMatrix(List<float[]> lst, int size)
{
    // Take the vector dimension from the first element; the previous port
    // read lst[1], which throws IndexOutOfRangeException for single-element
    // lists (all vectors share the same length, so any index works when
    // there are at least two).
    int length = lst[0].Length;
    var ret = new Array2DRowRealMatrix(size, length);
    for (int i = 0; i < size; i++)
    {
        // Widen each float component to double for the matrix row.
        double[] converted = new double[length];
        for (int j = 0; j < length; j++)
            converted[j] = lst[i][j];
        ret.setRow(i, converted);
    }
    return ret;
}
/// <summary>Writes the matrix to the console, one row per line, entries separated by spaces.</summary>
void PrintMatrix(Array2DRowRealMatrix a)
{
    int rows = a.getRowDimension();
    int cols = a.getColumnDimension();
    for (int r = 0; r < rows; r++)
    {
        for (int c = 0; c < cols; c++)
        {
            Console.Write(a.getEntry(r, c) + " ");
        }
        Console.WriteLine();
    }
}
}
}
<|start_filename|>Syn.Speech/Helper/Extensions.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;
namespace Syn.Speech.Helper
{
/// <summary>
/// Java-compatibility helpers used throughout the Sphinx4 port: regex replace,
/// queue-style LinkedList operations, Map-style dictionary access, array copies,
/// and a Java System.currentTimeMillis equivalent.
/// </summary>
public static class Extensions
{
    // Unix epoch; reference point for currentTimeMillis().
    private static readonly DateTime Jan1st1970 = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);

    /// <summary>Equivalent of Java's String.replaceAll: regex-based replacement.</summary>
    public static string ReplaceAll(this string value, string pattern, string replacement)
    {
        var regex = new Regex(pattern);
        return regex.Replace(value, replacement);
    }

    /// <summary>Equivalent of Java's Arrays.fill: sets every element to <paramref name="with"/>.</summary>
    public static void Fill<T>(this T[] originalArray, T with)
    {
        for (var i = 0; i < originalArray.Length; i++)
        {
            originalArray[i] = with;
        }
    }

    /// <summary>Queue-style offer: appends to the tail; always succeeds.</summary>
    public static bool offer<T>(this LinkedList<T> source, T item)
    {
        source.AddLast(item);
        return true;
    }

    /// <summary>Queue-style poll: removes and returns the head element.</summary>
    public static T poll<T>(this LinkedList<T> source)
    {
        var toReturn = source.First.Value;
        source.RemoveFirst();
        return toReturn;
    }

    /// <summary>Map.getOrDefault equivalent: returns the mapped value or the default.</summary>
    public static V getProperty<T, V>(this Dictionary<T, V> source, T expectedValue, V defaultValue)
    {
        // Single lookup via TryGetValue instead of ContainsKey + indexer.
        V value;
        return source.TryGetValue(expectedValue, out value) ? value : defaultValue;
    }

    /// <summary>Arrays.copyOfRange equivalent: copies src[start..end) into a new array.</summary>
    public static double[] copyOfRange(double[] src, int start, int end)
    {
        int len = end - start;
        double[] dest = new double[len];
        Array.Copy(src, start, dest, 0, len);
        return dest;
    }

    /// <summary>Arrays.copyOf equivalent: copies the first <paramref name="length"/> elements.</summary>
    public static double[] copyOf(double[] src, int length)
    {
        return copyOfRange(src, 0, length);
    }

    /// <summary>
    /// Returns the first custom attribute of type T found on the member named
    /// <paramref name="memberName"/>, or default(T) when the member or the
    /// attribute is absent.
    /// </summary>
    public static T GetField<T>(this Type source, string memberName)
    {
        foreach (var member in source.GetMembers())
        {
            // Check the name first; the original fetched attributes for every
            // member and indexed attribute[0] unguarded, which throws when the
            // matching member carries no attribute of type T.
            if (member.Name != memberName) continue;
            var attributes = member.GetCustomAttributes(typeof(T), false);
            if (attributes.Length > 0) return (T)attributes[0];
        }
        return default(T);
    }

    /// <summary>
    /// Extension for Java's System.currentTimeMillis
    /// </summary>
    /// <returns>Milliseconds elapsed since the Unix epoch (UTC).</returns>
    public static long currentTimeMillis()
    {
        return (long)((DateTime.UtcNow - Jan1st1970).TotalMilliseconds);
    }

    /// <summary>List.remove(int) equivalent: removes and returns the element at index.</summary>
    public static T Remove<T>(this List<T> source, int index)
    {
        var toReturn = source[index];
        source.RemoveAt(index);
        return toReturn;
    }

    /// <summary>Map.put equivalent: inserts or overwrites the mapping for key.</summary>
    public static void Put<V, T>(Dictionary<T, V> source, T key, V value)
    {
        // The indexer adds-or-overwrites in a single lookup.
        source[key] = value;
    }
}
}
<|start_filename|>Syn.Speech/Decoder/FrameDecoder.cs<|end_filename|>
using System.Collections.Generic;
using Syn.Speech.Common.FrontEnd;
using Syn.Speech.Decoder.Search;
using Syn.Speech.FrontEnd;
using Syn.Speech.FrontEnd.EndPoint;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Decoder
{
public class FrameDecoder : AbstractDecoder, IDataProcessor
{
    // Upstream processor in the front-end pipeline supplying frames/signals.
    private IDataProcessor predecessor;
    // True while inside a speech segment (between SpeechStartSignal and SpeechEndSignal).
    private bool isRecognizing;
    // Latest (possibly non-final) recognition result; cleared after listeners fire.
    private Results.Result result;

    public FrameDecoder(ISearchManager searchManager, bool fireNonFinalResults, bool autoAllocate,
        List<IResultListener> listeners)
        : base(searchManager, fireNonFinalResults, autoAllocate, listeners)
    {
    }

    public FrameDecoder() { }

    /// <summary>
    /// Runs a single recognition step (one frame). The referenceText argument
    /// is ignored by this decoder.
    /// </summary>
    public override Results.Result decode(string referenceText)
    {
        return searchManager.recognize(1);
    }

    /// <summary>
    /// Pulls one data object from the predecessor, drives recognition as a
    /// side effect, and passes the data through unchanged. Order matters:
    /// decoding happens before the SpeechEndSignal handler stops recognition,
    /// so the final frames still produce a result.
    /// </summary>
    public IData getData()
    {
        IData d = getPredecessor().getData();
        // Decode on every audio frame while in a speech segment, and on the
        // end-of-speech marker itself.
        if (isRecognizing && (d is FloatData || d is DoubleData || d is SpeechEndSignal))
        {
            result = decode(null);
            if (result != null)
            {
                fireResultListeners(result);
                result = null;
            }
        }
        // we also trigger recognition on a DataEndSignal to allow threaded scorers to shut down correctly
        if (d is DataEndSignal)
        {
            searchManager.stopRecognition();
        }
        if (d is SpeechStartSignal)
        {
            searchManager.startRecognition();
            isRecognizing = true;
            result = null;
        }
        if (d is SpeechEndSignal)
        {
            searchManager.stopRecognition();
            //fire results which were not yet final
            if (result != null)
                fireResultListeners(result);
            isRecognizing = false;
        }
        return d;
    }

    public override void newProperties(PropertySheet ps)
    {
        //TODO: THIS DOESN'T EXIST in SPHINX4
    }

    // Returns the upstream data processor this decoder reads from.
    public IDataProcessor getPredecessor()
    {
        return predecessor;
    }

    // Sets the upstream data processor this decoder reads from.
    public void setPredecessor(IDataProcessor predecessor)
    {
        this.predecessor = predecessor;
    }

    // No initialization needed; state is set up in the constructor.
    public void initialize() {}
}
}
<|start_filename|>Syn.Speech/Result/Edge.cs<|end_filename|>
using System;
using System.IO;
namespace Syn.Speech.Result
{
/// <summary>
/// Edges are part of Lattices. They connect Nodes, and contain the score associated with that sequence.
/// </summary>
public class Edge
{
    protected double acousticScore;
    protected double lmScore;
    protected Node fromNode = null;
    protected Node toNode = null;

    /**
    /// Create an Edge from fromNode to toNode with acoustic and Language Model scores.
    *
    /// @param fromNode the source node
    /// @param toNode the destination node
    /// @param acousticScore acoustic score of the word this edge leads to
    /// @param lmScore language model score of the transition
    */
    public Edge(Node fromNode, Node toNode,
        double acousticScore, double lmScore)
    {
        this.acousticScore = acousticScore;
        this.lmScore = lmScore;
        this.fromNode = fromNode;
        this.toNode = toNode;
    }

    override
    public String ToString()
    {
        return "Edge(" + fromNode + "-->" + toNode + '[' + acousticScore
            + ',' + lmScore + "])";
    }

    /**
    /// Internal routine used when creating a Lattice from a .LAT file.
    /// Expects the tokens produced by dump(): fromId toId acousticScore [lmScore].
    *
    /// @param lattice the lattice being populated
    /// @param tokens the remaining tokens of the "edge:" line
    */
    public static void load(Lattice lattice, StringTokenizer tokens)
    {
        String from = tokens.nextToken();
        String to = tokens.nextToken();
        // dump() writes the scores as floating-point values; the previous
        // port parsed the acoustic score with int.Parse, which throws a
        // FormatException on any non-integral score, and silently dropped
        // the language-model score. Parse both as doubles; the lmScore is
        // optional for backward compatibility with 3-field files.
        double acousticScore = Double.Parse(tokens.nextToken());
        double lmScore = tokens.hasMoreTokens() ? Double.Parse(tokens.nextToken()) : 0.0;
        Node fromNode = lattice.getNode(from);
        if (fromNode == null) {
            throw new Exception("Edge fromNode \"" + from + "\" does not exist");
        }
        Node toNode = lattice.getNode(to);
        if (toNode == null) {
            throw new Exception("Edge toNode \"" + to + "\" does not exist");
        }
        lattice.addEdge(fromNode, toNode, acousticScore, lmScore);
    }

    /**
    /// Internal routine used when dumping a Lattice as a .LAT file
    *
    /// @param f
    /// @throws IOException
    */
    public void dump(StreamWriter f)
    {
        f.WriteLine("edge: " + fromNode.getId() + " " + toNode.getId() + " "
            + acousticScore + " " + lmScore);
    }

    /**
    /// Internal routine used when dumping a Lattice as an AiSee file
    *
    /// @param f
    /// @throws IOException
    */
    public void dumpAISee(StreamWriter f)
    {
        f.Write("edge: { sourcename: \"" + fromNode.getId()
            + "\" targetname: \"" + toNode.getId()
            + "\" label: \"" + acousticScore + ',' + lmScore + "\" }\n");
    }

    /**
    /// Internal routine used when dumping a Lattice as an Graphviz file
    *
    /// @param f
    /// @throws IOException
    */
    public void dumpDot(StreamWriter f)
    {
        String label = "" + acousticScore + "," + lmScore;
        f.Write("\tnode" + fromNode.getId() + " -> node" + toNode.getId()
            + " [ label=\"" + label + "\" ]\n");
    }

    /**
    /// Get the acoustic score associated with an Edge. This is the acoustic
    /// score of the word that this edge is transitioning to, that is, the word
    /// represented by the node returned by the getToNode() method.
    ///
    /// @return the acoustic score of the word this edge is transitioning to
    */
    public double getAcousticScore()
    {
        return acousticScore;
    }

    /**
    /// Get the language model score associated with an Edge
    *
    /// @return the score
    */
    public double getLMScore()
    {
        return lmScore;
    }

    /**
    /// Get the "from" Node associated with an Edge
    *
    /// @return the Node
    */
    public Node getFromNode()
    {
        return fromNode;
    }

    /**
    /// Get the "to" Node associated with an Edge
    *
    /// @return the Node
    */
    public Node getToNode()
    {
        return toNode;
    }

    /**
    /// Sets the acoustic score
    *
    /// @param v the acoustic score.
    */
    public void setAcousticScore(double v)
    {
        acousticScore = v;
    }

    /**
    /// Sets the language model score
    *
    /// @param v the lm score.
    */
    public void setLMScore(double v)
    {
        lmScore = v;
    }

    /**
    /// Returns true if the given edge is equivalent to this edge. Two edges are equivalent only if they have their
    /// 'fromNode' and 'toNode' are equivalent, and that their acoustic and language scores are the same.
    *
    /// @param other the Edge to compare this Edge against
    /// @return true if the Edges are equivalent; false otherwise
    */
    public Boolean isEquivalent(Edge other)
    {
        /*
        /// Acoustic scores may differ minutely between equivalent lattices,
        /// so equality is judged within a small relative tolerance instead
        /// of exactly.
        */
        double diff = Math.Abs(acousticScore) * 0.00001;
        return ((Math.Abs(acousticScore - other.getAcousticScore()) <= diff &&
            lmScore == other.getLMScore()) &&
            (fromNode.isEquivalent(other.getFromNode()) &&
                toNode.isEquivalent(other.getToNode())));
    }
}
}
<|start_filename|>Syn.Speech/Tools/Bandwidth/BandDetector.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using Syn.Speech.FrontEnds;
using Syn.Speech.FrontEnds.FrequencyWarp;
using Syn.Speech.FrontEnds.Transform;
using Syn.Speech.FrontEnds.Util;
using Syn.Speech.FrontEnds.Window;
//PATROLLED + REFACTORED
namespace Syn.Speech.Tools.Bandwidth
{
/**
* A simple energy-based detector for upsampled audio. Could be used to detect
* bandwidth issues leading to the accuracy issues.
*
* The detector simply looks for energies in different mel bands and using the
* threshold it decides if we have cut of the frequencies signal. On every frame
* we find the maximum energy band, then we just control that energy doesn't
* fall too fast in upper bands.
*
* A paper on the subject is "DETECTING BANDLIMITED AUDIO IN BROADCAST TELEVISION SHOWS"
* by by <NAME>, <NAME> and <NAME> where spline approximation is proposed
* for detection. However, the paper seems to contain a fundamental flaw. The
* decision is made on average spectrum, not per-frame. This probably leads
* to omission of the events in high frequency which might signal about wide band.
*/
public class BandDetector
{
    const int Bands = 40;

    //From 4750 to 6800 Hz
    const int HighRangeStart = 35;
    const int HighRangeEnd = 39;

    //From 2156 to 3687 Hz
    const int LowRangeStart = 23;
    const int LowRangeEnd = 29;

    //Thresholds, selected during the experiments, about -30dB
    const double NoSignalLevel = 0.02;
    const double SignalLevel = 0.5;

    //Don't care if intensity is very low
    const double LowIntensity = 1e+5;

    private readonly FrontEnd _frontend;
    private readonly AudioFileDataSource _source;

    public BandDetector()
    {
        // standard frontend: source -> pre-emphasized window -> FFT -> mel filterbank
        _source = new AudioFileDataSource(320, null);
        var windower = new RaisedCosineWindower(0.97f, 25.625f, 10.0f);
        var fft = new DiscreteFourierTransform(512, false);
        var filterbank = new MelFrequencyFilterBank(130.0, 6800.0, Bands);
        var list = new List<IDataProcessor> { _source, windower, fft, filterbank };
        _frontend = new FrontEnd(list);
    }

    /// <summary>
    /// Entry point: accepts either a single .wav file or a text file listing
    /// one audio file per line.
    /// </summary>
    public static void Main(string[] args)
    {
        if (args.Length < 1)
        {
            Console.WriteLine("Usage: Detector <filename.wav> or Detector <filelist>");
            return;
        }
        if (args[0].EndsWith(".wav"))
        {
            var detector = new BandDetector();
            Console.WriteLine("Bandwidth for " + args[0] + " is " + detector.Bandwidth(args[0]));
        }
        else
        {
            var detector = new BandDetector();
            // 'using' guarantees the list file is closed even if Bandwidth
            // throws; the original only closed it on the success path.
            using (TextReader reader = new StreamReader(args[0]))
            {
                string line;
                while ((line = reader.ReadLine()) != null)
                {
                    if (detector.Bandwidth(line)) Console.WriteLine("Bandwidth for " + line + " is low");
                }
            }
        }
    }

    /// <summary>
    /// Returns true when the file looks band-limited: strong normalized energy
    /// in the low mel bands but almost none in the high bands.
    /// </summary>
    /// <param name="file">path of the audio file to analyze</param>
    public bool Bandwidth(String file)
    {
        _source.SetAudioFile(file, "");

        IData data;
        var energy = new double[Bands];

        while ((data = _frontend.GetData()) != null)
        {
            if (data is DoubleData)
            {
                double maxIntensity = LowIntensity;
                double[] frame = ((DoubleData)data).Values;

                for (int i = 0; i < Bands; i++)
                    maxIntensity = Math.Max(maxIntensity, frame[i]);

                // Skip near-silent frames entirely.
                if (maxIntensity <= LowIntensity)
                {
                    continue;
                }

                // Track, per band, the strongest per-frame-normalized energy
                // seen anywhere in the file.
                for (int i = 0; i < Bands; i++)
                {
                    energy[i] = Math.Max(frame[i] / maxIntensity, energy[i]);
                }
            }
        }

        double maxLow = Max(energy, LowRangeStart, LowRangeEnd);
        double maxHi = Max(energy, HighRangeStart, HighRangeEnd);

        return maxHi < NoSignalLevel && maxLow > SignalLevel;
    }

    // Maximum of energy[start..end], inclusive on both ends.
    private static double Max(double[] energy, int start, int end)
    {
        double max = 0;
        for (int i = start; i <= end; i++)
            max = Math.Max(max, energy[i]);
        return max;
    }
}
}
<|start_filename|>Syn.Speech/Alignment/PathExtractor.cs<|end_filename|>
using System;
using System.Collections.Generic;
using Syn.Speech.Helper;
//PATROLLED
namespace Syn.Speech.Alignment
{
/// <summary>
/// Extracts items and features from an Item tree by interpreting a compiled
/// "path.feature" specification (a port of freetts' PathExtractor).
/// </summary>
public class PathExtractor
{
    // Resolved once from environment variables mirroring the freetts
    // system properties (see the *_PROPERTY constants below).
    private static readonly bool INTERPRET_PATHS;
    private static readonly bool LAZY_COMPILE;

    public const string INTERPRET_PATHS_PROPERTY = "com.sun.speech.freetts.interpretCartPaths";
    public const string LAZY_COMPILE_PROPERTY = "com.sun.speech.freetts.lazyCartCompile";

    private readonly string pathAndFeature; // raw "path.feature" specification
    private readonly string path;           // navigation portion; may be null for a bare feature
    private readonly string feature;        // feature name (set only when wantFeature is true)
    private object[] compiledPath;          // compiled op sequence; built lazily unless LAZY_COMPILE is off

    static PathExtractor()
    {
        // Use the declared property-name constants instead of re-spelling the
        // literals (the original duplicated the strings).
        var interpretPaths = Environment.GetEnvironmentVariable(INTERPRET_PATHS_PROPERTY);
        INTERPRET_PATHS = !string.IsNullOrEmpty(interpretPaths) && bool.Parse(interpretPaths);

        var lazyCompile = Environment.GetEnvironmentVariable(LAZY_COMPILE_PROPERTY);
        LAZY_COMPILE = string.IsNullOrEmpty(lazyCompile) || bool.Parse(lazyCompile);
    }

    /// <summary>
    /// Creates an extractor for the given specification.
    /// </summary>
    /// <param name="pathAndFeature">spec of the form "path.feature" or "feature"</param>
    /// <param name="wantFeature">when true, the trailing component names a feature</param>
    public PathExtractor(string pathAndFeature, bool wantFeature)
    {
        this.pathAndFeature = pathAndFeature;
        if (INTERPRET_PATHS)
        {
            // Interpreted mode delegates to Item.findItem/findFeature directly.
            path = pathAndFeature;
            return;
        }

        if (wantFeature)
        {
            int lastDot = pathAndFeature.LastIndexOf(".");
            // string can be of the form "p.feature" or just "feature"
            if (lastDot == -1)
            {
                feature = pathAndFeature;
                path = null;
            }
            else
            {
                feature = pathAndFeature.Substring(lastDot + 1);
                path = pathAndFeature.Substring(0, lastDot);
            }
        }
        else
        {
            path = pathAndFeature;
        }

        if (!LAZY_COMPILE)
        {
            compiledPath = compile(path);
        }
    }

    /// <summary>
    /// Walks the compiled path starting from <paramref name="item"/> and
    /// returns the item it leads to (or null if the walk falls off the tree).
    /// </summary>
    public virtual Item findItem(Item item)
    {
        if (INTERPRET_PATHS)
        {
            return item.findItem(path);
        }

        if (compiledPath == null)
        {
            compiledPath = compile(path);
        }

        Item pitem = item;

        for (int i = 0; pitem != null && i < compiledPath.Length; )
        {
            OpEnum op = (OpEnum)compiledPath[i++];
            if (op == OpEnum.NEXT)
            {
                pitem = pitem.getNext();
            }
            else if (op == OpEnum.PREV)
            {
                pitem = pitem.getPrevious();
            }
            else if (op == OpEnum.NEXT_NEXT)
            {
                pitem = pitem.getNext();
                if (pitem != null)
                {
                    pitem = pitem.getNext();
                }
            }
            else if (op == OpEnum.PREV_PREV)
            {
                pitem = pitem.getPrevious();
                if (pitem != null)
                {
                    pitem = pitem.getPrevious();
                }
            }
            else if (op == OpEnum.PARENT)
            {
                pitem = pitem.getParent();
            }
            else if (op == OpEnum.DAUGHTER)
            {
                pitem = pitem.getDaughter();
            }
            else if (op == OpEnum.LAST_DAUGHTER)
            {
                pitem = pitem.getLastDaughter();
            }
            else if (op == OpEnum.RELATION)
            {
                // RELATION consumes its operand: the relation name.
                string relationName = (string)compiledPath[i++];
                pitem =
                    pitem.getSharedContents()
                        .getItemRelation(relationName);
            }
            else
            {
                this.LoggerInfo("findItem: bad feature " + op + " in "
                    + path);
            }
        }
        return pitem;
    }

    /// <summary>
    /// Walks the path and returns the named feature of the destination item;
    /// returns "0" when the item or feature cannot be found.
    /// </summary>
    public virtual object findFeature(Item item)
    {
        if (INTERPRET_PATHS)
        {
            return item.findFeature(path);
        }

        Item pitem = findItem(item);
        object results = null;
        if (pitem != null)
        {
            this.LoggerInfo("findFeature: Item [" + pitem + "], feature '" + feature + "'");
            results = pitem.getFeatures().getObject(feature);
        }
        // freetts convention: missing features evaluate to "0".
        results = (results == null) ? "0" : results;
        this.LoggerInfo("findFeature: ...results = '" + results + "'");
        return results;
    }

    /// <summary>
    /// Compiles a textual path ("n.p.parent...") into an array of OpEnum
    /// operations (with RELATION followed by its relation-name operand).
    /// </summary>
    private object[] compile(string pathToCompile)
    {
        // The original port ignored this parameter and always read the 'path'
        // field; use the argument so the method matches its signature (all
        // current callers pass 'path', so behavior is unchanged).
        if (pathToCompile == null)
        {
            return new object[0];
        }

        var list = new List<object>();
        var tok = new StringTokenizer(pathToCompile, ":.");

        while (tok.hasMoreTokens())
        {
            string token = tok.nextToken();
            OpEnum op = OpEnum.getInstance(token);
            if (op == null)
            {
                throw new Error("Bad path compiled " + pathToCompile);
            }

            list.Add(op);

            if (op == OpEnum.RELATION)
            {
                list.Add(tok.nextToken());
            }
        }
        return list.ToArray();
    }

    public override string ToString()
    {
        return pathAndFeature;
    }
}
}
<|start_filename|>Syn.Speech/Result/Lattice.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using Syn.Speech.Common;
using Syn.Speech.Decoder.Search;
using Syn.Speech.Linguist;
using Syn.Speech.Linguist.Dictionary;
using Syn.Speech.Util;
namespace Syn.Speech.Result
{
/**
/// <p/>
/// Provides recognition lattice results. Lattices are created from {@link edu.cmu.sphinx.result.Result Results} which
/// can be partial or final. </p>
/// <p/>
/// Lattices describe all theories considered by the Recognizer that have not been pruned out. Lattices are a directed
/// graph containing {@link edu.cmu.sphinx.result.Node Nodes} and {@link edu.cmu.sphinx.result.Edge Edges}. A Node that
/// corresponds to a theory that a word was spoken over a particular period of time. An Edge that corresponds to the
/// score of one word following another. The usual result transcript is the sequence of Nodes though the Lattice with
/// the best scoring path. Lattices are a useful tool for analyzing "alternate results". </p>
/// <p/>
/// A Lattice can be created from a Result that has a full token tree (with its corresponding
/// AlternativeHypothesisManager). Currently, only the {@link edu.cmu.sphinx.decoder.search.WordPruningBreadthFirstSearchManager}
/// has an AlternativeHypothesisManager. Furthermore, the lattice construction code currently only works for linguists
/// where the {@link edu.cmu.sphinx.linguist.WordSearchState} returns false on the <code>isWordStart</code> method, i.e.,
/// where the word states appear at the end of the word in the linguist. <i>Therefore, lattices should only be created
/// from Result from the {@link edu.cmu.sphinx.linguist.lextree.LexTreeLinguist} and the {@link
/// edu.cmu.sphinx.decoder.search.WordPruningBreadthFirstSearchManager}. </i> </p>
/// <p/>
/// Lattices can also be created from a collapsed {@link edu.cmu.sphinx.decoder.search.Token} tree and its
/// AlternativeHypothesisManager. This is what 'collapsed' means. Normally, between two word tokens is a series of tokens
/// for other types of states, such as unit or HMM states. Using 'W' for word tokens, 'U' for unit tokens, 'H' for HMM
/// tokens, a token chain can look like: </p>
/// <pre>
/// W - U - H - H - H - H - U - H - H - H - H - W
/// </pre>
/// <p/>
/// Usually, HMM tokens contains acoustic scores, and word tokens contains language scores. If we want to know the total
/// acoustic and language scores between any two words, it is unnecessary to keep around the unit and HMM tokens.
/// Therefore, all their acoustic and language scores are 'collapsed' into one token, so that it will look like: </p>
/// <pre>
/// W - P - W
/// </pre>
/// <p/>
/// where 'P' is a token that represents the path between the two words, and P contains the acoustic and language scores
/// between the two words. It is this type of collapsed token tree that the Lattice class is expecting. Normally, the
/// task of collapsing the token tree is done by the {@link edu.cmu.sphinx.decoder.search.WordPruningBreadthFirstSearchManager}.
/// A collapsed token tree can look like: </p>
/// <pre>
/// "cat" - P - </s>
/// /
/// P
/// /
/// <s> - P - "a" - P - "big"
/// \
/// P
/// \
/// "dog" - P - </s>
/// </pre>
/// <p/>
/// When a Lattice is constructed from a Result, the above collapsed token tree together with the alternate hypothesis of
/// "all" instead of "a", will be converted into a Lattice that looks like the following:
/// <pre>
/// "a" "cat"
/// / \ / \
/// <s> "big" - </s>
/// \ / \ /
/// "all" "dog"
/// </pre>
/// <p/>
/// Initially, a lattice can have redundant nodes, i.e., nodes referring to the same word and that originate from the
/// same parent node. These nodes can be collapsed using the {@link LatticeOptimizer}. </p>
*/
public class Lattice
{
protected Node initialNode;
public Node terminalNode;
protected List<Edge> edges=new List<Edge>();
protected Dictionary<String, Node> nodes= new Dictionary<String, Node>();
protected double logBase;
protected LogMath logMath=LogMath.getLogMath();
private List<Token> visitedWordTokens;
private AlternateHypothesisManager loserManager;
/** Create an empty Lattice. */
public Lattice() {
    // Intentionally empty: the node map and edge list are initialized by
    // their field initializers.
}
/**
/// Create a Lattice from a Result.
/// <p/>
/// The Lattice is created from the Token tree referenced by the Result. The Lattice is then optimized to all
/// collapse equivalent paths.
*
/// @param result the result to convert into a lattice
*/
public Lattice(Result result)
{
    visitedWordTokens = new List<Token>();
    loserManager = result.getAlternateHypothesisManager();
    if (loserManager != null) {
        // Drop alternate hypotheses that can no longer contribute paths.
        loserManager.purge();
    }

    // Prefer the final result tokens; fall back to the active token list
    // when no token reached a final state.
    List<Token> tokens = null;
    if(result.getBestFinalToken() == null)
        tokens = result.getActiveTokens().getTokens();
    else
        tokens = result.getResultTokens();
    foreach (Token token in tokens)
    {
        // Walk back to the nearest word token, skipping unit/HMM tokens.
        Token tokenLocal = token.getPredecessor();
        while (tokenLocal != null && !tokenLocal.isWord())
        {
            tokenLocal = tokenLocal.getPredecessor();
        }
        // Every path is expected to end at the sentence-end word.
        Trace.Assert( tokenLocal != null && tokenLocal.getWord().isSentenceEndWord());

        if (terminalNode == null)
        {
            // Lazily create the shared terminal (</s>) node; begin/end
            // times are unknown here (-1).
            terminalNode = new Node(getNodeID(result.getBestToken()),
                token.getWord(), -1, -1);
            initialNode = terminalNode;
            addNode(terminalNode);
        }
        collapseWordToken(tokenLocal);
    }
}
/**
/// Returns the node corresponding to the given word token.
*
/// @param token the token which we want a node of
/// @return the node of the given token
*/
private Node getNode(Token token)
{
    if (token.getWord().isSentenceEndWord())
    {
        return terminalNode;
    }
    // C#'s Dictionary indexer throws KeyNotFoundException for a missing key
    // (unlike Java's HashMap.get, which returns null), so the original
    // 'node == null' branch was unreachable and every first lookup crashed.
    // Probe with TryGetValue and create the node on a miss.
    Node node;
    if (!nodes.TryGetValue(getNodeID(token), out node))
    {
        IWordSearchState wordState =
            (IWordSearchState) token.getSearchState();
        // Depending on where the linguist places word states, the token's
        // frame number is either the word's start or end frame.
        int startFrame = -1;
        int endFrame = -1;
        if (wordState.isWordStart()) {
            startFrame = token.getFrameNumber();
        } else {
            endFrame = token.getFrameNumber();
        }
        node = new Node(getNodeID(token), token.getWord(),
            startFrame, endFrame);
        addNode(node);
    }
    return node;
}
/**
/// Collapse the given word-ending token. This means collapsing all the unit
/// and HMM tokens that correspond to the word represented by this token into
/// an edge of the lattice.
///
/// @param token
/// the word-ending token to collapse
*/
private void collapseWordToken(Token token)
{
    Trace.Assert(token != null);
    // Each word token is collapsed at most once.
    if (visitedWordTokens.Contains(token))
    {
        return;
    }
    visitedWordTokens.Add(token);
    // Collapse the best path leading into this word...
    collapseWordPath(getNode(token), token.getPredecessor(),
        token.getAcousticScore() + token.getInsertionScore(),
        token.getLanguageScore());
    // ...and every alternate ("loser") path recorded for it.
    // NOTE(review): the loser paths start from the acoustic score WITHOUT
    // getInsertionScore(), unlike the best path above — this matches the
    // upstream Sphinx4 code; verify it is intentional.
    if (loserManager != null
        && loserManager.hasAlternatePredecessors(token)) {
        foreach (Token loser in loserManager.getAlternatePredecessors(token))
        {
            collapseWordPath(getNode(token), loser,
                token.getAcousticScore(), token.getLanguageScore());
        }
    }
}
/**
/// @param parentWordNode
/// the 'toNode' of the returned edge
/// @param token
/// the predecessor token of the token represented by the
/// parentWordNode
/// @param acousticScore
/// the acoustic score until and including the parent of token
/// @param languageScore
/// the language score until and including the parent of token
*/
private void collapseWordPath(Node parentWordNode, Token token,
    float acousticScore, float languageScore) {

    if (token == null)
        return;

    if (token.isWord()) {
        /*
        /// If this is a word, create a Node for it, and then create an edge
        /// from the Node to the parentWordNode
        */
        Node fromNode = getNode(token);
        addEdge(fromNode, parentWordNode, acousticScore,
            languageScore);
        if (token.getPredecessor() != null) {
            /* Collapse the token sequence ending in this token. */
            collapseWordToken(token);
        } else {
            /* we've reached the sentence start token */
            Trace.Assert(token.getWord().isSentenceStartWord());
            initialNode = fromNode;
        }
        return;
    }

    /*
    /// If a non-word token, just add the acoustic and language scores to the
    /// current totals, and then move on to the predecessor token. Fast
    /// forward through the not so interesting states to save stack space.
    */
    while (true) {
        acousticScore += token.getAcousticScore()
            + token.getInsertionScore();
        languageScore += token.getLanguageScore();
        Token preToken = token.getPredecessor();

        if (preToken == null)
            return;

        // Stop fast-forwarding at the next word token, or at any token that
        // has alternate predecessors we must branch into.
        if (preToken.isWord()
            || (loserManager != null && loserManager
                .hasAlternatePredecessors(token)))
            break;
        token = preToken;
    }

    // Continue collapsing along the main predecessor chain.
    collapseWordPath(parentWordNode, token.getPredecessor(), acousticScore,
        languageScore);

    /* Traverse the path(s) for the loser token(s). */
    if (loserManager != null
        && loserManager.hasAlternatePredecessors(token)) {
        foreach (Token loser in loserManager.getAlternatePredecessors(token))
        {
            collapseWordPath(parentWordNode, loser, acousticScore,
                languageScore);
        }
    }
}
/**
/// Returns an ID for the Node associated with the given token.
*
/// @param token the token associated with the Node
/// @return an ID for the Node
*/
private String getNodeID(Token token)
{
    // Node IDs are simply the token's hash code rendered as a string.
    return Convert.ToString(token.GetHashCode());
}
/**
/// Create a Lattice from a LAT file. LAT files are created by the method Lattice.dump()
*
/// @param fileName
*/
public Lattice(String fileName)
{
    try {
        Debug.Print("Loading from " + fileName);

        // load the nodes: each line starts with a record-type tag
        // ("node:", "edge:", ...) followed by its fields.
        LineNumberReader _in = new LineNumberReader(new StreamReader(fileName));
        String line=_in.readLine();
        while (line != null)
        {
            StringTokenizer tokens = new StringTokenizer(line);
            if (tokens.hasMoreTokens()) {
                String type = tokens.nextToken();

                if (type.Equals("edge:"))
                {
                    Edge.load(this, tokens);
                }
                else if (type.Equals("node:"))
                {
                    Node.load(this, tokens);
                }
                else if (type.Equals("initialNode:"))
                {
                    setInitialNode(getNode(tokens.nextToken()));
                }
                else if (type.Equals("terminalNode:"))
                {
                    setTerminalNode(getNode(tokens.nextToken()));
                }
                else if (type.Equals("logBase:"))
                {
                    logBase = Double.Parse(tokens.nextToken());
                }
                else
                {
                    // Unknown record type: close the reader and report the
                    // offending line number.
                    _in.close();
                    throw new Exception("SYNTAX ERROR: " + fileName +
                        '[' + _in.getLineNumber() + "] " + line);
                }
            }
            line=_in.readLine();
        }
        _in.close();
    }
    catch (Exception e) {
        // NOTE(review): rewrapping discards the original exception type and
        // stack trace; callers only see the message text.
        throw new Exception(e.ToString());
    }
}
/**
/// Add an edge from fromNode to toNode. This method creates the Edge object and does all the connecting
*
/// @param fromNode
/// @param toNode
/// @param acousticScore
/// @param lmScore
/// @return the new Edge
*/
public Edge addEdge(Node fromNode, Node toNode,
    double acousticScore, double lmScore)
{
    // Create the edge and wire it into both endpoint nodes as well as the
    // lattice-wide edge list.
    var edge = new Edge(fromNode, toNode, acousticScore, lmScore);
    fromNode.addLeavingEdge(edge);
    toNode.addEnteringEdge(edge);
    edges.Add(edge);
    return edge;
}
/**
/// Add a Node that represents the theory that a given word was spoken over a given period of time.
*
/// @param word
/// @param beginTime
/// @param endTime
/// @return the new Node
*/
public Node addNode(IWord word, int beginTime, int endTime)
{
    // Create the node, register it in the lattice, and hand it back.
    var node = new Node(word, beginTime, endTime);
    addNode(node);
    return node;
}
/**
/// Add a Node with a given ID that represents the theory that a given word was spoken over a given period of time.
/// This method is used when loading Lattices from .LAT files.
*
/// @param word
/// @param beginTime
/// @param endTime
/// @return the new Node
*/
protected Node addNode(String id, IWord word, int beginTime, int endTime)
{
    // Same as the IWord overload, but with an explicit node ID (used by the
    // .LAT file loader).
    var node = new Node(id, word, beginTime, endTime);
    addNode(node);
    return node;
}
/**
/// Add a Node with a given ID that represents the theory that a given word was spoken over a given period of time.
/// This method is used when loading Lattices from .LAT files.
*
/// @param word
/// @param beginTime
/// @param endTime
/// @return the new Node
*/
public Node addNode(String id, String word, int beginTime, int endTime)
{
    // Wrap the raw string in a Word with no pronunciations, then delegate.
    var wrapped = new Word(word, new Pronunciation[0], false);
    return addNode(id, wrapped, beginTime, endTime);
}
/**
/// Add a Node corresponding to a Token from the result Token tree. Usually, the Token should reference a search
/// state that is a WordSearchState, although other Tokens may be used for debugging.
*
/// @param token
/// @return the new Node
*/
protected Node addNode(Token token, int beginTime, int endTime)
{
    Trace.Assert(token.getSearchState() is IWordSearchState);
    // Pull the pronounced word out of the token's word search state; the
    // token's hash code serves as the node ID.
    var pronouncedWord = ((IWordSearchState)(token.getSearchState()))
        .getPronunciation().getWord();
    return addNode(token.GetHashCode().ToString(), pronouncedWord, beginTime, endTime);
}
/**
/// Test to see if the Lattice contains an Edge
*
/// @param edge
/// @return true if yes
*/
public Boolean hasEdge(Edge edge)
{
    // Membership test against the lattice-wide edge list.
    return edges.IndexOf(edge) >= 0;
}
/**
/// Test to see if the Lattice contains a Node
*
/// @param node
/// @return true if yes
*/
public Boolean hasNode(Node node)
{
    // Nodes are keyed by their ID, so membership is an ID lookup.
    return nodes.ContainsKey(node.getId());
}
/**
/// Test to see if the Lattice already contains a Node corresponding to a given Token.
*
/// @param ID the ID of the Node to find
/// @return true if yes
*/
public Boolean hasNode(String ID)
{
    // Direct key lookup in the node map.
    bool present = nodes.ContainsKey(ID);
    return present;
}
/**
/// Add a Node to the set of all Nodes
*
/// @param n
*/
protected void addNode(Node n)
{
    // A node may be registered only once; duplicate IDs indicate a bug.
    var id = n.getId();
    Trace.Assert(!hasNode(id));
    nodes.Add(id, n);
}
/**
/// Remove a Node from the set of all Nodes
*
/// @param n
*/
protected void removeNode(Node n)
{
    // The node must currently be registered in the lattice.
    var id = n.getId();
    Trace.Assert(hasNode(id));
    nodes.Remove(id);
}
/**
/// Get the Node associated with an ID
*
/// @param id
/// @return the Node
*/
public Node getNode(String id)
{
    // Mirror Java HashMap.get semantics: return null (rather than throwing
    // KeyNotFoundException, which the Dictionary indexer does) when the ID
    // is absent — callers such as Edge.load rely on the null check.
    Node node;
    nodes.TryGetValue(id, out node);
    return node;
}
/**
/// Get a copy of the Collection of all Nodes. Used by LatticeOptimizer to avoid Concurrent modification of the nodes
/// list.
*
/// @return a copy of the collection of Nodes
*/
public List<Node> getCopyOfNodes()
{
    // Snapshot the current node collection into a fresh list so callers can
    // modify the lattice while iterating.
    return new List<Node>(nodes.Values);
}
/**
/// Get the Collection of all Nodes.
*
/// @return the collection of all Nodes
*/
public List<Node> getNodes()
{
    // Materialize the node map's values as a list.
    return new List<Node>(nodes.Values);
}
/**
/// Remove an Edge from the set of all Edges.
*
/// @param e
*/
protected void removeEdge(Edge e)
{
    // Removing an edge that is not present is silently ignored.
    this.edges.Remove(e);
}
/**
/// Get the set of all Edges.
*
/// @return the set of all edges
*/
public List<Edge> getEdges()
{
    // Returns the live edge list (not a copy).
    return this.edges;
}
/// <summary>
/// Dumps the lattice in the format understood by AiSee, a graph
/// visualization tool. See http://www.AbsInt.com
/// </summary>
/// <param name="fileName">path of the file to write</param>
/// <param name="title">title of the generated graph</param>
public void dumpAISee(String fileName, String title)
{
    Debug.Print("Dumping " + title + " to " + fileName);
    // 'using' guarantees the writer is closed even when a dump call throws.
    // The original leaked the writer on error and its catch block rethrew
    // with "throw e", which resets the stack trace; any IOException now
    // simply propagates unchanged.
    using (StreamWriter f = new StreamWriter(fileName))
    {
        f.Write("graph: {\n");
        f.Write("title: \"" + title + "\"\n");
        f.Write("display_edge_labels: yes\n");
        /*
        f.write( "colorentry 32: 25 225 0\n");
        f.write( "colorentry 33: 50 200 0\n");
        f.write( "colorentry 34: 75 175 0\n");
        f.write( "colorentry 35: 100 150 0\n");
        f.write( "colorentry 36: 125 125 0\n");
        f.write( "colorentry 37: 150 100 0\n");
        f.write( "colorentry 38: 175 75 0\n");
        f.write( "colorentry 39: 200 50 0\n");
        f.write( "colorentry 40: 225 25 0\n");
        f.write( "colorentry 41: 250 0 0\n");
        f.write( "color: black\n");
        f.write( "orientation: left_to_right\n");
        f.write( "xspace: 10\n");
        f.write( "yspace: 10\n");
        */
        foreach (Node node in nodes.Values)
        {
            node.dumpAISee(f);
        }
        foreach (Edge edge in edges)
        {
            edge.dumpAISee(f);
        }
        f.Write("}\n");
    }
}
/// <summary>
/// Dumps the lattice in the format understood by Graphviz. See http://graphviz.org
/// </summary>
/// <param name="fileName">path of the file to write</param>
/// <param name="title">title of the generated digraph</param>
public void dumpDot(String fileName, String title)
{
    Debug.Print("Dumping " + title + " to " + fileName);
    // 'using' closes the writer even if node/edge dumping throws; the
    // original catch block only did "throw e" (losing the stack trace)
    // and leaked the writer on error.
    using (StreamWriter f = new StreamWriter(fileName))
    {
        f.Write("digraph \"" + title + "\" {\n");
        f.Write("rankdir = LR\n");
        foreach (Node node in nodes.Values)
        {
            node.dumpDot(f);
        }
        foreach (Edge edge in edges)
        {
            edge.dumpDot(f);
        }
        f.Write("}\n");
    }
}
/// <summary>
/// Dumps the lattice in HTK SLF (Standard Lattice Format) to the given writer.
/// Writes a fixed header, then one "I=" line per node and one "J=" line per
/// edge. The writer is flushed but not closed. NOTE: the header values
/// (base=1.0001, lmscale=9.5) are hard-coded, and the lmscale constant is
/// duplicated in the "l=" computation below — keep them in sync.
/// </summary>
/// <param name="w">destination writer</param>
public void dumpSlf(StreamWriter w)
{
w.Write("VERSION=1.1\n");
w.Write("UTTERANCE=test\n");
w.Write("base=1.0001\n");
w.Write("lmscale=9.5\n");
w.Write("start=0\n");
w.Write("end=1\n");
w.Write("#\n# Size line.\n#\n");
w.Write("NODES="+nodes.Count.ToString()+" LINKS="+this.edges.Count.ToString()+"\n");
// we cannot use the id from sphinx as node id. The id from sphinx may be arbitrarily big.
// Certain tools, such as lattice-tool from srilm, may elect to use an array to hold the nodes,
// which might cause out of memory problem due to huge array.
Dictionary<String, Int32> nodeIdMap=new Dictionary<String, Int32>();
nodeIdMap.Add(initialNode.getId(), 0);
nodeIdMap.Add(terminalNode.getId(), 1);
int count=2;
w.Write("#\n# Nodes definitions.\n#\n");
foreach(Node node in nodes.Values)
{
if (nodeIdMap.ContainsKey(node.getId()))
{
w.Write("I=" + nodeIdMap[node.getId()]);
}
else
{
nodeIdMap.Add(node.getId(), count);
w.Write("I=" + count);
count++;
}
// begin time in seconds (lattice times are in milliseconds)
w.Write(" t="+(node.getBeginTime()*1.0/1000));
String spelling = node.getWord().getSpelling();
// markers such as <s>, </s>, <sil> are emitted as the SLF null word
if (spelling.StartsWith("<"))
spelling = "!NULL";
w.Write(" W=" + spelling);
w.Write("\n");
}
w.Write("#\n# Link definitions.\n#\n");
count=0;
foreach(Edge edge in edges)
{
w.Write("J="+count);
w.Write(" S="+nodeIdMap[edge.getFromNode().getId()]);
w.Write(" E="+nodeIdMap[edge.getToNode().getId()]);
w.Write(" a="+edge.getAcousticScore());
// undo the lmscale applied by the linguist (must match the header value)
w.Write(" l="+edge.getLMScore() / 9.5);
w.Write("\n");
count++;
}
w.Flush();
}
/// <summary>
/// Dumps the lattice as a .LAT file to the given writer: all nodes, then all
/// edges, then the initial/terminal node ids and the log base. The writer is
/// flushed but intentionally not closed (callers own it).
/// </summary>
/// <param name="_out">destination writer</param>
protected void dump(StreamWriter _out)
{
//System.err.println( "Dumping to " + out );
foreach (Node node in nodes.Values)
{
node.dump(_out);
}
foreach (Edge edge in edges)
{
edge.dump(_out);
}
_out.WriteLine("initialNode: " + initialNode.getId());
_out.WriteLine("terminalNode: " + terminalNode.getId());
_out.WriteLine("logBase: " + logMath.getLogBase());
_out.Flush();
}
/// <summary>
/// Dumps the lattice as a .LAT file. Used to save lattices as ASCII files
/// for testing and experimentation.
/// </summary>
/// <param name="file">path of the file to write</param>
public void dump(String file)
{
    // 'using' closes the writer. The original never disposed it (the inner
    // dump only flushes), leaking the file handle, and its catch block
    // merely rethrew the IOException with "throw e".
    using (StreamWriter writer = new StreamWriter(file))
    {
        dump(writer);
    }
}
/// <summary>
/// Removes a node and all edges connected to it, also detaching those edges
/// from the nodes at their other endpoints. Iterating n's own edge lists is
/// safe here because only the opposite endpoint's lists and the global edge
/// list are mutated.
/// </summary>
/// <param name="n">the node to remove</param>
public void removeNodeAndEdges(Node n)
{
//System.err.println("Removing node " + n + " and associated edges");
foreach (Edge e in n.getLeavingEdges())
{
e.getToNode().removeEnteringEdge(e);
//System.err.println( "\tRemoving " + e );
edges.Remove(e);
}
foreach (Edge e in n.getEnteringEdges())
{
e.getFromNode().removeLeavingEdge(e);
//System.err.println( "\tRemoving " + e );
edges.Remove(e);
}
//System.err.println( "\tRemoving " + n );
nodes.Remove(n.getId());
Trace.Assert(checkConsistency());
}
/// <summary>
/// Removes a node and cross-connects all nodes with edges to it.
/// For example, given nodes A, B, X, M, N and edges A-->X, B-->X, X-->M, X-->N,
/// removing and cross-connecting X results in
/// nodes A, B, M, N and edges A-->M, A-->N, B-->M, B-->N.
/// The new edge combines the acoustic/LM scores of the entering edge only
/// (the leaving edge's scores are not added — presumably intentional; verify
/// against callers if scores matter).
/// </summary>
/// <param name="n">the node to remove</param>
protected void removeNodeAndCrossConnectEdges(Node n)
{
Debug.WriteLine("Removing node " + n + " and cross connecting edges");
foreach (Edge ei in n.getEnteringEdges())
{
foreach (Edge ej in n.getLeavingEdges())
{
addEdge(ei.getFromNode(), ej.getToNode(),
ei.getAcousticScore(), ei.getLMScore());
}
}
removeNodeAndEdges(n);
Trace.Assert(checkConsistency());
}
/// <summary>
/// Gets the initial node of this lattice. This usually corresponds to the &lt;s&gt; symbol.
/// </summary>
/// <returns>the initial node</returns>
public Node getInitialNode()
{
return initialNode;
}
/// <summary>
/// Sets the initial node of this lattice. This usually corresponds to the &lt;s&gt; symbol.
/// </summary>
/// <param name="p_initialNode">the node to use as the lattice start</param>
public void setInitialNode(Node p_initialNode)
{
initialNode = p_initialNode;
}
/// <summary>
/// Gets the terminal node of this lattice. This usually corresponds to the &lt;/s&gt; symbol.
/// (The original comment said "initial node" — it returns the terminal node.)
/// </summary>
/// <returns>the terminal node</returns>
public Node getTerminalNode()
{
return terminalNode;
}
/// <summary>
/// Sets the terminal node of this lattice. This usually corresponds to the &lt;/s&gt; symbol.
/// </summary>
/// <param name="p_terminalNode">the node to use as the lattice end</param>
public void setTerminalNode(Node p_terminalNode)
{
terminalNode = p_terminalNode;
}
/// <summary>Writes every path through this lattice to the debug log. Used for debugging.</summary>
public void dumpAllPaths()
{
foreach (String path in allPaths())
{
Debug.WriteLine(path);
}
}
/// <summary>
/// Generates every path through this lattice as a space-separated word string.
/// NOTE: enumerates all paths — exponential in the worst case.
/// </summary>
/// <returns>a list of path strings</returns>
public List<String> allPaths()
{
return allPathsFrom("", initialNode);
}
/// <summary>
/// Recursive helper that collects every path starting at the given node,
/// prefixed with the path accumulated so far.
/// </summary>
/// <param name="path">space-separated words on the path so far</param>
/// <param name="n">node to continue from</param>
/// <returns>all complete path strings reachable from n</returns>
protected List<String> allPathsFrom(String path, Node n)
{
    // extend the running path with this node's word
    String extended = path + ' ' + n.getWord();
    List<String> results = new List<String>();
    if (n != terminalNode)
    {
        // recurse into every successor and gather its completed paths
        foreach (Edge leaving in n.getLeavingEdges())
        {
            results.AddRange(allPathsFrom(extended, leaving.getToNode()));
        }
    }
    else
    {
        // reached the end of the lattice: the accumulated path is complete
        results.Add(extended);
    }
    return results;
}
/// <summary>
/// Verifies the node/edge cross-references of the lattice: every edge held
/// by a node must be in the global edge list, and every edge's endpoints
/// must be registered nodes that link back to the edge. Throws on the first
/// inconsistency; returns true otherwise (so it can be used in Trace.Assert).
/// </summary>
Boolean checkConsistency()
{
foreach (Node n in nodes.Values)
{
foreach (Edge e in n.getEnteringEdges())
{
if (!hasEdge(e))
{
throw new Exception("Lattice has NODE with missing FROM edge: "
+ n + ',' + e);
}
}
foreach (Edge e in n.getLeavingEdges())
{
if (!hasEdge(e))
{
throw new Exception("Lattice has NODE with missing TO edge: " +
n + ',' + e);
}
}
}
foreach (Edge e in edges)
{
if (!hasNode(e.getFromNode()))
{
throw new Exception("Lattice has EDGE with missing FROM node: " +
e);
}
if (!hasNode(e.getToNode()))
{
throw new Exception("Lattice has EDGE with missing TO node: " + e);
}
if (!e.getToNode().hasEdgeFromNode(e.getFromNode()))
{
throw new Exception("Lattice has EDGE with TO node with no corresponding FROM edge: " + e);
}
if (!e.getFromNode().hasEdgeToNode(e.getToNode()))
{
throw new Exception("Lattice has EDGE with FROM node with no corresponding TO edge: " + e);
}
}
return true;
}
/// <summary>
/// Depth-first post-order visit used by sortNodes(): appends n after all of
/// its successors so that reversing the result yields a topological order.
/// </summary>
/// <param name="n">node to visit (must not be null)</param>
/// <param name="sorted">output list, filled in post-order</param>
/// <param name="visited">nodes already visited (NOTE: List.Contains is O(n),
/// so the overall sort is quadratic — acceptable for typical lattice sizes)</param>
protected void sortHelper(Node n, List<Node> sorted, List<Node> visited)
{
    // BUGFIX: validate before touching 'visited' — the original checked for
    // null only after adding n to the visited list.
    if (n == null)
    {
        throw new Exception("Node is null");
    }
    if (visited.Contains(n))
    {
        return;
    }
    visited.Add(n);
    foreach (Edge e in n.getLeavingEdges())
    {
        sortHelper(e.getToNode(), sorted, visited);
    }
    sorted.Add(n);
}
/// <summary>
/// Topologically sorts the nodes in this lattice via depth-first search from
/// the initial node (so only nodes reachable from it appear in the result).
/// </summary>
/// <returns>topologically sorted list of nodes, initial node first</returns>
public List<Node> sortNodes()
{
List<Node> sorted = new List<Node>(nodes.Count);
sortHelper(initialNode, sorted, new List<Node>());
sorted.Reverse();
return sorted;
}
/// <summary>
/// Computes the utterance-level posterior for every node in the lattice,
/// i.e. the probability that the node occurs on any path through the lattice.
/// Convenience overload that uses both acoustic and language scores.
/// Node posteriors can be retrieved by calling getPosterior() on Node objects.
/// </summary>
/// <param name="languageModelWeightAdjustment">multiplier applied to the
/// language score (which is already scaled by the language weight)</param>
public void computeNodePosteriors(float languageModelWeightAdjustment)
{
computeNodePosteriors(languageModelWeightAdjustment, false);
}
/// <summary>
/// Computes the utterance-level posterior for every node in the lattice,
/// i.e. the probability that the node occurs on any path through the
/// lattice. Uses a forward-backward algorithm specific to non-looping
/// left-to-right lattice structures. Node posteriors can be retrieved by
/// calling getPosterior() on Node objects.
/// </summary>
/// <param name="languageModelWeightAdjustment">multiplier applied to the
/// language score (which is already scaled by the language weight)</param>
/// <param name="useAcousticScoresOnly">if true, ignore the language weight
/// and scores and use only acoustic scores for the posteriors</param>
public void computeNodePosteriors(float languageModelWeightAdjustment,
Boolean useAcousticScoresOnly)
{
    if (initialNode == null)
        return;

    // forward pass: accumulate forward (sum) and Viterbi (max) scores in
    // topological order
    initialNode.setForwardScore(LogMath.LOG_ONE);
    initialNode.setViterbiScore(LogMath.LOG_ONE);
    List<Node> sortedNodes = sortNodes();
    Trace.Assert(sortedNodes[0] == initialNode);
    foreach (Node currentNode in sortedNodes)
    {
        foreach (Edge edge in currentNode.getLeavingEdges())
        {
            double forwardProb = edge.getFromNode().getForwardScore();
            double edgeScore = computeEdgeScore
                (edge, languageModelWeightAdjustment, useAcousticScoresOnly);
            forwardProb += edgeScore;
            edge.getToNode().setForwardScore
                (logMath.addAsLinear
                    ((float) forwardProb,
                     (float) edge.getToNode().getForwardScore()));
            double vs = edge.getFromNode().getViterbiScore() +
                edgeScore;
            if (edge.getToNode().getBestPredecessor() == null ||
                vs > edge.getToNode().getViterbiScore())
            {
                edge.getToNode().setBestPredecessor(currentNode);
                edge.getToNode().setViterbiScore(vs);
            }
        }
    }

    // backward pass: walk the sorted nodes from last-but-one down to the first
    terminalNode.setBackwardScore(LogMath.LOG_ONE);
    Trace.Assert(sortedNodes[sortedNodes.Count - 1] == terminalNode);
    int n = sortedNodes.Count - 1;
    while (n > 0)
    {
        Node currentNode = sortedNodes[n - 1];
        List<Edge> currentEdges = currentNode.getLeavingEdges();
        foreach (Edge edge in currentEdges)
        {
            double backwardProb = edge.getToNode().getBackwardScore();
            backwardProb += computeEdgeScore
                (edge, languageModelWeightAdjustment, useAcousticScoresOnly);
            edge.getFromNode().setBackwardScore
                (logMath.addAsLinear((float) backwardProb,
                    (float) edge.getFromNode().getBackwardScore()));
        }
        n--; // BUGFIX: the index was never decremented, so this loop never terminated
    }

    // posterior = forward * backward / total, in log domain, where the total
    // path probability is the forward score collected at the terminal node
    double normalizationFactor = terminalNode.getForwardScore();
    foreach (Node node in nodes.Values)
    {
        node.setPosterior((node.getForwardScore() +
            node.getBackwardScore()) - normalizationFactor);
    }
}
/// <summary>
/// Retrieves the MAP (Viterbi) path from this lattice. Only works after
/// computeNodePosteriors has been called, since it follows the best-predecessor
/// links set there. NOTE(review): if a predecessor chain is broken (null
/// before reaching the initial node) this loop would NRE — assumed unreachable
/// after a successful forward pass; verify if lattices can be disconnected.
/// </summary>
/// <returns>the list of nodes on the MAP path, initial node first</returns>
public List<Node> getViterbiPath()
{
List<Node> path = new List<Node>();
Node n = terminalNode;
while (n != initialNode)
{
path.Insert(0,n); //insert first
n = n.getBestPredecessor();
}
path.Insert(0,initialNode);
return path;
}
/// <summary>
/// Computes the combined score of an edge. The language score is multiplied
/// by the adjustment only, because it was already scaled by the language
/// model weight in the linguist.
/// </summary>
/// <param name="edge">the edge whose score is computed</param>
/// <param name="languageModelWeightAdjustment">multiplier applied to the
/// (already language-weight-scaled) language score</param>
/// <param name="useAcousticScoresOnly">if true, return the acoustic score alone</param>
/// <returns>the score of the edge</returns>
private double computeEdgeScore(Edge edge, float languageModelWeightAdjustment,
Boolean useAcousticScoresOnly) {
    double acoustic = edge.getAcousticScore();
    return useAcousticScoresOnly
        ? acoustic
        : acoustic + edge.getLMScore() * languageModelWeightAdjustment;
}
/// <summary>
/// Returns true if the given lattice is equivalent to this one. Two lattices
/// are equivalent if all their nodes and edges are equivalent, checked by
/// walking both from their initial nodes.
/// </summary>
/// <param name="other">the lattice to compare against</param>
/// <returns>true if the lattices are equivalent; false otherwise</returns>
public Boolean isEquivalent(Lattice other)
{
return checkNodesEquivalent(initialNode, other.getInitialNode());
}
/// <summary>
/// Returns true if the two sub-lattices rooted at the given nodes are
/// equivalent. Recursively matches each leaving edge of n1 against an
/// equivalent leaving edge of n2 (consuming matches from a copied list so
/// each edge is used once), then recurses into the matched successors.
/// </summary>
/// <param name="n1">starting node of the first lattice</param>
/// <param name="n2">starting node of the second lattice</param>
/// <returns>true if the two sub-lattices are equivalent</returns>
private Boolean checkNodesEquivalent(Node n1, Node n2)
{
Trace.Assert(n1 != null && n2 != null);
Boolean equivalent = n1.isEquivalent(n2);
if (equivalent) {
List<Edge> leavingEdges = n1.getCopyOfLeavingEdges();
List<Edge> leavingEdges2 = n2.getCopyOfLeavingEdges();
Debug.WriteLine("# edges: " + leavingEdges.Count.ToString() + " "+
leavingEdges2.Count.ToString());
foreach (Edge edge in leavingEdges)
{
/* find an equivalent edge from n2 for this edge */
Edge e2 = n2.findEquivalentLeavingEdge(edge);
if (e2 == null)
{
Debug.WriteLine("Equivalent edge not found, lattices not equivalent.");
return false;
} else {
if (!leavingEdges2.Remove(e2))
{
/*
/// if it cannot be removed, then the leaving edges
/// are not the same
*/
Debug.WriteLine("Equivalent edge already matched, lattices not equivalent.");
return false;
} else {
/* recursively check the two child nodes */
equivalent &= checkNodesEquivalent
(edge.getToNode(), e2.getToNode());
if (!equivalent) {
return false;
}
}
}
}
if (leavingEdges2.Count!=0)
{
Debug.WriteLine("One lattice has too many edges.");
return false;
}
}
return equivalent;
}
/// <summary>Returns true if the node's word is the silence filler "&lt;sil&gt;".</summary>
Boolean isFillerNode(Node node)
{
return node.getWord().getSpelling().Equals("<sil>");
}
/// <summary>
/// Removes all filler (&lt;sil&gt;) nodes, cross-connecting their neighbors.
/// Iterates over the snapshot returned by sortNodes(), so removing nodes
/// while looping is safe.
/// </summary>
public void removeFillers()
{
foreach (Node node in sortNodes())
{
if (isFillerNode(node))
{
removeNodeAndCrossConnectEdges(node);
Trace.Assert(checkConsistency());
}
}
}
}
}
<|start_filename|>Syn.Speech/Alignment/Item.cs<|end_filename|>
using System.Runtime.InteropServices;
//PATROLLED
using Syn.Speech.Helper;
namespace Syn.Speech.Alignment
{
/// <summary>
/// A node in a Relation of an Utterance (FreeTTS-style item). Items form a
/// doubly linked sibling list (next/prev) with parent/daughter links, and
/// share their payload (ItemContents) with equivalent items in other
/// relations. The pointer manipulation below is order-sensitive; this class
/// documents rather than restructures it.
/// </summary>
public class Item
{
// relation this item belongs to (fixed at construction)
private readonly Relation ownerRelation;
// shared payload: features plus the per-relation item registry
private readonly ItemContents contents;
private Item parent;
private Item daughter;
private Item next;
private Item prev;
/// <summary>
/// Creates an item in the given relation, reusing sharedContents if
/// provided (linking this item to its counterparts in other relations)
/// or creating fresh contents otherwise, and registers the item with the
/// contents under the relation's name.
/// </summary>
public Item(Relation relation, ItemContents sharedContents)
{
ownerRelation = relation;
if (sharedContents != null)
{
contents = sharedContents;
}
else
{
contents = new ItemContents();
}
parent = null;
daughter = null;
next = null;
prev = null;
getSharedContents().addItemRelation(relation.getName(), this);
}
/// <summary>Finds this item's counterpart in the named relation (null if none).</summary>
public virtual Item getItemAs(string relationName)
{
return getSharedContents().getItemRelation(relationName);
}
/// <summary>Returns the relation that owns this item.</summary>
public virtual Relation getOwnerRelation()
{
return ownerRelation;
}
/// <summary>Returns the contents shared with counterparts in other relations.</summary>
public virtual ItemContents getSharedContents()
{
return contents;
}
/// <summary>Returns true if this item has at least one daughter.</summary>
public virtual bool hasDaughters()
{
return daughter != null;
}
/// <summary>Returns the first daughter, or null.</summary>
public virtual Item getDaughter()
{
return daughter;
}
/// <summary>
/// Returns the daughter at the given 0-based index, or null if there are
/// fewer daughters.
/// </summary>
public virtual Item getNthDaughter(int which)
{
Item nthDaughter = daughter;
int count = 0;
while (count++ != which && nthDaughter != null)
{
nthDaughter = nthDaughter.next;
}
return nthDaughter;
}
/// <summary>Returns the last daughter, or null if there are none.</summary>
public virtual Item getLastDaughter()
{
Item lastDaughter = daughter;
if (lastDaughter == null)
{
return null;
}
while (lastDaughter.next != null)
{
lastDaughter = lastDaughter.next;
}
return lastDaughter;
}
/// <summary>
/// Adds a new daughter that shares the given item's contents (or fresh
/// contents if item is null). The new item is appended after the last
/// existing daughter, or becomes the first daughter with its parent set.
/// </summary>
/// <returns>the newly created daughter item</returns>
public virtual Item addDaughter(Item item)
{
Item newItem;
ItemContents itemContents;
Item p = getLastDaughter();
if (p != null)
{
newItem = p.appendItem(item);
}
else
{
if (item == null)
{
itemContents = new ItemContents();
}
else
{
itemContents = item.getSharedContents();
}
newItem = new Item(getOwnerRelation(), itemContents);
newItem.parent = this;
daughter = newItem;
}
return newItem;
}
/// <summary>Creates and adds a new daughter with fresh contents.</summary>
public virtual Item createDaughter()
{
return addDaughter(null);
}
/// <summary>
/// Returns this item's parent. Only the first sibling holds the parent
/// link, so the sibling chain is walked back first.
/// </summary>
public virtual Item getParent()
{
Item item;
for (item = this; item.prev != null; item = item.prev){}
return item.parent;
}
/// <summary>Returns the utterance this item (via its relation) belongs to.</summary>
public virtual Utterance getUtterance()
{
return getOwnerRelation().getUtterance();
}
/// <summary>Returns the feature set stored in the shared contents.</summary>
public virtual FeatureSet getFeatures()
{
return getSharedContents().getFeatures();
}
/// <summary>
/// Evaluates a "path.feature" expression relative to this item: the path
/// part (if any) is resolved with findItem, then the named feature is read
/// from that item. Returns the string "0" when the item or feature is absent.
/// </summary>
public virtual object findFeature(string pathAndFeature)
{
int lastDot;
string feature;
string path;
Item item;
object results = null;
lastDot = pathAndFeature.LastIndexOf(".");
// string can be of the form "p.feature" or just "feature"
if (lastDot == -1)
{
feature = pathAndFeature;
path = null;
}
else
{
feature = pathAndFeature.Substring(lastDot + 1);
path = pathAndFeature.Substring(0, lastDot);
}
item = findItem(path);
if (item != null)
{
results = item.getFeatures().getObject(feature);
}
results = (results == null) ? "0" : results;
// System.out.println("FI " + pathAndFeature + " are " + results);
return results;
}
/// <summary>
/// Walks a colon/dot-separated navigation path from this item. Supported
/// tokens: n (next), p (previous), nn, pp, parent, daughter/daughter1,
/// daughtern (last daughter), and R followed by a relation name (switch
/// relation). A null path returns this item; an unknown token is logged
/// and skipped. May return null if navigation runs off the structure.
/// </summary>
public virtual Item findItem(string path)
{
Item pitem = this;
StringTokenizer tok;
if (path == null) {
return this;
}
tok = new StringTokenizer(path, ":.");
while (pitem != null && tok.hasMoreTokens()) {
string token = tok.nextToken();
if (token.Equals("n")) {
pitem = pitem.getNext();
} else if (token.Equals("p")) {
pitem = pitem.getPrevious();
} else if (token.Equals("nn")) {
pitem = pitem.getNext();
if (pitem != null) {
pitem = pitem.getNext();
}
} else if (token.Equals("pp")) {
pitem = pitem.getPrevious();
if (pitem != null) {
pitem = pitem.getPrevious();
}
} else if (token.Equals("parent")) {
pitem = pitem.getParent();
} else if (token.Equals("daughter") || token.Equals("daughter1")) {
pitem = pitem.getDaughter();
} else if (token.Equals("daughtern")) {
pitem = pitem.getLastDaughter();
} else if (token.Equals("R")) {
string relationName = tok.nextToken();
pitem =
pitem.getSharedContents()
.getItemRelation(relationName);
} else {
this.LoggerInfo("findItem: bad feature " + token + " in " + path);
}
}
return pitem;
}
/// <summary>Returns the next sibling, or null.</summary>
public virtual Item getNext()
{
return next;
}
/// <summary>Returns the previous sibling, or null.</summary>
public virtual Item getPrevious()
{
return prev;
}
/// <summary>
/// Inserts a new item (sharing originalItem's contents, or null contents)
/// immediately after this one, and updates the relation's tail pointer if
/// this item was the tail.
/// </summary>
/// <returns>the newly created item</returns>
public virtual Item appendItem(Item originalItem)
{
ItemContents contents;
Item newItem;
if (originalItem == null)
{
contents = null;
}
else
{
contents = originalItem.getSharedContents();
}
newItem = new Item(getOwnerRelation(), contents);
newItem.next = next;
if (next != null)
{
next.prev = newItem;
}
attach(newItem);
if (ownerRelation.getTail() == this)
{
ownerRelation.setTail(newItem);
}
return newItem;
}
// links obj0 directly after this item (forward and back pointers)
internal virtual void attach([In] Item obj0)
{
next = obj0;
obj0.prev = this;
}
/// <summary>
/// Inserts a new item (sharing originalItem's contents, or null contents)
/// immediately before this one. If this item was the first daughter, the
/// parent link is transferred to the new item; the relation's head pointer
/// is updated if this item was the head.
/// </summary>
/// <returns>the newly created item</returns>
public virtual Item prependItem(Item originalItem)
{
ItemContents contents;
Item newItem;
if (originalItem == null)
{
contents = null;
}
else
{
contents = originalItem.getSharedContents();
}
newItem = new Item(getOwnerRelation(), contents);
newItem.prev = prev;
if (prev != null)
{
prev.next = newItem;
}
newItem.next = this;
prev = newItem;
if (parent != null)
{
parent.daughter = newItem;
newItem.parent = parent;
parent = null;
}
if (ownerRelation.getHead() == this)
{
ownerRelation.setHead(newItem);
}
return newItem;
}
/// <summary>Returns the item's "name" feature, or the empty string.</summary>
public override string ToString()
{
string name = getFeatures().getString("name");
if (name == null)
{
name = "";
}
return name;
}
/// <summary>Returns true if the other item shares this item's contents.</summary>
public virtual bool equalsShared(Item otherItem)
{
if (otherItem == null)
{
return false;
}
else
{
return getSharedContents().Equals(otherItem.getSharedContents());
}
}
}
}
<|start_filename|>Syn.Speech/Helper/Java.cs<|end_filename|>
using System;
using System.CodeDom.Compiler;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
namespace Syn.Speech.Helper
{
/// <summary>
/// Java Extensions
/// </summary>
/// <summary>
/// Extension and helper methods that emulate Java standard-library behavior
/// used by the ported Sphinx code.
/// </summary>
public static class Java
{
    private static readonly DateTime Jan1St1970 = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
    private const int InitialHash = 17; // Prime number
    private const int Multiplier = 23; // Different prime number
    private static int _hashIndex = int.MinValue;
    // shared generator so repeated calls do not reuse the same time-based seed
    private static readonly Random _random = new Random();

    /// <summary>
    /// Emulates Java's Math.random(): a double in [0.0, 1.0).
    /// BUGFIX: previously returned new Random().Next() — an integer in
    /// [0, int.MaxValue) rather than a fraction, and a fresh (often
    /// identically seeded) generator per call.
    /// </summary>
    public static double Random()
    {
        return _random.NextDouble();
    }

    /// <summary>
    /// Combines the hash codes of the given values (similar to Java's
    /// Objects.hash). Null elements contribute 0 instead of throwing.
    /// </summary>
    public static int GetHashCode<T>(params T[] values)
    {
        //unchecked // overflow is fine
        {
            var hash = InitialHash;
            if (values != null)
                for (var i = 0; i < values.Length; i++)
                {
                    var currentValue = values[i];
                    // treat null like Java's Objects.hash does
                    hash = hash * Multiplier + (currentValue == null ? 0 : currentValue.GetHashCode());
                }
            return hash;
        }
    }

    /// <summary>True if the string is null or empty.</summary>
    public static bool IsEmpty(this string source)
    {
        return string.IsNullOrEmpty(source);
    }

    /// <summary>Java-style substring: [start, end) rather than (start, length).</summary>
    public static string JSubString(this string s, int start, int end)
    {
        return s.Substring(start, end - start);
    }

    /// <summary>
    /// Returns a process-unique, monotonically increasing number.
    /// NOTE(review): the increment is not thread-safe — confirm single-threaded use.
    /// </summary>
    public static int GetUniqueNumber()
    {
        var toReturn = _hashIndex;
        _hashIndex++;
        return toReturn;
    }

    /// <summary>Java String.replaceAll: regex-based replacement.</summary>
    public static string ReplaceAll(this string value, string pattern, string replacement)
    {
        string toReturn = value;
        var regex = new Regex(pattern);
        return regex.Replace(toReturn, replacement);
    }

    /// <summary>Java Queue.offer: append to the tail, always succeeds.</summary>
    public static bool Offer<T>(this LinkedList<T> source, T item)
    {
        source.AddLast(item);
        return true;
    }

    /// <summary>Java Queue.poll: remove and return the head, or default when empty.</summary>
    public static T Poll<T>(this LinkedList<T> source)
    {
        if (source.Count == 0)
        {
            return default(T);
        }
        var toReturn = source.First.Value;
        source.RemoveFirst();
        return toReturn;
    }

    /// <summary>
    /// Java Deque.peekLast: the tail value, or default when the list is empty.
    /// BUGFIX: previously dereferenced source.Last unconditionally and threw
    /// NullReferenceException on an empty list, unlike Java's peekLast.
    /// </summary>
    public static T PeekLast<T>(this LinkedList<T> source)
    {
        var last = source.Last;
        return last == null ? default(T) : last.Value;
    }

    /// <summary>Dictionary lookup with a default value for missing keys.</summary>
    public static TV GetProperty<T, TV>(this Dictionary<T, TV> source, T expectedValue, TV defaultValue)
    {
        if (source.ContainsKey(expectedValue))
        {
            return source[expectedValue];
        }
        return defaultValue;
    }

    /// <summary>Java System.getProperty backed by the resource file.</summary>
    public static string GetSystemProperty(string name, string defaultValue)
    {
        var resourceValue = Properties.Resources.ResourceManager.GetString(name);
        if (string.IsNullOrEmpty(resourceValue))
        {
            return defaultValue;
        }
        return resourceValue;
    }

    /// <summary>Java Arrays.copyOfRange for doubles: copies [start, end).</summary>
    public static double[] CopyOfRange(double[] src, int start, int end)
    {
        int len = end - start;
        double[] dest = new double[len];
        // note i is always from 0
        for (int i = 0; i < len; i++)
        {
            dest[i] = src[start + i]; // so 0..n = 0+x..n+x
        }
        return dest;
    }

    /// <summary>Java Arrays.copyOf for doubles.</summary>
    public static double[] CopyOf(double[] src, int length)
    {
        return CopyOfRange(src, 0, length);
    }

    /// <summary>Returns the first custom attribute of type T on the named member, or default.</summary>
    public static T GetField<T>(this Type source, string memberName)
    {
        foreach (var property in source.GetMembers())
        {
            var attribute = property.GetCustomAttributes(typeof(T), false);
            if (property.Name == memberName) return (T)attribute[0];
        }
        return default(T);
    }

    /// <summary>Java List.subList: the elements in [fromRange, toRange).</summary>
    public static IEnumerable<T> SubList<T>(this IEnumerable<T> source, int fromRange, int toRange)
    {
        return source.Skip(fromRange).Take(toRange - fromRange);
    }

    /// <summary>
    /// Java LinkedList.remove(int): removes and returns the element at the
    /// given index. BUGFIX: previously ignored the index and always removed
    /// the first element; it also removed by value, which was wrong for
    /// lists containing duplicates. Walking the node chain fixes both.
    /// </summary>
    public static T Remove<T>(this LinkedList<T> source, int index)
    {
        var node = source.First; // O(n) walk, same as Java's LinkedList
        for (int i = 0; i < index; i++)
        {
            node = node.Next;
        }
        var value = node.Value;
        source.Remove(node);
        return value;
    }

    /// <summary>
    /// Extension for Java's System.currentTimeMillis
    /// </summary>
    /// <returns>milliseconds since the Unix epoch (UTC)</returns>
    public static long CurrentTimeMillis()
    {
        return (long)((DateTime.UtcNow - Jan1St1970).TotalMilliseconds);
    }

    /// <summary>Java List.remove(int): removes and returns the element at index.</summary>
    public static T Remove<T>(this List<T> source, int index)
    {
        var toReturn = source[index];
        source.RemoveAt(index);
        return toReturn;
    }

    /// <summary>Java Map.put: insert or overwrite; silently ignores null keys/values.</summary>
    public static void Put<T, TV>(this Dictionary<T, TV> source, T key, TV value)
    {
        if (key == null || value == null)
        {
            return;
        }
        //TODO: EXTEND NULL KEY TO IMITATE JAVA HASHMAPs
        if (source.ContainsKey(key))
        {
            source[key] = value;
        }
        else { source.Add(key, value); }
    }

    /// <summary>Java Map.put for sorted maps: insert or overwrite; ignores null keys/values.</summary>
    public static void Put<T, V>(SortedDictionary<T, V> source, T key, V value)
    {
        if (key == null || value == null)
        {
            return;
        }
        //TODO: EXTEND NULL KEY TO IMITATE JAVA HASHMAPs
        if (source.ContainsKey(key))
        {
            source[key] = value;
        }
        else { source.Add(key, value); }
    }

    /// <summary>Java Map.get: the value for key, or default when absent (no throw).</summary>
    public static TV Get<T, TV>(this Dictionary<T, TV> source, T key)
    {
        if (source.ContainsKey(key))
        {
            return source[key];
        }
        return default(TV);
    }

    /// <summary>Java Map.get for sorted maps: the value for key, or default when absent.</summary>
    public static V Get<T, V>(this SortedDictionary<T, V> source, T key)
    {
        if (source.ContainsKey(key))
        {
            return source[key];
        }
        return default(V);
    }

    /// <summary>Java List.add for linked lists: append to the tail.</summary>
    public static void Add<T>(this LinkedList<T> source, T value)
    {
        source.AddLast(value);
    }

    /// <summary>Allocates a jagged array of the given element type and dimension lengths.</summary>
    public static T CreateArray<T>(params int[] lengths)
    {
        return (T)InitializeJaggedArray(typeof(T).GetElementType(), 0, lengths);
    }

    // recursively allocates one dimension and fills it with the next
    static object InitializeJaggedArray(Type type, int index, int[] lengths)
    {
        Array array = Array.CreateInstance(type, lengths[index]);
        Type elementType = type.GetElementType();
        if (elementType != null)
        {
            for (int i = 0; i < lengths[index]; i++)
            {
                array.SetValue(
                    InitializeJaggedArray(elementType, index + 1, lengths), i);
            }
        }
        return array;
    }

    /// <summary>Java InputStream.skip via seeking; returns the number of bytes skipped.</summary>
    public static long Skip(this Stream source, long toSkip)
    {
        var currentPosition = source.Position;
        source.Seek(toSkip, SeekOrigin.Current);
        var toReturn = source.Position - currentPosition;
        return toReturn;
    }

    /// <summary>
    /// Java List.listIterator(int): an enumerator advanced past the first
    /// 'index' elements. NOTE: materializes a copy, so later source changes
    /// are not reflected.
    /// </summary>
    public static IEnumerator<T> ListIterator<T>(this IEnumerator<T> source, int index)
    {
        var cloneList = new List<T>();
        while (source.MoveNext())
        {
            cloneList.Add(source.Current);
        }
        var clone = cloneList.GetEnumerator();
        var toReach = 0;
        while (toReach != index)
        {
            clone.MoveNext();
            toReach++;
        }
        return clone;
    }

    /// <summary>Java Collection.addAll for hash sets.</summary>
    public static void AddAll<T>(this HashSet<T> source, IEnumerable<T> values)
    {
        foreach (var item in values)
        {
            source.Add(item);
        }
    }

    /// <summary>Java Collection.addAll for sorted sets.</summary>
    public static void AddAll<T>(this SortedSet<T> source, IEnumerable<T> values)
    {
        foreach (var item in values)
        {
            source.Add(item);
        }
    }

    /// <summary>Java Collection.addAll for linked lists (appends in order).</summary>
    public static void AddAll<T>(this LinkedList<T> source, IEnumerable<T> values)
    {
        foreach (var item in values)
        {
            source.Add(item);
        }
    }

    /// <summary>Java InputStream.read(byte[]): fill the buffer from the stream.</summary>
    public static int Read(this FileStream source, byte[] bytes)
    {
        return source.Read(bytes, 0, bytes.Length);
    }

    /// <summary>Java DataInputStream.readInt: 4 bytes, big-endian.</summary>
    public static int ReadInt(this Stream source)
    {
        int val = source.ReadByte() << 24 | source.ReadByte() << 16 | source.ReadByte() << 8 | source.ReadByte();
        return val;
    }

    /// <summary>Java DataInputStream.readFloat: big-endian IEEE-754 single.</summary>
    public static float ReadFloat(this Stream source)
    {
        int val = ReadInt(source);
        return Float.IntBitsToFloat(val);
    }

    /// <summary>Reinterprets a byte buffer as signed bytes (Java byte semantics).</summary>
    public static sbyte[] ToSignedBytes(this byte[] maindata)
    {
        //return Array.ConvertAll(maindata, b => unchecked((sbyte)b));
        return (sbyte[])(Array)maindata;
    }

    /// <summary>Java List.isEmpty.</summary>
    public static bool IsEmpty<T>(this List<T> source)
    {
        return source.Count == 0;
    }

    /// <summary>Java LinkedList.isEmpty.</summary>
    public static bool IsEmpty<T>(this LinkedList<T> source)
    {
        return source.Count == 0;
    }

    /// <summary>Java List.set: replace the element at index, returning the old value.</summary>
    public static T Set<T>(this List<T> source, int index, T element)
    {
        var toReturn = source[index];
        source[index] = element;
        return toReturn;
    }

    /// <summary>Emulates Java's unsigned right shift (n >>> s) for ints.</summary>
    public static int TripleShift(int n, int s)
    {
        if (n >= 0)
            return n >> s;
        return (n >> s) + (2 << ~s);
    }

    /// <summary>
    /// Splits the specified string using regex pattern.
    /// </summary>
    /// <param name="source">The source.</param>
    /// <param name="pattern">The pattern.</param>
    /// <returns>the regex-split fragments</returns>
    public static string[] Split(this string source, string pattern)
    {
        return Regex.Split(source, pattern);
    }

    /// <summary>Java Throwable.printStackTrace: write the exception to the console.</summary>
    public static void PrintStackTrace(this Exception source)
    {
        Console.WriteLine(source.ToString());
    }

    /// <summary>Java Collections.min with an explicit comparer; default for an empty sequence.</summary>
    public static T Min<T>(IEnumerable<T> values, IComparer<T> comparer)
    {
        bool first = true;
        T result = default(T);
        foreach (T value in values)
        {
            if (first)
            {
                result = value;
                first = false;
            }
            else
            {
                if (comparer.Compare(result, value) > 0)
                {
                    result = value;
                }
            }
        }
        return result;
    }

    /// <summary>True if the character is a valid C# identifier on its own.</summary>
    public static bool IsValidIdentifier(char value)
    {
        // using System.CodeDom.Compiler;
        CodeDomProvider provider = CodeDomProvider.CreateProvider("C#");
        return provider.IsValidIdentifier(value.ToString(CultureInfo.InvariantCulture));
    }

    /// <summary>Java (char) cast of an int code point.</summary>
    public static char ToChar(int value)
    {
        return Convert.ToChar(value);
    }

    /// <summary>Java (int) cast of a char (its UTF-16 code unit).</summary>
    public static int ToInt(char chr)
    {
        return Convert.ToInt32(chr, CultureInfo.InvariantCulture.NumberFormat);
    }

    /// <summary>Java InputStream.reset to an absolute position via seeking.</summary>
    public static void Reset(this Stream source, int position)
    {
        source.Seek(position, SeekOrigin.Begin);
    }
}
}
<|start_filename|>Syn.Speech/FrontEnds/Transform/DiscreateCosineTransform2.cs<|end_filename|>
using System;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.FrontEnd.Transform
{
/// <summary>
/// Applies the optimized MelCosine filter used in pocketsphinx to the given melspectrum.
/// </summary>
/// <summary>
/// Applies the optimized MelCosine filter used in pocketsphinx to the given melspectrum.
/// </summary>
public class DiscreteCosineTransform2 : DiscreteCosineTransform
{
/// <summary>Creates the transform with explicit filter and cepstrum sizes.</summary>
public DiscreteCosineTransform2(int numberMelFilters, int cepstrumSize)
: base(numberMelFilters, cepstrumSize)
{
}
/// <summary>Default constructor for configuration-driven instantiation.</summary>
public DiscreteCosineTransform2()
{
}
/// <summary>Forwards property configuration to the base transform.</summary>
public void newProperties(PropertySheet ps)
{
base.newProperties(ps);
}
/// <summary>
/// Applies the optimized MelCosine filter used in pocketsphinx to the given
/// melspectrum: c0 is the scaled sum of all filters, the remaining
/// coefficients are scaled dot products with the precomputed melcosine table.
/// </summary>
/// <param name="melspectrum">the MelSpectrum data</param>
/// <returns>MelCepstrum data produced by applying the MelCosine filter</returns>
protected double[] applyMelCosine(double[] melspectrum)
{
// create the cepstrum
double[] cepstrum = new double[cepstrumSize];
// DCT-II orthonormalization factors: sqrt(1/N) for c0, sqrt(2/N) for the rest
double sqrt_inv_n = Math.Sqrt(1.0 / numberMelFilters);
double sqrt_inv_2n = Math.Sqrt(2.0 / numberMelFilters);
cepstrum[0] = melspectrum[0];
for (int j = 1; j < numberMelFilters; j++)
{
cepstrum[0] += melspectrum[j];
}
cepstrum[0] *= sqrt_inv_n;
// NOTE(review): this guard sits after cepstrum[0] is already computed,
// so numberMelFilters <= 0 would have failed above — confirm intent.
if (numberMelFilters <= 0)
{
return cepstrum;
}
for (int i = 1; i < cepstrum.Length; i++)
{
double[] melcosine_i = melcosine[i];
int j = 0;
cepstrum[i] = 0;
for (j = 0; j < numberMelFilters; j++)
{
cepstrum[i] += (melspectrum[j] * melcosine_i[j]);
}
cepstrum[i] *= sqrt_inv_2n;
}
return cepstrum;
}
}
}
<|start_filename|>Syn.Speech/Alignment/SimpleWordExpander.cs<|end_filename|>
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
//PATROLLED
using Syn.Speech.Helper;
namespace Syn.Speech.Alignment
{
/// <summary>
/// Word expander that normalizes typographic punctuation, strips remaining
/// punctuation, lowercases the text, and splits it on whitespace.
/// </summary>
public class SimpleWordExpander : IWordExpander
{
    /// <summary>
    /// Expands the given text into its normalized, lowercase word tokens.
    /// </summary>
    /// <param name="text">the raw text to expand</param>
    /// <returns>the list of word tokens</returns>
    public virtual List<string> expand(string text)
    {
        // typographic characters and the plain character each maps to
        char[][] replacements =
        {
            new[] { '’', '\'' },
            new[] { '‘', ' ' },
            new[] { '”', ' ' },
            new[] { '“', ' ' },
            new[] { '»', ' ' },
            new[] { '«', ' ' },
            new[] { '–', '-' },
            new[] { '—', ' ' },
            new[] { '…', ' ' },
        };
        foreach (char[] pair in replacements)
        {
            text = text.Replace(pair[0], pair[1]);
        }
        // free-standing hyphens and common punctuation become separators
        text = text.Replace(" - ", " ");
        text = text.ReplaceAll("[,.?:!;?()/_*%]", " ");
        text = text.ToLower();
        return Arrays.asList(Regex.Split(text, "\\s+"));
    }
}
}
<|start_filename|>Syn.Speech/Results/AbstractSausageMaker.cs<|end_filename|>
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using Syn.Speech.Helper;
using Syn.Speech.Linguist.Dictionary;
using Syn.Speech.Util;
using Syn.Speech.Util.Props;
//PATROLLED
namespace Syn.Speech.Results
{
/**
* Parent to all sausage makers.
*
* @author <NAME>
*/
public abstract class AbstractSausageMaker : IConfidenceScorer, IConfigurable {
/// <summary>
/// A Cluster is a set of Nodes together with their earliest start time and
/// latest end time. A SausageMaker builds up a sequence of such clusters
/// that then gets turned into a Sausage. See Node, Sausage, SausageMaker.
/// </summary>
public class Cluster : IEnumerable<Node> {
public int startTime;
public int endTime;
internal LinkedList<Node> elements = new LinkedList<Node>();
/// <summary>Creates a cluster containing a single node, taking its time span.</summary>
public Cluster(Node n) {
startTime = n.getBeginTime();
endTime = n.getEndTime();
Java.Add(elements,n);
}
/// <summary>Creates an empty cluster with an explicit time span.</summary>
public Cluster(int start, int end) {
startTime = start;
endTime = end;
}
/// <summary>Adds a node, widening the cluster's time span to cover it.</summary>
public void add(Node n) {
if (n.getBeginTime() < startTime) {
startTime = n.getBeginTime();
}
if (n.getEndTime() > endTime) {
endTime = n.getEndTime();
}
Java.Add(elements, n);
}
/// <summary>Merges another cluster's nodes and time span into this one.</summary>
public void add(Cluster c) {
if (c.startTime < startTime) {
startTime = c.startTime;
}
if (c.endTime > endTime) {
endTime = c.endTime;
}
Java.AddAll(elements,c.getElements());
}
/// <summary>Enumerates the nodes in this cluster.</summary>
public IEnumerator<Node> GetEnumerator() {
return elements.GetEnumerator();
}
/// <summary>Debug representation: "s: &lt;start&gt; e: &lt;end&gt;[n1,n2,...]".</summary>
public override String ToString() {
StringBuilder sb = new StringBuilder();
sb.Append("s: ").Append(startTime).Append(" e: ").Append(endTime).Append('[');
foreach (Node node in elements)
sb.Append(node).Append(',');
// drop the trailing comma left by the loop
if (!elements.IsEmpty())
sb.Length= (sb.Length - 1);
sb.Append(']');
return sb.ToString();
}
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
/** @return Returns the elements. */
public LinkedList<Node> getElements() {
return elements;
}
/** @param elements The elements to set. */
public void setElements(LinkedList<Node> elements) {
this.elements = elements;
}
}
internal class ClusterComparator : IComparer<Cluster> {
/// <summary>
/// Compares two clusters according to their topological relationship:
/// a cluster containing an ancestor of a node in the other cluster sorts
/// first. Relies on strong assumptions about the possible constituents of
/// clusters which will only be valid during the sausage creation process;
/// unrelated clusters compare equal (0).
/// </summary>
/// <param name="cluster1">the first cluster</param>
/// <param name="cluster2">the second cluster</param>
public int Compare(Cluster cluster1, Cluster cluster2) {
foreach (Node n1 in cluster1) {
foreach (Node n2 in cluster2) {
if (n1.isAncestorOf(n2)) {
return -1;
} else if (n2.isAncestorOf(n1)) {
return 1;
}
}
}
return 0;
}
}
/** The property that defines the language model weight. */
[S4Double(defaultValue = 1.0)]
public const String PROP_LANGUAGE_WEIGHT = "languageWeight";
protected float languageWeight;
protected Lattice lattice;
public AbstractSausageMaker() {
}
/** @see edu.cmu.sphinx.util.props.Configurable#newProperties(edu.cmu.sphinx.util.props.PropertySheet) */
public void newProperties(PropertySheet ps) {
languageWeight = ps.getFloat(PROP_LANGUAGE_WEIGHT);
}
public abstract IConfidenceResult score(Result result);
protected static int getOverlap(Node n, int startTime, int endTime) {
return Math.Min(n.getEndTime(), endTime) -
Math.Max(n.getBeginTime(), startTime);
}
protected static int getOverlap(Node n1, Node n2) {
return Math.Min(n1.getEndTime(), n2.getEndTime()) -
Math.Max(n1.getBeginTime(), n2.getBeginTime());
}
/**
* Returns true if the two given clusters has time overlaps.
*
* @param cluster1 the first cluster to examine
* @param cluster2 the second cluster to examine
* @return true if the clusters has overlap, false if they don't
*/
protected bool hasOverlap(Cluster cluster1, Cluster cluster2) {
return (cluster1.startTime < cluster2.endTime &&
cluster2.startTime < cluster1.endTime);
}
/**
* Return the total probability mass of the subcluster of nodes of the given cluster that all have the given word as
* their word.
*
* @param cluster the cluster to subcluster from
* @param word the word to subcluster by
* @return the log probability mass of the subcluster formed by the word
*/
protected double wordSubClusterProbability(LinkedList<Node> cluster, String word) {
return clusterProbability(makeWordSubCluster(cluster, word));
}
/**
* Return the total probability mass of the subcluster of nodes of the given cluster that all have the given word as
* their word.
*
* @param cluster the cluster to subcluster from
* @param word the word to subcluster by
* @return the log probability mass of the subcluster formed by the word
*/
protected double wordSubClusterProbability(Cluster cluster, String word) {
return clusterProbability(makeWordSubCluster(cluster, word));
}
/**
* Calculate the sum of posteriors in this cluster.
*
* @param cluster the cluster to sum over
* @return the probability sum
*/
protected double clusterProbability(LinkedList<Node> cluster) {
float p = LogMath.LOG_ZERO;
LogMath logMath = LogMath.getLogMath();
foreach (Node node in cluster)
p = logMath.addAsLinear(p, (float)node.getPosterior());
return p;
}
/**
* Calculate the sum of posteriors in this cluster.
*
* @param cluster the cluster to sum over
* @return the probability sum
*/
protected double clusterProbability(Cluster cluster) {
return clusterProbability(cluster.elements);
}
/**
* Form a subcluster by extracting all nodes corresponding to a given word.
*
* @param cluster the parent cluster
* @param word the word to cluster by
* @return the subcluster.
*/
protected LinkedList<Node> makeWordSubCluster(LinkedList<Node> cluster, String word)
{
var sub = new LinkedList<Node>();
foreach (Node n in cluster) {
if (n.getWord().getSpelling().Equals(word)) {
Java.Add(sub,n);
}
}
return sub;
}
/**
* Form a subcluster by extracting all nodes corresponding to a given word.
*
* @param cluster the parent cluster
* @param word the word to cluster by
* @return the subcluster.
*/
protected Cluster makeWordSubCluster(Cluster cluster, String word) {
var l = makeWordSubCluster(cluster.elements, word);
Cluster c = new Cluster(cluster.startTime, cluster.endTime);
c.elements = l;
return c;
}
/**
* print out a list of clusters for debugging
*
* @param clusters
*/
protected void printClusters(List<Cluster> clusters)
{
var i = clusters.GetEnumerator();
int j = 0;
while (i.MoveNext())
{
j++;
Console.WriteLine("----cluster " + j + " : ");
Console.WriteLine(i.Current);
}
Console.WriteLine("----");
}
/**
* Turn a list of lattice node clusters into a Sausage object.
*
* @param clusters the list of node clusters in topologically correct order
* @return the Sausage corresponding to the cluster list
*/
protected Sausage sausageFromClusters(List<Cluster> clusters) {
Sausage sausage = new Sausage(clusters.Count);
int index = 0;
foreach (Cluster cluster in clusters) {
HashSet<String> seenWords = new HashSet<String>();
foreach (Node node in cluster) {
Word word = node.getWord();
if (seenWords.Contains(word.getSpelling())) {
continue;
}
seenWords.Add(word.getSpelling());
WordResult swr =
new WordResult(
node,
wordSubClusterProbability(
cluster, word.getSpelling()));
sausage.addWordHypothesis(index, swr);
}
index++;
}
sausage.fillInBlanks();
return sausage;
}
}
}
<|start_filename|>Syn.Speech/Decoder/Scorer/ThreadedAcousticScorer.cs<|end_filename|>
//using System;
//using System.Collections.Generic;
//using System.Diagnostics;
//using System.Threading;
//using Syn.Speech.Common.FrontEnd;
//using Syn.Speech.FrontEnd;
//using Syn.Speech.Util.Props;
//namespace Syn.Speech.Decoder.Scorer
//{
// /// <summary>
// /// An acoustic scorer that breaks the scoring up into a configurable number of separate threads.
// ///
// /// All scores are maintained in LogMath log base
// /// </summary>
// public class ThreadedAcousticScorer : SimpleAcousticScorer
// {
// /**
// /// The property that controls the thread priority of scoring threads.
// /// Must be a value between {@link Thread#MIN_PRIORITY} and {@link Thread#MAX_PRIORITY}, inclusive.
// /// The default is {@link Thread#NORM_PRIORITY}.
// */
// [S4Integer(defaultValue = (int)ThreadPriority.Normal)]
// public static string PROP_THREAD_PRIORITY = "threadPriority";
// /**
// /// The property that controls the number of threads that are used to score HMM states. If the isCpuRelative
// /// property is false, then is is the exact number of threads that are used to score HMM states. If the isCpuRelative
// /// property is true, then this value is combined with the number of available processors on the system. If you want
// /// to have one thread per CPU available to score states, set the NUM_THREADS property to 0 and the isCpuRelative to
// /// true. If you want exactly one thread to process scores set NUM_THREADS to 1 and isCpuRelative to false.
// /// <p/>
// /// If the value is 1 isCpuRelative is false no additional thread will be instantiated, and all computation will be
// /// done in the calling thread itself. The default value is 0.
// */
// [S4Integer(defaultValue = 0)]
// public static string PROP_NUM_THREADS = "numThreads";
// /**
// /// The property that controls whether the number of available CPUs on the system is used when determining
// /// the number of threads to use for scoring. If true, the NUM_THREADS property is combined with the available number
// /// of CPUS to determine the number of threads. Note that the number of threads is contained to be never lower than
// /// zero. Also, if the number of threads is 0, the states are scored on the calling thread, no separate threads are
// /// started. The default value is false.
// */
// [S4Boolean(defaultValue = true)]
// public static string PROP_IS_CPU_RELATIVE = "isCpuRelative";
// /**
// /// The property that controls the minimum number of scoreables sent to a thread. This is used to prevent
// /// over threading of the scoring that could happen if the number of threads is high compared to the size of the
// /// active list. The default is 50
// */
// [S4Integer(defaultValue = 10)]
// public static string PROP_MIN_SCOREABLES_PER_THREAD = "minScoreablesPerThread";
// private static string className = typeof(ThreadedAcousticScorer).Name;
// private int numThreads; // number of threads in use
// private int threadPriority;
// private int minScoreablesPerThread; // min scoreables sent to a thread
// private ExecutorService executorService;
// /**
// /// @param frontEnd
// /// the frontend to retrieve features from for scoring
// /// @param scoreNormalizer
// /// optional post-processor for computed scores that will
// /// normalize scores. If not set, no normalization will applied
// /// and the token scores will be returned unchanged.
// /// @param minScoreablesPerThread
// /// the number of threads that are used to score HMM states. If
// /// the isCpuRelative property is false, then is is the exact
// /// number of threads that are used to score HMM states. If the
// /// isCpuRelative property is true, then this value is combined
// /// with the number of available processors on the system. If you
// /// want to have one thread per CPU available to score states, set
// /// the NUM_THREADS property to 0 and the isCpuRelative to true.
// /// If you want exactly one thread to process scores set
// /// NUM_THREADS to 1 and isCpuRelative to false.
// /// <p/>
// /// If the value is 1 isCpuRelative is false no additional thread
// /// will be instantiated, and all computation will be done in the
// /// calling thread itself. The default value is 0.
// /// @param cpuRelative
// /// controls whether the number of available CPUs on the system is
// /// used when determining the number of threads to use for
// /// scoring. If true, the NUM_THREADS property is combined with
// /// the available number of CPUS to determine the number of
// /// threads. Note that the number of threads is constrained to be
// /// never lower than zero. Also, if the number of threads is 0,
// /// the states are scored on the calling thread, no separate
// /// threads are started. The default value is false.
// /// @param numThreads
// /// the minimum number of scoreables sent to a thread. This is
// /// used to prevent over threading of the scoring that could
// /// happen if the number of threads is high compared to the size
// /// of the active list. The default is 50
// /// @param threadPriority
// /// the thread priority of scoring threads. Must be a value between
// /// {@link Thread#MIN_PRIORITY} and {@link Thread#MAX_PRIORITY}, inclusive.
// /// The default is {@link Thread#NORM_PRIORITY}.
// */
// public ThreadedAcousticScorer(BaseDataProcessor frontEnd, IScoreNormalizer scoreNormalizer,
// int minScoreablesPerThread, Boolean cpuRelative, int numThreads, int threadPriority)
// :base(frontEnd, scoreNormalizer)
// {
// init(minScoreablesPerThread, cpuRelative, numThreads, threadPriority);
// }
// public ThreadedAcousticScorer()
// {
// }
// override
// public void newProperties(PropertySheet ps)
// {
// base.newProperties(ps);
// init(ps.getInt(PROP_MIN_SCOREABLES_PER_THREAD),
// ps.getBoolean(PROP_IS_CPU_RELATIVE),
// ps.getInt(PROP_NUM_THREADS),
// ps.getInt(PROP_THREAD_PRIORITY));
// }
// private void init(int minScoreablesPerThread, Boolean cpuRelative, int numThreads, int threadPriority)
// {
// this.minScoreablesPerThread = minScoreablesPerThread;
// if (cpuRelative)
// {
// numThreads += Environment.ProcessorCount;
// }
// this.numThreads = numThreads;
// this.threadPriority = threadPriority;
// }
// public void allocate() {
// base.allocate();
// if (executorService == null)
// {
// if (numThreads > 1)
// {
// Trace.WriteLine("# of scoring threads: " + numThreads);
// ThreadPool.QueueUserWorkItem(new WaitCallback())
// executorService = Executors.newFixedThreadPool(numThreads, new CustomThreadFactory(className, true, threadPriority));
// }
// else
// {
// Trace.WriteLine("no scoring threads");
// }
// }
// }
// public void deallocate() {
// base.deallocate();
// if (executorService != null)
// {
// executorService.shutdown();
// executorService = null;
// }
// }
// protected internal virtual T doScoring<T>(List<T> scoreableList, IData data) where T : IScoreable
// {
// if (numThreads > 1)
// {
// int totalSize = scoreableList.Count;
// int jobSize = Math.Max((totalSize + numThreads - 1) / numThreads, minScoreablesPerThread);
// if (jobSize < totalSize)
// {
// List<Callable<T>> tasks = new List<Callable<T>>();
// for (int from = 0, to = jobSize; from < totalSize; from = to, to += jobSize)
// {
// List<T> scoringJob = scoreableList.GetRange(@from, Math.Min(to, totalSize));
// tasks.Add(new Callable<T>()
// {
// public T call()
// {
// return ThreadedAcousticScorer.super.doScoring(scoringJob, data);
// }
// }
// );
// }
// List<T> finalists = new List<T>(tasks.Count);
// foreach (Future<T> result in executorService.invokeAll(tasks))
// finalists.Add(result.get());
// if (finalists.Count == 0)
// {
// throw new DataProcessingException("No scoring jobs ended");
// }
// return Collections.min(finalists, Scoreable.COMPARATOR);
// }
// }
// // if no additional threads are necessary, do the scoring in the calling thread
// return base.doScoring(scoreableList, data);
// }
// }
//}
<|start_filename|>Syn.Speech/Decoder/Scorer/IScoreProvider.cs<|end_filename|>
using Syn.Speech.Common.FrontEnd;
//PATROLLED
namespace Syn.Speech.Decoder.Scorer
{
/// <summary>
/// Thing that can provide the score.
/// NOTE(review): the file is named IScoreProvider.cs but the interface lacks the I-prefix used by
/// the project's other interfaces (IData, IScoreable); renaming would break implementers — confirm intent.
/// </summary>
public interface ScoreProvider
{
    /// <summary>
    /// Provides the score for the given data frame.
    /// </summary>
    /// <param name="data">The data.</param>
    /// <returns>the score</returns>
    float getScore(IData data);

    /// <summary>
    /// Provides the per-component scores for the given feature.
    /// </summary>
    /// <param name="feature">The feature.</param>
    /// <returns>the score</returns>
    float[] getComponentScore(IData feature);
}
}
| SynHub/syn-speech |
<|start_filename|>ArchUnitNET/Loader/MonoCecilMemberExtensions.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using ArchUnitNET.Domain;
using ArchUnitNET.Domain.Exceptions;
using ArchUnitNET.Domain.Extensions;
using JetBrains.Annotations;
using Mono.Cecil;
using Mono.Cecil.Cil;
using static ArchUnitNET.Domain.Visibility;
namespace ArchUnitNET.Loader
{
internal static class MonoCecilMemberExtensions
{
    // IL opcodes whose operand is a TypeReference that should count as a body-level type dependency.
    private static readonly OpCode[] BodyTypeOpCodes =
    {
        OpCodes.Box, OpCodes.Newarr, OpCodes.Initobj, OpCodes.Unbox, OpCodes.Unbox_Any, OpCodes.Ldelem_Any,
        OpCodes.Ldobj, OpCodes.Stelem_Any, OpCodes.Ldelema, OpCodes.Stobj
    }; //maybe not complete

    /// <summary>
    ///     Builds a full name for the method that also encodes its generic parameter names,
    ///     so generic overloads do not collide on Cecil's FullName alone.
    /// </summary>
    internal static string BuildFullName(this MethodReference methodReference)
    {
        return methodReference.FullName + methodReference.GenericParameters.Aggregate(string.Empty,
            (current, newElement) => current + "<" + newElement.Name + ">");
    }

    /// <summary>
    ///     Builds a member name of the form "Name(ParamType1,ParamType2,...)" from the method's
    ///     parameter type full names. Sentinel parameters (varargs) are prefixed with "...,".
    /// </summary>
    [NotNull]
    internal static string BuildMethodMemberName(this MethodReference methodReference)
    {
        var builder = new StringBuilder();

        builder.Append(methodReference.Name);
        builder.Append("(");

        if (methodReference.HasParameters)
        {
            var parameters = methodReference.Parameters;
            for (var index = 0; index < parameters.Count; ++index)
            {
                var parameterDefinition = parameters[index];
                if (index > 0)
                {
                    builder.Append(",");
                }

                if (parameterDefinition.ParameterType.IsSentinel)
                {
                    builder.Append("...,");
                }

                builder.Append(parameterDefinition.ParameterType.FullName);
            }
        }

        builder.Append(")");
        return builder.ToString();
    }

    /// <summary>
    ///     Collects the method's own attributes plus those on its parameters and on its return type.
    /// </summary>
    [NotNull]
    internal static IEnumerable<CustomAttribute> GetAllMethodCustomAttributes(
        this MethodDefinition methodDefinition)
    {
        return methodDefinition.CustomAttributes
            .Concat(methodDefinition.Parameters.SelectMany(parameterDefinition =>
                parameterDefinition.CustomAttributes))
            .Concat(methodDefinition.MethodReturnType.CustomAttributes);
    }

    /// <summary>
    ///     Returns all types appearing in the method signature: the return type (first, when not void)
    ///     followed by parameter and generic parameter types.
    /// </summary>
    [NotNull]
    internal static IEnumerable<ITypeInstance<IType>> GetSignatureTypes(this MethodReference methodReference,
        TypeFactory typeFactory)
    {
        var parameters = GetAllParameters(methodReference, typeFactory).ToList();
        var returnType = GetReturnType(methodReference, typeFactory);
        if (returnType != null)
        {
            parameters.Insert(0, returnType);
        }

        return parameters;
    }

    /// <summary>Stub type instance for the return type, or null when the method returns void.</summary>
    private static ITypeInstance<IType> GetReturnType(this MethodReference methodReference, TypeFactory typeFactory)
    {
        return ReturnsVoid(methodReference)
            ? null
            : typeFactory.GetOrCreateStubTypeInstanceFromTypeReference(methodReference.MethodReturnType.ReturnType);
    }

    /// <summary>Ordinary parameter types followed by generic parameter element types.</summary>
    [NotNull]
    private static IEnumerable<ITypeInstance<IType>> GetAllParameters(this MethodReference methodReference,
        TypeFactory typeFactory)
    {
        var parameters = methodReference.GetParameters(typeFactory).ToList();
        var genericParameters = methodReference.GetGenericParameters(typeFactory).ToList();
        parameters.AddRange(genericParameters);
        return parameters;
    }

    /// <summary>Distinct stub type instances for the method's declared parameter types.</summary>
    [NotNull]
    internal static IEnumerable<ITypeInstance<IType>> GetParameters(this MethodReference method,
        TypeFactory typeFactory)
    {
        return method.Parameters.Select(parameter =>
        {
            var typeReference = parameter.ParameterType;
            return typeFactory.GetOrCreateStubTypeInstanceFromTypeReference(typeReference);
        }).Distinct();
    }

    /// <summary>Distinct stub type instances for the element types of the method's generic parameters.</summary>
    [NotNull]
    private static IEnumerable<ITypeInstance<IType>> GetGenericParameters(this MethodReference method,
        TypeFactory typeFactory)
    {
        return method.GenericParameters.Select(parameter =>
        {
            var typeReference = parameter.GetElementType();
            return typeFactory.GetOrCreateStubTypeInstanceFromTypeReference(typeReference);
        }).Distinct();
    }

    /// <summary>
    ///     Types the method body depends on: TypeReference operands of the opcodes in BodyTypeOpCodes,
    ///     plus the types of all local variables. The result is de-duplicated with Distinct().
    /// </summary>
    [NotNull]
    internal static IEnumerable<ITypeInstance<IType>> GetBodyTypes(this MethodDefinition methodDefinition,
        TypeFactory typeFactory)
    {
        var instructions = methodDefinition.Body?.Instructions ?? Enumerable.Empty<Instruction>();

        var bodyTypes = instructions
            .Where(inst => BodyTypeOpCodes.Contains(inst.OpCode) && inst.Operand is TypeReference)
            .Select(inst => typeFactory.GetOrCreateStubTypeInstanceFromTypeReference((TypeReference) inst.Operand));

        //OpCodes.Ldstr should create a dependency to string, but it does not have a TypeReference as Operand so no Type can be created

        bodyTypes = bodyTypes.Union(methodDefinition.Body?.Variables.Select(variableDefinition =>
        {
            var variableTypeReference = variableDefinition.VariableType;
            return typeFactory.GetOrCreateStubTypeInstanceFromTypeReference(variableTypeReference);
        }) ?? Enumerable.Empty<TypeInstance<IType>>()).Distinct();

        return bodyTypes;
    }

    /// <summary>Types the method casts to (castclass instructions).</summary>
    [NotNull]
    internal static IEnumerable<ITypeInstance<IType>> GetCastTypes(this MethodDefinition methodDefinition,
        TypeFactory typeFactory)
    {
        var instructions = methodDefinition.Body?.Instructions ?? Enumerable.Empty<Instruction>();

        return instructions.Where(inst => inst.OpCode == OpCodes.Castclass && inst.Operand is TypeReference)
            .Select(inst => typeFactory.GetOrCreateStubTypeInstanceFromTypeReference((TypeReference) inst.Operand));
    }

    /// <summary>Types the method loads metadata tokens for (ldtoken, e.g. typeof(...)).</summary>
    [NotNull]
    internal static IEnumerable<ITypeInstance<IType>> GetMetaDataTypes(this MethodDefinition methodDefinition,
        TypeFactory typeFactory)
    {
        var instructions = methodDefinition.Body?.Instructions ?? Enumerable.Empty<Instruction>();

        return instructions.Where(inst => inst.OpCode == OpCodes.Ldtoken && inst.Operand is TypeReference)
            .Select(inst => typeFactory.GetOrCreateStubTypeInstanceFromTypeReference((TypeReference) inst.Operand));
    }

    /// <summary>Types the method tests against (isinst, e.g. the "is" operator).</summary>
    [NotNull]
    internal static IEnumerable<ITypeInstance<IType>> GetTypeCheckTypes(this MethodDefinition methodDefinition,
        TypeFactory typeFactory)
    {
        var instructions = methodDefinition.Body?.Instructions ?? Enumerable.Empty<Instruction>();

        return instructions.Where(inst => inst.OpCode == OpCodes.Isinst && inst.Operand is TypeReference)
            .Select(inst => typeFactory.GetOrCreateStubTypeInstanceFromTypeReference((TypeReference) inst.Operand));
    }

    /// <summary>True when the compiler turned this method into an iterator state machine (yield).</summary>
    internal static bool IsIterator(this MethodDefinition methodDefinition)
    {
        return methodDefinition.CustomAttributes.Any(att => att.AttributeType.FullName == typeof(System.Runtime
            .CompilerServices.IteratorStateMachineAttribute).FullName);
    }

    /// <summary>True when the compiler turned this method into an async state machine (async/await).</summary>
    internal static bool IsAsync(this MethodDefinition methodDefinition)
    {
        return methodDefinition.CustomAttributes.Any(att => att.AttributeType.FullName == typeof(System.Runtime
            .CompilerServices.AsyncStateMachineAttribute).FullName);
    }

    /// <summary>
    ///     Resolves every FieldReference operand in the method body to a FieldMember of the declaring type,
    ///     matching by field name only. Creates a stub member when the type has no matching field; throws
    ///     MultipleOccurrencesInSequenceException when more than one field of that name is found
    ///     (presumably impossible for valid IL — TODO confirm).
    /// </summary>
    [NotNull]
    internal static IEnumerable<FieldMember> GetAccessedFieldMembers(this MethodDefinition methodDefinition,
        TypeFactory typeFactory)
    {
        var accessedFieldMembers = new List<FieldMember>();
        var instructions = methodDefinition.Body?.Instructions.ToList() ?? new List<Instruction>();
        var accessedFieldReferences =
            instructions.Select(inst => inst.Operand).OfType<FieldReference>().Distinct();

        foreach (var fieldReference in accessedFieldReferences)
        {
            var declaringType =
                typeFactory.GetOrCreateStubTypeInstanceFromTypeReference(fieldReference.DeclaringType);
            var matchingFieldMembers = declaringType.Type.GetFieldMembers()
                .Where(member => member.Name == fieldReference.Name).ToList();

            switch (matchingFieldMembers.Count)
            {
                case 0:
                    var stubFieldMember =
                        typeFactory.CreateStubFieldMemberFromFieldReference(declaringType.Type, fieldReference);
                    accessedFieldMembers.Add(stubFieldMember);
                    break;
                case 1:
                    accessedFieldMembers.Add(matchingFieldMembers.First());
                    break;
                default:
                    throw new MultipleOccurrencesInSequenceException(
                        $"Multiple Fields matching {fieldReference.FullName} found in provided type.");
            }
        }

        return accessedFieldMembers.Distinct();
    }

    /// <summary>
    ///     True when the member's declaring type or the member itself carries a compiler-generated name.
    ///     Note on precedence: the conditional-AND binds tighter than the OR, so the null check guards only
    ///     the declaring-type test; the member's own name is always checked as a fallback.
    ///     Resolve() is attempted first so the resolved definition's declaring type wins when available.
    /// </summary>
    internal static bool IsCompilerGenerated(this MemberReference memberReference)
    {
        var declaringType = memberReference.Resolve()?.DeclaringType ?? memberReference.DeclaringType;
        return declaringType != null && declaringType.Name.HasCompilerGeneratedName() ||
               memberReference.Name.HasCompilerGeneratedName();
    }

    /// <summary>Compiler-generated identifiers start with '&lt;' (e.g. backing fields) or '!'.</summary>
    internal static bool HasCompilerGeneratedName(this string name)
    {
        return name.StartsWith("<") || name.StartsWith("!");
    }

    /// <summary>Classifies the method as constructor, getter, setter, or normal method.</summary>
    internal static MethodForm GetMethodForm(this MethodDefinition methodDefinition)
    {
        if (methodDefinition.IsConstructor)
        {
            return MethodForm.Constructor;
        }

        if (methodDefinition.IsGetter)
        {
            return MethodForm.Getter;
        }

        return methodDefinition.IsSetter ? MethodForm.Setter : MethodForm.Normal;
    }

    /// <summary>True when the return type is System.Void (compared by full name).</summary>
    private static bool ReturnsVoid(this IMethodSignature methodSignature)
    {
        return methodSignature.MethodReturnType.ReturnType.FullName.Equals("System.Void");
    }

    /// <summary>True for instance (.ctor) and static (.cctor) constructor names.</summary>
    internal static bool HasConstructorName(this MethodReference methodReference)
    {
        return methodReference.Name == ".ctor" || methodReference.Name == ".cctor";
    }

    /// <summary>True when the field is a compiler-generated auto-property backing field.</summary>
    internal static bool IsBackingField(this FieldReference fieldReference)
    {
        return fieldReference.FullName.Contains(StaticConstants.BackingField);
    }

    /// <summary>
    ///     Maps Cecil accessibility flags onto the domain Visibility enum; a null definition
    ///     (unresolvable reference) is reported as NotAccessible.
    /// </summary>
    internal static Visibility GetVisibility([CanBeNull] this MethodDefinition methodDefinition)
    {
        if (methodDefinition == null)
        {
            return NotAccessible;
        }

        if (methodDefinition.IsPublic)
        {
            return Public;
        }

        if (methodDefinition.IsPrivate)
        {
            return Private;
        }

        if (methodDefinition.IsFamily)
        {
            return Protected;
        }

        if (methodDefinition.IsAssembly)
        {
            return Internal;
        }

        if (methodDefinition.IsFamilyOrAssembly)
        {
            return ProtectedInternal;
        }

        if (methodDefinition.IsFamilyAndAssembly)
        {
            return PrivateProtected;
        }

        throw new ArgumentException("The method definition seems to have no visibility.");
    }
}
}
<|start_filename|>ArchUnitNETTests/Domain/GenericClassTests.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using System;
using System.Collections.Generic;
using System.Linq;
using ArchUnitNET.Domain;
using ArchUnitNET.Domain.Extensions;
using ArchUnitNET.Loader;
using Xunit;
using static ArchUnitNET.Domain.Visibility;
using Type = ArchUnitNET.Loader.Type;
namespace ArchUnitNETTests.Domain
{
/// <summary>
///     Verifies that the loader resolves generic type parameters and generic type arguments
///     for a field whose type is a closed generic class.
/// </summary>
public class GenericClassTests
{
    private const string GuidClassName = "Guid";
    private const string SystemGuidFullName = StaticConstants.SystemNamespace + "." + GuidClassName;

    private static readonly Architecture Architecture =
        new ArchLoader().LoadAssembly(typeof(GenericClassTests).Assembly).Build();

    private readonly Class _classWithGenericParameters;
    private readonly IType _expectedGenericArgument;
    private readonly FieldMember _genericallyTypedField;

    public GenericClassTests()
    {
        _classWithGenericParameters = Architecture.GetClassOfType(typeof(ClassWithGenericParameters<>));

        // The field under test lives on InvokesGenericClass and is typed ClassWithGenericParameters<Guid>.
        var invokerClass = Architecture.GetClassOfType(typeof(InvokesGenericClass));
        var fieldCandidates =
            invokerClass.GetFieldMembersWithName(nameof(InvokesGenericClass.GuidGenericArgument));
        _genericallyTypedField = fieldCandidates.SingleOrDefault();

        // Hand-built stand-in for System.Guid, wrapped as a Struct, to compare against
        // the generic argument the loader produced.
        var systemNamespace = new Namespace(StaticConstants.SystemNamespace, new List<IType>());
        var guidStub = new Type(SystemGuidFullName, GuidClassName,
            _classWithGenericParameters.Assembly,
            systemNamespace, Public, false, false, true, false);
        _expectedGenericArgument = new Struct(guidStub);
    }

    [Fact]
    public void GenericTypeArgumentsAsExpected()
    {
        var firstArgument = _genericallyTypedField.GenericArguments.First();
        var argumentType = firstArgument.Type;
        Assert.NotNull(argumentType);
        Assert.Equal(_expectedGenericArgument, argumentType);
    }

    [Fact]
    public void GenericTypeArgumentsFound()
    {
        Assert.Single(_genericallyTypedField.GenericArguments);
    }

    [Fact]
    public void GenericTypeAsExpected()
    {
        Assert.Equal(_classWithGenericParameters, _genericallyTypedField.Type);
    }

    [Fact]
    public void GenericTypeParametersFound()
    {
        var typeParameters = _classWithGenericParameters.GenericParameters;
        Assert.NotEmpty(typeParameters);
        Assert.Single(typeParameters);
    }
}
/// <summary>
///     Test fixture: a generic class with one type parameter, used by GenericClassTests.
/// </summary>
public class ClassWithGenericParameters<T>
{
    // Intentionally empty: exists only so the type parameter appears in a method signature.
    public void Add(T item)
    {
    }
}
/// <summary>
///     Test fixture: holds a field of the closed generic type ClassWithGenericParameters&lt;Guid&gt;,
///     giving GenericClassTests a concrete generic argument (Guid) to resolve.
/// </summary>
public class InvokesGenericClass
{
    public ClassWithGenericParameters<Guid> GuidGenericArgument = new ClassWithGenericParameters<Guid>();
}
}
<|start_filename|>ArchUnitNETTests/Domain/TypeTests.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using System.Linq;
using ArchUnitNET.Domain;
using Xunit;
using static ArchUnitNET.Domain.Visibility;
namespace ArchUnitNETTests.Domain
{
/// <summary>
///     Sanity checks on the Architecture type model: every type belongs to exactly the four
///     known kinds, all types carry a visibility, and enums/structs map to their domain types.
/// </summary>
public class TypeTests
{
    private static readonly Architecture Architecture = StaticTestArchitectures.ArchUnitNETTestArchitecture;

    [Fact]
    public void TypesAreClassesAndInterfacesAndStructsAndEnums()
    {
        var allTypes = Architecture.Types.ToList();
        var classes = Architecture.Classes;
        var interfaces = Architecture.Interfaces;
        var structs = Architecture.Structs;
        var enums = Architecture.Enums;

        // Every type must appear in at least one of the kind-specific collections...
        var everyTypeHasAKind = allTypes.All(type =>
            classes.Contains(type) || interfaces.Contains(type) ||
            structs.Contains(type) || enums.Contains(type));
        Assert.True(everyTypeHasAKind);

        // ...and every kind-specific collection must be a subset of the overall type list.
        Assert.True(classes.All(allTypes.Contains));
        Assert.True(interfaces.All(allTypes.Contains));
        Assert.True(structs.All(allTypes.Contains));
        Assert.True(enums.All(allTypes.Contains));
    }

    [Fact]
    public void TypesMustHaveVisibility()
    {
        // NotAccessible is the loader's "unknown" marker; no loaded type may keep it.
        foreach (var type in Architecture.Types)
        {
            Assert.True(type.Visibility != NotAccessible);
        }
    }

    [Fact]
    public void AssignEnumsCorrectly()
    {
        var loadedEnum = StaticTestTypes.TestEnum;
        Assert.True(loadedEnum is Enum);
    }

    [Fact]
    public void AssignStructsCorrectly()
    {
        var loadedStruct = StaticTestTypes.TestStruct;
        Assert.True(loadedStruct is Struct);
    }
}
}
<|start_filename|>ArchUnitNETTests/Domain/ClassTests.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using ArchUnitNET.Domain;
using ArchUnitNET.Domain.Extensions;
using ArchUnitNET.Loader;
using ArchUnitNETTests.Domain.Dependencies.Members;
using JetBrains.Annotations;
using Xunit;
using static ArchUnitNET.Domain.Visibility;
using static ArchUnitNETTests.Domain.StaticTestTypes;
namespace ArchUnitNETTests.Domain
{
/// <summary>
///     Tests Class equality, visibility/nesting/sealed flags, assignability, and
///     dependency inheritance on the statically loaded test architecture.
/// </summary>
public class ClassTests
{
    private static readonly Architecture Architecture = StaticTestArchitectures.ArchUnitNETTestArchitecture;
    private readonly Class _baseClass;
    private readonly Interface _chainedInterface;
    private readonly Class _childClass;
    private readonly ClassEquivalencyTestData _classEquivalencyTestData;
    private readonly Class _duplicateChildClass;
    private readonly Interface _implementedInterface;
    private readonly Class _implementsInterface;
    private readonly Type _misMatchType;

    public ClassTests()
    {
        _baseClass = Architecture.GetClassOfType(typeof(BaseClass));
        _childClass = Architecture.GetClassOfType(typeof(ChildClass));
        // Fix: look up ChildClass a second time so this field really is a duplicate of
        // _childClass. It previously aliased _baseClass, which made AssignableToSameClass
        // exercise child->base assignability (already covered by AssignableToParentClass)
        // instead of assignability to the same class.
        _duplicateChildClass = Architecture.GetClassOfType(typeof(ChildClass));
        var backingType = Architecture.GetITypeOfType(typeof(PropertyType));
        // A Type that copies every property of PropertyType except IsSealed, so it is
        // "similar but not equal" for the mismatch assignability test.
        _misMatchType =
            new Type(backingType.FullName, backingType.Name, backingType.Assembly, backingType.Namespace,
                backingType.Visibility, backingType.IsNested, backingType.IsGeneric, backingType.IsStub, false);
        _misMatchType.GenericParameters.AddRange(backingType.GenericParameters);
        _implementsInterface = Architecture.GetClassOfType(typeof(InheritingType));
        _implementedInterface = Architecture.GetInterfaceOfType(typeof(IInheritingInterface));
        _chainedInterface = Architecture.GetInterfaceOfType(typeof(IInheritedTestInterface));
        _classEquivalencyTestData = new ClassEquivalencyTestData(typeof(ClassWithConstructors));
    }

    [Fact]
    public void AssignableToDirectlyImplementedInterfaces()
    {
        Assert.True(_implementsInterface.IsAssignableTo(_implementedInterface));
    }

    [Fact]
    public void AssignableToIndirectlyImplementedInterfaces()
    {
        // IInheritedTestInterface is only reachable through the interface inheritance chain.
        Assert.True(_implementsInterface.IsAssignableTo(_chainedInterface));
    }

    [Fact]
    public void AssignableToParentClass()
    {
        Assert.True(_childClass.IsAssignableTo(_baseClass));
    }

    [Fact]
    public void AssignableToSameClass()
    {
        Assert.True(_childClass.IsAssignableTo(_duplicateChildClass));
    }

    [Fact]
    public void ClassDoesNotEqualNull()
    {
        Assert.False(_classEquivalencyTestData.OriginClass.Equals(null));
    }

    [Fact]
    public void ClassesAreAssignedCorrectVisibility()
    {
        Assert.Equal(Public, StaticTestTypes.PublicTestClass.Visibility);
        Assert.Equal(Internal, StaticTestTypes.InternalTestClass.Visibility);
        Assert.Equal(Public, NestedPublicTestClass.Visibility);
        Assert.Equal(Private, NestedPrivateTestClass.Visibility);
        Assert.Equal(Protected, NestedProtectedTestClass.Visibility);
        Assert.Equal(Internal, NestedInternalTestClass.Visibility);
        Assert.Equal(ProtectedInternal, NestedProtectedInternalTestClass.Visibility);
        Assert.Equal(PrivateProtected, NestedPrivateProtectedTestClass.Visibility);
    }

    [Fact]
    public void ClassesHaveCorrectIsNestedProperty()
    {
        Assert.False(StaticTestTypes.PublicTestClass.IsNested);
        Assert.False(StaticTestTypes.InternalTestClass.IsNested);
        Assert.True(NestedPublicTestClass.IsNested);
        Assert.True(NestedPrivateTestClass.IsNested);
        Assert.True(NestedProtectedTestClass.IsNested);
        Assert.True(NestedInternalTestClass.IsNested);
        Assert.True(NestedProtectedInternalTestClass.IsNested);
        Assert.True(NestedPrivateProtectedTestClass.IsNested);
    }

    [Fact]
    public void ClassesHaveCorrectIsSealedProperty()
    {
        Assert.True(StaticTestTypes.SealedTestClass.IsSealed);
        Assert.False(StaticTestTypes.PublicTestClass.IsSealed);
    }

    [Fact]
    public void ClassHasConsistentHashCode()
    {
        var hash = _classEquivalencyTestData.OriginClass.GetHashCode();
        var duplicateHash = _classEquivalencyTestData.DuplicateClass.GetHashCode();
        Assert.Equal(hash, duplicateHash);
    }

    [Fact]
    public void DuplicateClassesAreEqual()
    {
        Assert.Equal(_classEquivalencyTestData.OriginClass,
            _classEquivalencyTestData.DuplicateClass);
    }

    [Fact]
    public void DuplicateClassObjectReferencesAreEqual()
    {
        Assert.Equal(_classEquivalencyTestData.OriginClass,
            _classEquivalencyTestData.ObjectReferenceDuplicate);
    }

    [Fact]
    public void DuplicateClassReferencesAreEqual()
    {
        Assert.True(_classEquivalencyTestData.OriginClass
            .Equals(_classEquivalencyTestData.ClassReferenceDuplicate));
    }

    [Fact]
    public void NotAssignableToUnrelatedType()
    {
        Assert.False(_childClass.IsAssignableTo(_misMatchType));
    }

    [Fact]
    public void ParentDependenciesAreInherited()
    {
        _baseClass.Dependencies.ForEach(parentDependency =>
        {
            Assert.Contains(parentDependency, _childClass.DependenciesIncludingInherited);
        });
    }

    /// <summary>
    ///     Bundles one class looked up twice plus reference aliases, to exercise
    ///     Equals/GetHashCode under the different flavors of "same class".
    /// </summary>
    private class ClassEquivalencyTestData
    {
        public ClassEquivalencyTestData([NotNull] System.Type originType)
        {
            OriginClass = Architecture.GetClassOfType(originType).RequiredNotNull();
            DuplicateClass = Architecture.GetClassOfType(originType).RequiredNotNull();
            ClassReferenceDuplicate = OriginClass;
            ObjectReferenceDuplicate = OriginClass;
        }

        [NotNull] public Class OriginClass { get; }
        [NotNull] public object DuplicateClass { get; }
        [NotNull] public Class ClassReferenceDuplicate { get; }
        [NotNull] public object ObjectReferenceDuplicate { get; }
    }
}
}
<|start_filename|>ArchUnitNET/Fluent/Syntax/Elements/Types/Classes/IClassConditions.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using ArchUnitNET.Domain;
namespace ArchUnitNET.Fluent.Syntax.Elements.Types.Classes
{
/// <summary>
/// Class-specific conditions of the fluent rule syntax, extending the general
/// type conditions with checks for the abstract and sealed modifiers.
/// </summary>
public interface IClassConditions<out TReturnType, out TRuleType> : ITypeConditions<TReturnType, TRuleType>
where TRuleType : ICanBeAnalyzed
{
/// <summary>Requires the matched classes to be abstract.</summary>
TReturnType BeAbstract();
/// <summary>Requires the matched classes to be sealed.</summary>
TReturnType BeSealed();
//Negations
/// <summary>Requires the matched classes to not be abstract.</summary>
TReturnType NotBeAbstract();
/// <summary>Requires the matched classes to not be sealed.</summary>
TReturnType NotBeSealed();
}
}
<|start_filename|>ArchUnitNETTests/Fluent/Syntax/Elements/ClassSyntaxElementsTests.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using System.Collections.Generic;
using ArchUnitNET.Domain;
using Xunit;
using static ArchUnitNET.Fluent.ArchRuleDefinition;
namespace ArchUnitNETTests.Fluent.Syntax.Elements
{
/// <summary>
/// Exercises the class-specific fluent syntax elements (abstract/sealed predicates
/// and conditions, plus the enum/struct/value-type checks) against every class of
/// the test architecture.
/// </summary>
public class ClassSyntaxElementsTests
{
    private static readonly Architecture Architecture = StaticTestArchitectures.ArchUnitNETTestArchitecture;
    private readonly IEnumerable<Class> _classes;

    public ClassSyntaxElementsTests()
    {
        _classes = Architecture.Classes;
    }

    [Fact]
    public void AreAbstractTest()
    {
        foreach (var testClass in _classes)
        {
            // Rules pinned to a single class must agree with that class's IsAbstract flag.
            var shouldBeAbstract = Classes().That().Are(testClass).Should().BeAbstract();
            var shouldNotBeAbstract = Classes().That().Are(testClass).Should().NotBeAbstract();
            var notAmongAbstract = Classes().That().AreAbstract().Should().NotBe(testClass);
            var notAmongNonAbstract = Classes().That().AreNotAbstract().Should().NotBe(testClass);
            Assert.Equal(testClass.IsAbstract, shouldBeAbstract.HasNoViolations(Architecture));
            Assert.Equal(!testClass.IsAbstract, shouldNotBeAbstract.HasNoViolations(Architecture));
            Assert.Equal(!testClass.IsAbstract, notAmongAbstract.HasNoViolations(Architecture));
            Assert.Equal(testClass.IsAbstract, notAmongNonAbstract.HasNoViolations(Architecture));
        }

        // Predicate and condition must be consistent over the whole architecture.
        Assert.True(Classes().That().AreAbstract().Should().BeAbstract().HasNoViolations(Architecture));
        Assert.False(Classes().That().AreAbstract().Should().NotBeAbstract().HasNoViolations(Architecture));
        Assert.False(Classes().That().AreNotAbstract().Should().BeAbstract().HasNoViolations(Architecture));
        Assert.True(Classes().That().AreNotAbstract().Should().NotBeAbstract().HasNoViolations(Architecture));
    }

    [Fact]
    public void AreEnumsTest()
    {
        // No Class in the model can be an enum.
        var nonEnumClassesAreAllClasses = Classes().That().AreNotEnums().Should().Be(Classes());
        var enumClassesDoNotExist = Classes().That().AreEnums().Should().NotExist();
        var classesAreNotEnums = Classes().Should().NotBeEnums();
        var classesAreEnums = Classes().Should().BeEnums();
        Assert.True(nonEnumClassesAreAllClasses.HasNoViolations(Architecture));
        Assert.True(enumClassesDoNotExist.HasNoViolations(Architecture));
        Assert.True(classesAreNotEnums.HasNoViolations(Architecture));
        Assert.False(classesAreEnums.HasNoViolations(Architecture));
    }

    [Fact]
    public void AreSealedTest()
    {
        foreach (var testClass in _classes)
        {
            // Rules pinned to a single class must agree with that class's IsSealed flag.
            var shouldBeSealed = Classes().That().Are(testClass).Should().BeSealed();
            var shouldNotBeSealed = Classes().That().Are(testClass).Should().NotBeSealed();
            var notAmongSealed = Classes().That().AreSealed().Should().NotBe(testClass);
            var notAmongNonSealed = Classes().That().AreNotSealed().Should().NotBe(testClass);
            Assert.Equal(testClass.IsSealed, shouldBeSealed.HasNoViolations(Architecture));
            Assert.Equal(!testClass.IsSealed, shouldNotBeSealed.HasNoViolations(Architecture));
            Assert.Equal(!testClass.IsSealed, notAmongSealed.HasNoViolations(Architecture));
            Assert.Equal(testClass.IsSealed, notAmongNonSealed.HasNoViolations(Architecture));
        }

        // Predicate and condition must be consistent over the whole architecture.
        Assert.True(Classes().That().AreSealed().Should().BeSealed().HasNoViolations(Architecture));
        Assert.False(Classes().That().AreSealed().Should().NotBeSealed().HasNoViolations(Architecture));
        Assert.False(Classes().That().AreNotSealed().Should().BeSealed().HasNoViolations(Architecture));
        Assert.True(Classes().That().AreNotSealed().Should().NotBeSealed().HasNoViolations(Architecture));
    }

    [Fact]
    public void AreStructsTest()
    {
        // No Class in the model can be a struct.
        var nonStructClassesAreAllClasses = Classes().That().AreNotStructs().Should().Be(Classes());
        var structClassesDoNotExist = Classes().That().AreStructs().Should().NotExist();
        var classesAreNotStructs = Classes().Should().NotBeStructs();
        var classesAreStructs = Classes().Should().BeStructs();
        Assert.True(nonStructClassesAreAllClasses.HasNoViolations(Architecture));
        Assert.True(structClassesDoNotExist.HasNoViolations(Architecture));
        Assert.True(classesAreNotStructs.HasNoViolations(Architecture));
        Assert.False(classesAreStructs.HasNoViolations(Architecture));
    }

    [Fact]
    public void AreValueTypesTest()
    {
        // No Class in the model can be a value type.
        var nonValueTypeClassesAreAllClasses = Classes().That().AreNotValueTypes().Should().Be(Classes());
        var valueTypeClassesDoNotExist = Classes().That().AreValueTypes().Should().NotExist();
        var classesAreNotValueTypes = Classes().Should().NotBeValueTypes();
        var classesAreValueTypes = Classes().Should().BeValueTypes();
        Assert.True(nonValueTypeClassesAreAllClasses.HasNoViolations(Architecture));
        Assert.True(valueTypeClassesDoNotExist.HasNoViolations(Architecture));
        Assert.True(classesAreNotValueTypes.HasNoViolations(Architecture));
        Assert.False(classesAreValueTypes.HasNoViolations(Architecture));
    }
}
}
<|start_filename|>ArchUnitNETTests/Domain/RecordTypeTests.cs<|end_filename|>
using ArchUnitNET.Domain;
using TestAssembly;
using Xunit;
namespace ArchUnitNETTests.Domain
{
/// <summary>
/// Verifies that C# record types from the test assembly are loaded into the
/// architecture model as types.
/// </summary>
public class RecordTypeTests
{
    private static readonly Architecture Architecture = StaticTestArchitectures.ArchUnitNETTestAssemblyArchitecture;

    [Fact]
    public void RecordTypeExists()
    {
        // Both an abstract record and a plain record must be present in the model.
        foreach (var expectedTypeName in new[] {nameof(AbstractRecord), nameof(Record1)})
        {
            Assert.Contains(Architecture.Types, type => type.Name.Equals(expectedTypeName));
        }
    }
}
}
<|start_filename|>ArchUnitNET/Fluent/Syntax/Elements/Types/Classes/IClassPredicates.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using ArchUnitNET.Domain;
namespace ArchUnitNET.Fluent.Syntax.Elements.Types.Classes
{
/// <summary>
/// Class-specific predicates of the fluent rule syntax, extending the general
/// type predicates with filters for the abstract and sealed modifiers.
/// </summary>
public interface IClassPredicates<out TReturnType, TRuleType> : ITypePredicates<TReturnType, TRuleType>
where TRuleType : ICanBeAnalyzed
{
/// <summary>Selects only abstract classes.</summary>
TReturnType AreAbstract();
/// <summary>Selects only sealed classes.</summary>
TReturnType AreSealed();
//Negations
/// <summary>Selects only classes that are not abstract.</summary>
TReturnType AreNotAbstract();
/// <summary>Selects only classes that are not sealed.</summary>
TReturnType AreNotSealed();
}
}
<|start_filename|>ArchUnitNET/Fluent/Syntax/Elements/Types/ShouldRelateToTypesThat.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using System;
using System.Collections.Generic;
using ArchUnitNET.Domain;
using static ArchUnitNET.Fluent.Syntax.ConjunctionFactory;
using Assembly = System.Reflection.Assembly;
namespace ArchUnitNET.Fluent.Syntax.Elements.Types
{
/// <summary>
/// Fluent "that" syntax element used while building the condition part of a rule
/// ("... should relate to types that ..."). Every predicate method forwards to the
/// corresponding predicate in TypePredicatesDefinition, appends it to the complex
/// condition under construction, and returns the next conjunction element of the
/// fluent chain.
///
/// Fixes three copy-paste delegation bugs (verified against the correct sibling
/// implementation in GivenTypesThat): HavePropertyMemberWithName previously
/// delegated to HaveMethodMemberWithName, HaveMethodMemberWithName to
/// HaveFieldMemberWithName, and DoNotHaveMemberWithName to
/// DoNotHaveMethodMemberWithName.
/// </summary>
public class ShouldRelateToTypesThat<TRuleTypeShouldConjunction, TReferenceType, TRuleType> :
    ShouldRelateToObjectsThat<TRuleTypeShouldConjunction, TReferenceType, TRuleType>,
    ITypePredicates<TRuleTypeShouldConjunction, TReferenceType>
    where TReferenceType : IType
    where TRuleType : ICanBeAnalyzed
{
    // ReSharper disable once MemberCanBeProtected.Global
    public ShouldRelateToTypesThat(IArchRuleCreator<TRuleType> ruleCreator) : base(ruleCreator)
    {
    }

    public TRuleTypeShouldConjunction Are(Type firstType, params Type[] moreTypes)
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.Are(firstType, moreTypes));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction Are(IEnumerable<Type> types)
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.Are(types));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreAssignableTo(string pattern, bool useRegularExpressions = false)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.AreAssignableTo(pattern, useRegularExpressions));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreAssignableTo(IEnumerable<string> patterns,
        bool useRegularExpressions = false)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.AreAssignableTo(patterns, useRegularExpressions));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreAssignableTo(IType firstType, params IType[] moreTypes)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.AreAssignableTo(firstType, moreTypes));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreAssignableTo(Type firstType, params Type[] moreTypes)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.AreAssignableTo(firstType, moreTypes));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreAssignableTo(IObjectProvider<IType> types)
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.AreAssignableTo(types));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreAssignableTo(IEnumerable<IType> types)
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.AreAssignableTo(types));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreAssignableTo(IEnumerable<Type> types)
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.AreAssignableTo(types));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreValueTypes()
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.AreValueTypes());
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreEnums()
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.AreEnums());
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreStructs()
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.AreStructs());
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction ImplementInterface(string pattern, bool useRegularExpressions = false)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.ImplementInterface(pattern, useRegularExpressions));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction ImplementInterface(Interface intf)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.ImplementInterface(intf));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction ImplementInterface(Type intf)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.ImplementInterface(intf));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction ResideInNamespace(string pattern, bool useRegularExpressions = false)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.ResideInNamespace(pattern, useRegularExpressions));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction ResideInAssembly(string pattern, bool useRegularExpressions = false)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.ResideInAssembly(pattern, useRegularExpressions));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction ResideInAssembly(Assembly assembly, params Assembly[] moreAssemblies)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.ResideInAssembly(assembly, moreAssemblies));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction HavePropertyMemberWithName(string name)
    {
        // BUGFIX: previously delegated to HaveMethodMemberWithName, so this
        // predicate matched method members instead of property members.
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.HavePropertyMemberWithName(name));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction HaveFieldMemberWithName(string name)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.HaveFieldMemberWithName(name));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction HaveMethodMemberWithName(string name)
    {
        // BUGFIX: previously delegated to HaveFieldMemberWithName, so this
        // predicate matched field members instead of method members.
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.HaveMethodMemberWithName(name));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction HaveMemberWithName(string name)
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.HaveMemberWithName(name));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreNested()
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.AreNested());
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    //Negations

    public TRuleTypeShouldConjunction AreNot(Type firstType, params Type[] moreTypes)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.AreNot(firstType, moreTypes));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreNot(IEnumerable<Type> types)
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.AreNot(types));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreNotAssignableTo(string pattern, bool useRegularExpressions = false)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.AreNotAssignableTo(pattern, useRegularExpressions));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreNotAssignableTo(IEnumerable<string> patterns,
        bool useRegularExpressions = false)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.AreNotAssignableTo(patterns, useRegularExpressions));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreNotAssignableTo(IType firstType, params IType[] moreTypes)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.AreNotAssignableTo(firstType, moreTypes));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreNotAssignableTo(Type firstType, params Type[] moreTypes)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.AreNotAssignableTo(firstType, moreTypes));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreNotAssignableTo(IObjectProvider<IType> types)
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.AreNotAssignableTo(types));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreNotAssignableTo(IEnumerable<IType> types)
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.AreNotAssignableTo(types));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreNotAssignableTo(IEnumerable<Type> types)
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.AreNotAssignableTo(types));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreNotValueTypes()
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.AreNotValueTypes());
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreNotEnums()
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.AreNotEnums());
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreNotStructs()
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.AreNotStructs());
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction DoNotImplementInterface(string pattern, bool useRegularExpressions = false)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.DoNotImplementInterface(pattern, useRegularExpressions));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction DoNotImplementInterface(Interface intf)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.DoNotImplementInterface(intf));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction DoNotImplementInterface(Type intf)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.DoNotImplementInterface(intf));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction DoNotResideInNamespace(string pattern, bool useRegularExpressions = false)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.DoNotResideInNamespace(pattern, useRegularExpressions));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction DoNotResideInAssembly(string pattern, bool useRegularExpressions = false)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.DoNotResideInAssembly(pattern, useRegularExpressions));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction DoNotResideInAssembly(Assembly assembly, params Assembly[] moreAssemblies)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.DoNotResideInAssembly(assembly, moreAssemblies));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction DoNotHavePropertyMemberWithName(string name)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.DoNotHavePropertyMemberWithName(name));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction DoNotHaveFieldMemberWithName(string name)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.DoNotHaveFieldMemberWithName(name));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction DoNotHaveMethodMemberWithName(string name)
    {
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.DoNotHaveMethodMemberWithName(name));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction DoNotHaveMemberWithName(string name)
    {
        // BUGFIX: previously delegated to DoNotHaveMethodMemberWithName, so this
        // predicate ignored field and property members.
        _ruleCreator.ContinueComplexCondition(
            TypePredicatesDefinition<TReferenceType>.DoNotHaveMemberWithName(name));
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }

    public TRuleTypeShouldConjunction AreNotNested()
    {
        _ruleCreator.ContinueComplexCondition(TypePredicatesDefinition<TReferenceType>.AreNotNested());
        return Create<TRuleTypeShouldConjunction, TRuleType>(_ruleCreator);
    }
}
}
<|start_filename|>ArchUnitNET/Fluent/Syntax/Elements/Types/Classes/ClassPredicatesDefinition.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using ArchUnitNET.Domain;
using ArchUnitNET.Fluent.Predicates;
namespace ArchUnitNET.Fluent.Syntax.Elements.Types.Classes
{
/// <summary>
/// Factory for the class-specific predicates (abstract / sealed).
/// A class whose modifier flag is unknown (null) passes both the positive and
/// the negative predicate, exactly as in the original hand-written boolean form.
/// </summary>
public static class ClassPredicatesDefinition
{
    public static IPredicate<Class> AreAbstract()
    {
        // null (unknown) counts as a match: x ?? true  ==  !x.HasValue || x.Value
        return new SimplePredicate<Class>(c => c.IsAbstract ?? true, "are abstract");
    }

    public static IPredicate<Class> AreSealed()
    {
        return new SimplePredicate<Class>(c => c.IsSealed ?? true, "are sealed");
    }

    //Negations

    public static IPredicate<Class> AreNotAbstract()
    {
        // null (unknown) counts as a match: !(x ?? false)  ==  !x.HasValue || !x.Value
        return new SimplePredicate<Class>(c => !(c.IsAbstract ?? false), "are not abstract");
    }

    public static IPredicate<Class> AreNotSealed()
    {
        return new SimplePredicate<Class>(c => !(c.IsSealed ?? false), "are not sealed");
    }
}
}
<|start_filename|>ArchUnitNET/Fluent/Syntax/Elements/Types/GivenTypesThat.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using System;
using System.Collections.Generic;
using ArchUnitNET.Domain;
using static ArchUnitNET.Fluent.Syntax.ConjunctionFactory;
using Assembly = System.Reflection.Assembly;
namespace ArchUnitNET.Fluent.Syntax.Elements.Types
{
public class GivenTypesThat<TGivenRuleTypeConjunction, TRuleType> :
GivenObjectsThat<TGivenRuleTypeConjunction, TRuleType>, ITypePredicates<TGivenRuleTypeConjunction, TRuleType>
where TRuleType : IType
{
// ReSharper disable once MemberCanBeProtected.Global
public GivenTypesThat(IArchRuleCreator<TRuleType> ruleCreator) : base(ruleCreator)
{
}
public TGivenRuleTypeConjunction Are(Type firstType, params Type[] moreTypes)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.Are(firstType, moreTypes));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction Are(IEnumerable<Type> types)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.Are(types));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreAssignableTo(string pattern, bool useRegularExpressions = false)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.AreAssignableTo(pattern, useRegularExpressions));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreAssignableTo(IEnumerable<string> patterns,
bool useRegularExpressions = false)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.AreAssignableTo(patterns, useRegularExpressions));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreAssignableTo(IType firstType, params IType[] moreTypes)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreAssignableTo(firstType, moreTypes));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreAssignableTo(Type firstType, params Type[] moreTypes)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreAssignableTo(firstType, moreTypes));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreAssignableTo(IObjectProvider<IType> types)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreAssignableTo(types));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreAssignableTo(IEnumerable<IType> types)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreAssignableTo(types));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreAssignableTo(IEnumerable<Type> types)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreAssignableTo(types));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreValueTypes()
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreValueTypes());
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreEnums()
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreEnums());
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreStructs()
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreStructs());
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction ImplementInterface(string pattern, bool useRegularExpressions = false)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.ImplementInterface(pattern, useRegularExpressions));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction ImplementInterface(Interface intf)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.ImplementInterface(intf));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction ImplementInterface(Type intf)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.ImplementInterface(intf));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction ResideInNamespace(string pattern, bool useRegularExpressions = false)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.ResideInNamespace(pattern, useRegularExpressions));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction ResideInAssembly(string pattern, bool useRegularExpressions = false)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.ResideInAssembly(pattern, useRegularExpressions));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction ResideInAssembly(Assembly assembly, params Assembly[] moreAssemblies)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.ResideInAssembly(assembly, moreAssemblies));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction HavePropertyMemberWithName(string name)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.HavePropertyMemberWithName(name));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction HaveFieldMemberWithName(string name)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.HaveFieldMemberWithName(name));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction HaveMethodMemberWithName(string name)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.HaveMethodMemberWithName(name));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction HaveMemberWithName(string name)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.HaveMemberWithName(name));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreNested()
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreNested());
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
//Negations
public TGivenRuleTypeConjunction AreNot(Type firstType, params Type[] moreTypes)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreNot(firstType, moreTypes));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreNot(IEnumerable<Type> types)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreNot(types));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreNotAssignableTo(string pattern, bool useRegularExpressions = false)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.AreNotAssignableTo(pattern, useRegularExpressions));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreNotAssignableTo(IEnumerable<string> patterns,
bool useRegularExpressions = false)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.AreNotAssignableTo(patterns, useRegularExpressions));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreNotAssignableTo(IType firstType, params IType[] moreTypes)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreNotAssignableTo(firstType, moreTypes));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreNotAssignableTo(Type firstType, params Type[] moreTypes)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreNotAssignableTo(firstType, moreTypes));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreNotAssignableTo(IObjectProvider<IType> types)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreNotAssignableTo(types));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreNotAssignableTo(IEnumerable<IType> types)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreNotAssignableTo(types));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreNotAssignableTo(IEnumerable<Type> types)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreNotAssignableTo(types));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreNotValueTypes()
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreNotValueTypes());
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreNotEnums()
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreNotEnums());
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction AreNotStructs()
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.AreNotStructs());
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction DoNotImplementInterface(string pattern, bool useRegularExpressions = false)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.DoNotImplementInterface(pattern, useRegularExpressions));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction DoNotImplementInterface(Interface intf)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.DoNotImplementInterface(intf));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction DoNotImplementInterface(Type intf)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.DoNotImplementInterface(intf));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction DoNotResideInNamespace(string pattern, bool useRegularExpressions = false)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.DoNotResideInNamespace(pattern, useRegularExpressions));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction DoNotResideInAssembly(string pattern, bool useRegularExpressions = false)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.DoNotResideInAssembly(pattern, useRegularExpressions));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction DoNotResideInAssembly(Assembly assembly, params Assembly[] moreAssemblies)
{
_ruleCreator.AddPredicate(
TypePredicatesDefinition<TRuleType>.DoNotResideInAssembly(assembly, moreAssemblies));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction DoNotHavePropertyMemberWithName(string name)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.DoNotHavePropertyMemberWithName(name));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction DoNotHaveFieldMemberWithName(string name)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.DoNotHaveFieldMemberWithName(name));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
public TGivenRuleTypeConjunction DoNotHaveMethodMemberWithName(string name)
{
_ruleCreator.AddPredicate(TypePredicatesDefinition<TRuleType>.DoNotHaveMethodMemberWithName(name));
return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
/// <summary>
/// Restricts the rule to types without any member of the given name.
/// </summary>
public TGivenRuleTypeConjunction DoNotHaveMemberWithName(string name)
{
    var predicate = TypePredicatesDefinition<TRuleType>.DoNotHaveMemberWithName(name);
    _ruleCreator.AddPredicate(predicate);
    return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
/// <summary>
/// Restricts the rule to types that are not nested inside another type.
/// </summary>
public TGivenRuleTypeConjunction AreNotNested()
{
    var predicate = TypePredicatesDefinition<TRuleType>.AreNotNested();
    _ruleCreator.AddPredicate(predicate);
    return Create<TGivenRuleTypeConjunction, TRuleType>(_ruleCreator);
}
}
}
<|start_filename|>ArchUnitNETTests/Domain/Dependencies/Members/MethodCallDependencyTests.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using System.Linq;
using ArchUnitNET.Domain;
using ArchUnitNET.Domain.Dependencies;
using ArchUnitNET.Domain.Extensions;
using ArchUnitNETTests.Fluent.Extensions;
using Xunit;
// ReSharper disable UnusedMember.Global
// ReSharper disable UnusedVariable
// ReSharper disable NotAccessedField.Local
namespace ArchUnitNETTests.Domain.Dependencies.Members
{
/// <summary>
/// Verifies that constructor-call and method-call dependencies are discovered
/// by the loader for the fixture classes defined below in this file.
/// </summary>
public class MethodCallDependencyTests
{
    private readonly Architecture _architecture = StaticTestArchitectures.ArchUnitNETTestArchitecture;
    private readonly Class _classWithConstructors;
    private readonly MethodMember _methodAMember;
    private readonly MethodMember _methodBMember;

    public MethodCallDependencyTests()
    {
        _classWithConstructors = _architecture.GetClassOfType(typeof(ClassWithConstructors));
        var classWithMethodA = _architecture.GetClassOfType(typeof(ClassWithMethodA));
        _methodAMember = classWithMethodA
            .GetMethodMembersWithName(nameof(ClassWithMethodA.MethodA).BuildMethodMemberName())
            .FirstOrDefault();
        var classWithMethodB = _architecture.GetClassOfType(typeof(ClassWithMethodB));
        _methodBMember = classWithMethodB
            .GetMethodMembersWithName(nameof(ClassWithMethodB.MethodB).BuildMethodMemberName())
            .FirstOrDefault();
    }

    [Theory]
    [ClassData(typeof(MethodDependencyTestBuild.ConstructorTestData))]
    public void ConstructorsAddedToClass(Class classWithConstructors)
    {
        // Every constructor reported for the class must be registered on it.
        foreach (var constructor in classWithConstructors.GetConstructors())
        {
            Assert.Contains(constructor, classWithConstructors.Constructors);
        }
    }

    [Theory]
    [ClassData(typeof(MethodDependencyTestBuild.MethodCallDependencyTestData))]
    public void MethodCallDependenciesAreFound(IMember originMember, MethodCallDependency expectedDependency)
    {
        Assert.True(originMember.HasMemberDependency(expectedDependency));
        Assert.Contains(expectedDependency, originMember.GetMethodCallDependencies());
    }

    [Theory]
    [ClassData(typeof(MethodDependencyTestBuild.MethodCallDependencyInAsyncMethodTestData))]
    public void MethodCallDependenciesAreFoundInAsyncMethod(IMember originMember, MethodCallDependency expectedDependency)
    {
        Assert.True(originMember.HasMemberDependency(expectedDependency));
        Assert.Contains(expectedDependency, originMember.GetMethodCallDependencies());
    }
}
// Fixture: MethodA instantiates ClassWithMethodB (constructor-call dependency)
// and invokes ClassWithMethodB.MethodB (method-call dependency). The test data
// above asserts exactly these dependencies, so the body must not be altered.
public class ClassWithMethodA
{
    public static void MethodA()
    {
        var classWithMethodB = new ClassWithMethodB();
        ClassWithMethodB.MethodB();
    }
}
// Fixture: mirror of ClassWithMethodA — MethodB constructs ClassWithMethodA
// and calls ClassWithMethodA.MethodA, giving the tests a dependency in the
// opposite direction. Do not simplify; the statements ARE the test data.
public class ClassWithMethodB
{
    public static void MethodB()
    {
        var classWithMethodA = new ClassWithMethodA();
        ClassWithMethodA.MethodA();
    }
}
// Fixture: async variant of ClassWithMethodA. Used to verify that the loader
// also finds call dependencies inside compiler-generated async state machines.
public class ClassWithMethodAAsync
{
#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
    public static async void MethodAAsync()
#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
    {
        var classWithMethodB = new ClassWithMethodB();
        ClassWithMethodB.MethodB();
    }
}
// Fixture: three chained constructors (public -> private -> private) used to
// verify that all constructors, including private ones, are added to the class.
public class ClassWithConstructors
{
    private FieldType _fieldTest;
    private FieldType _privateFieldTest;

    public ClassWithConstructors() : this(new FieldType())
    {
    }

    private ClassWithConstructors(FieldType fieldTest) : this(fieldTest, fieldTest)
    {
    }

    private ClassWithConstructors(FieldType fieldTest, FieldType privateFieldTest)
    {
        _fieldTest = fieldTest;
        _privateFieldTest = privateFieldTest;
    }
}
}
<|start_filename|>ArchUnitNETTests/Domain/Dependencies/Members/MethodDependencyTestBuild.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using ArchUnitNET.Domain;
using ArchUnitNET.Domain.Dependencies;
using ArchUnitNET.Domain.Extensions;
using ArchUnitNET.Loader;
using ArchUnitNETTests.Fluent.Extensions;
using Type = System.Type;
namespace ArchUnitNETTests.Domain.Dependencies.Members
{
/// <summary>
/// Builds the xUnit ClassData used by the method-dependency tests: pairs of
/// (origin member, expected dependency) resolved against the shared test
/// architecture.
/// </summary>
public static class MethodDependencyTestBuild
{
    private static readonly Architecture Architecture = StaticTestArchitectures.ArchUnitNETTestArchitecture;

    // Resolves the origin member and constructs the MethodCallDependency the
    // loader is expected to have recorded towards the target method.
    // Single() intentionally fails fast if the member name is ambiguous.
    private static object[] BuildMethodCallDependencyTestData(Type originType, string nameOfOriginMember,
        Type targetType, string nameOfTargetMember)
    {
        var originClass = Architecture.GetClassOfType(originType);
        var originMember = originClass.GetMembersWithName(nameOfOriginMember).Single();
        var targetClass = Architecture.GetClassOfType(targetType);
        var targetMember = targetClass.GetMethodMembersWithName(nameOfTargetMember).Single();
        var expectedDependency = new MethodCallDependency(originMember, new MethodMemberInstance(targetMember,
            Enumerable.Empty<GenericArgument>(), Enumerable.Empty<GenericArgument>()));
        return new object[] {originMember, expectedDependency};
    }

    // Same idea for signature dependencies: the target type appears in the
    // origin method's signature rather than in a call.
    private static object[] BuildMethodSignatureDependencyTestData(Type originType,
        string nameOfOriginMember, Type targetType)
    {
        var originClass = Architecture.GetClassOfType(originType);
        var originMember = originClass.GetMethodMembersWithName(nameOfOriginMember).Single();
        var target = Architecture.GetClassOfType(targetType);
        var expectedDependency =
            new MethodSignatureDependency(originMember, new TypeInstance<Class>(target));
        return new object[] {originMember, expectedDependency};
    }

    // Expected dependencies of ClassWithMethodA.MethodA / ClassWithMethodB.MethodB:
    // each method both constructs the other class and calls its method.
    public class MethodCallDependencyTestData : IEnumerable<object[]>
    {
        private readonly List<object[]> _methodCallDependencyData = new List<object[]>
        {
            BuildMethodCallDependencyTestData(typeof(ClassWithMethodA),
                nameof(ClassWithMethodA.MethodA).BuildMethodMemberName(), typeof(ClassWithMethodB),
                StaticConstants.ConstructorNameBase.BuildMethodMemberName()),
            BuildMethodCallDependencyTestData(typeof(ClassWithMethodA),
                nameof(ClassWithMethodA.MethodA).BuildMethodMemberName(), typeof(ClassWithMethodB),
                nameof(ClassWithMethodB.MethodB).BuildMethodMemberName()),
            BuildMethodCallDependencyTestData(typeof(ClassWithMethodB),
                nameof(ClassWithMethodB.MethodB).BuildMethodMemberName(), typeof(ClassWithMethodA),
                StaticConstants.ConstructorNameBase.BuildMethodMemberName()),
            BuildMethodCallDependencyTestData(typeof(ClassWithMethodB),
                nameof(ClassWithMethodB.MethodB).BuildMethodMemberName(), typeof(ClassWithMethodA),
                nameof(ClassWithMethodA.MethodA).BuildMethodMemberName())
        };

        public IEnumerator<object[]> GetEnumerator()
        {
            return _methodCallDependencyData.GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }
    }

    // Same dependencies, but the origin of the first two rows is the async
    // fixture, exercising call detection inside async state machines.
    public class MethodCallDependencyInAsyncMethodTestData : IEnumerable<object[]>
    {
        private readonly List<object[]> _methodCallDependencyData = new List<object[]>
        {
            BuildMethodCallDependencyTestData(typeof(ClassWithMethodAAsync),
                nameof(ClassWithMethodAAsync.MethodAAsync).BuildMethodMemberName(), typeof(ClassWithMethodB),
                StaticConstants.ConstructorNameBase.BuildMethodMemberName()),
            BuildMethodCallDependencyTestData(typeof(ClassWithMethodAAsync),
                nameof(ClassWithMethodAAsync.MethodAAsync).BuildMethodMemberName(), typeof(ClassWithMethodB),
                nameof(ClassWithMethodB.MethodB).BuildMethodMemberName()),
            BuildMethodCallDependencyTestData(typeof(ClassWithMethodB),
                nameof(ClassWithMethodB.MethodB).BuildMethodMemberName(), typeof(ClassWithMethodA),
                StaticConstants.ConstructorNameBase.BuildMethodMemberName()),
            BuildMethodCallDependencyTestData(typeof(ClassWithMethodB),
                nameof(ClassWithMethodB.MethodB).BuildMethodMemberName(), typeof(ClassWithMethodA),
                nameof(ClassWithMethodA.MethodA).BuildMethodMemberName())
        };

        public IEnumerator<object[]> GetEnumerator()
        {
            return _methodCallDependencyData.GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }
    }

    public class MethodSignatureDependencyTestData : IEnumerable<object[]>
    {
        private readonly List<object[]> _methodSignatureDependencyData = new List<object[]>
        {
            BuildMethodSignatureDependencyTestData(typeof(ClassWithMethodSignatureA),
                nameof(ClassWithMethodSignatureA.MethodA).BuildMethodMemberName(typeof(ClassWithMethodSignatureB)),
                typeof(ClassWithMethodSignatureB)),
            BuildMethodSignatureDependencyTestData(typeof(ClassWithMethodSignatureB),
                nameof(ClassWithMethodSignatureB.MethodB).BuildMethodMemberName(typeof(ClassWithMethodSignatureA)),
                typeof(ClassWithMethodSignatureA)),
            BuildMethodSignatureDependencyTestData(typeof(ClassWithMethodSignatureC),
                StaticConstants.ConstructorNameBase.BuildMethodMemberName(typeof(ClassWithMethodSignatureB)),
                typeof(ClassWithMethodSignatureB))
        };

        public IEnumerator<object[]> GetEnumerator()
        {
            return _methodSignatureDependencyData.GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }
    }

    public class ConstructorTestData : IEnumerable<object[]>
    {
        // NOTE(review): ClassWithMethodA appears twice below — possibly a
        // copy-paste slip for ClassWithMethodB; confirm the intended coverage.
        private readonly List<object[]> _methodCallDependencyData = new List<object[]>
        {
            ClassDependenciesIncludeMemberDependenciesBuild.BuildClassTestData(typeof(ClassWithMethodA)),
            ClassDependenciesIncludeMemberDependenciesBuild.BuildClassTestData(typeof(ClassWithMethodA)),
            ClassDependenciesIncludeMemberDependenciesBuild.BuildClassTestData(typeof(ClassWithConstructors))
        };

        public IEnumerator<object[]> GetEnumerator()
        {
            return _methodCallDependencyData.GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }
    }
}
}
<|start_filename|>ArchUnitNET/Fluent/Syntax/Elements/Types/ITypeConditions.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using System;
using System.Collections.Generic;
using ArchUnitNET.Domain;
using Assembly = System.Reflection.Assembly;
namespace ArchUnitNET.Fluent.Syntax.Elements.Types
{
/// <summary>
/// Fluent conditions applicable to types (the "should ..." side of a rule).
/// Each method returns the next element of the fluent syntax so calls can be
/// chained. Mirrors the predicate counterpart ITypePredicates.
/// </summary>
public interface ITypeConditions<out TReturnType, out TRuleType> : IObjectConditions<TReturnType, TRuleType>
    where TRuleType : ICanBeAnalyzed
{
    // Identity and assignability
    TReturnType Be(Type firstType, params Type[] moreTypes);
    TReturnType Be(IEnumerable<Type> types);
    TReturnType BeAssignableTo(string pattern, bool useRegularExpressions = false);
    TReturnType BeAssignableTo(IEnumerable<string> patterns, bool useRegularExpressions = false);
    TReturnType BeAssignableTo(IType firstType, params IType[] moreTypes);
    TReturnType BeAssignableTo(Type type, params Type[] moreTypes);
    TReturnType BeAssignableTo(IObjectProvider<IType> types);
    TReturnType BeAssignableTo(IEnumerable<IType> types);
    TReturnType BeAssignableTo(IEnumerable<Type> types);
    // Kind of type
    TReturnType BeValueTypes();
    TReturnType BeEnums();
    TReturnType BeStructs();
    // Implemented interfaces
    TReturnType ImplementInterface(string pattern, bool useRegularExpressions = false);
    TReturnType ImplementInterface(Interface intf);
    TReturnType ImplementInterface(Type intf);
    // Location (namespace / assembly)
    TReturnType ResideInNamespace(string pattern, bool useRegularExpressions = false);
    TReturnType ResideInAssembly(string pattern, bool useRegularExpressions = false);
    TReturnType ResideInAssembly(Assembly assembly, params Assembly[] moreAssemblies);
    // Members
    TReturnType HavePropertyMemberWithName(string name);
    TReturnType HaveFieldMemberWithName(string name);
    TReturnType HaveMethodMemberWithName(string name);
    TReturnType HaveMemberWithName(string name);
    TReturnType BeNested();
    //Negations
    TReturnType NotBe(Type firstType, params Type[] moreTypes);
    TReturnType NotBe(IEnumerable<Type> types);
    TReturnType NotBeAssignableTo(string pattern, bool useRegularExpressions = false);
    TReturnType NotBeAssignableTo(IEnumerable<string> patterns, bool useRegularExpressions = false);
    TReturnType NotBeAssignableTo(IType type, params IType[] moreTypes);
    TReturnType NotBeAssignableTo(Type type, params Type[] moreTypes);
    TReturnType NotBeAssignableTo(IObjectProvider<IType> types);
    TReturnType NotBeAssignableTo(IEnumerable<IType> types);
    TReturnType NotBeAssignableTo(IEnumerable<Type> types);
    TReturnType NotBeValueTypes();
    TReturnType NotBeEnums();
    TReturnType NotBeStructs();
    TReturnType NotImplementInterface(string pattern, bool useRegularExpressions = false);
    TReturnType NotImplementInterface(Interface intf);
    TReturnType NotImplementInterface(Type intf);
    TReturnType NotResideInNamespace(string pattern, bool useRegularExpressions = false);
    TReturnType NotResideInAssembly(string pattern, bool useRegularExpressions = false);
    TReturnType NotResideInAssembly(Assembly assembly, params Assembly[] moreAssemblies);
    TReturnType NotHavePropertyMemberWithName(string name);
    TReturnType NotHaveFieldMemberWithName(string name);
    TReturnType NotHaveMethodMemberWithName(string name);
    TReturnType NotHaveMemberWithName(string name);
    TReturnType NotBeNested();
}
}
<|start_filename|>ArchUnitNET/Fluent/Syntax/Elements/Types/ITypePredicates.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using System;
using System.Collections.Generic;
using ArchUnitNET.Domain;
using Assembly = System.Reflection.Assembly;
namespace ArchUnitNET.Fluent.Syntax.Elements.Types
{
/// <summary>
/// Fluent predicates applicable to types (the "that ..." side of a rule).
/// Each method returns the next element of the fluent syntax so calls can be
/// chained. Mirrors the condition counterpart ITypeConditions.
/// </summary>
public interface ITypePredicates<out TReturnType, TRuleType> : IObjectPredicates<TReturnType, TRuleType>
    where TRuleType : ICanBeAnalyzed
{
    // Identity and assignability
    TReturnType Are(Type firstType, params Type[] moreTypes);
    TReturnType Are(IEnumerable<Type> types);
    TReturnType AreAssignableTo(string pattern, bool useRegularExpressions = false);
    TReturnType AreAssignableTo(IEnumerable<string> patterns, bool useRegularExpressions = false);
    TReturnType AreAssignableTo(IType firstType, params IType[] moreTypes);
    TReturnType AreAssignableTo(Type type, params Type[] moreTypes);
    TReturnType AreAssignableTo(IObjectProvider<IType> types);
    TReturnType AreAssignableTo(IEnumerable<IType> types);
    TReturnType AreAssignableTo(IEnumerable<Type> types);
    // Kind of type
    TReturnType AreValueTypes();
    TReturnType AreEnums();
    TReturnType AreStructs();
    // Implemented interfaces
    TReturnType ImplementInterface(string pattern, bool useRegularExpressions = false);
    TReturnType ImplementInterface(Interface intf);
    TReturnType ImplementInterface(Type intf);
    // Location (namespace / assembly)
    TReturnType ResideInNamespace(string pattern, bool useRegularExpressions = false);
    TReturnType ResideInAssembly(string pattern, bool useRegularExpressions = false);
    TReturnType ResideInAssembly(Assembly assembly, params Assembly[] moreAssemblies);
    // Members
    TReturnType HavePropertyMemberWithName(string name);
    TReturnType HaveFieldMemberWithName(string name);
    TReturnType HaveMethodMemberWithName(string name);
    TReturnType HaveMemberWithName(string name);
    TReturnType AreNested();
    //Negations
    TReturnType AreNot(Type firstType, params Type[] moreTypes);
    TReturnType AreNot(IEnumerable<Type> types);
    TReturnType AreNotAssignableTo(string pattern, bool useRegularExpressions = false);
    TReturnType AreNotAssignableTo(IEnumerable<string> patterns, bool useRegularExpressions = false);
    TReturnType AreNotAssignableTo(IType type, params IType[] moreTypes);
    TReturnType AreNotAssignableTo(Type type, params Type[] moreTypes);
    TReturnType AreNotAssignableTo(IObjectProvider<IType> types);
    TReturnType AreNotAssignableTo(IEnumerable<IType> types);
    TReturnType AreNotAssignableTo(IEnumerable<Type> types);
    TReturnType AreNotValueTypes();
    TReturnType AreNotEnums();
    TReturnType AreNotStructs();
    TReturnType DoNotImplementInterface(string pattern, bool useRegularExpressions = false);
    TReturnType DoNotImplementInterface(Interface intf);
    TReturnType DoNotImplementInterface(Type intf);
    TReturnType DoNotResideInNamespace(string pattern, bool useRegularExpressions = false);
    TReturnType DoNotResideInAssembly(string pattern, bool useRegularExpressions = false);
    TReturnType DoNotResideInAssembly(Assembly assembly, params Assembly[] moreAssemblies);
    TReturnType DoNotHavePropertyMemberWithName(string name);
    TReturnType DoNotHaveFieldMemberWithName(string name);
    TReturnType DoNotHaveMethodMemberWithName(string name);
    TReturnType DoNotHaveMemberWithName(string name);
    TReturnType AreNotNested();
}
}
<|start_filename|>ArchUnitNET/Fluent/Syntax/Elements/Types/Classes/GivenClassesThat.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using ArchUnitNET.Domain;
namespace ArchUnitNET.Fluent.Syntax.Elements.Types.Classes
{
/// <summary>
/// Predicate syntax for class rules: adds class-specific filters
/// (abstract / sealed) on top of the generic type predicates.
/// </summary>
public class GivenClassesThat : GivenTypesThat<GivenClassesConjunction, Class>,
    IClassPredicates<GivenClassesConjunction, Class>
{
    public GivenClassesThat(IArchRuleCreator<Class> ruleCreator) : base(ruleCreator)
    {
    }

    /// <summary>Keeps only abstract classes.</summary>
    public GivenClassesConjunction AreAbstract()
    {
        var predicate = ClassPredicatesDefinition.AreAbstract();
        _ruleCreator.AddPredicate(predicate);
        return new GivenClassesConjunction(_ruleCreator);
    }

    /// <summary>Keeps only sealed classes.</summary>
    public GivenClassesConjunction AreSealed()
    {
        var predicate = ClassPredicatesDefinition.AreSealed();
        _ruleCreator.AddPredicate(predicate);
        return new GivenClassesConjunction(_ruleCreator);
    }

    //Negations

    /// <summary>Keeps only classes that are not abstract.</summary>
    public GivenClassesConjunction AreNotAbstract()
    {
        var predicate = ClassPredicatesDefinition.AreNotAbstract();
        _ruleCreator.AddPredicate(predicate);
        return new GivenClassesConjunction(_ruleCreator);
    }

    /// <summary>Keeps only classes that are not sealed.</summary>
    public GivenClassesConjunction AreNotSealed()
    {
        var predicate = ClassPredicatesDefinition.AreNotSealed();
        _ruleCreator.AddPredicate(predicate);
        return new GivenClassesConjunction(_ruleCreator);
    }
}
}
<|start_filename|>ArchUnitNETTests/Domain/Dependencies/Members/GetterSetterMethodDependencyTests.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using System.Linq;
using ArchUnitNET.Domain;
using ArchUnitNET.Domain.Dependencies;
using ArchUnitNET.Domain.Extensions;
using Xunit;
// ReSharper disable UnusedMember.Local
// ReSharper disable UnusedMember.Local
// ReSharper disable ParameterOnlyUsedForPreconditionCheck.Local
namespace ArchUnitNETTests.Domain.Dependencies.Members
{
/// <summary>
/// Verifies that dependencies recorded on property getters and setters are
/// propagated to the owning property member.
/// </summary>
public class GetterSetterMethodDependencyTests
{
    private readonly Architecture _architecture = StaticTestArchitectures.ArchUnitNETTestArchitecture;

    public GetterSetterMethodDependencyTests()
    {
        // Fail fast if the example class was not loaded into the architecture.
        var getterExampleClass = _architecture.GetClassOfType(typeof(GetterMethodDependencyExamples));
        getterExampleClass.RequiredNotNull();
    }

    [Theory]
    [ClassData(typeof(GetterSetterTestsBuild.SetterTestData))]
    public void AssertSetterMethodDependencies(PropertyMember backedProperty, Class expectedTarget)
    {
        var setter = backedProperty.Setter;
        if (setter != null)
        {
            // Every dependency of the setter must surface on the property itself.
            foreach (var setterDependency in setter.Dependencies)
            {
                Assert.Contains(setterDependency, backedProperty.Dependencies);
            }
        }

        Assert.Contains(expectedTarget, backedProperty.Dependencies.Select(dependency => dependency.Target));
    }

    [Theory]
    [ClassData(typeof(GetterSetterTestsBuild.GetterTestData))]
    public void AssertGetterMethodDependencies(PropertyMember propertyMember, IType mockTargetType,
        MethodCallDependency expectedDependency)
    {
        Assert.NotEmpty(propertyMember.MemberDependencies);
        Assert.Single(propertyMember.GetMethodCallDependencies());

        var methodCallDependencies = propertyMember.GetMethodCallDependencies().ToList();
        Assert.Contains(mockTargetType, methodCallDependencies.Select(dependency => dependency.Target));
        Assert.Contains(expectedDependency.TargetMember.FullName,
            methodCallDependencies.Select(dependency => dependency.TargetMember.FullName));
    }

    [Theory]
    [ClassData(typeof(GetterSetterTestsBuild.AccessMethodDependenciesByPropertyTestData))]
    public void AccessorMethodDependenciesByProperty(PropertyMember accessedProperty, MethodMember accessorMethod)
    {
        foreach (var dependency in accessorMethod.MemberDependencies)
        {
            Assert.Contains(dependency, accessedProperty.MemberDependencies);
        }
    }
}
}
<|start_filename|>ArchUnitNETTests/Domain/Dependencies/Members/GetterSetterTestsBuild.cs<|end_filename|>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
// Copyright 2019 <NAME> <<EMAIL>>
//
// SPDX-License-Identifier: Apache-2.0
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using ArchUnitNET.Domain;
using ArchUnitNET.Domain.Dependencies;
using ArchUnitNET.Domain.Extensions;
using ArchUnitNET.Loader;
using ArchUnitNETTests.Fluent.Extensions;
using Type = System.Type;
namespace ArchUnitNETTests.Domain.Dependencies.Members
{
/// <summary>
/// Builds the xUnit ClassData for the getter/setter dependency tests:
/// (property, expected target) tuples plus stub members for System.Guid calls.
/// </summary>
public class GetterSetterTestsBuild
{
    private static readonly Architecture Architecture =
        new ArchLoader().LoadAssemblies(typeof(GetterMethodDependencyExamples).Assembly).Build();

    // Stubs representing System.Guid and two of its members, since System
    // assemblies are not loaded into the test architecture.
    private static readonly Type GuidType = typeof(Guid);
    private static readonly IType MockGuidStruct = GuidType.CreateStubIType();
    private static readonly MethodInfo NewGuid = GuidType.GetMethods().First(method => method.Name == "NewGuid");
    private static readonly MethodMember MockNewGuid = NewGuid.CreateStubMethodMember();
    private static readonly Type[] ExpectedParameters = {typeof(string)};
    private static readonly ConstructorInfo ConstructGuid = GuidType.GetConstructor(ExpectedParameters);
    private static readonly MethodMember MockConstructorMember = ConstructGuid.CreateStubMethodMember();

    /// <summary>
    /// Resolves a backed property and the class its setter is expected to
    /// depend on. Returns {PropertyMember, Class}.
    /// </summary>
    private static object[] BuildSetterTestData(Type classType, string backedPropertyName,
        Type expectedFieldDependencyTarget)
    {
        if (classType == null)
        {
            throw new ArgumentNullException(nameof(classType));
        }

        if (backedPropertyName == null)
        {
            throw new ArgumentNullException(nameof(backedPropertyName));
        }

        if (expectedFieldDependencyTarget == null)
        {
            throw new ArgumentNullException(nameof(expectedFieldDependencyTarget));
        }

        var baseClass = Architecture.GetClassOfType(classType);
        var backedProperty = baseClass.GetPropertyMembersWithName(backedPropertyName).First();
        var expectedDependencyTargetClass = Architecture.GetClassOfType(expectedFieldDependencyTarget);
        return new object[] {backedProperty, expectedDependencyTargetClass};
    }

    /// <summary>
    /// Resolves a property and builds the method-call dependency its getter is
    /// expected to have. Returns {PropertyMember, IType, MethodCallDependency}.
    /// </summary>
    private static object[] BuildGetterTestData(Type classType, string propertyName,
        IType expectedFieldDependencyTarget, MethodMember expectedTargetMember)
    {
        if (classType == null)
        {
            throw new ArgumentNullException(nameof(classType));
        }

        if (propertyName == null)
        {
            throw new ArgumentNullException(nameof(propertyName));
        }

        if (expectedFieldDependencyTarget == null)
        {
            throw new ArgumentNullException(nameof(expectedFieldDependencyTarget));
        }

        var baseClass = Architecture.GetClassOfType(classType);
        var accessedProperty = baseClass.GetPropertyMembersWithName(propertyName).First();
        var expectedDependency = CreateStubMethodCallDependency(accessedProperty, expectedTargetMember);
        return new object[] {accessedProperty, expectedFieldDependencyTarget, expectedDependency};
    }

    /// <summary>
    /// Resolves a property and its getter or setter accessor method.
    /// Returns {PropertyMember, MethodMember}.
    /// </summary>
    private static object[] BuildAccessMethodTestData(Type classType, string propertyName, MethodForm methodForm)
    {
        if (classType == null)
        {
            throw new ArgumentNullException(nameof(classType));
        }

        if (propertyName == null)
        {
            throw new ArgumentNullException(nameof(propertyName));
        }

        if (methodForm != MethodForm.Getter && methodForm != MethodForm.Setter)
        {
            // Fixed: interpolate the value, not nameof(methodForm), which
            // always printed the literal string "methodForm".
            throw new InvalidInputException(
                $"Given MethodForm {methodForm} is not valid for this test. Please give the form of Getter or Setter.");
        }

        var baseClass = Architecture.GetClassOfType(classType);
        var accessedProperty = baseClass.GetPropertyMembersWithName(propertyName).First();
        var accessorMethod = methodForm == MethodForm.Getter ? accessedProperty.Getter : accessedProperty.Setter;
        return new object[] {accessedProperty, accessorMethod};
    }

    // Builds the expected dependency and registers it on the target's
    // backwards-dependency list, mirroring what the loader would do.
    private static MethodCallDependency CreateStubMethodCallDependency(IMember originMember,
        MethodMember targetMember)
    {
        var methodCallDependency = new MethodCallDependency(originMember,
            new MethodMemberInstance(targetMember, Enumerable.Empty<GenericArgument>(),
                Enumerable.Empty<GenericArgument>()));
        methodCallDependency.TargetMember.MemberBackwardsDependencies.Add(methodCallDependency);
        return methodCallDependency;
    }

    public class SetterTestData : IEnumerable<object[]>
    {
        private readonly List<object[]> _setterTestData = new List<object[]>
        {
            BuildSetterTestData(typeof(SetterMethodDependencyExamples),
                nameof(SetterMethodDependencyExamples.CustomProperty),
                typeof(ChildField)),
            BuildSetterTestData(typeof(SetterMethodDependencyExamples),
                nameof(SetterMethodDependencyExamples.LambdaPair),
                typeof(ChildField)),
            BuildSetterTestData(typeof(SetterMethodDependencyExamples),
                nameof(SetterMethodDependencyExamples.ConstructorPair),
                typeof(PropertyType)),
            BuildSetterTestData(typeof(SetterMethodDependencyExamples),
                nameof(SetterMethodDependencyExamples.ConstructorLambdaPair),
                typeof(PropertyType)),
            BuildSetterTestData(typeof(SetterMethodDependencyExamples),
                nameof(SetterMethodDependencyExamples.MethodPair),
                typeof(PropertyType)),
            BuildSetterTestData(typeof(SetterMethodDependencyExamples),
                nameof(SetterMethodDependencyExamples.MethodLambdaPair),
                typeof(PropertyType))
        };

        public IEnumerator<object[]> GetEnumerator()
        {
            return _setterTestData.GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }
    }

    public class GetterTestData : IEnumerable<object[]>
    {
        private readonly List<object[]> _getterTestData = new List<object[]>
        {
            BuildGetterTestData(typeof(GetterMethodDependencyExamples),
                nameof(GetterMethodDependencyExamples.AcceptedCase),
                MockGuidStruct, MockConstructorMember),
            BuildGetterTestData(typeof(GetterMethodDependencyExamples),
                nameof(GetterMethodDependencyExamples.FirstUnacceptedCase),
                MockGuidStruct, MockNewGuid),
            BuildGetterTestData(typeof(GetterMethodDependencyExamples),
                nameof(GetterMethodDependencyExamples.SecondUnacceptedCase),
                MockGuidStruct, MockNewGuid)
        };

        public IEnumerator<object[]> GetEnumerator()
        {
            return _getterTestData.GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }
    }

    public class AccessMethodDependenciesByPropertyTestData : IEnumerable<object[]>
    {
        private readonly List<object[]> _accessMethodTestData = new List<object[]>
        {
            BuildAccessMethodTestData(typeof(SetterMethodDependencyExamples),
                nameof(SetterMethodDependencyExamples.CustomProperty),
                MethodForm.Setter),
            BuildAccessMethodTestData(typeof(SetterMethodDependencyExamples),
                nameof(SetterMethodDependencyExamples.LambdaPair),
                MethodForm.Setter),
            BuildAccessMethodTestData(typeof(SetterMethodDependencyExamples),
                nameof(SetterMethodDependencyExamples.ConstructorPair),
                MethodForm.Setter),
            BuildAccessMethodTestData(typeof(SetterMethodDependencyExamples),
                nameof(SetterMethodDependencyExamples.ConstructorLambdaPair),
                MethodForm.Setter),
            BuildAccessMethodTestData(typeof(SetterMethodDependencyExamples),
                nameof(SetterMethodDependencyExamples.MethodPair),
                MethodForm.Setter),
            BuildAccessMethodTestData(typeof(SetterMethodDependencyExamples),
                nameof(SetterMethodDependencyExamples.MethodLambdaPair),
                MethodForm.Setter),
            BuildAccessMethodTestData(typeof(GetterMethodDependencyExamples),
                nameof(GetterMethodDependencyExamples.AcceptedCase),
                MethodForm.Getter),
            BuildAccessMethodTestData(typeof(GetterMethodDependencyExamples),
                nameof(GetterMethodDependencyExamples.FirstUnacceptedCase),
                MethodForm.Getter),
            BuildAccessMethodTestData(typeof(GetterMethodDependencyExamples),
                nameof(GetterMethodDependencyExamples.SecondUnacceptedCase),
                MethodForm.Getter)
        };

        public IEnumerator<object[]> GetEnumerator()
        {
            return _accessMethodTestData.GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }
    }
}
} | TNG/ArchUnitNET |
<|start_filename|>src/main/java/io/choerodon/iam/infra/dto/ProjectRelationshipDTO.java<|end_filename|>
package io.choerodon.iam.infra.dto;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.choerodon.mybatis.entity.BaseDTO;
import io.swagger.annotations.ApiModelProperty;
import javax.persistence.*;
import java.util.Date;
/**
 * Entity mapped to table {@code fd_project_relationship}: links a project to a
 * parent project (program) for a validity period.
 *
 * @author superlee
 * @since 2019-04-23
 *
 */
@Table(name = "fd_project_relationship")
public class ProjectRelationshipDTO extends BaseDTO {
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @ApiModelProperty(value = "主键Id")
    private Long id;
    @ApiModelProperty(value = "项目Id")
    private Long projectId;
    @ApiModelProperty(value = "项目组的项目Id")
    private Long parentId;
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    @ApiModelProperty(value = "开始时间")
    private Date startDate;
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    @ApiModelProperty(value = "结束时间")
    private Date endDate;
    // Column is "is_enabled"; the Java property name drops the "is" prefix.
    @Column(name = "is_enabled")
    @ApiModelProperty(value = "是否启用")
    private Boolean enabled;
    @ApiModelProperty(value = "所属ProgramId")
    private Long programId;
    // Transient fields: populated from joins, not persisted in this table.
    @Transient
    @ApiModelProperty(value = "项目Code")
    private String projCode;
    @Transient
    @ApiModelProperty(value = "项目Name")
    private String projName;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Long getProjectId() {
        return projectId;
    }

    public void setProjectId(Long projectId) {
        this.projectId = projectId;
    }

    public Long getParentId() {
        return parentId;
    }

    public void setParentId(Long parentId) {
        this.parentId = parentId;
    }

    public Date getStartDate() {
        return startDate;
    }

    public void setStartDate(Date startDate) {
        this.startDate = startDate;
    }

    public Date getEndDate() {
        return endDate;
    }

    public void setEndDate(Date endDate) {
        this.endDate = endDate;
    }

    public Boolean getEnabled() {
        return enabled;
    }

    public void setEnabled(Boolean enabled) {
        this.enabled = enabled;
    }

    // NOTE(review): @Override implies BaseDTO declares programId accessors —
    // this class shadows them with a persisted column; confirm intended.
    @Override
    public Long getProgramId() {
        return programId;
    }

    @Override
    public void setProgramId(Long programId) {
        this.programId = programId;
    }

    public String getProjCode() {
        return projCode;
    }

    public void setProjCode(String projCode) {
        this.projCode = projCode;
    }

    public String getProjName() {
        return projName;
    }

    public void setProjName(String projName) {
        this.projName = projName;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/validator/UserPasswordValidator.java<|end_filename|>
package io.choerodon.iam.api.validator;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.infra.dto.PasswordPolicyDTO;
import io.choerodon.iam.infra.dto.SystemSettingDTO;
import io.choerodon.iam.infra.mapper.PasswordPolicyMapper;
import io.choerodon.iam.infra.mapper.SystemSettingMapper;
import org.springframework.stereotype.Component;
import java.util.List;
/**
* 当用户组织的密码策略未开启时,如果修改过系统设置,根据系统设置中的密码长度要求,校验用户密码
*
* @author zmf
*/
@Component
public class UserPasswordValidator {
private final PasswordPolicyMapper passwordPolicyMapper;
private final SystemSettingMapper systemSettingMapper;
public UserPasswordValidator(PasswordPolicyMapper passwordPolicyMapper,
SystemSettingMapper systemSettingMapper) {
this.passwordPolicyMapper = passwordPolicyMapper;
this.systemSettingMapper = systemSettingMapper;
}
/**
* 验证密码是否符合系统设置所配置的密码长度范围
*
* @param password 用户的密码
* @param organizationId 用户所属组织 id
* @param isToThrowException 当校验失败时是否抛出异常
* @return 当符合校验时,返回true
*/
public boolean validate(String password, Long organizationId, boolean isToThrowException) {
PasswordPolicyDTO dto = new PasswordPolicyDTO();
dto.setOrganizationId(organizationId);
PasswordPolicyDTO passwordPolicyDTO = passwordPolicyMapper.selectOne(dto);
// 组织启用密码策略时,跳过验证
if (passwordPolicyDTO != null && Boolean.TRUE.equals(passwordPolicyDTO.getEnablePassword())) {
return true;
}
List<SystemSettingDTO> records = systemSettingMapper.selectAll();
SystemSettingDTO setting = records.isEmpty() ? null : records.get(0);
// 系统设置为空时,跳过
if (setting == null || setting.getMinPasswordLength() == null || setting.getMaxPasswordLength() == null) {
return true;
}
password = password.replaceAll(" ", "");
if (password.length() < setting.getMinPasswordLength() || password.length() > setting.getMaxPasswordLength()) {
if (isToThrowException) {
throw new CommonException("error.password.length.out.of.setting", setting.getMinPasswordLength(), setting.getMaxPasswordLength());
}
return false;
}
return true;
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/LookupValueMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.LookupValueDTO;
import io.choerodon.mybatis.common.Mapper;
/**
 * MyBatis mapper exposing the generic CRUD operations inherited from
 * {@code Mapper} for {@code LookupValueDTO} records; no custom queries.
 *
 * @author superlee
 */
public interface LookupValueMapper extends Mapper<LookupValueDTO> {
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/PasswordHistoryMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.PasswordHistoryDTO;
import io.choerodon.mybatis.common.Mapper;
/**
 * MyBatis mapper for {@code PasswordHistoryDTO} records. Exposes only the
 * generic CRUD operations inherited from the common {@code Mapper} base
 * interface; no custom SQL methods are declared here.
 *
 * @author wuguokai
 */
public interface PasswordHistoryMapper extends Mapper<PasswordHistoryDTO> {
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/factory/MessageSourceFactory.java<|end_filename|>
package io.choerodon.iam.infra.factory;
import org.springframework.context.MessageSource;
import org.springframework.context.support.ReloadableResourceBundleMessageSource;
/**
 * Factory that builds a {@link MessageSource} for a given resource-bundle
 * basename. All returned sources are reloadable and read bundles as UTF-8.
 *
 * @author superlee
 */
public class MessageSourceFactory {

    private MessageSourceFactory() {
        // static factory only; no instances
    }

    /**
     * Creates a reloadable, UTF-8 encoded message source backed by the given basename.
     *
     * @param basename the resource bundle basename to resolve messages from
     * @return a configured {@link MessageSource}
     */
    public static MessageSource create(String basename) {
        final ReloadableResourceBundleMessageSource source = new ReloadableResourceBundleMessageSource();
        source.setBasename(basename);
        source.setDefaultEncoding("UTF-8");
        return source;
    }
}
<|start_filename|>react/src/app/iam/stores/global/member-role/MemberRoleStore.js<|end_filename|>
import { action, computed, observable } from 'mobx';
import { axios, store } from '@choerodon/boot';
import querystring from 'query-string';
/**
 * MobX store for the member-role management page.
 * When rewriting the contents of
 * src/app/iam/containers/global/member-role/MemberRoleType.js, the pieces it
 * uses can be migrated into this store step by step.
 */
@store('MemberRoleStore')
class MemberRoleStore {
/**
 * Whether an Excel import is currently in progress.
 */
@observable uploading = false;
@observable currentMode = 'user'; // selected mode, defaults to 'user'
// @observable isShowMember = true; // tab defaults to "member"
// Info about the latest upload; `noData: true` means no history yet.
@observable uploadInfo = {
noData: true,
};
@observable roleData = []; // all roles available in user mode
@observable roleMemberDatas = []; // user-role table data
@observable clientRoleMemberDatas = []; // client-role table data
@observable usersData = [];
@observable clientsData = [];
@action setUsersData(data) {
this.usersData = data;
}
@computed get getUsersData() {
return this.usersData;
}
@action setClientsData(data) {
this.clientsData = data;
}
@computed get getClientsData() {
return this.clientsData;
}
@action setCurrentMode(data) {
this.currentMode = data;
}
@computed
get getRoleData() {
return this.roleData;
}
@action
setRoleData(data) {
this.roleData = data;
}
@computed
get getRoleMemberDatas() {
return this.roleMemberDatas;
}
@action
setRoleMemberDatas(data) {
this.roleMemberDatas = data;
}
@computed
get getClientRoleMemberDatas() {
return this.clientRoleMemberDatas;
}
@action
setClientRoleMemberDatas(data) {
this.clientRoleMemberDatas = data;
}
@computed
get getUploading() {
return this.uploading;
}
@action
setUploading(flag) {
this.uploading = flag;
}
@computed
get getUploadInfo() {
return this.uploadInfo;
}
@action
setUploadInfo(info) {
this.uploadInfo = info;
}
/**
 * Derives the API gateway prefix, i18n code prefix and all REST endpoint URLs
 * for the current menu level (site / organization / project).
 *
 * @param data level (`type`), id and name taken from AppState.currentMenuType
 * @param userId id of the current user (used for upload-history queries)
 */
@action
loadCurrentMenuType(data, userId) {
this.data = data;
this.userId = userId;
const { type, id, name } = this.data;
let apiGetway = `/iam/v1/${type}s/${id}`;
let codePrefix;
switch (type) {
case 'organization':
codePrefix = 'organization';
break;
case 'project':
codePrefix = 'project';
break;
case 'site':
// site level has no id segment in the gateway path
codePrefix = 'global';
apiGetway = `/iam/v1/${type}`;
break;
default:
break;
}
this.code = `${codePrefix}.memberrole`;
this.values = { name: name || 'Choerodon' };
this.urlUsers = `${apiGetway}/role_members/users`;
this.urlRoles = `${apiGetway}/role_members/users/roles`;
this.urlRoleMember = `${apiGetway}/role_members`;
this.urlMemberRole = `${apiGetway}/member_role`;
this.urlDeleteMember = `${apiGetway}/role_members/delete`;
this.urlUserCount = `${apiGetway}/role_members/users/count`;
this.roleId = id || 0;
}
/**
 * Downloads the member-role import template as a binary array buffer.
 */
downloadTemplate() {
return axios.get(`${this.urlRoleMember}/download_templates`, {
responseType: 'arraybuffer',
});
}
// Polls the latest upload history for the current user; the `t` query param
// is a cache-busting timestamp. `uploading` stays true until `endTime` is set.
@action
handleUploadInfo = () => {
const { type, id, name } = this.data;
const timestamp = new Date().getTime();
axios.get(`${this.urlMemberRole}/users/${this.userId}/upload/history?t=${timestamp}`).then((data) => {
if (!data) {
this.setUploadInfo({ noData: true });
this.setUploading(false);
return;
}
this.setUploadInfo(data);
this.setUploading(!data.endTime);
});
}
loadUsers = (queryObj = { sort: 'id' }) => axios.get(`/iam/v1/all/users?${querystring.stringify(queryObj)}`);
loadClients = (queryObj = { sort: 'id' }) => axios.get(`/iam/v1/all/clients?${querystring.stringify(queryObj)}`);
}
const memberRoleStore = new MemberRoleStore();
export default memberRoleStore;
<|start_filename|>react/src/app/iam/containers/organization/ldap/LDAP.js<|end_filename|>
import React, { Component } from 'react';
import { Button, Form, Icon, Input, Modal, Popover, Radio, Select, Tooltip } from 'choerodon-ui';
import { injectIntl, FormattedMessage } from 'react-intl';
import { Content, Header, Page, Permission } from '@choerodon/boot';
import { withRouter } from 'react-router-dom';
import { inject, observer } from 'mobx-react';
import TestLdap from './TestLdap';
import LoadingBar from '../../../components/loadingBar/index';
import './LDAP.scss';
import '../../../common/ConfirmModal.scss';
const { Sidebar } = Modal;
const RadioGroup = Radio.Group;
const FormItem = Form.Item;
const { Option } = Select;
const intlPrefix = 'organization.ldap';
const formItemLayout = {
labelCol: {
xs: { span: 24 },
sm: { span: 100 },
},
wrapperCol: {
xs: { span: 24 },
sm: { span: 9 },
},
};
/**
 * Organization-level LDAP settings page: edit server and user-attribute
 * settings, enable/disable LDAP, test the connection, and trigger/inspect
 * user synchronization via a sidebar.
 */
@Form.create({})
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class LDAP extends Component {
constructor(props) {
super(props);
this.loadLDAP = this.loadLDAP.bind(this);
this.state = this.getInitState();
}
componentDidMount() {
this.loadLDAP();
}
getInitState() {
return {
sidebar: false,
open: false,
saving: false,
organizationId: this.props.AppState.currentMenuType.id,
value: '',
showServer: true,
showUser: true,
showAdminPwd: false,
showWhich: '',
ldapAdminData: '',
};
}
/* Fetch sync-user progress for the current LDAP config; the confirm button
   stays in loading state until syncEndTime is set. */
getSyncInfo() {
const { LDAPStore } = this.props;
const { organizationId } = this.state;
const ldapData = LDAPStore.getLDAPData;
LDAPStore.getSyncInfo(organizationId, ldapData.id).then((data) => {
if (data.failed) {
Choerodon.prompt(data.message);
} else {
LDAPStore.setIsConfirmLoading(data && !data.syncEndTime);
LDAPStore.setSyncData(data);
}
});
}
/**
 * Help-icon suffix shown inside an Input.
 * @param text tooltip text
 */
getSuffix(text) {
return (
<Popover
overlayStyle={{ maxWidth: '180px', wordBreak: 'break-all' }}
className="routePop"
placement="right"
trigger="hover"
content={text}
>
<Icon type="help" />
</Popover>
);
}
/**
 * Help-icon suffix appended after a form label.
 * @param label label text
 * @param tip tooltip text
 */
labelSuffix(label, tip) {
return (
<div className="labelSuffix">
<span>
{label}
</span>
<Popover
overlayStyle={{ maxWidth: '180px' }}
placement="right"
trigger="hover"
content={tip}
>
<Icon type="help" />
</Popover>
</div>
);
}
/* Load the LDAP configuration for the current organization. */
loadLDAP = () => {
const { LDAPStore, intl } = this.props;
const { organizationId } = this.state;
LDAPStore.loadLDAP(organizationId).catch((error) => {
LDAPStore.cleanData();
const { response } = error;
if (response) {
const { status } = response;
const mess = response.data.message;
switch (status) {
case 400:
Choerodon.prompt(mess);
break;
case 404:
Choerodon.prompt(intl.formatMessage({ id: `${intlPrefix}.notfound.msg` }));
break;
default:
break;
}
LDAPStore.setIsLoading(false);
}
});
this.setState({
saving: false,
});
};
/* Refresh */
reload = () => {
this.loadLDAP();
};
// Navigate to the sync-record history page for this LDAP config.
redirectSyncRecord = () => {
const { AppState, LDAPStore } = this.props;
const ldapData = LDAPStore.getLDAPData;
const menu = AppState.currentMenuType;
const { type, id, name } = menu;
this.props.history.push(`/iam/ldap/sync-record/${ldapData.id}?type=${type}&id=${id}&name=${name}&organizationId=${id}`);
}
/* Open the sidebar; `status` selects its content ('connect', 'adminConnect' or 'sync'). */
openSidebar(status) {
const { LDAPStore } = this.props;
LDAPStore.setIsShowResult(false);
LDAPStore.setIsSyncLoading(false);
if (this.TestLdap) {
const { resetFields } = this.TestLdap.props.form;
resetFields();
}
this.setState({
sidebar: true,
showWhich: status,
}, () => {
if (status === 'connect') {
LDAPStore.setIsConfirmLoading(false);
} else if (status === 'sync') {
this.getSyncInfo();
}
});
}
/* Close the sidebar. */
closeSidebar = () => {
this.setState({
sidebar: false,
}, () => {
this.TestLdap.closeSyncSidebar();
});
};
/* Toggle visibility of the server-settings panel. */
isShowServerSetting = () => {
this.setState({
showServer: !this.state.showServer,
});
}
/* Toggle visibility of the user-attribute settings panel. */
isShowUserSetting = () => {
this.setState({
showUser: !this.state.showUser,
});
}
/* When the SSL radio changes, reset the port field to the default for the
   new choice. NOTE(review): getFieldValue('useSSL') appears to return the
   value from *before* the change here, so previous 'Y' (SSL, 636) maps to
   the non-SSL default 389 — confirm this timing assumption against the
   form library's onChange semantics. */
changeSsl() {
const { getFieldValue, setFieldsValue } = this.props.form;
setFieldsValue({
port: getFieldValue('useSSL') === 'Y' ? '389' : '636',
});
}
// Enable LDAP directly, or ask for confirmation before disabling it.
enableLdap = () => {
const { LDAPStore, intl } = this.props;
const { organizationId } = this.state;
const ldapData = LDAPStore.getLDAPData;
if (ldapData.enabled) {
Modal.confirm({
className: 'c7n-iam-confirm-modal',
title: intl.formatMessage({ id: `${intlPrefix}.disable.title` }),
content: intl.formatMessage({ id: `${intlPrefix}.disable.content` }),
onOk: () => LDAPStore.disabledLdap(organizationId, ldapData.id).then((data) => {
if (data.failed) {
Choerodon.prompt(data.message);
} else {
Choerodon.prompt(intl.formatMessage({ id: 'disable.success' }));
LDAPStore.setLDAPData(data);
}
}),
});
} else {
LDAPStore.enabledLdap(organizationId, ldapData.id).then((data) => {
if (data.failed) {
Choerodon.prompt(data.message);
} else {
Choerodon.prompt(intl.formatMessage({ id: 'enable.success' }));
LDAPStore.setLDAPData(data);
}
});
}
}
/* Form submit: save the config; when LDAP is enabled, immediately run a
   connection test with the saved values in the sidebar. */
handleSubmit = (e) => {
e.preventDefault();
const { AppState } = this.props;
this.setState({
showServer: true,
showUser: true,
});
this.props.form.validateFieldsAndScroll((err, values) => {
if (!err) {
const { LDAPStore, intl } = this.props;
const original = LDAPStore.getLDAPData;
const ldapStatus = values.useSSL === 'Y';
// NOTE(review): 'ladp' is a long-standing typo for 'ldap'; kept as-is.
const ladp = {
...values,
id: original.id,
objectVersionNumber: original.objectVersionNumber,
realNameField: values.realNameField || null,
phoneField: values.phoneField || null,
};
// convert the radio's 'Y'/'N' into the boolean the API expects
ladp.useSSL = ldapStatus;
if (!ladp.port) {
ladp.port = ladp.useSSL ? 636 : 389;
}
ladp.name = AppState.currentMenuType.name;
ladp.organizationId = AppState.currentMenuType.organizationId;
ladp.enabled = LDAPStore.getLDAPData.enabled;
this.setState({
saving: true,
});
LDAPStore.updateLDAP(this.state.organizationId, LDAPStore.getLDAPData.id, ladp)
.then((data) => {
if (!data.failed) {
LDAPStore.setLDAPData(data);
Choerodon.prompt(intl.formatMessage({ id: 'save.success' }));
this.setState({
saving: false,
});
if (LDAPStore.getLDAPData.enabled) {
LDAPStore.setIsConnectLoading(true);
LDAPStore.setIsConfirmLoading(true);
this.openSidebar('adminConnect');
LDAPStore.testConnect(this.state.organizationId, LDAPStore.getLDAPData.id, ladp)
.then((res) => {
if (res.failed) {
Choerodon.prompt(res.message);
} else {
LDAPStore.setTestData(res);
LDAPStore.setIsConnectLoading(false);
LDAPStore.setIsConfirmLoading(false);
}
});
}
} else {
this.setState({
saving: false,
});
Choerodon.prompt(data.message);
}
})
.catch((error) => {
Choerodon.handleResponseError(error);
this.setState({
saving: false,
});
});
}
});
};
/* Render the sidebar title. */
renderSidebarTitle() {
const { intl } = this.props;
const { showWhich } = this.state;
if (showWhich === 'connect' || showWhich === 'adminConnect') {
return intl.formatMessage({ id: `${intlPrefix}.connect` });
} else {
return intl.formatMessage({ id: `${intlPrefix}.syncuser` });
}
}
/* Render the sidebar content. */
renderSidebarContent() {
const { sidebar, showWhich } = this.state;
return (
<TestLdap
sidebar={sidebar}
showWhich={showWhich}
onRef={(node) => {
this.TestLdap = node;
}}
onAbort={() => { this.closeSidebar(); this.getSyncInfo(); }}
/>
);
}
render() {
const { LDAPStore, AppState, form, intl } = this.props;
const { saving, sidebar, showWhich } = this.state;
const menuType = AppState.currentMenuType;
const organizationName = menuType.name;
const ldapData = LDAPStore.getLDAPData;
const { getFieldDecorator } = form;
const inputWidth = 488;
// localized tooltip texts for the help icons
const tips = {
hostname: intl.formatMessage({ id: `${intlPrefix}.hostname.tip` }),
ssl: intl.formatMessage({ id: `${intlPrefix}.ssl.tip` }),
basedn: intl.formatMessage({ id: `${intlPrefix}.basedn.tip` }),
loginname: intl.formatMessage({ id: `${intlPrefix}.loginname.tip` }),
username: intl.formatMessage({ id: `${intlPrefix}.username.tip` }),
customFilter: intl.formatMessage({ id: `${intlPrefix}.custom-filter.tip` }),
objectclass: intl.formatMessage({ id: `${intlPrefix}.objectclass.tip` }),
uuid: intl.formatMessage({ id: `${intlPrefix}.uuid.tip` }),
};
const mainContent = LDAPStore.getIsLoading ? <LoadingBar /> : (
<div>
<Form onSubmit={this.handleSubmit} layout="vertical" className="ldapForm">
<h3 className="c7n-smssetting-container-title">服务器设置</h3>
<div className="c7n-ldapsetting-container-wrap" style={{ display: this.state.showServer ? 'block' : 'none' }}>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('directoryType', {
rules: [{
required: true,
message: intl.formatMessage({ id: `${intlPrefix}.directorytype.require.msg` }),
}],
initialValue: ldapData.directoryType ? ldapData.directoryType : undefined,
})(
<Select
getPopupContainer={() => document.getElementsByClassName('page-content')[0]}
label={intl.formatMessage({ id: `${intlPrefix}.directorytype` })}
style={{ width: inputWidth }}
>
<Option value="Microsoft Active Directory">
<Tooltip
placement="right"
title={intl.formatMessage({ id: `${intlPrefix}.directorytype.mad.tip` })}
overlayStyle={{ maxWidth: '300px' }}
>
<span style={{ display: 'inline-block', width: '100%' }}>Microsoft Active Directory</span>
</Tooltip>
</Option>
<Option value="OpenLDAP">
<Tooltip
placement="right"
title={intl.formatMessage({ id: `${intlPrefix}.directorytype.openldap.tip` })}
overlayStyle={{ maxWidth: '300px' }}
>
<span style={{ display: 'inline-block', width: '100%' }}>OpenLDAP</span>
</Tooltip>
</Option>
</Select>,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('serverAddress', {
rules: [{
required: true,
whitespace: true,
message: intl.formatMessage({ id: `${intlPrefix}.serveraddress.require.msg` }),
}],
initialValue: ldapData.serverAddress || undefined,
})(
<Input label={intl.formatMessage({ id: `${intlPrefix}.serveraddress` })} style={{ width: inputWidth }} suffix={this.getSuffix(tips.hostname)} autoComplete="off" />,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('useSSL', {
initialValue: ldapData.useSSL ? 'Y' : 'N',
})(
<RadioGroup
className="ldapRadioGroup"
label={this.labelSuffix(intl.formatMessage({ id: `${intlPrefix}.usessl.suffix` }), tips.ssl)}
onChange={this.changeSsl.bind(this)}
>
<Radio value="Y"><FormattedMessage id="yes" /></Radio>
<Radio value="N"><FormattedMessage id="no" /></Radio>
</RadioGroup>,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('sagaBatchSize', {
rules: [{
pattern: /^[1-9]\d*$/,
required: true,
message: intl.formatMessage({ id: `${intlPrefix}.saga-batch-size.msg` }),
}],
initialValue: ldapData.sagaBatchSize || '500',
})(
<Input label={intl.formatMessage({ id: `${intlPrefix}.saga-batch-size` })} style={{ width: inputWidth }} autoComplete="off" />,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('connectionTimeout', {
rules: [{
pattern: /^[1-9]\d*$/,
required: true,
message: intl.formatMessage({ id: `${intlPrefix}.connection-timeout.msg` }),
}],
initialValue: ldapData.connectionTimeout || '10',
})(
<Input label={intl.formatMessage({ id: `${intlPrefix}.connection-timeout` })} style={{ width: inputWidth }} autoComplete="off" suffix={intl.formatMessage({ id: 'second' })} />,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('port', {
rules: [{
pattern: /^[1-9]\d*$/,
message: intl.formatMessage({ id: `${intlPrefix}.port.pattern.msg` }),
}],
initialValue: ldapData.port || (ldapData.useSSL ? '636' : '389'),
})(
<Input label={intl.formatMessage({ id: `${intlPrefix}.port` })} style={{ width: inputWidth }} autoComplete="off" />,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('baseDn', {
initialValue: ldapData.baseDn || undefined,
})(
<Input label={intl.formatMessage({ id: `${intlPrefix}.basedn` })} suffix={this.getSuffix(tips.basedn)} style={{ width: inputWidth }} autoComplete="off" />,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('account', {
rules: [{
required: true,
message: intl.formatMessage({ id: `${intlPrefix}.admin.loginname.msg` }),
}],
initialValue: ldapData.account || undefined,
})(
<Input label={intl.formatMessage({ id: `${intlPrefix}.admin.loginname` })} suffix={this.getSuffix(tips.loginname)} style={{ width: inputWidth }} autoComplete="off" />,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('password', {
rules: [{
required: true,
message: intl.formatMessage({ id: `${intlPrefix}.admin.password.msg` }),
}],
initialValue: ldapData.password || undefined,
})(
<Input label={intl.formatMessage({ id: `${intlPrefix}.admin.password` })} type="password" style={{ width: inputWidth }} autoComplete="off" />,
)}
</FormItem>
</div>
<h3 className="c7n-smssetting-container-title">用户属性设置</h3>
<div className="c7n-ldapsetting-container-wrap" style={{ display: this.state.showUser ? 'block' : 'none' }}>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('objectClass', {
rules: [{
required: true,
whitespace: true,
message: intl.formatMessage({ id: `${intlPrefix}.objectclass.require.msg` }),
}],
initialValue: ldapData.objectClass || undefined,
})(
<Input label={intl.formatMessage({ id: `${intlPrefix}.objectclass` })} suffix={this.getSuffix(tips.objectclass)} style={{ width: inputWidth }} autoComplete="off" />,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('loginNameField', {
rules: [{
required: true,
whitespace: true,
message: intl.formatMessage({ id: `${intlPrefix}.loginname.require.msg` }),
}],
initialValue: ldapData.loginNameField || undefined,
})(
<Input label={intl.formatMessage({ id: `${intlPrefix}.loginname` })} style={{ width: inputWidth }} autoComplete="off" />,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('emailField', {
rules: [{
required: true,
whitespace: true,
message: intl.formatMessage({ id: `${intlPrefix}.email.require.msg` }),
}],
initialValue: ldapData.emailField || undefined,
})(
<Input label={intl.formatMessage({ id: `${intlPrefix}.email` })} style={{ width: inputWidth }} autoComplete="off" />,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('realNameField', {
initialValue: ldapData.realNameField || undefined,
})(
<Input label={intl.formatMessage({ id: `${intlPrefix}.realname` })} style={{ width: inputWidth }} autoComplete="off" />,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('phoneField', {
initialValue: ldapData.phoneField || undefined,
})(
<Input label={intl.formatMessage({ id: `${intlPrefix}.phone` })} style={{ width: inputWidth }} autoComplete="off" />,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('uuidField', {
rules: [{
required: true,
whitespace: true,
message: intl.formatMessage({ id: `${intlPrefix}.uuid.required.msg` }),
}],
initialValue: ldapData.uuidField || undefined,
})(
<Input label={intl.formatMessage({ id: `${intlPrefix}.uuid` })} suffix={this.getSuffix(tips.uuid)} style={{ width: inputWidth }} autoComplete="off" />,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('customFilter', {
rules: [{
pattern: /^\(.*\)$/,
message: intl.formatMessage({ id: `${intlPrefix}.custom-filter.msg` }),
}],
initialValue: ldapData.customFilter || undefined,
})(
<Input label={intl.formatMessage({ id: `${intlPrefix}.custom-filter` })} suffix={this.getSuffix(tips.customFilter)} style={{ width: inputWidth }} autoComplete="off" />,
)}
</FormItem>
</div>
<div className="divider" />
<Permission service={['iam-service.ldap.update']}>
<div className="btnGroup">
<Button
funcType="raised"
type="primary"
htmlType="submit"
loading={saving}
>
<FormattedMessage id={ldapData.enabled ? `${intlPrefix}.saveandtest` : 'save'} />
</Button>
<Button
funcType="raised"
onClick={() => {
const { resetFields } = this.props.form;
resetFields();
}}
style={{ color: '#3F51B5' }}
disabled={saving}
>
<FormattedMessage id="cancel" />
</Button>
</div>
</Permission>
</Form>
</div>
);
return (
<Page
service={[
'iam-service.ldap.create',
'iam-service.ldap.queryByOrgId',
'iam-service.ldap.disableLdap',
'iam-service.ldap.enableLdap',
'iam-service.ldap.syncUsers',
'iam-service.ldap.testConnect',
'iam-service.ldap.update',
'iam-service.ldap.pagingQueryHistories',
'iam-service.ldap.pagingQueryErrorUsers',
]}
>
<Header title={<FormattedMessage id={`${intlPrefix}.header.title`} />}>
<Button
icon={ldapData && ldapData.enabled ? 'remove_circle_outline' : 'finished'}
onClick={this.enableLdap}
>
<FormattedMessage id={ldapData && ldapData.enabled ? 'disable' : 'enable'} />
</Button>
<Permission
service={['iam-service.ldap.testConnect']}
>
<Button
icon="low_priority"
onClick={this.openSidebar.bind(this, 'connect')}
disabled={!(ldapData && ldapData.enabled)}
>
<FormattedMessage id={`${intlPrefix}.connect`} />
</Button>
</Permission>
<Permission
service={['iam-service.ldap.syncUsers']}
>
<Button
icon="sync_user"
onClick={this.openSidebar.bind(this, 'sync')}
disabled={!(ldapData && ldapData.enabled)}
>
<FormattedMessage id={`${intlPrefix}.syncuser`} />
</Button>
</Permission>
<Permission
service={['iam-service.ldap.pagingQueryHistories']}
>
<Button
icon="sync_records"
onClick={this.redirectSyncRecord}
>
<FormattedMessage id={`${intlPrefix}.record.header.title`} />
</Button>
</Permission>
<Button
onClick={this.reload}
icon="refresh"
>
<FormattedMessage id="refresh" />
</Button>
</Header>
<Content
code={intlPrefix}
values={{ name: organizationName }}
>
<div className="ldapContainer">
{mainContent}
</div>
<Sidebar
className="connectContainer"
title={this.renderSidebarTitle()}
visible={sidebar}
okText={<FormattedMessage id={showWhich === 'sync' ? `${intlPrefix}.sync` : `${intlPrefix}.test`} />}
cancelText={<FormattedMessage id={showWhich === 'sync' ? 'return' : 'cancel'} />}
onOk={e => this.TestLdap.handleSubmit(e)}
onCancel={this.closeSidebar}
confirmLoading={LDAPStore.confirmLoading}
alwaysCanCancel
>
{this.renderSidebarContent()}
</Sidebar>
</Content>
</Page>
);
}
}
<|start_filename|>src/test/groovy/io/choerodon/iam/app/service/impl/LdapServiceImplSpec.groovy<|end_filename|>
package io.choerodon.iam.app.service.impl
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.app.service.LdapService
import io.choerodon.iam.infra.asserts.LdapAssertHelper
import io.choerodon.iam.infra.asserts.OrganizationAssertHelper
import io.choerodon.iam.infra.common.utils.ldap.LdapSyncUserTask
import io.choerodon.iam.infra.dto.LdapDTO
import io.choerodon.iam.infra.dto.OrganizationDTO
import io.choerodon.iam.infra.mapper.LdapErrorUserMapper
import io.choerodon.iam.infra.mapper.LdapHistoryMapper
import io.choerodon.iam.infra.mapper.LdapMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * Spec for LdapServiceImpl. Real mappers and task beans come from the Spring
 * test context; LdapAssertHelper is stubbed so any lookup returns the fixed
 * LdapDTO built in setup().
 *
 * @author dengyouquan
 */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class LdapServiceImplSpec extends Specification {
@Autowired
OrganizationAssertHelper organizationAssertHelper
LdapAssertHelper ldapAssertHelper = Mock(LdapAssertHelper)
@Autowired
LdapMapper ldapMapper
@Autowired
LdapSyncUserTask ldapSyncUserTask
@Autowired
LdapSyncUserTask.FinishFallback finishFallback
@Autowired
LdapErrorUserMapper ldapErrorUserMapper
@Autowired
LdapHistoryMapper ldapHistoryMapper
LdapService ldapService
def setup() {
// Build the service under test by hand so the mocked assert helper is used.
ldapService = new LdapServiceImpl(organizationAssertHelper, ldapAssertHelper,
ldapMapper, ldapSyncUserTask, finishFallback, ldapErrorUserMapper, ldapHistoryMapper)
// Fixed LDAP configuration returned by the stubbed helper for any lookup.
LdapDTO ldapDTO = new LdapDTO()
ldapDTO.setServerAddress("ldap://acfun.hand.com")
ldapDTO.setPort("389")
ldapDTO.setUseSSL(false)
ldapDTO.setDirectoryType("OpenLDAP")
ldapDTO.setObjectClass("person")
ldapDTO.setRealNameField("displayName")
ldapDTO.setLoginNameField("employeeNumber")
ldapDTO.setEmailField("mail")
ldapDTO.setPhoneField("mobile")
ldapDTO.setBaseDn("ou=emp,dc=hand,dc=com")
ldapDTO.setConnectionTimeout(1000)
ldapAssertHelper.ldapNotExisted(_, _) >> ldapDTO
}
// NOTE(review): this test body is fully commented out (disabled); the spec
// currently only verifies that the context and setup() complete.
def "SyncLdapUser"() {
// given: "构造请求参数"
// Long organizationId = 1L
// Long id = 1L
//
// when: "调用方法"
// ldapService.syncLdapUser(organizationId, id)
//
// then: "校验结果"
// true
}
}
<|start_filename|>src/test/groovy/io/choerodon/iam/app/service/impl/ParsePermissionServiceImplSpec.groovy<|end_filename|>
package io.choerodon.iam.app.service.impl
import io.choerodon.eureka.event.EurekaEventPayload
import io.choerodon.iam.infra.dto.PermissionDTO
import io.choerodon.iam.infra.dto.RoleDTO
import io.choerodon.iam.infra.mapper.PermissionMapper
import io.choerodon.iam.infra.mapper.RoleMapper
import io.choerodon.iam.infra.mapper.RolePermissionMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.http.HttpStatus
import org.springframework.http.ResponseEntity
import org.springframework.transaction.annotation.Transactional
import org.springframework.web.client.RestTemplate
import spock.lang.Specification
/**
 * Spec for ParsePermissionServiceImpl#parser: feeds a swagger.json fixture
 * wrapped in a EurekaEventPayload through the permission parser.
 *
 * NOTE(review): unlike the sibling specs, this class has no @SpringBootTest /
 * @Import annotations, so the @Autowired mapper fields are never injected and
 * the field initializer below constructs the service with null mappers —
 * confirm whether this is intentional (e.g. the parser path under test may
 * not touch them) or the Spring test annotations are missing.
 *
 * @author dengyouquan
 */
class ParsePermissionServiceImplSpec extends Specification {
@Autowired
PermissionMapper permissionMapper
@Autowired
RolePermissionMapper rolePermissionMapper
@Autowired
RoleMapper roleMapper
private ParsePermissionServiceImpl parsePermissionService =
new ParsePermissionServiceImpl(permissionMapper, rolePermissionMapper, roleMapper)
@Transactional
def "Parser"() {
given: "build the request payload"
// Sample permission the parser is expected to handle.
PermissionDTO permissionE = new PermissionDTO()
permissionE.setCode("code")
permissionE.setPath("path")
permissionE.setMethod("method")
permissionE.setResourceLevel("site")
permissionE.setDescription("description")
permissionE.setAction("action")
permissionE.setController("resource")
permissionE.setPublicAccess(true)
permissionE.setLoginAccess(true)
permissionE.setWithin(true)
permissionE.setServiceCode("serviceName")
// Swagger document fixture describing the service's endpoints.
def file = new File(this.class.getResource('/templates/swagger.json').toURI())
String swaggerJson = file.getText("UTF-8")
EurekaEventPayload instanceE = new EurekaEventPayload()
instanceE.setAppName("iam-service")
instanceE.setInstanceAddress("172.31.176.1")
instanceE.setStatus("UP")
instanceE.setVersion("1.0")
instanceE.setApiData(swaggerJson)
List<RoleDTO> roleList = new ArrayList<>()
RoleDTO roleDO = new RoleDTO()
roleDO.setResourceLevel("project")
roleDO.setCode("role/project/default/administrator")
roleList.add(roleDO)
and: 'mock restTemplate'
// Any HTTP fetch made by the parser returns an empty 200 response.
def restTemplate = Mock(RestTemplate) {
getForEntity(_,_) >> new ResponseEntity<String>('', HttpStatus.OK)
}
parsePermissionService.setRestTemplate(restTemplate)
when: "invoke the method"
parsePermissionService.parser(instanceE)
then: "verify the result (smoke check only: no exception thrown)"
true
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/ProjectTypeMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.ProjectTypeDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
public interface ProjectTypeMapper extends Mapper<ProjectTypeDTO> {
/**
 * Fuzzy-matches project types by name, code, or a free-text param
 * (match semantics are defined in the corresponding XML mapper statement).
 *
 * @param name  name fragment to match
 * @param code  code fragment to match
 * @param param free-text fragment matched across searchable columns
 * @return project types matching the given criteria
 */
List<ProjectTypeDTO> fuzzyQuery(@Param("name") String name,
@Param("code") String code,
@Param("param") String param);
}
<|start_filename|>src/test/groovy/io/choerodon/iam/api/validator/UserPasswordValidatorSpec.groovy<|end_filename|>
package io.choerodon.iam.api.validator
import io.choerodon.core.exception.CommonException
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.infra.dto.PasswordPolicyDTO
import io.choerodon.iam.infra.dto.SystemSettingDTO
import io.choerodon.iam.infra.mapper.PasswordPolicyMapper
import io.choerodon.iam.infra.mapper.SystemSettingMapper
import org.mockito.Mockito
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * Spec for UserPasswordValidator: with real mappers from the test context,
 * a short password should still pass when no policy/limits forbid it.
 *
 * @author zmf
 */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class UserPasswordValidatorSpec extends Specification {
@Autowired
PasswordPolicyMapper passwordPolicyMapper
@Autowired
SystemSettingMapper settingMapper
def "Validate"() {
given: 'configure the validator'
UserPasswordValidator userPasswordValidator = new UserPasswordValidator(passwordPolicyMapper, settingMapper)
when:
// isToThrowException = false, so a failure would return false rather than throw
boolean result = userPasswordValidator.validate("12", 1L, false)
then: 'verify the result'
result
}
}
<|start_filename|>react/src/app/iam/stores/dashboard/organizationStatistics/OrganizationStatistics.js<|end_filename|>
import { action, computed, observable } from 'mobx';
import { axios, store } from '@choerodon/boot';
// MobX store for the organization-statistics dashboard card: holds the
// organization list, the currently selected organization, and the pie-chart
// data of its projects grouped by type.
@store('OrganizationStatisticsStore')
class OrganizationStatisticsStore {
@observable loading = true;
@observable organizations = [];
@observable currentorg = null; // id of the currently selected organization
@observable chartData = {};
@action setLoading(flag) {
this.loading = flag;
}
@action setChartData(data) {
this.chartData = data;
}
@computed get getChartData() {
return this.chartData;
}
@action setOrganizations(data) {
this.organizations = data;
}
@computed get getOrganizations() {
return this.organizations;
}
@action setCurrentOrg(data) {
this.currentorg = data;
}
@computed get getCurrentOrg() {
return this.currentorg;
}
// Loads up to 500 organizations, selects the first one and fetches its chart.
@action loadOrganizations = () => axios.get('/iam/v1/organizations/all?size=500')
.then(action((data) => {
this.organizations = data.list;
if (data.list.length) {
this.currentorg = data.list[0].id;
this.loadPie(data.list[0].id);
}
}))
// Loads the projects-by-type breakdown used by the pie chart.
@action loadPie = id => axios.get(`/iam/v1/organizations/${id}/projects/under_the_type`)
.then(action(
(data) => {
this.chartData = data;
this.loading = false;
},
))
}
const organizationStatisticsStore = new OrganizationStatisticsStore();
export default organizationStatisticsStore;
<|start_filename|>src/main/java/io/choerodon/iam/api/validator/UserValidator.java<|end_filename|>
package io.choerodon.iam.api.validator;
/**
 * Holder for validation group marker interfaces, presumably used as Bean
 * Validation groups when validating user payloads — confirm against the
 * constraint annotations on the user DTOs.
 *
 * @author flyleft
 * @date 2018/5/30
 */
public class UserValidator {
// Group for full user validation; the fields listed below are the ones
// expected to be constrained under this group.
public interface UserGroup {
//loginName
//email
//phone
//realName
}
// Group for user-info updates; only contact fields are constrained.
public interface UserInfoGroup {
//email
//phone
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/dto/ApplicationDTO.java<|end_filename|>
package io.choerodon.iam.infra.dto;
import io.choerodon.mybatis.entity.BaseDTO;
import io.swagger.annotations.ApiModelProperty;
import org.hibernate.validator.constraints.Length;
import javax.persistence.Column;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.Pattern;
import java.util.List;
/**
 * Data object mapped to the {@code iam_application} table: an application that
 * belongs to an organization, may be bound to a project, and may act as a
 * "combination" application aggregating descendant applications.
 *
 * @author superlee
 * @since 2019-04-23
 */
@Table(name = "iam_application")
public class ApplicationDTO extends BaseDTO {
// Code format: starts with a lowercase letter, then lowercase letters, digits
// or hyphens, and must not end with a hyphen.
private static final String CODE_REGULAR_EXPRESSION = "^[a-z]([-a-z0-9]*[a-z0-9])?$";
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Long id;
// Owning organization; set from the path variable by the controllers.
private Long organizationId;
private Long projectId;
@Length(min = 1, max = 20, message = "error.application.name.length")
@NotEmpty(message = "error.application.name.empty")
private String name;
@Pattern(regexp = CODE_REGULAR_EXPRESSION, message = "error.application.code.illegal")
@NotEmpty(message = "error.application.code.empty")
private String code;
@Column(name = "is_enabled")
private Boolean enabled;
@NotEmpty(message = "error.application.applicationCategory.empty")
private String applicationCategory;
private String applicationType;
// Token used by ApplicationTokenController to resolve an application.
private String applicationToken;
@Column(name = "is_abnormal")
private Boolean abnormal;
// Fields below are not persisted: they carry query results or request context.
// NOTE(review): presumably the number of applications under a combination —
// confirm against the mapper query that fills it.
@Transient
private Integer appCount;
@Transient
private String projectName;
@Transient
private String projectCode;
@Transient
private String imageUrl;
// Marks the caller origin when sending a saga event (see annotation text).
@Transient
@ApiModelProperty(value = "发送saga事件,标记从哪里调用的")
private String from;
// Descendant applications of a combination application (tree expansion).
@Transient
private List<ApplicationDTO> descendants;
@Transient
private List<Long> descendantIds;
@Transient
private Long parentId;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Long getOrganizationId() {
return organizationId;
}
public void setOrganizationId(Long organizationId) {
this.organizationId = organizationId;
}
public Long getProjectId() {
return projectId;
}
public void setProjectId(Long projectId) {
this.projectId = projectId;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
public Boolean getEnabled() {
return enabled;
}
public void setEnabled(Boolean enabled) {
this.enabled = enabled;
}
public String getApplicationCategory() {
return applicationCategory;
}
public void setApplicationCategory(String applicationCategory) {
this.applicationCategory = applicationCategory;
}
public String getApplicationType() {
return applicationType;
}
public void setApplicationType(String applicationType) {
this.applicationType = applicationType;
}
public Integer getAppCount() {
return appCount;
}
public void setAppCount(Integer appCount) {
this.appCount = appCount;
}
public String getProjectName() {
return projectName;
}
public void setProjectName(String projectName) {
this.projectName = projectName;
}
public String getProjectCode() {
return projectCode;
}
public void setProjectCode(String projectCode) {
this.projectCode = projectCode;
}
public String getImageUrl() {
return imageUrl;
}
public void setImageUrl(String imageUrl) {
this.imageUrl = imageUrl;
}
public String getFrom() {
return from;
}
public void setFrom(String from) {
this.from = from;
}
public List<ApplicationDTO> getDescendants() {
return descendants;
}
public void setDescendants(List<ApplicationDTO> descendants) {
this.descendants = descendants;
}
public List<Long> getDescendantIds() {
return descendantIds;
}
public void setDescendantIds(List<Long> descendantIds) {
this.descendantIds = descendantIds;
}
public Boolean getAbnormal() {
return abnormal;
}
public void setAbnormal(Boolean abnormal) {
this.abnormal = abnormal;
}
public Long getParentId() {
return parentId;
}
public void setParentId(Long parentId) {
this.parentId = parentId;
}
public String getApplicationToken() {
return applicationToken;
}
public void setApplicationToken(String applicationToken) {
this.applicationToken = applicationToken;
}
}
<|start_filename|>react/src/app/iam/stores/organization/ldap/index.js<|end_filename|>
// Re-export the LDAP store as this package's default export.
export { default } from './LDAPStore';
<|start_filename|>react/src/app/iam/containers/organization/role/Sider.js<|end_filename|>
import React, { Component } from 'react';
import remove from 'lodash/remove';
import { Form, Modal, Tooltip, Radio, Select, Input, Table } from 'choerodon-ui';
import { inject, observer } from 'mobx-react';
import { withRouter } from 'react-router-dom';
import { Content, Permission } from '@choerodon/boot';
import { injectIntl, FormattedMessage } from 'react-intl';
const { Sidebar } = Modal;
const intlPrefix = 'organization.application';
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class Application extends Component {
constructor(props) {
super(props);
this.state = {
selections: props.selectedPermissions,
};
}
handleSelect = (record, selected, selectedRows) => {
const { selections } = this.state;
if (selected) {
if (!selections.includes(record.id)) {
selections.push(record.id);
}
} else {
remove(selections, p => p === record.id);
}
this.setState({
selections,
});
}
handleSelectAll = (selected, selectedRows, changeRows) => {
let { selections } = this.state;
if (selected) {
selections = selections.concat(selectedRows.map(r => r.id));
selections = [...new Set(selections)];
} else {
remove(selections, p => changeRows.map(r => r.id).includes(p));
}
this.setState({
selections,
});
}
handleOk = () => {
const { onOk } = this.props;
const { selections } = this.state;
if (onOk) {
onOk(selections);
}
}
renderContent() {
const { menu: { permissions = [] }, disabled } = this.props;
const { selections } = this.state;
const columns = [{
title: '权限',
dataIndex: 'code',
key: 'code',
width: '40%',
}, {
title: '描述',
dataIndex: 'description',
key: 'description',
}];
const rowSelection = {
selectedRowKeys: selections,
onSelect: (record, selected, selectedRows) => {
this.handleSelect(record, selected, selectedRows);
},
onSelectAll: (selected, selectedRows, changeRows) => {
this.handleSelectAll(selected, selectedRows, changeRows);
},
getCheckboxProps: record => ({
disabled,
// name: record.name,
}),
};
return (
<Table
loading={false}
filterBar={false}
pagination={false}
columns={columns}
defaultExpandAllRows
dataSource={permissions.slice()}
rowKey={record => record.id}
rowSelection={rowSelection}
/>
);
}
render() {
const { onCancel, menu = {} } = this.props;
return (
<Sidebar
visible
title="菜单权限配置"
bodyStyle={{ padding: 0 }}
onCancel={onCancel}
onOk={this.handleOk}
>
<Content
title={`配置菜单“${menu.name}”的权限`}
description="您可以在此配置当前角色所分配菜单下的权限。"
link="#"
>
{this.renderContent()}
</Content>
</Sidebar>
);
}
}
<|start_filename|>src/test/groovy/io/choerodon/iam/api/controller/v1/MenuControllerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.controller.v1
import io.choerodon.core.exception.ExceptionResponse
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.infra.dto.MenuDTO
import io.choerodon.iam.infra.mapper.MenuMapper
import org.springframework.beans.BeanUtils
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.web.client.TestRestTemplate
import org.springframework.context.annotation.Import
import org.springframework.http.HttpEntity
import org.springframework.http.HttpMethod
import spock.lang.Shared
import spock.lang.Specification
import spock.lang.Stepwise
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * Integration tests for the menu endpoints under {@code /v1/menus}.
 *
 * @author dengyouquan
 */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
//@Stepwise
class MenuControllerSpec extends Specification {
private static final String BASE_PATH = "/v1/menus"
@Autowired
private TestRestTemplate restTemplate
@Autowired
private MenuMapper menuMapper
// Shared flags so fixture rows are inserted once (first setup) and removed
// once (after the feature method that sets needClean).
@Shared
def needInit = true
@Shared
def needClean = false
@Shared
def menuDOList = new ArrayList<MenuDTO>()
def setup() {
if (needInit) {
given: "构造参数"
needInit = false
// Three default "root" menus (setDefault(true)) ...
for (int i = 0; i < 3; i++) {
MenuDTO menuDO = new MenuDTO()
menuDO.setCode("choerodon.code.testroot" + i)
menuDO.setName("菜单测试" + i)
menuDO.setResourceLevel("site")
menuDO.setParentCode("1")
menuDO.setType("root")
menuDO.setIcon("icon")
menuDO.setDefault(true)
menuDOList.add(menuDO)
}
// ... and three plain "menu" entries.
for (int i = 0; i < 3; i++) {
MenuDTO menuDO = new MenuDTO()
menuDO.setCode("choerodon.code.testmenu" + i)
menuDO.setName("菜单测试" + i)
menuDO.setResourceLevel("site")
menuDO.setParentCode("1")
menuDO.setType("menu")
menuDO.setIcon("icon")
menuDOList.add(menuDO)
}
when: "插入记录"
// insertList cannot be used here, otherwise the multi-language table rows
// would not be written.
def count = 0
for (MenuDTO menuDO : menuDOList) {
menuMapper.insert(menuDO)
}
then: "校验结果"
// NOTE(review): count is never incremented (the insert return value is
// discarded), so this comparison is always false. Also, given/when/then in
// setup() are mere labels — Spock only implicitly asserts conditions inside
// feature methods. Consider `count += menuMapper.insert(menuDO)` plus an
// explicit `assert count == 6`.
count == 6
}
}
def cleanup() {
if (needClean) {
given: ""
needClean = false
def count = 0
when: "删除记录"
for (MenuDTO menuDO : menuDOList) {
count += menuMapper.deleteByPrimaryKey(menuDO)
}
then: "校验结果"
// NOTE(review): same as setup() — this condition is not asserted by Spock
// outside a feature method.
count == 6
}
}
def "Query"() {
given: "构造请求参数"
def menuId = menuDOList.get(0).getId()
when: "调用方法"
def entity = restTemplate.getForEntity(BASE_PATH + "/{menu_id}", MenuDTO, menuId)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getId().equals(menuDOList.get(0).getId())
entity.getBody().getCode().equals(menuDOList.get(0).getCode())
entity.getBody().getName().equals(menuDOList.get(0).getName())
entity.getBody().getResourceLevel().equals(menuDOList.get(0).getResourceLevel())
entity.getBody().getParentCode().equals(menuDOList.get(0).getParentCode())
}
def "Create"() {
given: "构造请求参数"
def menuDTO = menuDOList.get(0)
when: "调用方法[异常-不合法type]"
def menuDTO1 = new MenuDTO()
BeanUtils.copyProperties(menuDTO, menuDTO1)
menuDTO1.setType("error")
def entity = restTemplate.postForEntity(BASE_PATH, menuDTO1, ExceptionResponse)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
// NOTE(review): the expected error code below is the literal validation
// message "不能为空" — confirm this is really what the endpoint returns.
entity.getBody().getCode().equals("不能为空")
when: "调用方法[异常-不合法level]"
def menuDTO2 = new MenuDTO()
BeanUtils.copyProperties(menuDTO, menuDTO2)
menuDTO2.setResourceLevel("error")
entity = restTemplate.postForEntity(BASE_PATH, menuDTO2, ExceptionResponse)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("不能为空")
when: "调用方法[异常-menu code存在]"
def menuDTO3 = new MenuDTO()
BeanUtils.copyProperties(menuDTO, menuDTO3)
entity = restTemplate.postForEntity(BASE_PATH, menuDTO3, ExceptionResponse)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("不能为空")
when: "调用方法[异常-menu code存在]"
def menuDTO4 = new MenuDTO()
BeanUtils.copyProperties(menuDTO, menuDTO4)
menuDTO4.setCode("create")
menuDTO4.setId(null)
entity = restTemplate.postForEntity(BASE_PATH, menuDTO4, MenuDTO)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
// entity.getBody().getCode().equals(menuDTO4.getCode())
// entity.getBody().getName().equals(menuDTO4.getName())
// entity.getBody().getResourceLevel().equals(menuDTO4.getResourceLevel())
// entity.getBody().getParentCode().equals(menuDTO4.getParentCode())
}
def "Update"() {
given: "构造请求参数"
def menuDTO = menuDOList.get(5)
when: "调用方法"
def menuDTO1 = new MenuDTO()
BeanUtils.copyProperties(menuDTO, menuDTO1)
menuDTO1.setId(1000L)
def httpEntity = new HttpEntity<MenuDTO>(menuDTO1)
def entity = restTemplate.exchange(BASE_PATH + "/{menu_id}", HttpMethod.PUT, httpEntity, ExceptionResponse, menuDTO1.getId())
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("不能为空")
when: "调用方法"
def menuDTO2 = new MenuDTO()
BeanUtils.copyProperties(menuDTO, menuDTO2)
menuDTO2.setObjectVersionNumber(1)
httpEntity = new HttpEntity<MenuDTO>(menuDTO2)
entity = restTemplate.exchange(BASE_PATH + "/{menu_id}", HttpMethod.PUT, httpEntity, MenuDTO, menuDTO2.getId())
then: "校验结果"
entity.statusCode.is2xxSuccessful()
// entity.getBody().getId().equals(menuDTO2.getId())
// entity.getBody().getCode().equals(menuDTO2.getCode())
// entity.getBody().getName().equals(menuDTO2.getName())
// entity.getBody().getResourceLevel().equals(menuDTO2.getResourceLevel())
// entity.getBody().getParentCode().equals(menuDTO2.getParentCode())
}
def "Delete"() {
given: "构造请求参数"
def menuDTO = menuDOList.get(1)
when: "调用方法[异常-有子菜单]"
def menuDTO1 = menuDOList.get(0)
def httpEntity = new HttpEntity<MenuDTO>(menuDTO1)
def entity = restTemplate.exchange(BASE_PATH + "/{menu_id}", HttpMethod.DELETE, httpEntity, ExceptionResponse, menuDTO1.getId())
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.menu.default")
when: "调用方法[异常-菜单不存在]"
def menuDTO2 = new MenuDTO()
menuDTO2.setId(1000L)
httpEntity = new HttpEntity<MenuDTO>(menuDTO2)
entity = restTemplate.exchange(BASE_PATH + "/{menu_id}", HttpMethod.DELETE, httpEntity, ExceptionResponse, menuDTO2.getId())
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.menu.not.exist")
when: "调用方法[异常-菜单是默认的]"
def menuDTO3 = new MenuDTO()
BeanUtils.copyProperties(menuDTO, menuDTO3)
httpEntity = new HttpEntity<MenuDTO>(menuDTO3)
entity = restTemplate.exchange(BASE_PATH + "/{menu_id}", HttpMethod.DELETE, httpEntity, ExceptionResponse, menuDTO3.getId())
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.menu.default")
when: "调用方法"
def menuDTO4 = menuDOList.get(5)
httpEntity = new HttpEntity<MenuDTO>(menuDTO4)
entity = restTemplate.exchange(BASE_PATH + "/{menu_id}", HttpMethod.DELETE, httpEntity, Boolean, menuDTO4.getId())
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody() == null
}
// def "ListTreeMenusWithPermissions"() {
// given: "构造请求参数"
// def paramsMap = new HashMap<String, Object>()
// def level = "site"
// def testPermission = false
// paramsMap.put("level", level)
// paramsMap.put("test_permission", testPermission)
//
// when: "调用方法[异常-level不合法]"
// paramsMap.put("level", "error")
// def entity = restTemplate.getForEntity(BASE_PATH + "/tree?test_permission={test_permission}&level={level}", ExceptionResponse, paramsMap)
//
// then: "校验结果"
// entity.statusCode.is2xxSuccessful()
// entity.getBody().getCode().equals("error.level.illegal")
//
// when: "调用方法"
// paramsMap.put("level", level)
// entity = restTemplate.getForEntity(BASE_PATH + "/tree?test_permission={test_permission}&level={level}", List, paramsMap)
//
// then: "校验结果"
// entity.statusCode.is2xxSuccessful()
// entity.getBody().size() == 4
// }
// def "ListAfterTestPermission"() {
// given: "构造请求参数"
// def paramsMap = new HashMap<String, Object>()
// def level = "site"
// def sourceId = 0
// paramsMap.put("level", level)
// paramsMap.put("source_id", sourceId)
//
// when: "调用方法[异常-level不合法]"
// paramsMap.put("level", "error")
// def entity = restTemplate.getForEntity(BASE_PATH + "?source_id={source_id}&level={level}", ExceptionResponse, paramsMap)
//
// then: "校验结果"
// entity.statusCode.is2xxSuccessful()
// entity.getBody().getCode().equals("error.level.illegal")
//
// when: "调用方法"
// paramsMap.put("level", level)
// entity = restTemplate.getForEntity(BASE_PATH + "?source_id={source_id}&level={level}", List, paramsMap)
//
// then: "校验结果"
// entity.statusCode.is2xxSuccessful()
// entity.getBody().size() == 4
// }
//
// def "SaveListTree"() {
// given: "构造请求参数"
// def paramsMap = new HashMap<String, Object>()
// def level = "site"
// def testPermission = false
// paramsMap.put("level", level)
// paramsMap.put("test_permission", testPermission)
// def entity = restTemplate.getForEntity(BASE_PATH + "/tree?test_permission={test_permission}&level={level}", List, paramsMap)
// def updateMenuDTOList = entity.getBody()
//
// when: "调用方法"
// entity = restTemplate.postForEntity(BASE_PATH + "/tree?level={level}", updateMenuDTOList, List, paramsMap)
//
//
// then: "校验结果"
// entity.statusCode.is2xxSuccessful()
// entity.getBody().size() == 4
// }
def "Check"() {
given: "构造请求参数"
def menuDTO = new MenuDTO()
when: "调用方法[异常-菜单编码为空]"
def entity = restTemplate.postForEntity(BASE_PATH + "/check", menuDTO, ExceptionResponse)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.menu.code.empty")
when: "调用方法[异常-菜单level为空]"
menuDTO.setCode("check")
entity = restTemplate.postForEntity(BASE_PATH + "/check", menuDTO, ExceptionResponse)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
// entity.getBody().getCode().equals("error.menu.level.empty")
when: "调用方法[异常-菜单type为空]"
menuDTO.setCode("check")
menuDTO.setResourceLevel("site")
entity = restTemplate.postForEntity(BASE_PATH + "/check", menuDTO, ExceptionResponse)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
// entity.getBody().getCode().equals("error.menu.type.empty")
when: "调用方法[异常-菜单重复]"
menuDTO.setCode("choerodon.code.testroot1")
menuDTO.setResourceLevel("site")
menuDTO.setType("root")
entity = restTemplate.postForEntity(BASE_PATH + "/check", menuDTO, ExceptionResponse)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
// entity.getBody().getCode().equals("error.menu.code-level-type.exist")
when: "调用方法"
menuDTO.setCode("check")
menuDTO.setResourceLevel("site")
menuDTO.setType("root")
entity = restTemplate.postForEntity(BASE_PATH + "/check", menuDTO, Void)
// Trigger cleanup() to delete the fixture rows after this feature method.
needClean = true
then: "校验结果"
entity.statusCode.is2xxSuccessful()
}
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/RoleNameAndEnabledDTO.java<|end_filename|>
package io.choerodon.iam.api.dto;
import io.swagger.annotations.ApiModelProperty;
/**
 * Lightweight view of a role: its code, display name and enabled flag.
 */
public class RoleNameAndEnabledDTO {
    @ApiModelProperty(value = "角色名")
    private String name;
    @ApiModelProperty(value = "角色代码")
    private String code;
    @ApiModelProperty(value = "角色是否启用")
    private boolean enabled;

    /** No-arg constructor for serialization frameworks. */
    public RoleNameAndEnabledDTO() {
    }

    /** Convenience constructor populating every field. */
    public RoleNameAndEnabledDTO(String code, String name, boolean enabled) {
        this.code = code;
        this.name = name;
        this.enabled = enabled;
    }

    public String getName() {
        return name;
    }

    public String getCode() {
        return code;
    }

    public boolean isEnabled() {
        return enabled;
    }

    public void setName(String name) {
        this.name = name;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }
}
<|start_filename|>react/src/app/iam/stores/organization/password-policy/index.js<|end_filename|>
// Re-export the password policy store as this package's default export.
export { default } from './PasswordPolicyStore';
<|start_filename|>src/main/java/io/choerodon/iam/infra/dto/AuditDTO.java<|end_filename|>
package io.choerodon.iam.infra.dto;
import io.choerodon.mybatis.entity.BaseDTO;
import io.swagger.annotations.ApiModelProperty;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;
/**
 * Data object mapped to the {@code fd_audit} table: one audit record
 * describing an operation performed by a user.
 *
 * @author superlee
 * @since 2019-04-23
 */
@Table(name = "fd_audit")
public class AuditDTO extends BaseDTO {
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @ApiModelProperty(value = "主键ID/非必填")
    private Long id;
    @NotNull(message = "error.audit.userId.empty")
    @ApiModelProperty(value = "用户Id/必填")
    private Long userId;
    @NotEmpty(message = "error.audit.type.empty")
    @ApiModelProperty(value = "操作类型,create,update,delete,unknown/必填")
    private String type;
    @ApiModelProperty(value = "业务类型。登录、登出、更新环境等。/非必填")
    private String businessType;
    @ApiModelProperty(value = "数据类型。服务名+数据,eg.: iam-service.user/非必填")
    private String dataType;
    @ApiModelProperty(value = "操作数据/非必填")
    private String message;

    // --- accessors: getters first, setters grouped below ---

    public Long getId() {
        return id;
    }

    public Long getUserId() {
        return userId;
    }

    public String getType() {
        return type;
    }

    public String getBusinessType() {
        return businessType;
    }

    public String getDataType() {
        return dataType;
    }

    public String getMessage() {
        return message;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public void setUserId(Long userId) {
        this.userId = userId;
    }

    public void setType(String type) {
        this.type = type;
    }

    public void setBusinessType(String businessType) {
        this.businessType = businessType;
    }

    public void setDataType(String dataType) {
        this.dataType = dataType;
    }

    public void setMessage(String message) {
        this.message = message;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/validator/MenuValidator.java<|end_filename|>
package io.choerodon.iam.api.validator;
import io.choerodon.base.enums.MenuType;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.infra.dto.MenuDTO;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
/**
 * Static validation helpers for menu create/delete operations.
 *
 * @author azengqiang
 */
public class MenuValidator {
    /**
     * Validates a menu before insertion and normalizes it in place:
     * stamps the resource level and the {@code menu} type, requires a
     * non-empty code, name, icon and parent code, defaults the sort order
     * to 0 and marks the record as non-default.
     *
     * @param menu  menu to validate (mutated)
     * @param level resource level to stamp on the menu
     */
    public static void insertValidate(MenuDTO menu, String level) {
        menu.setResourceLevel(level);
        menu.setType(MenuType.MENU.value());
        String menuCode = menu.getCode();
        rejectIfEmpty(menuCode, "error.menu.code.empty");
        rejectIfEmpty(menu.getName(), "error.menu.name.empty", menuCode);
        rejectIfEmpty(menu.getIcon(), "error.menu.icon.empty", menuCode);
        rejectIfEmpty(menu.getParentCode(), "error.menu.parent.code.empty", menuCode);
        if (menu.getSort() == null) {
            menu.setSort(0);
        }
        menu.setDefault(false);
    }

    /**
     * Validates that a menu may be deleted: it must not be a default menu,
     * must be of type {@code menu}, and must not have sub menus.
     *
     * @param menu menu to check
     */
    public static void deleteValidate(MenuDTO menu) {
        if (menu.getDefault()) {
            throw new CommonException("error.menu.default");
        }
        if (!MenuType.isMenu(menu.getType())) {
            throw new CommonException("error.menu.not.self", menu.getName());
        }
        if (!CollectionUtils.isEmpty(menu.getSubMenus())) {
            throw new CommonException("error.menu.have.children", menu.getName());
        }
    }

    /** Throws {@link CommonException} with the given key when the value is null or empty. */
    private static void rejectIfEmpty(String value, String messageKey) {
        if (StringUtils.isEmpty(value)) {
            throw new CommonException(messageKey);
        }
    }

    /** Same as above, but passes the menu code as the message parameter. */
    private static void rejectIfEmpty(String value, String messageKey, String menuCode) {
        if (StringUtils.isEmpty(value)) {
            throw new CommonException(messageKey, menuCode);
        }
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/ApplicationTokenController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.iam.app.service.ApplicationService;
import io.choerodon.iam.infra.dto.ApplicationDTO;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * Public endpoint that resolves an application from its token.
 *
 * @author jiameng.cao
 * @date 2019/6/18
 */
@RestController
@RequestMapping(value = "/v1/applications")
public class ApplicationTokenController {
    private final ApplicationService applicationService;

    public ApplicationTokenController(ApplicationService applicationService) {
        this.applicationService = applicationService;
    }

    /**
     * Looks up the application matching the token carried in the request body.
     */
    @Permission(type = ResourceType.SITE, permissionPublic = true)
    @ApiOperation(value = "根据token查询应用接口")
    @PostMapping(value = "/token")
    public ResponseEntity<ApplicationDTO> getApplicationByToken(@RequestBody ApplicationDTO applicationDTO) {
        return ResponseEntity.ok(applicationService.getApplicationByToken(applicationDTO.getApplicationToken()));
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/AccessTokenMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import java.util.List;
import io.choerodon.iam.infra.dto.AccessTokenDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
/**
 * MyBatis mapper for {@code AccessTokenDTO} records.
 *
 * @author Eugen
 */
public interface AccessTokenMapper extends Mapper<AccessTokenDTO> {
// NOTE(review): the backing SQL lives in the mapper XML (not visible here);
// the parameters suggest selection by user name and client id — confirm there.
List<AccessTokenDTO> selectTokens(@Param("userName") String userName,
@Param("clientId") String clientId);
// Loads the tokens whose ids are in the given list (per the XML statement).
List<AccessTokenDTO> selectTokenList(@Param("tokenIds") List<String> tokenIds);
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/ApplicationController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import java.util.List;
import javax.validation.Valid;
import com.github.pagehelper.PageInfo;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.constant.PageConstant;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.iam.api.query.ApplicationQuery;
import io.choerodon.iam.app.service.ApplicationService;
import io.choerodon.iam.infra.dto.ApplicationDTO;
import io.choerodon.iam.infra.dto.ApplicationExplorationDTO;
/**
 * Organization-scoped application management endpoints.
 *
 * @author superlee
 * @since 0.15.0
 **/
@RestController
@RequestMapping(value = "/v1/organizations/{organization_id}/applications")
public class ApplicationController {
    private final ApplicationService applicationService;

    public ApplicationController(ApplicationService applicationService) {
        this.applicationService = applicationService;
    }

    /** Creates an application under the given organization. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "创建应用")
    @PostMapping
    public ResponseEntity<ApplicationDTO> create(@PathVariable("organization_id") Long organizationId,
                                                 @RequestBody @Valid ApplicationDTO applicationDTO) {
        applicationDTO.setOrganizationId(organizationId);
        return ResponseEntity.ok(applicationService.create(applicationDTO));
    }

    /**
     * Updates an application by primary key.
     * NOTE(review): exposed via POST rather than PUT; kept as-is because
     * changing the HTTP method would break existing callers.
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "根据主键更新应用")
    @PostMapping("/{id}")
    public ResponseEntity<ApplicationDTO> update(@PathVariable("organization_id") Long organizationId,
                                                 @PathVariable("id") Long id,
                                                 @RequestBody @Valid ApplicationDTO applicationDTO) {
        applicationDTO.setOrganizationId(organizationId);
        applicationDTO.setId(id);
        return ResponseEntity.ok(applicationService.update(applicationDTO));
    }

    /** Deletes an application. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "删除应用")
    @DeleteMapping("/{id}")
    public ResponseEntity delete(@PathVariable("organization_id") Long organizationId, @PathVariable("id") Long id) {
        applicationService.delete(organizationId, id);
        return ResponseEntity.ok().build();
    }

    /** Pages through the organization's applications, optionally with their descendants. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "分页查询应用")
    @GetMapping
    public ResponseEntity<PageInfo<ApplicationDTO>> pagingQuery(@PathVariable("organization_id") Long organizationId,
                                                                @RequestParam(defaultValue = PageConstant.PAGE, required = false) final int page,
                                                                @RequestParam(defaultValue = PageConstant.SIZE, required = false) final int size,
                                                                @RequestParam(defaultValue = "true", name = "with_descendants") Boolean withDescendants,
                                                                ApplicationQuery applicationSearchDTO) {
        applicationSearchDTO.setOrganizationId(organizationId);
        return ResponseEntity.ok(applicationService.pagingQuery(page, size, applicationSearchDTO, withDescendants));
    }

    /** Enables an application. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "启用应用")
    @PutMapping(value = "/{id}/enable")
    public ResponseEntity<ApplicationDTO> enabled(@PathVariable Long id) {
        return ResponseEntity.ok(applicationService.enable(id));
    }

    /** Disables an application. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "禁用应用")
    @PutMapping(value = "/{id}/disable")
    public ResponseEntity<ApplicationDTO> disable(@PathVariable Long id) {
        return ResponseEntity.ok(applicationService.disable(id));
    }

    /** Lists the supported application types. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "获取application的类型列表")
    @GetMapping(value = "/types")
    public ResponseEntity<List<String>> types() {
        return ResponseEntity.ok(applicationService.types());
    }

    /** Checks name/code uniqueness inside the organization. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "重名校验")
    @PostMapping("/check")
    public ResponseEntity check(@PathVariable("organization_id") Long organizationId,
                                @RequestBody ApplicationDTO applicationDTO) {
        applicationDTO.setOrganizationId(organizationId);
        applicationService.check(applicationDTO);
        return ResponseEntity.ok().build();
    }

    /** Adds applications / combination applications to a combination application. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "将应用/组合应用添加到组合应用中")
    @PostMapping("/{id}/add_to_combination")
    public ResponseEntity addToCombination(@PathVariable("organization_id") Long organizationId,
                                           @PathVariable("id") Long id,
                                           @RequestBody Long[] ids) {
        applicationService.addToCombination(organizationId, id, ids);
        return ResponseEntity.ok().build();
    }

    /** Removes applications / combination applications from a combination application. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "将应用/组合应用从组合应用中移除")
    @PostMapping("/{id}/delete_combination")
    public ResponseEntity deleteCombination(@PathVariable("organization_id") Long organizationId,
                                            @PathVariable("id") Long id,
                                            @RequestBody Long[] ids) {
        applicationService.deleteCombination(organizationId, id, ids);
        return ResponseEntity.ok().build();
    }

    /** Lists the descendants of a combination application. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "查询组合应用的后代")
    @GetMapping("/{id}/descendant")
    public ResponseEntity<List<ApplicationExplorationDTO>> queryDescendant(@PathVariable("organization_id") Long organizationId,
                                                                           @PathVariable("id") Long id) {
        return ResponseEntity.ok(applicationService.queryDescendant(id));
    }

    /** Lists the applications that can be added to the combination without creating a cycle. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "查询可以向指定组合应用添加的后代,判别标准是不构成环")
    @GetMapping("/{id}/enabled_app")
    public ResponseEntity<List<ApplicationDTO>> queryEnabledApplication(@PathVariable("organization_id") Long organizationId,
                                                                        @PathVariable("id") Long id) {
        return ResponseEntity.ok(applicationService.queryEnabledApplication(organizationId, id));
    }

    /** Pages through the plain applications under a combination application. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "查询组合应用下普通应用清单")
    @GetMapping("/{id}/app_list")
    public ResponseEntity<PageInfo<ApplicationDTO>> queryApplicationList(@RequestParam(defaultValue = PageConstant.PAGE, required = false) final int page,
                                                                         @RequestParam(defaultValue = PageConstant.SIZE, required = false) final int size,
                                                                         @PathVariable("organization_id") Long organizationId,
                                                                         @PathVariable("id") Long id,
                                                                         @RequestParam(required = false) String name,
                                                                         @RequestParam(required = false) String code) {
        return ResponseEntity.ok(applicationService.queryApplicationList(page, size, id, name, code));
    }

    /** Fetches one application's details, optionally with its descendants. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "根据id查询应用详情")
    @GetMapping("/{id}")
    public ResponseEntity<ApplicationDTO> query(@PathVariable("organization_id") Long organizationId,
                                                @PathVariable("id") Long id,
                                                @RequestParam(defaultValue = "true", name = "with_descendants") Boolean withDescendants) {
        return ResponseEntity.ok(applicationService.query(id, withDescendants));
    }

    /** Returns the application's token. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "获取application的token")
    @GetMapping(value = "/{id}/token")
    public ResponseEntity<String> getToken(@PathVariable("organization_id") Long organizationId, @PathVariable("id") Long id) {
        return ResponseEntity.ok(applicationService.getToken(id));
    }

    /** Resolves an application id from its code and project id. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "根据code查询APP的id")
    @GetMapping(value = "/code")
    public ResponseEntity<Long> getIdByCode(@PathVariable("organization_id") Long organizationId, @RequestParam String code, @RequestParam Long projectId) {
        return ResponseEntity.ok(applicationService.getIdByCode(code, projectId));
    }

    /** Creates (regenerates) the application's token. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "创建application的token")
    @PostMapping(value = "/{id}/token")
    public ResponseEntity<String> createToken(@PathVariable("organization_id") Long organizationId, @PathVariable("id") Long id) {
        return ResponseEntity.ok(applicationService.createToken(id));
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/OrganizationUserController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.base.BaseController;
import io.choerodon.core.iam.ResourceLevel;
import io.choerodon.iam.api.dto.UserSearchDTO;
import io.choerodon.iam.app.service.ExcelService;
import io.choerodon.iam.app.service.OrganizationUserService;
import io.choerodon.iam.app.service.UploadHistoryService;
import io.choerodon.iam.app.service.UserService;
import io.choerodon.iam.infra.dto.UploadHistoryDTO;
import io.choerodon.iam.infra.dto.UserDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.choerodon.swagger.annotation.CustomPageRequest;
import io.swagger.annotations.ApiOperation;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import springfox.documentation.annotations.ApiIgnore;
/**
 * REST endpoints for managing the users that belong to one organization:
 * CRUD, password reset, enable/disable/unlock, uniqueness checks and
 * Excel-based bulk import with its upload history.
 *
 * @author superlee
 */
@RestController
@RequestMapping(value = "/v1/organizations/{organization_id}")
public class OrganizationUserController extends BaseController {

    private OrganizationUserService organizationUserService;
    private UserService userService;
    private ExcelService excelService;
    private UploadHistoryService uploadHistoryService;

    public OrganizationUserController(OrganizationUserService organizationUserService,
                                      UserService userService,
                                      ExcelService excelService,
                                      UploadHistoryService uploadHistoryService) {
        this.organizationUserService = organizationUserService;
        this.userService = userService;
        this.excelService = excelService;
        this.uploadHistoryService = uploadHistoryService;
    }

    /**
     * Creates a user inside the organization. LDAP users can never be
     * created through this endpoint, so the flag is forced to {@code false}.
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "创建用户")
    @PostMapping("/users")
    public ResponseEntity<UserDTO> create(@PathVariable(name = "organization_id") Long organizationId,
                                          @RequestBody @Validated UserDTO userDTO) {
        userDTO.setOrganizationId(organizationId);
        // users created here are local accounts, never LDAP-synced ones
        userDTO.setLdap(false);
        return ResponseEntity.ok(organizationUserService.create(userDTO, true));
    }

    /**
     * Updates a user. The admin and ldap flags are cleared so a caller
     * can never change them through this endpoint.
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "修改用户")
    @PutMapping(value = "/users/{id}")
    public ResponseEntity<UserDTO> update(@PathVariable(name = "organization_id") Long organizationId,
                                          @PathVariable Long id,
                                          @RequestBody @Validated UserDTO userDTO) {
        // protected fields: neither admin nor ldap may be modified here
        userDTO.setAdmin(null);
        userDTO.setLdap(null);
        userDTO.setOrganizationId(organizationId);
        userDTO.setId(id);
        return ResponseEntity.ok(organizationUserService.update(userDTO));
    }

    /** Resets the given user's password to the organization default. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "重置用户密码")
    @PutMapping(value = "/users/{id}/reset")
    public ResponseEntity<UserDTO> resetUserPassword(@PathVariable(name = "organization_id") Long organizationId, @PathVariable Long id) {
        return ResponseEntity.ok(organizationUserService.resetUserPassword(organizationId, id));
    }

    /** Pages through the organization's users matching the search criteria. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "分页查询用户")
    @PostMapping(value = "/users/search")
    @CustomPageRequest
    public ResponseEntity<PageInfo<UserDTO>> list(@PathVariable(name = "organization_id") Long organizationId,
                                                  @ApiIgnore
                                                  @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                  @RequestBody UserSearchDTO user) {
        user.setOrganizationId(organizationId);
        return ResponseEntity.ok(organizationUserService.pagingQuery(pageRequest, user));
    }

    /** Fetches a single user belonging to the organization. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "查询组织下的用户")
    @GetMapping(value = "/users/{id}")
    public ResponseEntity<UserDTO> query(@PathVariable(name = "organization_id") Long organizationId,
                                         @PathVariable Long id) {
        return ResponseEntity.ok(organizationUserService.query(organizationId, id));
    }

    /** Unlocks a user that was locked out (e.g. too many failed logins). */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "解锁用户")
    @GetMapping(value = "/users/{id}/unlock")
    public ResponseEntity<UserDTO> unlock(@PathVariable(name = "organization_id") Long organizationId,
                                          @PathVariable Long id) {
        return ResponseEntity.ok(organizationUserService.unlock(organizationId, id));
    }

    /** Re-enables a disabled user. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "启用用户")
    @PutMapping(value = "/users/{id}/enable")
    public ResponseEntity<UserDTO> enableUser(@PathVariable(name = "organization_id") Long organizationId,
                                              @PathVariable Long id) {
        return ResponseEntity.ok(organizationUserService.enableUser(organizationId, id));
    }

    /** Disables (soft-deactivates) a user. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "禁用用户")
    @PutMapping(value = "/users/{id}/disable")
    public ResponseEntity<UserDTO> disableUser(@PathVariable(name = "organization_id") Long organizationId,
                                               @PathVariable Long id) {
        return ResponseEntity.ok(organizationUserService.disableUser(organizationId, id));
    }

    /**
     * Validates that the submitted login name / email / phone are not
     * already taken; delegates the actual checks to {@link UserService}.
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "用户信息重名校验")
    @PostMapping(value = "/users/check")
    public ResponseEntity check(@PathVariable(name = "organization_id") Long organizationId,
                                @RequestBody UserDTO user) {
        userService.check(user);
        return ResponseEntity.ok().build();
    }

    /** Bulk-imports users from an uploaded Excel workbook. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation("从excel里面批量导入用户")
    @PostMapping("/users/batch_import")
    public ResponseEntity importUsersFromExcel(@PathVariable(name = "organization_id") Long id,
                                               @RequestPart MultipartFile file) {
        excelService.importUsers(id, file);
        return ResponseEntity.noContent().build();
    }

    /** Serves the Excel (xlsx) template used for bulk user import. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation("下载导入用户的模板文件")
    @GetMapping("/users/download_templates")
    public ResponseEntity<Resource> downloadTemplates(@PathVariable(name = "organization_id") Long id) {
        HttpHeaders headers = excelService.getHttpHeaders();
        Resource resource = excelService.getUserTemplates();
        // Excel 2007+ (xlsx) content type
        return ResponseEntity.ok()
                .headers(headers)
                .contentType(MediaType.parseMediaType("application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"))
                .body(resource);
    }

    /** Returns the most recent user-import history record for the caller. */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation("查询最新的导入历史")
    @GetMapping("/users/{user_id}/upload/history")
    public ResponseEntity<UploadHistoryDTO> latestHistory(@PathVariable(name = "organization_id") Long organizationId,
                                                          @PathVariable(name = "user_id") Long userId) {
        return ResponseEntity.ok(uploadHistoryService.latestHistory(userId, "user", organizationId, ResourceLevel.ORGANIZATION.value()));
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/eventhandler/NotifyListener.java<|end_filename|>
package io.choerodon.iam.api.eventhandler;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.choerodon.asgard.saga.annotation.SagaTask;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.api.dto.payload.UserEventPayload;
import io.choerodon.iam.app.service.UserService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static io.choerodon.iam.infra.common.utils.SagaTopic.User.TASK_USER_CREATE;
import static io.choerodon.iam.infra.common.utils.SagaTopic.User.USER_CREATE;
/**
 * Saga task that sends a site message ("station letter") after a user is
 * successfully created.
 *
 * @author dengyouquan
 **/
@Component
public class NotifyListener {
    private static final Logger LOGGER = LoggerFactory.getLogger(NotifyListener.class);
    private static final String ADD_USER = "addUser";
    private final ObjectMapper mapper = new ObjectMapper();
    private UserService userService;

    public NotifyListener(UserService userService) {
        this.userService = userService;
    }

    /**
     * Deserializes the saga payload and, for single-user creations only,
     * sends a notification to the creator. Batch creations are passed
     * through untouched.
     *
     * @param message JSON array of {@link UserEventPayload}
     * @return the deserialized payloads (required by the saga framework)
     * @throws IOException    if the payload JSON cannot be parsed
     * @throws CommonException if the payload list is null or empty
     */
    @SagaTask(code = TASK_USER_CREATE, sagaCode = USER_CREATE, seq = 1, description = "创建用户成功后发送站内信事件")
    public List<UserEventPayload> create(String message) throws IOException {
        JavaType listOfPayloads = mapper.getTypeFactory().constructCollectionType(List.class, UserEventPayload.class);
        List<UserEventPayload> payloads = mapper.readValue(message, listOfPayloads);
        if (payloads == null || payloads.isEmpty()) {
            throw new CommonException("error.sagaTask.sendPm.payloadsIsEmpty");
        }
        // Only single-user creation triggers a notification for now
        // (batch creation had issues with per-user messages).
        if (payloads.size() > 1) {
            return payloads;
        }
        UserEventPayload first = payloads.get(0);
        // sender and recipient are the same person: the creator
        List<Long> recipientIds = new ArrayList<>();
        recipientIds.add(first.getFromUserId());
        Map<String, Object> templateParams = new HashMap<>();
        templateParams.put("addCount", 1);
        userService.sendNotice(first.getFromUserId(), recipientIds, ADD_USER, templateParams, first.getOrganizationId());
        LOGGER.info("NotifyListener create user send station letter.");
        return payloads;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/UserServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.pagehelper.Page;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import io.choerodon.asgard.saga.dto.StartInstanceDTO;
import io.choerodon.asgard.saga.feign.SagaClient;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.iam.ResourceLevel;
import io.choerodon.core.notify.NoticeSendDTO;
import io.choerodon.core.oauth.CustomUserDetails;
import io.choerodon.core.oauth.DetailsHelper;
import io.choerodon.iam.api.dto.*;
import io.choerodon.iam.api.dto.payload.UserEventPayload;
import io.choerodon.iam.api.validator.ResourceLevelValidator;
import io.choerodon.iam.api.validator.UserPasswordValidator;
import io.choerodon.iam.app.service.UserService;
import io.choerodon.iam.infra.asserts.*;
import io.choerodon.iam.infra.common.utils.ImageUtils;
import io.choerodon.iam.infra.common.utils.PageUtils;
import io.choerodon.iam.infra.common.utils.ParamUtils;
import io.choerodon.iam.infra.dto.MemberRoleDTO;
import io.choerodon.iam.infra.dto.OrganizationDTO;
import io.choerodon.iam.infra.dto.ProjectDTO;
import io.choerodon.iam.infra.dto.RoleDTO;
import io.choerodon.iam.infra.dto.UserDTO;
import io.choerodon.iam.infra.exception.UpdateExcetion;
import io.choerodon.iam.infra.feign.FileFeignClient;
import io.choerodon.iam.infra.feign.NotifyFeignClient;
import io.choerodon.iam.infra.mapper.*;
import io.choerodon.oauth.core.password.PasswordPolicyManager;
import io.choerodon.oauth.core.password.domain.BasePasswordPolicyDTO;
import io.choerodon.oauth.core.password.domain.BaseUserDTO;
import io.choerodon.oauth.core.password.mapper.BasePasswordPolicyMapper;
import io.choerodon.oauth.core.password.record.PasswordRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.AsyncResult;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.multipart.MultipartFile;
import java.time.LocalDate;
import java.util.*;
import java.util.concurrent.Future;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static io.choerodon.iam.infra.common.utils.SagaTopic.User.USER_UPDATE;
import static io.choerodon.iam.infra.asserts.UserAssertHelper.WhichColumn;
/**
* @author superlee
*/
@Component
@RefreshScope
public class UserServiceImpl implements UserService {
// NOTE(review): logger category is UserService.class, not UserServiceImpl.class —
// presumably unintentional; confirm before changing, as log filters may rely on it.
private static final Logger LOGGER = LoggerFactory.getLogger(UserService.class);
// stateless and thread-safe; shared across all password operations
private static final BCryptPasswordEncoder ENCODER = new BCryptPasswordEncoder();
private static final String USER_NOT_LOGIN_EXCEPTION = "error.user.not.login";
private static final String USER_ID_NOT_EQUAL_EXCEPTION = "error.user.id.not.equals";
// feature flag: when true, project category info is merged into project queries
@Value("${choerodon.category.enabled:false}")
private boolean enableCategory;
// feature flag: when true, user updates are propagated to devops via saga events
@Value("${choerodon.devops.message:false}")
private boolean devopsMessage;
@Value("${spring.application.name:default}")
private String serviceName;
// collaborators, all injected through the constructor below
private PasswordRecord passwordRecord;
private FileFeignClient fileFeignClient;
private BasePasswordPolicyMapper basePasswordPolicyMapper;
private PasswordPolicyManager passwordPolicyManager;
private UserPasswordValidator userPasswordValidator;
private SagaClient sagaClient;
private MemberRoleMapper memberRoleMapper;
// reused JSON mapper for saga payload serialization
private final ObjectMapper mapper = new ObjectMapper();
private ProjectMapCategoryMapper projectMapCategoryMapper;
private NotifyFeignClient notifyFeignClient;
private UserMapper userMapper;
private UserAssertHelper userAssertHelper;
private OrganizationAssertHelper organizationAssertHelper;
private ProjectMapper projectMapper;
private OrganizationMapper organizationMapper;
private ProjectAssertHelper projectAssertHelper;
private RoleAssertHelper roleAssertHelper;
// Constructor injection of all collaborators (Spring autowires by type).
public UserServiceImpl(PasswordRecord passwordRecord,
FileFeignClient fileFeignClient,
SagaClient sagaClient,
BasePasswordPolicyMapper basePasswordPolicyMapper,
UserPasswordValidator userPasswordValidator,
PasswordPolicyManager passwordPolicyManager,
MemberRoleMapper memberRoleMapper,
ProjectMapCategoryMapper projectMapCategoryMapper,
NotifyFeignClient notifyFeignClient,
UserMapper userMapper,
UserAssertHelper userAssertHelper,
OrganizationAssertHelper organizationAssertHelper,
ProjectMapper projectMapper,
OrganizationMapper organizationMapper,
ProjectAssertHelper projectAssertHelper,
RoleAssertHelper roleAssertHelper) {
this.passwordRecord = passwordRecord;
this.fileFeignClient = fileFeignClient;
this.sagaClient = sagaClient;
this.basePasswordPolicyMapper = basePasswordPolicyMapper;
this.passwordPolicyManager = passwordPolicyManager;
this.userPasswordValidator = userPasswordValidator;
this.memberRoleMapper = memberRoleMapper;
this.projectMapCategoryMapper = projectMapCategoryMapper;
this.notifyFeignClient = notifyFeignClient;
this.userMapper = userMapper;
this.userAssertHelper = userAssertHelper;
this.organizationAssertHelper = organizationAssertHelper;
this.projectMapper = projectMapper;
this.organizationMapper = organizationMapper;
this.projectAssertHelper = projectAssertHelper;
this.roleAssertHelper = roleAssertHelper;
}
@Override
public UserDTO querySelf() {
    // Looks up the currently authenticated user and decorates the record
    // with its organization's name/code. Returns null when no row exists.
    CustomUserDetails details = DetailsHelperAssert.userDetailNotExisted();
    UserDTO self = userMapper.selectByPrimaryKey(details.getUserId());
    if (self == null) {
        return null;
    }
    OrganizationDTO org = organizationAssertHelper.organizationNotExisted(self.getOrganizationId());
    self.setOrganizationName(org.getName());
    self.setOrganizationCode(org.getCode());
    if (self.getPhone() == null || self.getPhone().isEmpty()) {
        self.setInternationalTelCode("");
    }
    return self;
}
@Override
public List<OrganizationDTO> queryOrganizations(Long userId, Boolean includedDisabled) {
    // A user may only query their own organizations.
    CustomUserDetails details = DetailsHelperAssert.userDetailNotExisted();
    if (!userId.equals(details.getUserId())) {
        throw new CommonException(USER_ID_NOT_EQUAL_EXCEPTION);
    }
    // super-admin special case: sees every organization
    boolean admin = Boolean.TRUE.equals(details.getAdmin());
    return admin ? organizationMapper.selectAll() : getOwnedOrganizations(userId, includedDisabled);
}
/**
 * Lists the projects visible to the given user; admins see all projects.
 *
 * @param id               user id (must match the logged-in user)
 * @param includedDisabled when true, disabled projects are included;
 *                         null is treated the same as false
 * @return the user's projects, with categories merged in when the feature is enabled
 */
@Override
public List<ProjectDTO> queryProjects(Long id, Boolean includedDisabled) {
    CustomUserDetails customUserDetails = checkLoginUser(id);
    boolean isAdmin = false;
    if (customUserDetails.getAdmin() != null) {
        isAdmin = customUserDetails.getAdmin();
    }
    List<ProjectDTO> projects;
    // super-admin special case: sees every project
    if (isAdmin) {
        projects = projectMapper.selectAll();
    } else {
        ProjectDTO project = new ProjectDTO();
        // FIX: `!includedDisabled` auto-unboxed a possibly-null Boolean and threw
        // an NPE; null now safely falls back to "enabled projects only".
        if (!Boolean.TRUE.equals(includedDisabled)) {
            project.setEnabled(true);
        }
        projects = projectMapper.selectProjectsByUserId(id, project);
    }
    if (enableCategory) {
        projects = mergeCategories(projects);
    }
    return projects;
}
/**
 * Attaches category names to each project.
 * <p>
 * FIX: the original scanned the whole project-category mapping list once per
 * project (O(n*m)) and copied the category list twice; the mappings are now
 * grouped by project id once, giving O(n + m) with a single list per project.
 *
 * @param projectDTOS projects to decorate (mutated in place)
 * @return the same list, with categories set on every element
 */
private List<ProjectDTO> mergeCategories(List<ProjectDTO> projectDTOS) {
    Map<Long, List<String>> categoriesByProject = projectMapCategoryMapper.selectAllProjectMapCategories()
            .stream()
            .collect(Collectors.groupingBy(ProjectMapCategorySimpleDTO::getProjectId,
                    Collectors.mapping(ProjectMapCategorySimpleDTO::getCategory, Collectors.toList())));
    projectDTOS.forEach(p -> {
        List<ProjectCategoryDTO> categories = new ArrayList<>();
        for (String name : categoriesByProject.getOrDefault(p.getId(), Collections.emptyList())) {
            ProjectCategoryDTO category = new ProjectCategoryDTO();
            category.setName(name);
            categories.add(category);
        }
        p.setCategories(categories);
    });
    return projectDTOS;
}
/**
 * Validates the caller and returns an empty list — the underlying query has
 * been retired. Kept for interface compatibility.
 *
 * @param userId         must match the logged-in user
 * @param organizationId unused
 * @return an empty, mutable list
 */
@Override
public List<ProjectDTO> queryProjectsByOrganizationId(Long userId, Long organizationId) {
    checkLoginUser(userId);
    // FIX: removed dead code — a ProjectDTO was constructed and populated
    // here but never used.
    return new ArrayList<>();
}
/**
 * Asserts that a user is logged in and that the given id is that user's own id.
 *
 * @throws CommonException when not logged in or when ids differ
 */
private CustomUserDetails checkLoginUser(Long id) {
    CustomUserDetails details = DetailsHelper.getUserDetails();
    if (details == null) {
        throw new CommonException(USER_NOT_LOGIN_EXCEPTION);
    }
    if (!id.equals(details.getUserId())) {
        throw new CommonException(USER_ID_NOT_EQUAL_EXCEPTION);
    }
    return details;
}
// Pages users together with their roles for one source (site/org/project).
// size == 0 is the "no paging" convention: everything is returned in one page.
@Override
public PageInfo<UserDTO> pagingQueryUsersWithRoles(PageRequest pageRequest, RoleAssignmentSearchDTO roleAssignmentSearchDTO,
Long sourceId, ResourceType resourceType) {
int page = pageRequest.getPage();
int size = pageRequest.getSize();
boolean doPage = (size != 0);
Page<UserDTO> result = new Page<>(page, size);
if (doPage) {
// manual paging: a separate count query plus an offset/limit data query
int start = PageUtils.getBegin(page, size);
int count = userMapper.selectCountUsers(roleAssignmentSearchDTO, sourceId, resourceType.value(),
ParamUtils.arrToStr(roleAssignmentSearchDTO.getParam()));
List<UserDTO> users =
userMapper.selectUserWithRolesByOption(
roleAssignmentSearchDTO, sourceId, resourceType.value(), start, size,
ParamUtils.arrToStr(roleAssignmentSearchDTO.getParam()));
result.setTotal(count);
result.addAll(users);
} else {
// no paging: null start/size makes the mapper return all rows
List<UserDTO> users =
userMapper.selectUserWithRolesByOption(roleAssignmentSearchDTO, sourceId, resourceType.value(), null, null,
ParamUtils.arrToStr(roleAssignmentSearchDTO.getParam()));
result.setTotal(users.size());
result.addAll(users);
}
return result.toPageInfo();
}
// Site-level variant: sourceId is fixed to 0 (the whole site).
@Override
public PageInfo<UserDTO> pagingQueryUsersByRoleIdOnSiteLevel(PageRequest pageRequest, RoleAssignmentSearchDTO roleAssignmentSearchDTO, Long roleId, boolean doPage) {
return pagingQueryUsersByRoleIdAndLevel(pageRequest, roleAssignmentSearchDTO, roleId, 0L, ResourceLevel.SITE.value(), doPage);
}
// Shared implementation for the site/organization/project role-member queries.
// When doPage is false, all matches are wrapped in a single unpaged PageInfo.
private PageInfo<UserDTO> pagingQueryUsersByRoleIdAndLevel(PageRequest pageRequest, RoleAssignmentSearchDTO roleAssignmentSearchDTO, Long roleId, Long sourceId, String level, boolean doPage) {
// roleAssignmentSearchDTO may be null; param then stays null as well
String param = Optional.ofNullable(roleAssignmentSearchDTO).map(dto -> ParamUtils.arrToStr(dto.getParam())).orElse(null);
if (!doPage) {
List<UserDTO> users =
userMapper.selectUsersFromMemberRoleByOptions(roleId, "user", sourceId,
level, roleAssignmentSearchDTO, param);
Page<UserDTO> result = new Page<>();
result.addAll(users);
return result.toPageInfo();
}
return PageHelper.startPage(pageRequest.getPage(), pageRequest.getSize()).doSelectPageInfo(() -> userMapper.selectUsersFromMemberRoleByOptions(roleId, "user", sourceId,
level, roleAssignmentSearchDTO, param));
}
// Organization-level variant: sourceId is the organization id.
@Override
public PageInfo<UserDTO> pagingQueryUsersByRoleIdOnOrganizationLevel(PageRequest pageRequest, RoleAssignmentSearchDTO roleAssignmentSearchDTO, Long roleId, Long sourceId, boolean doPage) {
return pagingQueryUsersByRoleIdAndLevel(pageRequest, roleAssignmentSearchDTO, roleId, sourceId, ResourceLevel.ORGANIZATION.value(), doPage);
}
// Project-level variant: sourceId is the project id.
@Override
public PageInfo<UserDTO> pagingQueryUsersByRoleIdOnProjectLevel(PageRequest pageRequest, RoleAssignmentSearchDTO roleAssignmentSearchDTO, Long roleId, Long sourceId, boolean doPage) {
return pagingQueryUsersByRoleIdAndLevel(pageRequest, roleAssignmentSearchDTO, roleId, sourceId, ResourceLevel.PROJECT.value(), doPage);
}
// Uploads a raw photo for the logged-in user via the file service and
// returns its URL. No resizing or persistence to the user row happens here.
@Override
public String uploadPhoto(Long id, MultipartFile file) {
checkLoginUser(id);
return fileFeignClient.uploadFile("iam-service", file.getOriginalFilename(), file).getBody();
}
/**
 * Crops/rotates the uploaded avatar, uploads it via the file service and
 * persists the resulting URL on the user record.
 *
 * @return the stored photo URL
 * @throws CommonException when cropping, upload or the DB update fails
 */
@Override
public String savePhoto(Long id, MultipartFile file, Double rotate, Integer axisX, Integer axisY, Integer width, Integer height) {
    checkLoginUser(id);
    UserDTO dto = userAssertHelper.userNotExisted(id);
    try {
        file = ImageUtils.cutImage(file, rotate, axisX, axisY, width, height);
        String photoUrl = fileFeignClient.uploadFile("iam-service", file.getOriginalFilename(), file).getBody();
        dto.setImageUrl(photoUrl);
        if (userMapper.updateByPrimaryKeySelective(dto) != 1) {
            throw new CommonException("error.user.update");
        }
        return photoUrl;
    } catch (Exception e) {
        // FIX: pass the throwable as the last argument so SLF4J records the
        // stack trace — previously only the message was logged.
        LOGGER.warn("error happened when save photo {}", e.getMessage(), e);
        throw new CommonException("error.user.photo.save");
    }
}
@Override
public List<OrganizationDTO> queryOrganizationWithProjects() {
    // Deliberately returns an empty, mutable list; the query has been retired.
    return new ArrayList<>();
}
/**
 * Returns the organizations a user can see: those joined directly through
 * member-roles, plus (marked {@code into=false}) those reachable only via
 * project membership.
 * <p>
 * FIX: membership tests now use a HashSet — the original
 * List.removeAll/List.contains combination was O(n*m).
 */
private List<OrganizationDTO> getOwnedOrganizations(Long userId, Boolean includedDisabled) {
    List<OrganizationDTO> result = organizationMapper.selectFromMemberRoleByMemberId(userId, includedDisabled);
    List<OrganizationDTO> viaProjects = organizationMapper.selectOrgByUserAndPros(userId, includedDisabled);
    Set<Long> directIds = result.stream().map(OrganizationDTO::getId).collect(Collectors.toSet());
    // set difference: organizations reachable only through projects
    List<OrganizationDTO> extras = viaProjects.stream()
            .filter(o -> !directIds.contains(o.getId()))
            .collect(Collectors.toList());
    extras.forEach(o -> o.setInto(false));
    result.addAll(extras);
    return result;
}
/**
 * Changes the logged-in user's own password after verifying the original
 * password and (optionally) the organization's password policy, then records
 * the change and sends a site notification.
 *
 * @param userId          user whose password is changed
 * @param userPasswordDTO carries original and new password
 * @param checkPassword   when true, validate against the org password policy
 * @param checkLogin      when true, assert userId is the logged-in user
 * @throws CommonException for LDAP users, a wrong original password,
 *                         or policy violations
 */
@Override
public void selfUpdatePassword(Long userId, UserPasswordDTO userPasswordDTO, Boolean checkPassword, Boolean checkLogin) {
    if (checkLogin) {
        checkLoginUser(userId);
    }
    UserDTO user = userAssertHelper.userNotExisted(userId);
    // FIX: null-safe check — `user.getLdap()` auto-unboxed a possibly-null
    // Boolean and could throw an NPE.
    if (Boolean.TRUE.equals(user.getLdap())) {
        throw new CommonException("error.ldap.user.can.not.update.password");
    }
    if (!ENCODER.matches(userPasswordDTO.getOriginalPassword(), user.getPassword())) {
        throw new CommonException("error.password.originalPassword");
    }
    // organization password policy
    if (checkPassword) {
        BaseUserDTO baseUserDTO = new BaseUserDTO();
        BeanUtils.copyProperties(user, baseUserDTO);
        OrganizationDTO organizationDTO = organizationMapper.selectByPrimaryKey(user.getOrganizationId());
        if (organizationDTO != null) {
            BasePasswordPolicyDTO example = new BasePasswordPolicyDTO();
            example.setOrganizationId(organizationDTO.getId());
            BasePasswordPolicyDTO basePasswordPolicyDO = basePasswordPolicyMapper.selectOne(example);
            if (userPasswordDTO.getPassword() != null) {
                passwordPolicyManager.passwordValidate(userPasswordDTO.getPassword(), baseUserDTO, basePasswordPolicyDO);
            }
            // validate the new password itself
            userPasswordValidator.validate(userPasswordDTO.getPassword(), organizationDTO.getId(), true);
        }
    }
    user.setPassword(ENCODER.encode(userPasswordDTO.getPassword()));
    updateSelective(user);
    passwordRecord.updatePassword(user.getId(), user.getPassword());
    // send site message; sender and recipient are the same user
    Map<String, Object> paramsMap = new HashMap<>();
    paramsMap.put("userName", user.getRealName());
    List<Long> userIds = new ArrayList<>();
    userIds.add(user.getId());
    sendNotice(user.getId(), userIds, "modifyPassword", paramsMap, 0L);
}
// Optimistic-lock update helper: requires objectVersionNumber, fails loudly
// if exactly one row was not updated, then re-reads the fresh row.
private UserDTO updateSelective(UserDTO userDTO) {
userAssertHelper.objectVersionNumberNotNull(userDTO.getObjectVersionNumber());
if (userMapper.updateByPrimaryKeySelective(userDTO) != 1) {
throw new UpdateExcetion("error.user.update");
}
return userMapper.selectByPrimaryKey(userDTO);
}
/**
 * Returns the user's own profile, enriched with organization name and code.
 */
@Override
public UserDTO queryInfo(Long userId) {
    checkLoginUser(userId);
    UserDTO user = userAssertHelper.userNotExisted(userId);
    OrganizationDTO org = organizationAssertHelper.organizationNotExisted(user.getOrganizationId());
    user.setOrganizationName(org.getName());
    user.setOrganizationCode(org.getCode());
    return user;
}
/**
 * Looks up the organization's registrant plus the site "admin" account and
 * bundles both into a RegistrantInfoDTO.
 */
@Override
public RegistrantInfoDTO queryRegistrantInfoAndAdmin(String orgCode) {
    OrganizationDTO org = organizationAssertHelper.organizationNotExisted(orgCode);
    UserDTO registrant = userAssertHelper.userNotExisted(org.getUserId());
    UserDTO admin = userAssertHelper.userNotExisted(WhichColumn.LOGIN_NAME, "admin");
    RegistrantInfoDTO info = new RegistrantInfoDTO();
    info.setUser(registrant);
    info.setOrganizationName(org.getName());
    info.setAdminId(admin.getId());
    return info;
}
/**
 * Updates a user's profile. When devops messaging is enabled, a USER_UPDATE
 * saga event carrying the new email/name/username is emitted after the DB
 * update. The result is enriched with organization name and code.
 *
 * @param userDTO    fields to update (selective)
 * @param checkLogin when true, assert the id is the logged-in user's
 * @throws CommonException when the saga event cannot be started
 */
@Override
@Transactional
public UserDTO updateInfo(UserDTO userDTO, Boolean checkLogin) {
    if (checkLogin) {
        checkLoginUser(userDTO.getId());
    }
    UserDTO dto;
    if (devopsMessage) {
        UserEventPayload userEventPayload = new UserEventPayload();
        dto = updateSelective(userDTO);
        userEventPayload.setEmail(dto.getEmail());
        userEventPayload.setId(dto.getId().toString());
        userEventPayload.setName(dto.getRealName());
        userEventPayload.setUsername(dto.getLoginName());
        // FIX: removed `BeanUtils.copyProperties(dto, dto)` — copying an
        // object onto itself was a no-op left over from an earlier version.
        try {
            String input = mapper.writeValueAsString(userEventPayload);
            sagaClient.startSaga(USER_UPDATE, new StartInstanceDTO(input, "user", "" + dto.getId()));
        } catch (Exception e) {
            throw new CommonException("error.UserService.updateInfo.event", e);
        }
    } else {
        dto = updateSelective(userDTO);
    }
    OrganizationDTO organizationDTO = organizationAssertHelper.organizationNotExisted(dto.getOrganizationId());
    dto.setOrganizationName(organizationDTO.getName());
    dto.setOrganizationCode(organizationDTO.getCode());
    return dto;
}
/**
 * Runs uniqueness checks for whichever of loginName/email/phone are present.
 * At least one of the three must be supplied.
 */
@Override
public void check(UserDTO user) {
    boolean hasLoginName = !StringUtils.isEmpty(user.getLoginName());
    boolean hasEmail = !StringUtils.isEmpty(user.getEmail());
    boolean hasPhone = !StringUtils.isEmpty(user.getPhone());
    if (!hasEmail && !hasLoginName && !hasPhone) {
        throw new CommonException("error.user.validation.fields.empty");
    }
    if (hasLoginName) {
        checkLoginName(user);
    }
    if (hasEmail) {
        checkEmail(user);
    }
    if (hasPhone) {
        checkPhone(user);
    }
}
/**
 * Ensures the phone number is unique among <em>enabled</em> users.
 * A null user id means a create-time check; otherwise the user's own row
 * is allowed to match.
 *
 * @param user user being validated
 * @throws CommonException when another enabled user already has the phone
 */
private void checkPhone(UserDTO user) {
    // FIX: compare the Long id against null directly — the original used the
    // deprecated StringUtils.isEmpty(Object) overload on a non-String.
    boolean createCheck = user.getId() == null;
    UserDTO example = new UserDTO();
    example.setPhone(user.getPhone());
    example.setEnabled(true);
    UserDTO existing = userMapper.selectOne(example);
    boolean duplicated = createCheck
            ? existing != null
            : existing != null && !user.getId().equals(existing.getId());
    if (duplicated) {
        throw new CommonException("error.user.phone.exist");
    }
}
/**
 * Ensures the email is unique. A null user id means a create-time check;
 * otherwise the user's own row is allowed to match.
 *
 * @throws CommonException when another user already has the email
 */
private void checkEmail(UserDTO user) {
    // FIX: compare the Long id against null directly — the original used the
    // deprecated StringUtils.isEmpty(Object) overload on a non-String.
    boolean createCheck = user.getId() == null;
    UserDTO example = new UserDTO();
    example.setEmail(user.getEmail());
    UserDTO existing = userMapper.selectOne(example);
    boolean duplicated = createCheck
            ? existing != null
            : existing != null && !user.getId().equals(existing.getId());
    if (duplicated) {
        throw new CommonException("error.user.email.exist");
    }
}
/**
 * Validates the login name against the allowed pattern and ensures it is
 * unique. A null user id means a create-time check; otherwise the user's
 * own row is allowed to match.
 *
 * @throws CommonException on a malformed or duplicated login name
 */
private void checkLoginName(UserDTO user) {
    // FIX: compare the Long id against null directly — the original used the
    // deprecated StringUtils.isEmpty(Object) overload on a non-String.
    boolean createCheck = user.getId() == null;
    String loginName = user.getLoginName();
    if (!Pattern.matches(UserDTO.LOGIN_NAME_REG, loginName)) {
        throw new CommonException("error.user.loginName.regex");
    }
    UserDTO example = new UserDTO();
    example.setLoginName(loginName);
    UserDTO existing = userMapper.selectOne(example);
    boolean duplicated = createCheck
            ? existing != null
            : existing != null && !user.getId().equals(existing.getId());
    if (duplicated) {
        throw new CommonException("error.user.loginName.exist");
    }
}
/** Looks a user up by exact login name; returns null when absent. */
@Override
public UserDTO queryByLoginName(String loginName) {
    UserDTO example = new UserDTO();
    example.setLoginName(loginName);
    return userMapper.selectOne(example);
}
/**
 * Locks a user until now + lockExpireTime seconds.
 *
 * @param lockExpireTime lock duration in seconds
 */
@Override
public UserDTO lockUser(Long userId, Integer lockExpireTime) {
    UserDTO userDTO = userAssertHelper.userNotExisted(userId);
    userDTO.setLocked(true);
    // FIX: multiply as long (`1000L`) — `lockExpireTime * 1000` was evaluated
    // in int arithmetic and overflowed for durations over ~24.8 days.
    userDTO.setLockedUntilAt(new Date(System.currentTimeMillis() + lockExpireTime * 1000L));
    return updateSelective(userDTO);
}
// Pages through site administrators only: the admin flag on the example
// object is forced to true before delegating to the mapper.
@Override
public PageInfo<UserDTO> pagingQueryAdminUsers(PageRequest pageRequest, UserDTO userDTO, String params) {
userDTO.setAdmin(true);
return PageHelper
.startPage(pageRequest.getPage(), pageRequest.getSize())
.doSelectPageInfo(() -> userMapper.selectAdminUserPage(userDTO, params));
}
/**
 * Grants the site-admin flag to each existing user in {@code ids};
 * unknown ids and users who are already admins are skipped.
 */
@Override
@Transactional
public void addAdminUsers(long[] ids) {
    for (long id : ids) {
        UserDTO dto = userMapper.selectByPrimaryKey(id);
        // FIX: null-safe flag check — `!dto.getAdmin()` auto-unboxed a
        // possibly-null Boolean and could throw an NPE.
        if (dto != null && !Boolean.TRUE.equals(dto.getAdmin())) {
            dto.setAdmin(true);
            updateSelective(dto);
        }
    }
}
/**
 * Revokes the site-admin flag from a user, refusing when that would leave
 * the site without any administrator.
 *
 * @throws CommonException when at most one admin remains
 */
@Override
public void deleteAdminUser(long id) {
    UserDTO dto = userAssertHelper.userNotExisted(id);
    UserDTO example = new UserDTO();
    example.setAdmin(true);
    // guard clause: at least one administrator must always remain
    if (userMapper.selectCount(example) <= 1) {
        throw new CommonException("error.user.admin.size");
    }
    // FIX: null-safe flag check — `dto.getAdmin()` auto-unboxed a
    // possibly-null Boolean and could throw an NPE.
    if (Boolean.TRUE.equals(dto.getAdmin())) {
        dto.setAdmin(false);
        updateSelective(dto);
    }
}
/** Returns the users matching {@code ids}; an empty/null array yields an empty list. */
@Override
public List<UserDTO> listUsersByIds(Long[] ids, Boolean onlyEnabled) {
    if (ObjectUtils.isEmpty(ids)) {
        return new ArrayList<>();
    }
    return userMapper.listUsersByIds(ids, onlyEnabled);
}
/** Returns the users matching {@code emails}; an empty/null array yields an empty list. */
@Override
public List<UserDTO> listUsersByEmails(String[] emails) {
    if (ObjectUtils.isEmpty(emails)) {
        return new ArrayList<>();
    }
    return userMapper.listUsersByEmails(emails);
}
/** Returns the users matching {@code loginNames}; an empty/null array yields an empty list. */
@Override
public List<UserDTO> listUsersByLoginNames(String[] loginNames, Boolean onlyEnabled) {
    if (ObjectUtils.isEmpty(loginNames)) {
        return new ArrayList<>();
    }
    return userMapper.listUsersByLoginNames(loginNames, onlyEnabled);
}
// Combined profile update: changes the password (with policy check, without
// re-asserting login) and then, if supplied, the display name.
@Override
@Transactional(rollbackFor = Exception.class)
public UserInfoDTO updateUserInfo(Long id, UserInfoDTO userInfoDTO) {
// update the password first
UserPasswordDTO passwordDTO = new UserPasswordDTO();
passwordDTO.setOriginalPassword(userInfoDTO.getOriginalPassword());
passwordDTO.setPassword(userInfoDTO.getPassword());
selfUpdatePassword(id, passwordDTO, true, false);
// then update the real name, when one was provided
String userName = userInfoDTO.getUserName();
if (!StringUtils.isEmpty(userName)) {
UserDTO user = userMapper.selectByPrimaryKey(id);
user.setRealName(userName);
updateInfo(user, false);
}
return userInfoDTO;
}
// Pages the organizations a user belongs to together with the user's roles.
// Manual paging: total comes from a member-role count query.
@Override
public PageInfo<OrganizationDTO> pagingQueryOrganizationsWithRoles(PageRequest pageRequest, Long id, String params) {
int page = pageRequest.getPage();
int size = pageRequest.getSize();
Page<OrganizationDTO> result = new Page<>(page, size);
int start = PageUtils.getBegin(page, size);
int count = memberRoleMapper.selectCountBySourceId(id, "organization");
result.setTotal(count);
result.addAll(organizationMapper.selectOrganizationsWithRoles(id, start, size, params));
return result.toPageInfo();
}
// Pages the projects a user belongs to together with the user's roles.
// size == 0 is the "no paging" convention: everything in one page.
@Override
public PageInfo<ProjectDTO> pagingQueryProjectAndRolesById(PageRequest pageRequest, Long id, String params) {
int page = pageRequest.getPage();
int size = pageRequest.getSize();
Page<ProjectDTO> result = new Page<>(page, size);
if (size == 0) {
List<ProjectDTO> projectList = projectMapper.selectProjectsWithRoles(id, null, null, params);
result.setTotal(projectList.size());
result.addAll(projectList);
} else {
// manual paging: separate count query plus offset/limit data query
int start = PageUtils.getBegin(page, size);
int count = memberRoleMapper.selectCountBySourceId(id, "project");
result.setTotal(count);
List<ProjectDTO> projectList = projectMapper.selectProjectsWithRoles(id, start, size, params);
result.addAll(projectList);
}
return result.toPageInfo();
}
// Creates a user and assigns the requested roles in one transaction.
// Role validation happens first so nothing is inserted on bad input; each
// member-role row is inserted only when not already present.
@Override
@Transactional
public UserDTO createUserAndAssignRoles(final CreateUserWithRolesDTO userWithRoles) {
List<RoleDTO> roles = validateRoles(userWithRoles);
UserDTO user = validateUser(userWithRoles);
if (userMapper.insertSelective(user) != 1) {
throw new CommonException("error.user.create");
}
// re-read so the generated id is populated
UserDTO userDTO = userMapper.selectByPrimaryKey(user);
Long userId = userDTO.getId();
roles.forEach(r -> {
MemberRoleDTO memberRole = new MemberRoleDTO();
memberRole.setMemberId(userId);
memberRole.setMemberType(userWithRoles.getMemberType());
memberRole.setRoleId(r.getId());
memberRole.setSourceId(userWithRoles.getSourceId());
memberRole.setSourceType(userWithRoles.getSourceType());
// insert only when this member-role pairing does not exist yet
if (memberRoleMapper.selectOne(memberRole) == null
&& memberRoleMapper.insertSelective(memberRole) != 1) {
throw new CommonException("error.memberRole.insert");
}
});
return userDTO;
}
// Pages the logged-in user's own projects filtered by example + params.
@Override
public PageInfo<ProjectDTO> pagingQueryProjectsSelf(ProjectDTO projectDTO,
PageRequest pageRequest, String params) {
CustomUserDetails customUserDetails = DetailsHelperAssert.userDetailNotExisted();
Long userId = customUserDetails.getUserId();
return PageHelper
.startPage(pageRequest.getPage(), pageRequest.getSize())
.doSelectPageInfo(() -> projectMapper.selectProjectsByUserIdWithParam(userId, projectDTO, params));
}
/**
 * Pages the logged-in user's own organizations filtered by example + params.
 */
@Override
public PageInfo<OrganizationDTO> pagingQueryOrganizationsSelf(OrganizationDTO organizationDTO,
                                                              PageRequest pageRequest, String params) {
    // FIX: use the asserting helper (consistent with pagingQueryProjectsSelf)
    // so an unauthenticated call fails with a meaningful error instead of an
    // NPE on customUserDetails.getUserId().
    CustomUserDetails customUserDetails = DetailsHelperAssert.userDetailNotExisted();
    return PageHelper
            .startPage(pageRequest.getPage(), pageRequest.getSize())
            .doSelectPageInfo(() -> organizationMapper.selectOrganizationsByUserId(customUserDetails.getUserId(), organizationDTO, params));
}
// Returns the ids of all users (delegates straight to the mapper).
@Override
public Long[] listUserIds() {
return userMapper.listUserIds();
}
/**
 * Validates and normalizes a user payload before insertion: required fields,
 * uniqueness of login name and email, password-policy compliance, password
 * encoding, and sensible defaults for language/time zone/flags.
 *
 * @param userWithRoles creation payload carrying the user
 * @return the normalized user, ready for insertSelective
 */
private UserDTO validateUser(CreateUserWithRolesDTO userWithRoles) {
    UserDTO user = userWithRoles.getUser();
    String loginName = user.getLoginName();
    String email = user.getEmail();
    if (StringUtils.isEmpty(loginName)) {
        throw new CommonException("error.user.loginName.empty");
    }
    if (StringUtils.isEmpty(email)) {
        throw new CommonException("error.user.email.empty");
    }
    userAssertHelper.loginNameExisted(loginName);
    userAssertHelper.emailExisted(email);
    validatePassword(user);
    user.setPassword(ENCODER.encode(user.getPassword()));
    user.setEnabled(true);
    user.setLdap(false);
    // Defaults for optional fields.
    if (user.getLanguage() == null) {
        user.setLanguage("zh_CN");
    }
    if (user.getTimeZone() == null) {
        user.setTimeZone("CTT");
    }
    // new Date() is the idiomatic equivalent of new Date(System.currentTimeMillis()).
    user.setLastPasswordUpdatedAt(new Date());
    user.setLocked(false);
    user.setAdmin(false);
    return user;
}
/**
 * Validates the plain-text password of a user against the organization's
 * password policy, if the organization has one.
 *
 * @param user the user whose (still unencoded) password is checked
 * @throws CommonException when the password is empty or fails the policy
 */
private void validatePassword(UserDTO user) {
    String password = user.getPassword();
    if (StringUtils.isEmpty(password)) {
        throw new CommonException("error.user.password.empty");
    }
    Long organizationId = user.getOrganizationId();
    BaseUserDTO userDO = new BaseUserDTO();
    BeanUtils.copyProperties(user, userDO);
    BasePasswordPolicyDTO example = new BasePasswordPolicyDTO();
    example.setOrganizationId(organizationId);
    // Only run the full policy validation when the org has a policy AND the
    // password differs from the policy's original (default) password.
    Optional.ofNullable(basePasswordPolicyMapper.selectOne(example))
            .ifPresent(passwordPolicy -> {
                if (!password.equals(passwordPolicy.getOriginalPassword())) {
                    passwordPolicyManager.passwordValidate(password, userDO, passwordPolicy);
                }
            });
}
/**
 * Resolves and validates the role codes of a creation payload: the user must
 * be present, the source type must be legal, and every role must exist and
 * match the resource level of the source. Defaults memberType to "user".
 *
 * @param userWithRoles creation payload
 * @return the resolved roles, in iteration order of the code set
 */
private List<RoleDTO> validateRoles(CreateUserWithRolesDTO userWithRoles) {
    UserDTO user = userWithRoles.getUser();
    if (user == null) {
        throw new CommonException("error.user.null");
    }
    Long sourceId = userWithRoles.getSourceId();
    String sourceType = userWithRoles.getSourceType();
    validateSourceType(user, sourceId, sourceType);
    if (userWithRoles.getMemberType() == null) {
        userWithRoles.setMemberType("user");
    }
    Set<String> codes = userWithRoles.getRoleCode();
    if (codes == null) {
        throw new CommonException("error.roleCode.null");
    }
    List<RoleDTO> resolved = new ArrayList<>();
    for (String code : codes) {
        RoleDTO role = roleAssertHelper.roleNotExisted(code);
        // Each role's resource level must match the source's level.
        if (!role.getResourceLevel().equals(sourceType)) {
            throw new CommonException("error.illegal.role.level");
        }
        resolved.add(role);
    }
    return resolved;
}
/**
 * Checks that the source type is a legal assignment target (project or
 * organization only) and stamps the owning organization id onto the user.
 *
 * @param user       user to receive the organization id
 * @param sourceId   id of the project/organization the roles belong to
 * @param sourceType resource level of the source
 */
private void validateSourceType(UserDTO user, Long sourceId, String sourceType) {
    ResourceLevelValidator.validate(sourceType);
    boolean siteLevel = ResourceLevel.SITE.value().equals(sourceType);
    boolean userLevel = ResourceLevel.USER.value().equals(sourceType);
    // Site- and user-level sources are not valid assignment targets here.
    if (siteLevel || userLevel) {
        throw new CommonException("error.illegal.sourceType");
    }
    if (ResourceLevel.PROJECT.value().equals(sourceType)) {
        ProjectDTO project = projectAssertHelper.projectNotExisted(sourceId);
        user.setOrganizationId(project.getOrganizationId());
    } else {
        // organization level: the source itself is the organization
        organizationAssertHelper.organizationNotExisted(sourceId);
        user.setOrganizationId(sourceId);
    }
}
/**
 * Resolves the organization id of the user registered under the given email.
 *
 * @param email user email; the helper throws when no such user exists
 * @return the user's organization id
 */
@Override
public Long queryOrgIdByEmail(String email) {
    return userAssertHelper.userNotExisted(WhichColumn.EMAIL, email).getOrganizationId();
}
/**
 * Returns the organizations and (enabled) projects the user belongs to via
 * role membership, reduced to id/name/code triples.
 *
 * @param userId the member's user id
 * @return container holding the organization list and the enabled-project list
 */
@Override
public OrganizationProjectDTO queryByUserIdWithRoleOrganizationAndProject(Long userId) {
    OrganizationProjectDTO organizationProjectDTO = new OrganizationProjectDTO();
    organizationProjectDTO.setOrganizationList(organizationMapper.selectFromMemberRoleByMemberId(userId, false).stream().map(organizationDO ->
            OrganizationProjectDTO.newInstanceOrganization(organizationDO.getId(), organizationDO.getName(), organizationDO.getCode())
    ).collect(Collectors.toList()));
    ProjectDTO projectDTO = new ProjectDTO();
    // only query enabled projects
    projectDTO.setEnabled(true);
    organizationProjectDTO.setProjectList(projectMapper.selectProjectsByUserId(userId, projectDTO)
            .stream().map(projectDO1 ->
                    OrganizationProjectDTO.newInstanceProject(projectDO1.getId(), projectDO1.getName(), projectDO1.getCode())).collect(Collectors.toList()));
    return organizationProjectDTO;
}
/**
 * Pages through users, either site-wide (organizationId == 0) or within one
 * organization. Returns an empty page when there is neither a search param
 * nor an organization scope.
 *
 * @param pageRequest    page/size
 * @param param          free-text search parameter (may be empty)
 * @param organizationId 0 for site scope, otherwise the organization id
 * @return one page of simplified user info
 */
@Override
public PageInfo<SimplifiedUserDTO> pagingQueryAllUser(PageRequest pageRequest, String param, Long organizationId) {
    // Fix: compare via Long.valueOf(0L).equals(...) everywhere — the original's
    // organizationId.equals(0L) threw a NullPointerException for a null id when
    // param was non-empty (the first, null-safe check was skipped).
    boolean siteScope = Long.valueOf(0L).equals(organizationId);
    if (StringUtils.isEmpty(param) && siteScope) {
        Page<SimplifiedUserDTO> result = new Page<>(0, 20);
        result.setTotal(0);
        return result.toPageInfo();
    }
    int page = pageRequest.getPage();
    int size = pageRequest.getSize();
    if (siteScope) {
        return PageHelper.startPage(page, size).doSelectPageInfo(() -> userMapper.selectAllUsersSimplifiedInfo(param));
    } else {
        return PageHelper.startPage(page, size).doSelectPageInfo(() -> userMapper.selectUsersOptional(param, organizationId));
    }
}
/**
 * Pages through site-level users matching the optional userId/email/param
 * filters.
 *
 * @param userId      optional user-id filter
 * @param email       optional email filter
 * @param pageRequest page/size
 * @param param       free-text search parameter
 * @return one page of site-level users
 */
@Override
public PageInfo<UserDTO> pagingQueryUsersOnSiteLevel(Long userId, String email, PageRequest pageRequest, String param) {
    return
            PageHelper
                    .startPage(pageRequest.getPage(), pageRequest.getSize())
                    .doSelectPageInfo(() -> userMapper.selectUsersByLevelAndOptions(ResourceLevel.SITE.value(), 0L, userId, email, param));
}
/**
 * Returns global user statistics: the total user count ("allUsers") and the
 * number of users created today ("newUsers", i.e. within [today, tomorrow)).
 *
 * @return map with keys "allUsers" and "newUsers"
 */
@Override
public Map<String, Object> queryAllAndNewUsers() {
    Map<String, Object> statistics = new HashMap<>();
    statistics.put("allUsers", userMapper.selectCount(new UserDTO()));
    // Today's window expressed as ISO date strings [today, tomorrow).
    LocalDate today = LocalDate.now();
    statistics.put("newUsers", userMapper.newUsersByDate(today.toString(), today.plusDays(1).toString()));
    return statistics;
}
/**
 * Pages through the roles of the given user, grouped per source (site /
 * organization / project). Only the logged-in user may query their own roles.
 *
 * @param pageRequest    page/size
 * @param param          free-text search parameter
 * @param userId         must equal the current login's id
 * @param organizationId organization scope filter
 * @return one page of user-role rows with parsed role name/enabled info
 */
@Override
public PageInfo<UserRoleDTO> pagingQueryRole(PageRequest pageRequest, String param, Long userId, Long organizationId) {
    CustomUserDetails customUserDetails = DetailsHelperAssert.userDetailNotExisted();
    Long id = customUserDetails.getUserId();
    if (!id.equals(userId)) {
        throw new CommonException("error.permission.id.notMatch");
    }
    PageInfo<UserRoleDTO> result = PageHelper.startPage(pageRequest.getPage(), pageRequest.getSize()).doSelectPageInfo(() -> userMapper.selectRoles(userId, param, organizationId));
    result.getList().forEach(i -> {
        // roleNames is a comma-separated list; judging from the [0]/[1]/[2]
        // indexing below each entry is "name|code|enabledFlag" where flag "0"
        // means disabled — TODO confirm against the selectRoles mapper SQL.
        String[] roles = i.getRoleNames().split(",");
        List<RoleNameAndEnabledDTO> list = new ArrayList<>(roles.length);
        for (int j = 0; j < roles.length; j++) {
            String[] nameAndEnabled = roles[j].split("\\|");
            boolean roleEnabled = true;
            if (nameAndEnabled[2].equals("0")) {
                roleEnabled = false;
            }
            list.add(new RoleNameAndEnabledDTO(nameAndEnabled[0], nameAndEnabled[1], roleEnabled));
        }
        i.setRoles(list);
        // Project-level rows additionally carry their owning organization id.
        if (ResourceLevel.PROJECT.value().equals(i.getLevel())) {
            i.setOrganizationId(projectMapper.selectByPrimaryKey(i.getId()).getOrganizationId());
        }
    });
    return result;
}
/**
 * Async convenience overload that defaults sendAll to false, i.e. the
 * per-user existence lookup in the 6-argument variant applies.
 */
@Override
@Async("notify-executor")
public Future<String> sendNotice(Long fromUserId, List<Long> userIds, String code,
                                 Map<String, Object> params, Long sourceId) {
    return sendNotice(fromUserId, userIds, code, params, sourceId, false);
}
/**
 * Asynchronously sends a notification with the given code to the target users
 * via the notify Feign client.
 *
 * @param fromUserId sender's user id
 * @param userIds    recipient ids; null/empty short-circuits
 * @param code       notification template code
 * @param params     template parameters
 * @param sourceId   id of the triggering source
 * @param sendAll    when true, skip the per-user existence lookup and target all ids
 * @return elapsed time in seconds, wrapped in an AsyncResult
 */
@Override
@Async("notify-executor")
public Future<String> sendNotice(Long fromUserId, List<Long> userIds, String code, Map<String, Object> params, Long sourceId, boolean sendAll) {
    // Fix: guard BEFORE touching userIds — the original logged userIds.size()
    // first, which threw a NullPointerException for a null list instead of
    // returning gracefully.
    if (userIds == null || userIds.isEmpty()) {
        return new AsyncResult<>("userId is null");
    }
    LOGGER.info("ready : send Notice to {} users", userIds.size());
    long beginTime = System.currentTimeMillis();
    NoticeSendDTO noticeSendDTO = new NoticeSendDTO();
    noticeSendDTO.setCode(code);
    NoticeSendDTO.User currentUser = new NoticeSendDTO.User();
    currentUser.setId(fromUserId);
    noticeSendDTO.setFromUser(currentUser);
    noticeSendDTO.setParams(params);
    noticeSendDTO.setSourceId(sourceId);
    List<NoticeSendDTO.User> users = new LinkedList<>();
    userIds.forEach(id -> {
        NoticeSendDTO.User user = new NoticeSendDTO.User();
        user.setId(id);
        // When sending to everyone there is no need to check role assignments;
        // send to every id and avoid the per-user table lookup.
        if (!sendAll) {
            UserDTO userDTO = userMapper.selectByPrimaryKey(id);
            if (userDTO != null) {
                // A role assignment may reference an already-deleted role; only
                // users that still exist receive the notice.
                user.setEmail(userDTO.getEmail());
                users.add(user);
            }
        } else {
            users.add(user);
        }
    });
    noticeSendDTO.setTargetUsers(users);
    LOGGER.info("start : send Notice to {} users", userIds.size());
    notifyFeignClient.postNotice(noticeSendDTO);
    LOGGER.info("end : send Notice to {} users", userIds.size());
    return new AsyncResult<>((System.currentTimeMillis() - beginTime) / 1000 + "s");
}
/**
 * Disables the given user (throws when the user does not exist) and persists
 * the change.
 *
 * @param userId id of the user to disable
 * @return the updated user
 */
@Override
public UserDTO updateUserDisabled(Long userId) {
    UserDTO userDTO = userAssertHelper.userNotExisted(userId);
    userDTO.setEnabled(false);
    return updateSelective(userDTO);
}
}
<|start_filename|>react/src/app/iam/containers/global/member-role/MemberRoleType.js<|end_filename|>
import { axios } from '@choerodon/boot';
import querystring from 'query-string';
export const pageSize = 10;
/**
* 公用方法类
* 当要改写 src/app/iam/containers/global/member-role/MemberRoleType.js中的内容时可以逐步把用到的东西移到store里
*/
/**
 * Shared helper for the member-role pages. Builds the scope-specific API
 * endpoints (site / organization / project) once in the constructor and wraps
 * every member/role HTTP call. Mutates the owning component via
 * `this.context` (setState / forceUpdate).
 */
export default class MemberRoleType {
  constructor(context) {
    this.context = context;
    const { AppState } = this.context.props;
    this.data = AppState.currentMenuType;
    const { type, id, name } = this.data;
    // Base API gateway for the current scope, e.g. /iam/v1/organizations/:id.
    let apiGetway = `/iam/v1/${type}s/${id}`;
    let codePrefix;
    switch (type) {
      case 'organization':
        codePrefix = 'organization';
        break;
      case 'project':
        codePrefix = 'project';
        break;
      case 'site':
        codePrefix = 'global';
        // Site level carries no id segment.
        apiGetway = `/iam/v1/${type}`;
        break;
      default:
        break;
    }
    this.code = `${codePrefix}.memberrole`;
    this.values = { name: name || AppState.getSiteInfo.systemName || 'Choerodon' };
    this.urlRoles = `${apiGetway}/role_members/users/roles`; // user list plus the roles each user owns
    this.urlUserCount = `${apiGetway}/role_members/users/count`; // role list plus the user count per role
    this.urlUsers = `${apiGetway}/role_members/users`; // users under one role
    this.urlClientRoles = `${apiGetway}/role_members/clients/roles`; // client list plus the roles each client owns
    this.urlClientCount = `${apiGetway}/role_members/clients/count`; // role list plus the client count per role
    this.urlClients = `${apiGetway}/role_members/clients`; // clients under one role
    this.urlDeleteMember = `${apiGetway}/role_members/delete`; // batch-remove member roles
    this.urlRoleMember = `${apiGetway}/role_members`; // batch-assign roles to users/clients
    this.roleId = id || 0;
  }
  // Assign roles (POST). Member ids travel in the query string, the role
  // payload in the body.
  fetchRoleMember(memberIds, body, memberType, isEdit) {
    let str = `member_ids=${memberIds.join(',')}`;
    if (isEdit === true) {
      str += '&is_edit=true';
      if (memberType === 'client') {
        str += '&member_type=client';
      }
    }
    return axios.post(`${this.urlRoleMember}?${str}`, JSON.stringify(body));
  }
  // Remove member roles (POST to the dedicated delete endpoint).
  deleteRoleMember(body) {
    const { id } = this.data;
    body.sourceId = id || 0;
    return axios.post(this.urlDeleteMember, JSON.stringify(body));
  }
  // Look up a member id by login name.
  // NOTE(review): returns undefined when loginName is falsy — callers must cope.
  searchMemberId(loginName) {
    if (loginName) {
      return axios.get(`/iam/v1/users?login_name=${loginName}`);
    }
  }
  searchMemberIds(loginNames) {
    // NOTE(review): map()'s first callback argument is the ELEMENT (here
    // misleadingly named `index`); behavior is correct, naming is confusing.
    const promises = loginNames.map((index, value) => this.searchMemberId(index));
    return axios.all(promises);
  }
  /**
   * Load the members under a single role (user mode) and append them to
   * roleData.users; toggles roleData.loading while in flight.
   * @param roleData single role row (mutated in place)
   * @param current current page number
   * @param loginName login-name filter
   * @param realName real-name filter
   * @param params free-text search params
   * @returns {PromiseLike<T | never> | Promise<T | never>}
   */
  loadRoleMemberData(roleData, { current }, { loginName, realName }, params) {
    const { id: roleId, users, name } = roleData;
    const body = {
      loginName: loginName && loginName[0],
      realName: realName && realName[0],
      param: params && params.length ? params : undefined,
    };
    const queryObj = { role_id: roleId, size: pageSize, page: current };
    roleData.loading = true;
    return axios.post(`${this.urlUsers}?${querystring.stringify(queryObj)}`,
      JSON.stringify(body))
      .then(({ list }) => {
        // Tag each member with its role so flattened rows stay attributable.
        roleData.users = users.concat((list || []).map((member) => {
          member.roleId = roleId;
          member.roleName = name;
          return member;
        }));
        delete roleData.loading;
        this.context.forceUpdate();
      });
  }
  /**
   * Load the members under a single role (client mode); same contract as
   * loadRoleMemberData but normalizes `name` into `clientName`.
   * @param roleData single role row (mutated in place)
   * @param current current page number
   * @param clientName client-name filter
   * @param params free-text search params
   * @returns {PromiseLike<T | never> | Promise<T | never>}
   */
  loadClientRoleMemberData(roleData, { current }, { clientName }, params) {
    const { id: roleId, users, name } = roleData;
    const body = {
      clientName: clientName && clientName[0],
    };
    const queryObj = { role_id: roleId, size: pageSize, page: current };
    roleData.loading = true;
    return axios.post(`${this.urlClients}?${querystring.stringify(queryObj)}`,
      JSON.stringify(body))
      .then(({ list }) => {
        roleData.users = users.concat((list || []).map((member) => {
          member.roleId = roleId;
          member.roleName = name;
          // The API returns the client's name as `name`; rename it so it does
          // not clash with the role name.
          member.clientName = member.name;
          delete member.name;
          return member;
        }));
        delete roleData.loading;
        this.context.forceUpdate();
      });
  }
  // Member-table data in user mode.
  loadMemberDatas({ pageSize: size, current }, { loginName, realName, roles }, params) {
    const body = {
      loginName: loginName && loginName[0],
      roleName: roles && roles[0],
      realName: realName && realName[0],
      param: params,
    };
    const queryObj = { size, page: current, sort: 'id' };
    return axios.post(`${this.urlRoles}?${querystring.stringify(queryObj)}`, JSON.stringify(body));
  }
  // Role-table data in user mode (role list plus per-role user counts).
  loadRoleMemberDatas({ loginName, realName, roleName }, params) {
    const body = {
      roleName: roleName && roleName[0],
      loginName: loginName && loginName[0],
      realName: realName && realName[0],
      param: params && params.length ? params : undefined,
    };
    return axios.post(this.urlUserCount, JSON.stringify(body));
  }
  // Member-table data in client mode.
  loadClientMemberDatas({ pageSize: size, current }, { name, roles }, params) {
    const body = {
      clientName: name && name[0],
      roleName: roles && roles[0],
      param: params,
    };
    const queryObj = { size, page: current, sort: 'id' };
    return axios.post(`${this.urlClientRoles}?${querystring.stringify(queryObj)}`, JSON.stringify(body));
  }
  // Role-table data in client mode (role list plus per-role client counts).
  loadClientRoleMemberDatas({ clientName, name }) {
    const body = {
      clientName: clientName && clientName[0],
      roleName: name && name[0],
    };
    return axios.post(this.urlClientCount, JSON.stringify(body));
  }
  // Parallel fetch (user mode): member table + role table, then re-expand the
  // previously expanded role rows.
  fetch() {
    const { memberRolePageInfo, memberRoleFilters, roleMemberFilters, expandedKeys, params, roleMemberParams } = this.context.state;
    this.context.setState({
      loading: true,
    });
    return axios.all([
      this.loadMemberDatas(memberRolePageInfo, memberRoleFilters, params),
      // NOTE(review): loadRoleMemberDatas destructures loginName/realName/
      // roleName only, so the `name` key passed here appears unused — confirm.
      this.loadRoleMemberDatas({ name: roleMemberParams, ...roleMemberFilters }),
    ]).then(([{ list, total, pageNum }, roleData]) => {
      this.context.setState({
        memberDatas: list, // user mode - member table data source
        expandedKeys,
        // user mode - role table data source
        roleMemberDatas: roleData.filter((role) => {
          role.users = role.users || [];
          if (role.userCount > 0) {
            // Reload members for rows that were expanded before the refresh.
            if (expandedKeys.find(expandedKey => expandedKey.split('-')[1] === String(role.id))) {
              this.loadRoleMemberData(role, {
                current: 1,
                pageSize,
              }, roleMemberFilters);
            }
            return true;
          }
          return false;
        }),
        roleData,
        loading: false,
        memberRolePageInfo: {
          total,
          current: pageNum,
          pageSize,
        },
      });
    });
  }
  // Parallel fetch (client mode): mirrors fetch() for client members/roles.
  fetchClient() {
    const { clientMemberRolePageInfo, clientMemberRoleFilters, clientRoleMemberFilters, expandedKeys, clientParams, clientRoleMemberParams } = this.context.state;
    this.context.setState({
      loading: true,
    });
    return axios.all([
      this.loadClientMemberDatas(clientMemberRolePageInfo, clientMemberRoleFilters, clientParams),
      this.loadClientRoleMemberDatas({ name: clientRoleMemberParams, ...clientRoleMemberFilters }),
    ]).then(([{ list, total, pageNum }, roleData]) => {
      this.context.setState({
        clientMemberDatas: list,
        expandedKeys,
        cilentRoleMemberDatas: roleData.filter((role) => {
          role.users = role.users || [];
          if (role.userCount > 0) {
            if (expandedKeys.find(expandedKey => expandedKey.split('-')[1] === String(role.id))) {
              this.loadClientRoleMemberData(role, {
                current: 1,
                pageSize,
              }, clientRoleMemberFilters);
            }
            return true;
          }
          return false;
        }),
        roleData,
        loading: false,
        clientMemberRolePageInfo: {
          total,
          current: pageNum,
          pageSize,
        },
      });
    });
  }
}
<|start_filename|>react/src/app/iam/containers/organization/application/Application.js<|end_filename|>
import React, { Component } from 'react';
import { Button, Form, Table, Tooltip, Icon, Modal, Spin, Input } from 'choerodon-ui';
import { inject, observer } from 'mobx-react';
import { withRouter } from 'react-router-dom';
import { Action, Content, Header, Page, axios } from '@choerodon/boot';
import { injectIntl, FormattedMessage } from 'react-intl';
import './Application.scss';
import MouseOverWrapper from '../../../components/mouseOverWrapper';
import StatusTag from '../../../components/statusTag';
import EditSider from './EditSider';
import { callbackify } from 'util';
const intlPrefix = 'organization.application';
@Form.create({})
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class Application extends Component {
state = {
curToken: '',
createToken: false,
hasToken: false,
interfaceName: '',
showTokenModal: false,
};
componentDidMount() {
this.setState({
curToken: '',
hasToken: false,
createToken: false,
interfaceName: '',
showTokenModal: false,
});
this.refresh();
}
refresh = () => {
const { ApplicationStore } = this.props;
ApplicationStore.refresh();
}
handleClickAddApplication = () => {
const { AppState: { currentMenuType: { name, id } }, history } = this.props;
history.push(`/iam/application/add?type=organization&id=${id}&name=${encodeURIComponent(name)}`);
}
handleopenTab = (record, operation) => {
const { ApplicationStore } = this.props;
ApplicationStore.setEditData(record);
ApplicationStore.showSidebar();
};
handleEnable = (record) => {
const { ApplicationStore } = this.props;
if (record.enabled) {
ApplicationStore.disableApplication(record.id).then(() => {
ApplicationStore.loadData();
});
} else {
ApplicationStore.enableApplication(record.id).then(() => {
ApplicationStore.loadData();
});
}
};
handlePageChange = (pagination, filters, sorter, params) => {
const { ApplicationStore } = this.props;
ApplicationStore.loadData(pagination, filters, sorter, params);
};
handleSaveMsg = () => {
const { ApplicationStore } = this.props;
ApplicationStore.closeSidebar();
ApplicationStore.loadData();
}
handleCancelSider = () => {
const { ApplicationStore } = this.props;
ApplicationStore.closeSidebar();
}
handleManage = (record) => {
const { AppState: { currentMenuType: { name, id } }, history } = this.props;
history.push(`/iam/application/manage/${record.id}?type=organization&id=${id}&name=${encodeURIComponent(name)}`);
}
handleToken = (record, hasToken) => {
this.setState({ showTokenModal: true, interfaceName: record.name, hasToken });
const { organizationId, id } = record;
if (!hasToken) {
axios.post(`/iam/v1/organizations/${organizationId}/applications/${id}/token`).then(res => {
console.log('get token: ', res);
this.setState({
hasToken: true,
curToken: res,
createToken: true,
})
})
} else {
this.setState({
hasToken: true,
createToken: false,
curToken: record.applicationToken,
})
}
};
render() {
const { showTokenModal, interfaceName, hasToken, curToken, createToken } = this.state;
const { ApplicationStore: { filters, pagination, params }, AppState, intl, ApplicationStore, ApplicationStore: { applicationData } } = this.props;
const unHandleData = ApplicationStore.applicationData.slice();
const hasChild = unHandleData.some(v => v.applicationCategory === 'combination-application' && v.descendants && v.descendants.length);
const columns = [
{
title: <FormattedMessage id={`${intlPrefix}.name`} />,
dataIndex: 'name',
width: '25%',
filters: [],
filteredValue: filters.name || [],
render: (text, record) => (
<span
style={{
borderLeft: record.isFirst && hasChild ? '1px solid rgba(0, 0, 0, 0.12)' : 'none',
paddingLeft: hasChild ? 20 : 'auto',
display: 'inline-block',
maxWidth: hasChild ? 'calc(100% - 50px)' : '100%',
overflow: 'hidden',
textOverflow: 'ellipsis',
whiteSpace: 'nowrap',
}}
>
<Icon type={record.applicationCategory === 'combination-application' ? 'grain' : 'predefine'} style={{ marginRight: 5, verticalAlign: 'text-top' }} />
{text}
</span>
),
},
{
title: <FormattedMessage id={`${intlPrefix}.code`} />,
dataIndex: 'code',
key: 'code',
width: '15%',
filters: [],
filteredValue: filters.code || [],
render: (text, record) => {
if (!record.isFirst) return null;
return <span>{text}</span>;
},
},
{
title: <FormattedMessage id={`${intlPrefix}.category`} />,
dataIndex: 'applicationCategory',
width: '10%',
render: (category, record) => (!record.isFirst ? null : <FormattedMessage id={`${intlPrefix}.category.${category.toLowerCase()}`} />),
filters: [{
text: '组合应用',
value: 'combination-application',
}, {
text: '普通应用',
value: 'application',
}],
filteredValue: filters.applicationCategory || [],
},
{
title: <FormattedMessage id={`${intlPrefix}.application-type`} />,
dataIndex: 'applicationType',
filters: [{
text: '开发应用',
value: 'normal',
}, {
text: '测试应用',
value: 'test',
}],
filteredValue: filters.applicationType || [],
width: '10%',
render: (text, record) => (
!record.isFirst ? null
: (
<MouseOverWrapper text={text} width={0.2}>
{text ? intl.formatMessage({ id: `${intlPrefix}.type.${text}` }) : ''}
</MouseOverWrapper>
)
),
},
{
title: <FormattedMessage id={`${intlPrefix}.project-name`} />,
dataIndex: 'projectName',
filters: [],
filteredValue: filters.projectName || [],
width: '20%',
render: (text, record) => (
!record.isFirst ? null
: (
<div>
{
text && (
<div className="c7n-iam-application-name-avatar">
{
record.imageUrl ? (
<img src={record.imageUrl} alt="avatar" style={{ width: '100%' }} />
) : (
<React.Fragment>{text.split('')[0]}</React.Fragment>
)
}
</div>
)
}
<MouseOverWrapper text={text} width={0.2}>
{text}
</MouseOverWrapper>
</div>
)
),
},
{
title: <FormattedMessage id="status" />,
dataIndex: 'enabled',
width: '10%',
filters: [{
text: intl.formatMessage({ id: 'enable' }),
value: 'true',
}, {
text: intl.formatMessage({ id: 'disable' }),
value: 'false',
}],
filteredValue: filters.enabled || [],
key: 'enabled',
render: (enabled, record) => (
!record.isFirst ? null
: (
<span style={{ marginRight: 8, fontSize: '12px', lineHeight: '18px', padding: '2px 6px', background: record.enabled ? 'rgba(0, 191, 165, 0.1)' : 'rgba(244, 67, 54, 0.1)', color: record.enabled ? '#009688' : '#D50000', borderRadius: '2px', border: '1px solid', borderColor: record.enabled ? '#009688' : '#D50000' }}>
{record.enabled ? '启用' : '停用'}
</span>
)
),
},
{
title: '',
key: 'action',
width: '10%',
align: 'right',
render: (text, record) => {
if (!record.isFirst) {
return null
}
const curRecordhasToken = record.applicationToken !== null;
const actionDatas = [{
icon: '',
type: 'site',
text: intl.formatMessage({id: 'modify'}),
action: () => this.handleopenTab(record, 'edit'),
}];
if (record.applicationCategory === 'combination-application') {
actionDatas.push({
icon: '',
type: 'site',
text: intl.formatMessage({id: 'edit'}),
action: () => this.handleManage(record),
})
}
if (record.enabled) {
actionDatas.push({
icon: '',
type: 'site',
text: intl.formatMessage({id: 'disable'}),
action: () => this.handleEnable(record),
});
} else {
actionDatas.push({
icon: '',
type: 'site',
text: intl.formatMessage({id: 'enable'}),
action: () => this.handleEnable(record),
});
}
actionDatas.push({
icon: '',
type: 'site',
text: intl.formatMessage(
{
id: curRecordhasToken
? `${intlPrefix}.view.token`
: `${intlPrefix}.create.and.view.token`
}
),
action: () => this.handleToken(record, curRecordhasToken),
});
return <Action data={actionDatas} />;
}
}
];
// if (unHandleData.length) {
// unHandleData[0].descendants = [{ ...unHandleData[1], id: 1001, isFirst: false }, { ...unHandleData[1], id: 1002, isFirst: false }];
// }
return (
<Page className="c7n-iam-application">
<Header title="应用管理">
<Button
onClick={this.handleClickAddApplication}
icon="playlist_add"
>
<FormattedMessage id={`${intlPrefix}.create`} />
</Button>
<Button
icon="refresh"
onClick={this.refresh}
>
<FormattedMessage id="refresh" />
</Button>
</Header>
<Content
code={intlPrefix}
>
<Table
pagination={pagination}
columns={columns}
dataSource={unHandleData}
rowKey={record => record.__uuid__}
filters={params.slice()}
onChange={this.handlePageChange}
loading={ApplicationStore.loading}
filterBarPlaceholder={intl.formatMessage({ id: 'filtertable' })}
childrenColumnName="descendants"
scroll={{ x: true }}
/>
<Modal
className='c7n-iam-application-token-modal'
title={intl.formatMessage({ id: `${intlPrefix}.view.interface.token` }, {interfaceName})}
visible={showTokenModal}
okText={intl.formatMessage({ id: 'close' })}
onCancel={() => {
this.setState({ showTokenModal: false });
if (createToken) {
this.refresh();
}
}}
onOk={() => {
this.setState({ showTokenModal: false });
if (createToken) {
this.refresh();
}
}}
center
>
<Spin spinning={!hasToken}>
<Input
copy
readOnly={true}
value={curToken}
label="AccessToken"
/>
</Spin>
</Modal>
</Content>
{
ApplicationStore.sidebarVisible ? (
<EditSider
onCancel={this.handleCancelSider}
onOk={this.handleSaveMsg}
/>
) : null
}
</Page>
);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/PasswordPolicyMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.PasswordPolicyDTO;
import io.choerodon.mybatis.common.Mapper;
/**
* @author wuguokai
*/
public interface PasswordPolicyMapper extends Mapper<PasswordPolicyDTO> {
    // No custom SQL: inherits the generic CRUD operations from the Mapper base.
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/asserts/RoleAssertHelper.java<|end_filename|>
package io.choerodon.iam.infra.asserts;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.infra.dto.RoleDTO;
import io.choerodon.iam.infra.exception.AlreadyExsitedException;
import io.choerodon.iam.infra.exception.NotExistedException;
import io.choerodon.iam.infra.mapper.RoleMapper;
import org.springframework.stereotype.Component;
/**
* 角色断言帮助类
*
* @author superlee
* @since 2019-04-15
*/
@Component
public class RoleAssertHelper extends AssertHelper {
    private RoleMapper roleMapper;

    public RoleAssertHelper(RoleMapper roleMapper) {
        this.roleMapper = roleMapper;
    }

    /**
     * Throws AlreadyExsitedException when a role with this code already exists.
     */
    public void codeExisted(String code) {
        RoleDTO dto = new RoleDTO();
        dto.setCode(code);
        if (roleMapper.selectOne(dto) != null) {
            throw new AlreadyExsitedException("error.role.code.existed");
        }
    }

    /**
     * Looks up a role by id; throws with the default message when missing.
     */
    public RoleDTO roleNotExisted(Long id) {
        return roleNotExisted(id, "error.role.not.exist");
    }

    /**
     * Looks up a role by id; throws CommonException(message, id) when missing.
     */
    public RoleDTO roleNotExisted(Long id, String message) {
        RoleDTO dto = roleMapper.selectByPrimaryKey(id);
        if (dto == null) {
            throw new CommonException(message, id);
        }
        return dto;
    }

    /**
     * Looks up a role by code; throws with the default message when missing.
     */
    public RoleDTO roleNotExisted(String code) {
        return roleNotExisted(code, "error.role.not.existed");
    }

    /**
     * Looks up a role by code; throws NotExistedException(message) when missing.
     */
    public RoleDTO roleNotExisted(String code, String message) {
        RoleDTO dto = new RoleDTO();
        dto.setCode(code);
        RoleDTO result = roleMapper.selectOne(dto);
        if (result == null) {
            throw new NotExistedException(message);
        }
        return result;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/config/MultipartSupportConfig.java<|end_filename|>
package io.choerodon.iam.infra.config;
import feign.codec.Encoder;
import feign.form.spring.SpringFormEncoder;
import org.springframework.beans.factory.ObjectFactory;
import org.springframework.boot.autoconfigure.http.HttpMessageConverters;
import org.springframework.cloud.openfeign.support.SpringEncoder;
import org.springframework.context.annotation.Bean;
import org.springframework.stereotype.Component;
/**
* @author superlee
*/
// Provides a form-capable Feign encoder so multipart/form-data requests can be
// sent through Feign clients, delegating regular payloads to SpringEncoder.
// NOTE(review): annotated @Component while declaring a @Bean method — the bean
// is processed in "lite" mode; confirm @Configuration was not intended.
@Component
public class MultipartSupportConfig {
    private ObjectFactory<HttpMessageConverters> messageConverters;

    public MultipartSupportConfig(ObjectFactory<HttpMessageConverters> messageConverters) {
        this.messageConverters = messageConverters;
    }

    /**
     * Form-aware encoder wrapping the standard Spring encoder.
     */
    @Bean
    public Encoder feignFormEncoder() {
        return new SpringFormEncoder(new SpringEncoder(messageConverters));
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/asserts/UserAssertHelper.java<|end_filename|>
package io.choerodon.iam.infra.asserts;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.infra.dto.UserDTO;
import io.choerodon.iam.infra.exception.AlreadyExsitedException;
import io.choerodon.iam.infra.mapper.UserMapper;
import org.springframework.stereotype.Component;
/**
* 用户断言帮助类
*
* @author superlee
* @since 2019-05-10
*/
@Component
public class UserAssertHelper extends AssertHelper {
    private UserMapper userMapper;

    public UserAssertHelper(UserMapper userMapper) {
        this.userMapper = userMapper;
    }

    /**
     * Looks up a user by id; throws with the default message when missing.
     */
    public UserDTO userNotExisted(Long id) {
        return userNotExisted(id, "error.user.not.exist");
    }

    /**
     * Looks up a user by the given column (login name or email); throws with a
     * column-specific default message when missing.
     */
    public UserDTO userNotExisted(WhichColumn whichColumn, String value) {
        switch (whichColumn) {
            case LOGIN_NAME:
                return loginNameNotExisted(value, "error.user.loginName.not.existed");
            case EMAIL:
                return emailNotExisted(value, "error.user.email.not.existed");
            default:
                throw new CommonException("error.illegal.whichColumn", whichColumn.value);
        }
    }

    /**
     * Looks up a user by the given column; throws CommonException(message, value)
     * when missing.
     */
    public UserDTO userNotExisted(WhichColumn whichColumn, String value, String message) {
        switch (whichColumn) {
            case LOGIN_NAME:
                return loginNameNotExisted(value, message);
            case EMAIL:
                return emailNotExisted(value, message);
            default:
                throw new CommonException("error.illegal.whichColumn", whichColumn.value);
        }
    }

    // Email variant of the "not existed" lookup.
    private UserDTO emailNotExisted(String email, String message) {
        UserDTO dto = new UserDTO();
        dto.setEmail(email);
        UserDTO result = userMapper.selectOne(dto);
        if (result == null) {
            throw new CommonException(message, email);
        }
        return result;
    }

    // Login-name variant of the "not existed" lookup.
    private UserDTO loginNameNotExisted(String loginName, String message) {
        UserDTO dto = new UserDTO();
        dto.setLoginName(loginName);
        UserDTO result = userMapper.selectOne(dto);
        if (result == null) {
            throw new CommonException(message, loginName);
        }
        return result;
    }

    /**
     * Looks up a user by id; throws CommonException(message, id) when missing.
     */
    public UserDTO userNotExisted(Long id, String message) {
        UserDTO dto = userMapper.selectByPrimaryKey(id);
        if (dto == null) {
            throw new CommonException(message, id);
        }
        return dto;
    }

    /**
     * Throws AlreadyExsitedException when the login name is already taken.
     */
    public void loginNameExisted(String loginName) {
        loginNameExisted(loginName,"error.user.loginName.exist");
    }

    public void loginNameExisted(String loginName, String message) {
        UserDTO dto = new UserDTO();
        dto.setLoginName(loginName);
        if (userMapper.selectOne(dto)!=null) {
            throw new AlreadyExsitedException(message);
        }
    }

    /**
     * Throws AlreadyExsitedException when the email is already taken.
     */
    public void emailExisted(String email) {
        emailExisted(email,"error.user.email.existed");
    }

    public void emailExisted(String email,String message) {
        UserDTO dto = new UserDTO();
        dto.setEmail(email);
        if (userMapper.selectOne(dto)!=null) {
            throw new AlreadyExsitedException(message);
        }
    }

    /**
     * Column selector for the "not existed" lookups.
     */
    public enum WhichColumn {
        /**
         * login name column
         */
        LOGIN_NAME("login_name"),
        /**
         * email column
         */
        EMAIL("email");
        private String value;

        WhichColumn(String value) {
            this.value = value;
        }

        // True when the raw column name matches one of the enum values.
        public static boolean contains(String value) {
            for (WhichColumn whichColumn : WhichColumn.values()) {
                if (whichColumn.value.equals(value)) {
                    return true;
                }
            }
            return false;
        }
    }
}
<|start_filename|>react/src/app/iam/stores/organization/user/CreateUserStore.js<|end_filename|>
import { action, computed, observable } from 'mobx';
import { axios, store } from '@choerodon/boot';
@store('CreateUserStore')
class CreateUserStore {
  // Current UI language of the create-user form.
  @observable language;
  // Password policy of the organization the user is being created in.
  @observable passwordPolicy;

  @action
  setLanguage(lang) {
    this.language = lang;
  }

  @computed
  get getLanguage() {
    return this.language;
  }

  @action
  setPasswordPolicy(data) {
    this.passwordPolicy = data;
  }

  @computed
  get getPasswordPolicy() {
    return this.passwordPolicy;
  }

  // Fetch and cache the organization's password policy.
  loadPasswordPolicyById(id) {
    return axios.get(`/iam/v1/organizations/${id}/password_policies`).then((data) => {
      this.setPasswordPolicy(data);
    });
  }

  // Uniqueness check for the login name within the organization.
  checkUsername = (organizationId, loginName) => (
    axios.post(`/iam/v1/organizations/${organizationId}/users/check`, JSON.stringify({ organizationId, loginName }))
  );

  // Uniqueness check for the email within the organization.
  checkEmailAddress = (organizationId, email) => (
    axios.post(`/iam/v1/organizations/${organizationId}/users/check`, JSON.stringify({ organizationId, email }))
  );

  createUser = (user, id) => (
    axios.post(`/iam/v1/organizations/${id}/users`, JSON.stringify(user))
  );

  getUserInfoById = (orgId, id) => (
    axios.get(`/iam/v1/organizations/${orgId}/users/${id}`)
  );

  updateUser = (orgId, id, user) => (
    axios.put(`/iam/v1/organizations/${orgId}/users/${id}`, JSON.stringify(user))
  );
}

// Singleton store instance shared by the create-user screens.
const createUserStore = new CreateUserStore();
export default createUserStore;
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/ProjectMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.ProjectDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import java.util.Set;
/**
 * MyBatis mapper for project records.
 *
 * @author wuguokai
 */
public interface ProjectMapper extends Mapper<ProjectDTO> {

    /** Count of projects matching the fulltext search criteria. */
    int fulltextSearchCount(@Param("project") ProjectDTO projectDTO,
                            @Param("param") String param);

    /** Count of matching projects, excluding program projects. */
    int fulltextSearchCountIgnoreProgramProject(@Param("project") ProjectDTO projectDTO,
                                                @Param("param") String param);

    /** One page (start/size) of projects matching the fulltext search. */
    List<ProjectDTO> fulltextSearch(@Param("project") ProjectDTO projectDTO,
                                    @Param("param") String param,
                                    @Param("start") Integer start,
                                    @Param("size") Integer size);

    /** Category-aware variant of {@link #fulltextSearch} — see the XML mapping for details. */
    List<ProjectDTO> fulltextSearchCategory(@Param("project") ProjectDTO projectDTO,
                                            @Param("param") String param,
                                            @Param("start") Integer start,
                                            @Param("size") Integer size);

    /** Projects the given user belongs to, filtered by the example DTO. */
    List<ProjectDTO> selectProjectsByUserId(@Param("userId") Long userId,
                                            @Param("projectDTO") ProjectDTO projectDTO);

    /** Same as {@link #selectProjectsByUserId} with an extra fuzzy-search param. */
    List<ProjectDTO> selectProjectsByUserIdWithParam(@Param("userId") Long userId,
                                                     @Param("projectDTO") ProjectDTO projectDTO,
                                                     @Param("param") String param);

    /** Paged (start/size) projects of a user together with role data — see XML mapping. */
    List<ProjectDTO> selectProjectsWithRoles(
            @Param("id") Long id,
            @Param("start") Integer start,
            @Param("size") Integer size,
            @Param("params") String params);

    /** Projects of a user under one organization, optionally filtered by enabled flag. */
    List<ProjectDTO> selectUserProjectsUnderOrg(@Param("userId") Long userId,
                                                @Param("orgId") Long orgId,
                                                @Param("isEnabled") Boolean isEnabled);

    /** Ids of the users under the given project. */
    List<Long> listUserIds(@Param("projectId") Long projectId);

    /**
     * Get all projects, with their project type names attached.
     */
    List<ProjectDTO> selectAllWithProjectType();

    /** Whether the project identified by sourceId is enabled. */
    Boolean projectEnabled(@Param("sourceId") Long sourceId);

    /** Projects whose ids are in the given set. */
    List<ProjectDTO> selectByIds(@Param("ids") Set<Long> ids);

    /**
     * Names of projects of a given type under an organization.
     *
     * @param type  project type code
     * @param orgId organization id
     * @return names of matching projects
     */
    List<String> selectProjectNameByType(@Param("type") String type,
                                         @Param("orgId") Long orgId);

    /**
     * Names of projects without a project type under an organization.
     *
     * @param orgId organization id
     * @return names of matching projects
     */
    List<String> selectProjectNameNoType(@Param("orgId") Long orgId);

    /**
     * Projects under an organization that are not program groups.
     *
     * @param orgId organization id
     * @return non-program projects under the organization
     */
    List<ProjectDTO> selectProjsNotGroup(@Param("orgId") Long orgId, @Param("projectId") Long projectId);

    /**
     * Projects under an organization that are not program groups and belong to no group.
     *
     * @param orgId organization id
     * @return non-program, group-less projects under the organization
     */
    List<ProjectDTO> selectProjsNotInAnyGroup(@Param("orgId") Long orgId);

    /**
     * The currently effective normal program (project group) of a project,
     * looked up by organization id and project id.
     *
     * @param orgId     organization id
     * @param projectId project id
     * @return program info, if any
     */
    ProjectDTO selectGroupInfoByEnableProject(@Param("orgId") Long orgId, @Param("projectId") Long projectId);

    /** Project rows with category info for the given project id — see XML mapping. */
    List<ProjectDTO> selectCategoryByPrimaryKey(@Param("projectId") Long projectId);

    /** Projects of an organization filtered by category criteria — see XML mapping. */
    List<ProjectDTO> selectByOrgIdAndCategory(@Param("organizationId") Long organizationId, @Param("param") String param);

    /** Enabled projects of an organization filtered by category ("agile") — see XML mapping. */
    List<ProjectDTO> selectByOrgIdAndCategoryEnable(@Param("organizationId") Long organizationId, @Param("agile") String agile, @Param("param") String param);
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/OrganizationController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.base.BaseController;
import io.choerodon.core.iam.InitRoleCode;
import io.choerodon.core.iam.ResourceLevel;
import io.choerodon.core.oauth.DetailsHelper;
import io.choerodon.iam.api.dto.OrgSharesDTO;
import io.choerodon.iam.api.dto.OrganizationSimplifyDTO;
import io.choerodon.iam.app.service.OrganizationService;
import io.choerodon.iam.infra.common.utils.ParamUtils;
import io.choerodon.iam.infra.dto.OrganizationDTO;
import io.choerodon.iam.infra.dto.UserDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.choerodon.swagger.annotation.CustomPageRequest;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import javax.validation.Valid;
import java.util.List;
import java.util.Set;
/**
 * REST endpoints for managing organizations.
 *
 * @author wuguokai
 * @author superlee
 */
@RestController
@RequestMapping(value = "/v1/organizations")
public class OrganizationController extends BaseController {

    // Injected by Spring via constructor; immutable afterwards.
    private final OrganizationService organizationService;

    public OrganizationController(OrganizationService organizationService) {
        this.organizationService = organizationService;
    }

    /**
     * Update an organization (site level).
     *
     * @param id              organization id
     * @param organizationDTO organization payload to apply
     * @return the updated organization
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "全局层修改组织")
    @PutMapping(value = "/{organization_id}")
    public ResponseEntity<OrganizationDTO> update(@PathVariable(name = "organization_id") Long id,
                                                  @RequestBody @Valid OrganizationDTO organizationDTO) {
        return new ResponseEntity<>(organizationService.updateOrganization(id, organizationDTO, ResourceLevel.SITE.value(), 0L),
                HttpStatus.OK);
    }

    /**
     * Update an organization (organization level).
     *
     * @param id              organization id
     * @param organizationDTO organization payload to apply
     * @return the updated organization
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "组织层修改组织")
    @PutMapping(value = "/{organization_id}/organization_level")
    public ResponseEntity<OrganizationDTO> updateOnOrganizationLevel(@PathVariable(name = "organization_id") Long id,
                                                                     @RequestBody @Valid OrganizationDTO organizationDTO) {
        return new ResponseEntity<>(organizationService.updateOrganization(id, organizationDTO, ResourceLevel.ORGANIZATION.value(), id),
                HttpStatus.OK);
    }

    /**
     * Query an organization by id (site level).
     *
     * @param id organization id
     * @return the organization
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "全局层根据组织id查询组织")
    @GetMapping(value = "/{organization_id}")
    public ResponseEntity<OrganizationDTO> query(@PathVariable(name = "organization_id") Long id) {
        return new ResponseEntity<>(organizationService.queryOrganizationById(id), HttpStatus.OK);
    }

    /**
     * Query an organization by id (organization level), including the roles the
     * caller holds in it and its project count.
     *
     * @param id organization id
     * @return the organization, with role information
     */
    @Permission(type = ResourceType.ORGANIZATION, permissionLogin = true)
    @ApiOperation(value = "组织层根据组织id查询组织,并查询被分配的角色")
    @GetMapping(value = "/{organization_id}/org_level")
    public ResponseEntity<OrganizationDTO> queryOrgLevel(@PathVariable(name = "organization_id") Long id) {
        return new ResponseEntity<>(organizationService.queryOrganizationWithRoleById(id), HttpStatus.OK);
    }

    /** Page through organizations, optionally filtered by name/code/enabled/params. */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "分页查询组织")
    @GetMapping
    @CustomPageRequest
    public ResponseEntity<PageInfo<OrganizationDTO>> pagingQuery(@ApiIgnore
                                                                 @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                                 @RequestParam(required = false) String name,
                                                                 @RequestParam(required = false) String code,
                                                                 @RequestParam(required = false) Boolean enabled,
                                                                 @RequestParam(required = false) String[] params) {
        OrganizationDTO organization = new OrganizationDTO();
        organization.setName(name);
        organization.setCode(code);
        organization.setEnabled(enabled);
        return new ResponseEntity<>(organizationService.pagingQuery(organization, pageRequest, ParamUtils.arrToStr(params)), HttpStatus.OK);
    }

    /** Page through basic info of all organizations (site administrators only). */
    @Permission(type = ResourceType.SITE, roles = {InitRoleCode.SITE_ADMINISTRATOR})
    @ApiOperation(value = "分页查询所有组织基本信息")
    @GetMapping(value = "/all")
    @CustomPageRequest
    public ResponseEntity<PageInfo<OrganizationSimplifyDTO>> getAllOrgs(@ApiIgnore
                                                                        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest) {
        return new ResponseEntity<>(organizationService.getAllOrgs(pageRequest), HttpStatus.OK);
    }

    /**
     * Query organizations by a set of ids (deduplicated by the Set type).
     *
     * @param ids organization ids
     * @return matching organizations
     */
    @Permission(permissionWithin = true)
    @ApiOperation(value = "根据id集合查询组织")
    @PostMapping("/ids")
    public ResponseEntity<List<OrganizationDTO>> queryByIds(@RequestBody Set<Long> ids) {
        return new ResponseEntity<>(organizationService.queryByIds(ids), HttpStatus.OK);
    }

    /** Enable an organization; the acting user id is recorded. */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "启用组织")
    @PutMapping(value = "/{organization_id}/enable")
    public ResponseEntity<OrganizationDTO> enableOrganization(@PathVariable(name = "organization_id") Long id) {
        Long userId = DetailsHelper.getUserDetails().getUserId();
        return new ResponseEntity<>(organizationService.enableOrganization(id, userId), HttpStatus.OK);
    }

    /** Disable an organization; the acting user id is recorded. */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "禁用组织")
    @PutMapping(value = "/{organization_id}/disable")
    public ResponseEntity<OrganizationDTO> disableOrganization(@PathVariable(name = "organization_id") Long id) {
        Long userId = DetailsHelper.getUserDetails().getUserId();
        return new ResponseEntity<>(organizationService.disableOrganization(id, userId), HttpStatus.OK);
    }

    /**
     * Validate organization data; delegates to the service, which is expected
     * to raise on invalid input. Fixed the raw {@code ResponseEntity} type.
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "组织信息校验")
    @PostMapping(value = "/check")
    public ResponseEntity<Void> check(@RequestBody OrganizationDTO organization) {
        organizationService.check(organization);
        return new ResponseEntity<>(HttpStatus.OK);
    }

    /**
     * Fuzzy-search users of an organization by organizationId and param
     * (matches loginName and realName).
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "分页模糊查询组织下的用户")
    @GetMapping(value = "/{organization_id}/users")
    @CustomPageRequest
    public ResponseEntity<PageInfo<UserDTO>> pagingQueryUsersOnOrganization(@PathVariable(name = "organization_id") Long id,
                                                                            @ApiIgnore
                                                                            @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                                            @RequestParam(required = false, name = "id") Long userId,
                                                                            @RequestParam(required = false) String email,
                                                                            @RequestParam(required = false) String param) {
        return new ResponseEntity<>(organizationService.pagingQueryUsersInOrganization(id, userId, email, pageRequest, param), HttpStatus.OK);
    }

    /** Page through brief info of the organizations whose ids are posted in the body. */
    @CustomPageRequest
    @PostMapping("/specified")
    @Permission(permissionWithin = true)
    @ApiOperation(value = "根据组织Id列表分页查询组织简要信息")
    public ResponseEntity<PageInfo<OrgSharesDTO>> pagingSpecified(@SortDefault(value = "id", direction = Sort.Direction.ASC) PageRequest pageRequest,
                                                                  @RequestParam(required = false) String name,
                                                                  @RequestParam(required = false) String code,
                                                                  @RequestParam(required = false) Boolean enabled,
                                                                  @RequestParam(required = false) String params,
                                                                  @RequestBody Set<Long> orgIds) {
        return new ResponseEntity<>(organizationService.pagingSpecified(orgIds, name, code, enabled, params, pageRequest), HttpStatus.OK);
    }
}
<|start_filename|>react/src/app/iam/stores/global/member-role/index.js<|end_filename|>
/**
 * Created by Qyellow on 2018/4/3.
 */
import MemberRoleStore from './MemberRoleStore';

// Barrel file: re-export the store as this module's default.
export default MemberRoleStore;
<|start_filename|>react/src/app/iam/stores/global/project-type/ProjectTypeStore.js<|end_filename|>
import { action, computed, observable } from 'mobx';
import { axios, store } from '@choerodon/boot';
import queryString from 'query-string';
import { handleFiltersParams } from '../../../common/util';
@store('ProjectTypeStore')
class ProjectTypeStore {
@observable projectTypeData = [];
@observable loading = false;
@observable pagination = {
current: 1,
pageSize: 10,
total: 0,
};
@observable filters = {};
@observable sort = {};
@observable params = [];
@observable editData = {};
@observable sidebarVisible = false;
@observable sidebarType = 'create';
@action
setSidebarType(type) {
this.sidebarType = type;
}
refresh() {
this.loadData({ current: 1, pageSize: 10 }, {}, {}, []);
}
@action
setEditData(data) {
this.editData = data;
}
@action
showSideBar() {
this.sidebarVisible = true;
}
@action
hideSideBar() {
this.sidebarVisible = false;
}
@action
updateData(values) {
this.loading = true;
return axios.post(`/iam/v1/projects/types/${this.editData.id}`, JSON.stringify({
...values,
id: this.editData.id,
objectVersionNumber: this.editData.objectVersionNumber,
}))
.then(action((data) => {
this.loading = false;
this.sidebarVisible = false;
return data;
}))
.catch(action((error) => {
Choerodon.handleResponseError(error);
this.loading = false;
}));
}
@action
createType(values) {
this.loading = true;
return axios.post('/iam/v1/projects/types', values).then(action((data) => {
this.loading = false;
this.sidebarVisible = false;
return data;
})).catch(action((error) => {
Choerodon.handleResponseError(error);
this.loading = false;
}));
}
@action
loadData(pagination = this.pagination, filters = this.filters, sort = this.sort, params = this.params) {
const { columnKey, order } = sort;
const sorter = [];
if (columnKey) {
sorter.push(columnKey);
if (order === 'descend') {
sorter.push('desc');
}
}
this.loading = true;
this.filters = filters;
this.sort = sort;
this.params = params;
// 若params或filters含特殊字符表格数据置空
const isIncludeSpecialCode = handleFiltersParams(params, filters);
if (isIncludeSpecialCode) {
this.projectTypeData.length = 0;
this.pagination = {
current: 1,
pageSize: 10,
total: 0,
};
this.loading = false;
return;
}
return axios.get(`/iam/v1/projects/types/paging_query?${queryString.stringify({
name: filters.name,
code: filters.code,
description: filters.description,
params: params.join(','),
sort: sorter.join(','),
})}`)
.then(action(({ failed, list, total }) => {
if (!failed) {
this.projectTypeData = list;
this.pagination = {
...pagination,
total,
};
}
this.loading = false;
}))
.catch(action((error) => {
Choerodon.handleResponseError(error);
this.loading = false;
}));
}
}
const projectTypeStore = new ProjectTypeStore();
export default projectTypeStore;
<|start_filename|>src/test/groovy/io/choerodon/iam/api/eventhandler/DevopsListenerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.eventhandler
import io.choerodon.asgard.saga.dto.StartInstanceDTO
import io.choerodon.asgard.saga.feign.SagaClient
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.infra.dto.MemberRoleDTO
import io.choerodon.iam.infra.mapper.LabelMapper
import io.choerodon.iam.infra.mapper.MemberRoleMapper
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * Spock specification for {@code DevopsListener}.
 *
 * @author dengyouquan
 */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class DevopsListenerSpec extends Specification {
    private MemberRoleMapper memberRoleMapper = Mock(MemberRoleMapper)
    private LabelMapper labelMapper = Mock(LabelMapper)
    private SagaClient sagaClient = Mock(SagaClient)
    private DevopsListener devopsListener = new DevopsListener(memberRoleMapper,sagaClient,labelMapper)
    // Number of member-role records fed to the listener in the test.
    int count = 3

    def "AssignRolesOnProject"() {
        given: "构造请求参数"
        String message = "message"
        List<MemberRoleDTO> memberRoles = new ArrayList<>()
        for (int i = 0; i < count; i++) {
            MemberRoleDTO memberRoleDO = new MemberRoleDTO()
            memberRoleDO.setSourceId(i)
            memberRoleDO.setMemberId(i)
            memberRoleDO.setSourceType("site")
            memberRoleDO.setMemberType("user")
            memberRoles.add(memberRoleDO)
        }
        when: "调用方法"
        devopsListener.assignRolesOnProject(message)
        then: "校验结果"
        // One member-role lookup, one label lookup per record, one saga start,
        // and no other interactions with the mocks.
        1 * memberRoleMapper.select(_) >> { memberRoles }
        count * labelMapper.selectLabelNamesInRoleIds(_) >> { new HashSet<String>() }
        1 * sagaClient.startSaga(_, _ as StartInstanceDTO)
        0 * _
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/ProjectMapCategoryMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.api.dto.ProjectCategoryDTO;
import io.choerodon.iam.api.dto.ProjectMapCategorySimpleDTO;
import io.choerodon.iam.infra.dto.ProjectMapCategoryDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * MyBatis mapper for the project-to-category mapping table.
 *
 * @author Eugen
 */
public interface ProjectMapCategoryMapper extends Mapper<ProjectMapCategoryDTO> {

    /** Category values (as strings) mapped to the given project — see XML mapping for the column. */
    List<String> selectProjectCategories(@Param("projectId") Long projectId);

    /** Category rows (including names) mapped to the given project. */
    List<ProjectCategoryDTO> selectProjectCategoryNames(@Param("projectId") Long projectId);

    /**
     * Batch insert project-category mappings.
     *
     * @param records mappings to insert
     * @return number of affected rows
     */
    int batchInsert(@Param("records") List<ProjectMapCategoryDTO> records);

    /** All project-category mappings, in simplified form. */
    List<ProjectMapCategorySimpleDTO> selectAllProjectMapCategories();
}
<|start_filename|>react/src/app/iam/dashboard/ProjectInfo/index.js<|end_filename|>
import React, { Component } from 'react';
import { Link } from 'react-router-dom';
import { FormattedMessage, injectIntl } from 'react-intl';
import { inject, observer } from 'mobx-react';
import {axios, DashBoardNavBar} from '@choerodon/boot';
import { Spin } from 'choerodon-ui';
import ProjectStore from '../../stores/organization/project/ProjectStore';
import './index.scss';
const intlPrefix = 'dashboard.projectinfo';
@injectIntl
@inject('AppState', 'HeaderStore')
@observer
export default class ProjectInfo extends Component {
state = {
categoryEnabled: false,
};
componentWillMount() {
this.loadProjectInfo();
}
componentDidMount() {
this.loadEnableCategory();
}
loadEnableCategory = () => {
axios.get(`/iam/v1/system/setting/enable_category`)
.then((response) => {
this.setState({
categoryEnabled: response,
});
});
};
loadProjectInfo = () => {
const { AppState: { currentMenuType: { id }, getUserInfo: { id: userId } } } = this.props;
ProjectStore.loadMyData(id, userId);
};
render() {
const { categoryEnabled } = this.state;
const { HeaderStore, AppState, intl } = this.props;
const { myRoles } = ProjectStore;
const { id: projectId, organizationId, type } = AppState.currentMenuType;
const projectData = HeaderStore.getProData || [];
const orgData = HeaderStore.getOrgData || [];
const { name, code, categories } = projectData.find(({ id }) => String(id) === String(projectId)) || {};
const { name: organizeName } = orgData.find(({ id }) => String(id) === String(organizationId)) || {};
return (
<div className="c7n-iam-dashboard-project">
<dl>
<dt><FormattedMessage id={`${intlPrefix}.name`} /></dt>
<dd>{name}</dd>
<dt><FormattedMessage id={`${intlPrefix}.code`} /></dt>
<dd>{code}</dd>
{
categoryEnabled && (
<div>
<dt><FormattedMessage id={`${intlPrefix}.type`} /></dt>
<dd>{categories && categories.map(value => value.name + " ") || intl.formatMessage({ id: 'dashboard.empty' })}</dd>
</div>
)
}
<dt><FormattedMessage id={`${intlPrefix}.organization`} /></dt>
<dd>{organizeName}</dd>
<dt><FormattedMessage id={`${intlPrefix}.role`} /></dt>
<dd>
{myRoles.length ? myRoles.map(({ name: roleName }) => roleName).join(', ') : intl.formatMessage({ id: 'dashboard.empty' })}
</dd>
</dl>
</div>
);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/ErrorUserDTO.java<|end_filename|>
package io.choerodon.iam.api.dto;
/**
* excel导入失败用户
* @author superlee
*/
public class ErrorUserDTO {
private String loginName;
private String realName;
private String email;
private String password;
private String phone;
//导入失败的原因
private String cause;
public String getLoginName() {
return loginName;
}
public void setLoginName(String loginName) {
this.loginName = loginName;
}
public String getRealName() {
return realName;
}
public void setRealName(String realName) {
this.realName = realName;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getPhone() {
return phone;
}
public void setPhone(String phone) {
this.phone = phone;
}
public String getCause() {
return cause;
}
public void setCause(String cause) {
this.cause = cause;
}
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/OrganizationProjectService.java<|end_filename|>
package io.choerodon.iam.app.service;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.infra.dto.ProjectDTO;
import java.util.List;
import java.util.Map;
/**
 * Application service for projects scoped to an organization.
 *
 * @author flyleft
 */
public interface OrganizationProjectService {

    /** Create a project (entry point wrapping {@link #create}). */
    ProjectDTO createProject(ProjectDTO projectDTO);

    /** Persist a new project. */
    ProjectDTO create(ProjectDTO projectDTO);

    /** All projects matching the example DTO. */
    List<ProjectDTO> queryAll(ProjectDTO projectDTO);

    /** One page of projects matching the example DTO and fuzzy param. */
    PageInfo<ProjectDTO> pagingQuery(ProjectDTO projectDTO, PageRequest pageRequest, String param);

    /** Full update of a project under the given organization. */
    ProjectDTO update(Long organizationId, ProjectDTO projectDTO);

    /** Update only the non-null fields of the given project. */
    ProjectDTO updateSelective(ProjectDTO projectDTO);

    /** Enable a project; the acting user id is recorded. */
    ProjectDTO enableProject(Long organizationId, Long projectId, Long userId);

    /** Disable a project; the acting user id is recorded. */
    ProjectDTO disableProject(Long organizationId, Long projectId, Long userId);

    /** Validate project data (e.g. uniqueness); expected to raise on failure. */
    void check(ProjectDTO projectDTO);

    /**
     * Per-type project counts and project names under an organization.
     *
     * @param organizationId organization id
     * @return map of aggregated results
     */
    Map<String, Object> getProjectsByType(Long organizationId);

    /**
     * Agile projects under an organization that can be assigned to the
     * current program (project group).
     *
     * @param organizationId organization id
     * @param projectId      project id
     * @return assignable projects
     */
    List<ProjectDTO> getAvailableAgileProj(Long organizationId, Long projectId);

    /** Project with its category info, looked up by primary key. */
    ProjectDTO selectCategoryByPrimaryKey(Long projectId);

    /**
     * The currently effective normal program (project group) of a project —
     * the program is enabled and "now" falls within its validity period.
     *
     * @param organizationId organization id
     * @param projectId      project id
     * @return program info
     */
    ProjectDTO getGroupInfoByEnableProject(Long organizationId, Long projectId);

    /** Agile projects under an organization matching the fuzzy param. */
    List<ProjectDTO> getAgileProjects(Long organizationId, String param);
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/UserDashboardService.java<|end_filename|>
package io.choerodon.iam.app.service;
import io.choerodon.iam.infra.dto.UserDashboardDTO;
import java.util.List;
/**
 * Application service for per-user dashboard configuration.
 *
 * @author <EMAIL>
 */
public interface UserDashboardService {

    /** Dashboards for the given level ("site"/"organization"/"project" — confirm in impl) and source id. */
    List<UserDashboardDTO> list(String level, Long sourceId);

    /** Replace the dashboard list for the given level and source id; returns the saved list. */
    List<UserDashboardDTO> update(String level, Long sourceId, List<UserDashboardDTO> dashboardList);

    /** Reset the dashboards for the given level and source id to defaults. */
    void reset(String level, Long sourceId);
}
<|start_filename|>react/src/app/iam/containers/global/system-setting/LogoUploader.js<|end_filename|>
import React, { Component } from 'react';
import { FormattedMessage, injectIntl } from 'react-intl';
import { inject } from 'mobx-react';
import { Button, Icon, Modal, Upload } from 'choerodon-ui';
import { axios } from '@choerodon/boot';
import querystring from 'query-string';
import UserInfoStore from '../../../stores/user/user-info/UserInfoStore';
import SystemSettingStore from '../../../stores/global/system-setting/SystemSettingStore';
import '../../user/user-info/Userinfo.scss';
const Dragger = Upload.Dragger;
const { round } = Math;
// Fixed pixel size of the crop editor canvas.
const editorWidth = 540;
const editorHeight = 300;
// Default and minimum edge length of the square crop rectangle (px).
const defaultRectSize = 200;
const minRectSize = 80;
const intlPrefix = 'global.system-setting.edit';
const prefixClas = 'c7n-iam-system-setting-edit';
// Upload size limit in KB (1024 KB = 1 MB; compared as limitSize * 1024 bytes).
const limitSize = 1024;
// Mutable state shared by the document-level mousemove/mouseup handlers of
// the in-flight drag/resize gesture (module scope, single gesture at a time).
let relativeX = 0;
let relativeY = 0;
let resizeMode;
let resizeX = 0;
let resizeY = 0;
let resizeSize = 0;
/**
 * True when the rotation leaves the image transposed — i.e. an odd number
 * of quarter turns, so displayed width and height are swapped.
 */
function rotateFlag(rotate) {
  const quarterTurns = rotate / 90;
  return quarterTurns % 2 !== 0;
}
/**
 * Modal uploader for the system logo/favicon with an interactive square
 * cropper: the user picks an image, drags/resizes/rotates a crop rectangle,
 * previews it at several sizes, then submits. Crop coordinates are sent in
 * the query string and the raw file in FormData.
 *
 * NOTE(review): `id` and `AppState` are destructured from props in handleOk
 * but never used — confirm they can be dropped.
 */
@inject('AppState')
@injectIntl
export default class AvatarUploader extends Component {
  state = {
    submitting: false,
    img: null, // loaded Image element; null until a file is chosen
    file: null, // the File selected for upload
    size: defaultRectSize, // crop square edge length, in editor pixels
    x: 0, // crop origin within the displayed image
    y: 0,
    rotate: 0, // rotation in degrees; the UI only produces multiples of 90
  };

  // Submit the crop. Coordinates are converted from editor space back to the
  // image's natural pixel space via `scale` before being sent.
  handleOk = () => {
    const { id, intl, AppState, onSave } = this.props;
    const { x, y, size, rotate, file, imageStyle: { width, height }, img: { naturalWidth, naturalHeight } } = this.state;
    const flag = rotateFlag(rotate);
    const scale = naturalWidth / width;
    // At 90/270° the displayed box is transposed, so the crop origin must be
    // shifted by half the width/height difference.
    const startX = flag ? x - ((width - height) / 2) : x;
    const startY = flag ? y + ((width - height) / 2) : y;
    const qs = querystring.stringify({
      rotate,
      startX: round(startX * scale),
      startY: round(startY * scale),
      endX: round(size * scale),
      endY: round(size * scale),
    });
    const data = new FormData();
    data.append('file', file);
    this.setState({ submitting: true });
    axios.post(`${Choerodon.API_HOST}/iam/v1/system/setting/upload/logo?${qs}`, data)
      .then((res) => {
        if (res.failed) {
          Choerodon.prompt(res.message);
        } else {
          onSave(res);
          this.close();
        }
        this.setState({ submitting: false });
      })
      .catch((error) => {
        Choerodon.handleResponseError(error);
        this.setState({ submitting: false });
      });
  };

  // Clear the editor and tell the parent to hide the modal.
  close() {
    this.setState({
      img: null,
    });
    this.props.onVisibleChange(false);
  }

  handleCancel = () => {
    this.close();
  };

  // Begin dragging the crop rectangle; listeners are document-wide so the
  // drag keeps working when the cursor leaves the rectangle.
  handleMoveStart = ({ clientX, clientY }) => {
    const { x, y } = this.state;
    relativeX = clientX - x;
    relativeY = clientY - y;
    document.addEventListener('mousemove', this.handleMoving);
    document.addEventListener('mouseup', this.handleMoveEnd);
  };

  // Move the crop rectangle, clamped to the (possibly transposed) image box.
  handleMoving = ({ clientX, clientY }) => {
    const { size, imageStyle: { width, height }, rotate } = this.state;
    const flag = rotateFlag(rotate);
    const minX = flag ? (width - height) / 2 : 0;
    const minY = flag ? (height - width) / 2 : 0;
    const maxX = width - size - minX;
    const maxY = height - size - minY;
    this.setState({
      x: Math.min(Math.max(minX, clientX - relativeX), maxX),
      y: Math.min(Math.max(minY, clientY - relativeY), maxY),
    });
  };

  handleMoveEnd = () => {
    document.removeEventListener('mousemove', this.handleMoving);
    document.removeEventListener('mouseup', this.handleMoveEnd);
  };

  // Begin resizing from a corner handle; the handle's className ('lt', 'rt',
  // 'lb', 'rb') selects the resize mode.
  handleResizeStart = (e) => {
    e.stopPropagation();
    const { currentTarget, clientX, clientY } = e;
    const { x, y, size } = this.state;
    relativeX = clientX - x;
    relativeY = clientY - y;
    resizeMode = currentTarget.className;
    resizeX = x;
    resizeY = y;
    resizeSize = size;
    document.addEventListener('mousemove', this.handleResizing);
    document.addEventListener('mouseup', this.handleResizeEnd);
  };

  // Resize the square from the grabbed corner, keeping it square, at least
  // minRectSize, and inside the (possibly transposed) image box.
  handleResizing = ({ clientX, clientY }) => {
    const { imageStyle: { width, height }, rotate } = this.state;
    const flag = rotateFlag(rotate);
    const newX = clientX - relativeX;
    const newY = clientY - relativeY;
    let x = resizeX;
    let y = resizeY;
    let size;
    if (resizeMode === 'lt') {
      const relative = Math.min(newX - resizeX, newY - resizeY);
      x += relative;
      y += relative;
      size = (resizeSize - x) + resizeX;
    } else if (resizeMode === 'rt') {
      const relative = Math.min(resizeX - newX, newY - resizeY);
      y += relative;
      size = (resizeSize - y) + resizeY;
    } else if (resizeMode === 'lb') {
      const relative = Math.min(newX - resizeX, resizeY - newY);
      x += relative;
      size = (resizeSize - x) + resizeX;
    } else {
      const relative = Math.min(resizeX - newX, resizeY - newY);
      size = resizeSize - relative;
    }
    const minX = flag ? (width - height) / 2 : 0;
    const minY = flag ? (height - width) / 2 : 0;
    const maxWidth = flag ? ((width - height) / 2) + height : width;
    const maxHeight = flag ? ((height - width) / 2) + width : height;
    x = Math.min(Math.max(minX, x), (resizeSize - minRectSize) + resizeX);
    y = Math.min(Math.max(minY, y), (resizeSize - minRectSize) + resizeY);
    this.setState({
      x,
      y,
      size: Math.max(Math.min(size, maxWidth - x, maxHeight - y), minRectSize),
    });
  };

  handleResizeEnd = () => {
    document.removeEventListener('mousemove', this.handleResizing);
    document.removeEventListener('mouseup', this.handleResizeEnd);
  };

  // Fit the image into the editor at the given rotation: scale it up to at
  // least minRectSize and down to fit editorWidth x editorHeight, center it,
  // and reset the crop rectangle to a centered default.
  initImageSize(img, rotate = 0) {
    const { naturalWidth, naturalHeight } = img;
    const flag = rotateFlag(rotate);
    let width = flag ? naturalHeight : naturalWidth;
    let height = flag ? naturalWidth : naturalHeight;
    if (width < minRectSize || height < minRectSize) {
      if (width > height) {
        width = (width / height) * minRectSize;
        height = minRectSize;
      } else {
        height = (height / width) * minRectSize;
        width = minRectSize;
      }
    } else if (width > editorWidth || height > editorHeight) {
      if (width / editorWidth > height / editorHeight) {
        height = (height / width) * editorWidth;
        width = editorWidth;
      } else {
        width = (width / height) * editorHeight;
        height = editorHeight;
      }
    }
    // Swap back: imageStyle stores the un-rotated box; rotation is applied
    // via the CSS transform below.
    if (flag) {
      const tmp = width;
      width = height;
      height = tmp;
    }
    const size = Math.min(defaultRectSize, width, height);
    this.setState({
      img,
      imageStyle: {
        width,
        height,
        top: (editorHeight - height) / 2,
        left: (editorWidth - width) / 2,
        transform: `rotate(${rotate}deg)`,
      },
      size,
      x: (width - size) / 2,
      y: (height - size) / 2,
      rotate,
    });
  }

  // Load an image URL and initialize the editor once it has dimensions.
  loadImage(src) {
    const img = new Image();
    img.src = src;
    img.onload = () => {
      this.initImageSize(img);
    };
  }

  // Compute background-size/position/transform so a plain element shows the
  // current crop at previewSize, for each of the four 90° rotations.
  getPreviewProps(previewSize) {
    const { size, x, y, img: { src }, rotate, imageStyle: { width, height } } = this.state;
    const previewScale = previewSize / size;
    let radius = (rotate % 360) / 90;
    let px = -x;
    let py = -y;
    if (radius < 0) radius += 4; // normalize negative rotations to 0..3
    if (radius === 1) {
      py = ((x + ((height - width) / 2)) - height) + size;
      px = ((height - width) / 2) - y;
    } else if (radius === 2) {
      px = (x - width) + size;
      py = (y - height) + size;
    } else if (radius === 3) {
      px = ((y + ((width - height) / 2)) - width) + size;
      py = ((width - height) / 2) - x;
    }
    return {
      style: {
        width: previewSize,
        height: previewSize,
        backgroundImage: `url('${src}')`,
        backgroundSize: `${width * previewScale}px ${height * previewScale}px`,
        backgroundPosition: `${px * previewScale}px ${py * previewScale}px`,
        transform: `rotate(${rotate}deg)`,
      },
    };
  }

  renderPreviewItem(previewSize) {
    return (
      <div className={`${prefixClas}-preview-item`}>
        <i {...this.getPreviewProps(previewSize)} />
        <p>{`${previewSize}*${previewSize}`}</p>
      </div>
    );
  }

  // The crop editor: image, draggable/resizable mask, rotate buttons,
  // size previews, and a re-upload button.
  renderEditor(props) {
    const { img, imageStyle, file, size, x, y, rotate } = this.state;
    const { src } = img;
    const { left, top } = imageStyle;
    const style = {
      width: editorWidth,
      height: editorHeight,
    };
    // The mask is drawn with four borders around the transparent crop hole.
    const maskStyle = {
      borderTopWidth: y + top,
      borderRightWidth: editorWidth - x - left - size,
      borderBottomWidth: editorHeight - y - top - size,
      borderLeftWidth: x + left,
    };
    return (
      <div>
        <h3 className={`${prefixClas}-text`}>
          <FormattedMessage id={`${intlPrefix}.text`} />
          <Icon type="keyboard_arrow_right" />
          <span>{file.name}</span>
        </h3>
        <h4 className={`${prefixClas}-hint`}>
          <FormattedMessage id={`${intlPrefix}.hint`} />
        </h4>
        <div className={`${prefixClas}-wraper`}>
          <div className={prefixClas} style={style}>
            <img alt="" src={src} style={imageStyle} />
            <div className={`${prefixClas}-mask`} style={maskStyle}>
              <div onMouseDown={this.handleMoveStart}>
                <i className="lt" onMouseDown={this.handleResizeStart} />
                <i className="rt" onMouseDown={this.handleResizeStart} />
                <i className="lb" onMouseDown={this.handleResizeStart} />
                <i className="rb" onMouseDown={this.handleResizeStart} />
              </div>
            </div>
          </div>
          <div className={`${prefixClas}-toolbar`}>
            <Button icon="replay_90" shape="circle" onClick={() => this.initImageSize(img, rotate - 90)} />
            <Button icon="play_90" shape="circle" onClick={() => this.initImageSize(img, rotate + 90)} />
          </div>
          <div className={`${prefixClas}-preview`}>
            <h5 className={`${prefixClas}-preview-title`}>
              <FormattedMessage id={`${intlPrefix}.preview`} />
            </h5>
            {this.renderPreviewItem(80)}
            {this.renderPreviewItem(30)}
            {this.renderPreviewItem(18)}
          </div>
        </div>
        <div className={`${prefixClas}-button`}>
          <Upload {...props}>
            <Button icon="file_upload" type="primary">
              <FormattedMessage id={`${intlPrefix}.button`} />
            </Button>
          </Upload>
        </div>
      </div>
    );
  }

  // Upload config. beforeUpload returns false to suppress the automatic
  // upload and load the file locally instead (when object URLs are
  // available); onChange only fires if the automatic upload does happen.
  getUploadProps() {
    const { intl } = this.props;
    return {
      multiple: false,
      name: 'file',
      accept: 'image/jpeg, image/png, image/jpg',
      action: `${Choerodon.API_HOST}/iam/v1/system/setting/upload/logo`,
      headers: {
        Authorization: `bearer ${Choerodon.getCookie('access_token')}`,
      },
      showUploadList: false,
      beforeUpload: (file) => {
        const { size } = file;
        if (size > limitSize * 1024) {
          Choerodon.prompt(intl.formatMessage({ id: `${intlPrefix}.file.size.limit` }, { size: `${limitSize / 1024}M` }));
          return false;
        }
        this.setState({ file });
        const windowURL = window.URL || window.webkitURL;
        if (windowURL && windowURL.createObjectURL) {
          this.loadImage(windowURL.createObjectURL(file));
          return false;
        }
      },
      onChange: ({ file }) => {
        const { status, response } = file;
        if (status === 'done') {
          this.loadImage(response);
        } else if (status === 'error') {
          Choerodon.prompt(`${response.message}`);
        }
      },
    };
  }

  // Before an image is chosen: a drag-and-drop zone; after: the crop editor.
  renderContainer() {
    const { img } = this.state;
    const props = this.getUploadProps();
    return img ? (
      this.renderEditor(props)
    ) :
      (
        <Dragger className="user-info-avatar-dragger" {...props}>
          <Icon type="inbox" />
          <h3 className="user-info-avatar-dragger-text">
            <FormattedMessage id={`${intlPrefix}.dragger.text`} />
          </h3>
          <h4 className="user-info-avatar-dragger-hint">
            <FormattedMessage id={`${intlPrefix}.dragger.hint`} values={{ size: `${limitSize / 1024}M`, access: 'PNG、JPG、JPEG' }} />
          </h4>
        </Dragger>
      );
  }

  render() {
    const { visible, type } = this.props;
    const { img, submitting } = this.state;
    const modalFooter = [
      <Button disabled={submitting} key="cancel" onClick={this.handleCancel}>
        <FormattedMessage id="cancel" />
      </Button>,
      <Button key="save" type="primary" disabled={!img} loading={submitting} onClick={this.handleOk}>
        <FormattedMessage id="save" />
      </Button>,
    ];
    return (
      <Modal
        title={<FormattedMessage id={`${intlPrefix}.title`} values={{ name: type === 'favicon' ? '徽标' : '导航栏图形标' }} />}
        className="user-info-avatar-modal"
        visible={visible}
        width={980}
        closable={false}
        maskClosable={false}
        footer={modalFooter}
      >
        {this.renderContainer()}
      </Modal>
    );
  }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/app/service/impl/MenuServiceImplSpec.groovy<|end_filename|>
package io.choerodon.iam.app.service.impl
import spock.lang.Specification
/**
 * Placeholder Spock specification for MenuServiceImpl.
 * TODO(review): every feature method below is empty and therefore passes
 * vacuously — fill in real given/when/then blocks.
 *
 * @author dengyouquan
 */
class MenuServiceImplSpec extends Specification {
    def "Create"() {
    }

    def "Delete"() {
    }

    def "Update"() {
    }

    def "List"() {
    }

    def "ListAfterTestPermission"() {
    }

    def "ListTreeMenusWithPermissions"() {
    }

    def "Check"() {
    }

    def "SaveListTree"() {
    }

    def "QueryMenusWithPermissions"() {
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/ProjectCategoryMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.api.dto.ProjectCategoryDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * MyBatis mapper for project-category records. Declares no extra statements;
 * it relies entirely on the generic CRUD methods inherited from {@link Mapper}.
 *
 * @author jiameng.cao
 * @date 2019/6/4
 */
public interface ProjectCategoryMapper extends Mapper<ProjectCategoryDTO> {
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/PasswordPolicyServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.app.service.PasswordPolicyService;
import io.choerodon.iam.infra.asserts.OrganizationAssertHelper;
import io.choerodon.iam.infra.dto.PasswordPolicyDTO;
import io.choerodon.iam.infra.exception.InsertException;
import io.choerodon.iam.infra.exception.UpdateExcetion;
import io.choerodon.iam.infra.mapper.PasswordPolicyMapper;
import org.springframework.stereotype.Component;
/**
* @author wuguokai
*/
@Component
public class PasswordPolicyServiceImpl implements PasswordPolicyService {
private OrganizationAssertHelper organizationAssertHelper;
private PasswordPolicyMapper passwordPolicyMapper;
public PasswordPolicyServiceImpl(OrganizationAssertHelper organizationAssertHelper,
PasswordPolicyMapper passwordPolicyMapper) {
this.organizationAssertHelper = organizationAssertHelper;
this.passwordPolicyMapper = passwordPolicyMapper;
}
@Override
public PasswordPolicyDTO create(Long orgId, PasswordPolicyDTO passwordPolicyDTO) {
organizationAssertHelper.organizationNotExisted(orgId);
passwordPolicyDTO.setOrganizationId(orgId);
if (passwordPolicyMapper.insertSelective(passwordPolicyDTO) != 1) {
throw new InsertException("error.passwordPolicy.create");
}
return passwordPolicyMapper.selectByPrimaryKey(passwordPolicyDTO.getId());
}
@Override
public PasswordPolicyDTO queryByOrgId(Long orgId) {
PasswordPolicyDTO dto = new PasswordPolicyDTO();
dto.setOrganizationId(orgId);
return passwordPolicyMapper.selectOne(dto);
}
@Override
public PasswordPolicyDTO query(Long id) {
return passwordPolicyMapper.selectByPrimaryKey(id);
}
@Override
public PasswordPolicyDTO update(Long orgId, Long id, PasswordPolicyDTO passwordPolicyDTO) {
organizationAssertHelper.organizationNotExisted(orgId);
PasswordPolicyDTO old = passwordPolicyMapper.selectByPrimaryKey(id);
if (!orgId.equals(old.getOrganizationId())) {
throw new CommonException("error.passwordPolicy.organizationId.not.same");
}
passwordPolicyDTO.setId(id);
if (passwordPolicyMapper.updateByPrimaryKeySelective(passwordPolicyDTO) != 1) {
throw new UpdateExcetion("error.passwordPolicy.update");
}
return passwordPolicyMapper.selectByPrimaryKey(passwordPolicyDTO.getId());
}
}
<|start_filename|>react/src/app/iam/stores/global/root-user/RootUserStore.js<|end_filename|>
import { action, computed, observable } from 'mobx';
import { axios, store } from '@choerodon/boot';
import querystring from 'query-string';
@store('RootUserStore')
class RootUserStore {
@observable loading = true;
@observable rootUserData = [];
@observable usersData = []; // 全平台启用用户数据
@action
setRootUserData(data) {
this.rootUserData = data;
}
@computed
get getRootUserData() {
return this.rootUserData;
}
@action
setLoading(loading) {
this.loading = loading;
}
@computed
get getLoading() {
return this.loading;
}
@action setUsersData(data) {
this.usersData = data;
}
@computed get getUsersData() {
return this.usersData;
}
loadRootUserData(
{ current, pageSize },
{ loginName, realName, enabled, locked },
{ columnKey = 'id', order = 'descend' },
params) {
const queryObj = {
page: current,
size: pageSize,
loginName,
realName,
enabled,
locked,
params,
};
if (columnKey) {
const sorter = [];
sorter.push(columnKey);
if (order === 'descend') {
sorter.push('desc');
}
queryObj.sort = sorter.join(',');
}
return axios.get(`/iam/v1/users/admin?${querystring.stringify(queryObj)}`);
}
searchMemberIds(loginNames) {
const promises = loginNames.map(index => axios.get(`/iam/v1/users?login_name=${index}`));
return axios.all(promises);
}
addRootUser(ids) {
const id = ids.join(',');
return axios.post(`/iam/v1/users/admin?id=${id}`);
}
deleteRootUser(id) {
return axios.delete(`/iam/v1/users/admin/${id}`);
}
loadUsers = (queryObj = { sort: 'id' }) => axios.get(`/iam/v1/all/users?${querystring.stringify(queryObj)}`);
}
const rootUserStore = new RootUserStore();
export default rootUserStore;
<|start_filename|>react/src/app/iam/guide/SiteBasic/index.js<|end_filename|>
import React, { Component } from 'react';
import { Link } from 'react-router-dom';
import './index.scss';
import { Button, Icon, Timeline } from 'choerodon-ui';
import { inject, observer } from 'mobx-react';
import { injectIntl } from 'react-intl';
import { StepFooter, StepBar, GuideMask, AutoGuide } from '@choerodon/boot';
import EasyImg from '../../components/easyImg';
import picSiteRole from './image/site-role.png';
import picRootRole from './image/root-role.png';
import picAddMenu from './image/add-menu.png';
import picCard from './image/card.png';
@inject('GuideStore', 'AppState')
@injectIntl
@observer
export default class SiteBasic extends Component {
  // Renders a small menu-entry card (icon + label) used as a visual anchor in
  // several guide steps.
  getGuideCard(iconType, name) {
    return (
      <div className="guide-card">
        <Icon type={iconType} style={{ color: 'rgba(0,0,0,0.65)', fontSize: '24px' }} />
        <span style={{ color: '#000', fontSize: '16px' }}>{name}</span>
      </div>
    );
  }

  // GuideMask onCheck callback: reject a match whose first child is one of our
  // own "guide-card" placeholders so the mask highlights the real menu item.
  handleCheck = dom => dom.children[0].className !== 'guide-card'

  // Returns the JSX for step `current` of the guided tour (0-6); any other
  // index falls through to the final "completed" page.
  renderStep(current) {
    const { AppState, GuideStore, intl } = this.props;
    switch (current) {
      // Step 0: overview of what this tutorial covers.
      case 0:
        return (
          <div>
            <h1>{intl.formatMessage({ id: 'guide.iam.sitebasic.h1' })}</h1>
            <h2>概览</h2>
            <p className="text">在平台层您可以管理组织,管理角色对应的权限,给成员分配平台层角色,设置root用户,自定义平台菜单、仪表盘、标志等基本配置。
              当然,您想进行这些操作,您需是平台管理员及其以上的权限角色。</p>
            <p>在此教程,您将学习以下操作:</p>
            <ul className="step-dire">
              <li>管理组织</li>
              <li>创建角色</li>
              <li>分配平台角色</li>
              <li>设置Root用户</li>
              <li>平台自定义设置</li>
            </ul>
          </div>
        );
      // Step 1: navigating to the platform-settings section, with an
      // interactive AutoGuide demo that collapses the menu first.
      case 1:
        return (
          <div>
            <h1>{intl.formatMessage({ id: 'guide.iam.sitebasic.h1' })}</h1>
            <h2>导航至平台设置页面</h2>
            <Timeline style={{ height: '100px' }}>
              <Timeline.Item>
                <p>
                  点击<GuideMask highLight="icon-settings" level={1}>管理</GuideMask>按钮
                </p>
              </Timeline.Item>
              <Timeline.Item>
                <p>
                  打开平台左侧的<GuideMask highLight="c7n-boot-header-logo-menu-icon" level={0}>菜单</GuideMask>
                </p>
              </Timeline.Item>
              <Timeline.Item>
                <p>选择<GuideMask highLight="icon-IAM" level={2}>平台设置</GuideMask>部分</p>
              </Timeline.Item>
            </Timeline>
            <AutoGuide
              highLight={['icon-settings', 'icon-settings', 'c7n-boot-header-logo-menu-icon', 'c7n-boot-header-logo-menu-icon', 'icon-IAM']}
              idx={[0, 0, 0, 0, 0]}
              level={[1, 1, 0, 0, 2]}
              mode={['mask', 'click', 'mask', 'click', 'mask']}
              onStart={() => AppState.setMenuExpanded(false)}
            >
              <div className="guide-card">
                <Icon type="IAM" style={{ color: 'rgba(0,0,0,0.65)', fontSize: '24px' }} />
                <span style={{ color: '#000', fontSize: '16px' }}>平台设置</span>
                <Icon type="play_circle_filled" style={{ marginLeft: 'calc(100% - 199.5px)', fontSize: '15px' }} />
                <span style={{ marginRight: '16px' }}>互动演示</span>
              </div>
            </AutoGuide>
          </div>
        );
      // Step 2: managing organizations (edit / view detail).
      case 2:
        return (
          <div>
            <h1>{intl.formatMessage({ id: 'guide.iam.sitebasic.h1' })}</h1>
            <h2>管理组织</h2>
            <p>组织是项目的上一个层级,用户必须属于一个组织。通过组织管理您可以修改组织或者查看组织详情。</p>
            <Timeline>
              <Timeline.Item>
                <p>选择<GuideMask highLight="icon icon-manage_organization" level={2} mode="checkMask" onCheck={dom => this.handleCheck(dom)}>组织管理</GuideMask></p>
                <GuideMask highLight="icon icon-manage_organization" className="no-border" level={2} mode="checkMask" onCheck={dom => this.handleCheck(dom)}>
                  {this.getGuideCard('manage_organization', '组织管理')}
                </GuideMask>
              </Timeline.Item>
              <Timeline.Item>
                <p>
                  点击
                  <GuideMask highLight="icon-mode_edit" level={1} siteLevel="site" route="/iam/organization">
                    修改
                    <Icon type="mode_edit" style={{ fontSize: 10 }} />
                  </GuideMask>图标,您可以修改组织信息。
                </p>
              </Timeline.Item>
              <Timeline.Item>
                <p>
                  点击
                  <GuideMask highLight="icon-find_in_page" level={1} siteLevel="site" route="/iam/organization">
                    详情
                    <Icon type="find_in_page" style={{ fontSize: 10 }} />
                  </GuideMask>图标,您可以查看组织信息。
                </p>
              </Timeline.Item>
            </Timeline>
            <div />
          </div>
        );
      // Step 3: creating a role and attaching permissions to it.
      case 3:
        return (
          <div>
            <h1>{intl.formatMessage({ id: 'guide.iam.sitebasic.h1' })}</h1>
            <h2>创建角色</h2>
            <p>角色是您可分配给成员的一组权限。通过角色管理您可以创建、启停用角色,为角色添加权限。</p>
            <Timeline>
              <Timeline.Item>
                <p>选择<GuideMask highLight="icon-assignment_ind" level={2} mode="checkMask" onCheck={dom => this.handleCheck(dom)}>角色管理</GuideMask></p>
                <GuideMask highLight="icon-assignment_ind" className="no-border" level={2} mode="checkMask" onCheck={dom => this.handleCheck(dom)}>
                  {this.getGuideCard('assignment_ind', '角色管理')}
                </GuideMask>
              </Timeline.Item>
              <Timeline.Item>
                <p>点击 <GuideMask highLight="icon-playlist_add" level={1} siteLevel="site" route="/iam/role">创建角色</GuideMask>按钮。</p>
                <ul className="ul1">
                  <li>选择角色层级,输入角色编码、角色名称。</li>
                  <li>选择角色标签。(选填)</li>
                  <li>点击
                    <GuideMask highLight="icon-add" level={1} siteLevel="site" route="/iam/role/create">添加权限</GuideMask>,选择要给角色添加的权限.</li>
                  <li>点击创建按钮完成角色创建</li>
                </ul>
              </Timeline.Item>
            </Timeline>
            <p><Icon type="info" style={{ color: '#ffb100', marginBottom: '3px' }} /><span> 角色标签用于定义角色的特定逻辑的功能,需与代码开发结合。</span></p>
          </div>
        );
      // Step 4: assigning site-level roles to members.
      case 4:
        return (
          <div>
            <h1>{intl.formatMessage({ id: 'guide.iam.sitebasic.h1' })}</h1>
            <h2>分配平台角色</h2>
            <p>通过平台角色分配您可以向成员分配平台层的角色,以便于成员有权限在平台层操作。</p>
            <Timeline>
              <Timeline.Item>
                <p>选择<GuideMask highLight="icon-person_add" level={2} mode="checkMask" onCheck={dom => this.handleCheck(dom)}>平台角色分配</GuideMask></p>
                <GuideMask highLight="icon-person_add" level={2} siteLevel="site" className="no-border" mode="checkMask" onCheck={dom => this.handleCheck(dom)}>
                  {this.getGuideCard('person_add', '平台角色分配')}
                </GuideMask>
              </Timeline.Item>
              <Timeline.Item>
                <p>点击<GuideMask highLight="icon-arrow_drop_down" level={3} idx={1} siteLevel="site" route="/iam/member-role">成员类型选择框</GuideMask>,切换您要进行角色分配的成员类型。</p>
              </Timeline.Item>
              <Timeline.Item>
                <p>点击 <GuideMask highLight="icon-playlist_add" level={1} siteLevel="site" route="/iam/member-role">添加</GuideMask>按钮。 </p>
                <ul className="ul1">
                  <li>首先,选择成员类型</li>
                  <li>输入要添加角色的成员登录名。</li>
                  <li>选择对应角色</li>
                  <li>点击添加按钮完成角色分配</li>
                </ul>
                <EasyImg src={picSiteRole} />
              </Timeline.Item>
            </Timeline>
          </div>
        );
      // Step 5: adding/removing Root users.
      case 5:
        return (
          <div>
            <h1>{intl.formatMessage({ id: 'guide.iam.sitebasic.h1' })}</h1>
            <h2>设置root用户</h2>
            <p>Root用户拥有系统的最高权限。他可以管理平台以及平台上的所有组织和项目。通过Root用户设置您可以添加或移除root用户。</p>
            <Timeline>
              <Timeline.Item>
                <p>选择<GuideMask highLight="icon-root" level={2} siteLevel="site" mode="checkMask" onCheck={dom => this.handleCheck(dom)}>Root用户设置</GuideMask></p>
                <GuideMask highLight="icon-root" level={2} siteLevel="site" className="no-border" mode="checkMask" onCheck={dom => this.handleCheck(dom)}>{this.getGuideCard('root', 'Root用户设置')}</GuideMask>
              </Timeline.Item>
              <Timeline.Item>
                <p>点击 <GuideMask highLight="icon-playlist_add" level={1} siteLevel="site" route="/iam/root-user">添加</GuideMask>按钮。</p>
                <ul className="ul1">
                  <li>输入要分配Root权限的成员登录名。</li>
                  <li>点击添加按钮完成Root用户设置。</li>
                </ul>
                <EasyImg src={picRootRole} />
              </Timeline.Item>
            </Timeline>
            <p><Icon type="info" style={{ color: '#f44336', marginBottom: '3px' }} /><span> Root用户拥有系统的最高权限,请谨慎操作。</span></p>
          </div>
        );
      // Step 6: platform customization (menus, dashboard, system settings).
      case 6:
        return (
          <div>
            <h1>{intl.formatMessage({ id: 'guide.iam.sitebasic.h1' })}</h1>
            <h2>平台自定义设置</h2>
            <p>您可以自定义您的菜单,仪表盘以及平台logo等标志性配置。</p>
            <Timeline>
              <Timeline.Item>
                <p>选择<GuideMask highLight="icon-view_list" level={2} mode="checkMask" onCheck={dom => this.handleCheck(dom)}>菜单配置</GuideMask></p>
                <GuideMask highLight="icon-view_list" level={2} className="no-border" mode="checkMask" onCheck={dom => this.handleCheck(dom)}>
                  {this.getGuideCard('view_list', '菜单配置')}
                </GuideMask>
                <p>拖动目录/菜单,调整目录/菜单的顺序。</p>
                <p>点击 <GuideMask highLight="icon-playlist_add" level={1} siteLevel="site" route="/iam/menu-setting">创建目录</GuideMask>按钮。向对应层级添加目录。</p>
                <EasyImg src={picAddMenu} />
                <p>点击<GuideMask highLight="c7n-btn-primary" level={0} siteLevel="site" route="/iam/menu-setting">保存</GuideMask>按钮完成菜单配置。</p>
              </Timeline.Item>
              <Timeline.Item>
                <p>
                  选择
                  <GuideMask highLight="icon-donut_small" level={2} mode="checkMask" onCheck={dom => this.handleCheck(dom)}>仪表盘配置</GuideMask>
                </p>
                <GuideMask highLight="icon-donut_small" level={2} className="no-border" mode="checkMask" onCheck={dom => this.handleCheck(dom)}>
                  {this.getGuideCard('donut_small', '仪表盘配置')}
                </GuideMask>
                <p>
                  点击
                  <GuideMask highLight="icon-mode_edit" level={1} siteLevel="site" route="/iam/dashboard-setting">
                    修改
                    <Icon type="mode_edit" style={{ fontSize: 10 }} />
                  </GuideMask>图标,您可以修改卡片的信息,选择是否开启角色控制。
                </p>
                <EasyImg src={picCard} />
                <p>点击
                  <GuideMask highLight="icon-remove_circle_outline" level={1} siteLevel="site" route="/iam/dashboard-setting">
                    启停用
                    <Icon type="finished" style={{ fontSize: 10 }} />
                  </GuideMask>按钮,可以控制此卡片是否启停用。
                </p>
              </Timeline.Item>
              <Timeline.Item>
                <p>选择<GuideMask highLight="icon-settings" idx={1} level={2}>系统配置</GuideMask></p>
                <GuideMask highLight="icon-settings" idx={1} level={2} className="no-border">
                  {this.getGuideCard('settings', '系统配置')}
                </GuideMask>
                <p>您可以上传平台徽标、图形标,自定义平台简称、全称,更改平台默认密码和默认语言的顺序。</p>
                <p>点击<GuideMask highLight="c7n-btn-primary" level={0} siteLevel="site" route="/iam/system-setting">保存</GuideMask>按钮完成系统配置。</p>
                <p>您可以点击 <GuideMask highLight="icon-swap_horiz" level={1} siteLevel="site" route="/iam/system-setting">重置</GuideMask>按钮,还原到默认配置。</p>
              </Timeline.Item>
            </Timeline>
          </div>
        );
      // Final page: congratulations + links to further documentation.
      default:
        return (
          <div>
            <h1>{intl.formatMessage({ id: 'guide.iam.sitebasic.h1' })}</h1>
            <h2>完成</h2>
            <div className="icon-winner" />
            <p>恭喜!</p>
            <p>现在您已经知道作为平台管理员,要如何进行系统平台的一些列配置和设置。</p>
            <p>您可以点击表单页面的“<a href="http://choerodon.io" target="_blank">了解更多<Icon type="open_in_new" /></a>”,了解系统配置的更多用户手册。</p>
            <p><a href="http://v0-10.choerodon.io/zh/docs/user-guide/system-configuration/platform/menu_configuration/" target="_blank">菜单配置<Icon type="open_in_new" /></a> 用于配置平台菜单</p>
            <p><a href="http://v0-10.choerodon.io/zh/docs/user-guide/system-configuration/platform/dashboard-config/" target="_blank">仪表盘配置<Icon type="open_in_new" /></a> 用于预置用户可见的仪表盘卡片</p>
            <p><a href="http://choerodon.io/zh/docs/user-guide/system-configuration/tenant/ldap/" target="_blank">LDAP<Icon type="open_in_new" /></a> 对组织应用的LDAP信息设置的管理</p>
          </div>
        );
    }
  }

  // Renders the step progress bar, the current step's content and the shared
  // footer; total step count is 7.
  render() {
    const { GuideStore } = this.props;
    return (
      <div className="c7n-iam-guide-site-basic">
        <div style={{ width: '90%', margin: '0 auto' }}>
          <StepBar current={GuideStore.getCurrentStep} total={7} />
          {this.renderStep(GuideStore.getCurrentStep)}
        </div>
        <StepFooter total={7} />
      </div>
    );
  }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/FileDTO.java<|end_filename|>
package io.choerodon.iam.api.dto;
import io.swagger.annotations.ApiModelProperty;
/**
 * Transfer object describing a file stored on the file server: the server
 * endpoint plus the original (client-side) and stored (server-side) names.
 */
public class FileDTO {
    /** Address of the file server hosting the file. */
    @ApiModelProperty(value = "文件服务器地址")
    private String endPoint;
    /** File name as originally uploaded by the client. */
    @ApiModelProperty(value = "原始文件名")
    private String originFileName;
    /** File name assigned on the server after upload. */
    @ApiModelProperty(value = "新文件名")
    private String fileName;
    public String getEndPoint() {
        return endPoint;
    }
    public void setEndPoint(String endPoint) {
        this.endPoint = endPoint;
    }
    public String getOriginFileName() {
        return originFileName;
    }
    public void setOriginFileName(String originFileName) {
        this.originFileName = originFileName;
    }
    public String getFileName() {
        return fileName;
    }
    public void setFileName(String fileName) {
        this.fileName = fileName;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/ParsePermissionServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.iam.InitRoleCode;
import io.choerodon.core.iam.ResourceLevel;
import io.choerodon.core.swagger.PermissionData;
import io.choerodon.core.swagger.SwaggerExtraData;
import io.choerodon.eureka.event.EurekaEventPayload;
import io.choerodon.iam.app.service.UploadHistoryService;
import io.choerodon.iam.infra.dto.PermissionDTO;
import io.choerodon.iam.infra.dto.RoleDTO;
import io.choerodon.iam.infra.dto.RolePermissionDTO;
import io.choerodon.iam.infra.mapper.PermissionMapper;
import io.choerodon.iam.infra.mapper.RoleMapper;
import io.choerodon.iam.infra.mapper.RolePermissionMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import org.springframework.web.client.RestTemplate;
import java.io.IOException;
import java.util.*;
/**
* @author zhipeng.zuo
* @author superlee
*/
@Service
public class ParsePermissionServiceImpl implements UploadHistoryService.ParsePermissionService {
private static final Logger logger = LoggerFactory.getLogger(UploadHistoryService.ParsePermissionService.class);
private final ObjectMapper objectMapper = new ObjectMapper();
private PermissionMapper permissionMapper;
private RolePermissionMapper rolePermissionMapper;
private RestTemplate restTemplate = new RestTemplate();
private RoleMapper roleMapper;
@Value("${choerodon.cleanPermission:false}")
private boolean cleanPermission;
public void setRestTemplate(RestTemplate restTemplate) {
this.restTemplate = restTemplate;
}
public ParsePermissionServiceImpl(PermissionMapper permissionMapper,
RolePermissionMapper rolePermissionMapper,
RoleMapper roleMapper) {
this.permissionMapper = permissionMapper;
this.rolePermissionMapper = rolePermissionMapper;
this.roleMapper = roleMapper;
}
private void fetchSwaggerJsonByIp(final EurekaEventPayload payload) {
ResponseEntity<String> response = restTemplate.getForEntity("http://" + payload.getInstanceAddress() + "/v2/choerodon/api-docs",
String.class);
if (response.getStatusCode() == HttpStatus.OK) {
payload.setApiData(response.getBody());
} else {
throw new CommonException("fetch swagger error, statusCode is not 2XX, serviceId: " + payload.getId());
}
}
@Override
public void parser(EurekaEventPayload payload) {
try {
fetchSwaggerJsonByIp(payload);
String serviceCode = payload.getAppName();
String json = payload.getApiData();
logger.info("receive service: {} message, version: {}, ip: {}", serviceCode, payload.getVersion(), payload.getInstanceAddress());
if (!StringUtils.isEmpty(serviceCode) && !StringUtils.isEmpty(json)) {
JsonNode node = objectMapper.readTree(json);
Iterator<Map.Entry<String, JsonNode>> pathIterator = node.get("paths").fields();
Map<String, RoleDTO> initRoleMap = queryInitRoleByCode();
List<String> permissionCodes = new ArrayList<>();
while (pathIterator.hasNext()) {
Map.Entry<String, JsonNode> pathNode = pathIterator.next();
Iterator<Map.Entry<String, JsonNode>> methodIterator = pathNode.getValue().fields();
parserMethod(methodIterator, pathNode, serviceCode, initRoleMap, permissionCodes);
}
logger.info("cleanPermission : {}", cleanPermission);
if (cleanPermission) {
deleteDeprecatedPermission(permissionCodes, serviceCode);
//清理role_permission表层级不符的脏数据,会导致基于角色创建失败
cleanRolePermission();
}
}
} catch (IOException e) {
throw new CommonException("error.parsePermissionService.parse.IOException", e);
}
}
private void deleteDeprecatedPermission(List<String> permissionCodes, String serviceName) {
PermissionDTO dto = new PermissionDTO();
dto.setServiceCode(serviceName);
List<PermissionDTO> permissions = permissionMapper.select(dto);
int count = 0;
for (PermissionDTO permission : permissions) {
if (!permissionCodes.contains(permission.getCode())) {
permissionMapper.deleteByPrimaryKey(permission.getId());
RolePermissionDTO rolePermissionDTO = new RolePermissionDTO();
rolePermissionDTO.setPermissionId(permission.getId());
rolePermissionMapper.delete(rolePermissionDTO);
logger.info("@@@ service {} delete deprecated permission {}", serviceName, permission.getCode());
count++;
}
}
logger.info("service {} delete deprecated permission, total {}", serviceName, count);
}
private void cleanRolePermission() {
List<RoleDTO> roles = roleMapper.selectAll();
int count = 0;
for (RoleDTO role : roles) {
List<PermissionDTO> permissions = permissionMapper.selectErrorLevelPermissionByRole(role);
for (PermissionDTO permission : permissions) {
RolePermissionDTO rp = new RolePermissionDTO();
rp.setRoleId(role.getId());
rp.setPermissionId(permission.getId());
rolePermissionMapper.delete(rp);
logger.info("delete error role_permission, role id: {}, code: {}, level: {} ## permission id: {}, code:{}, level: {}",
role.getId(), role.getCode(), role.getResourceLevel(), permission.getId(), permission.getCode(), permission.getResourceLevel());
count++;
}
}
logger.info("clean error role_permission finished, total: {}", count);
}
/**
* 解析文档树某个路径的所有方法
*
* @param methodIterator 所有方法
* @param pathNode 路径
* @param serviceCode 服务名
*/
private void parserMethod(Iterator<Map.Entry<String, JsonNode>> methodIterator,
Map.Entry<String, JsonNode> pathNode, String serviceCode,
Map<String, RoleDTO> initRoleMap, List<String> permissionCode) {
while (methodIterator.hasNext()) {
Map.Entry<String, JsonNode> methodNode = methodIterator.next();
JsonNode tags = methodNode.getValue().get("tags");
String resourceCode = processResourceCode(tags);
try {
JsonNode extraDataNode = methodNode.getValue().get("description");
if (resourceCode == null || extraDataNode == null) {
continue;
}
SwaggerExtraData extraData = objectMapper.readValue(extraDataNode.asText(), SwaggerExtraData.class);
permissionCode.add(processPermission(extraData, pathNode.getKey(), methodNode, serviceCode, resourceCode, initRoleMap));
} catch (IOException e) {
logger.info("extraData read failed.", e);
}
}
}
private PermissionDTO selectByCode(String code) {
PermissionDTO dto = new PermissionDTO();
dto.setCode(code);
return permissionMapper.selectOne(dto);
}
private PermissionDTO insertSelective(PermissionDTO permissionDTO) {
if (permissionMapper.insertSelective(permissionDTO) != 1) {
throw new CommonException("error.permission.insert");
}
return permissionMapper.selectByPrimaryKey(permissionDTO);
}
@Override
public String processPermission(String[] roles, String path, String method, String description, PermissionData permission, String serviceName, String resourceCode, Map<String, RoleDTO> initRoleMap) {
String action = permission.getAction();
String code = serviceName + "." + resourceCode + "." + action;
PermissionDTO permissionDTO = selectByCode(code);
PermissionDTO newPermission = new PermissionDTO();
newPermission.setCode(code);
newPermission.setPath(path);
newPermission.setMethod(method);
newPermission.setResourceLevel(permission.getPermissionLevel());
newPermission.setDescription(description);
newPermission.setAction(action);
newPermission.setController(resourceCode);
newPermission.setPublicAccess(permission.isPermissionPublic());
newPermission.setLoginAccess(permission.isPermissionLogin());
newPermission.setWithin(permission.isPermissionWithin());
newPermission.setServiceCode(serviceName);
if (permissionDTO == null) {
//插入操作
PermissionDTO returnPermission = insertSelective(newPermission);
if (returnPermission != null) {
insertRolePermission(returnPermission, initRoleMap, roles);
logger.debug("###insert permission, {}", newPermission);
}
} else {
//更新操作
newPermission.setObjectVersionNumber(permissionDTO.getObjectVersionNumber());
newPermission.setId(permissionDTO.getId());
if (!permissionDTO.equals(newPermission)) {
if (permissionMapper.updateByPrimaryKeySelective(newPermission) != 1) {
throw new CommonException("error.permission.update");
}
}
updateRolePermission(newPermission, initRoleMap, roles);
logger.debug("###update permission, {}", newPermission);
}
return code;
}
private String processPermission(SwaggerExtraData extraData, String path, Map.Entry<String, JsonNode> methodNode,
String serviceCode, String resourceCode, Map<String, RoleDTO> initRoleMap) {
String[] roles = null;
if (extraData.getPermission() != null) {
roles = extraData.getPermission().getRoles();
}
String method = methodNode.getKey();
String description = methodNode.getValue().get("summary").asText();
PermissionData permission = extraData.getPermission();
String action = permission.getAction();
String code = serviceCode + "." + resourceCode + "." + action;
PermissionDTO newPermission = new PermissionDTO();
newPermission.setCode(code);
newPermission.setPath(path);
newPermission.setMethod(method);
newPermission.setResourceLevel(permission.getPermissionLevel());
newPermission.setDescription(description);
newPermission.setAction(action);
newPermission.setController(resourceCode);
newPermission.setPublicAccess(permission.isPermissionPublic());
newPermission.setLoginAccess(permission.isPermissionLogin());
newPermission.setWithin(permission.isPermissionWithin());
newPermission.setServiceCode(serviceCode);
PermissionDTO permissionDTO = selectByCode(code);
if (permissionDTO == null) {
//插入操作
PermissionDTO returnPermission = insertSelective(newPermission);
if (returnPermission != null) {
insertRolePermission(returnPermission, initRoleMap, roles);
logger.debug("###insert permission, {}", newPermission);
}
} else {
//更新操作
newPermission.setId(permissionDTO.getId());
newPermission.setObjectVersionNumber(permissionDTO.getObjectVersionNumber());
if (!permissionDTO.equals(newPermission)) {
if (permissionMapper.updateByPrimaryKeySelective(newPermission) != 1) {
throw new CommonException("error.permission.update");
}
}
updateRolePermission(newPermission, initRoleMap, roles);
logger.debug("###update permission, {}", newPermission);
}
return code;
}
private String processResourceCode(JsonNode tags) {
String resourceCode = null;
boolean illegal = true;
List<String> illegalTags = new ArrayList<>();
for (int i = 0; i < tags.size(); i++) {
String tag = tags.get(i).asText();
//添加choerodon-eureka例外的以-endpoint结尾的tag,
if (tag.endsWith("-controller")) {
illegal = false;
resourceCode = tag.substring(0, tag.length() - "-controller".length());
} else if (tag.endsWith("-endpoint")) {
illegal = false;
resourceCode = tag.substring(0, tag.length() - "-endpoint".length());
} else {
illegalTags.add(tag);
}
}
if (illegal) {
logger.warn("skip the controller/endpoint because of the illegal tags {}, please ensure the controller is end with ##Controller## or ##EndPoint##", illegalTags);
}
return resourceCode;
}
private void updateRolePermission(PermissionDTO permission, Map<String, RoleDTO> initRoleMap, String[] roles) {
Long permissionId = permission.getId();
String level = permission.getResourceLevel();
RoleDTO role = getRoleByLevel(initRoleMap, level);
if (role != null) {
RolePermissionDTO rp = new RolePermissionDTO();
rp.setRoleId(role.getId());
rp.setPermissionId(permissionId);
if (rolePermissionMapper.selectOne(rp) == null) {
if (rolePermissionMapper.insert(rp) != 1) {
throw new CommonException("error.rolePermission.insert");
}
}
}
List<RoleDTO> roleList = roleMapper.selectInitRolesByPermissionId(permissionId);
//删掉除去SITE_ADMINISTRATOR,ORGANIZATION_ADMINISTRATOR,PROJECT_ADMINISTRATOR的所有role_permission关系
for (RoleDTO roleDTO : roleList) {
String code = roleDTO.getCode();
if (!InitRoleCode.SITE_ADMINISTRATOR.equals(code)
&& !InitRoleCode.PROJECT_ADMINISTRATOR.equals(code)
&& !InitRoleCode.ORGANIZATION_ADMINISTRATOR.equals(code)) {
RolePermissionDTO rolePermission = new RolePermissionDTO();
rolePermission.setRoleId(roleDTO.getId());
rolePermission.setPermissionId(permissionId);
rolePermissionMapper.delete(rolePermission);
}
}
if (roles != null) {
processRolePermission(initRoleMap, roles, permissionId, level);
}
}
/**
* 先根据permission level关联相应层级的管理员角色
* level=site -> SITE_ADMINISTRATOR
* level=organization -> ORGANIZATION_ADMINISTRATOR
* level=project -> PROJECT_ADMINISTRATOR
*/
private void insertRolePermission(PermissionDTO permission, Map<String, RoleDTO> initRoleMap, String[] roles) {
Long permissionId = permission.getId();
String level = permission.getResourceLevel();
RoleDTO role = getRoleByLevel(initRoleMap, level);
if (role != null) {
RolePermissionDTO dto = new RolePermissionDTO();
dto.setRoleId(role.getId());
dto.setPermissionId(permissionId);
if (rolePermissionMapper.insert(dto) != 1) {
throw new CommonException("error.rolePermission.insert");
}
}
//roles不为空,关联自定义角色
if (roles != null) {
processRolePermission(initRoleMap, roles, permissionId, level);
}
}
private void processRolePermission(Map<String, RoleDTO> initRoleMap, String[] roles, Long permissionId, String level) {
Set<String> roleSet = new HashSet<>(Arrays.asList(roles));
for (String roleCode : roleSet) {
RoleDTO role = initRoleMap.get(roleCode);
if (role == null) {
//找不到code,说明没有初始化进去角色或者角色code拼错了
logger.info("can not find the role, role code is : {}", roleCode);
} else {
if (level.equals(role.getResourceLevel())) {
RolePermissionDTO rp = new RolePermissionDTO();
rp.setRoleId(role.getId());
rp.setPermissionId(permissionId);
if (rolePermissionMapper.selectOne(rp) == null) {
if (rolePermissionMapper.insert(rp) != 1) {
throw new CommonException("error.rolePermission.insert");
}
}
} else {
logger.info("init role level does not match the permission level, permission id: {}, level: {}, @@ role code: {}, level: {}",
permissionId, level, role.getCode(), role.getResourceLevel());
}
}
}
}
private RoleDTO getRoleByLevel(Map<String, RoleDTO> initRoleMap, String level) {
if (ResourceLevel.SITE.value().equals(level)) {
return initRoleMap.get(InitRoleCode.SITE_ADMINISTRATOR);
}
if (ResourceLevel.ORGANIZATION.value().equals(level)) {
return initRoleMap.get(InitRoleCode.ORGANIZATION_ADMINISTRATOR);
}
if (ResourceLevel.PROJECT.value().equals(level)) {
return initRoleMap.get(InitRoleCode.PROJECT_ADMINISTRATOR);
}
return null;
}
@Override
public Map<String, RoleDTO> queryInitRoleByCode() {
Map<String, RoleDTO> map = new HashMap<>(10);
String[] codes = InitRoleCode.values();
for (String code : codes) {
RoleDTO dto = new RoleDTO();
dto.setCode(code);
RoleDTO role = roleMapper.selectOne(dto);
if (role == null) {
logger.info("init roles do not exist, code: {}", code);
}
map.put(code, role);
}
return map;
}
}
<|start_filename|>react/src/app/iam/stores/organization/user/index.js<|end_filename|>
// Barrel file: re-export the organization user store as the default export.
export { default } from './UserStore';
<|start_filename|>react/src/app/iam/containers/user/user-info/UserInfo.js<|end_filename|>
import React, { Component } from 'react';
import { inject, observer } from 'mobx-react';
import { Button, Form, Icon, Input, Select } from 'choerodon-ui';
import { FormattedMessage, injectIntl } from 'react-intl';
import { Content, Header, Page, Permission } from '@choerodon/boot';
import UserInfoStore from '../../../stores/user/user-info/UserInfoStore';
import AvatarUploader from './AvatarUploader';
import './Userinfo.scss';
import TextEditToggle from './textEditToggle';
import PhoneWrapper from './phoneWrapper';
const { Text, Edit } = TextEditToggle;
const Option = Select.Option;
const intlPrefix = 'user.userinfo';
@Form.create({})
@injectIntl
@inject('AppState')
@observer
export default class UserInfo extends Component {
  constructor(props) {
    super(props);
    // NOTE(review): presumably handed to a child input for focus management;
    // its consumer is not visible in this chunk — confirm before relying on it.
    this.editFocusInput = React.createRef();
  }
state = {
submitting: false,
visible: false,
phoneZone: UserInfoStore.getUserInfo.internationalTelCode ? UserInfoStore.getUserInfo.internationalTelCode.split('+')[1] : undefined,
phone: UserInfoStore.getUserInfo.phone,
};
  // NOTE(review): componentWillMount is a deprecated React lifecycle method;
  // consider componentDidMount/constructor when this file is next touched.
  componentWillMount() {
    this.loadUserInfo();
  }
loadUserInfo = () => {
UserInfoStore.setUserInfo(this.props.AppState.getUserInfo);
};
checkEmailAddress = (rule, value, callback) => {
const { edit, intl } = this.props;
if (!edit || value !== this.state.userInfo.email) {
UserInfoStore.checkEmailAddress(value).then(({ failed }) => {
if (failed) {
callback(intl.formatMessage({ id: `${intlPrefix}.email.used.msg` }));
} else {
callback();
}
}).catch(Choerodon.handleResponseError);
} else {
callback();
}
};
openAvatorUploader = () => {
this.setState({
visible: true,
});
};
handleVisibleChange = (visible) => {
this.setState({ visible });
};
handleSubmitPhone = (value) => {
const originUser = UserInfoStore.getUserInfo;
const user = {
...originUser,
...value,
imageUrl: UserInfoStore.getAvatar,
};
user.internationalTelCode = user.internationalTelCode ? `+${value.internationalTelCode}` : '';
user.phone = user.phone || '';
this.submitForm(user);
}
handleSubmit = (formKey, value) => {
const originUser = UserInfoStore.getUserInfo;
this.setState({
submitting: true,
});
const user = {
...originUser,
[formKey]: value,
imageUrl: UserInfoStore.getAvatar,
};
this.submitForm(user);
};
submitForm = (user) => {
const { AppState, intl } = this.props;
user.loginName = null;
UserInfoStore.updateUserInfo(user).then((data) => {
if (data.failed) {
Choerodon.prompt(data.message);
} else {
this.props.form.resetFields();
UserInfoStore.setUserInfo(data);
Choerodon.prompt(intl.formatMessage({ id: 'modify.success' }));
this.setState({ submitting: false });
AppState.setUserInfo(data);
}
}).catch(() => {
Choerodon.prompt(intl.formatMessage({ id: 'modify.error' }));
this.setState({ submitting: false });
});
}
getLanguageOptions() {
let language;
if (language) {
return language.content.map(({ code, name }) => (<Option key={code} value={code}>{name}</Option>));
} else {
return [
<Option key="zh_CN" value="zh_CN"><FormattedMessage id={`${intlPrefix}.language.zhcn`} /></Option>,
// <Option key="en_US" value="en_US"><FormattedMessage id={`${intlPrefix}.language.enus`}/></Option>,
];
}
}
getTimeZoneOptions() {
const timeZone = [];
if (timeZone.length > 0) {
return timeZone.map(({ code, description }) => (<Option key={code} value={code}>{description}</Option>));
} else {
return [
<Option key="CTT" value="CTT"><FormattedMessage id={`${intlPrefix}.timezone.ctt`} /></Option>,
// <Option key="EST" value="EST"><FormattedMessage id={`${intlPrefix}.timezone.est`}/></Option>,
];
}
}
getAvatar({ id, realName }) {
const { visible } = this.state;
const avatar = UserInfoStore.getAvatar;
return (
<div className="user-info-avatar-wrap">
<div
className="user-info-avatar"
style={
avatar && {
backgroundImage: `url(${Choerodon.fileServer(avatar)})`,
}
}
>
{!avatar && realName && realName.charAt(0)}
<Permission
service={['iam-service.user.uploadPhoto']}
type="site"
>
<Button className="user-info-avatar-button" onClick={this.openAvatorUploader}>
<div className="user-info-avatar-button-icon">
<Icon type="photo_camera" />
</div>
</Button>
<AvatarUploader id={id} visible={visible} onVisibleChange={this.handleVisibleChange} />
</Permission>
</div>
</div>
);
}
renderForm(user) {
const { intl } = this.props;
const { loginName, realName, email, language, timeZone, phone, ldap, organizationName, organizationCode, internationalTelCode } = user;
return (
<Form layout="vertical" className="user-info">
<div className="user-info-top-container">
<div className="user-info-avatar-wrap-container">
{this.getAvatar(user)}
</div>
<div className="user-info-login-info">
<div>{loginName}</div>
<div>{intl.formatMessage({ id: `${intlPrefix}.source` })}:{ldap ? intl.formatMessage({ id: `${intlPrefix}.ldap` }) : intl.formatMessage({ id: `${intlPrefix}.notldap` })}</div>
<div>
<span>{intl.formatMessage({ id: `${intlPrefix}.name` })}:</span>
<TextEditToggle
formKey="realName"
formStyle={{ width: '80px' }}
originData={realName}
className="user-info-info-container-account-content-realName"
onSubmit={value => this.handleSubmit('realName', value)}
validate={{
validateFirst: true,
}}
rules={[
{
required: true,
whitespace: true,
message: intl.formatMessage({ id: `${intlPrefix}.name.require.msg` }),
},
]}
>
<Text style={{ fontSize: '13px' }}>
<span>{realName}</span>
</Text>
<Edit>
<Input autoComplete="off" />
</Edit>
</TextEditToggle>
</div>
</div>
</div>
<div className="user-info-info-container">
<div className="user-info-info-container-account">
<div>{intl.formatMessage({ id: `${intlPrefix}.account.info` })}</div>
<div>
<div>
<Icon type="markunread" className="form-icon" />
<span className="user-info-info-container-account-title">{intl.formatMessage({ id: `${intlPrefix}.email` })}:</span>
<TextEditToggle
formStyle={{ width: '289px' }}
formKey="email"
originData={email}
className="user-info-info-container-account-content"
onSubmit={value => this.handleSubmit('email', value)}
validate={{
validateTrigger: 'onBlur',
validateFirst: true,
}}
rules={[
{
required: true,
whitespace: true,
message: intl.formatMessage({ id: `${intlPrefix}.email.require.msg` }),
},
{
type: 'email',
message: intl.formatMessage({ id: `${intlPrefix}.email.pattern.msg` }),
},
{
validator: this.checkEmailAddress,
},
]}
>
<Text>
<span style={{ width: '300px' }}>{email}</span>
</Text>
<Edit>
<Input autoComplete="off" />
</Edit>
</TextEditToggle>
</div>
<div>
<Icon type="phone_iphone" className="form-icon" />
<span className="user-info-info-container-account-title">{intl.formatMessage({ id: `${intlPrefix}.phone` })}:</span>
<PhoneWrapper
initialPhone={phone}
initialCode={internationalTelCode}
onSubmit={value => this.handleSubmitPhone(value)}
/>
</div>
<div>
<Icon type="language" className="form-icon" />
<span className="user-info-info-container-account-title">{intl.formatMessage({ id: `${intlPrefix}.language` })}:</span>
<TextEditToggle
formKey="language"
originData={language}
className="user-info-info-container-account-content user-info-info-container-account-content-short"
formStyle={{ width: '80px' }}
>
<Text>
<span>{'简体中文'}</span>
</Text>
<Edit>
<Select
getPopupContainer={() => document.getElementsByClassName('page-content')[0]}
>
{this.getLanguageOptions()}
</Select>,
</Edit>
</TextEditToggle>
</div>
<div>
<Icon type="location_city" className="form-icon" />
<span className="user-info-info-container-account-title">{intl.formatMessage({ id: `${intlPrefix}.timezone` })}:</span>
<TextEditToggle
formKey="timeZone"
originData={timeZone || 'CTT'}
className="user-info-info-container-account-content user-info-info-container-account-content-short"
formStyle={{ width: '80px' }}
>
<Text>
<span>{'中国'}</span>
</Text>
<Edit>
<Select
getPopupContainer={() => document.getElementsByClassName('page-content')[0]}
>
{this.getTimeZoneOptions()}
</Select>,
</Edit>
</TextEditToggle>
</div>
</div>
</div>
<div className="user-info-info-container-account">
<div>{intl.formatMessage({ id: `${intlPrefix}.orginfo` })}</div>
<div>
<div>
<Icon type="domain" className="form-icon" />
<span className="user-info-info-container-account-title">{intl.formatMessage({ id: `${intlPrefix}.org.name` })}:</span>
<span className="user-info-info-container-account-content">{organizationName}</span>
</div>
<div>
<Icon type="copyright" className="form-icon" />
<span className="user-info-info-container-account-title">{intl.formatMessage({ id: `${intlPrefix}.org.code` })}:</span>
<span className="user-info-info-container-account-content">{organizationCode}</span>
</div>
</div>
</div>
</div>
</Form>
);
}
render() {
const user = UserInfoStore.getUserInfo;
return (
<Page
service={[
'iam-service.user.query',
'iam-service.user.check',
'iam-service.user.querySelf',
'iam-service.user.queryInfo',
'iam-service.user.updateInfo',
'iam-service.user.uploadPhoto',
'iam-service.user.queryProjects',
]}
>
<Header
title={<FormattedMessage id={`${intlPrefix}.header.title`} />}
>
{/* <Button onClick={this.refresh} icon="refresh"> */}
{/* <FormattedMessage id="refresh" /> */}
{/* </Button> */}
</Header>
<Content className="user-info-container">
{this.renderForm(user)}
</Content>
</Page>
);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/dto/BookMarkDTO.java<|end_filename|>
package io.choerodon.iam.infra.dto;
import io.choerodon.mybatis.entity.BaseDTO;
import io.swagger.annotations.ApiModelProperty;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
/**
* @author superlee
* @since 2019-04-23
*/
/**
 * Persistence entity for the {@code IAM_BOOK_MARK} table: a navigation
 * bookmark (name, target URL, icon, color, ordering) optionally owned by a
 * specific user. Validation messages use the {@code error.bookMark.*} keys.
 */
@Table(name = "IAM_BOOK_MARK")
public class BookMarkDTO extends BaseDTO {
    // Surrogate primary key, generated by the database (IDENTITY).
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @ApiModelProperty(value = "主键ID/非必填")
    private Long id;
    // Bookmark display name; required, 1-64 characters.
    @ApiModelProperty(value = "书签名称/必填")
    @NotEmpty(message = "error.bookMark.name.empty")
    @Size(max = 64, min = 1, message = "error.bookMark.name.length")
    private String name;
    // Bookmark target URL; required, 1-255 characters.
    @ApiModelProperty(value = "书签url/必填")
    @NotEmpty(message = "error.bookMark.url.empty")
    @Size(max = 255, min = 1, message = "error.bookMark.url.length")
    private String url;
    // Icon code shown for the bookmark; required, 1-128 characters.
    @ApiModelProperty(value = "书签图标code/必填")
    @NotEmpty(message = "error.bookMark.icon.empty")
    @Size(max = 128, min = 1, message = "error.bookMark.icon.length")
    private String icon;
    // Icon color; optional.
    @ApiModelProperty(value = "书签图标颜色/非必填")
    private String color;
    // Ordering weight within the user's bookmark list; required.
    @ApiModelProperty(value = "书签顺序/必填")
    @NotNull(message = "error.bookMark.sort.null")
    private Long sort;
    // Owning user's id; optional (null presumably means not user-scoped — TODO confirm).
    @ApiModelProperty(value = "书签用户ID/非必填")
    private Long userId;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getUrl() {
        return url;
    }
    public void setUrl(String url) {
        this.url = url;
    }
    public String getIcon() {
        return icon;
    }
    public void setIcon(String icon) {
        this.icon = icon;
    }
    public String getColor() {
        return color;
    }
    public void setColor(String color) {
        this.color = color;
    }
    public Long getSort() {
        return sort;
    }
    public void setSort(Long sort) {
        this.sort = sort;
    }
    public Long getUserId() {
        return userId;
    }
    public void setUserId(Long userId) {
        this.userId = userId;
    }
}
<|start_filename|>react/src/app/iam/containers/global/role/Role.js<|end_filename|>
import React, { Component } from 'react';
import { withRouter } from 'react-router-dom';
import { inject, observer } from 'mobx-react';
import querystring from 'query-string';
import { Button, Form, Icon, Table, Select, Menu, Dropdown } from 'choerodon-ui';
import { injectIntl, FormattedMessage } from 'react-intl';
import { Action, Content, Header, Page, Permission } from '@choerodon/boot';
import { RESOURCES_LEVEL } from '@choerodon/boot/lib/containers/common/constants';
import RoleStore from '../../../stores/global/role/RoleStore';
import './Role.scss';
import MouseOverWrapper from '../../../components/mouseOverWrapper';
import StatusTag from '../../../components/statusTag';
const intlPrefix = 'global.role';
const levels = RESOURCES_LEVEL.split(',');
@Form.create({})
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class Role extends Component {
constructor(props) {
super(props);
const queryObj = querystring.parse(props.location.search);
this.state = {
selectedRoleIds: {},
params: [],
filters: {},
pagination: {
current: 1,
pageSize: 10,
total: 0,
},
sort: {
columnKey: 'id',
order: 'descend',
},
level: queryObj.level || levels[0],
};
}
// state = this.getInitState();
componentDidMount() {
this.loadRole();
}
getInitStat() {
return {
id: '',
selectedRoleIds: {},
params: [],
filters: {},
pagination: {
current: 1,
pageSize: 10,
total: 0,
},
sort: {
columnKey: 'id',
order: 'descend',
},
selectedData: '',
level: levels[0],
};
}
getSelectedRowKeys() {
return Object.keys(this.state.selectedRoleIds).map(id => Number(id));
}
showModal = (ids) => {
this.props.history.push(`role/create?level=${this.state.level}&roleId=${ids}`);
}
goCreate = () => {
RoleStore.setChosenLevel('');
RoleStore.setLabel([]);
RoleStore.setSelectedRolesPermission([]);
this.props.history.push(`role/create?level=${this.state.level}`);
};
loadRole(paginationIn, sortIn, filtersIn, paramsIn) {
const {
pagination: paginationState,
sort: sortState,
filters: filtersState,
params: paramsState,
level,
} = this.state;
const pagination = paginationIn || paginationState;
const sort = sortIn || sortState;
const filters = filtersIn || filtersState;
const params = paramsIn || paramsState;
this.setState({ filters });
RoleStore.loadRole(level, pagination, sort, filters, params)
.then((data) => {
RoleStore.setIsLoading(false);
RoleStore.setRoles(data.list || []);
this.setState({
sort,
filters,
params,
pagination: {
current: data.pageNum,
pageSize: data.pageSize,
total: data.total,
},
});
})
.catch((error) => {
Choerodon.handleResponseError(error);
});
}
linkToChange = (url) => {
this.props.history.push(`${url}`);
};
handleRefresh = () => {
// this.setState(this.getInitState(), () => {
// this.loadRole();
// });
this.loadRole();
};
handleEnable = (record) => {
const { intl } = this.props;
if (record.enabled) {
RoleStore.disableRole(record.id).then(() => {
Choerodon.prompt(intl.formatMessage({ id: 'disable.success' }));
this.loadRole();
});
} else {
RoleStore.enableRole(record.id).then(() => {
Choerodon.prompt(intl.formatMessage({ id: 'enable.success' }));
this.loadRole();
});
}
};
changeSelects = (selectedRowKeys, selectedRows) => {
const { selectedRoleIds } = this.state;
Object.keys(selectedRoleIds).forEach((id) => {
if (selectedRowKeys.indexOf(Number(id)) === -1) {
delete selectedRoleIds[id];
}
});
selectedRows.forEach(({ id, level }) => {
selectedRoleIds[id] = level;
});
this.setState({
selectedRoleIds,
});
};
handlePageChange = (pagination, filters, sort, params) => {
this.loadRole(pagination, sort, filters, params);
};
handleChangeLevel = ({ key }) => {
const { level } = this.state;
if (key !== level) {
this.setState({
level: key,
}, () => this.loadRole());
}
}
createByThis(record) {
this.linkToChange(`role/create?level=${this.state.level}&base=${record.id}`);
}
createByMultiple = () => {
this.createBased();
};
createBased = () => {
const ids = this.getSelectedRowKeys();
this.linkToChange(`role/create?level=${this.state.level}&base=${ids.join(',')}`);
};
renderLevel(text) {
if (text === 'organization') {
return <FormattedMessage id="organization" />;
} else if (text === 'project') {
return <FormattedMessage id="project" />;
} else {
return <FormattedMessage id="global" />;
}
}
renderLevelSelect = () => {
const menu = (
<Menu onClick={this.handleChangeLevel}>
{
levels.filter(v => v !== 'user').map(level => (
<Menu.Item key={level}>
{this.renderLevel(level)}
</Menu.Item>
))
}
</Menu>
);
return (
<Dropdown overlay={menu} trigger={['click']} overlayClassName="c7n-role-popover">
<a className="c7n-dropdown-link" href="#">
{this.renderLevel(this.state.level)} <Icon type="arrow_drop_down" />
</a>
</Dropdown>
);
}
render() {
const { intl, AppState } = this.props;
const { sort: { columnKey, order }, pagination, filters, params } = this.state;
const selectedRowKeys = this.getSelectedRowKeys();
const columns = [{
dataIndex: 'id',
key: 'id',
hidden: true,
sortOrder: columnKey === 'id' && order,
}, {
title: <FormattedMessage id="name" />,
dataIndex: 'name',
key: 'name',
width: '25%',
filters: [],
sorter: true,
sortOrder: columnKey === 'name' && order,
filteredValue: filters.name || [],
render: text => (
<MouseOverWrapper text={text} width={0.2}>
{text}
</MouseOverWrapper>
),
}, {
title: <FormattedMessage id="code" />,
dataIndex: 'code',
key: 'code',
width: '25%',
filters: [],
// sorter: true,
// sortOrder: columnKey === 'code' && order,
filteredValue: filters.code || [],
render: text => (
<MouseOverWrapper text={text} width={0.2}>
{text}
</MouseOverWrapper>
),
}, {
title: <FormattedMessage id="level" />,
dataIndex: 'level',
key: 'level',
filters: [
{
text: intl.formatMessage({ id: 'global' }),
value: 'site',
}, {
text: intl.formatMessage({ id: 'organization' }),
value: 'organization',
}, {
text: intl.formatMessage({ id: 'project' }),
value: 'project',
}],
render: text => this.renderLevel(text),
sorter: true,
sortOrder: columnKey === 'level' && order,
filteredValue: filters.level || [],
}, {
title: <FormattedMessage id="source" />,
dataIndex: 'builtIn',
key: 'builtIn',
filters: [{
text: intl.formatMessage({ id: `${intlPrefix}.builtin.predefined` }),
value: 'true',
}, {
text: intl.formatMessage({ id: `${intlPrefix}.builtin.custom` }),
value: 'false',
}],
render: (text, record) => (
<StatusTag
mode="icon"
name={intl.formatMessage({ id: record.builtIn ? 'predefined' : 'custom' })}
colorCode={record.builtIn ? 'PREDEFINE' : 'CUSTOM'}
/>
),
sorter: true,
sortOrder: columnKey === 'builtIn' && order,
filteredValue: filters.builtIn || [],
}, {
title: <FormattedMessage id="status" />,
dataIndex: 'enabled',
key: 'enabled',
filters: [{
text: intl.formatMessage({ id: 'enable' }),
value: 'true',
}, {
text: intl.formatMessage({ id: 'disable' }),
value: 'false',
}],
render: enabled => (<StatusTag mode="icon" name={intl.formatMessage({ id: enabled ? 'enable' : 'disable' })} colorCode={enabled ? 'COMPLETED' : 'DISABLE'} />),
// sorter: true,
// sortOrder: columnKey === 'enabled' && order,
filteredValue: filters.enabled || [],
}, {
title: '',
key: 'action',
align: 'right',
render: (text, record) => {
const actionDatas = [{
service: ['iam-service.role.createBaseOnRoles'],
type: 'site',
icon: '',
text: intl.formatMessage({ id: `${intlPrefix}.create.byone` }),
action: this.createByThis.bind(this, record),
}, {
service: ['iam-service.role.update'],
icon: '',
type: 'site',
text: intl.formatMessage({ id: 'modify' }),
action: this.showModal.bind(this, record.id),
}];
if (record.enabled) {
actionDatas.push({
service: ['iam-service.role.disableRole'],
icon: '',
type: 'site',
text: intl.formatMessage({ id: 'disable' }),
action: this.handleEnable.bind(this, record),
});
} else {
actionDatas.push({
service: ['iam-service.role.enableRole'],
icon: '',
type: 'site',
text: intl.formatMessage({ id: 'enable' }),
action: this.handleEnable.bind(this, record),
});
}
return <Action data={actionDatas} />;
},
}];
const rowSelection = {
selectedRowKeys,
onChange: this.changeSelects,
};
return (
<Page
service={[
'iam-service.role.createBaseOnRoles',
'iam-service.role.update',
'iam-service.role.disableRole',
'iam-service.role.enableRole',
'iam-service.role.create',
'iam-service.role.check',
'iam-service.role.listRolesWithUserCountOnOrganizationLevel',
'iam-service.role.listRolesWithUserCountOnProjectLevel',
'iam-service.role.list',
'iam-service.role.listRolesWithUserCountOnSiteLevel',
'iam-service.role.queryWithPermissionsAndLabels',
'iam-service.role.pagingQueryUsersByRoleIdOnOrganizationLevel',
'iam-service.role.pagingQueryUsersByRoleIdOnProjectLevel',
'iam-service.role.pagingQueryUsersByRoleIdOnSiteLevel',
]}
className="choerodon-role"
>
<Header
title={<FormattedMessage id={`${intlPrefix}.header.title`} />}
>
{this.renderLevelSelect()}
<Permission
service={['iam-service.role.create']}
>
<Button
icon="playlist_add"
onClick={this.goCreate}
style={{ marginLeft: 30 }}
>
<FormattedMessage id={`${intlPrefix}.create`} />
</Button>
</Permission>
<Permission
service={['iam-service.role.createBaseOnRoles']}
>
<Button
icon="content_copy"
onClick={this.createByMultiple}
disabled={!selectedRowKeys.length}
>
<FormattedMessage id={`${intlPrefix}.create.byselect`} />
</Button>
</Permission>
<Button
onClick={this.handleRefresh}
icon="refresh"
>
<FormattedMessage id="refresh" />
</Button>
</Header>
<Content
code={intlPrefix}
values={{ name: AppState.getSiteInfo.systemName || 'Choerodon' }}
>
<Table
columns={columns}
dataSource={RoleStore.getRoles}
pagination={pagination}
rowSelection={rowSelection}
rowKey={record => record.id}
filters={params}
onChange={this.handlePageChange}
loading={RoleStore.getIsLoading}
filterBarPlaceholder={intl.formatMessage({ id: 'filtertable' })}
/>
</Content>
</Page>
);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/dto/LookupValueDTO.java<|end_filename|>
package io.choerodon.iam.infra.dto;
import io.choerodon.mybatis.annotation.MultiLanguage;
import io.choerodon.mybatis.annotation.MultiLanguageField;
import io.choerodon.mybatis.entity.BaseDTO;
import io.swagger.annotations.ApiModelProperty;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.validation.constraints.NotEmpty;
/**
* @author superlee
* @since 2019-04-23
*/
/**
 * Persistence entity for the {@code fd_lookup_value} table: one value of a
 * lookup (code table). The description is stored per-language via the
 * {@code @MultiLanguage}/{@code @MultiLanguageField} annotations.
 */
@MultiLanguage
@Table(name = "fd_lookup_value")
public class LookupValueDTO extends BaseDTO {
    // Surrogate primary key, generated by the database (IDENTITY).
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @ApiModelProperty(value = "主键")
    private Long id;
    // Value code within the parent lookup; must not be empty.
    @ApiModelProperty(value = "快码值code")
    @NotEmpty(message = "error.code.empty")
    private String code;
    // Foreign key to the owning lookup; hidden from the swagger model.
    @ApiModelProperty(value = "所属快码id", hidden = true)
    private Long lookupId;
    // Human-readable description; multi-language (stored per locale).
    @MultiLanguageField
    @ApiModelProperty(value = "快码值描述")
    private String description;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getCode() {
        return code;
    }
    public void setCode(String code) {
        this.code = code;
    }
    public Long getLookupId() {
        return lookupId;
    }
    public void setLookupId(Long lookupId) {
        this.lookupId = lookupId;
    }
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/UserController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import javax.validation.Valid;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.api.dto.*;
import io.choerodon.iam.infra.dto.OrganizationDTO;
import io.choerodon.iam.infra.dto.PasswordPolicyDTO;
import io.choerodon.iam.infra.dto.ProjectDTO;
import io.choerodon.iam.infra.dto.UserDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.choerodon.swagger.annotation.CustomPageRequest;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import springfox.documentation.annotations.ApiIgnore;
import io.choerodon.core.base.BaseController;
import io.choerodon.core.exception.NotFoundException;
import io.choerodon.core.iam.InitRoleCode;
import io.choerodon.iam.app.service.PasswordPolicyService;
import io.choerodon.iam.app.service.UserService;
import io.choerodon.iam.infra.annotation.NamingRuleTrans;
import io.choerodon.iam.infra.common.utils.ParamUtils;
/**
* @author superlee
*/
@RestController
@RequestMapping(value = "/v1/users")
public class UserController extends BaseController {
private UserService userService;
private PasswordPolicyService passwordPolicyService;
public UserController(UserService userService, PasswordPolicyService passwordPolicyService) {
this.userService = userService;
this.passwordPolicyService = passwordPolicyService;
}
@Permission(type = ResourceType.SITE, permissionLogin = true)
@ApiOperation(value = "查询当前用户信息")
@GetMapping(value = "/self")
public ResponseEntity<UserDTO> querySelf() {
return new ResponseEntity<>(userService.querySelf(), HttpStatus.OK);
}
@Permission(type = ResourceType.SITE, permissionLogin = true)
@ApiOperation(value = "根据id查询用户信息")
@GetMapping(value = "/{id}/info")
public ResponseEntity<UserDTO> queryInfo(@PathVariable Long id) {
return Optional.ofNullable(userService.queryInfo(id))
.map(result -> new ResponseEntity<>(result, HttpStatus.OK))
.orElseThrow(NotFoundException::new);
}
@Permission(permissionWithin = true)
@GetMapping(value = "/registrant")
public ResponseEntity<RegistrantInfoDTO> queryInfoSkipLogin(
@RequestParam(value = "org_code") String orgCode) {
return Optional.ofNullable(userService.queryRegistrantInfoAndAdmin(orgCode))
.map(result -> new ResponseEntity<>(result, HttpStatus.OK))
.orElseThrow(NotFoundException::new);
}
@Permission(type = ResourceType.SITE, permissionLogin = true)
@ApiOperation(value = "修改用户信息")
@PutMapping(value = "/{id}/info")
public ResponseEntity<UserDTO> updateInfo(@PathVariable Long id,
@RequestBody UserDTO userDTO) {
userDTO.setId(id);
if (userDTO.getObjectVersionNumber() == null) {
throw new CommonException("error.user.objectVersionNumber.null");
}
userDTO.setAdmin(null);
//不能修改状态
userDTO.setEnabled(null);
userDTO.setLdap(null);
userDTO.setOrganizationId(null);
userDTO.setLoginName(null);
return new ResponseEntity<>(userService.updateInfo(userDTO, true), HttpStatus.OK);
}
/**
* 上传头像到文件服务返回头像url
*/
@Permission(type = ResourceType.SITE, permissionLogin = true)
@ApiOperation(value = "用户头像上传")
@PostMapping(value = "/{id}/upload_photo")
public ResponseEntity<String> uploadPhoto(@PathVariable Long id,
@RequestPart MultipartFile file) {
return new ResponseEntity<>(userService.uploadPhoto(id, file), HttpStatus.OK);
}
/**
* 上传头像,支持裁剪,旋转,并保存
*/
@Permission(type = ResourceType.SITE, permissionLogin = true)
@ApiOperation(value = "用户头像上传裁剪,旋转并保存")
@PostMapping(value = "/{id}/save_photo")
public ResponseEntity<String> savePhoto(@PathVariable Long id,
@RequestPart MultipartFile file,
@ApiParam(name = "rotate", value = "顺时针旋转的角度", example = "90")
@RequestParam(required = false) Double rotate,
@ApiParam(name = "startX", value = "裁剪的X轴", example = "100")
@RequestParam(required = false, name = "startX") Integer axisX,
@ApiParam(name = "startY", value = "裁剪的Y轴", example = "100")
@RequestParam(required = false, name = "startY") Integer axisY,
@ApiParam(name = "endX", value = "裁剪的宽度", example = "200")
@RequestParam(required = false, name = "endX") Integer width,
@ApiParam(name = "endY", value = "裁剪的高度", example = "200")
@RequestParam(required = false, name = "endY") Integer height) {
return new ResponseEntity<>(userService.savePhoto(id, file, rotate, axisX, axisY, width, height), HttpStatus.OK);
}
@Permission(type = ResourceType.ORGANIZATION, permissionLogin = true)
@ApiOperation(value = "查询用户所在组织列表")
@GetMapping(value = "/{id}/organizations")
public ResponseEntity<List<OrganizationDTO>> queryOrganizations(@PathVariable Long id,
@RequestParam(required = false, name = "included_disabled")
boolean includedDisabled) {
return new ResponseEntity<>(userService.queryOrganizations(id, includedDisabled), HttpStatus.OK);
}
@Permission(type = ResourceType.ORGANIZATION, permissionLogin = true)
@ApiOperation(value = "查询用户所在项目列表")
@GetMapping(value = "/{id}/projects")
public ResponseEntity<List<ProjectDTO>> queryProjects(@PathVariable Long id,
@RequestParam(required = false, name = "included_disabled")
boolean includedDisabled) {
return new ResponseEntity<>(userService.queryProjects(id, includedDisabled), HttpStatus.OK);
}
@Permission(type = ResourceType.ORGANIZATION, permissionLogin = true)
@ApiOperation(value = "分页查询当前登录用户所有项目列表")
@GetMapping(value = "/self/projects/paging_query")
@CustomPageRequest
public ResponseEntity<PageInfo<ProjectDTO>> pagingQueryProjectsSelf(@ApiIgnore
@SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
@NamingRuleTrans ProjectDTO projectDTO,
@RequestParam(required = false) String[] params) {
return new ResponseEntity<>(userService.pagingQueryProjectsSelf(projectDTO, pageRequest, ParamUtils.arrToStr(params)), HttpStatus.OK);
}
@Permission(type = ResourceType.ORGANIZATION, permissionLogin = true)
@ApiOperation(value = "分页查询当前登录用户所有组织列表")
@GetMapping(value = "/self/organizations/paging_query")
@CustomPageRequest
public ResponseEntity<PageInfo<OrganizationDTO>> pagingQueryOrganizationsSelf(@ApiIgnore
@SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
@RequestParam(required = false) String name,
@RequestParam(required = false) String code,
@RequestParam(required = false) Boolean enabled,
@RequestParam(required = false) String[] params) {
OrganizationDTO organizationDTO = new OrganizationDTO();
organizationDTO.setName(name);
organizationDTO.setCode(code);
organizationDTO.setEnabled(enabled);
return new ResponseEntity<>(userService.pagingQueryOrganizationsSelf(organizationDTO, pageRequest, ParamUtils.arrToStr(params)), HttpStatus.OK);
}
/**
* @deprecated 已过期
*/
@ApiIgnore
@Deprecated
@Permission(type = ResourceType.ORGANIZATION, permissionLogin = true)
@ApiOperation(value = "查询当前用户在某组织下所在的项目列表")
@GetMapping(value = "/{id}/organizations/{organization_id}/projects")
public ResponseEntity<List<ProjectDTO>> queryProjectsByOrganizationId(@PathVariable Long id,
@PathVariable(name = "organization_id") Long organizationId) {
return new ResponseEntity<>(userService.queryProjectsByOrganizationId(id, organizationId), HttpStatus.OK);
}
/**
* @deprecated 已过期
*/
@ApiIgnore
@Deprecated
@Permission(type = ResourceType.SITE, permissionLogin = true)
@ApiOperation(value = "查询当前用户所在组织列表以及用户在该组织下所在的项目列表")
@GetMapping(value = "/self/organizations_projects")
public ResponseEntity<List<OrganizationDTO>> queryOrganizationWithProjects() {
return new ResponseEntity<>(userService.queryOrganizationWithProjects(), HttpStatus.OK);
}
@Permission(type = ResourceType.ORGANIZATION, permissionLogin = true)
@ApiOperation(value = "根据用户名查询用户信息")
@GetMapping
public ResponseEntity<UserDTO> query(@RequestParam(name = "login_name") String loginName) {
return new ResponseEntity<>(userService.queryByLoginName(loginName), HttpStatus.OK);
}
@Permission(type = ResourceType.SITE, permissionLogin = true)
@ApiOperation(value = "修改密码")
@PutMapping(value = "/{id}/password")
public ResponseEntity selfUpdatePassword(@PathVariable Long id,
@RequestBody @Valid UserPasswordDTO userPasswordDTO) {
userService.selfUpdatePassword(id, userPasswordDTO, true, true);
return new ResponseEntity(HttpStatus.OK);
}
@Permission(type = ResourceType.SITE, permissionPublic = true)
@ApiOperation(value = "用户信息校验")
@PostMapping(value = "/check")
public ResponseEntity check(@RequestBody UserDTO user) {
userService.check(user);
return new ResponseEntity(HttpStatus.OK);
}
/**
* 分页查询所有的admin用户
*
* @return 分页的admin用户
*/
@Permission(type = ResourceType.SITE)
@ApiOperation(value = "分页模糊查询管理员用户列表")
@GetMapping("/admin")
@CustomPageRequest
public ResponseEntity<PageInfo<UserDTO>> pagingQueryAdminUsers(
@ApiIgnore
@SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
@RequestParam(required = false, name = "loginName") String loginName,
@RequestParam(required = false, name = "realName") String realName,
@RequestParam(required = false, name = "enabled") Boolean enabled,
@RequestParam(required = false, name = "locked") Boolean locked,
@RequestParam(required = false, name = "params") String[] params
) {
UserDTO userDTO = new UserDTO();
userDTO.setLoginName(loginName);
userDTO.setRealName(realName);
userDTO.setEnabled(enabled);
userDTO.setLocked(locked);
return new ResponseEntity<>(userService.pagingQueryAdminUsers(pageRequest, userDTO, ParamUtils.arrToStr(params)), HttpStatus.OK);
}
@Permission(type = ResourceType.SITE)
@ApiOperation(value = "批量给用户添加管理员身份")
@PostMapping("/admin")
public ResponseEntity addDefaultUsers(@ModelAttribute("id") long[] ids) {
userService.addAdminUsers(ids);
return new ResponseEntity<>(HttpStatus.OK);
}
@Permission(type = ResourceType.SITE)
@ApiOperation(value = "清除用户的管理员身份")
@DeleteMapping("/admin/{id}")
public ResponseEntity deleteDefaultUser(@PathVariable long id) {
userService.deleteAdminUser(id);
return new ResponseEntity<>(HttpStatus.OK);
}
@Permission(permissionWithin = true)
@ApiOperation(value = "根据id批量查询用户信息列表")
@PostMapping(value = "/ids")
public ResponseEntity<List<UserDTO>> listUsersByIds(@RequestBody Long[] ids,
@RequestParam(value = "only_enabled", defaultValue = "true", required = false) Boolean onlyEnabled) {
return new ResponseEntity<>(userService.listUsersByIds(ids, onlyEnabled), HttpStatus.OK);
}
@Permission(permissionWithin = true)
@ApiOperation(value = "根据email批量查询用户信息列表")
@PostMapping(value = "/emails")
public ResponseEntity<List<UserDTO>> listUsersByEmails(@RequestBody String[] emails) {
return new ResponseEntity<>(userService.listUsersByEmails(emails), HttpStatus.OK);
}
@Permission(permissionWithin = true)
@ApiOperation(value = "根据email批量查询用户信息列表")
@PostMapping(value = "/login_names")
public ResponseEntity<List<UserDTO>> listUsersByLoginNames(@RequestBody String[] loginNames,
@RequestParam(value = "only_enabled", defaultValue = "true", required = false) Boolean onlyEnabled) {
return new ResponseEntity<>(userService.listUsersByLoginNames(loginNames, onlyEnabled), HttpStatus.OK);
}
/**
 * Page through the organizations a user belongs to, together with the roles
 * the user holds in each organization.
 *
 * @param pageRequest paging/sorting info (defaults to id desc)
 * @param id          id of the user being inspected
 * @param params      optional free-text filters, joined into one query string
 * @return one page of organizations with role information
 */
@Permission(type = ResourceType.SITE, permissionLogin = true)
@ApiOperation("根据id分页获取组织列表和角色")
@GetMapping("/{id}/organization_roles")
@CustomPageRequest
public ResponseEntity<PageInfo<OrganizationDTO>> pagingQueryOrganizationAndRolesById(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @PathVariable(value = "id") Long id,
        @RequestParam(value = "params", required = false) String[] params) {
    String param = ParamUtils.arrToStr(params);
    return ResponseEntity.ok(userService.pagingQueryOrganizationsWithRoles(pageRequest, id, param));
}
/**
 * Page through the projects a user belongs to, together with the roles the
 * user holds in each project.
 *
 * @param pageRequest paging/sorting info (defaults to id desc)
 * @param id          id of the user being inspected
 * @param params      optional free-text filters, joined into one query string
 * @return one page of projects with role information
 */
@Permission(type = ResourceType.SITE, permissionLogin = true)
@ApiOperation("根据id分页获取项目列表和角色")
@GetMapping("/{id}/project_roles")
@CustomPageRequest
public ResponseEntity<PageInfo<ProjectDTO>> pagingQueryProjectAndRolesById(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @PathVariable("id") Long id,
        @RequestParam(value = "params", required = false) String[] params) {
    String param = ParamUtils.arrToStr(params);
    return ResponseEntity.ok(userService.pagingQueryProjectAndRolesById(pageRequest, id, param));
}
/**
 * Create a new user and assign roles identified by role code in one call
 * (service-internal endpoint).
 *
 * @param userWithRoles the user payload plus the role codes to assign
 * @return the created user
 */
@Permission(permissionWithin = true)
@ApiOperation("新建用户,并根据角色code分配角色")
@PostMapping("/init_role")
public ResponseEntity<UserDTO> createUserAndAssignRoles(@RequestBody CreateUserWithRolesDTO userWithRoles) {
    UserDTO created = userService.createUserAndAssignRoles(userWithRoles);
    return ResponseEntity.ok(created);
}
/**
 * Return the ids of all users (service-internal endpoint).
 *
 * @return every user id known to the service
 */
@Permission(permissionWithin = true)
@ApiOperation("得到所有用户id")
@GetMapping("/ids")
public ResponseEntity<Long[]> getUserIds() {
    return ResponseEntity.ok(userService.listUserIds());
}
/**
 * Look up the password policy of the organization that owns the user with
 * the given email address.
 *
 * @param email email of the user whose organization's policy is wanted
 * @return the password policy of that organization
 */
@Permission(permissionPublic = true)
@ApiOperation(value = "根据用户邮箱查询对应组织下的密码策略")
@GetMapping("/password_policies")
public ResponseEntity<PasswordPolicyDTO> queryByUserEmail(@RequestParam(value = "email", required = false) String email) {
    // resolve email -> owning organization, then fetch that organization's policy
    return ResponseEntity.ok(passwordPolicyService.queryByOrgId(userService.queryOrgIdByEmail(email)));
}
/**
 * Return the organizations and projects (with role info) associated with a
 * user id.
 *
 * @param id the user id
 * @return the user's organizations and projects
 */
@Permission(permissionPublic = true)
@ApiOperation(value = "查询用户id对应的组织和项目")
@GetMapping("/{id}/organization_project")
public ResponseEntity<OrganizationProjectDTO> queryByUserIdOrganizationProject(@PathVariable("id") Long id) {
    OrganizationProjectDTO result = userService.queryByUserIdWithRoleOrganizationAndProject(id);
    return ResponseEntity.ok(result);
}
/**
 * Dashboard card data: totals of all users and newly-registered users.
 *
 * @return a map of counter name to value, as produced by the service layer
 */
@Permission(type = ResourceType.SITE, roles = {InitRoleCode.SITE_ADMINISTRATOR})
@ApiOperation(value = "卡片:新增用户统计")
@GetMapping("/new")
public ResponseEntity<Map<String, Object>> queryNewAndAllUsers() {
    return ResponseEntity.ok(userService.queryAllAndNewUsers());
}
/**
 * Page through every role a user holds, optionally scoped to one
 * organization and filtered by a free-text parameter.
 *
 * @param pageRequest    paging/sorting info (defaults to id desc)
 * @param id             id of the user being inspected
 * @param organizationId optional organization scope
 * @param params         optional free-text filter
 * @return one page of the user's roles
 */
@Permission(type = ResourceType.SITE, permissionLogin = true)
@ApiOperation("根据id分页获取用户所有角色列表")
@GetMapping("/{id}/roles")
@CustomPageRequest
public ResponseEntity<PageInfo<UserRoleDTO>> pagingQueryRole(@ApiIgnore
                                                             @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                             @PathVariable("id") Long id,
                                                             @RequestParam(required = false) Long organizationId,
                                                             @RequestParam(required = false) String params) {
    PageInfo<UserRoleDTO> page = userService.pagingQueryRole(pageRequest, params, id, organizationId);
    return ResponseEntity.ok(page);
}
/**
 * Complete a user's profile — update user name and password. Exposed for the
 * organization service to call over Feign.
 *
 * @param id          id of the user being updated
 * @param userInfoDTO validated payload with the new profile values
 * @return the updated user info
 */
@Permission(type = ResourceType.SITE, permissionPublic = true, permissionWithin = true)
@ApiOperation(value = "完善用户信息,修改用户名、密码(供组织服务feign调用)")
@PutMapping(value = "/{id}/userInfo")
public ResponseEntity<UserInfoDTO> updateUserInfo(@PathVariable Long id,
                                                  @RequestBody @Valid UserInfoDTO userInfoDTO) {
    UserInfoDTO updated = userService.updateUserInfo(id, userInfoDTO);
    return ResponseEntity.ok(updated);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/ClientServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.iam.infra.asserts.ClientAssertHelper;
import io.choerodon.iam.infra.asserts.OrganizationAssertHelper;
import io.choerodon.iam.infra.common.utils.JsonUtils;
import io.choerodon.iam.infra.common.utils.ParamUtils;
import io.choerodon.iam.infra.dto.ClientDTO;
import io.choerodon.iam.infra.exception.AlreadyExsitedException;
import io.choerodon.iam.infra.exception.EmptyParamException;
import io.choerodon.iam.infra.exception.InsertException;
import io.choerodon.iam.infra.exception.UpdateExcetion;
import io.choerodon.iam.infra.mapper.ClientMapper;
import io.choerodon.iam.infra.mapper.MemberRoleMapper;
import org.apache.commons.lang.RandomStringUtils;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.api.query.ClientRoleQuery;
import io.choerodon.iam.api.dto.SimplifiedClientDTO;
import io.choerodon.iam.app.service.ClientService;
import java.util.Optional;
/**
* @author wuguokai
*/
@Service
public class ClientServiceImpl implements ClientService {

    private static final String ORGANIZATION_ID_NOT_EQUAL_EXCEPTION = "error.organizationId.not.same";
    // member type discriminator used by member_role rows that belong to clients
    private static final String MEMBER_TYPE_CLIENT = "client";

    private OrganizationAssertHelper organizationAssertHelper;
    private ClientAssertHelper clientAssertHelper;
    private ClientMapper clientMapper;
    private MemberRoleMapper memberRoleMapper;

    public ClientServiceImpl(OrganizationAssertHelper organizationAssertHelper,
                             ClientAssertHelper clientAssertHelper,
                             ClientMapper clientMapper,
                             MemberRoleMapper memberRoleMapper) {
        this.organizationAssertHelper = organizationAssertHelper;
        this.clientMapper = clientMapper;
        this.clientAssertHelper = clientAssertHelper;
        this.memberRoleMapper = memberRoleMapper;
    }

    /**
     * Create a client under the given organization.
     *
     * @param orgId     owning organization; must exist
     * @param clientDTO client payload; id is cleared so the DB generates it
     * @return the freshly persisted client, re-read by primary key
     * @throws InsertException if the insert does not affect exactly one row
     */
    @Override
    public ClientDTO create(Long orgId, ClientDTO clientDTO) {
        organizationAssertHelper.organizationNotExisted(orgId);
        validateAdditionalInfo(clientDTO);
        clientDTO.setId(null);
        clientDTO.setOrganizationId(orgId);
        if (clientMapper.insertSelective(clientDTO) != 1) {
            throw new InsertException("error.client.create");
        }
        return clientMapper.selectByPrimaryKey(clientDTO.getId());
    }

    /**
     * 创建客户端时生成随机的clientId和secret
     * Build the default form data for client creation: a random unique name
     * and a random 16-character secret. Nothing is persisted here.
     *
     * @param orgId kept for interface compatibility; not used by the current
     *              implementation
     */
    @Override
    public ClientDTO getDefaultCreateData(Long orgId) {
        ClientDTO clientDTO = new ClientDTO();
        clientDTO.setName(generateUniqueName());
        clientDTO.setSecret(RandomStringUtils.randomAlphanumeric(16));
        return clientDTO;
    }

    /**
     * Update a client in full (non-selective), preserving its organization.
     *
     * @throws UpdateExcetion if the update does not affect exactly one row
     */
    @Override
    public ClientDTO update(ClientDTO clientDTO) {
        preUpdate(clientDTO);
        if (clientMapper.updateByPrimaryKey(clientDTO) != 1) {
            throw new UpdateExcetion("error.client.update");
        }
        return clientMapper.selectByPrimaryKey(clientDTO.getId());
    }

    /**
     * Delete a client and its member-role assignments in one transaction.
     *
     * @throws CommonException if the client does not belong to {@code orgId}
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public void delete(Long orgId, Long clientId) {
        ClientDTO dto = clientAssertHelper.clientNotExisted(clientId);
        if (!dto.getOrganizationId().equals(orgId)) {
            throw new CommonException(ORGANIZATION_ID_NOT_EQUAL_EXCEPTION);
        }
        // cascade: remove role memberships before the client row itself
        memberRoleMapper.deleteMemberRoleByMemberIdAndMemberType(clientId, MEMBER_TYPE_CLIENT);
        clientMapper.deleteByPrimaryKey(clientId);
    }

    /**
     * Fetch a client by id, asserting it belongs to the given organization.
     */
    @Override
    public ClientDTO query(Long orgId, Long clientId) {
        ClientDTO dto = clientAssertHelper.clientNotExisted(clientId);
        if (!orgId.equals(dto.getOrganizationId())) {
            throw new CommonException(ORGANIZATION_ID_NOT_EQUAL_EXCEPTION);
        }
        return dto;
    }

    /**
     * Fetch a client by name, asserting it belongs to the given organization.
     */
    @Override
    public ClientDTO queryByName(Long orgId, String clientName) {
        ClientDTO dto = clientAssertHelper.clientNotExisted(clientName);
        if (!orgId.equals(dto.getOrganizationId())) {
            throw new CommonException(ORGANIZATION_ID_NOT_EQUAL_EXCEPTION);
        }
        return dto;
    }

    /**
     * Page through clients matching the example DTO and free-text param.
     */
    @Override
    public PageInfo<ClientDTO> list(ClientDTO clientDTO, PageRequest pageRequest, String param) {
        return PageHelper
                .startPage(pageRequest.getPage(), pageRequest.getSize())
                .doSelectPageInfo(() -> clientMapper.fulltextSearch(clientDTO, param));
    }

    /**
     * Validate a client payload: name must be present and unique.
     *
     * @throws EmptyParamException if the name is missing
     */
    @Override
    public void check(ClientDTO client) {
        String name = client.getName();
        if (StringUtils.isEmpty(name)) {
            throw new EmptyParamException(("error.clientName.null"));
        }
        checkName(client);
    }

    /**
     * Page through the clients that hold the given role in the given source
     * (site/organization/project), with optional free-text filtering.
     */
    @Override
    public PageInfo<ClientDTO> pagingQueryUsersByRoleId(PageRequest pageRequest, ResourceType resourceType, Long sourceId, ClientRoleQuery clientRoleSearchDTO, Long roleId) {
        String param = Optional.ofNullable(clientRoleSearchDTO).map(dto -> ParamUtils.arrToStr(dto.getParam())).orElse(null);
        return PageHelper
                .startPage(pageRequest.getPage(), pageRequest.getSize())
                .doSelectPageInfo(() -> clientMapper.selectClientsByRoleIdAndOptions(roleId, sourceId, resourceType.value(), clientRoleSearchDTO, param));
    }

    /**
     * Page through a simplified projection of all clients.
     */
    @Override
    public PageInfo<SimplifiedClientDTO> pagingQueryAllClients(PageRequest pageRequest, String params) {
        return PageHelper.startPage(pageRequest.getPage(), pageRequest.getSize()).doSelectPageInfo(() -> clientMapper.selectAllClientSimplifiedInfo(params));
    }

    /**
     * Generate a 12-character alphanumeric name not yet used by any client.
     * Retries until the uniqueness probe (selectOne) finds no collision.
     */
    private String generateUniqueName() {
        String uniqueName;
        ClientDTO dto = new ClientDTO();
        while (true) {
            uniqueName = RandomStringUtils.randomAlphanumeric(12);
            dto.setName(uniqueName);
            if (clientMapper.selectOne(dto) == null) {
                break;
            }
        }
        return uniqueName;
    }

    /**
     * Shared pre-update validation: name present, client exists, organization
     * id pinned to the stored value (it must never change), additional info
     * well-formed.
     */
    private void preUpdate(ClientDTO clientDTO) {
        if (StringUtils.isEmpty(clientDTO.getName())) {
            throw new EmptyParamException("error.clientName.empty");
        }
        Long id = clientDTO.getId();
        ClientDTO dto = clientAssertHelper.clientNotExisted(id);
        //组织id不可修改 (organization id is immutable)
        clientDTO.setOrganizationId(dto.getOrganizationId());
        validateAdditionalInfo(clientDTO);
    }

    /**
     * Default additionalInformation to "{}" and reject malformed JSON.
     */
    private void validateAdditionalInfo(ClientDTO clientDTO) {
        String additionalInfo = clientDTO.getAdditionalInformation();
        if (StringUtils.isEmpty(additionalInfo)) {
            clientDTO.setAdditionalInformation("{}");
        } else if (!JsonUtils.isJSONValid(additionalInfo)) {
            throw new CommonException("error.client.additionalInfo.notJson");
        }
    }

    /**
     * Uniqueness check for the client name.
     *
     * On create (no id yet) any client with the same name is a conflict; on
     * update a same-named client is only a conflict when it is a different
     * row. Previously this used boxed Booleans and
     * StringUtils.isEmpty(Object) on a Long id; an explicit null check on a
     * primitive boolean states the intent directly and avoids relying on a
     * String utility's Object overload.
     */
    private void checkName(ClientDTO client) {
        boolean createCheck = client.getId() == null;
        String name = client.getName();
        ClientDTO clientDTO = new ClientDTO();
        clientDTO.setName(name);
        if (createCheck) {
            boolean existed = clientMapper.selectOne(clientDTO) != null;
            if (existed) {
                throw new AlreadyExsitedException("error.clientName.exist");
            }
        } else {
            Long id = client.getId();
            ClientDTO dto = clientMapper.selectOne(clientDTO);
            boolean existed = dto != null && !id.equals(dto.getId());
            if (existed) {
                throw new AlreadyExsitedException("error.clientName.exist");
            }
        }
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/OrganizationRoleController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.base.BaseController;
import io.choerodon.iam.api.query.RoleQuery;
import io.choerodon.iam.app.service.RoleService;
import io.choerodon.iam.infra.dto.RoleDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author Eugen
*/
@RestController
@RequestMapping(value = "/v1/organizations/{organization_id}/roles")
public class OrganizationRoleController extends BaseController {

    private RoleService roleService;

    public OrganizationRoleController(RoleService roleService) {
        this.roleService = roleService;
    }

    /**
     * Page through organization-level roles visible to an organization: both
     * roles it created itself and organization roles created at site level.
     *
     * @param organizationId the organization whose roles are listed
     * @param pageRequest    paging/sorting info (defaults to id desc)
     * @param roleQuery      filter payload; source id/type are forced here
     * @return one page of matching roles
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "组织层分页查询组织层角色(包括该组织创建的角色 及 平台层创建的组织角色)")
    @PostMapping(value = "/paging")
    public ResponseEntity<PageInfo<RoleDTO>> pagingQuery(@PathVariable(name = "organization_id") Long organizationId,
                                                         @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                         @RequestBody RoleQuery roleQuery) {
        // pin the query to this organization regardless of what the client sent
        roleQuery.setSourceId(organizationId);
        roleQuery.setSourceType(ResourceType.ORGANIZATION.value());
        return ResponseEntity.ok(roleService.pagingQueryOrgRoles(organizationId, pageRequest, roleQuery));
    }

    /**
     * Load one role by id, including its permissions and labels.
     *
     * @param id the role id
     * @return the role with permission and label details
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "组织层通过ID查询角色信息(包括权限信息 和 标签信息)")
    @GetMapping(value = "/{id}")
    public ResponseEntity<RoleDTO> queryById(@PathVariable(name = "organization_id") Long organizationId,
                                             @PathVariable Long id) {
        return ResponseEntity.ok(roleService.queryWithPermissionsAndLabels(id));
    }

    /**
     * Create an organization-level role owned by this organization. Labels
     * cannot be set through this endpoint and are cleared.
     *
     * @param organizationId owning organization
     * @param roleDTO        validated role payload
     * @return the created role
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "组织层创建角色")
    @PostMapping
    public ResponseEntity<RoleDTO> create(@PathVariable(name = "organization_id") Long organizationId,
                                          @RequestBody @Validated RoleDTO roleDTO) {
        roleDTO.setLabels(null);
        roleDTO.setResourceLevel(ResourceType.ORGANIZATION.value());
        roleDTO.setOrganizationId(organizationId);
        return ResponseEntity.ok(roleService.create(roleDTO));
    }

    /**
     * Update a role. Only roles created by this organization may be updated;
     * labels cannot be changed through this endpoint.
     *
     * @param organizationId owning organization
     * @param id             id of the role to update
     * @param roleDTO        new role values
     * @return the updated role
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "组织层修改角色")
    @PutMapping(value = "/{id}")
    public ResponseEntity<RoleDTO> update(@PathVariable(name = "organization_id") Long organizationId,
                                          @PathVariable Long id,
                                          @RequestBody RoleDTO roleDTO) {
        roleDTO.setId(id);
        roleDTO.setLabels(null);
        return ResponseEntity.ok(roleService.orgUpdate(roleDTO, organizationId));
    }

    /**
     * Enable a role within this organization.
     *
     * @param id id of the role to enable
     * @return the enabled role
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "组织层启用角色")
    @PutMapping(value = "/{id}/enable")
    public ResponseEntity<RoleDTO> enable(@PathVariable(name = "organization_id") Long organizationId,
                                          @PathVariable Long id) {
        return ResponseEntity.ok(roleService.orgEnableRole(id, organizationId));
    }

    /**
     * Disable a role within this organization.
     *
     * @param id id of the role to disable
     * @return the disabled role
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "组织层禁用角色")
    @PutMapping(value = "/{id}/disable")
    public ResponseEntity<RoleDTO> disable(@PathVariable(name = "organization_id") Long organizationId,
                                           @PathVariable Long id) {
        return ResponseEntity.ok(roleService.orgDisableRole(id, organizationId));
    }

    /**
     * Validate a role payload; returns 200 with no body when it passes.
     *
     * @param role the role payload to validate
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "组织层角色信息校验")
    @PostMapping(value = "/check")
    public ResponseEntity check(@PathVariable(name = "organization_id") Long organizationId,
                                @RequestBody RoleDTO role) {
        roleService.check(role);
        return ResponseEntity.ok().build();
    }

    /**
     * List the organization-level roles carrying the given label.
     *
     * @param label the label to match
     * @return roles with that label in this organization
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "根据标签查询组织层角色")
    @GetMapping(value = "/selectByLabel")
    public ResponseEntity<List<RoleDTO>> selectByLabel(@PathVariable(name = "organization_id") Long organizationId,
                                                       @RequestParam String label) {
        return ResponseEntity.ok(roleService.selectByLabel(label, organizationId));
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/AuditController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import javax.validation.Valid;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.iam.infra.dto.AuditDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import io.choerodon.iam.app.service.AuditService;
import io.choerodon.swagger.annotation.CustomPageRequest;
import springfox.documentation.annotations.ApiIgnore;
/**
* @author Eugen
**/
@RestController
@RequestMapping(value = "/v1/audit")
public class AuditController {

    private AuditService auditService;

    public AuditController(AuditService auditService) {
        this.auditService = auditService;
    }

    /**
     * Persist one audit record (service-internal endpoint).
     *
     * @param auditDTO validated audit payload
     * @return the stored record
     */
    @Permission(permissionWithin = true)
    @ApiOperation(value = "创建审计记录")
    @PostMapping(value = "/insert")
    public ResponseEntity<AuditDTO> create(@RequestBody @Valid AuditDTO auditDTO) {
        return ResponseEntity.ok(auditService.create(auditDTO));
    }

    /**
     * Page through audit records, optionally filtered by user, data type and
     * business type (service-internal endpoint).
     *
     * @param pageRequest  paging/sorting info (defaults to id desc)
     * @param userId       optional acting-user filter
     * @param dataType     optional data-type filter
     * @param businessType optional business-type filter
     * @return one page of audit records
     */
    @Permission(permissionWithin = true)
    @ApiOperation(value = "分页查询审计记录")
    @CustomPageRequest
    @GetMapping
    public ResponseEntity<PageInfo<AuditDTO>> pagingQuery(@ApiIgnore
                                                          @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                          @RequestParam(name = "userId", required = false) Long userId,
                                                          @RequestParam(value = "dataType", required = false) String dataType,
                                                          @RequestParam(value = "businessType", required = false) String businessType) {
        PageInfo<AuditDTO> page = auditService.pagingQuery(userId, businessType, dataType, pageRequest);
        return ResponseEntity.ok(page);
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/LookupServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;

import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.app.service.LookupService;
import io.choerodon.iam.infra.asserts.AssertHelper;
import io.choerodon.iam.infra.dto.LookupDTO;
import io.choerodon.iam.infra.dto.LookupValueDTO;
import io.choerodon.iam.infra.exception.EmptyParamException;
import io.choerodon.iam.infra.exception.InsertException;
import io.choerodon.iam.infra.exception.UpdateExcetion;
import io.choerodon.iam.infra.mapper.LookupMapper;
import io.choerodon.iam.infra.mapper.LookupValueMapper;

import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.ObjectUtils;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author superlee
*/
@Service
public class LookupServiceImpl implements LookupService {

    private LookupMapper lookupMapper;
    private LookupValueMapper lookupValueMapper;
    private AssertHelper assertHelper;

    public LookupServiceImpl(LookupMapper lookupMapper,
                             LookupValueMapper lookupValueMapper,
                             AssertHelper assertHelper) {
        this.lookupMapper = lookupMapper;
        this.lookupValueMapper = lookupValueMapper;
        this.assertHelper = assertHelper;
    }

    /**
     * Create a lookup together with its (optional) values in one transaction.
     *
     * @param lookupDTO lookup payload; id is cleared so the DB generates it
     * @return the payload, with generated ids populated by the mapper
     * @throws InsertException if any insert affects a row count other than 1
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public LookupDTO create(LookupDTO lookupDTO) {
        lookupDTO.setId(null);
        List<LookupValueDTO> values = lookupDTO.getLookupValues();
        if (lookupMapper.insertSelective(lookupDTO) != 1) {
            throw new InsertException("error.repo.lookup.insert");
        }
        if (!ObjectUtils.isEmpty(values)) {
            values.forEach(v -> {
                v.setId(null);
                // link each value to the freshly generated lookup id
                v.setLookupId(lookupDTO.getId());
                if (lookupValueMapper.insertSelective(v) != 1) {
                    throw new InsertException("error.lookupValue.insert");
                }
            });
        }
        return lookupDTO;
    }

    /**
     * Page through lookups matching the example DTO and free-text param.
     */
    @Override
    public PageInfo<LookupDTO> pagingQuery(PageRequest pageRequest, LookupDTO lookupDTO, String param) {
        return PageHelper
                .startPage(pageRequest.getPage(), pageRequest.getSize())
                .doSelectPageInfo(() -> lookupMapper.fulltextSearch(lookupDTO, param));
    }

    /**
     * Delete a lookup and all of its values in one transaction.
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public void delete(Long id) {
        lookupMapper.deleteByPrimaryKey(id);
        //删除lookup级联删除lookupValue (cascade: delete the lookup's values too)
        LookupValueDTO lookupValue = new LookupValueDTO();
        lookupValue.setLookupId(id);
        lookupValueMapper.delete(lookupValue);
    }

    /**
     * Update a lookup and selectively update its existing values.
     *
     * Each submitted value must carry an id; values whose id matches an
     * existing row get their code and description rewritten. Submitted
     * values with unknown ids are silently ignored (unchanged behavior).
     *
     * Improvement: the existing rows are now indexed by id in a HashMap,
     * replacing the previous O(n*m) nested list scan; since ids are primary
     * keys the match set is identical.
     *
     * @throws UpdateExcetion      if the lookup update affects != 1 row
     * @throws EmptyParamException if a submitted value has no id
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public LookupDTO update(LookupDTO lookupDTO) {
        assertHelper.objectVersionNumberNotNull(lookupDTO.getObjectVersionNumber());
        List<LookupValueDTO> values = lookupDTO.getLookupValues();
        if (lookupMapper.updateByPrimaryKeySelective(lookupDTO) != 1) {
            throw new UpdateExcetion("error.repo.lookup.update");
        }
        LookupValueDTO query = new LookupValueDTO();
        query.setLookupId(lookupDTO.getId());
        List<LookupValueDTO> existingValues = lookupValueMapper.select(query);
        if (!ObjectUtils.isEmpty(values)) {
            // index current rows by primary key for O(1) matching
            Map<Long, LookupValueDTO> existingById = new HashMap<>();
            for (LookupValueDTO existing : existingValues) {
                existingById.put(existing.getId(), existing);
            }
            for (LookupValueDTO v : values) {
                if (v.getId() == null) {
                    throw new EmptyParamException("error.lookupValue.id.null");
                }
                LookupValueDTO target = existingById.get(v.getId());
                if (target != null) {
                    target.setCode(v.getCode());
                    target.setDescription(v.getDescription());
                    lookupValueMapper.updateByPrimaryKeySelective(target);
                }
            }
        }
        return lookupDTO;
    }

    /**
     * Load a lookup by id with its values, or null when it does not exist.
     */
    @Override
    public LookupDTO queryById(Long id) {
        LookupDTO lookup = lookupMapper.selectByPrimaryKey(id);
        if (lookup == null) {
            return null;
        }
        LookupValueDTO lookupValue = new LookupValueDTO();
        lookupValue.setLookupId(id);
        lookup.setLookupValues(lookupValueMapper.select(lookupValue));
        return lookup;
    }

    /**
     * Load a lookup (with values) by its code via a single join query.
     */
    @Override
    public LookupDTO listByCodeWithLookupValues(String code) {
        return lookupMapper.selectByCodeWithLookupValues(code);
    }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/api/controller/v1/LdapControllerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.controller.v1
import io.choerodon.base.domain.PageRequest
import io.choerodon.core.exception.ExceptionResponse
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.api.dto.LdapAccountDTO
import io.choerodon.iam.api.dto.LdapConnectionDTO
import io.choerodon.iam.app.service.LdapService
import io.choerodon.iam.app.service.impl.LdapServiceImpl
import io.choerodon.iam.infra.dto.LdapDTO
import io.choerodon.iam.infra.dto.LdapErrorUserDTO
import io.choerodon.iam.infra.dto.LdapHistoryDTO
import io.choerodon.iam.infra.dto.OrganizationDTO
import io.choerodon.iam.infra.enums.LdapErrorUserCause
import io.choerodon.iam.infra.mapper.LdapErrorUserMapper
import io.choerodon.iam.infra.mapper.LdapHistoryMapper
import io.choerodon.iam.infra.mapper.LdapMapper
import io.choerodon.iam.infra.mapper.OrganizationMapper
import org.springframework.beans.BeanUtils
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.web.client.TestRestTemplate
import org.springframework.context.annotation.Import
import org.springframework.http.HttpEntity
import org.springframework.http.HttpMethod
import org.springframework.transaction.annotation.Transactional
import spock.lang.Shared
import spock.lang.Specification
import spock.lang.Stepwise
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
* @author dengyouquan* */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
@Stepwise
class LdapControllerSpec extends Specification {
private static final String BASE_PATH = "/v1/organizations/{organization_id}/ldaps"
@Autowired
private TestRestTemplate restTemplate
@Autowired
private OrganizationMapper organizationMapper
@Autowired
private LdapMapper ldapMapper
@Autowired
private LdapErrorUserMapper ldapErrorUserMapper
@Autowired
private LdapHistoryMapper ldapHistoryMapper
//设置为共享,以免每个方法使用false,调用
@Shared
def isInit = false
@Shared
def needClean = false
def ldapDTO = new LdapDTO()
@Shared
def organizationId = 1L
@Shared
OrganizationDTO organization
@Shared
def organizationDTO
def setup() {
ldapDTO.setOrganizationId(organizationId)
ldapDTO.setServerAddress("ldap://ac.hand-china.com")
ldapDTO.setObjectClass("person")
ldapDTO.setSagaBatchSize(500)
ldapDTO.setName("hand")
ldapDTO.setOrganizationId(organizationId)
ldapDTO.setServerAddress("ldap://ac.hand-china.com")
ldapDTO.setObjectClass("person")
ldapDTO.setConnectionTimeout(10)
ldapDTO.setAccount("test")
ldapDTO.setPassword("<PASSWORD>")
ldapDTO.setPort("389")
ldapDTO.setUuidField("uid")
if (!isInit) {
given: "构造参数"
organization = new OrganizationDTO()
organization.setName("汉得")
organization.setCode("hand")
organization.setEnabled(true)
organizationDTO = new OrganizationDTO()
organization.setName("猪齿鱼")
organization.setCode("choerodon")
organization.setEnabled(true)
isInit = true
LdapDTO ldap = new LdapDTO()
ldap.setName("choerodon")
ldap.setOrganizationId(2L)
ldap.setServerAddress("ldap://ac.hand-china.com")
ldap.setObjectClass("person")
when: "调用方法"
int count = organizationMapper.insert(organization)
count += organizationMapper.insert(organizationDTO)
count += ldapMapper.insert(ldap)
then: "检验插入是否成功"
count == 3
}
}
def cleanup() {
if (needClean) {
organizationMapper.deleteByPrimaryKey(organization)
organizationMapper.deleteByPrimaryKey(organizationDTO)
}
}
def "Create"() {
given: "构造请求参数"
def paramsMap = new HashMap<String, Object>()
when: "调用方法[异常-组织id不存在]"
paramsMap.put("organization_id", 1000L)
def entity = restTemplate.postForEntity(BASE_PATH, ldapDTO, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.organization.not.exist")
when: "调用方法"
paramsMap.put("organization_id", organizationId)
entity = restTemplate.postForEntity(BASE_PATH, ldapDTO, LdapDTO, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
}
def "Update"() {
given: "构造请求参数"
def updateLdapDTO = new LdapDTO()
BeanUtils.copyProperties(ldapDTO, updateLdapDTO)
def paramsMap = new HashMap<String, Object>()
when: "调用方法[异常-组织id不存在]"
paramsMap.put("organization_id", 1000L)
paramsMap.put("id", 1)
paramsMap.put("sagaBathSize", 500)
def entity = restTemplate.postForEntity(BASE_PATH + "/{id}", updateLdapDTO, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.organization.not.exist")
when: "调用方法[异常-ldap不存在]"
paramsMap.put("organization_id", 1)
paramsMap.put("id", 1000)
entity = restTemplate.postForEntity(BASE_PATH + "/{id}", updateLdapDTO, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.ldap.not.exist")
when: "调用方法"
paramsMap.put("organization_id", organizationId)
paramsMap.put("id", 1)
BeanUtils.copyProperties(ldapMapper.selectByPrimaryKey(1L), updateLdapDTO)
updateLdapDTO.setAccount("account")
updateLdapDTO.setPassword("password")
updateLdapDTO.setBaseDn("base/dn")
updateLdapDTO.setObjectClass("objectclass")
updateLdapDTO.setCustomFilter("(filter)")
entity = restTemplate.postForEntity(BASE_PATH + "/{id}", updateLdapDTO, LdapDTO, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getName() == "choerodon"
entity.getBody().getOrganizationId() == 1L
entity.getBody().getServerAddress() == "please edit"
entity.getBody().getObjectClass() == "objectclass"
}
def "EnableLdap"() {
given: "构造请求参数"
def paramsMap = new HashMap<String, Object>()
when: "调用方法[异常-组织id不匹配]"
paramsMap.put("organization_id", 1000L)
paramsMap.put("id", 1)
paramsMap.put("sagaBathSize", 500)
def httpEntity = new HttpEntity<Object>()
def entity = restTemplate.exchange(BASE_PATH + "/{id}/enable", HttpMethod.PUT, httpEntity, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.ldap.organizationId.not.match")
when: "调用方法[异常-ldap不存在]"
paramsMap.put("organization_id", 1L)
paramsMap.put("id", 1000)
entity = restTemplate.exchange(BASE_PATH + "/{id}/enable", HttpMethod.PUT, httpEntity, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.ldap.not.exist")
when: "调用方法"
paramsMap.put("organization_id", 1L)
paramsMap.put("id", 1)
entity = restTemplate.exchange(BASE_PATH + "/{id}/enable", HttpMethod.PUT, httpEntity, LdapDTO, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getEnabled()
}
def "DisableLdap"() {
given: "构造请求参数"
def paramsMap = new HashMap<String, Object>()
when: "调用方法[异常-组织id不匹配]"
paramsMap.put("organization_id", 1000L)
paramsMap.put("id", 1)
paramsMap.put("sagaBathSize", 500)
def httpEntity = new HttpEntity<Object>()
def entity = restTemplate.exchange(BASE_PATH + "/{id}/disable", HttpMethod.PUT, httpEntity, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.ldap.organizationId.not.match")
when: "调用方法[异常-ldap不存在]"
paramsMap.put("organization_id", 1L)
paramsMap.put("id", 1000)
entity = restTemplate.exchange(BASE_PATH + "/{id}/disable", HttpMethod.PUT, httpEntity, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.ldap.not.exist")
when: "调用方法"
paramsMap.put("organization_id", 1L)
paramsMap.put("id", 1)
entity = restTemplate.exchange(BASE_PATH + "/{id}/disable", HttpMethod.PUT, httpEntity, LdapDTO, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
!entity.getBody().getEnabled()
}
def "QueryByOrgId"() {
given: "构造请求参数"
def paramsMap = new HashMap<String, Object>()
when: "调用方法[异常-组织id不存在]"
paramsMap.put("organization_id", 1000L)
paramsMap.put("sagaBathSize", 500)
def entity = restTemplate.getForEntity(BASE_PATH, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.organization.not.exist")
when: "调用方法"
paramsMap.put("organization_id", 3)
paramsMap.put("id", 2)
entity = restTemplate.getForEntity(BASE_PATH, LdapDTO, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
}
def "Delete"() {
given: "构造请求参数"
def paramsMap = new HashMap<String, Object>()
def httpEntity = new HttpEntity<Object>()
when: "调用方法[异常-组织id不存在]"
paramsMap.put("organization_id", 1000L)
paramsMap.put("id", 2)
paramsMap.put("sagaBathSize", 500)
def entity = restTemplate.exchange(BASE_PATH + "/{id}", HttpMethod.DELETE, httpEntity, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.organization.not.exist")
// when: "调用方法"
// paramsMap.put("organization_id", 1)
// paramsMap.put("id", 2)
// entity = restTemplate.exchange(BASE_PATH + "/{id}", HttpMethod.DELETE, httpEntity, Boolean, paramsMap)
//
// then: "校验结果"
// entity.statusCode.is2xxSuccessful()
}
def "TestConnect"() {
given: "构造请求参数"
def paramsMap = new HashMap<String, Object>()
def ldapAccountDTO = new LdapAccountDTO()
when: "调用方法[异常-组织id不存在]"
paramsMap.put("organization_id", 1000L)
paramsMap.put("id", 1)
paramsMap.put("sagaBathSize", 500)
def entity = restTemplate.postForEntity(BASE_PATH + "/{id}/test_connect", ldapAccountDTO, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.organization.not.exist")
when: "调用方法"
OrganizationDTO organizationDTO = new OrganizationDTO()
organizationDTO.setCode("tets-org123sd1")
organizationDTO.setName("name")
organizationDTO.setEnabled(true)
organizationMapper.insertSelective(organizationDTO)
paramsMap.put("organization_id", organizationDTO.getId())
paramsMap.put("id", 1)
entity = restTemplate.postForEntity(BASE_PATH + "/{id}/test_connect", ldapAccountDTO, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.organization.not.has.ldap")
when: "调用方法"
paramsMap.put("organization_id", 3)
paramsMap.put("id", 3)
entity = restTemplate.postForEntity(BASE_PATH + "/{id}/test_connect", ldapAccountDTO, LdapConnectionDTO, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
}
def "SyncUsers"() {
given: "构造请求参数"
def paramsMap = new HashMap<String, Object>()
when: "调用方法[异常-组织不存在]"
paramsMap.put("organization_id", 1000)
paramsMap.put("id", 3)
paramsMap.put("sagaBathSize", 500)
def entity = restTemplate.postForEntity(BASE_PATH + "/{id}/sync_users", Void, ExceptionResponse, paramsMap)
needClean = true
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.organization.not.exist")
when: "调用方法"
paramsMap.put("organization_id", 1)
paramsMap.put("id", 1)
entity = restTemplate.postForEntity(BASE_PATH + "/{id}/sync_users", Void, Void, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
}
def "LatestHistory"() {
given: "构造请求参数"
def paramsMap = new HashMap<String, Object>()
when: "调用方法"
paramsMap.put("organization_id", 3)
paramsMap.put("id", 3)
paramsMap.put("sagaBathSize", 500)
def entity = restTemplate.getForEntity(BASE_PATH + "/{id}/latest_history", LdapHistoryDTO, paramsMap)
needClean = true
then: "校验结果"
entity.statusCode.is2xxSuccessful()
}
// PUT /v1/organizations/1/ldaps/{id}/stop: stopping a sync run must stamp
// syncEndTime on the history row inserted in the given block.
def "stop"() {
given: "新建一个ldapHistory"
LdapHistoryDTO ldapHistory = new LdapHistoryDTO()
ldapHistory.setLdapId(1L)
ldapHistory.setSyncBeginTime(new Date(System.currentTimeMillis()))
ldapHistoryMapper.insertSelective(ldapHistory)
// re-read the row to obtain the generated primary key
LdapHistoryDTO returnValue = ldapHistoryMapper.selectByPrimaryKey(ldapHistory)
long id = returnValue.getId()
when: "调用controller"
def entity = restTemplate.exchange("/v1/organizations/1/ldaps/" + id + "/stop", HttpMethod.PUT, HttpEntity.EMPTY, LdapHistoryDTO)
then: "校验"
entity.statusCode.is2xxSuccessful()
// the controller must have closed the history record
entity.body.syncEndTime != null
}
// Calls LdapController.pagingQueryHistories directly (bypassing HTTP) with a
// hand-wired service; @Transactional rolls the inserted history row back.
@Transactional
def "pagingQueryHistories"() {
given:
// only the history mapper is needed by this code path; other deps are null
LdapService ldapService = new LdapServiceImpl(null, null, null, null, null,null, ldapHistoryMapper)
LdapController ldapController = new LdapController(ldapService)
PageRequest pageRequest = new PageRequest(1, 20)
LdapHistoryDTO ldapHistory = new LdapHistoryDTO()
ldapHistory.setLdapId(1L)
ldapHistoryMapper.insertSelective(ldapHistory)
when:
def entity = ldapController.pagingQueryHistories(pageRequest, 1L, 1L)
then:
entity.statusCode.is2xxSuccessful()
// exactly the one row inserted above
entity.body.total == 1
}
// Calls LdapController.pagingQueryErrorUsers directly with one inserted
// error-user row and expects exactly one result.
// NOTE(review): unlike pagingQueryHistories this feature is not
// @Transactional, so the inserted row is not rolled back — confirm cleanup
// happens elsewhere.
def "pagingQueryErrorUsers"() {
given:
// only the error-user mapper is needed by this code path; other deps are null
LdapService ldapService = new LdapServiceImpl(null, null, null,null,null, ldapErrorUserMapper,null)
LdapController ldapController = new LdapController(ldapService)
PageRequest pageRequest = new PageRequest(1, 10)
LdapErrorUserDTO ldapErrorUser = new LdapErrorUserDTO()
ldapErrorUser.setLdapHistoryId(1L)
ldapErrorUser.setUuid("uuid")
ldapErrorUser.setCause(LdapErrorUserCause.EMAIL_ALREADY_EXISTED.value())
ldapErrorUserMapper.insertSelective(ldapErrorUser)
when:
def entity = ldapController.pagingQueryErrorUsers(pageRequest, 1L,1L, null)
then:
entity.statusCode.is2xxSuccessful()
entity.body.total == 1
}
}
<|start_filename|>src/main/java/io/choerodon/iam/api/eventhandler/ApplicationListener.java<|end_filename|>
package io.choerodon.iam.api.eventhandler;
import static io.choerodon.iam.infra.common.utils.SagaTopic.Application.*;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;
import io.choerodon.asgard.saga.annotation.SagaTask;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.api.dto.payload.DevOpsAppSyncPayload;
import io.choerodon.iam.infra.asserts.OrganizationAssertHelper;
import io.choerodon.iam.infra.asserts.ProjectAssertHelper;
import io.choerodon.iam.infra.dto.ApplicationDTO;
import io.choerodon.iam.infra.dto.ApplicationExplorationDTO;
import io.choerodon.iam.infra.enums.ApplicationCategory;
import io.choerodon.iam.infra.enums.ApplicationType;
import io.choerodon.iam.infra.mapper.ApplicationExplorationMapper;
import io.choerodon.iam.infra.mapper.ApplicationMapper;
/**
 * 应用监听器 (application listener).
 * Saga task handlers that keep iam's application tables in sync with events
 * published by the devops service: bulk sync, create-failure marking,
 * deletion, enable/disable, and rename.
 *
 * @since 0.15.0
 */
@Component
public class ApplicationListener {
    private static final String SUCCESSFUL = "successful";
    private static final String FAILED = "failed";
    private static final String SEPARATOR = "/";
    private final Logger logger = LoggerFactory.getLogger(ApplicationListener.class);
    private ObjectMapper objectMapper = new ObjectMapper();
    private ApplicationMapper applicationMapper;
    private ApplicationExplorationMapper applicationExplorationMapper;
    private OrganizationAssertHelper organizationAssertHelper;
    private ProjectAssertHelper projectAssertHelper;

    public ApplicationListener(ApplicationMapper applicationMapper,
                               ApplicationExplorationMapper applicationExplorationMapper,
                               OrganizationAssertHelper organizationAssertHelper,
                               ProjectAssertHelper projectAssertHelper) {
        this.applicationMapper = applicationMapper;
        this.applicationExplorationMapper = applicationExplorationMapper;
        this.organizationAssertHelper = organizationAssertHelper;
        this.projectAssertHelper = projectAssertHelper;
    }

    /**
     * Receives a JSON array of applications from devops, inserts the legal
     * ones and creates a root ApplicationExploration node for each, while
     * counting successes and failures.
     *
     * @param message JSON array of {@link ApplicationDTO}
     * @throws IOException if the payload cannot be deserialized
     */
    @SagaTask(code = IAM_SYNC_APP, sagaCode = APP_SYNC, seq = 1, description = "devops发送application集合进行同步")
    public void syncApplications(String message) throws IOException {
        List<ApplicationDTO> applications = objectMapper.readValue(message, new TypeReference<List<ApplicationDTO>>() {
        });
        logger.info("begin to sync applications, total: {}", applications.size());
        if (applications.isEmpty()) {
            logger.warn("receiving no one application while syncing applications");
            return;
        }
        // mutable success/failure counters shared with the lambda below
        Map<String, Integer> statisticsMap = new HashMap<>(2);
        statisticsMap.put(SUCCESSFUL, 0);
        statisticsMap.put(FAILED, 0);
        applications.forEach(app -> {
            int successful = statisticsMap.get(SUCCESSFUL);
            int failed = statisticsMap.get(FAILED);
            if (isIllegal(app)) {
                statisticsMap.put(FAILED, ++failed);
                return;
            }
            try {
                applicationMapper.insertSelective(app);
                long appId = app.getId();
                ApplicationExplorationDTO example = new ApplicationExplorationDTO();
                example.setApplicationId(appId);
                // a root exploration node's path is "/<id>/"
                String path = SEPARATOR + appId + SEPARATOR;
                example.setPath(path);
                example.setRootId(appId);
                example.setHashcode(String.valueOf(path.hashCode()));
                example.setEnabled(true);
                applicationExplorationMapper.insertSelective(example);
                statisticsMap.put(SUCCESSFUL, ++successful);
            } catch (Exception e) {
                statisticsMap.put(FAILED, ++failed);
                // fixed: pass the Throwable as the final argument WITHOUT a
                // placeholder so SLF4J logs the full stack trace (previously the
                // exception was consumed by an "exception: {}" placeholder)
                logger.error("insert application into db failed, application: {}", app, e);
            }
        });
        logger.info("syncing applications has done, successful: {}, failed: {}", statisticsMap.get(SUCCESSFUL), statisticsMap.get(FAILED));
    }

    /**
     * Returns true if the application must be skipped (missing/duplicate
     * name or code, unknown type, nonexistent organization/project).
     * As a side effect, normalizes a legal payload: defaults enabled to true
     * and forces the APPLICATION category.
     */
    private boolean isIllegal(ApplicationDTO app) {
        Long organizationId = app.getOrganizationId();
        if (ObjectUtils.isEmpty(organizationId)) {
            // NOTE(review): an empty organization id is only logged, not
            // rejected — confirm this leniency is intended
            logger.error("illegal application because of organization id is empty, application: {}", app);
        } else {
            try {
                organizationAssertHelper.organizationNotExisted(organizationId);
            } catch (CommonException e) {
                logger.error("illegal application because of organization does not existed, application: {}", app);
                return true;
            }
        }
        Long projectId = app.getProjectId();
        if (ObjectUtils.isEmpty(projectId)) {
            // NOTE(review): same leniency as the organization id above
            logger.error("illegal application because of project id is empty, application: {}", app);
        } else {
            try {
                projectAssertHelper.projectNotExisted(projectId);
            } catch (CommonException e) {
                logger.error("illegal application because of project does not existed, application: {}", app);
                return true;
            }
        }
        String name = app.getName();
        if (StringUtils.isEmpty(name)) {
            logger.error("illegal application because of name is empty, application: {}", app);
            return true;
        }
        String code = app.getCode();
        if (StringUtils.isEmpty(code)) {
            logger.error("illegal application because of code is empty, application: {}", app);
            return true;
        }
        if (!ApplicationType.matchCode(app.getApplicationType())) {
            logger.error("illegal application because of type is illegal, application: {}", app);
            return true;
        }
        // uniqueness probe #1: same name within organization + project
        ApplicationDTO example = new ApplicationDTO();
        example.setName(name);
        example.setOrganizationId(organizationId);
        example.setProjectId(projectId);
        if (!applicationMapper.select(example).isEmpty()) {
            logger.error("illegal application because of name is duplicated, application: {}", app);
            return true;
        }
        // uniqueness probe #2: same code within organization + project
        example.setName(null);
        example.setCode(code);
        if (!applicationMapper.select(example).isEmpty()) {
            logger.error("illegal application because of code is duplicated, application: {}", app);
            return true;
        }
        if (ObjectUtils.isEmpty(app.getEnabled())) {
            logger.warn("the enabled of application is null, so set default value true, application: {}", app);
            app.setEnabled(true);
        }
        app.setApplicationCategory(ApplicationCategory.APPLICATION.code());
        return false;
    }

    /**
     * Marks the referenced application as abnormal after devops failed to
     * create it on its side.
     *
     * @param message JSON of the {@link ApplicationDTO} devops failed on
     * @throws IOException if the payload cannot be deserialized
     */
    @SagaTask(code = APP_UPDATE_ABNORMAL, sagaCode = APP_DEVOPS_CREATE_FAIL, seq = 1, description = "iam接收devops创建应用失败事件")
    public void updateApplicationAbnormal(String message) throws IOException {
        ApplicationDTO applicationDTO = objectMapper.readValue(message, ApplicationDTO.class);
        if (applicationDTO == null) {
            throw new CommonException("error.application.payload.empty");
        }
        long id = applicationDTO.getId();
        ApplicationDTO application = applicationMapper.selectByPrimaryKey(id);
        if (application == null) {
            throw new CommonException("error.application.not.exist", applicationDTO);
        }
        application.setAbnormal(true);
        applicationMapper.updateByPrimaryKey(application);
    }

    /**
     * Deletes the application (and its exploration descendants) identified by
     * the payload's code + projectId + organizationId unique index.
     */
    @SagaTask(code = APP_SYNC_DELETE, sagaCode = DEVOPS_APP_DELETE, seq = 1, description = "iam接收devops删除应用事件")
    public void syncDeleteApplication(String message) throws IOException {
        DevOpsAppSyncPayload appDelPayload = getPayload(message);
        validatePayload(appDelPayload, false, false);
        ApplicationDTO applicationDTO = getTargetApplicationByUniqueIndex(appDelPayload);
        // remove the exploration tree first, then the application row itself
        applicationExplorationMapper.deleteDescendantByApplicationId(applicationDTO.getId());
        applicationMapper.deleteByPrimaryKey(applicationDTO);
    }

    /**
     * Mirrors an enable/disable toggle performed on the devops side.
     */
    @SagaTask(code = APP_SYNC_ACTIVE, sagaCode = DEVOPS_SYNC_APP_ACTIVE, seq = 1, description = "iam接收devops启用、禁用应用事件")
    public void syncApplicationActiveStatus(String message) throws IOException {
        DevOpsAppSyncPayload devOpsAppSyncPayload = getPayload(message);
        validatePayload(devOpsAppSyncPayload, true, false);
        ApplicationDTO applicationDTO = getTargetApplicationByUniqueIndex(devOpsAppSyncPayload);
        applicationDTO.setEnabled(devOpsAppSyncPayload.getActive());
        applicationMapper.updateByPrimaryKeySelective(applicationDTO);
    }

    /**
     * Mirrors a rename performed on the devops side.
     */
    @SagaTask(code = APP_SYNC_NAME, sagaCode = DEVOPS_SYNC_APP_NAME, seq = 1, description = "iam接收devops更新应用名称事件")
    public void syncApplicationName(String message) throws IOException {
        DevOpsAppSyncPayload syncPayload = getPayload(message);
        validatePayload(syncPayload, false, true);
        ApplicationDTO applicationDTO = getTargetApplicationByUniqueIndex(syncPayload);
        applicationDTO.setName(syncPayload.getName());
        applicationMapper.updateByPrimaryKeySelective(applicationDTO);
    }

    /** Deserializes the saga message, rejecting an empty payload. */
    private DevOpsAppSyncPayload getPayload(String message) throws IOException {
        DevOpsAppSyncPayload devOpsAppSyncPayload = objectMapper.readValue(message, DevOpsAppSyncPayload.class);
        if (devOpsAppSyncPayload == null) {
            throw new CommonException("error.application.payload.empty");
        }
        return devOpsAppSyncPayload;
    }

    /**
     * Ensures the unique-index fields are present; optionally also requires
     * the active flag and/or the name, depending on the calling saga task.
     */
    private void validatePayload(DevOpsAppSyncPayload devOpsAppSyncPayload, boolean activeValidate, boolean nameValidate) {
        if (devOpsAppSyncPayload.getCode() == null) {
            throw new CommonException("error.application.code.empty", devOpsAppSyncPayload);
        }
        if (devOpsAppSyncPayload.getProjectId() == null) {
            throw new CommonException("error.application.projectId.empty", devOpsAppSyncPayload);
        }
        if (devOpsAppSyncPayload.getOrganizationId() == null) {
            throw new CommonException("error.application.organizationId.empty", devOpsAppSyncPayload);
        }
        if (activeValidate && devOpsAppSyncPayload.getActive() == null) {
            // fixed: previously reported error.application.name.empty (copy-paste
            // from the name branch below) for a missing active flag
            throw new CommonException("error.application.active.empty", devOpsAppSyncPayload);
        }
        if (nameValidate && devOpsAppSyncPayload.getName() == null) {
            throw new CommonException("error.application.name.empty", devOpsAppSyncPayload);
        }
    }

    /**
     * Looks the application up by its code + projectId + organizationId
     * unique index, failing if no row matches.
     */
    private ApplicationDTO getTargetApplicationByUniqueIndex(DevOpsAppSyncPayload syncPayload) {
        ApplicationDTO applicationDTO = new ApplicationDTO();
        applicationDTO.setCode(syncPayload.getCode());
        applicationDTO.setProjectId(syncPayload.getProjectId());
        applicationDTO.setOrganizationId(syncPayload.getOrganizationId());
        applicationDTO = applicationMapper.selectOne(applicationDTO);
        if (applicationDTO == null) {
            throw new CommonException("error.application.not.exist", syncPayload);
        }
        return applicationDTO;
    }
}
<|start_filename|>react/src/app/iam/stores/user/project-info/index.js<|end_filename|>
// Forward the store singleton as this module's default export.
export { default } from './ProjectInfoStore';
<|start_filename|>react/src/app/iam/containers/organization/user/User.js<|end_filename|>
import React, { Component } from 'react';
import { Button, Modal, Table, Tooltip, Upload, Spin } from 'choerodon-ui';
import { injectIntl, FormattedMessage } from 'react-intl';
import { inject, observer } from 'mobx-react';
import { withRouter } from 'react-router-dom';
import { Action, axios, Content, Header, Page, Permission } from '@choerodon/boot';
import MouseOverWrapper from '../../../components/mouseOverWrapper';
import UserEdit from './UserEdit';
import './User.scss';
import StatusTag from '../../../components/statusTag';
import { handleFiltersParams } from '../../../common/util';
const { Sidebar } = Modal;
const intlPrefix = 'organization.user';
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class User extends Component {
state = this.getInitState();
getInitState() {
return {
submitting: false,
open: false,
status: 'create', // 'create' 'edit' 'upload'
id: '',
page: 1,
isLoading: true,
params: [],
filters: {},
pagination: {
current: 1,
pageSize: 10,
total: '',
},
sort: 'id,desc',
visible: false,
fileLoading: false,
selectedData: '',
};
}
componentDidMount() {
this.loadUser();
}
componentWillUnmount() {
this.timer = 0;
}
handleRefresh = () => {
this.setState(this.getInitState(), () => {
this.loadUser();
});
};
onEdit = (id) => {
this.setState({
visible: true,
status: 'modify',
selectedData: id,
});
};
loadUser = (paginationIn, sortIn, filtersIn, paramsIn) => {
const { AppState, UserStore } = this.props;
const {
pagination: paginationState,
sort: sortState,
filters: filtersState,
params: paramsState,
} = this.state;
const { id } = AppState.currentMenuType;
const pagination = paginationIn || paginationState;
const sort = sortIn || sortState;
const filters = filtersIn || filtersState;
const params = paramsIn || paramsState;
// 防止标签闪烁
this.setState({ filters });
// 若params或filters含特殊字符表格数据置空
const isIncludeSpecialCode = handleFiltersParams(params, filters);
if (isIncludeSpecialCode) {
UserStore.setUsers([]);
this.setState({
pagination: {
total: 0,
},
params,
sort,
});
return;
}
UserStore.loadUsers(
id,
pagination,
sort,
filters,
params,
).then((data) => {
UserStore.setUsers(data.list || []);
this.setState({
pagination: {
current: data.pageNum,
pageSize: data.pageSize,
total: data.total,
},
params,
sort,
});
})
.catch(error => Choerodon.handleResponseError(error));
};
handleCreate = () => {
this.setState({
visible: true,
status: 'create',
});
};
/*
* 解锁
* */
handleUnLock = (record) => {
const { AppState, UserStore, intl } = this.props;
const menuType = AppState.currentMenuType;
const organizationId = menuType.id;
UserStore.unLockUser(organizationId, record.id).then(() => {
Choerodon.prompt(intl.formatMessage({ id: `${intlPrefix}.unlock.success` }));
this.loadUser();
}).catch((error) => {
Choerodon.prompt(intl.formatMessage({ id: `${intlPrefix}.unlock.failed` }));
});
};
/*
* 启用停用
* */
handleAble = (record) => {
const { UserStore, AppState, intl } = this.props;
const menuType = AppState.currentMenuType;
const organizationId = menuType.id;
if (record.enabled) {
// 禁用
UserStore.UnenableUser(organizationId, record.id, !record.enabled).then(() => {
Choerodon.prompt(intl.formatMessage({ id: 'disable.success' }));
this.loadUser();
}).catch((error) => {
Choerodon.prompt(intl.formatMessage({ id: 'disable.error' }));
});
} else {
UserStore.EnableUser(organizationId, record.id, !record.enabled).then(() => {
Choerodon.prompt(intl.formatMessage({ id: 'enable.success' }));
this.loadUser();
}).catch((error) => {
Choerodon.prompt(intl.formatMessage({ id: 'enable.error' }));
});
}
};
/**
* 重置用户密码
* @param record
*/
handleReset = (record) => {
const { intl } = this.props;
const { loginName } = record;
const { UserStore, AppState } = this.props;
const organizationId = AppState.currentMenuType.id;
Modal.confirm({
className: 'c7n-iam-confirm-modal',
title: intl.formatMessage({ id: `${intlPrefix}.reset.title` }),
content: intl.formatMessage({ id: `${intlPrefix}.reset.content` }, { loginName }),
onOk: () => UserStore.resetUserPwd(organizationId, record.id).then(({ failed, message }) => {
if (failed) {
Choerodon.prompt(message);
} else {
Choerodon.prompt(intl.formatMessage({ id: `${intlPrefix}.reset.success` }));
}
}).catch(() => {
Choerodon.prompt(intl.formatMessage({ id: `${intlPrefix}.reset.failed` }));
}),
});
}
changeLanguage = (code) => {
if (code === 'zh_CN') {
return '简体中文';
} else if (code === 'en_US') {
return 'English';
}
return null;
};
handlePageChange(pagination, filters, { field, order }, params) {
const sorter = [];
if (field) {
sorter.push(field);
if (order === 'descend') {
sorter.push('desc');
}
}
this.loadUser(pagination, sorter.join(','), filters, params);
}
handleDownLoad = (organizationId) => {
const { UserStore } = this.props;
UserStore.downloadTemplate(organizationId).then((result) => {
const blob = new Blob([result], {
type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;charset=utf-8' });
const url = window.URL.createObjectURL(blob);
const linkElement = document.getElementById('c7n-user-download-template');
linkElement.setAttribute('href', url);
linkElement.click();
});
};
upload = (e) => {
e.stopPropagation();
const { UserStore } = this.props;
const uploading = UserStore.getUploading;
const { fileLoading } = this.state;
if (uploading || fileLoading) {
return;
}
const uploadElement = document.getElementsByClassName('c7n-user-upload-hidden')[0];
uploadElement.click();
};
handleUpload = () => {
this.handleUploadInfo(true);
this.setState({
visible: true,
status: 'upload',
});
};
/**
* application/vnd.ms-excel 2003-2007
* application/vnd.openxmlformats-officedocument.spreadsheetml.sheet 2010
*/
getUploadProps = (organizationId) => {
const { intl } = this.props;
return {
multiple: false,
name: 'file',
accept: 'application/vnd.ms-excel, application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
action: organizationId && `${process.env.API_HOST}/iam/v1/organizations/${organizationId}/users/batch_import`,
headers: {
Authorization: `bearer ${Choerodon.getCookie('access_token')}`,
},
showUploadList: false,
onChange: ({ file }) => {
const { status, response } = file;
const { fileLoading } = this.state;
if (status === 'done') {
this.handleUploadInfo(true);
} else if (status === 'error') {
Choerodon.prompt(`${response.message}`);
this.setState({
fileLoading: false,
});
}
if (response && response.failed === true) {
Choerodon.prompt(`${response.message}`);
this.setState({
fileLoading: false,
});
}
if (!fileLoading) {
this.setState({
fileLoading: status === 'uploading',
});
}
},
};
}
handleSubmit = (e) => {
this.editUser.handleSubmit(e);
}
handleUploadInfo = (immediately) => {
const { UserStore, AppState: { currentMenuType, getUserId: userId } } = this.props;
const { id: organizationId } = currentMenuType;
const { fileLoading } = this.state;
const uploadInfo = UserStore.getUploadInfo || {};
if (uploadInfo.finished !== null && fileLoading) {
this.setState({
fileLoading: false,
});
}
if (immediately) {
UserStore.handleUploadInfo(organizationId, userId);
return;
}
if (uploadInfo.finished !== null) {
clearInterval(this.timer);
return;
}
clearInterval(this.timer);
this.timer = setInterval(() => {
UserStore.handleUploadInfo(organizationId, userId);
this.loadUser();
}, 2000);
}
getSidebarText() {
const { submitting, status, fileLoading } = this.state;
const { UserStore } = this.props;
const uploading = UserStore.getUploading;
if (submitting) {
return <FormattedMessage id="loading" />;
} else if (uploading) {
return <FormattedMessage id="uploading" />;
} else if (fileLoading) {
return <FormattedMessage id={`${intlPrefix}.fileloading`} />;
}
return <FormattedMessage id={status} />;
}
getSpentTime = (startTime, endTime) => {
const { intl } = this.props;
const timeUnit = {
day: intl.formatMessage({ id: 'day' }),
hour: intl.formatMessage({ id: 'hour' }),
minute: intl.formatMessage({ id: 'minute' }),
second: intl.formatMessage({ id: 'second' }),
};
const spentTime = new Date(endTime).getTime() - new Date(startTime).getTime(); // 时间差的毫秒数
// 天数
const days = Math.floor(spentTime / (24 * 3600 * 1000));
// 小时
const leave1 = spentTime % (24 * 3600 * 1000); // 计算天数后剩余的毫秒数
const hours = Math.floor(leave1 / (3600 * 1000));
// 分钟
const leave2 = leave1 % (3600 * 1000); // 计算小时数后剩余的毫秒数
const minutes = Math.floor(leave2 / (60 * 1000));
// 秒数
const leave3 = leave2 % (60 * 1000); // 计算分钟数后剩余的毫秒数
const seconds = Math.round(leave3 / 1000);
const resultDays = days ? (days + timeUnit.day) : '';
const resultHours = hours ? (hours + timeUnit.hour) : '';
const resultMinutes = minutes ? (minutes + timeUnit.minute) : '';
const resultSeconds = seconds ? (seconds + timeUnit.second) : '';
return resultDays + resultHours + resultMinutes + resultSeconds;
}
getUploadInfo = () => {
const { UserStore } = this.props;
const { fileLoading } = this.state;
const uploadInfo = UserStore.getUploadInfo || {};
const uploading = UserStore.getUploading;
const container = [];
if (uploading) {
container.push(this.renderLoading());
this.handleUploadInfo();
if (fileLoading) {
this.setState({
fileLoading: false,
});
}
} else if (fileLoading) {
container.push(this.renderLoading());
} else if (!uploadInfo.noData) {
const failedStatus = uploadInfo.finished ? 'detail' : 'error';
container.push(
<p key={`${intlPrefix}.upload.lasttime`}>
<FormattedMessage id={`${intlPrefix}.upload.lasttime`} />
{uploadInfo.beginTime}
(<FormattedMessage id={`${intlPrefix}.upload.spendtime`} />
{this.getSpentTime(uploadInfo.beginTime, uploadInfo.endTime)})
</p>,
<p key={`${intlPrefix}.upload.time`}>
<FormattedMessage
id={`${intlPrefix}.upload.time`}
values={{
successCount: <span className="success-count">{uploadInfo.successfulCount || 0}</span>,
failedCount: <span className="failed-count">{uploadInfo.failedCount || 0}</span>,
}}
/>
{uploadInfo.url && (
<span className={`download-failed-${failedStatus}`}>
<a href={uploadInfo.url}>
<FormattedMessage id={`${intlPrefix}.download.failed.${failedStatus}`} />
</a>
</span>
)}
</p>,
);
} else {
container.push(<p key={`${intlPrefix}.upload.norecord`}><FormattedMessage id={`${intlPrefix}.upload.norecord`} /></p>);
}
return (
<div className="c7n-user-upload-container">
{container}
</div>
);
};
renderLoading() {
const { intl: { formatMessage } } = this.props;
const { fileLoading } = this.state;
return (
<div className="c7n-user-uploading-container" key="c7n-user-uploading-container">
<div className="loading">
<Spin size="large" />
</div>
<p className="text">{formatMessage({
id: `${intlPrefix}.${fileLoading ? 'fileloading' : 'uploading'}.text` })}
</p>
{!fileLoading && (<p className="tip">{formatMessage({ id: `${intlPrefix}.uploading.tip` })}</p>)}
</div>
);
}
renderUpload(organizationId, organizationName) {
return (
<Content
code={`${intlPrefix}.upload`}
values={{
name: organizationName,
}}
className="sidebar-content"
>
<div style={{ width: '512px' }}>
{this.getUploadInfo()}
</div>
<div style={{ display: 'none' }}>
<Upload {...this.getUploadProps(organizationId)}>
<Button className="c7n-user-upload-hidden" />
</Upload>
</div>
</Content>);
}
renderSideTitle() {
const { status } = this.state;
switch (status) {
case 'create':
return <FormattedMessage id={`${intlPrefix}.create`} />;
case 'modify':
return <FormattedMessage id={`${intlPrefix}.modify`} />;
case 'upload':
return <FormattedMessage id={`${intlPrefix}.upload`} />;
default:
return '';
}
}
renderSideBar() {
const { selectedData, status, visible } = this.state;
return (
<UserEdit
id={selectedData}
visible={visible}
edit={status === 'modify'}
onRef={(node) => {
this.editUser = node;
}}
OnUnchangedSuccess={() => {
this.setState({
visible: false,
submitting: false,
});
}}
onSubmit={() => {
this.setState({
submitting: true,
});
}}
onSuccess={() => {
this.setState({
visible: false,
submitting: false,
});
this.loadUser();
}}
onError={() => {
this.setState({
submitting: false,
});
}}
/>
);
}
render() {
const {
UserStore: { getUsers, isLoading },
AppState: { currentMenuType, getType },
intl } = this.props;
const { filters, pagination, visible, status, submitting, params } = this.state;
const { id: organizationId, name: organizationName, type: menuType } = currentMenuType;
let type;
if (getType) {
type = getType;
} else if (sessionStorage.type) {
type = sessionStorage.type;
} else {
type = menuType;
}
const data = getUsers.slice() || [];
const columns = [
{
title: <FormattedMessage id={`${intlPrefix}.loginname`} />,
dataIndex: 'loginName',
key: 'loginName',
width: '20%',
filters: [],
filteredValue: filters.loginName || [],
render: text => (
<MouseOverWrapper text={text} width={0.15}>
{text}
</MouseOverWrapper>
),
}, {
title: <FormattedMessage id={`${intlPrefix}.realname`} />,
key: 'realName',
dataIndex: 'realName',
width: '20%',
filters: [],
filteredValue: filters.realName || [],
render: text => (
<MouseOverWrapper text={text} width={0.15}>
{text}
</MouseOverWrapper>
),
},
{
title: <FormattedMessage id={`${intlPrefix}.source`} />,
key: 'ldap',
width: '20%',
render: (text, record) => (
record.ldap
? <FormattedMessage id={`${intlPrefix}.ldap`} />
: <FormattedMessage id={`${intlPrefix}.notldap`} />
),
filters: [
{
text: intl.formatMessage({ id: `${intlPrefix}.ldap` }),
value: 'true',
}, {
text: intl.formatMessage({ id: `${intlPrefix}.notldap` }),
value: 'false',
},
],
filteredValue: filters.ldap || [],
},
// {
// title: <FormattedMessage id={`${intlPrefix}.language`} />,
// dataIndex: 'language',
// key: 'language',
// width: '17%',
// render: (text, record) => (
// this.changeLanguage(record.language)
// ),
// filters: [
// {
// text: '简体中文',
// value: 'zh_CN',
// }, {
// text: 'English',
// value: 'en_US',
// },
// ],
// filteredValue: filters.language || [],
// },
{
title: <FormattedMessage id={`${intlPrefix}.enabled`} />,
key: 'enabled',
dataIndex: 'enabled',
width: '15%',
render: text => (<StatusTag mode="icon" name={intl.formatMessage({ id: text ? 'enable' : 'disable' })} colorCode={text ? 'COMPLETED' : 'DISABLE'} />),
filters: [
{
text: intl.formatMessage({ id: 'enable' }),
value: 'true',
}, {
text: intl.formatMessage({ id: 'disable' }),
value: 'false',
},
],
filteredValue: filters.enabled || [],
}, {
title: <FormattedMessage id={`${intlPrefix}.locked`} />,
key: 'locked',
width: '15%',
render: (text, record) => (
record.locked
? <FormattedMessage id={`${intlPrefix}.lock`} />
: <FormattedMessage id={`${intlPrefix}.normal`} />
),
filters: [
{
text: intl.formatMessage({ id: `${intlPrefix}.normal` }),
value: 'false',
},
{
text: intl.formatMessage({ id: `${intlPrefix}.lock` }),
value: 'true',
},
],
filteredValue: filters.locked || [],
}, {
title: '',
key: 'action',
align: 'right',
render: (text, record) => {
const actionDatas = [{
service: ['iam-service.organization-user.update'],
icon: '',
text: intl.formatMessage({ id: 'modify' }),
action: this.onEdit.bind(this, record.id),
}];
if (record.enabled) {
actionDatas.push({
service: ['iam-service.organization-user.disableUser'],
icon: '',
text: intl.formatMessage({ id: 'disable' }),
action: this.handleAble.bind(this, record),
});
} else {
actionDatas.push({
service: ['iam-service.organization-user.enableUser'],
icon: '',
text: intl.formatMessage({ id: 'enable' }),
action: this.handleAble.bind(this, record),
});
}
if (record.locked) {
actionDatas.push({
service: ['iam-service.organization-user.unlock'],
icon: '',
text: intl.formatMessage({ id: `${intlPrefix}.unlock` }),
action: this.handleUnLock.bind(this, record),
});
}
actionDatas.push({
service: ['iam-service.organization-user.resetUserPassword'],
icon: '',
text: intl.formatMessage({ id: `${intlPrefix}.reset` }),
action: this.handleReset.bind(this, record),
});
return <Action data={actionDatas} getPopupContainer={() => document.getElementsByClassName('page-content')[0]} />;
},
}];
return (
<Page
service={[
'iam-service.organization-user.create',
'iam-service.organization-user.list',
'iam-service.organization-user.query',
'iam-service.organization-user.update',
'iam-service.organization-user.delete',
'iam-service.organization-user.disableUser',
'iam-service.organization-user.enableUser',
'iam-service.organization-user.unlock',
'iam-service.organization-user.check',
'iam-service.organization-user.resetUserPassword',
]}
>
<Header title={<FormattedMessage id={`${intlPrefix}.header.title`} />}>
<Permission
service={['iam-service.organization-user.create']}
type={type}
organizationId={organizationId}
>
<Button
onClick={this.handleCreate}
icon="playlist_add"
>
<FormattedMessage id={`${intlPrefix}.create`} />
</Button>
</Permission>
<Button
onClick={this.handleDownLoad.bind(this, organizationId)}
icon="get_app"
>
<FormattedMessage id={`${intlPrefix}.download.template`} />
<a id="c7n-user-download-template" href="" onClick={(event) => { event.stopPropagation(); }} download="userTemplate.xlsx" />
</Button>
<Button
icon="file_upload"
onClick={this.handleUpload}
>
<FormattedMessage id={`${intlPrefix}.upload.file`} />
</Button>
<Button
onClick={this.handleRefresh}
icon="refresh"
>
<FormattedMessage id="refresh" />
</Button>
</Header>
<Content
code={intlPrefix}
values={{ name: organizationName }}
>
<Table
size="middle"
pagination={pagination}
columns={columns}
dataSource={data}
rowKey="id"
onChange={this.handlePageChange.bind(this)}
loading={isLoading}
filters={params}
filterBarPlaceholder={intl.formatMessage({ id: 'filtertable' })}
/>
<Sidebar
title={this.renderSideTitle()}
visible={visible}
okText={this.getSidebarText()}
cancelText={<FormattedMessage id={status === 'upload' ? 'close' : 'cancel'} />}
onOk={status === 'upload' ? this.upload : this.handleSubmit}
onCancel={() => {
this.setState({
visible: false,
selectedData: '',
});
}}
confirmLoading={submitting}
>
{status === 'upload'
? this.renderUpload(organizationId, organizationName)
: this.renderSideBar()
}
</Sidebar>
</Content>
</Page>
);
}
}
<|start_filename|>src/test/groovy/io/choerodon/iam/app/service/impl/LdapSyncUserQuartzTaskSpec.groovy<|end_filename|>
package io.choerodon.iam.app.service.impl
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.app.service.LdapService
import io.choerodon.iam.app.task.LdapSyncUserQuartzTask
import io.choerodon.iam.infra.common.utils.ldap.LdapSyncUserTask
import io.choerodon.iam.infra.mapper.OrganizationMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * Spec for LdapSyncUserQuartzTask; all feature methods are currently
 * commented out, so only the class wiring is exercised.
 *
 * @author dengyouquan
 * */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class LdapSyncUserQuartzTaskSpec extends Specification {
// mocked collaborator; real beans are autowired for the mapper and task below
private LdapService ldapService = Mock(LdapService)
@Autowired
private OrganizationMapper organizationMapper
@Autowired
private LdapSyncUserTask ldapSyncUserTask
//    @Autowired
//    private ILdapService iLdapService
//    private LdapHistoryRepository ldapHistoryRepository = Mock(LdapHistoryRepository)
private LdapSyncUserQuartzTask ldapSyncUserQuartzTask
// NOTE(review): setup() references ldapHistoryRepository and iLdapService,
// whose declarations are commented out above. Because every feature method is
// also commented out, setup() never runs; restoring a test without restoring
// those fields would fail at runtime with MissingPropertyException — confirm
// and re-enable the field declarations together with the tests.
def setup() {
ldapSyncUserQuartzTask = new LdapSyncUserQuartzTask(ldapService,
organizationMapper, ldapSyncUserTask, ldapHistoryRepository, iLdapService)
}
//    def "SyncLdapUser"() {
//        given: "构造请求参数"
//        Map<String, Object> map = new HashMap<>()
//        map.put("organizationCode", "error")
//        map.put("countLimit", 500)
//        LdapDTO ldapDTO = new LdapDTO()
//        ldapDTO.setId(1L)
//        ldapDTO.setObjectClass("person")
//        ldapDTO.setOrganizationId(1L)
//
//        when: "调用方法"
//        ldapSyncUserQuartzTask.syncLdapUser(map)
//
//        then: "校验结果"
//        def exception = thrown(CommonException)
//        exception.message.equals("error.ldapSyncUserTask.organizationNotNull")
//
//        when: "调用方法"
//        map.put("organizationCode", "operation")
//        map.put("countLimit", 500)
//        ldapSyncUserQuartzTask.syncLdapUser(map)
//
//        then: "校验结果"
////        exception = thrown(CommonException)
//        exception.message.equals("error.ldapSyncUserTask.organizationNotNull")
//        1 * ldapService.queryByOrganizationId(_ as Long) >> { ldapDTO }
//        1 * ldapService.validateLdap(_ as Long, _ as Long) >> { ConvertHelper.convert(ldapDTO, LdapDO) }
//    }
}
<|start_filename|>react/src/app/iam/stores/user/permission-info/index.js<|end_filename|>
// Forward the store singleton as this module's default export.
export { default } from './PermissionInfoStore';
<|start_filename|>src/main/java/io/choerodon/iam/api/validator/PasswordPolicyValidator.java<|end_filename|>
package io.choerodon.iam.api.validator;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.infra.dto.PasswordPolicyDTO;
import io.choerodon.iam.infra.mapper.PasswordPolicyMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
 * Validates create/update requests for an organization's password policy.
 *
 * @author wuguokai
 */
@Component
public class PasswordPolicyValidator {
    @Autowired
    private PasswordPolicyMapper passwordPolicyMapper;
    /**
     * Checks that a password policy may be created for the given organization:
     * the organization must not already own a policy and the code must be unique.
     *
     * @param orgId             organization the new policy belongs to
     * @param passwordPolicyDTO policy being created (only its code is inspected)
     * @throws CommonException "error.passwordPolicy.organizationId.exist" when the
     *                         organization already has a policy, or
     *                         "error.passwordPolicy.code.exist" when the code is taken
     */
    public void create(Long orgId, PasswordPolicyDTO passwordPolicyDTO) {
        PasswordPolicyDTO dto = new PasswordPolicyDTO();
        dto.setOrganizationId(orgId);
        if (!passwordPolicyMapper.select(dto).isEmpty()) {
            throw new CommonException("error.passwordPolicy.organizationId.exist");
        }
        // Reuse the probe object: code uniqueness is checked globally, so drop the org filter.
        dto.setOrganizationId(null);
        dto.setCode(passwordPolicyDTO.getCode());
        if (!passwordPolicyMapper.select(dto).isEmpty()) {
            throw new CommonException("error.passwordPolicy.code.exist");
        }
    }
    /**
     * Checks that an update to an existing password policy is consistent and strips
     * immutable fields (code, organizationId) from the payload.
     * <p>
     * Robustness fix: the per-category counts and length bounds are boxed Integers that
     * may be absent on a partial update payload; null counts are treated as 0 and a
     * length comparison is skipped when either bound is missing, instead of throwing
     * a NullPointerException on unboxing.
     *
     * @param orgId             organization expected to own the policy
     * @param passwordPolicyId  primary key of the policy being updated
     * @param passwordPolicyDTO incoming update payload (mutated: code/orgId cleared)
     * @throws CommonException if the policy does not exist, belongs to another
     *                         organization, or the length constraints are inconsistent
     */
    public void update(Long orgId, Long passwordPolicyId, PasswordPolicyDTO passwordPolicyDTO) {
        PasswordPolicyDTO dto = passwordPolicyMapper.selectByPrimaryKey(passwordPolicyId);
        if (dto == null) {
            throw new CommonException("error.passwordPolicy.not.exist");
        }
        if (!orgId.equals(dto.getOrganizationId())) {
            throw new CommonException("error.passwordPolicy.organizationId.not.same");
        }
        // The sum of all the fields with least length requirement must not exceed maxLength.
        int allLeastRequiredLength = zeroIfNull(passwordPolicyDTO.getDigitsCount())
                + zeroIfNull(passwordPolicyDTO.getSpecialCharCount())
                + zeroIfNull(passwordPolicyDTO.getLowercaseCount())
                + zeroIfNull(passwordPolicyDTO.getUppercaseCount());
        Integer maxLength = passwordPolicyDTO.getMaxLength();
        Integer minLength = passwordPolicyDTO.getMinLength();
        if (maxLength != null && allLeastRequiredLength > maxLength) {
            throw new CommonException("error.allLeastRequiredLength.greaterThan.maxLength");
        }
        if (minLength != null && maxLength != null && minLength > maxLength) {
            throw new CommonException("error.maxLength.lessThan.minLength");
        }
        // Code and organization are immutable; clear them so a selective update won't touch them.
        passwordPolicyDTO.setCode(null);
        passwordPolicyDTO.setOrganizationId(null);
    }
    /** Treats a missing (null) boxed count as zero. */
    private static int zeroIfNull(Integer value) {
        return value == null ? 0 : value;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/dto/RoleLabelDTO.java<|end_filename|>
package io.choerodon.iam.infra.dto;
import io.choerodon.mybatis.entity.BaseDTO;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
/**
 * Entity mapped to the {@code iam_role_label} join table, linking a role to a label.
 *
 * @author superlee
 * @since 2019-04-23
 */
@Table(name = "iam_role_label")
public class RoleLabelDTO extends BaseDTO {
    // Surrogate primary key, database auto-increment.
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;
    // Id of the associated role (presumably FK to the role table — verify against schema).
    private Long roleId;
    // Id of the associated label (presumably FK to the label table — verify against schema).
    private Long labelId;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public Long getRoleId() {
        return roleId;
    }
    public void setRoleId(Long roleId) {
        this.roleId = roleId;
    }
    public Long getLabelId() {
        return labelId;
    }
    public void setLabelId(Long labelId) {
        this.labelId = labelId;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/UserDashboardServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.choerodon.core.iam.ResourceLevel;
import io.choerodon.core.oauth.CustomUserDetails;
import io.choerodon.core.oauth.DetailsHelper;
import io.choerodon.iam.api.dto.DashboardPositionDTO;
import io.choerodon.iam.app.service.UserDashboardService;
import io.choerodon.iam.api.validator.MemberRoleValidator;
import io.choerodon.iam.infra.dto.DashboardDTO;
import io.choerodon.iam.infra.dto.UserDashboardDTO;
import io.choerodon.iam.infra.mapper.DashboardMapper;
import io.choerodon.iam.infra.mapper.DashboardRoleMapper;
import io.choerodon.iam.infra.mapper.UserDashboardMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
/**
 * Service managing the dashboard cards a user sees at a given resource level
 * (site / organization / project): listing the merged card set, persisting layout
 * updates, and resetting personalization.
 *
 * @author <EMAIL>
 */
@Service("userDashboardService")
public class UserDashboardServiceImpl implements UserDashboardService {
    // NOTE(review): the logger is created for the interface class, not the impl —
    // log records will be attributed to UserDashboardService; confirm this is intended.
    private static final Logger LOGGER = LoggerFactory.getLogger(UserDashboardService.class);
    // Serializes DashboardPositionDTO to/from the JSON string held in the position field.
    private ObjectMapper objectMapper = new ObjectMapper();
    private UserDashboardMapper userDashboardMapper;
    private DashboardMapper dashboardMapper;
    private DashboardRoleMapper dashboardRoleMapper;
    private MemberRoleValidator memberRoleValidator;
    public UserDashboardServiceImpl(
            UserDashboardMapper userDashboardMapper,
            DashboardMapper dashboardMapper,
            DashboardRoleMapper dashboardRoleMapper,
            MemberRoleValidator memberRoleValidator) {
        this.userDashboardMapper = userDashboardMapper;
        this.dashboardMapper = dashboardMapper;
        this.dashboardRoleMapper = dashboardRoleMapper;
        this.memberRoleValidator = memberRoleValidator;
    }
    /**
     * Lists the dashboards visible to the current user at the given level/source:
     * merges per-user records with dashboards that have no user record yet, then
     * filters by role requirements and deserializes stored positions.
     *
     * @param level    resource level value (site/organization/project)
     * @param sourceId id of the organization/project the dashboards belong to
     * @return visible dashboards; empty list when there is no authenticated user
     */
    @Override
    public List<UserDashboardDTO> list(String level, Long sourceId) {
        CustomUserDetails userDetails = DetailsHelper.getUserDetails();
        if (null == userDetails) {
            return new ArrayList<>();
        }
        boolean isAdmin = false;
        if (userDetails.getAdmin() != null) {
            isAdmin = userDetails.getAdmin();
        }
        // Site level needs no membership check; other levels validate the user's role first.
        if (!ResourceLevel.SITE.value().equals(level)) {
            memberRoleValidator.userHasRoleValidator(userDetails, level, sourceId, isAdmin);
        }
        UserDashboardDTO userDashboardDTO = new UserDashboardDTO();
        userDashboardDTO.setUserId(userDetails.getUserId());
        userDashboardDTO.setLevel(level);
        userDashboardDTO.setSourceId(sourceId);
        List<UserDashboardDTO> userDashboards = userDashboardMapper.selectWithDashboard(userDashboardDTO);
        List<UserDashboardDTO> userDashboards1 = userDashboardMapper.selectWithDashboardNotExist(userDashboardDTO);
        // Append dashboards that exist in the catalogue but have no per-user record yet.
        List<UserDashboardDTO> userDashboardsAdd = userDashboards1.stream().filter(
                userDashboardDTO1 -> !dashboardExist(userDashboards, userDashboardDTO1))
                .collect(Collectors.toList());
        userDashboards.addAll(userDashboardsAdd);
        return userDashboardHasRoleAndConvertPosition(userDashboards, userDetails.getUserId(), sourceId, level, isAdmin);
    }
    /**
     * Persists the user's dashboard layout. Missing per-user rows are first backfilled
     * from the level's dashboard catalogue (so every incoming entry has a row to
     * update), then each entry's position JSON is written.
     *
     * @return the refreshed dashboard list for the same level/source
     */
    @Override
    public List<UserDashboardDTO> update(String level, Long sourceId, List<UserDashboardDTO> userDashboards) {
        CustomUserDetails userDetails = DetailsHelper.getUserDetails();
        if (null == userDetails) {
            return new ArrayList<>();
        }
        // Backfill: user has fewer user_dashboard rows than dashboards defined at this level.
        if (!isEquals(userDetails.getUserId(), sourceId, level)) {
            List<DashboardDTO> dashboardList = dashboardMapper.selectByLevel(level);
            for (DashboardDTO dashboard : dashboardList) {
                UserDashboardDTO userDashboardDTO = new UserDashboardDTO();
                userDashboardDTO.setDashboardId(dashboard.getId());
                userDashboardDTO.setUserId(userDetails.getUserId());
                userDashboardDTO.setLevel(level);
                userDashboardDTO.setSourceId(sourceId);
                // Skip dashboards the user already has a row for.
                if (null != userDashboardMapper.selectOne(userDashboardDTO)) {
                    continue;
                }
                userDashboardDTO.setLevel(dashboard.getLevel());
                userDashboardDTO.setSort(dashboard.getSort());
                // Site-level rows are stored with sourceId 0.
                if (ResourceLevel.SITE.value().equals(level)) {
                    userDashboardDTO.setSourceId(0L);
                }
                userDashboardMapper.insertSelective(userDashboardDTO);
                // Propagate the generated id back into the matching incoming entry so the
                // update loop below can address the freshly created row.
                for (UserDashboardDTO dto : userDashboards) {
                    if (dashboard.getId().equals(dto.getDashboardId())) {
                        dto.setId(userDashboardDTO.getId());
                        break;
                    }
                }
            }
        }
        for (UserDashboardDTO userDashboardDTO : userDashboards) {
            userDashboardDTO.setPosition(convertPositionDTOToJson(userDashboardDTO.getPositionDTO()));
            userDashboardDTO.setUserId(userDetails.getUserId());
            userDashboardDTO.setLevel(level);
            userDashboardDTO.setSourceId(sourceId);
            userDashboardMapper.updateByPrimaryKeySelective(userDashboardDTO);
        }
        return list(level, sourceId);
    }
    /**
     * Serializes a position to JSON for storage. Returns null when the position is
     * absent/empty or serialization fails; null coordinates default to 0.
     */
    private String convertPositionDTOToJson(DashboardPositionDTO positionDTO) {
        if (positionDTO == null ||
                (positionDTO.getPositionX() == null && positionDTO.getPositionY() == null
                        && positionDTO.getHeight() == null && positionDTO.getWidth() == null)) {
            return null;
        }
        if (positionDTO.getPositionX() == null) {
            positionDTO.setPositionX(0);
        }
        if (positionDTO.getPositionY() == null) {
            positionDTO.setPositionY(0);
        }
        if (positionDTO.getHeight() == null) {
            positionDTO.setHeight(0);
        }
        if (positionDTO.getWidth() == null) {
            positionDTO.setWidth(0);
        }
        try {
            return objectMapper.writeValueAsString(positionDTO);
        } catch (JsonProcessingException e) {
            LOGGER.warn("error.userDashboardService.convertPositionDTOToJson.JsonProcessingException", e);
            return null;
        }
    }
    /**
     * True when the user already has one user_dashboard row per dashboard at the level.
     * NOTE(review): the count probe filters only by userId and sourceId, not by level —
     * verify this matches the intended comparison against dashboards of that level.
     */
    private boolean isEquals(Long userId, Long sourceId, String level) {
        UserDashboardDTO userDashboardDTO = new UserDashboardDTO();
        userDashboardDTO.setSourceId(sourceId);
        userDashboardDTO.setUserId(userId);
        return userDashboardMapper.selectCount(userDashboardDTO) == dashboardMapper.selectByLevel(level).size();
    }
    // Linear scan: does the list already contain an entry for the same dashboardId?
    private boolean dashboardExist(List<UserDashboardDTO> userDashboardList, UserDashboardDTO userDashboard) {
        boolean isExist = false;
        for (UserDashboardDTO userDashboard1 : userDashboardList) {
            if (userDashboard.getDashboardId().equals(userDashboard1.getDashboardId())) {
                isExist = true;
                break;
            }
        }
        return isExist;
    }
    // Filters out dashboards whose role requirement the user does not meet, then
    // deserializes each surviving entry's stored position JSON.
    private List<UserDashboardDTO> userDashboardHasRoleAndConvertPosition(
            List<UserDashboardDTO> userDashboardList, Long userId, Long sourceId, String level, Boolean isAdmin) {
        List<Long> dashboardIds = dashboardRoleMapper.selectDashboardByUserId(userId, sourceId, level);
        return userDashboardList.stream()
                .filter(userDashboard -> dashboardNeedRole(dashboardIds, userDashboard, isAdmin))
                .map(this::convertPosition)
                .collect(Collectors.toList());
    }
    // Parses the stored position JSON into positionDTO; falls back to an all-zero
    // position when the column is empty or unparseable.
    private UserDashboardDTO convertPosition(final UserDashboardDTO dto) {
        if (StringUtils.isEmpty(dto.getPosition())) {
            dto.setPositionDTO(new DashboardPositionDTO(0, 0, 0, 0));
        } else {
            try {
                dto.setPositionDTO(objectMapper.readValue(dto.getPosition(), DashboardPositionDTO.class));
            } catch (IOException e) {
                dto.setPositionDTO(new DashboardPositionDTO(0, 0, 0, 0));
            }
        }
        return dto;
    }
    // Admins see everything; otherwise a dashboard is visible when it requires no role
    // (needRoles null/false) or the user holds one of its roles.
    private boolean dashboardNeedRole(List<Long> dashboardIds, UserDashboardDTO userDashboard, Boolean isAdmin) {
        if (isAdmin) {
            return true;
        }
        return !Optional.ofNullable(userDashboard.getNeedRoles()).orElse(false) || dashboardIds.contains(userDashboard.getDashboardId());
    }
    /**
     * Deletes the current user's dashboard personalization for the level/source,
     * effectively resetting to defaults. No-op when there is no authenticated user.
     */
    @Override
    public void reset(String level, Long sourceId) {
        CustomUserDetails userDetails = DetailsHelper.getUserDetails();
        if (userDetails == null || userDetails.getUserId() == null) {
            LOGGER.warn("error.userDashboardService.delete.userDetailsInvalid");
            return;
        }
        UserDashboardDTO userDashboardDTO = new UserDashboardDTO();
        userDashboardDTO.setUserId(userDetails.getUserId());
        userDashboardDTO.setLevel(level);
        userDashboardDTO.setSourceId(sourceId);
        int row = userDashboardMapper.deleteWithDashboard(userDashboardDTO);
        LOGGER.trace("delete userDashboard row {}, userId {}, level {}, sourceId {}", row, userDetails.getUserId(), level, sourceId);
    }
}
<|start_filename|>react/src/app/iam/stores/user/token-manager/index.js<|end_filename|>
// Barrel file: re-exports the store so consumers can import the directory directly.
import TokenManagerStore from './TokenManagerStore';
export default TokenManagerStore;
<|start_filename|>src/main/java/io/choerodon/iam/infra/enums/MemberType.java<|end_filename|>
package io.choerodon.iam.infra.enums;
/**
 * MemberType for table iam_member_role
 *
 * @author zmf
 */
public enum MemberType {
    // Membership held by a client application.
    CLIENT("client"),
    // Membership held by a human user.
    USER("user");
    // Lowercase string form of the member type.
    private final String value;
    MemberType(String value) {
        this.value = value;
    }
    /** Returns the string form of this member type. */
    public String value() {
        return this.value;
    }
}
<|start_filename|>react/src/app/iam/containers/global/dashboard-setting/DashboardSetting.js<|end_filename|>
import React, { Component } from 'react';
import { toJS } from 'mobx';
import { inject, observer } from 'mobx-react';
import { Button, Form, Icon, IconSelect, Input, Modal, Select, Table, Tooltip, Radio } from 'choerodon-ui';
import { Content, Header, Page, Permission } from '@choerodon/boot';
import { FormattedMessage, injectIntl } from 'react-intl';
import './DashboardSetting.scss';
import MouseOverWrapper from '../../../components/mouseOverWrapper';
import RoleStore from '../../../stores/global/role/RoleStore';
import StatusTag from '../../../components/statusTag';
// Component aliases and shared layout constants for the dashboard-setting page.
const RadioGroup = Radio.Group;
const { Sidebar } = Modal;
const { Option } = Select;
const intlPrefix = 'global.dashboard-setting';
const FormItem = Form.Item;
const formItemLayout = {
  labelCol: {
    xs: { span: 24 },
    sm: { span: 100 },
  },
  wrapperCol: {
    xs: { span: 24 },
    sm: { span: 10 },
  },
};
const inputWidth = 512;
// Admin page for managing dashboard cards: list/filter them in a table and edit
// name/title/icon/role-visibility in a sidebar form. State lives in
// DashboardSettingStore (injected via props); RoleStore supplies role options.
@Form.create({})
@injectIntl
@inject('AppState')
@observer
class DashboardSetting extends Component {
  constructor(props) {
    super(props);
    // NOTE(review): this createRef() is never used as a ref object — editCard()'s
    // ref callback below overwrites this.editFocusInput with the Input instance.
    this.editFocusInput = React.createRef();
  }
  componentWillMount() {
    this.fetchData();
  }
  // Toggles a dashboard's enabled flag through the store and reports the outcome.
  handleDisable = (record) => {
    const { intl, DashboardSettingStore } = this.props;
    DashboardSettingStore.dashboardDisable(record)
      .then(() => {
        Choerodon.prompt(intl.formatMessage({ id: record.enabled ? 'disable.success' : 'enable.success' }));
      })
      .catch(Choerodon.handleResponseError);
  };
  // Flips the "needs roles" radio and marks roles as needing to be re-saved.
  handleRoleClick = () => {
    const { DashboardSettingStore } = this.props;
    DashboardSettingStore.setNeedUpdateRoles(true);
    DashboardSettingStore.setNeedRoles(!DashboardSettingStore.needRoles);
  };
  handleRefresh = () => {
    this.props.DashboardSettingStore.refresh();
  };
  // Sidebar OK: validate, trim string fields, and persist only when something changed.
  handleOk = () => {
    const { form, intl, DashboardStore, DashboardSettingStore } = this.props;
    form.validateFields((error, values, modify) => {
      Object.keys(values).forEach((key) => {
        // 去除form提交的数据中的全部前后空格
        if (typeof values[key] === 'string') values[key] = values[key].trim();
      });
      if (!error) {
        if (modify || DashboardSettingStore.needUpdateRoles) {
          DashboardSettingStore.updateData(values).then((data) => {
            // Keep the cached dashboard data (if the dashboard feature is on) in sync.
            if (DashboardStore) {
              DashboardStore.updateCachedData(data);
            }
            Choerodon.prompt(intl.formatMessage({ id: 'modify.success' }));
          });
        } else {
          Choerodon.prompt(intl.formatMessage({ id: 'modify.success' }));
        }
      }
    });
  };
  handleCancel = () => {
    this.props.DashboardSettingStore.hideSideBar();
  };
  handleTableChange = (pagination, filters, sort, params) => {
    this.fetchData(pagination, filters, sort, params);
  };
  fetchData(pagination, filters, sort, params) {
    this.props.DashboardSettingStore.loadData(pagination, filters, sort, params);
  }
  // Opens the sidebar pre-filled for one record; loads role options for its level
  // and focuses the name input once the sidebar has rendered.
  editCard(record) {
    const { DashboardSettingStore, form } = this.props;
    DashboardSettingStore.setNeedRoles(record.needRoles);
    RoleStore.loadRole(record.level, { pageSize: 999 }, {}, {}).then((data) => {
      RoleStore.setRoles(data.list || []);
    });
    DashboardSettingStore.setEditData(record);
    DashboardSettingStore.showSideBar();
    form.resetFields();
    setTimeout(() => {
      this.editFocusInput.input.focus();
    }, 10);
  }
  // Builds the table column definitions (filters reflect the store's current state).
  // NOTE(review): columnKey/order are destructured but unused below — verify whether
  // sorted-column highlighting was intended.
  getTableColumns() {
    const { intl, DashboardSettingStore: { sort: { columnKey, order }, filters } } = this.props;
    return [
      {
        title: <FormattedMessage id={`${intlPrefix}.name`} />,
        dataIndex: 'name',
        key: 'name',
        width: '20%',
        filters: [],
        filteredValue: filters.name || [],
        render: text => (
          <MouseOverWrapper text={text} width={0.1}>
            {text}
          </MouseOverWrapper>
        ),
      }, {
        title: <FormattedMessage id={`${intlPrefix}.namespace`} />,
        dataIndex: 'namespace',
        key: 'namespace',
        width: '13%',
        filters: [],
        filteredValue: filters.namespace || [],
      }, {
        title: <FormattedMessage id={`${intlPrefix}.code`} />,
        dataIndex: 'code',
        key: 'code',
        width: '13%',
        filters: [],
        filteredValue: filters.code || [],
      },
      {
        title: <FormattedMessage id={`${intlPrefix}.card.title`} />,
        dataIndex: 'title',
        key: 'title',
        render: (text, record) => (
          <div>
            <Icon type={record.icon} style={{ fontSize: 20, marginRight: '6px' }} />
            <MouseOverWrapper text={text} width={0.1} style={{ display: 'inline' }}>
              {text}
            </MouseOverWrapper>
          </div>
        ),
      },
      {
        title: <FormattedMessage id={`${intlPrefix}.level`} />,
        dataIndex: 'level',
        key: 'level',
        filters: [
          {
            text: intl.formatMessage({ id: `${intlPrefix}.level.site` }),
            value: 'site',
          }, {
            text: intl.formatMessage({ id: `${intlPrefix}.level.organization` }),
            value: 'organization',
          }, {
            text: intl.formatMessage({ id: `${intlPrefix}.level.project` }),
            value: 'project',
          },
        ],
        filteredValue: filters.level || [],
        render: text => (
          <FormattedMessage id={`${intlPrefix}.level.${text}`} />
        ),
      }, {
        title: <FormattedMessage id={`${intlPrefix}.needRoles`} />,
        dataIndex: 'needRoles',
        key: 'needRoles',
        width: '9%',
        filters: [
          {
            text: intl.formatMessage({ id: 'global.dashboard-setting.needRoles.enable' }),
            value: true,
          }, {
            text: intl.formatMessage({ id: 'global.dashboard-setting.needRoles.disable' }),
            value: false,
          },
        ],
        filteredValue: filters.needRoles || [],
        render: needRoles => intl.formatMessage({ id: `global.dashboard-setting.needRoles.${needRoles ? 'enable' : 'disable'}` }),
      },
      {
        title: <FormattedMessage id="status" />,
        dataIndex: 'enabled',
        key: 'enabled',
        filters: [{
          text: intl.formatMessage({ id: 'enable' }),
          value: 'true',
        }, {
          text: intl.formatMessage({ id: 'disable' }),
          value: 'false',
        }],
        filteredValue: filters.enabled || [],
        render: enabled => (<StatusTag mode="icon" name={intl.formatMessage({ id: enabled ? 'enable' : 'disable' })} colorCode={enabled ? 'COMPLETED' : 'DISABLE'} />),
      },
      {
        title: '',
        width: 100,
        key: 'action',
        align: 'right',
        render: (text, record) => (
          <Permission service={['iam-service.dashboard.update']}>
            <Tooltip
              title={<FormattedMessage id="edit" />}
              placement="bottom"
            >
              <Button
                shape="circle"
                icon="mode_edit"
                size="small"
                onClick={() => this.editCard(record)}
              />
            </Tooltip>
            <Tooltip
              title={<FormattedMessage id={record.enabled ? 'disable' : 'enable'} />}
              placement="bottom"
            >
              <Button
                size="small"
                icon={record.enabled ? 'remove_circle_outline' : 'finished'}
                shape="circle"
                onClick={() => this.handleDisable(record)}
              />
            </Tooltip>
          </Permission>
        ),
      },
    ];
  }
  // Maps the loaded roles to <Option> elements for the role multi-select.
  renderRoleSelect = () => {
    const roles = RoleStore.getRoles;
    return roles.map(item =>
      <Option key={item.code} value={item.code}>{item.name}</Option>);
  };
  // Sidebar edit form: hidden code field, read-only namespaced code display,
  // name/title inputs, icon picker, and role-visibility controls.
  renderForm() {
    const roles = RoleStore.getRoles;
    const {
      form: { getFieldDecorator }, intl,
      DashboardSettingStore: { editData: { code, name, level, icon, title, namespace, roleCodes }, needRoles },
    } = this.props;
    return (
      <Content
        className="dashboard-setting-siderbar-content"
        code={`${intlPrefix}.modify`}
        values={{ name }}
      >
        <Form>
          {
            getFieldDecorator('code', {
              rules: [
                {
                  required: true,
                },
              ],
              initialValue: code,
            })(
              <input type="hidden" />,
            )
          }
          <FormItem {...formItemLayout} className="is-required">
            <Input
              autoComplete="off"
              label={<FormattedMessage id={`${intlPrefix}.code`} />}
              style={{ width: inputWidth }}
              value={`${namespace}-${code}`}
              disabled
            />
          </FormItem>
          <FormItem {...formItemLayout}>
            {
              getFieldDecorator('name', {
                rules: [
                  {
                    required: true,
                    whitespace: true,
                    message: intl.formatMessage({ id: `${intlPrefix}.name.required` }),
                  },
                ],
                initialValue: name,
              })(
                <Input
                  autoComplete="off"
                  label={<FormattedMessage id={`${intlPrefix}.name`} />}
                  style={{ width: inputWidth }}
                  ref={(e) => {
                    this.editFocusInput = e;
                  }}
                  maxLength={32}
                  showLengthInfo={false}
                />,
              )
            }
          </FormItem>
          <FormItem {...formItemLayout}>
            {
              getFieldDecorator('title', {
                rules: [
                  {
                    required: true,
                    whitespace: true,
                    message: intl.formatMessage({ id: `${intlPrefix}.card.title.required` }),
                  },
                ],
                initialValue: title,
              })(
                <Input
                  autoComplete="off"
                  label={<FormattedMessage id={`${intlPrefix}.card.title`} />}
                  style={{ width: inputWidth }}
                  maxLength={32}
                  showLengthInfo={false}
                />,
              )
            }
          </FormItem>
          <FormItem {...formItemLayout}>
            {
              getFieldDecorator('icon', {
                initialValue: icon,
              })(
                <IconSelect
                  label={<FormattedMessage id={`${intlPrefix}.icon`} />}
                  // getPopupContainer={() => document.getElementsByClassName('ant-modal-body')[document.getElementsByClassName('ant-modal-body').length - 1]}
                  style={{ width: inputWidth }}
                  showArrow
                />,
              )
            }
          </FormItem>
          <FormItem {...formItemLayout}>
            <RadioGroup onChange={this.handleRoleClick} value={needRoles}>
              <Radio value><FormattedMessage id={`${intlPrefix}.open-role`} /></Radio>
              <Radio value={false}><FormattedMessage id={`${intlPrefix}.close-role`} /></Radio>
            </RadioGroup>
          </FormItem>
          <FormItem
            {...formItemLayout}
          >
            {getFieldDecorator('roleCodes', {
              valuePropName: 'value',
              initialValue: roleCodes && roleCodes.slice(),
            })(
              <Select
                mode="multiple"
                label={<FormattedMessage id={`${intlPrefix}.role`} />}
                size="default"
                // getPopupContainer={() => document.getElementsByClassName('ant-modal-body')[document.getElementsByClassName('ant-modal-body').length - 1]}
                style={{
                  width: '512px',
                  display: needRoles ? 'inline-block' : 'none',
                }}
              >
                {this.renderRoleSelect()}
              </Select>,
            )}
          </FormItem>
        </Form>
      </Content>
    );
  }
  render() {
    const { AppState, DashboardSettingStore, intl } = this.props;
    const { pagination, params, loading, dashboardData, sidebarVisible } = DashboardSettingStore;
    return (
      <Page
        service={[
          'iam-service.dashboard.list',
          'iam-service.dashboard.query',
          'iam-service.dashboard.update',
        ]}
      >
        <Header title={<FormattedMessage id={`${intlPrefix}.header.title`} />}>
          <Button
            onClick={this.handleRefresh}
            icon="refresh"
          >
            <FormattedMessage id="refresh" />
          </Button>
        </Header>
        <Content
          code={intlPrefix}
          values={{ name: AppState.getSiteInfo.systemName || 'Choerodon' }}
        >
          <Table
            loading={loading}
            className="dashboard-table"
            columns={this.getTableColumns()}
            dataSource={dashboardData.slice()}
            pagination={pagination}
            filters={params}
            onChange={this.handleTableChange}
            rowKey={({ code, namespace }) => `${namespace}-${code}`}
            filterBarPlaceholder={intl.formatMessage({ id: 'filtertable' })}
          />
          <Sidebar
            title={<FormattedMessage id={`${intlPrefix}.sidebar.title`} />}
            onOk={this.handleOk}
            okText={<FormattedMessage id="save" />}
            cancelText={<FormattedMessage id="cancel" />}
            onCancel={this.handleCancel}
            visible={sidebarVisible}
          >
            {this.renderForm()}
          </Sidebar>
        </Content>
      </Page>
    );
  }
}
// Only inject DashboardStore when the dashboard feature is enabled in this build.
export default Choerodon.dashboard ? inject('DashboardStore')(DashboardSetting) : DashboardSetting;
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/LanguageMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.LanguageDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * MyBatis mapper for {@link LanguageDTO} rows.
 *
 * @author superlee
 */
public interface LanguageMapper extends Mapper<LanguageDTO> {
    /**
     * Full-text style search over languages.
     *
     * @param languageDTO filter template — exact matching semantics are defined in the
     *                    corresponding XML mapper (not visible here)
     * @param param       free-text search term
     * @return matching languages
     */
    List<LanguageDTO> fulltextSearch(@Param("languageDTO") LanguageDTO languageDTO,
                                     @Param("param") String param);
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/RoleAssignmentDeleteDTO.java<|end_filename|>
package io.choerodon.iam.api.dto;
import io.swagger.annotations.ApiModelProperty;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;
import java.util.List;
import java.util.Map;
/**
 * Request payload describing role assignments to delete, keyed either by user or by role
 * depending on {@link #view}.
 *
 * @author superlee
 */
public class RoleAssignmentDeleteDTO {
    // NOTE(review): documented as required ("必填") but carries no validation annotation
    // such as @NotEmpty — confirm whether validation is enforced elsewhere.
    @ApiModelProperty(value = "成员类型/必填")
    private String memberType;
    @ApiModelProperty(value = "来源ID/必填")
    @NotNull(message = "error.memberRole.sourceId.null")
    private Long sourceId;
    /**
     * view = "userView": map keys are userIds and values are roleIds.
     * view = "roleView": map keys are roleIds and values are userIds.
     */
    @ApiModelProperty(value = "视图类型,userView(key表示userId, value表示roleIds)、roleView(key表示roleId, value表示userIds)/必填")
    @NotEmpty(message = "error.memberRole.view.empty")
    private String view;
    @ApiModelProperty(value = "角色分配数据/必填")
    @NotNull(message = "error.memberRole.data.null")
    private Map<Long, List<Long>> data;
    public String getMemberType() {
        return memberType;
    }
    public void setMemberType(String memberType) {
        this.memberType = memberType;
    }
    public Map<Long, List<Long>> getData() {
        return data;
    }
    public void setData(Map<Long, List<Long>> data) {
        this.data = data;
    }
    public String getView() {
        return view;
    }
    public void setView(String view) {
        this.view = view;
    }
    public Long getSourceId() {
        return sourceId;
    }
    public void setSourceId(Long sourceId) {
        this.sourceId = sourceId;
    }
}
<|start_filename|>react/src/app/iam/stores/global/root-user/index.js<|end_filename|>
/**
 * Barrel file: re-exports the store so consumers can import the directory directly.
 * Created by hulingfangzi on 2018/5/28.
 */
import RootUserStore from './RootUserStore';
export default RootUserStore;
<|start_filename|>react/src/app/iam/containers/global/system-setting/SystemSetting.js<|end_filename|>
import React, { Component } from 'react';
import { inject, observer, trace } from 'mobx-react';
import { withRouter } from 'react-router-dom';
import { Button, Form, Icon, Input, Select, Spin, InputNumber, Popover, Modal, Radio } from 'choerodon-ui';
import { axios, Content, Header, Page, Permission } from '@choerodon/boot';
import { FormattedMessage, injectIntl } from 'react-intl';
import './SystemSetting.scss';
import '../../../common/ConfirmModal.scss';
import LogoUploader from './LogoUploader';
// import AvatarUploader from '../../../components/AvatarUploader';
// Module-level constants and static help-popover content for the system-setting page.
const intlPrefix = 'global.system-setting';
// NOTE(review): "prefixClas" looks like a typo for "prefixClass"; renaming would also
// require updating the SCSS class names it builds, so it is only flagged here.
const prefixClas = 'c7n-iam-system-setting';
const inputPrefix = 'organization.pwdpolicy';
// Upload size limit expressed in KB (used to build the "1M" prompt below).
const limitSize = 1024;
const FormItem = Form.Item;
const RadioGroup = Radio.Group;
const Option = Select.Option;
const confirm = Modal.confirm;
const { TextArea } = Input;
// NOTE(review): this module-level `dirty` is never read; the component later reads
// `this.dirty` (undefined on the instance) — confirm which was intended.
const dirty = false;
// Static popover bodies explaining each configurable branding element.
const cardContentFavicon = (
  <div>
    <p><FormattedMessage id={`${intlPrefix}.favicon.tips`} /></p>
    <div className={`${prefixClas}-tips-favicon`} />
  </div>
);
const cardContentLogo = (
  <div>
    <p><FormattedMessage id={`${intlPrefix}.logo.tips`} /></p>
    <div className={`${prefixClas}-tips-logo`} />
  </div>
);
const cardContentTitle = (
  <div>
    <p><FormattedMessage id={`${intlPrefix}.title.tips`} /></p>
    <div className={`${prefixClas}-tips-title`} />
  </div>
);
const cardContentName = (
  <div>
    <p><FormattedMessage id={`${intlPrefix}.name.tips`} /></p>
    <div className={`${prefixClas}-tips-name`} />
  </div>
);
// Help icons that show the title/name popovers, anchored to the page content element.
const cardTitle = (
  <Popover content={cardContentTitle} getPopupContainer={() => document.getElementsByClassName('page-content')[0]}>
    <Icon type="help" style={{ fontSize: 16, color: '#bdbdbd' }} />
  </Popover>
);
const cardName = (
  <Popover content={cardContentName} getPopupContainer={() => document.getElementsByClassName('page-content')[0]}>
    <Icon type="help" style={{ fontSize: 16, color: '#bdbdbd' }} />
  </Popover>
);
@Form.create({})
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class SystemSetting extends Component {
state = {
loading: false,
submitting: false,
visible: false,
uploadLogoVisible: false,
};
componentWillMount() {
this.init();
}
init = () => {
const { SystemSettingStore } = this.props;
this.props.form.resetFields();
axios.get('/iam/v1/system/setting').then((data) => {
SystemSettingStore.setUserSetting(data);
SystemSettingStore.setFavicon(data.favicon);
SystemSettingStore.setLogo(data.systemLogo);
});
};
handleReset = () => {
const { SystemSettingStore, intl } = this.props;
SystemSettingStore.resetUserSetting().then(() => {
window.location.reload(true);
},
);
};
showDeleteConfirm = () => {
const that = this;
const { intl } = this.props;
confirm({
className: 'c7n-iam-confirm-modal',
title: intl.formatMessage({ id: `${intlPrefix}.reset.confirm.title` }),
content: intl.formatMessage({ id: `${intlPrefix}.reset.confirm.content` }),
okText: intl.formatMessage({ id: 'yes' }),
okType: 'danger',
cancelText: intl.formatMessage({ id: 'no' }),
onOk() {
that.handleReset();
},
onCancel() {
},
});
};
handleVisibleChange = () => {
const { visible } = this.state;
this.setState({
visible: !visible,
});
};
handleUploadLogoVisibleChange = () => {
const { uploadLogoVisible } = this.state;
this.setState({
uploadLogoVisible: !uploadLogoVisible,
});
};
checkMaxLength = (rule, value, callback) => {
const { getFieldValue } = this.props.form;
const { intl } = this.props;
const minPasswordLength = getFieldValue('minPasswordLength');
if (value < 0) {
callback(intl.formatMessage({ id: `${inputPrefix}.max.length` }));
} else if (value < minPasswordLength) {
callback('最大密码长度须大于或等于最小密码长度');
}
this.props.form.validateFields(['minPasswordLength'], { force: true });
callback();
};
checkMinLength = (rule, value, callback) => {
const { intl } = this.props;
const { getFieldValue } = this.props.form;
const maxPasswordLength = getFieldValue('maxPasswordLength');
if (value < 0) callback(intl.formatMessage({ id: `${inputPrefix}.number.pattern.msg` }));
else if (value > maxPasswordLength) callback(intl.formatMessage({ id: `${inputPrefix}.min.lessthan.more` }));
callback();
};
faviconContainer() {
const { SystemSettingStore } = this.props;
const { visible } = this.state;
const favicon = SystemSettingStore.getFavicon;
return (
<div className={`${prefixClas}-avatar-wrap`}>
<div className={`${prefixClas}-avatar`} style={favicon ? { backgroundImage: `url(${favicon})` } : {}}>
<Button className={`${prefixClas}-avatar-button`} onClick={() => this.setState({ visible: true })}>
<div className={`${prefixClas}-avatar-button-icon`}>
<Icon type="photo_camera" style={{ display: 'block', textAlign: 'center' }} />
</div>
</Button>
<LogoUploader type="favicon" visible={visible} onVisibleChange={this.handleVisibleChange} onSave={(res) => { SystemSettingStore.setFavicon(res); }} />
</div>
<span className={`${prefixClas}-tips`}>
<FormattedMessage id={`${intlPrefix}.favicon`} />
<Popover content={cardContentFavicon} getPopupContainer={() => document.getElementsByClassName('page-content')[0]}>
<Icon type="help" style={{ fontSize: 16, color: '#bdbdbd' }} />
</Popover>
</span>
</div>
);
}
beforeUpload = (file) => {
const { intl } = this.props;
const isLt1M = file.size / 1024 / 1024 < 1;
if (!isLt1M) {
Choerodon.prompt(intl.formatMessage({ id: `${intlPrefix}.file.size.limit` }, { size: `${limitSize / 1024}M` }));
}
return isLt1M;
};
handleFaviconChange = ({ file }) => {
const { status, response } = file;
const { SystemSettingStore } = this.props;
if (status === 'done') {
SystemSettingStore.setFavicon(response);
} else if (status === 'error') {
Choerodon.prompt(`${response.message}`);
}
};
handleLogoChange = ({ file }) => {
const { status, response } = file;
const { SystemSettingStore } = this.props;
if (status === 'uploading') {
this.setState({
loading: true,
});
} else if (status === 'done') {
SystemSettingStore.setLogo(response);
this.setState({
loading: false,
});
} else if (status === 'error') {
Choerodon.prompt(`${response.message}`);
this.setState({
loading: false,
});
}
};
getLanguageOptions() {
return [
<Option key="zh_CN" value="zh_CN"><FormattedMessage id={`${intlPrefix}.language.zhcn`} /></Option>,
<Option disabled key="en_US" value="en_US"><FormattedMessage id={`${intlPrefix}.language.enus`} /></Option>,
];
}
getByteLen = (val) => {
let len = 0;
val = val.split('');
val.forEach((v) => {
if (v.match(/[^\x00-\xff]/ig) != null) {
len += 2;
} else {
len += 1;
}
});
return len;
};
validateToInputName = (rule, value, callback) => {
if (this.getByteLen(value) > 18) {
callback('简称需要小于 9 个汉字或 18 个英文字母');
} else {
callback();
}
};
validateToPassword = (rule, value, callback) => {
if (!(/^[a-zA-Z0-9]{6,15}$/.test(value))) {
callback('密码至少为6位数字或字母组成');
} else {
callback();
}
};
handleSubmit = (e) => {
e.preventDefault();
const { SystemSettingStore, intl } = this.props;
this.setState({
submitting: true,
});
this.props.form.validateFieldsAndScroll((err, values) => {
if (err) {
this.setState({
submitting: false,
});
return;
}
let prevSetting = SystemSettingStore.getUserSetting;
prevSetting = { ...prevSetting };
const submitSetting = {
...values,
favicon: SystemSettingStore.getFavicon,
systemLogo: SystemSettingStore.getLogo,
};
const { defaultLanguage, defaultPassword, systemName, systemTitle, favicon, systemLogo, registerEnabled, registerUrl } = submitSetting;
submitSetting.objectVersionNumber = prevSetting.objectVersionNumber;
if (Object.keys(prevSetting).length) {
if (this.dirty || Object.keys(prevSetting).some(v => prevSetting[v] !== submitSetting[v])) {
SystemSettingStore.putUserSetting(submitSetting).then((data) => {
if (!data.failed) {
window.location.reload(true);
} else {
this.setState({
submitting: false,
});
}
}).catch((error) => {
this.setState({
submitting: false,
});
});
} else {
Choerodon.prompt(intl.formatMessage({ id: `${intlPrefix}.save.conflict` }));
this.setState({
submitting: false,
});
}
} else if (!this.dirty && defaultLanguage === 'zh_CN' && systemName === 'Choerodon' && systemTitle === 'Choerodon | 企业数字化服务平台' && defaultPassword === '<PASSWORD>' && !favicon && !systemLogo && !registerEnabled && !registerUrl) {
Choerodon.prompt(intl.formatMessage({ id: `${intlPrefix}.save.conflict` }));
this.setState({
submitting: false,
});
} else {
SystemSettingStore.postUserSetting(submitSetting).then((data) => {
if (!data.failed) {
window.location.reload(true);
} else {
this.setState({
submitting: false,
});
}
}).catch((error) => {
this.setState({
submitting: false,
});
});
}
});
};
// Renders the system-setting form: favicon/logo uploaders, system name/title,
// default password + min/max password length, default language, GitLab
// password-reset URL, and the optional self-registration URL (shown only when
// registration is enabled). The header offers reset and refresh actions.
// NOTE(review): `prefixClas`, `cardTitle`, `cardContentLogo` and `cardName`
// are defined elsewhere in this file — confirm they are in scope.
render() {
  const { SystemSettingStore, intl, AppState } = this.props;
  const { getFieldDecorator } = this.props.form;
  const { logoLoadingStatus, submitting, uploadLogoVisible } = this.state;
  // Fall back to platform defaults when the store has no saved setting yet.
  const { defaultLanguage = 'zh_CN', defaultPassword = '<PASSWORD>', systemName = 'Choerodon', systemTitle, maxPasswordLength, minPasswordLength, registerEnabled = false, registerUrl, resetGitlabPasswordUrl } = SystemSettingStore.getUserSetting;
  const systemLogo = SystemSettingStore.getLogo;
  const formItemLayout = {
    labelCol: {
      xs: { span: 24 },
      sm: { span: 8 },
    },
    wrapperCol: {
      xs: { span: 24 },
      sm: { span: 16 },
    },
  };
  // Width used so min/max password length inputs sit side by side.
  const inputHalfWidth = '236px';
  // Placeholder shown until a logo is uploaded; clicking opens the uploader.
  const uploadButton = (
    <div onClick={this.handleUploadLogoVisibleChange}>
      {logoLoadingStatus ? <Spin /> : <div className={'initLogo'} />}
    </div>
  );
  const mainContent = (
    <Form onSubmit={this.handleSubmit} layout="vertical" className={prefixClas}>
      <FormItem>
        {
          this.faviconContainer()
        }
      </FormItem>
      <FormItem
        {...formItemLayout}
      >
        <Input style={{ display: 'none' }} />
        {getFieldDecorator('systemName', {
          initialValue: systemName,
          rules: [{
            required: true,
            message: intl.formatMessage({ id: `${intlPrefix}.systemName.error` }),
          }, {
            validator: this.validateToInputName,
          }],
        })(
          <Input
            autoComplete="new-password"
            label={<FormattedMessage id={`${intlPrefix}.systemName`} />}
            ref={(e) => { this.editFocusInput = e; }}
            maxLength={18}
            showLengthInfo={false}
            suffix={cardTitle}
          />,
        )}
      </FormItem>
      <FormItem
        {...formItemLayout}
      >
        <span className={`${prefixClas}-tips`}>
          <FormattedMessage id={`${intlPrefix}.systemLogo`} />
          <Popover content={cardContentLogo} getPopupContainer={() => document.getElementsByClassName('page-content')[0]}>
            <Icon type="help" style={{ fontSize: 16, color: '#bdbdbd' }} />
          </Popover>
        </span>
        <div className="c7n-upload c7n-upload-select c7n-upload-select-picture-card">
          <LogoUploader type="logo" visible={uploadLogoVisible} onVisibleChange={this.handleUploadLogoVisibleChange} onSave={(res) => { SystemSettingStore.setLogo(res); }} />
          {systemLogo ? <div className="c7n-upload" onClick={this.handleUploadLogoVisibleChange}><img src={systemLogo} alt="" style={{ width: '80px', height: '80px' }} /></div> : uploadButton}
        </div>
      </FormItem>
      <FormItem
        {...formItemLayout}
      >
        <Input style={{ display: 'none' }} />
        {getFieldDecorator('systemTitle', {
          initialValue: systemTitle || AppState.getSiteInfo.defaultTitle,
        })(
          <Input
            autoComplete="new-password"
            label={<FormattedMessage id={`${intlPrefix}.systemTitle`} />}
            ref={(e) => { this.editFocusInput = e; }}
            maxLength={32}
            showLengthInfo
            suffix={cardName}
          />,
        )}
      </FormItem>
      <FormItem
        {...formItemLayout}
      >
        <Input style={{ display: 'none' }} />
        {getFieldDecorator('defaultPassword', {
          initialValue: defaultPassword,
          rules: [{
            required: true,
            message: intl.formatMessage({ id: `${intlPrefix}.defaultPassword.error` }),
          }, {
            validator: this.validateToPassword,
          }],
        })(
          <Input
            autoComplete="new-password"
            label={<FormattedMessage id={`${intlPrefix}.defaultPassword`} />}
            maxLength={15}
            type="password"
            showPasswordEye
          />,
        )}
      </FormItem>
      <FormItem
        {...formItemLayout}
        style={{ display: 'inline-block' }}
      >
        {getFieldDecorator('minPasswordLength', {
          rules: [
            {
              validator: this.checkMinLength,
              validateFirst: true,
            },
          ],
          initialValue: minPasswordLength,
        })(
          <InputNumber
            label={<FormattedMessage id={`${intlPrefix}.min-length`} />}
            style={{ width: inputHalfWidth }}
            max={65535}
            min={0}
            onChange={() => { this.dirty = true; }}
          />,
        )}
      </FormItem>
      <FormItem
        {...formItemLayout}
        style={{ display: 'inline-block', marginLeft: 40 }}
      >
        {getFieldDecorator('maxPasswordLength', {
          rules: [
            {
              validator: this.checkMaxLength,
              validateFirst: true,
            },
          ],
          initialValue: maxPasswordLength,
        })(
          <InputNumber
            label={<FormattedMessage id={`${intlPrefix}.max-length`} />}
            style={{ width: inputHalfWidth }}
            max={65535}
            min={0}
            onChange={() => { this.dirty = true; }}
          />,
        )}
      </FormItem>
      <FormItem
        {...formItemLayout}
      >
        {getFieldDecorator('defaultLanguage', {
          initialValue: defaultLanguage,
          rules: [{
            required: true,
            message: intl.formatMessage({ id: `${intlPrefix}.defaultLanguage.error` }),
          }],
        })(<Select getPopupContainer={() => document.getElementsByClassName('page-content')[0]} label={<FormattedMessage id={`${intlPrefix}.defaultLanguage`} />}>
          {this.getLanguageOptions()}
        </Select>,
        )}
      </FormItem>
      <FormItem
        {...formItemLayout}
      >
        {getFieldDecorator('resetGitlabPasswordUrl', {
          initialValue: resetGitlabPasswordUrl,
        })(
          <TextArea
            autoComplete="new-password"
            label={<FormattedMessage id={`${intlPrefix}.resetGitlabPasswordUrl`} />}
            ref={(e) => { this.editFocusInput = e; }}
            autosize={{ minRows: 2, maxRows: 6 }}
          />,
        )}
      </FormItem>
      <FormItem
        {...formItemLayout}
      >
        {getFieldDecorator('registerEnabled', {
          initialValue: registerEnabled,
        })(
          <RadioGroup label={<FormattedMessage id={`${intlPrefix}.registerEnabled`} />} className="radioGroup">
            <Radio value><FormattedMessage id="yes" /></Radio>
            <Radio value={false}><FormattedMessage id="no" /></Radio>
          </RadioGroup>,
        )}
      </FormItem>
      {
        this.props.form.getFieldValue('registerEnabled') && <FormItem
          {...formItemLayout}
        >
          <Input style={{ display: 'none' }} />
          {getFieldDecorator('registerUrl', {
            initialValue: registerUrl,
            rules: [{
              required: true,
              message: intl.formatMessage({ id: `${intlPrefix}.registerUrl.error` }),
            }],
          })(
            <TextArea
              autoComplete="new-password"
              label={<FormattedMessage id={`${intlPrefix}.registerUrl`} />}
              ref={(e) => { this.editFocusInput = e; }}
              autosize={{ minRows: 2, maxRows: 6 }}
            />,
          )}
        </FormItem>
      }
      <div className={`${prefixClas}-divider`} />
      <div>
        <Button
          htmlType="submit"
          funcType="raised"
          type="primary"
          loading={submitting}
        ><FormattedMessage id="save" /></Button>
        <Button
          funcType="raised"
          onClick={this.init}
          style={{ marginLeft: 16 }}
          disabled={submitting}
        ><FormattedMessage id="cancel" /></Button>
      </div>
    </Form>
  );
  // Page wrapper declares the permission codes required to view this screen.
  return (
    <Page
      service={[
        'iam-service.system-setting.uploadFavicon',
        'iam-service.system-setting.uploadLogo',
        'iam-service.system-setting.addSetting',
        'iam-service.system-setting.updateSetting',
        'iam-service.system-setting.resetSetting',
        'iam-service.system-setting.getSetting',
      ]}
    >
      <Header title={<FormattedMessage id={`${intlPrefix}.header`} />}>
        <Button
          onClick={this.showDeleteConfirm}
          icon="swap_horiz"
        >
          <FormattedMessage id="reset" />
        </Button>
        <Button
          onClick={this.init}
          icon="refresh"
        >
          <FormattedMessage id="refresh" />
        </Button>
      </Header>
      <Content code={intlPrefix}>
        {mainContent}
      </Content>
    </Page>
  );
}
}
<|start_filename|>src/test/groovy/io/choerodon/iam/api/service/impl/ProjectTypeServiceImplSpec.groovy<|end_filename|>
package io.choerodon.iam.api.service.impl
import io.choerodon.iam.api.dto.ProjectTypeDTO
import io.choerodon.iam.app.service.impl.ProjectTypeServiceImpl
import io.choerodon.iam.infra.mapper.ProjectTypeMapper
import spock.lang.Specification
/**
 * Unit spec for {@link ProjectTypeServiceImpl}: verifies that list()
 * passes every row returned by the mapper straight through.
 */
class ProjectTypeServiceImplSpec extends Specification {

    def "test list"() {
        given:
        // One project type row the mocked mapper will return.
        def dto = new ProjectTypeDTO()
        dto.setCode("code")
        dto.setName("name")
        dto.setDescription("desc")
        def mapper = Mock(ProjectTypeMapper) {
            selectAll() >> [dto]
        }
        def service = new ProjectTypeServiceImpl(mapper)

        when:
        def result = service.list()

        then:
        // The single mocked entry comes back unchanged.
        result != null
        result.size() == 1
        result[0].code == dto.getCode()
        result[0].name == dto.getName()
        result[0].description == dto.getDescription()
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/MenuMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.MenuDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import java.util.Set;
/**
 * Mapper for menu queries.
 *
 * @author wuguokai
 * @author superlee
 */
public interface MenuMapper extends Mapper<MenuDTO> {

    /**
     * Queries project-level menus (with category) visible to the root user.
     *
     * @param category project category
     * @return project menus for the root user
     */
    List<MenuDTO> queryProjectMenusWithCategoryByRootUser(@Param("category") String category);

    /**
     * Resolves menus for a member: looks up roles via member_role by member id,
     * then permissions via the roles, then menus via the permissions.
     *
     * @param memberId   member (user) id
     * @param sourceType resource level, e.g. site/organization/project
     * @param sourceId   id of the organization/project the roles are scoped to
     * @param category   source category filter
     * @param memberType member type, e.g. "user"
     * @return menus the member may see after the permission check
     */
    List<MenuDTO> selectMenusAfterCheckPermission(@Param("memberId") Long memberId,
                                                  @Param("sourceType") String sourceType,
                                                  @Param("sourceId") Long sourceId,
                                                  @Param("category") String category,
                                                  @Param("memberType") String memberType);

    /**
     * Queries menus (with their permissions) by resource level,
     * excluding top menus.
     *
     * @param level resource level
     * @return menus with permissions attached
     */
    List<MenuDTO> selectMenusWithPermission(String level);

    /**
     * Queries menus of the given level, joining the permission table so the
     * permission path populates the menu's route field.
     *
     * @param level resource level
     * @return menus with route resolved from permission paths
     */
    Set<MenuDTO> selectByLevelWithPermissionType(String level);

    /**
     * Queries the root user's menus filtered by resource level and by the
     * categories of the level's unit (organization/project).
     *
     * @param categories unit categories to match
     * @param level      resource level
     * @return menus for the root user
     */
    List<MenuDTO> queryMenusWithCategoryAndLevelByRootUser(@Param("categories") List<String> categories,
                                                           @Param("level") String level);

    /**
     * Resolves menus for a member: roles by member id, permissions by roles,
     * categories by organization/project id, then menus matched by both
     * permissions and categories.
     *
     * @param memberId   member (user) id
     * @param sourceType resource level
     * @param sourceId   id of the organization/project
     * @param categories unit categories to match
     * @param memberType member type, e.g. "user"
     * @return menus the member may see after the permission check
     */
    List<MenuDTO> selectMenusAfterPassingThePermissionCheck(@Param("memberId") Long memberId,
                                                            @Param("sourceType") String sourceType,
                                                            @Param("sourceId") Long sourceId,
                                                            @Param("categories") List<String> categories,
                                                            @Param("memberType") String memberType);
}
<|start_filename|>src/test/groovy/io/choerodon/iam/api/eventhandler/ApplicationListenerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.eventhandler
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.api.dto.payload.DevOpsAppSyncPayload
import io.choerodon.iam.infra.asserts.OrganizationAssertHelper
import io.choerodon.iam.infra.asserts.ProjectAssertHelper
import io.choerodon.iam.infra.dto.ApplicationDTO
import io.choerodon.iam.infra.dto.ApplicationExplorationDTO
import io.choerodon.iam.infra.mapper.ApplicationExplorationMapper
import io.choerodon.iam.infra.mapper.ApplicationMapper
import org.codehaus.jackson.map.ObjectMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import org.springframework.transaction.annotation.Transactional
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * Integration spec for ApplicationListener: each test inserts fixture rows,
 * feeds the listener a JSON payload, and verifies the resulting DB state.
 * Tests are @Transactional so fixtures roll back after each method.
 *
 * @author dengyouquan
 */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class ApplicationListenerSpec extends Specification {
    @Autowired
    private ApplicationMapper applicationMapper
    @Autowired
    private ApplicationExplorationMapper applicationExplorationMapper
    // Mocked collaborators: only needed to satisfy the listener constructor.
    private OrganizationAssertHelper organizationAssertHelper = Mock(OrganizationAssertHelper)
    private ProjectAssertHelper projectAssertHelper = Mock(ProjectAssertHelper)
    private static final String SEPARATOR = "/";
    private ObjectMapper objectMapper = new ObjectMapper()

    // Listener should flip the application's `abnormal` flag to true.
    @Transactional
    def "updateApplicationAbnormal"() {
        given: "构造请求参数"
        ApplicationDTO applicationDTO = new ApplicationDTO();
        applicationDTO.setOrganizationId(803)
        applicationDTO.setProjectId(826)
        applicationDTO.setCode("test")
        applicationDTO.setName("test")
        applicationDTO.setEnabled(true)
        applicationDTO.setAbnormal(false)
        applicationDTO.setApplicationCategory("application")
        applicationDTO.setApplicationType("normal")
        applicationMapper.insertSelective(applicationDTO)
        applicationDTO = applicationMapper.selectOne(applicationDTO)
        ApplicationListener applicationListener = new ApplicationListener(applicationMapper,
                applicationExplorationMapper, organizationAssertHelper, projectAssertHelper)
        String message = objectMapper.writeValueAsString(applicationDTO)
        when: "调用方法"
        applicationListener.updateApplicationAbnormal(message)
        then: "校验结果"
        ApplicationDTO applicationDTO1 = new ApplicationDTO();
        applicationDTO1.setOrganizationId(803)
        applicationDTO1.setProjectId(826)
        applicationDTO1.setCode("test")
        applicationMapper.selectOne(applicationDTO1).getAbnormal()
    }

    // Listener should delete both the application row and its exploration
    // (ancestor-path) rows when DevOps reports the app was removed.
    @Transactional
    def "syncDeleteApplication"() {
        given: "构造请求参数"
        ApplicationDTO applicationDTO = new ApplicationDTO();
        applicationDTO.setOrganizationId(803)
        applicationDTO.setProjectId(826)
        applicationDTO.setCode("test")
        applicationDTO.setName("test")
        applicationDTO.setEnabled(true)
        applicationDTO.setAbnormal(false)
        applicationDTO.setApplicationCategory("application")
        applicationDTO.setApplicationType("normal")
        applicationMapper.insertSelective(applicationDTO)
        Long appId = applicationDTO.getId()
        String path = SEPARATOR + appId + SEPARATOR
        ApplicationExplorationDTO example = new ApplicationExplorationDTO()
        example.setApplicationId(appId);
        example.setPath(path);
        example.setApplicationEnabled(true);
        example.setRootId(appId);
        example.setHashcode(String.valueOf(path.hashCode()))
        applicationExplorationMapper.insertSelective(example)
        ApplicationListener applicationListener = new ApplicationListener(applicationMapper,
                applicationExplorationMapper, organizationAssertHelper, projectAssertHelper)
        DevOpsAppSyncPayload payload = new DevOpsAppSyncPayload()
        payload.setProjectId(applicationDTO.getProjectId())
        payload.setOrganizationId(applicationDTO.getOrganizationId())
        payload.setCode(applicationDTO.getCode())
        String message = objectMapper.writeValueAsString(payload)
        when: "调用方法"
        applicationListener.syncDeleteApplication(message)
        then: "校验结果"
        ApplicationDTO applicationDTO1 = new ApplicationDTO();
        applicationDTO1.setOrganizationId(803)
        applicationDTO1.setProjectId(826)
        applicationDTO1.setCode("test")
        applicationMapper.select(applicationDTO1).size() == 0
        applicationExplorationMapper.selectDescendantByPath(path).size() == 0
    }

    // Listener should enable a disabled app when the payload says active=true.
    @Transactional
    def "syncApplicationActiveStatus"() {
        given: "构造请求参数"
        ApplicationDTO applicationDTO = new ApplicationDTO();
        applicationDTO.setOrganizationId(803)
        applicationDTO.setProjectId(826)
        applicationDTO.setCode("test")
        applicationDTO.setName("test")
        applicationDTO.setEnabled(false)
        applicationDTO.setAbnormal(false)
        applicationDTO.setApplicationCategory("application")
        applicationDTO.setApplicationType("normal")
        applicationMapper.insertSelective(applicationDTO)
        ApplicationListener applicationListener = new ApplicationListener(applicationMapper,
                applicationExplorationMapper, organizationAssertHelper, projectAssertHelper)
        DevOpsAppSyncPayload devOpsAppSyncPayload = new DevOpsAppSyncPayload()
        devOpsAppSyncPayload.setProjectId(applicationDTO.getProjectId())
        devOpsAppSyncPayload.setOrganizationId(applicationDTO.getOrganizationId())
        devOpsAppSyncPayload.setCode(applicationDTO.getCode())
        devOpsAppSyncPayload.setActive(true)
        String message = objectMapper.writeValueAsString(devOpsAppSyncPayload)
        when: "调用方法"
        applicationListener.syncApplicationActiveStatus(message)
        then: "校验结果"
        ApplicationDTO applicationDTO1 = new ApplicationDTO();
        applicationDTO1.setOrganizationId(803)
        applicationDTO1.setProjectId(826)
        applicationDTO1.setCode("test")
        applicationMapper.selectOne(applicationDTO1).getEnabled()
    }

    // Listener should rename the application to the name in the payload.
    @Transactional
    def "syncApplicationName"() {
        given: "构造请求参数"
        ApplicationDTO applicationDTO = new ApplicationDTO();
        applicationDTO.setOrganizationId(803)
        applicationDTO.setProjectId(826)
        applicationDTO.setCode("test")
        applicationDTO.setName("test")
        applicationDTO.setEnabled(false)
        applicationDTO.setAbnormal(false)
        applicationDTO.setApplicationCategory("application")
        applicationDTO.setApplicationType("normal")
        applicationMapper.insertSelective(applicationDTO)
        ApplicationListener applicationListener = new ApplicationListener(applicationMapper,
                applicationExplorationMapper, organizationAssertHelper, projectAssertHelper)
        DevOpsAppSyncPayload devOpsAppSyncPayload = new DevOpsAppSyncPayload()
        devOpsAppSyncPayload.setProjectId(applicationDTO.getProjectId())
        devOpsAppSyncPayload.setOrganizationId(applicationDTO.getOrganizationId())
        devOpsAppSyncPayload.setCode(applicationDTO.getCode())
        devOpsAppSyncPayload.setName("change-test")
        String message = objectMapper.writeValueAsString(devOpsAppSyncPayload)
        when: "调用方法"
        applicationListener.syncApplicationName(message)
        then: "校验结果"
        ApplicationDTO applicationDTO1 = new ApplicationDTO();
        applicationDTO1.setOrganizationId(803)
        applicationDTO1.setProjectId(826)
        applicationDTO1.setCode("test")
        applicationMapper.selectOne(applicationDTO1).getName().equals("change-test")
    }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/api/controller/v1/SystemSettingControllerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.controller.v1
import io.choerodon.asgard.saga.feign.SagaClient
import io.choerodon.core.exception.ExceptionResponse
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.app.service.impl.SystemSettingServiceImpl
import io.choerodon.iam.infra.dto.SystemSettingDTO
import io.choerodon.iam.infra.feign.FileFeignClient
import io.choerodon.iam.infra.mapper.SystemSettingMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.web.client.TestRestTemplate
import org.springframework.context.annotation.Import
import org.springframework.http.HttpEntity
import org.springframework.http.HttpMethod
import org.springframework.validation.BindingResult
import spock.lang.Specification
import spock.lang.Stepwise
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * REST-level spec for SystemSettingController, driven through both
 * TestRestTemplate (full HTTP round trip) and a directly constructed
 * controller backed by a real mapper with mocked Feign/Saga clients.
 * @Stepwise: tests run in declaration order and share DB state.
 *
 * @author zmf*
 */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
@Stepwise
class SystemSettingControllerSpec extends Specification {
    private static final String BASE_PATH = "/v1/system/setting"
    @Autowired
    private TestRestTemplate restTemplate
    @Autowired
    SystemSettingMapper settingMapper
    // Remote clients are mocked so no file upload / saga actually happens.
    FileFeignClient fileFeignClient = Mock(FileFeignClient)
    SagaClient sagaClient = Mock(SagaClient)
    SystemSettingService service
    SystemSettingController controller
    private SystemSettingDTO settingDTO

    // Fresh valid DTO and a saga-enabled service/controller for every test.
    void setup() {
        settingDTO = new SystemSettingDTO()
        settingDTO.setDefaultLanguage("zh_CN")
        settingDTO.setDefaultPassword("<PASSWORD>")
        settingDTO.setFavicon("http://minio.staging.saas.hand-china.com/iam-service/file_2913c259dc524231909f5e6083e4c2bf_test.png")
        settingDTO.setSystemName("choerodon")
        settingDTO.setSystemTitle("Choerodon Platform")
        settingDTO.setSystemLogo("http://minio.staging.saas.hand-china.com/iam-service/file_2913c259dc524231909f5e6083e4c2bf_test.png")
        service = new SystemSettingServiceImpl(fileFeignClient, sagaClient, settingMapper, false)
        controller = new SystemSettingController(service)
    }

    // Over HTTP the real service has sagas enabled but no saga backend,
    // so a valid add fails with the saga send-event error code.
    def "AddSetting"() {
        given: "构造请求参数"
        def httpEntity = new HttpEntity<Object>(settingDTO)
        when: "调用方法[成功]"
        def entity = restTemplate.postForEntity(BASE_PATH, httpEntity, ExceptionResponse)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode() == "error.system.setting.update.send.event"
    }

    // Data-driven: missing and over-long system names map to distinct codes.
    def "Add setting with invalid system name"() {
        given: "构造请求参数"
        settingDTO.setSystemName(systemName)
        def httpEntity = new HttpEntity<Object>(settingDTO)
        when: "调用方法"
        def entity = restTemplate.exchange(BASE_PATH, HttpMethod.POST, httpEntity, ExceptionResponse)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode() == code
        where: "边界请求"
        systemName                 | code
        null                       | "error.setting.name.null"
        "112312412412412412412412" | "error.setting.name.too.long"
    }

    // Data-driven: null / too long / too short / bad-charset passwords.
    def "add setting with invalid password"() {
        given: "构造请求参数"
        settingDTO.setDefaultPassword(password)
        def httpEntity = new HttpEntity<Object>(settingDTO)
        when: "调用方法"
        def entity = restTemplate.exchange(BASE_PATH, HttpMethod.POST, httpEntity, ExceptionResponse)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode() == code
        where: "边界请求"
        password                   | code
        null                       | "error.setting.default.password.null"
        "112312412412412412412412" | "error.setting.default.password.length.invalid"
        "11"                       | "error.setting.default.password.length.invalid"
        "12214441#"                | "error.setting.default.password.format.invalid"
    }

    // Only one settings row may exist; the second POST must be rejected.
    def "add setting more than once"() {
        given: "构造请求参数"
        def httpEntity = new HttpEntity<Object>(settingDTO)
        when: "调用方法[成功]"
        restTemplate.exchange(BASE_PATH, HttpMethod.POST, httpEntity, SystemSettingDTO)
        def entity = restTemplate.exchange(BASE_PATH, HttpMethod.POST, httpEntity, ExceptionResponse)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode() == "error.setting.already.one"
    }

    def "add setting without default language"() {
        given: "构造请求参数"
        settingDTO.setDefaultLanguage(null)
        def httpEntity = new HttpEntity<Object>(settingDTO)
        when: "调用方法[成功]"
        restTemplate.exchange(BASE_PATH, HttpMethod.POST, httpEntity, SystemSettingDTO)
        def entity = restTemplate.exchange(BASE_PATH, HttpMethod.POST, httpEntity, ExceptionResponse)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode() == "error.setting.default.language.null"
    }

    // Direct controller call with mocked saga: update succeeds and fires a saga.
    def "UpdateSetting"() {
        given: "准备场景"
        controller.resetSetting()
        BindingResult bindingResult = Mock(BindingResult)
        bindingResult.hasErrors() >> false
        def entity = controller.addSetting(settingDTO, bindingResult)
        def objectVersionNumber = entity.getBody().getObjectVersionNumber()
        settingDTO.setSystemName("choerodon-test")
        settingDTO.setObjectVersionNumber(objectVersionNumber)
        when: "调用方法"
        def result = controller.updateSetting(settingDTO, bindingResult)
        then: "校验结果"
        result.statusCode.is2xxSuccessful()
        1 * sagaClient.startSaga(_, _)
    }

    // NOTE(review): bean validation is bypassed (mocked BindingResult), so the
    // one-character name still reaches the service and a saga is started.
    def "UpdateSetting with invalid input"() {
        given: "准备场景"
        controller.resetSetting()
        BindingResult bindingResult = Mock(BindingResult)
        bindingResult.hasErrors() >> false
        def entity = controller.addSetting(settingDTO, bindingResult)
        def objectVersionNumber = entity.getBody().getObjectVersionNumber()
        settingDTO.setSystemName("a")
        settingDTO.setObjectVersionNumber(objectVersionNumber)
        when: "调用方法"
        def result = controller.updateSetting(settingDTO, bindingResult)
        then: "校验结果"
        result.statusCode.is2xxSuccessful()
        1 * sagaClient.startSaga(_, _)
    }

    // Updating when no row exists must fail with the dedicated error code.
    def "UpdateSetting when the db is empty"() {
        given: "准备场景"
        restTemplate.delete(BASE_PATH)
        def httpEntity = new HttpEntity<Object>(settingDTO)
        when: "调用方法"
        def entity = restTemplate.exchange(BASE_PATH, HttpMethod.PUT, httpEntity, ExceptionResponse)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode() == "error.setting.update.invalid"
    }

    def "ResetSetting"() {
        when: "调用方法"
        restTemplate.delete(BASE_PATH, SystemSettingDTO)
        then: "校验结果"
        noExceptionThrown()
    }

    // GET returns an empty body when no row exists, a populated one after POST.
    def "GetSetting"() {
        given: "当数据库为空时请求"
        restTemplate.delete(BASE_PATH)
        when: "调用方法"
        def entity = restTemplate.getForEntity(BASE_PATH, SystemSettingDTO)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getObjectVersionNumber() == null
        when: "调用方法"
        def httpEntity = new HttpEntity<Object>(settingDTO)
        entity = restTemplate.exchange(BASE_PATH, HttpMethod.POST, httpEntity, String)
        println(entity.getBody())
        entity = restTemplate.getForEntity(BASE_PATH, SystemSettingDTO)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getObjectVersionNumber() != null
    }

    // Uploading with no multipart body trips the size check.
    def "UploadFavicon"() {
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/upload/favicon", null, ExceptionResponse, 0L)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode() == "error.upload.multipartSize"
    }

    def "UploadLogo"() {
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/upload/logo", null, ExceptionResponse, 0L)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode() == "error.upload.multipartSize"
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/RoleMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.api.query.RoleQuery;
import io.choerodon.iam.infra.dto.RoleDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * Mapper for role queries.
 *
 * @author wuguokai
 */
public interface RoleMapper extends Mapper<RoleDTO> {

    /**
     * Queries the ids of the roles a user holds on the given source.
     */
    List<Long> queryRoleByUser(@Param("userId") Long userId,
                               @Param("sourceType") String sourceType,
                               @Param("sourceId") Long sourceId);

    /**
     * Queries the full role records a user holds on the given source.
     */
    List<RoleDTO> queryRolesInfoByUser(@Param("sourceType") String sourceType,
                                       @Param("sourceId") Long sourceId,
                                       @Param("userId") Long userId);

    /**
     * Full-text style search over roles matching the query object and
     * the free-form param string.
     */
    List<RoleDTO> fulltextSearch(@Param("roleQuery") RoleQuery roleQuery, @Param("param") String param);

    /**
     * Paged query of the roles belonging to an organization.
     */
    List<RoleDTO> pagingQueryOrgRoles(@Param("orgId") Long orgId,
                                      @Param("roleQuery") RoleQuery roleQuery,
                                      @Param("param") String param);

    /**
     * Queries one role with its permissions and labels eagerly attached.
     */
    RoleDTO selectRoleWithPermissionsAndLabels(Long id);

    /**
     * Counts the distinct resource levels spanned by the given role ids.
     */
    int rolesLevelCount(@Param("roleIds") List<Long> roleIds);

    /**
     * Queries roles carrying a label of the given name and type, scoped to
     * an organization.
     */
    List<RoleDTO> selectRolesByLabelNameAndType(@Param("name") String name, @Param("type") String type,
                                                @Param("organizationId") Long organizationId);

    /**
     * Queries the built-in (init) roles that hold the given permission.
     */
    List<RoleDTO> selectInitRolesByPermissionId(Long permissionId);

    /**
     * Fuzzy search of roles by name within a resource level.
     */
    List<RoleDTO> fuzzySearchRolesByName(@Param("roleName") String roleName, @Param("sourceType") String sourceType);
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/RefreshTokenMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.RefreshTokenDTO;
import io.choerodon.mybatis.common.Mapper;
/**
 * Mapper for the OAuth refresh-token table; inherits generic CRUD
 * operations from {@link Mapper} and adds no custom queries.
 *
 * @author Eugen
 */
public interface RefreshTokenMapper extends Mapper<RefreshTokenDTO> {
}
<|start_filename|>react/src/app/iam/containers/user/user-info/index.js<|end_filename|>
import React from 'react';
import { Route, Switch } from 'react-router-dom';
import { asyncRouter, nomatch } from '@choerodon/boot';
const index = asyncRouter(() => import('./UserInfo'));
const Index = ({ match }) => (
<Switch>
<Route exact path={match.url} component={index} />
<Route path={'*'} component={nomatch} />
</Switch>
);
export default Index;
<|start_filename|>react/src/app/iam/dashboard/AddedUsers/index.js<|end_filename|>
import React, { Component } from 'react';
import { axios } from '@choerodon/boot';
import { Link, withRouter } from 'react-router-dom';
import { FormattedMessage } from 'react-intl';
import { inject, observer } from 'mobx-react';
import { Spin } from 'choerodon-ui';
import './index.scss';
@withRouter
@inject('AppState')
@observer
export default class AddedUsers extends Component {
constructor(props) {
super(props);
this.state = {
newUsers: 0,
allUsers: 100,
loading: true,
};
}
componentWillMount() {
this.loadUserCount();
}
loadUserCount = () => {
axios.get('iam/v1/users/new').then((data) => {
if (!data.failed) {
this.setState({
newUsers: data.newUsers,
allUsers: data.allUsers,
loading: false,
});
} else {
Choerodon.prompt(data.message);
}
});
}
render() {
const { newUsers, allUsers, loading } = this.state;
return (
<div className="c7n-iam-dashboard-addedusers">
{
loading ? <Spin spinning={loading} /> : (
<React.Fragment>
<div className="c7n-iam-dashboard-addedusers-main">
<div>
<span>{newUsers}</span><span>人</span>
</div>
</div>
<div className="c7n-iam-dashboard-addedusers-bottom">
用户总数: {allUsers}
</div>
</React.Fragment>
)
}
</div>
);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/api/validator/MemberRoleValidator.java<|end_filename|>
package io.choerodon.iam.api.validator;
import java.util.List;
import io.choerodon.iam.infra.dto.MemberRoleDTO;
import io.choerodon.iam.infra.dto.RoleDTO;
import org.springframework.stereotype.Component;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.oauth.CustomUserDetails;
import io.choerodon.iam.infra.mapper.MemberRoleMapper;
import io.choerodon.iam.infra.mapper.RoleMapper;
/**
* @author wuguokai
*/
/**
 * Validates member-role assignments before they are persisted.
 *
 * @author wuguokai
 */
@Component
public class MemberRoleValidator {

    private RoleMapper roleMapper;
    private MemberRoleMapper memberRoleMapper;

    public MemberRoleValidator(RoleMapper roleMapper, MemberRoleMapper memberRoleMapper) {
        this.roleMapper = roleMapper;
        this.memberRoleMapper = memberRoleMapper;
    }

    /**
     * Ensures every entry carries a role id, that the role exists, and that
     * its resource level matches the requested level.
     *
     * @param level          resource level the roles must belong to
     * @param memberRoleDTOS entries to validate
     * @throws CommonException when an entry violates any of the checks
     */
    public void distributionRoleValidator(String level, List<MemberRoleDTO> memberRoleDTOS) {
        for (MemberRoleDTO memberRole : memberRoleDTOS) {
            if (memberRole.getRoleId() == null) {
                throw new CommonException("error.roleId.null");
            }
            RoleDTO role = roleMapper.selectByPrimaryKey(memberRole.getRoleId());
            if (role == null) {
                throw new CommonException("error.role.not.exist");
            }
            if (!role.getResourceLevel().equals(level)) {
                throw new CommonException("error.roles.in.same.level");
            }
        }
    }

    /**
     * Verifies the user holds at least one role on the given source.
     * Admin users skip the membership check entirely.
     *
     * @return always {@code true}; failure is signalled by throwing
     * @throws CommonException when a non-admin has no membership record
     */
    public Boolean userHasRoleValidator(CustomUserDetails userDetails, String sourceType, Long sourceId, Boolean isAdmin) {
        if (isAdmin) {
            return true;
        }
        MemberRoleDTO probe = new MemberRoleDTO();
        probe.setMemberId(userDetails.getUserId());
        probe.setMemberType("user");
        probe.setSourceType(sourceType);
        probe.setSourceId(sourceId);
        if (memberRoleMapper.select(probe).isEmpty()) {
            throw new CommonException("error.memberRole.select");
        }
        return true;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/SystemSettingService.java<|end_filename|>
package io.choerodon.iam.app.service;
import io.choerodon.iam.infra.dto.SystemSettingDTO;
import org.springframework.web.multipart.MultipartFile;
/**
 * CRUD operations for the platform-wide system setting.
 *
 * @author zmf
 * @since 2018-10-15
 */
public interface SystemSettingService {

    /**
     * Uploads the platform favicon (supports crop and rotate before saving).
     *
     * @param file   favicon image
     * @param rotate rotation angle applied before saving
     * @param axisX  crop origin X
     * @param axisY  crop origin Y
     * @param width  crop width
     * @param height crop height
     * @return URL of the stored image
     */
    String uploadFavicon(MultipartFile file, Double rotate, Integer axisX, Integer axisY, Integer width, Integer height);

    /**
     * Uploads the navigation-bar logo (supports crop and rotate before saving).
     *
     * @param file image
     * @return URL of the stored image
     */
    String uploadSystemLogo(MultipartFile file, Double rotate, Integer axisX, Integer axisY, Integer width, Integer height);

    /**
     * Creates the system setting.
     *
     * @param systemSettingDTO setting data
     * @return the created setting
     */
    SystemSettingDTO addSetting(SystemSettingDTO systemSettingDTO);

    /**
     * Updates the system setting.
     *
     * @param systemSettingDTO setting data
     * @return the setting after the update
     */
    SystemSettingDTO updateSetting(SystemSettingDTO systemSettingDTO);

    /**
     * Resets the system setting to defaults.
     */
    void resetSetting();

    /**
     * Fetches the system setting.
     *
     * @return the setting if one exists, otherwise null
     */
    SystemSettingDTO getSetting();

    /**
     * Tells whether project/organization category control is enabled.
     *
     * @return true when category control is on
     */
    Boolean getEnabledStateOfTheCategory();
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/asserts/ClientAssertHelper.java<|end_filename|>
package io.choerodon.iam.infra.asserts;
import io.choerodon.iam.infra.dto.ClientDTO;
import io.choerodon.iam.infra.exception.NotExistedException;
import io.choerodon.iam.infra.mapper.ClientMapper;
import org.springframework.stereotype.Component;
/**
* 客户端断言帮助类
*
* @author superlee
* @since 2019-07-10
*/
/**
 * Assertion helper for OAuth clients: each method loads a client and throws
 * {@link NotExistedException} when no matching row exists.
 *
 * @author superlee
 * @since 2019-07-10
 */
@Component
public class ClientAssertHelper extends AssertHelper {

    private ClientMapper clientMapper;

    public ClientAssertHelper(ClientMapper clientMapper) {
        this.clientMapper = clientMapper;
    }

    /** Asserts a client with the given id exists, using the default message. */
    public ClientDTO clientNotExisted(Long id) {
        return clientNotExisted(id, "error.client.not.existed");
    }

    /**
     * Asserts a client with the given id exists.
     *
     * @param id      client primary key
     * @param message error code used when the client is missing
     * @return the loaded client
     */
    public ClientDTO clientNotExisted(Long id, String message) {
        ClientDTO found = clientMapper.selectByPrimaryKey(id);
        if (found == null) {
            throw new NotExistedException(message);
        }
        return found;
    }

    /** Asserts a client with the given name exists, using the default message. */
    public ClientDTO clientNotExisted(String name) {
        return clientNotExisted(name, "error.client.not.existed");
    }

    /**
     * Asserts a client with the given name exists.
     *
     * @param name    client name
     * @param message error code used when the client is missing
     * @return the loaded client
     */
    public ClientDTO clientNotExisted(String name, String message) {
        ClientDTO probe = new ClientDTO();
        probe.setName(name);
        ClientDTO found = clientMapper.selectOne(probe);
        if (found == null) {
            throw new NotExistedException(message);
        }
        return found;
    }
}
<|start_filename|>react/src/app/iam/containers/organization/project/Project.js<|end_filename|>
import React, {Component} from 'react';
import {Button, Checkbox, Form, Icon, Input, Modal, Radio, Select, Table, Tooltip} from 'choerodon-ui';
import moment from 'moment';
import {inject, observer} from 'mobx-react';
import {withRouter} from 'react-router-dom';
import {axios, Content, Header, Page, Permission, stores} from '@choerodon/boot';
import {FormattedMessage, injectIntl} from 'react-intl';
import classnames from 'classnames';
import {PREFIX_CLS} from '@choerodon/boot/lib/containers/common/constants';
import './Project.less';
import MouseOverWrapper from '../../../components/mouseOverWrapper';
import StatusTag from '../../../components/statusTag';
import {handleFiltersParams} from '../../../common/util';
import AvatarUploader from '../../../components/avatarUploader';
// Module-level timeout handle; cleared in componentWillUnmount.
let timer;
const prefixCls = `${PREFIX_CLS}`;
const {HeaderStore} = stores;
const FormItem = Form.Item;
const ORGANIZATION_TYPE = 'organization';
const PROJECT_TYPE = 'project';
const {Sidebar} = Modal;
const {Option} = Select;
const RadioGroup = Radio.Group;
// i18n message-id prefix for this page.
const intlPrefix = 'organization.project';
// Shared antd form layout: full-width on xs, label 8/24 + control 16/24 on sm+.
const formItemLayout = {
  labelCol: {
    xs: {span: 24},
    sm: {span: 8},
  },
  wrapperCol: {
    xs: {span: 24},
    sm: {span: 16},
  },
};
// Matches strings composed solely of digits.
const isNum = /^\d+$/;
@Form.create({})
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class Project extends Component {
constructor(props) {
  super(props);
  this.state = {
    overflow: false,          // sidebar body is taller than its viewport
    categoryEnabled: false,   // system setting: whether project categories are shown
    selectLoading: true,      // category <Select> loading flag
    sidebar: false,           // sidebar visibility
    page: 1,
    id: '',
    open: false,
    projectDatas: {           // project currently being created/edited/configured
      name: null,
    },
    visible: false,
    visibleCreate: false,
    checkName: false,
    buttonClicked: false,
    filters: {                // table filter state; `params` is the free-text search
      params: [],
    },
    pagination: {
      current: 1,
      pageSize: 10,
      total: '',
    },
    sort: {
      columnKey: null,
      order: null,
    },
    submitting: false,        // sidebar OK button spinner
    isShowAvatar: false,      // avatar-upload modal visibility
    imgUrl: null,             // currently chosen avatar image
    expandedRowKeys: [],      // ids of expanded program rows
  };
  // Refs used to focus the first input when the sidebar opens.
  this.editFocusInput = React.createRef();
  this.createFocusInput = React.createRef();
}
// NOTE(review): componentWillMount is deprecated since React 16.3 — consider
// moving this into the constructor / componentDidMount.
componentWillMount() {
  this.setState({
    isLoading: true,
  });
  this.loadEnableCategory();
  this.loadProjectCategories({});
}
componentWillUnmount() {
  // NOTE(review): `this.timer` is never assigned via setInterval anywhere in
  // this file — this clearInterval looks defensive; confirm before removing.
  clearInterval(this.timer);
  // Cancel any pending category-filter debounce (module-level `timer`).
  clearTimeout(timer);
}
/**
 * Fetch the system setting that controls whether project categories are
 * enabled and mirror the flag into component state.
 */
loadEnableCategory = () => {
  axios
    .get('/iam/v1/system/setting/enable_category')
    .then((enabled) => {
      this.setState({ categoryEnabled: enabled });
    });
};
componentDidMount() {
  // Initial load: project table, project types, and sidebar overflow state.
  this.loadProjects();
  this.loadProjectTypes();
  this.updateSelectContainer();
}
updateSelectContainer() {
const body = this.sidebarBody;
if (body) {
const {overflow} = this.state;
const bodyOverflow = body.clientHeight < body.scrollHeight;
if (bodyOverflow !== overflow) {
this.setState({
overflow: bodyOverflow,
});
}
}
}
// Navigate within the app via the react-router history object.
linkToChange = (url) => {
  this.props.history.push(url);
};
/**
 * Load all project types into the store; server-side failures are surfaced
 * via a prompt, network errors via the global handler.
 */
loadProjectTypes = () => {
  const { ProjectStore } = this.props;
  ProjectStore.loadProjectTypes()
    .then((data) => {
      if (data.failed) {
        Choerodon.prompt(data.message);
        return;
      }
      ProjectStore.setProjectTypes(data);
    })
    .catch(error => Choerodon.handleResponseError(error));
};
/**
 * Load the project table data.
 *
 * @param paginationIn optional pagination override (falls back to state)
 * @param sortIn       optional sort override (falls back to state)
 * @param filtersIn    optional filters override (falls back to state)
 *
 * Fixes:
 * - `expandedRowKeys` was accidentally nested inside the `pagination` object
 *   passed to setState, so stale expanded rows were never pruned; it is now a
 *   top-level state key.
 * - pruning no longer crashes when an expanded row id is absent from the
 *   freshly loaded page (`find` returning undefined).
 */
loadProjects = (paginationIn, sortIn, filtersIn) => {
  const {
    pagination: paginationState,
    sort: sortState,
    filters: filtersState,
  } = this.state;
  const pagination = paginationIn || paginationState;
  const sort = sortIn || sortState;
  const filters = filtersIn || filtersState;
  const { AppState, ProjectStore } = this.props;
  const organizationId = AppState.currentMenuType.id;
  ProjectStore.changeLoading(true);
  // Keep the filter tags stable while the request is in flight.
  this.setState({ filters });
  // If params/filters contain special characters, empty the table instead of querying.
  const currentParams = filters.params;
  const currentFilters = {
    name: filters.name,
    code: filters.code,
    enabled: filters.enabled,
  };
  if (handleFiltersParams(currentParams, currentFilters)) {
    ProjectStore.changeLoading(false);
    ProjectStore.setProjectData([]);
    this.setState({
      sort,
      pagination: {
        total: 0,
      },
    });
    return;
  }
  ProjectStore.loadProject(organizationId, pagination, sort, filters)
    .then((data) => {
      ProjectStore.changeLoading(false);
      const list = data.list || [];
      ProjectStore.setProjectData(list);
      // Drop expanded keys whose row disappeared or no longer has sub-projects.
      const expandedRowKeys = this.state.expandedRowKeys.filter((key) => {
        const row = list.find(l => l.id === key);
        return !!(row && row.projects && row.projects.length);
      });
      this.setState({
        sort,
        expandedRowKeys, // BUG FIX: was nested inside `pagination` above
        pagination: {
          current: data.pageNum,
          pageSize: data.pageSize,
          total: data.total,
        },
      });
    })
    .catch(error => Choerodon.handleResponseError(error));
};
handleopenTab = (data, operation) => {
const {form, ProjectStore, AppState} = this.props;
const menuType = AppState.currentMenuType;
const organizationId = menuType.id;
form.resetFields();
this.setState({
errorMeg: '',
successMeg: '',
projectDatas: data || {name: null},
operation,
imgUrl: operation === 'edit' ? data.imageUrl : null,
sidebar: true,
});
if (operation === 'edit') {
setTimeout(() => {
this.editFocusInput.input.focus();
}, 10);
} else if (operation === 'create') {
setTimeout(() => {
this.createFocusInput.input.focus();
}, 10);
} else {
form.resetFields();
ProjectStore.getProjectsByGroupId(data.id).then((groupData) => {
if (groupData.failed) {
Choerodon.prompt(groupData.message);
} else {
ProjectStore.setCurrentGroup(data);
ProjectStore.setGroupProjects(groupData);
if (groupData.length === 0) {
ProjectStore.addNewProjectToGroup();
}
}
ProjectStore.getAgileProject(organizationId, data.id).then((optionAgileData) => {
if (optionAgileData.failed) {
Choerodon.prompt(optionAgileData.message);
} else {
ProjectStore.setOptionAgileData(optionAgileData);
}
});
});
}
};
/**
 * Sidebar close handler. While the avatar-upload modal is open, the first
 * invocation only dismisses that modal; otherwise the sidebar is closed and
 * the group-editing state in the store is reset.
 */
handleTabClose = () => {
  const { ProjectStore } = this.props;
  if (this.state.isShowAvatar) {
    this.setState({ isShowAvatar: false });
    return;
  }
  this.setState({ sidebar: false, submitting: false });
  ProjectStore.setGroupProjects([]);
  ProjectStore.setCurrentGroup(null);
  ProjectStore.clearProjectRelationNeedRemove();
};
/**
 * Sidebar OK handler. Dispatches on the current operation:
 *  - 'create': validate, create the project, then register it with HeaderStore
 *  - 'edit':   validate, short-circuit when nothing changed, then update
 *  - default ('add'): persist the project-group membership changes
 *
 * Fixes:
 * - the create branch ignored the validation error argument (`err`), so an
 *   invalid form could still be submitted (and `name.trim()` could throw on a
 *   missing name); it now bails out like the edit branch does.
 * - `type === 'no' || undefined ? null : type` parsed as
 *   `(type === 'no' || undefined) ? ...`, making the `undefined` operand dead;
 *   the intended "no type selected" check is now explicit.
 */
handleSubmit = (e) => {
  e.preventDefault();
  const { AppState, ProjectStore } = this.props;
  const { projectDatas, imgUrl, operation } = this.state;
  const organizationId = AppState.currentMenuType.id;
  // 'no' (or nothing selected) in the type select means "no type".
  const normalizeType = type => (type === 'no' || type === undefined ? null : type);
  if (operation === 'create') {
    const { validateFields } = this.props.form;
    validateFields((err, { code, name, type, category }) => {
      if (err) {
        return; // BUG FIX: err was previously ignored in this branch
      }
      const found = ProjectStore.getProjectCategories.find(item => item.code === category);
      const categoryId = found ? found.id : undefined;
      const data = {
        code,
        name: name.trim(),
        organizationId,
        categoryIds: [categoryId],
        type: normalizeType(type),
        imageUrl: imgUrl || null,
      };
      this.setState({ submitting: true });
      ProjectStore.createProject(organizationId, data)
        .then((value) => {
          this.setState({ submitting: false });
          if (value) {
            Choerodon.prompt(this.props.intl.formatMessage({ id: 'create.success' }));
            this.handleTabClose();
            this.loadProjects();
            // Mirror the new project into the header's project switcher.
            const targetType = ProjectStore.getProjectTypes.find(item => item.code === value.type);
            value.typeName = targetType ? targetType.name : null;
            value.type = 'project';
            value.categories = [found && found.name];
            HeaderStore.addProject(value);
          }
        })
        .catch((error) => {
          Choerodon.handleResponseError(error);
          this.setState({ submitting: false, visibleCreate: false });
        });
    });
  } else if (operation === 'edit') {
    const { validateFields } = this.props.form;
    validateFields((err, { name, type }, modify) => {
      if (err) {
        return;
      }
      if (projectDatas.imageUrl !== imgUrl) modify = true;
      if (!modify) {
        // Nothing changed — behave like a successful no-op save.
        Choerodon.prompt(this.props.intl.formatMessage({ id: 'modify.success' }));
        this.handleTabClose();
        return;
      }
      const data = {
        name: name.trim(),
        type: normalizeType(type),
        imageUrl: imgUrl || null,
      };
      this.setState({ submitting: true, buttonClicked: true });
      ProjectStore.updateProject(
        organizationId,
        {
          ...data,
          objectVersionNumber: projectDatas.objectVersionNumber,
          code: projectDatas.code,
        },
        projectDatas.id,
      ).then((value) => {
        this.setState({ submitting: false, buttonClicked: false });
        if (value) {
          Choerodon.prompt(this.props.intl.formatMessage({ id: 'modify.success' }));
          this.handleTabClose();
          this.loadProjects();
          value.type = 'project';
          HeaderStore.updateProject(value);
        }
      }).catch((error) => {
        Choerodon.handleResponseError(error);
      });
    });
  } else {
    const { validateFields } = this.props.form;
    validateFields((err, rawData) => {
      if (err) {
        return;
      }
      this.setState({ submitting: true, buttonClicked: true });
      // Remove deselected projects first, then persist the remaining membership.
      ProjectStore.axiosDeleteProjectsFromGroup(this.loadProjects);
      ProjectStore.saveProjectGroup(rawData).then((savedData) => {
        if (savedData.empty) {
          this.setState({ submitting: false, buttonClicked: false, sidebar: false });
          this.loadProjects();
        }
        if (savedData.failed) {
          Choerodon.prompt(this.props.intl.formatMessage({ id: savedData.message }));
          this.setState({ submitting: false, buttonClicked: false, sidebar: true });
        } else {
          Choerodon.prompt(this.props.intl.formatMessage({ id: 'save.success' }));
          this.setState({ submitting: false, buttonClicked: false, sidebar: false });
          this.loadProjects();
        }
      }).catch((error) => {
        Choerodon.prompt(this.props.intl.formatMessage({ id: 'save.error' }));
        Choerodon.handleResponseError(error);
      }).finally(() => {
        this.setState({ submitting: false });
      });
    });
  }
};
/**
 * Collect the ids of every project currently selected in the group form.
 * Fields whose names are pure digit strings hold the per-row project selects.
 * @returns {any[]}
 */
getSelectedProject = () => {
  const values = this.props.form.getFieldsValue();
  return Object.keys(values)
    .filter(field => isNum.test(field))
    .map(field => values[field]);
};
/**
 * DatePicker callback: decide whether `startValue` must be disabled for the
 * group-project row at `index`, based on the project's already-occupied time
 * ranges (`disabledTime`) and the currently chosen end date.
 *
 * Fix: removed a duplicated `if (!startValue) return false;` guard.
 *
 * @param startValue moment under test
 * @param index      row index in the group form
 * @returns {boolean} true when the date must be disabled
 */
disabledStartDate = (startValue, index) => {
  const { ProjectStore: { disabledTime, currentGroup }, form } = this.props;
  const projectId = form.getFieldValue(index);
  const endDate = form.getFieldValue(`endDate-${index}`);
  if (!startValue) return false;
  // Analytical groups allow overlapping membership, so nothing is disabled.
  if (currentGroup.category === 'ANALYTICAL') return false;
  // No end date chosen yet: disable any date that falls inside an occupied range.
  if (!endDate) {
    return disabledTime[projectId] && disabledTime[projectId].some(({ start, end }) => {
      if (end === null) {
        end = '2199-12-31'; // open-ended membership
      }
      // Disabled unless strictly before the range starts or after it ends.
      return !(startValue.isBefore(moment(start)) || startValue.isAfter(moment(end).add(1, 'hours')));
    });
  }
  if (endDate && startValue && startValue.isAfter(moment(endDate).add(1, 'hours'))) {
    return true;
  }
  // Otherwise the start must fall between the latest occupied end that
  // precedes `endDate`, and `endDate` itself.
  let lastDate = moment('1970-12-31');
  if (disabledTime[projectId]) {
    disabledTime[projectId].forEach((data) => {
      if (data.end && moment(data.end).isAfter(lastDate) && moment(data.end).isBefore(moment(endDate))) lastDate = moment(data.end);
    });
  }
  return !(startValue.isBefore(moment(endDate).add(1, 'hours')) && startValue.isAfter(moment(lastDate).add(1, 'hours')));
};
/**
* 根据index获得不同的可选时间
* @param endValue
* @param index
*/
disabledEndDate = (endValue, index) => {
const {ProjectStore: {disabledTime, currentGroup}, form} = this.props;
const projectId = form.getFieldValue(index);
const startDate = form.getFieldValue(`startDate-${index}`);
if (!endValue) return false;
// 开始时间没有选的时候
if (!startDate) {
return disabledTime[projectId] && disabledTime[projectId].some(({start, end}) => {
if (end === null) {
end = '2199-12-31';
}
// 若有不在可选范围之内的(开始前,结束后是可选的)则返回true
return !(endValue.isBefore(moment(start)) || endValue.isAfter(moment(end).add(1, 'hours')));
});
}
if (startDate && endValue && endValue.isBefore(startDate)) {
return true;
}
if (currentGroup.category === 'ANALYTICAL') return false;
let earlyDate = moment('2199-12-31');
if (disabledTime[projectId]) {
disabledTime[projectId].forEach((data) => {
if (moment(data.start).isBefore(earlyDate) && moment(data.start).isAfter(startDate)) earlyDate = moment(data.start);
});
}
return !(endValue.isAfter(moment(startDate).subtract(1, 'hours')) && endValue.isBefore(earlyDate));
};
/* 停用启用 */
handleEnable = (record) => {
const {ProjectStore, AppState, intl} = this.props;
const userId = AppState.getUserId;
const menuType = AppState.currentMenuType;
const orgId = menuType.id;
ProjectStore.enableProject(orgId, record.id, record.enabled).then((value) => {
Choerodon.prompt(intl.formatMessage({id: record.enabled ? 'disable.success' : 'enable.success'}));
this.loadProjects();
HeaderStore.axiosGetOrgAndPro(sessionStorage.userId || userId).then((org) => {
org[0].forEach((item) => {
item.type = ORGANIZATION_TYPE;
});
org[1].forEach((item) => {
item.type = PROJECT_TYPE;
});
HeaderStore.setProData(org[0]);
HeaderStore.setProData(org[1]);
this.forceUpdate();
});
}).catch((error) => {
Choerodon.prompt(intl.formatMessage({id: 'operation.error'}));
});
};
/* 分页处理 */
handlePageChange(pagination, filters, sorter, params) {
filters.params = params;
this.loadProjects(pagination, sorter, filters);
}
/**
 * When a row's date picker opens, re-validate that row's project select (so
 * date-conflict errors refresh) and force a re-render of the pickers.
 */
async handleDatePickerOpen(index) {
  const { form } = this.props;
  if (form.getFieldValue(`${index}`)) {
    form.validateFields([`${index}`], { force: true });
  }
  this.forceUpdate();
}
/**
 * Async form validator: check that the project code is unique within the
 * current organization.
 * @param value    project code under validation
 * @param callback invoked with a message on failure, or bare on success
 */
checkCode = (rule, value, callback) => {
  const { AppState, ProjectStore, intl } = this.props;
  const organizationId = AppState.currentMenuType.id;
  ProjectStore.checkProjectCode(organizationId, { code: value })
    .then((res) => {
      if (res.failed) {
        callback(intl.formatMessage({ id: `${intlPrefix}.code.exist.msg` }));
      } else {
        callback();
      }
    });
};
renderSideTitle() {
switch (this.state.operation) {
case 'create':
return <FormattedMessage id={`${intlPrefix}.create`}/>;
case 'edit':
return <FormattedMessage id={`${intlPrefix}.modify`}/>;
default:
return <FormattedMessage id={`${intlPrefix}.config-sub-project`}/>;
}
}
getSidebarContentInfo(operation) {
const {AppState} = this.props;
const menuType = AppState.currentMenuType;
const orgname = menuType.name;
switch (operation) {
case 'create':
return {
code: `${intlPrefix}.create`,
values: {
name: orgname,
},
};
case 'edit':
return {
code: `${intlPrefix}.modify`,
values: {
name: this.state.projectDatas.code,
},
};
default:
return {
code: `${intlPrefix}.config-sub-project`,
values: {
app: this.state.projectDatas.category === 'ANALYTICAL' ? '分析型项目群' : '普通项目群',
name: this.state.projectDatas.code,
},
};
}
}
/**
 * Options for the project select in group-form row `current`. Rows that
 * already persist a membership (`id` set) are locked to their own project;
 * otherwise every agile project not chosen in another row is offered.
 */
getOption = (current) => {
  const { ProjectStore: { optionAgileData, groupProjects }, form } = this.props;
  if (groupProjects[current].id) {
    const { projectId, projName, code } = groupProjects[current];
    const options = [];
    options.push(<Option value={projectId} key={projectId} title={projName}>
      <Tooltip title={code} placement="right" align={{ offset: [20, 0] }}>
        <span style={{ display: 'inline-block', width: '100%' }}>{projName}</span>
      </Tooltip>
    </Option>);
    return options;
  }
  // Offer projects that are unselected elsewhere, or this row's own value.
  return optionAgileData.filter(value => this.getSelectedProject().every(existProject =>
    existProject !== value.id || existProject === form.getFieldValue(current),
  )).filter(v => v.code).reduce((options, { id, name, enabled, code }) => {
    options.push(
      <Option value={id} key={id} title={name}>
        <Tooltip title={code} placement="right" align={{ offset: [20, 0] }}>
          <span style={{ display: 'inline-block', width: '100%' }}>{name}</span>
        </Tooltip>
      </Option>,
    );
    return options;
  }, []);
};
/**
 * When a project is chosen for group-form row `index`, replace that row's
 * store entry while keeping its existing dates and enabled flag.
 */
handleSelectProject = (projectId, index) => {
  const { ProjectStore } = this.props;
  const row = ProjectStore.groupProjects[index];
  ProjectStore.setGroupProjectByIndex(index, {
    projectId,
    startDate: row.startDate,
    endDate: row.endDate,
    enabled: row.enabled,
  });
};
handleCheckboxChange = (value, index) => {
const {form, ProjectStore, ProjectStore: {groupProjects, currentGroup}} = this.props;
if (currentGroup.category === 'ANALYTICAL') return;
if (value && groupProjects[index].id) {
const newValue = {};
newValue[`enabled-${index}`] = value.target.checked;
form.setFieldsValue(newValue);
ProjectStore.setGroupProjectByIndex(index, {...groupProjects[index], enabled: value.target.checked});
form.resetFields(`enabled-${index}`);
}
};
/**
 * Form validator for a row's project select. Date-conflict checking is
 * currently disabled (the original implementation is kept below for
 * reference), so every value passes.
 */
validateDate = (projectId, index, callback) => {
  callback();
  // const { ProjectStore: { disabledTime, groupProjects }, form, ProjectStore } = this.props;
  // if (!projectId) callback();
  // if (groupProjects[projectId] && groupProjects[projectId].id) callback();
  // if (projectId) {
  // ProjectStore.setDisabledTime(projectId).then(() => {
  // if (disabledTime[projectId]) {
  // const startValue = form.getFieldValue(`startDate-${index}`);
  // const endValue = form.getFieldValue(`endDate-${index}`);
  // if (this.disabledStartDate(startValue, index) || this.disabledEndDate(endValue, index)) {
  // callback('日期冲突,请重新选择日期');
  // } else {
  // callback();
  // }
  // }
  // }).catch((err) => {
  // callback('网络错误');
  // Choerodon.handleResponseError(err);
  // });
  // }
};
getAddGroupProjectContent = (operation) => {
const {intl, ProjectStore: {groupProjects}, form} = this.props;
const {getFieldDecorator} = form;
if (operation !== 'add') return;
const formItems = groupProjects.map(({projectId, enabled, id}, index) => {
const key = !projectId ? `project-index-${index}` : String(projectId);
return (
<React.Fragment>
<FormItem
{...formItemLayout}
key={key}
className="c7n-iam-project-inline-formitem"
>
{getFieldDecorator(`${index}`, {
initialValue: projectId,
rules: [{
required: true,
message: '请选择项目',
}, {
validator: (rule, value, callback) => this.validateDate(value, index, callback),
}],
})(
<Select
className="member-role-select"
style={{width: 200, marginTop: -2}}
label={<FormattedMessage id="organization.project.name"/>}
disabled={!!id}
onChange={e => this.handleSelectProject(e, index)}
filterOption={(input, option) => {
const childNode = option.props.children;
if (childNode && React.isValidElement(childNode)) {
return childNode.props.children.props.children.toLowerCase().indexOf(input.toLowerCase()) >= 0;
}
return false;
}}
filter
>
{this.getOption(index)}
</Select>,
)}
</FormItem>
<FormItem
{...formItemLayout}
className="c7n-iam-project-inline-formitem c7n-iam-project-inline-formitem-checkbox"
>
{getFieldDecorator(`enabled-${index}`, {
initialValue: enabled,
})(
<Checkbox onChange={value => this.handleCheckboxChange(value, index)}
checked={form.getFieldValue(`enabled-${index}`)}>是否启用</Checkbox>,
)}
</FormItem>
<Button
size="small"
icon="delete"
shape="circle"
onClick={() => this.removeProjectFromGroup(index)}
// disabled={roleIds.length === 1 && selectType === 'create'}
className="c7n-iam-project-inline-formitem-button"
/>
</React.Fragment>
);
});
return formItems;
};
/** Delete row `index` from the group form and reset the field bindings. */
removeProjectFromGroup = (index) => {
  const { ProjectStore, form } = this.props;
  ProjectStore.removeProjectFromGroup(index);
  form.resetFields();
};
/**
 * Sidebar body. Renders, depending on the operation:
 *  - code input (create only)
 *  - name input (create/edit)
 *  - category select (create only, when categories are enabled)
 *  - avatar editor (create/edit)
 *  - group-membership rows (add)
 */
renderSidebarContent() {
  const { intl, ProjectStore, form } = this.props;
  const { getFieldDecorator } = form;
  const { operation, projectDatas, categoryEnabled, overflow } = this.state;
  const types = ProjectStore.getProjectTypes;
  const inputWidth = 512;
  const contentInfo = this.getSidebarContentInfo(operation);
  return (
    <Content
      {...contentInfo}
      className="sidebar-content"
    >
      <Form layout="vertical" className="rightForm" style={{ width: operation === 'add' ? 512 : 800 }}>
        {operation === 'create' && operation !== 'add' && (<FormItem
          {...formItemLayout}
        >
          {getFieldDecorator('code', {
            rules: [{
              required: true,
              whitespace: true,
              message: intl.formatMessage({ id: `${intlPrefix}.code.require.msg` }),
            }, {
              max: 14,
              message: intl.formatMessage({ id: `${intlPrefix}.code.length.msg` }),
            }, {
              // lowercase alphanumerics, single dashes, must start with a letter
              pattern: /^[a-z](([a-z0-9]|-(?!-))*[a-z0-9])*$/,
              message: intl.formatMessage({ id: `${intlPrefix}.code.pattern.msg` }),
            }, {
              validator: this.checkCode,
            }],
            validateTrigger: 'onBlur',
            validateFirst: true,
          })(
            <Input
              autoComplete="off"
              label={<FormattedMessage id={`${intlPrefix}.code`}/>}
              style={{ width: inputWidth }}
              ref={(e) => {
                this.createFocusInput = e;
              }}
              maxLength={14}
              showLengthInfo={false}
            />,
          )}
        </FormItem>)}
        {operation !== 'add' && (
          <FormItem
            {...formItemLayout}
          >
            {getFieldDecorator('name', {
              rules: [{
                required: true,
                whitespace: true,
                message: intl.formatMessage({ id: `${intlPrefix}.name.require.msg` }),
              }, {
                /* eslint-disable-next-line */
                pattern: /^[-—\.\w\s\u4e00-\u9fa5]{1,32}$/,
                message: intl.formatMessage({ id: `${intlPrefix}.name.pattern.msg` }),
              }],
              validateTrigger: 'onBlur',
              validateFirst: true,
              initialValue: operation === 'create' ? undefined : projectDatas.name,
            })(
              <Input
                autoComplete="off"
                label={<FormattedMessage id={`${intlPrefix}.name`}/>}
                style={{ width: inputWidth }}
                ref={(e) => {
                  this.editFocusInput = e;
                }}
                maxLength={32}
                showLengthInfo={false}
              />,
            )}
          </FormItem>
        )}
        {operation === 'create' && operation !== 'add' && categoryEnabled && (
          <FormItem
            {...formItemLayout}
          >
            {getFieldDecorator('category', {
              rules: [{
                required: true,
                whitespace: true,
                message: intl.formatMessage({ id: `${intlPrefix}.category.require.msg` }),
              }],
              initialValue: 'AGILE',
            })(
              <Select
                style={{ width: 512 }}
                label={<FormattedMessage id={`${intlPrefix}.category`}/>}
                notFoundContent={intl.formatMessage({ id: 'organization.project.category.notfound' })}
                onFilterChange={this.handleCategorySelectFilter}
                getPopupContainer={() => document.getElementsByClassName('sidebar-content')[0].parentNode}
                filterOption={false}
                optionFilterProp="children"
                loading={this.state.selectLoading}
                filter
              >
                {this.getCategoriesOption()}
              </Select>,
            )}
          </FormItem>
        )}
        {operation !== 'add' && (
          <div>
            <span style={{ color: 'rgba(0,0,0,.6)' }}>{intl.formatMessage({ id: `${intlPrefix}.avatar` })}</span>
            {this.getAvatar()}
          </div>
        )}
        {this.getAddGroupProjectContent(operation)}
      </Form>
    </Content>
  );
}
// Avatar preview with an overlayed camera button that opens the uploader.
getAvatar() {
  const { isShowAvatar, imgUrl, projectDatas } = this.state;
  return (
    <div className="c7n-iam-project-avatar">
      <div
        className="c7n-iam-project-avatar-wrap"
        style={{
          backgroundColor: projectDatas.name ? ' #c5cbe8' : '#ccc',
          backgroundImage: imgUrl ? `url(${Choerodon.fileServer(imgUrl)})` : '',
        }}
      >
        {/* Fall back to the project's initial when no image is set. */}
        {!imgUrl && projectDatas && projectDatas.name && projectDatas.name.charAt(0)}
        <Button className={classnames('c7n-iam-project-avatar-button', {
          'c7n-iam-project-avatar-button-create': !projectDatas.name,
          'c7n-iam-project-avatar-button-edit': projectDatas.name
        })} onClick={this.openAvatarUploader}>
          <div className="c7n-iam-project-avatar-button-icon">
            <Icon type="photo_camera"/>
          </div>
        </Button>
        <AvatarUploader visible={isShowAvatar} intlPrefix="organization.project.avatar.edit"
                        onVisibleChange={this.closeAvatarUploader} onUploadOk={this.handleUploadOk}/>
      </div>
    </div>
  );
}
/** Show the avatar-upload modal. */
openAvatarUploader = () => {
  this.setState({ isShowAvatar: true });
}
/**
 * Sync the avatar-upload modal's visibility.
 * @param visible whether the modal should be shown
 */
closeAvatarUploader = (visible) => {
  this.setState({ isShowAvatar: visible });
}
/** Persist the freshly uploaded avatar URL and close the modal. */
handleUploadOk = (res) => {
  this.setState({ imgUrl: res, isShowAvatar: false });
}
/**
 * Track which program rows are expanded in the table.
 *
 * Fix: the previous implementation mutated `this.state.expandedRowKeys` in
 * place (push/splice) before calling setState; state is now updated
 * immutably via a fresh array.
 */
handleExpand = (expanded, record) => {
  const expandedRowKeys = expanded
    ? [...this.state.expandedRowKeys, record.id]
    : this.state.expandedRowKeys.filter(id => id !== record.id);
  this.setState({ expandedRowKeys });
}
/** Jump into the project workspace when the current user is a member of it. */
goToProject = (record) => {
  if (!this.canGotoProject(record)) {
    return;
  }
  const { id, name, organizationId } = record;
  window.location = `#/?type=project&id=${id}&name=${name}&organizationId=${organizationId}`;
};
/**
 * Debounced filter handler for the category select: waits 300ms after the
 * last keystroke before querying; an empty query reloads immediately.
 */
handleCategorySelectFilter = (value) => {
  this.setState({
    selectLoading: true,
  });
  const queryObj = {
    param: value,
  };
  if (timer) {
    clearTimeout(timer);
  }
  if (value) {
    timer = setTimeout(() => this.loadProjectCategories(queryObj), 300);
  } else {
    return this.loadProjectCategories(queryObj);
  }
}
/**
 * Load project categories (optionally filtered by `queryObj.param`) into the
 * store and clear the select's loading flag.
 */
loadProjectCategories = (queryObj) => {
  const { ProjectStore } = this.props;
  ProjectStore.loadProjectCategories(queryObj).then((data) => {
    ProjectStore.setProjectCategories((data.list || []).slice());
    this.setState({ selectLoading: false });
  });
}
/**
* 获取项目类型下拉选项
* @returns {any[]}
*/
getCategoriesOption = () => {
const { ProjectStore } = this.props;
const projectCategories = ProjectStore.getProjectCategories;
return projectCategories && projectCategories.length > 0 ? (
projectCategories.map(({code, name}) => (
<Option key={code} value={`${code}`}>{name}</Option>
))
) : null;
}
// A project is reachable iff it appears in the header's project list.
canGotoProject = record => HeaderStore.proData.some(({ id }) => id === record.id);
getGotoTips = (record) => {
if (this.canGotoProject(record)) {
return (<FormattedMessage id={`${intlPrefix}.redirect`} values={{name: record.name}}/>);
} else if (!record.enabled) {
return (<FormattedMessage id={`${intlPrefix}.redirect.disable`}/>);
} else {
return (<FormattedMessage id={`${intlPrefix}.redirect.no-permission`}/>);
}
};
getAddOtherBtn = () => (
<Button type="primary" className="add-other-project" icon="add" onClick={this.addProjectList}>
<FormattedMessage id="organization.project.add.project"/>
</Button>
);
/**
 * Append an empty row to the group-configuration form.
 * Fix: removed the unused destructured `AppState`/`intl` bindings.
 */
addProjectList = () => {
  this.props.ProjectStore.addNewProjectToGroup();
};
/** Map a project category code to its icon name ('project_line' fallback). */
getCategoryIcon = (category) => {
  const iconByCategory = {
    AGILE: 'project_line',
    PROGRAM: 'project_group',
    ANALYTICAL: 'project_group_analyze',
  };
  return iconByCategory[category] || 'project_line';
};
renderExpandRowRender(source) {
const {intl} = this.props;
if (!source.category === 'PROGRAM') {
return null;
}
const columns = [{
title: <FormattedMessage id="name"/>,
dataIndex: 'name',
key: 'name',
// width: '25%',
width: '320px',
render: (text, record) => (
<div className="c7n-iam-project-name-link" onClick={() => this.goToProject(record)} style={{paddingLeft: 26}}>
<MouseOverWrapper text={text} width={0.2}>
<StatusTag mode="icon" name={text} colorCode={record.enabled ? 'COMPLETED' : 'DISABLE'}/>
{/* {text} */}
</MouseOverWrapper>
</div>
),
}, {
title: <FormattedMessage id="code"/>,
dataIndex: 'code',
}];
return (
<Table
pagination={false}
filterBar={false}
showHeader={false}
bordered={false}
columns={columns}
dataSource={source.projects || []}
rowKey={record => record.id}
/>
);
}
render() {
  const { ProjectStore, AppState, intl } = this.props;
  const projectData = ProjectStore.getProjectData;
  const projectTypes = ProjectStore.getProjectTypes;
  const categories = ProjectStore.getProjectCategories;
  const menuType = AppState.currentMenuType;
  const orgId = menuType.id;
  const orgname = menuType.name;
  const { filters, operation, categoryEnabled } = this.state;
  const { type } = menuType;
  const filtersType = projectTypes && projectTypes.map(({ name }) => ({
    value: name,
    text: name,
  }));
  // Leading columns: name (links into the project), code, enabled status.
  const preColumn = [{
    title: <FormattedMessage id="name"/>,
    dataIndex: 'name',
    key: 'name',
    filters: [],
    filteredValue: filters.name || [],
    // width: categoryEnabled ? '20%' : '30%',
    width: '320px',
    render: (text, record) => (
      <div className="c7n-iam-project-name-link" onClick={() => this.goToProject(record)}>
        <MouseOverWrapper text={text} width={0.2}>
          <Icon type={record.category === 'PROGRAM' ? 'project_group' : 'project_line'}
                style={{ marginRight: 8 }}/>{text}
        </MouseOverWrapper>
      </div>
    ),
  }, {
    title: <FormattedMessage id="code"/>,
    dataIndex: 'code',
    filters: [],
    filteredValue: filters.code || [],
    key: 'code',
    // width: categoryEnabled ? '20%' : '30%',
    render: text => (
      <MouseOverWrapper text={text} width={0.2}>
        {text}
      </MouseOverWrapper>
    ),
  }, {
    title: <FormattedMessage id="status"/>,
    width: '160px',
    dataIndex: 'enabled',
    filters: [{
      text: intl.formatMessage({ id: 'enable' }),
      value: 'true',
    }, {
      text: intl.formatMessage({ id: 'disable' }),
      value: 'false',
    }],
    filteredValue: filters.enabled || [],
    key: 'enabled',
    render: (enabled, record) => (
      <span style={{
        marginRight: 8,
        fontSize: '12px',
        lineHeight: '18px',
        padding: '2px 6px',
        background: record.enabled ? 'rgba(0, 191, 165, 0.1)' : 'rgba(244, 67, 54, 0.1)',
        color: record.enabled ? '#009688' : '#D50000',
        borderRadius: '2px',
        border: '1px solid',
        borderColor: record.enabled ? '#009688' : '#D50000'
      }}>
        {record.enabled ? '启用' : '停用'}
      </span>
    ),
  }];
  // Trailing action column: group config / edit / enable-disable buttons.
  const nextColumn = [{
    title: '',
    key: 'action',
    width: '120px',
    align: 'right',
    render: (text, record) => (
      <div>
        {record.category === 'PROGRAM' && record.enabled && (
          <Tooltip
            title={<FormattedMessage id={`${intlPrefix}.config`}/>}
            placement="bottom"
          >
            <Button
              shape="circle"
              size="small"
              onClick={this.handleopenTab.bind(this, record, 'add')}
              icon="predefine"
            />
          </Tooltip>
        )}
        <Permission service={['iam-service.organization-project.update']} type={type} organizationId={orgId}>
          <Tooltip
            title={<FormattedMessage id="modify"/>}
            placement="bottom"
          >
            <Button
              shape="circle"
              size="small"
              onClick={this.handleopenTab.bind(this, record, 'edit')}
              icon="mode_edit"
            />
          </Tooltip>
        </Permission>
        <Permission
          service={['iam-service.organization-project.disableProject', 'iam-service.organization-project.enableProject']}
          type={type}
          organizationId={orgId}
        >
          <Tooltip
            title={<FormattedMessage id={record.enabled ? 'disable' : 'enable'}/>}
            placement="bottom"
          >
            <Button
              shape="circle"
              size="small"
              onClick={this.handleEnable.bind(this, record)}
              icon={record.enabled ? 'remove_circle_outline' : 'finished'}
            />
          </Tooltip>
        </Permission>
      </div>
    ),
  }];
  // Category column is only present when the system setting enables categories.
  const middleColumn = categoryEnabled ? [{
    title: <FormattedMessage id={`${intlPrefix}.type.category`}/>,
    dataIndex: 'category',
    key: 'category',
    width: '15%',
    render: category => {
      let find = categories && categories.find(item => item.code === category);
      return (
        <span>{find ? find.name : ''}</span>)
    },
    // filters: filtersType,
    filteredValue: filters.typeName || [],
  }] : [];
  const columns = [
    ...preColumn,
    ...middleColumn,
    ...nextColumn,
  ];
  return (
    <Page
      className={`${prefixCls}-iam-project`}
      service={[
        'iam-service.organization-project.list',
        'iam-service.organization-project.create',
        'iam-service.organization-project.check',
        'iam-service.organization-project.update',
        'iam-service.organization-project.disableProject',
        'iam-service.organization-project.enableProject',
      ]}
    >
      <Header title={<FormattedMessage id={`${intlPrefix}.header.title`}/>}>
        <Permission service={['iam-service.organization-project.create']} type={type} organizationId={orgId}>
          <Button
            onClick={this.handleopenTab.bind(this, null, 'create')}
            icon="playlist_add"
          >
            <FormattedMessage id={`${intlPrefix}.create`}/>
          </Button>
        </Permission>
        <Button
          icon="refresh"
          onClick={() => {
            // Reset filters/pagination/sort to defaults, then reload.
            ProjectStore.changeLoading(true);
            this.setState({
              filters: {
                params: [],
              },
              pagination: {
                current: 1,
                pageSize: 10,
                total: '',
              },
              sort: {
                columnKey: null,
                order: null,
              },
            }, () => {
              this.loadProjects();
            });
          }}
        >
          <FormattedMessage id="refresh"/>
        </Button>
      </Header>
      <Content
        code={intlPrefix}
      >
        <Table
          pagination={this.state.pagination}
          columns={columns}
          dataSource={projectData}
          rowKey={record => record.id}
          filters={this.state.filters.params}
          onChange={this.handlePageChange.bind(this)}
          loading={ProjectStore.isLoading}
          expandedRowRender={record => this.renderExpandRowRender(record)}
          filterBarPlaceholder={intl.formatMessage({ id: 'filtertable' })}
          rowClassName={(record, index) => `${record.category === 'PROGRAM' && record.projects && record.projects.length ? '' : 'hidden-expand'}`}
        />
        <Sidebar
          title={this.renderSideTitle()}
          visible={this.state.sidebar}
          onCancel={this.handleTabClose.bind(this)}
          onOk={this.handleSubmit.bind(this)}
          okText={<FormattedMessage id={operation === 'create' ? 'create' : 'save'}/>}
          cancelText={<FormattedMessage id="cancel"/>}
          confirmLoading={this.state.submitting}
          className="c7n-iam-project-sidebar"
        >
          {operation && this.renderSidebarContent()}
          {operation === 'add' && this.getAddOtherBtn()}
        </Sidebar>
      </Content>
    </Page>
  );
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/enums/LdapSyncType.java<|end_filename|>
package io.choerodon.iam.infra.enums;
/**
* @author superlee
* @since 0.16.0
*/
public enum LdapSyncType {
    /**
     * Synchronize users from the LDAP directory.
     */
    SYNC("sync"),
    /**
     * Disable users during an LDAP sync.
     */
    DISABLE("disable");

    // Fix: the wire value of an enum constant never changes — made it final.
    private final String value;

    LdapSyncType(String value) {
        this.value = value;
    }

    /**
     * @return the lowercase string value of this sync type
     */
    public String value() {
        return value;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/ExcelService.java<|end_filename|>
package io.choerodon.iam.app.service;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
import org.springframework.web.multipart.MultipartFile;
/**
* @author superlee
*/
/**
 * Excel-based batch user import/export operations.
 */
public interface ExcelService {
    /**
     * Import users from an uploaded Excel workbook.
     *
     * @param id            scope id the users belong to — presumably the
     *                      organization id; confirm against the implementation
     * @param multipartFile uploaded Excel file
     */
    void importUsers(Long id, MultipartFile multipartFile);

    /**
     * @return the Excel template resource used for user import
     */
    Resource getUserTemplates();

    /**
     * @return HTTP headers for returning the template as a file download
     */
    HttpHeaders getHttpHeaders();
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/asserts/DetailsHelperAssert.java<|end_filename|>
package io.choerodon.iam.infra.asserts;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.oauth.CustomClientDetails;
import io.choerodon.core.oauth.CustomUserDetails;
import io.choerodon.core.oauth.DetailsHelper;
/**
* @author superlee
* @since 2019-04-15
*/
/**
 * Assertion helpers around {@link DetailsHelper}: fetch the current user or
 * client details and fail fast with a {@link CommonException} when absent.
 */
public class DetailsHelperAssert {

    /**
     * @return the current user's details
     * @throws CommonException with the default code when nobody is logged in
     */
    public static CustomUserDetails userDetailNotExisted() {
        return userDetailNotExisted("error.user.not.login");
    }

    /**
     * @param message error code used when no user is logged in
     * @return the current user's details
     */
    public static CustomUserDetails userDetailNotExisted(String message) {
        CustomUserDetails details = DetailsHelper.getUserDetails();
        if (details != null) {
            return details;
        }
        throw new CommonException(message);
    }

    /**
     * @return the current client's details
     * @throws CommonException with the default code when no client is present
     */
    public static CustomClientDetails clientDetailNotExisted() {
        return clientDetailNotExisted("error.client.not.found");
    }

    /**
     * @param message error code used when no client is present
     * @return the current client's details
     */
    public static CustomClientDetails clientDetailNotExisted(String message) {
        CustomClientDetails details = DetailsHelper.getClientDetails();
        if (details != null) {
            return details;
        }
        throw new CommonException(message);
    }

    /**
     * Fail unless {@code userId} is the id of the currently logged-in user.
     */
    public static void notCurrentUser(Long userId) {
        CustomUserDetails current = userDetailNotExisted();
        if (!current.getUserId().equals(userId)) {
            throw new CommonException("error.bookMark.notCurrentUser");
        }
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/OrganizationProjectServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.pagehelper.Page;
import com.github.pagehelper.PageInfo;
import io.choerodon.asgard.saga.annotation.Saga;
import io.choerodon.asgard.saga.dto.StartInstanceDTO;
import io.choerodon.asgard.saga.feign.SagaClient;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.iam.ResourceLevel;
import io.choerodon.core.oauth.CustomUserDetails;
import io.choerodon.core.oauth.DetailsHelper;
import io.choerodon.iam.api.dto.ProjectCategoryDTO;
import io.choerodon.iam.api.dto.payload.ProjectEventPayload;
import io.choerodon.iam.app.service.OrganizationProjectService;
import io.choerodon.iam.app.service.RoleMemberService;
import io.choerodon.iam.app.service.UserService;
import io.choerodon.iam.infra.asserts.DetailsHelperAssert;
import io.choerodon.iam.infra.asserts.OrganizationAssertHelper;
import io.choerodon.iam.infra.asserts.ProjectAssertHelper;
import io.choerodon.iam.infra.asserts.UserAssertHelper;
import io.choerodon.iam.infra.common.utils.PageUtils;
import io.choerodon.iam.infra.dto.*;
import io.choerodon.iam.infra.enums.ProjectCategory;
import io.choerodon.iam.infra.enums.RoleLabel;
import io.choerodon.iam.infra.exception.EmptyParamException;
import io.choerodon.iam.infra.exception.IllegalArgumentException;
import io.choerodon.iam.infra.exception.InsertException;
import io.choerodon.iam.infra.exception.UpdateExcetion;
import io.choerodon.iam.infra.feign.AsgardFeignClient;
import io.choerodon.iam.infra.mapper.*;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
import java.util.*;
import java.util.stream.Collectors;
import static io.choerodon.iam.infra.common.utils.SagaTopic.Project.*;
import static io.choerodon.iam.infra.asserts.UserAssertHelper.WhichColumn;
/**
 * Organization-level project management: creating, updating, enabling and
 * disabling projects, maintaining project categories and project-group
 * (program) relationships, and publishing saga events so that downstream
 * services stay in sync when {@code choerodon.devops.message} is enabled.
 *
 * @author flyleft
 * @since 2018/3/26
 */
@Service
@RefreshScope
public class OrganizationProjectServiceImpl implements OrganizationProjectService {

    private static final String PROJECT_NOT_EXIST_EXCEPTION = "error.project.not.exist";
    // Category code assigned to a project when the caller supplies none.
    public static final String PROJECT_DEFAULT_CATEGORY = "AGILE";
    // Saga ref type used when starting project-related saga instances.
    public static final String PROJECT = "project";

    // When true, project lifecycle changes are propagated to devops via saga messages.
    @Value("${choerodon.devops.message:false}")
    private boolean devopsMessage;
    @Value("${spring.application.name:default}")
    private String serviceName;
    // Feature switch: category-aware behavior (project_map_category table, category queries).
    @Value("${choerodon.category.enabled:false}")
    private Boolean categoryEnable;

    private SagaClient sagaClient;
    private UserService userService;
    private AsgardFeignClient asgardFeignClient;
    private ProjectMapCategoryMapper projectMapCategoryMapper;
    private ProjectRelationshipMapper projectRelationshipMapper;
    private ProjectCategoryMapper projectCategoryMapper;
    private ProjectMapper projectMapper;
    private ProjectTypeMapper projectTypeMapper;
    private RoleMapper roleMapper;
    private LabelMapper labelMapper;
    private ProjectAssertHelper projectAssertHelper;
    private OrganizationAssertHelper organizationAssertHelper;
    private UserAssertHelper userAssertHelper;
    private RoleMemberService roleMemberService;

    // Shared JSON serializer for saga payloads.
    private final ObjectMapper mapper = new ObjectMapper();

    public OrganizationProjectServiceImpl(SagaClient sagaClient,
                                          UserService userService,
                                          AsgardFeignClient asgardFeignClient,
                                          ProjectMapCategoryMapper projectMapCategoryMapper,
                                          ProjectCategoryMapper projectCategoryMapper,
                                          ProjectMapper projectMapper,
                                          ProjectAssertHelper projectAssertHelper,
                                          ProjectTypeMapper projectTypeMapper,
                                          OrganizationAssertHelper organizationAssertHelper,
                                          UserAssertHelper userAssertHelper,
                                          RoleMapper roleMapper,
                                          LabelMapper labelMapper,
                                          ProjectRelationshipMapper projectRelationshipMapper,
                                          RoleMemberService roleMemberService) {
        this.sagaClient = sagaClient;
        this.userService = userService;
        this.asgardFeignClient = asgardFeignClient;
        this.projectMapCategoryMapper = projectMapCategoryMapper;
        this.projectCategoryMapper = projectCategoryMapper;
        this.projectMapper = projectMapper;
        this.projectAssertHelper = projectAssertHelper;
        this.organizationAssertHelper = organizationAssertHelper;
        this.projectTypeMapper = projectTypeMapper;
        this.userAssertHelper = userAssertHelper;
        this.roleMapper = roleMapper;
        this.labelMapper = labelMapper;
        this.projectRelationshipMapper = projectRelationshipMapper;
        this.roleMemberService = roleMemberService;
    }

    /**
     * Creates a project (enabled by default), assigns the creator the
     * project-owner role, and — when devops messaging is on — publishes a
     * PROJECT_CREATE saga event. When categories are enabled, also links
     * the project to its categories.
     *
     * @param projectDTO project to create
     * @return the created project
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    @Saga(code = PROJECT_CREATE, description = "iam创建项目", inputSchemaClass = ProjectEventPayload.class)
    public ProjectDTO createProject(ProjectDTO projectDTO) {
        List<Long> categoryIds = projectDTO.getCategoryIds();
        Boolean enabled = projectDTO.getEnabled();
        // Projects are enabled by default unless the caller says otherwise.
        projectDTO.setEnabled(enabled == null ? true : enabled);
        ProjectDTO dto;
        if (devopsMessage) {
            dto = createProjectBySaga(projectDTO);
        } else {
            dto = create(projectDTO);
            initMemberRole(dto);
        }
        if (categoryEnable) {
            initProjectCategories(categoryIds, dto);
        }
        return dto;
    }

    /**
     * Inserts the project row after validating that the organization exists,
     * the code is unique within the organization, and (if given) the project
     * type exists.
     *
     * @param projectDTO project to insert
     * @return the persisted project re-read by primary key
     */
    @Override
    public ProjectDTO create(ProjectDTO projectDTO) {
        Long organizationId = projectDTO.getOrganizationId();
        organizationAssertHelper.organizationNotExisted(organizationId);
        projectAssertHelper.codeExisted(projectDTO.getCode(), organizationId);
        if (projectMapper.insertSelective(projectDTO) != 1) {
            throw new CommonException("error.project.create");
        }
        // NOTE(review): the type check happens after the insert; the transaction
        // in callers rolls the insert back when the type does not exist.
        ProjectTypeDTO projectTypeDTO = new ProjectTypeDTO();
        projectTypeDTO.setCode(projectDTO.getType());
        if (projectDTO.getType() != null && projectTypeMapper.selectCount(projectTypeDTO) != 1) {
            throw new CommonException("error.project.type.notExist");
        }
        return projectMapper.selectByPrimaryKey(projectDTO);
    }

    /**
     * Links the project to the given categories, or to the default "AGILE"
     * category when none are supplied.
     *
     * @param categoryIds category ids chosen by the caller (may be empty)
     * @param dto         the newly created project
     */
    private void initProjectCategories(List<Long> categoryIds, ProjectDTO dto) {
        if (CollectionUtils.isEmpty(categoryIds)) {
            // No categories supplied: fall back to the default "AGILE" category.
            List<Long> ids = new ArrayList<>();
            ProjectCategoryDTO projectCategory = new ProjectCategoryDTO();
            projectCategory.setCode(PROJECT_DEFAULT_CATEGORY);
            ProjectCategoryDTO result = projectCategoryMapper.selectOne(projectCategory);
            if (result != null) {
                ids.add(result.getId());
                dto.setCategoryIds(ids);
            }
        } else {
            categoryIds.forEach(id -> {
                ProjectMapCategoryDTO example = new ProjectMapCategoryDTO();
                example.setCategoryId(id);
                example.setProjectId(dto.getId());
                if (projectMapCategoryMapper.insertSelective(example) != 1) {
                    throw new InsertException("error.projectMapCategory.insert");
                }
            });
        }
    }

    /**
     * Creates the project and publishes the PROJECT_CREATE saga event carrying
     * the creator, project and organization info plus the role labels assigned.
     *
     * @param projectDTO project to create
     * @return the created project
     */
    private ProjectDTO createProjectBySaga(final ProjectDTO projectDTO) {
        ProjectEventPayload projectEventMsg = new ProjectEventPayload();
        CustomUserDetails details = DetailsHelper.getUserDetails();
        OrganizationDTO organizationDTO = organizationAssertHelper.organizationNotExisted(projectDTO.getOrganizationId());
        if (details != null && details.getUserId() != 0) {
            projectEventMsg.setUserName(details.getUsername());
            projectEventMsg.setUserId(details.getUserId());
        } else {
            // No logged-in user (e.g. system call): attribute the event to the organization owner.
            Long userId = organizationDTO.getUserId();
            UserDTO userDTO = userAssertHelper.userNotExisted(userId);
            projectEventMsg.setUserId(userId);
            projectEventMsg.setUserName(userDTO.getLoginName());
        }
        ProjectDTO dto = create(projectDTO);
        //init member_role
        projectEventMsg.setRoleLabels(initMemberRole(dto));
        projectEventMsg.setProjectId(dto.getId());
        projectEventMsg.setProjectCode(dto.getCode());
        projectEventMsg.setProjectCategory(dto.getCategory());
        projectEventMsg.setProjectName(dto.getName());
        projectEventMsg.setImageUrl(projectDTO.getImageUrl());
        projectEventMsg.setOrganizationCode(organizationDTO.getCode());
        projectEventMsg.setOrganizationName(organizationDTO.getName());
        try {
            String input = mapper.writeValueAsString(projectEventMsg);
            sagaClient.startSaga(PROJECT_CREATE, new StartInstanceDTO(input, PROJECT, dto.getId() + "", ResourceLevel.ORGANIZATION.value(), dto.getOrganizationId()));
        } catch (Exception e) {
            throw new CommonException("error.organizationProjectService.createProject.event", e);
        }
        return dto;
    }

    /**
     * Grants the current user every project-level role carrying the
     * PROJECT_OWNER label on the new project.
     *
     * @param project the newly created project
     * @return the label names of the roles granted (forwarded to devops)
     */
    private Set<String> initMemberRole(ProjectDTO project) {
        List<RoleDTO> roles = roleMapper.selectRolesByLabelNameAndType(RoleLabel.PROJECT_OWNER.value(), "role", null);
        if (roles.isEmpty()) {
            throw new CommonException("error.role.not.found.by.label", RoleLabel.PROJECT_OWNER.value(), "role");
        }
        CustomUserDetails customUserDetails = DetailsHelper.getUserDetails();
        if (customUserDetails == null) {
            throw new CommonException("error.user.not.login");
        }
        Long projectId = project.getId();
        Long userId = customUserDetails.getUserId();
        Set<String> labelNames = new HashSet<>();
        roles.forEach(role -> {
            // Only project-level roles are assigned when creating a project.
            if (ResourceLevel.PROJECT.value().equals(role.getResourceLevel())) {
                // Collect all labels of the matching roles; they are sent to devops for processing.
                List<LabelDTO> labels = labelMapper.selectByRoleId(role.getId());
                labelNames.addAll(labels.stream().map(LabelDTO::getName).collect(Collectors.toList()));
                MemberRoleDTO memberRole = new MemberRoleDTO();
                memberRole.setRoleId(role.getId());
                memberRole.setMemberType("user");
                memberRole.setMemberId(userId);
                memberRole.setSourceId(projectId);
                memberRole.setSourceType(ResourceType.PROJECT.value());
                roleMemberService.insertSelective(memberRole);
            }
        });
        return labelNames;
    }

    /**
     * Returns all projects matching the example DTO (no paging, no keyword).
     */
    @Override
    public List<ProjectDTO> queryAll(ProjectDTO projectDTO) {
        return projectMapper.fulltextSearch(projectDTO, null, null, null);
    }

    /**
     * Pages projects by example + fulltext keyword. Uses the category-aware
     * queries when categories are enabled; a page size of 0 disables paging
     * and returns everything.
     *
     * @param projectDTO  example filter
     * @param pageRequest page/size
     * @param param       fulltext keyword (nullable)
     * @return one page of projects
     */
    @Override
    public PageInfo<ProjectDTO> pagingQuery(ProjectDTO projectDTO, PageRequest pageRequest, String param) {
        int page = pageRequest.getPage();
        int size = pageRequest.getSize();
        Page<ProjectDTO> result = new Page<>(page, size);
        boolean doPage = (pageRequest.getSize() != 0);
        if (doPage) {
            // Manual paging: compute the offset and set the total separately.
            int start = PageUtils.getBegin(page, size);
            int count;
            if (categoryEnable) {
                count = projectMapper.fulltextSearchCountIgnoreProgramProject(projectDTO, param);
                result.setTotal(count);
                result.addAll(projectMapper.fulltextSearchCategory(projectDTO, param, start, size));
            } else {
                count = projectMapper.fulltextSearchCount(projectDTO, param);
                result.setTotal(count);
                result.addAll(projectMapper.fulltextSearch(projectDTO, param, start, size));
            }
        } else {
            if (categoryEnable) {
                result.addAll(projectMapper.fulltextSearchCategory(projectDTO, param, null, null));
            } else {
                result.addAll(projectMapper.fulltextSearch(projectDTO, param, null, null));
            }
            result.setTotal(result.size());
        }
        return result.toPageInfo();
    }

    /**
     * Updates a project inside an organization and, when devops messaging is
     * on, publishes a PROJECT_UPDATE saga event. The project code is never
     * updated here (it is cleared before the update).
     *
     * @param organizationId organization owning the project
     * @param projectDTO     new project state (must carry objectVersionNumber)
     * @return the updated project
     */
    @Transactional(rollbackFor = CommonException.class)
    @Override
    public ProjectDTO update(Long organizationId, ProjectDTO projectDTO) {
        updateCheck(projectDTO);
        // The project code is immutable through this endpoint.
        projectDTO.setCode(null);
        OrganizationDTO organizationDTO = organizationAssertHelper.organizationNotExisted(projectDTO.getOrganizationId());
        ProjectDTO dto;
        if (devopsMessage) {
            dto = new ProjectDTO();
            CustomUserDetails details = DetailsHelperAssert.userDetailNotExisted();
            UserDTO user = userAssertHelper.userNotExisted(WhichColumn.LOGIN_NAME, details.getUsername());
            ProjectEventPayload projectEventMsg = new ProjectEventPayload();
            projectEventMsg.setUserName(details.getUsername());
            projectEventMsg.setUserId(user.getId());
            projectEventMsg.setOrganizationCode(organizationDTO.getCode());
            projectEventMsg.setOrganizationName(organizationDTO.getName());
            ProjectDTO newProjectDTO = updateSelective(projectDTO);
            projectEventMsg.setProjectId(newProjectDTO.getId());
            projectEventMsg.setProjectCode(newProjectDTO.getCode());
            projectEventMsg.setProjectName(newProjectDTO.getName());
            projectEventMsg.setImageUrl(newProjectDTO.getImageUrl());
            BeanUtils.copyProperties(newProjectDTO, dto);
            try {
                String input = mapper.writeValueAsString(projectEventMsg);
                sagaClient.startSaga(PROJECT_UPDATE, new StartInstanceDTO(input, PROJECT, newProjectDTO.getId() + "", ResourceLevel.ORGANIZATION.value(), organizationId));
            } catch (Exception e) {
                throw new CommonException("error.organizationProjectService.updateProject.event", e);
            }
        } else {
            dto = updateSelective(projectDTO);
        }
        return dto;
    }

    /**
     * Applies the non-empty fields of {@code projectDTO} onto the stored
     * project and persists it, resolving the type display name afterwards.
     *
     * @param projectDTO fields to update (id required)
     * @return the updated project re-read by primary key
     */
    @Override
    public ProjectDTO updateSelective(ProjectDTO projectDTO) {
        ProjectDTO project = projectAssertHelper.projectNotExisted(projectDTO.getId());
        ProjectTypeDTO projectTypeDTO = new ProjectTypeDTO();
        projectTypeDTO.setCode(projectDTO.getType());
        if (projectDTO.getType() != null && projectTypeMapper.selectCount(projectTypeDTO) != 1) {
            throw new CommonException("error.project.type.notExist");
        }
        if (!StringUtils.isEmpty(projectDTO.getName())) {
            project.setName(projectDTO.getName());
        }
        if (!StringUtils.isEmpty(projectDTO.getCode())) {
            project.setCode(projectDTO.getCode());
        }
        if (projectDTO.getEnabled() != null) {
            project.setEnabled(projectDTO.getEnabled());
        }
        if (projectDTO.getImageUrl() != null) {
            project.setImageUrl(projectDTO.getImageUrl());
        }
        // Type is copied unconditionally, so a null type clears it.
        project.setType(projectDTO.getType());
        if (projectMapper.updateByPrimaryKey(project) != 1) {
            throw new UpdateExcetion("error.project.update");
        }
        ProjectDTO returnProject = projectMapper.selectByPrimaryKey(projectDTO.getId());
        if (returnProject.getType() != null) {
            ProjectTypeDTO dto = new ProjectTypeDTO();
            dto.setCode(project.getType());
            returnProject.setTypeName(projectTypeMapper.selectOne(dto).getName());
        }
        return returnProject;
    }

    /**
     * Validates the update payload: objectVersionNumber must be present and
     * the name must be non-empty and at most 32 characters.
     */
    private void updateCheck(ProjectDTO projectDTO) {
        String name = projectDTO.getName();
        projectAssertHelper.objectVersionNumberNotNull(projectDTO.getObjectVersionNumber());
        if (StringUtils.isEmpty(name)) {
            throw new EmptyParamException("error.project.name.empty");
        }
        // NOTE(review): this validates the NAME length but reports "error.project.code.size" —
        // looks like a copy/paste message key; confirm before changing (clients may match on it).
        if (name.length() < 1 || name.length() > 32) {
            throw new IllegalArgumentException("error.project.code.size");
        }
    }

    /**
     * Enables a project and publishes the PROJECT_ENABLE saga event.
     *
     * @param organizationId organization owning the project (must exist)
     * @param projectId      project to enable
     * @param userId         operating user (for notifications)
     * @return the updated project
     */
    @Override
    @Saga(code = PROJECT_ENABLE, description = "iam启用项目", inputSchemaClass = ProjectEventPayload.class)
    @Transactional(rollbackFor = Exception.class)
    public ProjectDTO enableProject(Long organizationId, Long projectId, Long userId) {
        organizationAssertHelper.organizationNotExisted(organizationId);
        return updateProjectAndSendEvent(projectId, PROJECT_ENABLE, true, userId);
    }

    /**
     * Disables a project and publishes the PROJECT_DISABLE saga event.
     * The organization check is skipped when organizationId is null
     * (project-level callers do not know the organization).
     *
     * @param organizationId organization owning the project (nullable)
     * @param projectId      project to disable
     * @param userId         operating user (for notifications)
     * @return the updated project
     */
    @Override
    @Saga(code = PROJECT_DISABLE, description = "iam停用项目", inputSchemaClass = ProjectEventPayload.class)
    @Transactional(rollbackFor = Exception.class)
    public ProjectDTO disableProject(Long organizationId, Long projectId, Long userId) {
        if (organizationId != null) {
            organizationAssertHelper.organizationNotExisted(organizationId);
        }
        return updateProjectAndSendEvent(projectId, PROJECT_DISABLE, false, userId);
    }

    /**
     * Enables/disables a project and sends the corresponding notification messages.
     *
     * @param projectId    project id
     * @param consumerType saga message type
     * @param enabled      whether to enable
     * @param userId       operating user id
     * @return the updated project
     */
    private ProjectDTO updateProjectAndSendEvent(Long projectId, String consumerType, boolean enabled, Long userId) {
        ProjectDTO projectDTO = projectMapper.selectByPrimaryKey(projectId);
        projectDTO.setEnabled(enabled);
        // Update the project itself.
        projectDTO = updateSelective(projectDTO);
        String category = projectDTO.getCategory();
        // Id of the program (project group) the project belongs to, if any.
        Long programId = null;
        if (!enabled) {
            if (ProjectCategory.AGILE.value().equalsIgnoreCase(category)) {
                // Disabling an agile project: also disable its program relationship.
                ProjectRelationshipDTO relationshipDTO = new ProjectRelationshipDTO();
                relationshipDTO.setProjectId(projectId);
                relationshipDTO.setEnabled(true);
                relationshipDTO = projectRelationshipMapper.selectOne(relationshipDTO);
                programId = updateProjectRelationShip(relationshipDTO, Boolean.FALSE);
            } else if ((ProjectCategory.PROGRAM.value().equalsIgnoreCase(category))) {
                // Disabling a program: disable all project relationships under it.
                List<ProjectRelationshipDTO> relationshipDTOS = projectRelationshipMapper.selectProjectsByParentId(projectId, true);
                if (CollectionUtils.isNotEmpty(relationshipDTOS)) {
                    for (ProjectRelationshipDTO relationshipDTO : relationshipDTOS) {
                        updateProjectRelationShip(relationshipDTO, Boolean.FALSE);
                    }
                }
            }
        }
        // Publish the enable/disable event and user notifications.
        sendEvent(consumerType, enabled, userId, programId, projectDTO);
        return projectDTO;
    }

    /**
     * Enables/disables a project-program relationship; when categories are
     * enabled, also removes the project's PROGRAM_PROJECT category mapping.
     *
     * @param relationshipDTO relationship to update (may be null or already disabled)
     * @param enabled         target enabled state
     * @return id of the program the project belongs to, or null when nothing was updated
     */
    private Long updateProjectRelationShip(ProjectRelationshipDTO relationshipDTO, boolean enabled) {
        if (relationshipDTO == null || !relationshipDTO.getEnabled()) {
            return null;
        }
        relationshipDTO.setEnabled(enabled);
        if (projectRelationshipMapper.updateByPrimaryKey(relationshipDTO) != 1) {
            throw new UpdateExcetion("error.project.group.update");
        }
        if (categoryEnable) {
            ProjectCategoryDTO projectCategoryDTO = new ProjectCategoryDTO();
            projectCategoryDTO.setCode("PROGRAM_PROJECT");
            projectCategoryDTO = projectCategoryMapper.selectOne(projectCategoryDTO);
            ProjectMapCategoryDTO projectMapCategoryDTO = new ProjectMapCategoryDTO();
            projectMapCategoryDTO.setProjectId(relationshipDTO.getProjectId());
            projectMapCategoryDTO.setCategoryId(projectCategoryDTO.getId());
            if (projectMapCategoryMapper.delete(projectMapCategoryDTO) != 1) {
                throw new CommonException("error.project.map.category.delete");
            }
        }
        return relationshipDTO.getProgramId();
    }

    /**
     * Sends the saga event and user notifications for an enable/disable.
     *
     * @param consumerType saga message type
     * @param enabled      whether the project was enabled
     * @param userId       operating user id
     * @param programId    program id the project belongs to (nullable)
     * @param projectDTO   the updated project
     */
    private void sendEvent(String consumerType, boolean enabled, Long userId, Long programId, ProjectDTO projectDTO) {
        Long projectId = projectDTO.getId();
        if (devopsMessage) {
            ProjectEventPayload payload = new ProjectEventPayload();
            payload.setProjectId(projectId);
            payload.setProjectCategory(projectDTO.getCategory());
            payload.setProgramId(programId);
            //saga
            try {
                String input = mapper.writeValueAsString(payload);
                sagaClient.startSaga(consumerType, new StartInstanceDTO(input, PROJECT, "" + payload.getProjectId(), ResourceLevel.ORGANIZATION.value(), projectDTO.getOrganizationId()));
            } catch (Exception e) {
                throw new CommonException("error.organizationProjectService.enableOrDisableProject", e);
            }
            if (!enabled) {
                // Notify asgard to disable the project's scheduled tasks.
                asgardFeignClient.disableProj(projectId);
            }
            // Send a notice to every user under the project.
            List<Long> userIds = projectMapper.listUserIds(projectId);
            Map<String, Object> params = new HashMap<>();
            params.put("projectName", projectMapper.selectByPrimaryKey(projectId).getName());
            if (PROJECT_DISABLE.equals(consumerType)) {
                userService.sendNotice(userId, userIds, "disableProject", params, projectId);
            } else if (PROJECT_ENABLE.equals(consumerType)) {
                userService.sendNotice(userId, userIds, "enableProject", params, projectId);
            }
        }
    }

    /**
     * Validates that the project code is present and unused within the organization.
     */
    @Override
    public void check(ProjectDTO projectDTO) {
        Boolean checkCode = !StringUtils.isEmpty(projectDTO.getCode());
        if (!checkCode) {
            throw new CommonException("error.project.code.empty");
        } else {
            checkCode(projectDTO);
        }
    }

    /**
     * Checks code uniqueness. On create (no id) any match is a conflict; on
     * update a match is only a conflict if it belongs to a different project.
     */
    private void checkCode(ProjectDTO projectDTO) {
        // Spring's StringUtils.isEmpty(Object) treats null as empty, so this is a null check on the id.
        Boolean createCheck = StringUtils.isEmpty(projectDTO.getId());
        ProjectDTO project = new ProjectDTO();
        project.setOrganizationId(projectDTO.getOrganizationId());
        project.setCode(projectDTO.getCode());
        if (createCheck) {
            Boolean existed = projectMapper.selectOne(project) != null;
            if (existed) {
                throw new CommonException("error.project.code.exist");
            }
        } else {
            Long id = projectDTO.getId();
            ProjectDTO dto = projectMapper.selectOne(project);
            Boolean existed = dto != null && !id.equals(dto.getId());
            if (existed) {
                throw new CommonException("error.project.code.exist");
            }
        }
    }

    /**
     * Builds chart data grouping an organization's projects by type:
     * one entry per type (count + project names) plus an entry for
     * projects with no type, with a matching legend.
     *
     * @param organizationId the organization
     * @return map with "legend" (type names) and "data" (per-type entries)
     */
    @Override
    public Map<String, Object> getProjectsByType(Long organizationId) {
        // 1. Fetch all project types.
        List<ProjectTypeDTO> list = projectTypeMapper.selectAll();
        List<String> legend = list.stream().map(ProjectTypeDTO::getName).collect(Collectors.toList());
        List<Map<String, Object>> data = new ArrayList<>();
        // 2. For each type, fetch the names of its projects.
        list.forEach(type -> {
            List<String> projectNames = projectMapper.selectProjectNameByType(type.getCode(), organizationId);
            Map<String, Object> dataMap = new HashMap<>(5);
            dataMap.put("value", projectNames.size());
            dataMap.put("name", type.getName());
            dataMap.put("projects", projectNames);
            data.add(dataMap);
        });
        // 3. Fetch names of projects without a type.
        List<String> projsNoType = projectMapper.selectProjectNameNoType(organizationId);
        Map<String, Object> noTypeProjectList = new HashMap<>(5);
        noTypeProjectList.put("value", projsNoType.size());
        noTypeProjectList.put("name", "无");
        noTypeProjectList.put("projects", projsNoType);
        legend.add("无");
        data.add(noTypeProjectList);
        // 4. Assemble the result map.
        Map<String, Object> map = new HashMap<>(5);
        map.put("legend", legend);
        map.put("data", data);
        return map;
    }

    /**
     * Lists agile projects in the organization available to join the given
     * program; only program-category projects may configure sub-projects.
     *
     * @param organizationId the organization
     * @param projectId      the program project
     * @return agile projects not yet grouped
     */
    @Override
    public List<ProjectDTO> getAvailableAgileProj(Long organizationId, Long projectId) {
        organizationAssertHelper.organizationNotExisted(organizationId);
        ProjectDTO projectDTO = selectCategoryByPrimaryKey(projectId);
        if (projectDTO == null) {
            throw new CommonException(PROJECT_NOT_EXIST_EXCEPTION);
        } else if (!projectDTO.getCategory().equalsIgnoreCase(ProjectCategory.PROGRAM.value())) {
            throw new CommonException("error.only.programs.can.configure.subprojects");
        } else {
            // All agile projects of the organization not already in a group.
            return projectMapper.selectProjsNotGroup(organizationId, projectId);
        }
    }

    /**
     * Loads a project with all of its categories merged into one DTO.
     *
     * @param projectId project id
     * @return the project with categories populated
     */
    @Override
    public ProjectDTO selectCategoryByPrimaryKey(Long projectId) {
        List<ProjectDTO> projects = projectMapper.selectCategoryByPrimaryKey(projectId);
        ProjectDTO dto = mergeCategories(projects);
        if (dto == null) {
            throw new CommonException("error.project.not.exist");
        }
        return dto;
    }

    /**
     * Collapses one row per category into a single project DTO. The primary
     * category is chosen with precedence PROGRAM > AGILE > first other.
     *
     * @param projectDTOS one row per (project, category) pair
     * @return merged DTO, or null when the input is empty
     */
    private ProjectDTO mergeCategories(List<ProjectDTO> projectDTOS) {
        if (CollectionUtils.isEmpty(projectDTOS)) {
            return null;
        }
        ProjectDTO projectDTO = new ProjectDTO();
        BeanUtils.copyProperties(projectDTOS.get(0), projectDTO);
        List<ProjectCategoryDTO> categories = new ArrayList<>();
        String category = null;
        for (int i = 0; i < projectDTOS.size(); i++) {
            ProjectDTO p = projectDTOS.get(i);
            ProjectCategoryDTO projectCategoryDTO = new ProjectCategoryDTO();
            projectCategoryDTO.setCode(p.getCategory());
            categories.add(projectCategoryDTO);
            if (category == null && ProjectCategory.PROGRAM.value().equalsIgnoreCase(p.getCategory())) {
                category = ProjectCategory.PROGRAM.value();
            } else if (category == null && ProjectCategory.AGILE.value().equalsIgnoreCase(p.getCategory())) {
                category = ProjectCategory.AGILE.value();
            } else if (category == null) {
                category = p.getCategory();
            }
        }
        projectDTO.setCategory(category);
        projectDTO.setCategories(categories);
        return projectDTO;
    }

    /**
     * Returns the group (program) info of an enabled project.
     */
    @Override
    public ProjectDTO getGroupInfoByEnableProject(Long organizationId, Long projectId) {
        organizationAssertHelper.organizationNotExisted(organizationId);
        projectAssertHelper.projectNotExisted(projectId);
        return projectMapper.selectGroupInfoByEnableProject(organizationId, projectId);
    }

    /**
     * Lists agile projects of an organization filtered by keyword, using the
     * category-aware query when categories are enabled.
     */
    @Override
    public List<ProjectDTO> getAgileProjects(Long organizationId, String param) {
        List<ProjectDTO> projectDTOS;
        if (categoryEnable) {
            projectDTOS = projectMapper.selectByOrgIdAndCategoryEnable(organizationId, PROJECT_DEFAULT_CATEGORY, param);
        } else {
            projectDTOS = projectMapper.selectByOrgIdAndCategory(organizationId, param);
        }
        return projectDTOS;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/ProjectServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import io.choerodon.asgard.saga.annotation.Saga;
import io.choerodon.asgard.saga.dto.StartInstanceDTO;
import io.choerodon.asgard.saga.feign.SagaClient;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.iam.ResourceLevel;
import io.choerodon.core.oauth.CustomUserDetails;
import io.choerodon.core.oauth.DetailsHelper;
import io.choerodon.iam.api.dto.payload.ProjectEventPayload;
import io.choerodon.iam.app.service.OrganizationProjectService;
import io.choerodon.iam.app.service.ProjectService;
import io.choerodon.iam.infra.asserts.DetailsHelperAssert;
import io.choerodon.iam.infra.asserts.ProjectAssertHelper;
import io.choerodon.iam.infra.asserts.UserAssertHelper;
import io.choerodon.iam.infra.dto.OrganizationDTO;
import io.choerodon.iam.infra.dto.ProjectDTO;
import io.choerodon.iam.infra.dto.UserDTO;
import io.choerodon.iam.infra.mapper.OrganizationMapper;
import io.choerodon.iam.infra.mapper.ProjectMapCategoryMapper;
import io.choerodon.iam.infra.mapper.ProjectMapper;
import io.choerodon.iam.infra.mapper.UserMapper;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import static io.choerodon.iam.infra.common.utils.SagaTopic.Project.PROJECT_UPDATE;
import static io.choerodon.iam.infra.asserts.UserAssertHelper.WhichColumn;
/**
 * Project-level service: read/update operations on a single project.
 * Delegates most writes to {@link OrganizationProjectService} and, when
 * {@code choerodon.devops.message} is enabled, publishes PROJECT_UPDATE
 * saga events so downstream services stay in sync.
 *
 * @author flyleft
 */
@Service
@RefreshScope
public class ProjectServiceImpl implements ProjectService {

    private OrganizationProjectService organizationProjectService;

    // Feature switch: project categories enabled.
    @Value("${choerodon.category.enabled:false}")
    private boolean enableCategory;
    // When true, project updates are propagated to devops via saga messages.
    @Value("${choerodon.devops.message:false}")
    private boolean devopsMessage;
    @Value("${spring.application.name:default}")
    private String serviceName;

    private SagaClient sagaClient;
    // Shared JSON serializer for saga payloads.
    private final ObjectMapper mapper = new ObjectMapper();
    private UserMapper userMapper;
    private ProjectMapper projectMapper;
    private ProjectAssertHelper projectAssertHelper;
    private ProjectMapCategoryMapper projectMapCategoryMapper;
    private UserAssertHelper userAssertHelper;
    private OrganizationMapper organizationMapper;

    public ProjectServiceImpl(OrganizationProjectService organizationProjectService,
                              SagaClient sagaClient,
                              UserMapper userMapper,
                              ProjectMapper projectMapper,
                              ProjectAssertHelper projectAssertHelper,
                              ProjectMapCategoryMapper projectMapCategoryMapper,
                              UserAssertHelper userAssertHelper,
                              OrganizationMapper organizationMapper) {
        this.organizationProjectService = organizationProjectService;
        this.sagaClient = sagaClient;
        this.userMapper = userMapper;
        this.projectMapper = projectMapper;
        this.projectAssertHelper = projectAssertHelper;
        this.projectMapCategoryMapper = projectMapCategoryMapper;
        this.userAssertHelper = userAssertHelper;
        this.organizationMapper = organizationMapper;
    }

    /**
     * Loads a project by id (failing when absent) and, when categories are
     * enabled, populates its category names.
     *
     * @param projectId project id
     * @return the project
     */
    @Override
    public ProjectDTO queryProjectById(Long projectId) {
        ProjectDTO dto = projectAssertHelper.projectNotExisted(projectId);
        if (enableCategory) {
            dto.setCategories(projectMapCategoryMapper.selectProjectCategoryNames(dto.getId()));
        }
        return dto;
    }

    /**
     * Pages users of a project, optionally filtered by user id, email and keyword.
     */
    @Override
    public PageInfo<UserDTO> pagingQueryTheUsersOfProject(Long id, Long userId, String email, PageRequest pageRequest, String param) {
        return PageHelper
                .startPage(pageRequest.getPage(), pageRequest.getSize())
                .doSelectPageInfo(() -> userMapper.selectUsersByLevelAndOptions(ResourceLevel.PROJECT.value(), id, userId, email, param));
    }

    /**
     * Updates a project and, when devops messaging is on, publishes a
     * PROJECT_UPDATE saga event with the operator, project and organization info.
     *
     * @param projectDTO new project state (id required)
     * @return the updated project
     */
    @Transactional(rollbackFor = CommonException.class)
    @Override
    @Saga(code = PROJECT_UPDATE, description = "iam更新项目", inputSchemaClass = ProjectEventPayload.class)
    public ProjectDTO update(ProjectDTO projectDTO) {
        if (devopsMessage) {
            ProjectDTO dto = new ProjectDTO();
            CustomUserDetails details = DetailsHelperAssert.userDetailNotExisted();
            UserDTO user = userAssertHelper.userNotExisted(WhichColumn.LOGIN_NAME, details.getUsername());
            ProjectDTO newProject = projectAssertHelper.projectNotExisted(projectDTO.getId());
            OrganizationDTO organizationDTO = organizationMapper.selectByPrimaryKey(newProject.getOrganizationId());
            ProjectEventPayload projectEventMsg = new ProjectEventPayload();
            projectEventMsg.setUserName(details.getUsername());
            projectEventMsg.setUserId(user.getId());
            if (organizationDTO != null) {
                projectEventMsg.setOrganizationCode(organizationDTO.getCode());
                projectEventMsg.setOrganizationName(organizationDTO.getName());
            }
            projectEventMsg.setProjectId(newProject.getId());
            projectEventMsg.setProjectCode(newProject.getCode());
            ProjectDTO newDTO = organizationProjectService.updateSelective(projectDTO);
            projectEventMsg.setProjectName(projectDTO.getName());
            projectEventMsg.setImageUrl(newDTO.getImageUrl());
            BeanUtils.copyProperties(newDTO, dto);
            try {
                String input = mapper.writeValueAsString(projectEventMsg);
                // NOTE(review): the saga is started at PROJECT level with the project id as source id,
                // unlike OrganizationProjectServiceImpl.update which uses ORGANIZATION level — confirm intended.
                sagaClient.startSaga(PROJECT_UPDATE, new StartInstanceDTO(input, "project", "" + newProject.getId(), ResourceLevel.PROJECT.value(), projectDTO.getId()));
            } catch (Exception e) {
                throw new CommonException("error.projectService.update.event", e);
            }
            return dto;
        } else {
            return organizationProjectService.updateSelective(projectDTO);
        }
    }

    /**
     * Disables a project on behalf of the current user.
     * NOTE(review): assumes a user is logged in — DetailsHelper.getUserDetails()
     * returning null would NPE here; confirm callers are authenticated.
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public ProjectDTO disableProject(Long projectId) {
        Long userId = DetailsHelper.getUserDetails().getUserId();
        return organizationProjectService.disableProject(null, projectId, userId);
    }

    /**
     * Lists ids of all users under the project.
     */
    @Override
    public List<Long> listUserIds(Long projectId) {
        return projectMapper.listUserIds(projectId);
    }

    /**
     * Loads projects for a set of ids; returns an empty list for an empty set.
     */
    @Override
    public List<ProjectDTO> queryByIds(Set<Long> ids) {
        if (ids.isEmpty()) {
            return new ArrayList<>();
        } else {
            return projectMapper.selectByIds(ids);
        }
    }

    /**
     * Returns true when no project uses the given code (across all organizations).
     */
    @Override
    public Boolean checkProjCode(String code) {
        ProjectDTO projectDTO = new ProjectDTO();
        projectDTO.setCode(code);
        return projectMapper.selectOne(projectDTO) == null;
    }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/api/eventhandler/ParsePermissionListenerSpec.groovy<|end_filename|>
//package io.choerodon.iam.api.eventhandler
//
//import io.choerodon.eureka.event.EurekaEventPayload
//import io.choerodon.iam.app.service.UploadHistoryService.ParsePermissionService
//import spock.lang.Specification
//
//class ParsePermissionListenerSpec extends Specification {
//
// def "test receiveUpEvent"() {
// given:
// def service = Mock(ParsePermissionService)
// def listener = new ParsePermissionListener(service)
// when:
// listener.receiveUpEvent(new EurekaEventPayload())
//
// then:
// 1 * service.parser(_)
// }
//
//}
<|start_filename|>src/main/java/io/choerodon/iam/infra/feign/FileFeignClient.java<|end_filename|>
package io.choerodon.iam.infra.feign;
import io.choerodon.iam.api.dto.FileDTO;
import io.choerodon.iam.infra.config.MultipartSupportConfig;
import io.choerodon.iam.infra.feign.fallback.FileFeignClientFallback;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.multipart.MultipartFile;
/**
 * Feign client for the file-service: multipart uploads of files and documents.
 * Uses {@link MultipartSupportConfig} for multipart encoding and falls back to
 * {@link FileFeignClientFallback} when the service is unavailable.
 *
 * @author superlee
 */
@FeignClient(value = "file-service",
        configuration = MultipartSupportConfig.class,
        fallback = FileFeignClientFallback.class)
public interface FileFeignClient {

    /**
     * Uploads a raw file to the given bucket.
     *
     * @param bucketName    target bucket
     * @param fileName      name to store the file under
     * @param multipartFile file content
     * @return the file-service response body as a string (presumably the stored file URL — confirm with file-service API)
     */
    @PostMapping(
            value = "/v1/files",
            produces = {MediaType.APPLICATION_JSON_UTF8_VALUE},
            consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
    ResponseEntity<String> uploadFile(@RequestParam("bucket_name") String bucketName,
                                      @RequestParam("file_name") String fileName,
                                      @RequestPart("file") MultipartFile multipartFile);

    /**
     * Uploads a document to the given bucket.
     *
     * @param bucketName    target bucket
     * @param fileName      name to store the document under
     * @param multipartFile document content
     * @return metadata describing the stored document
     */
    @PostMapping(
            value = "/v1/documents",
            produces = {MediaType.APPLICATION_JSON_UTF8_VALUE},
            consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
    ResponseEntity<FileDTO> upload(@RequestParam("bucket_name") String bucketName,
                                   @RequestParam("file_name") String fileName,
                                   @RequestPart("file") MultipartFile multipartFile);
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/config/HandlerMethodArgumentConfig.java<|end_filename|>
package io.choerodon.iam.infra.config;
import io.choerodon.iam.infra.annotation.NamingRuleTransHandler;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.method.support.HandlerMethodArgumentResolver;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
import java.util.List;
/**
 * Registers the {@link NamingRuleTransHandler} argument resolver so controller
 * parameters annotated for naming-rule translation (camelCase/under_score)
 * are resolved by it.
 *
 * Implements {@link WebMvcConfigurer} (interface with default methods) instead
 * of extending the {@code WebMvcConfigurerAdapter} class, which is deprecated
 * as of Spring 5 — this codebase is on Spring Boot 2 / Spring 5 (it uses
 * {@code org.springframework.cloud.openfeign}).
 *
 * @author dengyouquan
 **/
@Configuration
public class HandlerMethodArgumentConfig implements WebMvcConfigurer {

    /**
     * Exposes the resolver as a bean so Spring manages its lifecycle and
     * other components can inject it.
     */
    @Bean
    NamingRuleTransHandler namingRuleTransHandler() {
        return new NamingRuleTransHandler();
    }

    /**
     * Appends the naming-rule resolver to Spring MVC's argument resolvers.
     */
    @Override
    public void addArgumentResolvers(List<HandlerMethodArgumentResolver> argumentResolvers) {
        argumentResolvers.add(namingRuleTransHandler());
    }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/infra/annotation/NamingRuleTransHandlerSpec.groovy<|end_filename|>
package io.choerodon.iam.infra.annotation
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.infra.dto.ProjectDTO
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import org.springframework.core.MethodParameter
import org.springframework.web.bind.support.WebDataBinderFactory
import org.springframework.web.context.request.NativeWebRequest
import org.springframework.web.method.support.ModelAndViewContainer
import spock.lang.Specification
import javax.servlet.http.HttpServletRequest
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
* @author dengyouquan
* */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class NamingRuleTransHandlerSpec extends Specification {
    // Handler under test; exercised directly, bypassing the Spring MVC dispatch chain.
    private NamingRuleTransHandler namingRuleTransHandler = new NamingRuleTransHandler()

    def "ResolveArgument"() {
        given: "构造请求参数"
        // All collaborators are mocked; interaction counts below pin how often
        // the handler consults each one.
        MethodParameter methodParameter = Mock(MethodParameter)
        ModelAndViewContainer mavContainer = Mock(ModelAndViewContainer)
        NativeWebRequest nativeWebRequest = Mock(NativeWebRequest)
        WebDataBinderFactory binderFactory = Mock(WebDataBinderFactory)
        HttpServletRequest servletRequest = Mock(HttpServletRequest)
        NamingRuleTrans namingRuleTrans = Mock(NamingRuleTrans)
        NamingRuleTransStrategy strategy = NamingRuleTransStrategy.CAMEL
        // Simulated GET query string: a single under_score-style parameter.
        Map<String, String[]> parameterMap = new HashMap<>()
        String[] strings = new String[1]
        strings[0] = "1"
        parameterMap.put("organization_id", strings)

        when: "调用方法"
        // First pass: CAMEL strategy (under_score request params -> camelCase fields).
        namingRuleTransHandler.resolveArgument(methodParameter, mavContainer, nativeWebRequest, binderFactory)

        then: "校验结果"
        1 * nativeWebRequest.getNativeRequest(_) >> { servletRequest }
        1 * servletRequest.getContentType() >> { "xml" }
        1 * servletRequest.getMethod() >> { "GET" }
        2 * methodParameter.getParameterType() >> { ProjectDTO }
        1 * methodParameter.getParameterAnnotation(NamingRuleTrans.class) >> { namingRuleTrans }
        1 * namingRuleTrans.value() >> { strategy }
        1 * nativeWebRequest.getParameterMap() >> { parameterMap }

        when: "调用方法"
        // Second pass: UNDER_LINE strategy, same collaborators and expectations.
        strategy = NamingRuleTransStrategy.UNDER_LINE
        namingRuleTransHandler.resolveArgument(methodParameter, mavContainer, nativeWebRequest, binderFactory)

        then: "校验结果"
        1 * nativeWebRequest.getNativeRequest(_) >> { servletRequest }
        1 * servletRequest.getContentType() >> { "xml" }
        1 * servletRequest.getMethod() >> { "GET" }
        2 * methodParameter.getParameterType() >> { ProjectDTO }
        1 * methodParameter.getParameterAnnotation(NamingRuleTrans.class) >> { namingRuleTrans }
        1 * namingRuleTrans.value() >> { strategy }
        1 * nativeWebRequest.getParameterMap() >> { parameterMap }
    }
}
<|start_filename|>react/src/app/iam/containers/organization/organization-setting/basic-info-setting/BasicInfoSetting.js<|end_filename|>
import React, { Component } from 'react';
import { inject, observer } from 'mobx-react';
import { Button, Form, TextField, Modal, Select, Icon } from 'choerodon-ui/pro';
import { Content, Header, Page, Permission, stores } from '@choerodon/boot';
import { injectIntl, FormattedMessage } from 'react-intl';
import { withRouter } from 'react-router-dom';
import classnames from 'classnames';
import './BasicInfoSetting.scss';
import OrganizationSettingStore from '../../../../stores/organization/organization-setting/OrganizationSettingStore';
import '../../../../common/ConfirmModal.scss';
import AvatarUploader from '../../../../components/avatarUploader';
const { HeaderStore } = stores;
const FormItem = Form.Item;
const { Option } = Select;
const intlPrefix = 'organization.info';
const ORGANIZATION_TYPE = 'organization';
const PROJECT_TYPE = 'project';
// @Form.create({})
@injectIntl
@inject('AppState')
@observer
export default class BasicInfoSetting extends Component {
state = {
submitting: false,
isShowAvatar: false,
};
componentDidMount() {
this.loadOrganization();
}
componentWillUnmount() {
OrganizationSettingStore.setOrganizationInfo({});
OrganizationSettingStore.setImageUrl(null);
}
loadOrganization = () => {
const { AppState } = this.props;
const { id } = AppState.currentMenuType;
OrganizationSettingStore.axiosGetOrganizationInfo(id)
.then((data) => {
OrganizationSettingStore.setImageUrl(data.imageUrl);
OrganizationSettingStore.setOrganizationInfo(data);
})
.catch(Choerodon.handleResponseError);
};
handleSave(e) {
const oldInfo = OrganizationSettingStore.organizationInfo;
const body = {
...oldInfo,
imageUrl: OrganizationSettingStore.getImageUrl,
};
this.setState({ submitting: true });
OrganizationSettingStore.axiosSaveProjectInfo(body)
.then((data) => {
this.setState({ submitting: false });
Choerodon.prompt(this.props.intl.formatMessage({ id: 'save.success' }));
OrganizationSettingStore.setImageUrl(data.imageUrl);
OrganizationSettingStore.setOrganizationInfo(data);
HeaderStore.updateOrg(data);
})
.catch((error) => {
this.setState({ submitting: false });
Choerodon.handleResponseError(error);
});
}
cancelValue = () => {
// const { resetFields } = this.props.form;
// const { imageUrl } = OrganizationSettingStore.organizationInfo;
// OrganizationSettingStore.setImageUrl(imageUrl);
// resetFields();
this.loadOrganization();
};
getAvatar() {
const { isShowAvatar } = this.state;
const { name } = OrganizationSettingStore.organizationInfo;
const imageUrl = OrganizationSettingStore.getImageUrl;
return (
<div className="c7n-iam-organizationsetting-avatar">
<div
className="c7n-iam-organizationsetting-avatar-wrap"
style={{
backgroundColor: '#c5cbe8',
backgroundImage: imageUrl ? `url(${Choerodon.fileServer(imageUrl)})` : '',
}}
>
{!imageUrl && name && name.charAt(0)}
<Button
className={classnames(
'c7n-iam-organizationsetting-avatar-button',
'c7n-iam-organizationsetting-avatar-button-edit'
)}
onClick={this.openAvatarUploader}
>
<div className="c7n-iam-organizationsetting-avatar-button-icon">
<Icon type="photo_camera" />
</div>
</Button>
<AvatarUploader
visible={isShowAvatar}
intlPrefix="organization.project.avatar.edit"
onVisibleChange={this.closeAvatarUploader}
onUploadOk={this.handleUploadOk}
/>
</div>
</div>
);
}
/**
* 打开上传图片模态框
*/
openAvatarUploader = () => {
this.setState({
isShowAvatar: true,
});
};
/**
* 关闭上传图片模态框
* @param visible 模态框是否可见
*/
closeAvatarUploader = (visible) => {
this.setState({
isShowAvatar: visible,
});
};
handleUploadOk = (res) => {
OrganizationSettingStore.setImageUrl(res);
this.setState({
// imgUrl: res,
isShowAvatar: false,
});
};
fieldValueChangeHandlerMaker(fieldName) {
return function(value) {
const oldInfo = OrganizationSettingStore.organizationInfo;
OrganizationSettingStore.setOrganizationInfo({
...oldInfo,
[fieldName]: value,
});
console.log(OrganizationSettingStore.organizationInfo.name);
};
}
render() {
const { submitting } = this.state;
const { intl } = this.props;
const {
enabled,
name,
code,
address,
ownerRealName,
homePage,
} = OrganizationSettingStore.organizationInfo;
return (
<Page
service={['iam-service.organization.queryOrgLevel']}
style={{ position: 'static' }}
>
<Content values={{ name: enabled ? name : code }}>
<div className="c7n-iam-organizationsetting">
<div style={{ marginBottom: '20px' }}>
<span style={{ color: 'rgba(0,0,0,.6)' }}>
{intl.formatMessage({ id: `${intlPrefix}.avatar` })}
</span>
{this.getAvatar()}
</div>
<Form
columns={2}
labelLayout="float"
onSubmit={this.handleSave.bind(this)}
style={{ width: '5.12rem', marginLeft: '-0.05rem' }}
>
<TextField
label={<FormattedMessage id={`${intlPrefix}.name`} />}
pattern="^[-—\.\w\s\u4e00-\u9fa5]{1,32}$"
required
disabled={!enabled}
maxLength={32}
onChange={this.fieldValueChangeHandlerMaker('name')}
value={name}
/>
<TextField
label={<FormattedMessage id={`${intlPrefix}.code`} />}
disabled
defaultValue={code}
/>
<TextField
label={<FormattedMessage id={`${intlPrefix}.address`} />}
colSpan={2}
value={address}
onChange={this.fieldValueChangeHandlerMaker('address')}
/>
<TextField
colSpan={2}
label={<FormattedMessage id={`${intlPrefix}.homePage`} />}
value={homePage}
onChange={this.fieldValueChangeHandlerMaker('homePage')}
/>
<TextField
colSpan={2}
label={<FormattedMessage id={`${intlPrefix}.owner`} />}
disabled
defaultValue={ownerRealName}
/>
<div colSpan={2} className="divider" />
<Permission
service={['iam-service.organization.updateOnOrganizationLevel']}
>
<div colSpan={2} className="btnGroup">
<Button
type="submit"
color="blue"
loading={submitting}
disabled={!enabled}
>
<FormattedMessage id="save" />
</Button>
<Button
funcType="raised"
onClick={this.cancelValue}
disabled={!enabled}
>
<FormattedMessage id="cancel" />
</Button>
</div>
</Permission>
</Form>
</div>
</Content>
</Page>
);
}
}
<|start_filename|>react/src/app/iam/containers/project/project-setting/ProjectSetting.js<|end_filename|>
import React, { Component } from 'react';
import { inject, observer } from 'mobx-react';
import { Button, Form, Icon, Input, Modal, Select } from 'choerodon-ui';
import { axios, Content, Header, Page, Permission, stores } from '@choerodon/boot';
import { FormattedMessage, injectIntl } from 'react-intl';
import { withRouter } from 'react-router-dom';
import classnames from 'classnames';
import './ProjectSetting.scss';
import ProjectSettingStore from '../../../stores/project/project-setting/ProjectSettingStore';
import '../../../common/ConfirmModal.scss';
import AvatarUploader from '../../../components/avatarUploader';
const { HeaderStore } = stores;
const FormItem = Form.Item;
const Option = Select.Option;
const intlPrefix = 'project.info';
const ORGANIZATION_TYPE = 'organization';
const PROJECT_TYPE = 'project';
@Form.create({})
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class ProjectSetting extends Component {
state = {
stopping: false,
categoryEnabled: false,
submitting: false,
isShowAvatar: false,
};
componentDidMount() {
this.loadEnableCategory();
this.loadProject();
this.loadProjectTypes();
}
componentWillUnmount() {
ProjectSettingStore.setProjectInfo({});
ProjectSettingStore.setImageUrl(null);
}
loadEnableCategory = () => {
axios.get(`/iam/v1/system/setting/enable_category`)
.then((response) => {
this.setState({
categoryEnabled: response,
});
});
};
loadProject = () => {
const { AppState } = this.props;
const id = AppState.currentMenuType.id;
ProjectSettingStore.axiosGetProjectInfo(id).then((data) => {
ProjectSettingStore.setImageUrl(data.imageUrl);
ProjectSettingStore.setProjectInfo(data);
}).catch(Choerodon.handleResponseError);
};
loadProjectTypes = () => {
ProjectSettingStore.loadProjectTypes().then((data) => {
if (data.failed) {
Choerodon.prompt(data.message);
} else {
ProjectSettingStore.setProjectTypes(data);
}
}).catch((error) => {
Choerodon.handleResponseError(error);
});
};
handleSave(e) {
e.preventDefault();
const { form, location, history } = this.props;
form.validateFields((err, value, modify) => {
if (!err) {
if (ProjectSettingStore.getProjectInfo.imageUrl !== ProjectSettingStore.getImageUrl) modify = true;
if (!modify) {
Choerodon.prompt(this.props.intl.formatMessage({ id: 'save.success' }));
return;
}
const { id, organizationId, objectVersionNumber } = ProjectSettingStore.getProjectInfo;
const body = {
id,
organizationId,
objectVersionNumber,
...value,
imageUrl: ProjectSettingStore.getImageUrl,
};
if (body.category) {
body.category = null;
}
body.type = body.type === 'no' || undefined ? null : value.type;
this.setState({ submitting: true });
ProjectSettingStore.axiosSaveProjectInfo(body)
.then((data) => {
this.setState({ submitting: false });
Choerodon.prompt(this.props.intl.formatMessage({ id: 'save.success' }));
ProjectSettingStore.setImageUrl(data.imageUrl);
ProjectSettingStore.setProjectInfo(data);
HeaderStore.updateProject(data);
history.replace(`${location.pathname}?type=project&id=${id}&name=${encodeURIComponent(data.name)}&organizationId=${organizationId}`);
})
.catch((error) => {
this.setState({ submitting: false });
Choerodon.handleResponseError(error);
});
}
});
}
handleEnabled = (name) => {
const { AppState, intl } = this.props;
const userId = AppState.getUserId;
this.setState({ stopping: true });
Modal.confirm({
className: 'c7n-iam-confirm-modal',
title: intl.formatMessage({ id: `${intlPrefix}.disable.title` }),
content: intl.formatMessage({ id: `${intlPrefix}.disable.content` }, { name }),
onOk: () => ProjectSettingStore.disableProject(AppState.currentMenuType.id)
.then((data) => {
this.setState({
stopping: false,
});
Choerodon.prompt(this.props.intl.formatMessage({ id: 'disable.success' }));
ProjectSettingStore.setProjectInfo(data);
HeaderStore.updateProject(data);
this.props.history.push('/');
HeaderStore.axiosGetOrgAndPro(sessionStorage.userId || userId).then((org) => {
org[0].forEach((value) => {
value.type = ORGANIZATION_TYPE;
});
org[1].forEach((value) => {
value.type = PROJECT_TYPE;
});
HeaderStore.setProData(org[0]);
HeaderStore.setProData(org[1]);
});
})
.catch((error) => {
this.setState({
stopping: false,
});
Choerodon.handleResponseError(error);
}),
});
};
cancelValue = () => {
const { resetFields } = this.props.form;
const { imageUrl } = ProjectSettingStore.getProjectInfo;
ProjectSettingStore.setImageUrl(imageUrl);
resetFields();
};
getAvatar() {
const { isShowAvatar } = this.state;
const { name } = ProjectSettingStore.getProjectInfo;
const imageUrl = ProjectSettingStore.getImageUrl;
return (
<div className="c7n-iam-projectsetting-avatar">
<div
className="c7n-iam-projectsetting-avatar-wrap"
style={{
backgroundColor: '#c5cbe8',
backgroundImage: imageUrl ? `url(${Choerodon.fileServer(imageUrl)})` : '',
}}
>
{!imageUrl && name && name.charAt(0)}
<Button className={classnames('c7n-iam-projectsetting-avatar-button', 'c7n-iam-projectsetting-avatar-button-edit')}
onClick={this.openAvatarUploader}>
<div className="c7n-iam-projectsetting-avatar-button-icon">
<Icon type="photo_camera" />
</div>
</Button>
<AvatarUploader visible={isShowAvatar}
intlPrefix="organization.project.avatar.edit"
onVisibleChange={this.closeAvatarUploader}
onUploadOk={this.handleUploadOk} />
</div>
</div>
);
}
/**
* 打开上传图片模态框
*/
openAvatarUploader = () => {
this.setState({
isShowAvatar: true,
});
};
/**
* 关闭上传图片模态框
* @param visible 模态框是否可见
*/
closeAvatarUploader = (visible) => {
this.setState({
isShowAvatar: visible,
});
};
handleUploadOk = (res) => {
ProjectSettingStore.setImageUrl(res);
this.setState({
// imgUrl: res,
isShowAvatar: false,
});
};
render() {
const { submitting, categoryEnabled } = this.state;
const { intl } = this.props;
const { getFieldDecorator } = this.props.form;
const { enabled, name, code, categories } = ProjectSettingStore.getProjectInfo;
const types = ProjectSettingStore.getProjectTypes;
return (
<Page
service={[
'iam-service.project.query',
'iam-service.project.update',
'iam-service.project.disableProject',
'iam-service.project.list',
]}
>
<Header title={<FormattedMessage id={`${intlPrefix}.header.title`} />}>
<Permission service={['iam-service.project.disableProject']}>
<div>
<Button
icon="remove_circle_outline"
onClick={this.handleEnabled.bind(this, name)}
disabled={!enabled}
>
<FormattedMessage id="disable" />
</Button>
</div>
</Permission>
</Header>
<Content
code={enabled ? intlPrefix : `${intlPrefix}.disabled`}
values={{ name: enabled ? name : code }}
>
<div className="c7n-iam-projectsetting">
<Form onSubmit={this.handleSave.bind(this)}>
<FormItem>
{getFieldDecorator('name', {
rules: [{
required: true,
whitespace: true,
message: intl.formatMessage({ id: `${intlPrefix}.namerequiredmsg` }),
}, {
/* eslint-disable-next-line */
pattern: /^[-—\.\w\s\u4e00-\u9fa5]{1,32}$/,
message: intl.formatMessage({ id: `${intlPrefix}.name.pattern.msg` }),
}],
initialValue: name,
})(
<Input
style={{ width: 512 }}
autoComplete="off"
label={<FormattedMessage id={`${intlPrefix}.name`} />}
disabled={!enabled}
maxLength={32}
showLengthInfo={false}
/>,
)}
</FormItem>
<FormItem>
{getFieldDecorator('code', {
initialValue: code,
})(
<Input autoComplete="off" label={<FormattedMessage id={`${intlPrefix}.code`} />} disabled
style={{ width: 512 }} />,
)}
</FormItem>
{categoryEnabled && (
<FormItem>
{getFieldDecorator('category', {
initialValue: categories && categories.map(value => value.name),
})(<Select
mode="multiple"
showArrow={false}
label={<FormattedMessage id={`${intlPrefix}.category`} />}
allowClear
disabled
style={{ width: 512 }}
loading={this.state.selectLoading}
>
{}
</Select>,
)}
</FormItem>
)}
<div>
<span style={{ color: 'rgba(0,0,0,.6)' }}>{intl.formatMessage({ id: `${intlPrefix}.avatar` })}</span>
{this.getAvatar()}
</div>
<div className="divider" />
<Permission service={['iam-service.project.update']}>
<div className="btnGroup">
<Button
funcType="raised"
htmlType="submit"
type="primary"
loading={submitting}
disabled={!enabled}
><FormattedMessage id="save" /></Button>
<Button
funcType="raised"
onClick={this.cancelValue}
disabled={!enabled}
>
<FormattedMessage id="cancel" />
</Button>
</div>
</Permission>
</Form>
</div>
</Content>
</Page>
);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/LdapHistoryMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import java.util.List;
import io.choerodon.iam.infra.dto.LdapHistoryDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
/**
 * MyBatis mapper for {@code LdapHistoryDTO} records.
 *
 * @author superlee
 */
public interface LdapHistoryMapper extends Mapper<LdapHistoryDTO> {
    /**
     * 查询ldap下所有完成的记录
     *
     * @param ldapId id of the LDAP configuration whose finished sync runs are wanted
     * @return all completed history records for that LDAP configuration
     */
    List<LdapHistoryDTO> selectAllEnd(@Param("ldapId")Long ldapId);
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/PermissionMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.PermissionDTO;
import io.choerodon.iam.infra.dto.RoleDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import java.util.Set;
/**
 * MyBatis mapper for {@code PermissionDTO} records.
 *
 * @author wuguokai
 */
public interface PermissionMapper extends Mapper<PermissionDTO> {

    /**
     * Fuzzy query over permissions, filtered by the example DTO's fields and a
     * free-text param.
     *
     * @param permissionDTO example object whose non-null fields act as filters
     * @param param         free-text search parameter
     * @return matching permissions
     */
    List<PermissionDTO> fuzzyQuery(@Param("permissionDTO") PermissionDTO permissionDTO,
                                   @Param("param") String param);

    /**
     * Lists permissions attached to a role, optionally filtered by free text.
     *
     * @param roleId id of the role
     * @param params free-text search parameter
     * @return permissions of the role
     */
    List<PermissionDTO> selectByRoleId(@Param("roleId") Long roleId,
                                       @Param("params") String params);

    /**
     * Returns which of the given permission codes the member holds at the
     * given source (scope).
     *
     * @param memberId   member (user) id
     * @param sourceType scope type, e.g. site/organization/project
     * @param sourceId   id of the scope instance
     * @param codes      permission codes to check
     * @return the subset of {@code codes} the member actually has
     */
    Set<String> checkPermission(@Param("member_id") Long memberId, @Param("source_type") String sourceType,
                                @Param("source_id") Long sourceId, @Param("codes") Set<String> codes);

    /**
     * Finds permissions attached to the role whose resource level does not
     * match the role's level (data-consistency check).
     *
     * @param role the role to inspect
     * @return permissions with a mismatched level
     */
    List<PermissionDTO> selectErrorLevelPermissionByRole(@Param("role") RoleDTO role);
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/feign/AsgardFeignClient.java<|end_filename|>
package io.choerodon.iam.infra.feign;
import io.choerodon.iam.infra.feign.fallback.AsgardFeignClientFallback;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PutMapping;
/**
 * Feign client for the asgard (scheduling) service: disables scheduled tasks
 * when an organization or project is disabled.
 *
 * @author dengyouquan
 **/
@FeignClient(value = "asgard-service",
        fallback = AsgardFeignClientFallback.class)
public interface AsgardFeignClient {

    /**
     * Disables all scheduled tasks belonging to an organization.
     *
     * @param orgId id of the organization being disabled
     */
    @PutMapping("/v1/schedules/organizations/{organization_id}/tasks/disable")
    void disableOrg(@PathVariable("organization_id") long orgId);

    /**
     * Disables all scheduled tasks belonging to a project.
     *
     * @param projectId id of the project being disabled
     */
    @PutMapping("/v1/schedules/projects/{project_id}/tasks/disable")
    void disableProj(@PathVariable("project_id") long projectId);
}
<|start_filename|>react/src/app/iam/containers/organization/role/RoleMsg.js<|end_filename|>
import React, { Component } from 'react';
import { withRouter } from 'react-router-dom';
import { inject, observer } from 'mobx-react';
import querystring from 'query-string';
import { set, get } from 'mobx';
import remove from 'lodash/remove';
import { Observable } from 'rxjs';
import _ from 'lodash';
import { Icon, Button, Col, Form, Input, Modal, Row, Select, Table, Tooltip, Tabs, Checkbox } from 'choerodon-ui';
import { injectIntl, FormattedMessage } from 'react-intl';
import { Content, Header, Page, axios } from '@choerodon/boot';
import { RESOURCES_LEVEL } from '@choerodon/boot/lib/containers/common/constants';
import RoleStore from '../../../stores/organization/role/RoleStore';
import MouseOverWrapper from '../../../components/mouseOverWrapper';
import { handleFiltersParams } from '../../../common/util';
import './Role.scss';
import Sider from './Sider';
const { Option } = Select;
const { TabPane } = Tabs;
const { confirm, Sidebar } = Modal;
const FormItem = Form.Item;
const intlPrefix = 'organization.role';
const LEVEL_NAME = {
site: '全局层',
organization: '组织层',
project: '项目层',
user: '个人中心',
};
@Form.create({})
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class CreateRole extends Component {
constructor(props) {
  super(props);
  // Parse routing state from the query string:
  //   level  - role level (site/organization/project) this editor targets
  //   base   - ids of roles whose permissions seed a "create from" role
  //   roleId - present when editing an existing role
  const queryObj = querystring.parse(props.location.search);
  this.level = queryObj.level || undefined;
  this.base = queryObj.base ? queryObj.base.split(',') : [];
  this.roleId = queryObj.roleId || undefined;
  // Edit mode iff a roleId was supplied.
  this.isEdit = !!this.roleId;
  // Initially selected tab mirrors the role level.
  this.tabLevel = queryObj.level;
  this.state = {
    submitLoading: false, // create/save request in flight
  };
}
componentDidMount() {
RoleStore.setSelectedPermissions([]);
this.loadLabelsAndMenus();
}
// Loads everything the editor needs up front: the menu tree for the active
// tab, role labels, and — depending on mode — either the permissions of the
// "base" roles (create-from) or the role being edited.
loadLabelsAndMenus = () => {
  const { level, tabLevel, base } = this;
  const { AppState } = this.props;
  const { id } = AppState.currentMenuType;
  RoleStore.setTabLevel(tabLevel);
  this.loadMenu(id, RoleStore.tabLevel || tabLevel);
  RoleStore.loadRoleLabel(id);
  if (base.length) {
    // Pre-check the union of the base roles' permissions.
    RoleStore.getSelectedRolePermissions(id, base)
      .then((res) => {
        RoleStore.setSelectedPermissions(res.map(p => p.id));
      });
  }
  if (this.isEdit) {
    // Editing: load the role and mirror its permissions into the selection.
    RoleStore.getRoleById(id, this.roleId)
      .then((res) => {
        this.props.form.resetFields();
        RoleStore.setRoleMsg(res);
        RoleStore.setSelectedPermissions(res.permissions.map(p => p.id));
      });
  }
}
// Loads the menu tree for one level tab, expands all rows by default, and
// makes that tab active if none is yet.
loadMenu = (orgId, tabLevel) => {
  RoleStore.loadMenu(orgId, tabLevel)
    .then((menus) => {
      set(RoleStore.menus, tabLevel, menus.subMenus);
      set(RoleStore.expandedRowKeys, tabLevel, this.getAllIdByLevel(tabLevel));
      if (!RoleStore.tabLevel) {
        RoleStore.setTabLevel(tabLevel);
      }
    });
}
check = (selectedPermissions, menu, sign, type) => {
if (menu.subMenus) {
menu.subMenus.map(menuItem => this.check(selectedPermissions, menuItem, sign, type));
}
this.checkOne(selectedPermissions, menu, sign, type);
}
checkOne = (selectedPermissions, menu, sign, type) => {
if (type === 'all') {
if (menu.permissions.map(p => p.id).some(pid => selectedPermissions.findIndex(v => v === pid) === -1)) {
sign.sign = false;
}
} else if (type === 'none') {
if (menu.permissions.map(p => p.id).some(pid => selectedPermissions.findIndex(v => v === pid) !== -1)) {
sign.sign = false;
}
}
}
getTabCodes = () => {
const LEVEL_OBJ = {
site: ['site', 'user'],
project: ['project'],
organization: ['organization'],
};
return LEVEL_OBJ[this.level] || [];
}
getIds = (menu, res) => {
res.push(menu.id);
if (menu.subMenus) {
menu.subMenus.map(menuItem => this.getIds(menuItem, res));
}
}
getAllIdByLevel = (level) => {
const menus = get(RoleStore.menus, level) || [];
const res = [];
menus.map(menu => this.getIds(menu, res));
return res;
}
getOneMenuPermissons = (menu, res) => {
res.res = res.res.concat(menu.permissions.map(p => p.id));
}
getPermissions = (menu, res) => {
if (menu.subMenus) {
menu.subMenus.map(menuItem => this.getPermissions(menuItem, res));
}
this.getOneMenuPermissons(menu, res);
}
getAllPermissionsByRecord = (record, originRes) => {
const res = originRes || { res: [] };
this.getPermissions(record, res);
if (!originRes) {
res.res = [...new Set(res.res)];
return res.res;
}
}
getAllPermissionsByLevel = (level) => {
const menus = get(RoleStore.menus, level) || [];
const res = { res: [] };
menus.map(menu => this.getAllPermissionsByRecord(menu, res));
res.res = [...new Set(res.res)];
return res.res;
}
getCheckState = (type, selectedPermissions, record) => {
const sign = { sign: true };
this.check(selectedPermissions, record, sign, type);
return sign.sign;
}
// async-validator hook for the role code field: codes are immutable when
// editing, otherwise uniqueness is checked server-side.
checkCode = (rule, value, callback) => {
  const { isEdit, level } = this;
  const { AppState } = this.props;
  const { id: orgId } = AppState.currentMenuType;
  if (isEdit) {
    // Code can't change in edit mode — pass validation and stop here.
    // (Fix: the missing return previously fired the HTTP check anyway and
    // invoked the validator callback a second time.)
    callback();
    return;
  }
  const validValue = `role/${level}/${orgId}/${value}`;
  const params = { code: validValue };
  axios.post(`/iam/v1/organizations/${orgId}/roles/check`, JSON.stringify(params)).then((mes) => {
    if (mes.failed) {
      const { intl } = this.props;
      callback(intl.formatMessage({ id: `${intlPrefix}.code.exist.msg` }));
    } else {
      callback();
    }
  });
};
linkToChange = (url) => {
const { history } = this.props;
const { AppState } = this.props;
const menu = AppState.currentMenuType;
const { type, id, name } = menu;
history.push(`${url}&type=${type}&id=${id}&name=${name}&organizationId=${id}`);
};
handleExpand = (expanded, record) => {
const expandedRowKeys = get(RoleStore.expandedRowKeys, RoleStore.tabLevel) || [];
if (expanded) {
expandedRowKeys.push(record.id);
} else {
remove(expandedRowKeys, v => v === record.id);
}
set(RoleStore.expandedRowKeys, RoleStore.tabLevel, expandedRowKeys);
}
// Header "select all" checkbox for the active level tab: when nothing or only
// part is selected, select every permission of the level; when everything is
// selected, deselect them all.
handleCheckboxAllClick = (checkedAll, checkedNone, checkedSome, e) => {
  const allPermissionsByRecord = this.getAllPermissionsByLevel(RoleStore.tabLevel);
  const { selectedPermissions } = RoleStore;
  let sp = selectedPermissions.slice();
  if (checkedNone || checkedSome) {
    // Add all of the level's permissions, de-duplicated.
    sp = sp.concat(allPermissionsByRecord);
    sp = [...new Set(sp)];
  } else {
    // Fully checked -> clear the level's permissions from the selection.
    remove(sp, p => allPermissionsByRecord.includes(p));
  }
  RoleStore.setSelectedPermissions(sp);
}
// Row checkbox for one menu subtree: same select-all/deselect-all toggle as
// the header checkbox, but scoped to the permissions under `record`.
handleCheckboxClick = (record, checkedAll, checkedNone, checkedSome, e) => {
  const allPermissionsByRecord = this.getAllPermissionsByRecord(record);
  const { selectedPermissions } = RoleStore;
  let sp = selectedPermissions.slice();
  if (checkedNone || checkedSome) {
    // Add the subtree's permissions, de-duplicated.
    sp = sp.concat(allPermissionsByRecord);
    sp = [...new Set(sp)];
  } else {
    // Fully checked -> clear the subtree's permissions from the selection.
    remove(sp, p => allPermissionsByRecord.includes(p));
  }
  RoleStore.setSelectedPermissions(sp);
}
// Submits the form: validates, builds the role payload (code gets the
// level/org prefix; built-in roles keep the "default" prefix) and either
// updates the existing role or creates a new one.
handleCreate = (e) => {
  const { level, isEdit } = this;
  const { AppState } = this.props;
  const { id } = AppState.currentMenuType;
  const isDefault = isEdit && (RoleStore.roleMsg.code || '').startsWith(`role/${level}/default/`);
  const codePrefix = isDefault
    ? `role/${level}/default/`
    : `role/${level}/${id}/`;
  e.preventDefault();
  this.props.form.validateFieldsAndScroll((err) => {
    if (!err) {
      this.setState({ submitLoading: true });
      const labelValues = this.props.form.getFieldValue('label');
      const labelIds = labelValues && labelValues.map(labelId => ({ id: labelId }));
      const role = {
        name: this.props.form.getFieldValue('name').trim(),
        code: `${codePrefix}${this.props.form.getFieldValue('code').trim()}`,
        level: this.level,
        permissions: RoleStore.selectedPermissions.slice().map(p => ({ id: p })),
        labels: labelIds,
        objectVersionNumber: RoleStore.roleMsg.objectVersionNumber,
      };
      const { intl } = this.props;
      if (this.isEdit) {
        RoleStore.editRoleByid(id, this.roleId, role)
          .then((data) => {
            if (!data.failed) {
              Choerodon.prompt(intl.formatMessage({ id: 'modify.success' }));
              this.setState({ submitLoading: false });
              this.linkToChange(`/iam/org-role?level=${this.level}`);
            } else {
              Choerodon.prompt(data.message);
              this.setState({ submitLoading: false });
            }
          })
          .catch((error) => {
            // Fix: edit path previously had no catch — a request failure left
            // the save button spinning forever.
            this.setState({ submitLoading: false });
            Choerodon.handleResponseError(error);
          });
      } else {
        RoleStore.createRole(id, role)
          .then((data) => {
            if (data && !data.failed) {
              Choerodon.prompt(intl.formatMessage({ id: 'create.success' }));
              this.setState({ submitLoading: false });
              this.linkToChange(`/iam/org-role?level=${this.level}`);
            } else {
              Choerodon.prompt(data.message);
              this.setState({ submitLoading: false });
            }
          })
          .catch((errors) => {
            // Fix: reset loading state on error so the button recovers.
            this.setState({ submitLoading: false });
            if (errors.response.data.message === 'error.role.roleNameExist') {
              Choerodon.prompt(intl.formatMessage({ id: `${intlPrefix}.name.exist.msg` }));
            } else {
              Choerodon.prompt(intl.formatMessage({ id: 'create.error' }));
            }
          });
      }
    }
  });
};
handleReset = () => {
this.linkToChange(`/iam/org-role?level=${this.level}`);
};
handleChangeTabLevel = (orgId, key) => {
RoleStore.setTabLevel(key);
if (!get(RoleStore.menus, key)) {
this.loadMenu(orgId, key);
}
}
handleOpenSider = (record) => {
RoleStore.setCurrentMenu(record);
RoleStore.setSiderVisible(true);
}
handleSiderOk = (selectedPermissions) => {
const { level, isEdit } = this;
const isDefault = isEdit && (RoleStore.roleMsg.code || '').startsWith(`role/${level}/default/`);
if (isDefault) {
RoleStore.setSiderVisible(false);
return;
}
RoleStore.setSelectedPermissions(selectedPermissions);
RoleStore.setSiderVisible(false);
}
handleSiderCancel = () => {
RoleStore.setSiderVisible(false);
}
// Toggles expand/collapse of the whole menu tree on the active tab.
// `RoleStore.expand[tabLevel]` is truthy when the next click should expand.
handleClickExpandBtn = () => {
  const tabLevel = RoleStore.tabLevel || this.tabLevel;
  const expand = get(RoleStore.expand, tabLevel);
  if (expand) {
    // 需要展开
    set(RoleStore.expandedRowKeys, tabLevel, this.getAllIdByLevel(tabLevel));
  } else {
    // 需要收起
    set(RoleStore.expandedRowKeys, tabLevel, []);
  }
  // Flip the flag so the next click does the opposite.
  set(RoleStore.expand, tabLevel, !expand);
}
renderCheckbox = (isDefault) => {
const { selectedPermissions } = RoleStore;
const allPermissionsByLevel = this.getAllPermissionsByLevel(RoleStore.tabLevel);
const checkedAll = allPermissionsByLevel.every(p => selectedPermissions.includes(p));
const checkedNone = allPermissionsByLevel.every(p => !selectedPermissions.includes(p));
const checkedSome = !checkedAll && !checkedNone;
return (
<Checkbox
indeterminate={checkedSome}
onChange={this.handleCheckboxAllClick.bind(this, checkedAll, checkedNone, checkedSome)}
checked={!checkedNone}
disabled={isDefault}
/>
);
}
renderRoleLabel = () => {
const labels = RoleStore.getLabel;
return labels.map(({ id, name }) => (
<Option key={id} value={id}>{name}</Option>
));
};
renderForm = () => {
const { level, props: { intl, form: { getFieldDecorator }, AppState }, isEdit } = this;
const { id } = AppState.currentMenuType;
const isDefault = isEdit && (RoleStore.roleMsg.code || '').startsWith(`role/${level}/default/`);
const codePrefix = isDefault
? `role/${level}/default/`
: `role/${level}/${id}/`;
const formItemLayout = {
labelCol: {
xs: { span: 24 },
sm: { span: 100 },
},
wrapperCol: {
xs: { span: 24 },
sm: { span: 10 },
},
};
return (
<Form layout="vertical">
<FormItem {...formItemLayout} style={{ display: 'inline-block', marginRight: 12 }}>
{getFieldDecorator('code', {
rules: [{
required: true,
whitespace: true,
message: intl.formatMessage({ id: `${intlPrefix}.code.require.msg` }),
}, {
pattern: /^[a-z]([-a-z0-9]*[a-z0-9])?$/,
message: intl.formatMessage({ id: `${intlPrefix}.code.pattern.msg` }),
}, {
validator: this.checkCode,
}],
validateFirst: true,
initialValue: isEdit ? (RoleStore.roleMsg.code || '').slice(codePrefix.length) : undefined,
})(
<Input
autoComplete="off"
label={<FormattedMessage id={`${intlPrefix}.code`} />}
prefix={codePrefix}
size="default"
style={{ width: 250 }}
disabled={isEdit}
maxLength={64}
showLengthInfo={false}
/>,
)}
</FormItem>
<FormItem {...formItemLayout} style={{ display: 'inline-block' }}>
{getFieldDecorator('name', {
rules: [{
required: true,
whitespace: true,
message: intl.formatMessage({ id: `${intlPrefix}.name.require.msg` }),
}],
initialValue: isEdit ? RoleStore.roleMsg.name : undefined,
})(
<Input
autoComplete="off"
label={<FormattedMessage id={`${intlPrefix}.name`} />}
style={{ width: 250 }}
maxLength={64}
showLengthInfo={false}
disabled={isDefault}
/>,
)}
</FormItem>
</Form>
);
}
  // Renders the permission-menu table for a single resource level (one tab).
  // Columns: menu name with icon / tri-state selection checkbox / page route /
  // per-leaf configuration button that opens the permission sider.
  renderTable = (level) => {
    const data = get(RoleStore.menus, level);
    const expandedRowKeys = get(RoleStore.expandedRowKeys, level) || [];
    const { isEdit } = this;
    // Built-in default roles (code prefixed role/<level>/default/) are read-only in edit mode.
    const isDefault = isEdit && (RoleStore.roleMsg.code || '').startsWith(`role/${this.level}/default/`);
    const columns = [{
      title: '菜单',
      dataIndex: 'name',
      key: 'name',
      width: '35%',
      render: (text, record) => (
        <span>
          <Icon type={record.icon} style={{ marginRight: 8, verticalAlign: 'top' }} />
          {text}
        </span>
      ),
    }, {
      // Header checkbox is rendered by renderCheckbox (select-all behavior lives there).
      title: this.renderCheckbox(isDefault),
      dataIndex: 'id',
      key: 'id',
      width: '36px',
      render: (text, record) => {
        const { selectedPermissions } = RoleStore;
        // Tri-state: all / none of this record's permissions selected;
        // anything in between renders as indeterminate.
        const checkedAll = this.getCheckState('all', selectedPermissions, record);
        const checkedNone = this.getCheckState('none', selectedPermissions, record);
        const checkedSome = !checkedAll && !checkedNone;
        return (
          <Checkbox
            indeterminate={checkedSome}
            onChange={this.handleCheckboxClick.bind(this, record, checkedAll, checkedNone, checkedSome)}
            checked={!checkedNone}
            disabled={isDefault}
          />
        );
      },
    }, {
      title: '页面入口',
      dataIndex: 'route',
      key: 'route',
    }, {
      title: '',
      width: '50px',
      key: 'action',
      align: 'right',
      render: (text, record) => {
        const { subMenus } = record;
        // Only leaf menus (no children) get the configuration button.
        if (!subMenus || !subMenus.length) {
          return (
            <Tooltip
              title="配置"
              placement="bottom"
            >
              <Button
                shape="circle"
                icon="predefine"
                size="small"
                onClick={this.handleOpenSider.bind(this, record)}
              />
            </Tooltip>
          );
        }
        return null;
      },
    }];
    return (
      <Table
        loading={false}
        filterBar={false}
        pagination={false}
        columns={columns}
        defaultExpandAllRows
        dataSource={data ? data.slice() : []}
        childrenColumnName="subMenus"
        rowKey={record => record.id}
        expandedRowKeys={expandedRowKeys.slice()}
        onExpand={this.handleExpand}
        indentSize={25}
      />
    );
  }
renderBtns = () => (
<div style={{ marginTop: 32 }}>
<Button
funcType="raised"
type="primary"
onClick={this.handleCreate}
style={{ marginRight: 12 }}
loading={this.state.submitLoading}
>
<FormattedMessage id={!this.isEdit ? 'create' : 'save'} />
</Button>
<Button
funcType="raised"
onClick={this.handleReset}
style={{ color: '#3F51B5' }}
>
<FormattedMessage id="cancel" />
</Button>
</div>
)
renderSider = () => {
const { siderVisible, currentMenu, selectedPermissions } = RoleStore;
const { isEdit } = this;
const isDefault = isEdit && (RoleStore.roleMsg.code || '').startsWith(`role/${this.level}/default/`);
if (siderVisible) {
return (
<Sider
selectedPermissions={selectedPermissions}
menu={currentMenu}
onOk={this.handleSiderOk}
onCancel={this.handleSiderCancel}
disabled={isDefault}
/>
);
}
return null;
}
  // Renders the "menu assignment" section: a global expand/collapse toggle plus
  // one tab (each holding a permission table) per resource level.
  renderTab = () => {
    // Active tab falls back to the instance default when the store has none.
    const tabLevel = RoleStore.tabLevel || this.tabLevel;
    const expand = get(RoleStore.expand, tabLevel);
    return (
      <React.Fragment>
        <div>
          <span style={{ marginRight: 80, fontSize: '16px' }}>菜单分配</span>
          <Button
            type="primary"
            funcType="flat"
            icon={expand ? 'expand_more' : 'expand_less'}
            onClick={this.handleClickExpandBtn}
          >
            全部{expand ? '展开' : '收起'}
          </Button>
        </div>
        <Tabs onChange={this.handleChangeTabLevel} activeKey={tabLevel}>
          {this.getTabCodes().map(level => (
            <TabPane tab={LEVEL_NAME[level] || level} key={level}>
              {this.renderTable(level)}
            </TabPane>
          ))}
        </Tabs>
      </React.Fragment>
    );
  }
render() {
const { AppState } = this.props;
const menu = AppState.currentMenuType;
const { type, id, name } = menu;
return (
<Page className="c7n-roleMsg">
<Header
title={`${!this.isEdit ? '创建' : '修改'}${LEVEL_NAME[this.level]}角色`}
backPath={`/iam/org-role?type=${type}&id=${id}&name=${name}&organizationId=${id}`}
/>
<Content>
{this.renderForm()}
{this.renderTab()}
{this.renderBtns()}
{this.renderSider()}
</Content>
</Page>
);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/payload/DevOpsAppSyncPayload.java<|end_filename|>
package io.choerodon.iam.api.dto.payload;
/**
 * Saga payload carrying the fields needed to synchronize an application
 * between IAM and the DevOps service.
 */
public class DevOpsAppSyncPayload {
    // Organization that owns the application.
    private Long organizationId;
    // Project the application belongs to.
    private Long projectId;
    // Application code (unique identifier string).
    private String code;
    // Display name of the application.
    private String name;
    // Whether the application is active.
    private Boolean active;
    // Application id.
    private Long appId;
    public Long getOrganizationId() {
        return organizationId;
    }
    public void setOrganizationId(Long organizationId) {
        this.organizationId = organizationId;
    }
    public Long getProjectId() {
        return projectId;
    }
    public void setProjectId(Long projectId) {
        this.projectId = projectId;
    }
    public String getCode() {
        return code;
    }
    public void setCode(String code) {
        this.code = code;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public Boolean getActive() {
        return active;
    }
    public void setActive(Boolean active) {
        this.active = active;
    }
    public Long getAppId() {
        return appId;
    }
    public void setAppId(Long appId) {
        this.appId = appId;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/OrganizationService.java<|end_filename|>
package io.choerodon.iam.app.service;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.api.dto.OrgSharesDTO;
import io.choerodon.iam.api.dto.OrganizationSimplifyDTO;
import io.choerodon.iam.infra.dto.OrganizationDTO;
import io.choerodon.iam.infra.dto.UserDTO;
import java.util.List;
import java.util.Set;
/**
 * Application service for querying and managing organizations and their users.
 *
 * @author wuguokai
 */
public interface OrganizationService {
    /** Updates an organization's basic information at the given resource level. */
    OrganizationDTO updateOrganization(Long organizationId, OrganizationDTO organizationDTO, String level, Long sourceId);
    /** Queries an organization by id. */
    OrganizationDTO queryOrganizationById(Long organizationId);
    /** Queries an organization by id together with role information. */
    OrganizationDTO queryOrganizationWithRoleById(Long organizationId);
    /** Pages organizations matching the given example and fuzzy param. */
    PageInfo<OrganizationDTO> pagingQuery(OrganizationDTO organizationDTO, PageRequest pageRequest, String param);
    /** Enables an organization; userId identifies the operator. */
    OrganizationDTO enableOrganization(Long organizationId, Long userId);
    /** Disables an organization; userId identifies the operator. */
    OrganizationDTO disableOrganization(Long organizationId, Long userId);
    /** Validates the given organization (e.g. uniqueness checks). */
    void check(OrganizationDTO organization);
    /** Pages users belonging to an organization, optionally filtered by user id, email and fuzzy param. */
    PageInfo<UserDTO> pagingQueryUsersInOrganization(Long organizationId,
                                                    Long userId, String email, PageRequest pageRequest, String param);
    /** Queries organizations whose ids are in the given set. */
    List<OrganizationDTO> queryByIds(Set<Long> ids);
    /**
     * Gets all organizations as simplified {id, name} entries.
     *
     * @return list
     */
    PageInfo<OrganizationSimplifyDTO> getAllOrgs(PageRequest pageRequest);
    /**
     * Pages brief organization information restricted to the given id range.
     *
     * @param orgIds      the organization ids to restrict the query to
     * @param name        organization name filter
     * @param code        organization code filter
     * @param enabled     enabled/disabled filter
     * @param params      global fuzzy-search filter
     * @param pageRequest paging parameters
     * @return paged result
     */
    PageInfo<OrgSharesDTO> pagingSpecified(Set<Long> orgIds, String name, String code, Boolean enabled, String params, PageRequest pageRequest);
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/ProjectRelationshipController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import java.util.Date;
import java.util.List;
import java.util.Map;
import javax.validation.Valid;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.iam.infra.dto.ProjectRelationshipDTO;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import io.choerodon.core.iam.InitRoleCode;
import io.choerodon.iam.api.dto.RelationshipCheckDTO;
import io.choerodon.iam.app.service.ProjectRelationshipService;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST endpoints for managing project relationships (program / sub-project
 * membership) within an organization.
 *
 * @author Eugen
 */
@RestController
@RequestMapping(value = "/v1/organizations/{organization_id}/project_relations")
public class ProjectRelationshipController {
    private ProjectRelationshipService projectRelationshipService;
    public ProjectRelationshipController(ProjectRelationshipService projectRelationshipService) {
        this.projectRelationshipService = projectRelationshipService;
    }
    /**
     * Lists the sub-projects under a program; when onlySelectEnable is true,
     * only enabled sub-projects are returned.
     */
    @Permission(type = ResourceType.ORGANIZATION, roles = {InitRoleCode.ORGANIZATION_ADMINISTRATOR})
    @ApiOperation(value = "查询项目群下的子项目(默认查所有子项目,可传参只查启用的子项目)")
    @GetMapping(value = "/{parent_id}")
    public ResponseEntity<List<ProjectRelationshipDTO>> getProjUnderGroup(@PathVariable(name = "organization_id") Long orgId,
                                                                         @PathVariable(name = "parent_id") Long id,
                                                                         @RequestParam(name = "only_select_enable", required = false, defaultValue = "false") Boolean onlySelectEnable) {
        return new ResponseEntity<>(projectRelationshipService.getProjUnderGroup(id, onlySelectEnable), HttpStatus.OK);
    }
    /**
     * Removes a project from a program by relationship id.
     */
    @Permission(type = ResourceType.ORGANIZATION, roles = {InitRoleCode.ORGANIZATION_ADMINISTRATOR})
    @ApiOperation(value = "项目群下移除项目")
    @DeleteMapping("/{relationship_id}")
    public ResponseEntity delete(@PathVariable(name = "organization_id") Long orgId,
                                 @PathVariable(name = "relationship_id") Long id) {
        projectRelationshipService.removesAProjUnderGroup(orgId, id);
        return new ResponseEntity(HttpStatus.OK);
    }
    /**
     * Gets the time windows during which an agile project is unavailable under
     * the given parent program.
     */
    @Permission(type = ResourceType.ORGANIZATION, roles = {InitRoleCode.ORGANIZATION_ADMINISTRATOR})
    @ApiOperation(value = "获取敏捷项目的不可用时间")
    @GetMapping("/{project_id}/unavailable/under/{parent_id}")
    public ResponseEntity<List<Map<String, Date>>> getUnavailableTime(@PathVariable(name = "organization_id") Long orgId,
                                                                      @PathVariable(name = "project_id") Long id,
                                                                      @PathVariable(name = "parent_id") Long parentId) {
        return new ResponseEntity<>(projectRelationshipService.getUnavailableTime(id, parentId), HttpStatus.OK);
    }
    /**
     * Batch add/update/enable/disable sub-project relationships under a program.
     */
    @Permission(type = ResourceType.ORGANIZATION, roles = {InitRoleCode.ORGANIZATION_ADMINISTRATOR})
    @ApiOperation(value = "项目群下批量更新(添加/修改/启停用)子项目")
    @PutMapping
    public ResponseEntity<List<ProjectRelationshipDTO>> create(@PathVariable(name = "organization_id") Long orgId,
                                                               @RequestBody @Valid List<ProjectRelationshipDTO> projectRelationshipDTOList) {
        return new ResponseEntity<>(projectRelationshipService.batchUpdateRelationShipUnderProgram(orgId, projectRelationshipDTOList), HttpStatus.OK);
    }
    /**
     * Checks whether a project relationship can be enabled.
     */
    @Permission(type = ResourceType.ORGANIZATION, roles = {InitRoleCode.ORGANIZATION_ADMINISTRATOR})
    @ApiOperation(value = "校验项目关系能否被启用")
    @GetMapping("/check/{relationship_id}/can_be_enabled")
    public ResponseEntity<RelationshipCheckDTO> checkRelationshipCanBeEnabled(@PathVariable(name = "organization_id") Long orgId,
                                                                              @PathVariable(name = "relationship_id") Long id) {
        return new ResponseEntity<>(projectRelationshipService.checkRelationshipCanBeEnabled(id), HttpStatus.OK);
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/RoleService.java<|end_filename|>
package io.choerodon.iam.app.service;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.api.query.ClientRoleQuery;
import io.choerodon.iam.api.dto.RoleAssignmentSearchDTO;
import io.choerodon.iam.api.query.RoleQuery;
import io.choerodon.iam.infra.dto.RoleDTO;
import java.util.List;
/**
 * Application service for role management: paged queries, CRUD,
 * enable/disable, and listings with user/client counts per resource level.
 *
 * @author superlee
 * @author wuguokai
 */
public interface RoleService {
    /** Paged role search (site level). */
    PageInfo<RoleDTO> pagingSearch(PageRequest pageRequest, RoleQuery roleQuery);
    /** Paged role search within an organization. */
    PageInfo<RoleDTO> pagingQueryOrgRoles(Long orgId, PageRequest pageRequest, RoleQuery roleQuery);
    /** Creates a role. */
    RoleDTO create(RoleDTO roleDTO);
    /** Creates a role based on existing roles. */
    RoleDTO createBaseOnRoles(RoleDTO roleDTO);
    /** Updates a role (site level). */
    RoleDTO update(RoleDTO roleDTO);
    /** Updates a role within an organization. */
    RoleDTO orgUpdate(RoleDTO roleDTO,Long orgId);
    /** Deletes a role by id. */
    void delete(Long id);
    /** Queries a role by id. */
    RoleDTO queryById(Long id);
    /** Enables a role (site level). */
    RoleDTO enableRole(Long id);
    /** Disables a role (site level). */
    RoleDTO disableRole(Long id);
    /** Enables a role within an organization. */
    RoleDTO orgEnableRole(Long roleId,Long orgId);
    /** Disables a role within an organization. */
    RoleDTO orgDisableRole(Long roleId,Long orgId);
    /** Queries a role by id, including its permissions and labels. */
    RoleDTO queryWithPermissionsAndLabels(Long id);
    /** Lists site-level roles with their user counts. */
    List<RoleDTO> listRolesWithUserCountOnSiteLevel(RoleAssignmentSearchDTO roleAssignmentSearchDTO);
    /** Lists site-level roles with their client counts. */
    List<RoleDTO> listRolesWithClientCountOnSiteLevel(ClientRoleQuery clientRoleSearchDTO);
    /** Lists organization-level roles with their user counts. */
    List<RoleDTO> listRolesWithUserCountOnOrganizationLevel(RoleAssignmentSearchDTO roleAssignmentSearchDTO, Long sourceId);
    /** Lists organization-level roles with their client counts. */
    List<RoleDTO> listRolesWithClientCountOnOrganizationLevel(ClientRoleQuery clientRoleSearchDTO, Long sourceId);
    /** Lists project-level roles with their user counts. */
    List<RoleDTO> listRolesWithUserCountOnProjectLevel(RoleAssignmentSearchDTO roleAssignmentSearchDTO, Long sourceId);
    /** Lists project-level roles with their client counts. */
    List<RoleDTO> listRolesWithClientCountOnProjectLevel(ClientRoleQuery clientRoleSearchDTO, Long sourceId);
    /** Validates the given role (e.g. code checks). */
    void check(RoleDTO role);
    /** Queries role ids by label name and label type. */
    List<Long> queryIdsByLabelNameAndLabelType(String labelName, String labelType);
    /** Queries roles carrying the given label within an organization. */
    List<RoleDTO> selectByLabel(String label, Long organizationId);
    /** Lists a user's roles for the given source (type + id). */
    List<RoleDTO> listRolesBySourceIdAndTypeAndUserId(String sourceType, Long sourceId, Long userId);
    /** Queries a role by its unique code. */
    RoleDTO queryByCode(String code);
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/dto/ProjectDTO.java<|end_filename|>
package io.choerodon.iam.infra.dto;
import io.choerodon.iam.api.dto.ProjectCategoryDTO;
import io.choerodon.mybatis.entity.BaseDTO;
import io.swagger.annotations.ApiModelProperty;
import io.swagger.annotations.ApiParam;
import javax.persistence.Column;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.Pattern;
import javax.validation.constraints.Size;
import java.util.List;
/**
 * Data object mapped to the fd_project table, plus transient fields
 * (roles, projects, type name, organization name) filled in by queries.
 *
 * @author superlee
 * @since 2019-04-22
 */
@Table(name = "fd_project")
public class ProjectDTO extends BaseDTO {
    // Lowercase alphanumerics separated by single hyphens; must start with a letter.
    private static final String CODE_REGULAR_EXPRESSION =
            "^[a-z](([a-z0-9]|-(?!-))*[a-z0-9])*$";
    // 1-32 chars: word chars, whitespace, dots, dashes, CJK characters.
    public static final String PROJECT_NAME_REG = "^[-—\\.\\w\\s\\u4e00-\\u9fa5]{1,32}$";
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @ApiModelProperty(value = "主键ID/非必填")
    private Long id;
    @ApiModelProperty(value = "项目名/必填")
    @NotEmpty(message = "error.project.name.empty")
    @Size(min = 1, max = 32, message = "error.project.code.size")
    @Pattern(regexp = PROJECT_NAME_REG, message = "error.project.name.regex")
    private String name;
    @ApiModelProperty(value = "项目编码/必填")
    @NotEmpty(message = "error.project.code.empty")
    @Size(min = 1, max = 14, message = "error.project.code.size")
    @Pattern(regexp = CODE_REGULAR_EXPRESSION, message = "error.project.code.illegal")
    private String code;
    @ApiParam(name = "organization_id", value = "组织id")
    @ApiModelProperty(value = "组织ID/非必填")
    private Long organizationId;
    @ApiModelProperty(value = "项目图标url")
    private String imageUrl;
    @ApiModelProperty(value = "是否启用/非必填")
    @Column(name = "is_enabled")
    private Boolean enabled;
    @ApiModelProperty(value = "项目类型code/非必填")
    private String type;
    @ApiModelProperty(value = "项目类型(遗留旧字段,一对一):AGILE(敏捷项目),PROGRAM(普通项目组),ANALYTICAL(分析型项目群)")
    private String category;
    @ApiModelProperty(value = "项目类型")
    private List<Long> categoryIds;
    @ApiModelProperty(value = "项目类型(非开源,一对多)")
    private List<ProjectCategoryDTO> categories;
    // Not persisted: populated by join queries.
    @Transient
    private List<RoleDTO> roles;
    // Not persisted: populated by join queries.
    @Transient
    private List<ProjectDTO> projects;
    @Transient
    @ApiModelProperty(value = "项目类型名称/非必填")
    private String typeName;
    // Not persisted: owning organization's display name.
    @Transient
    private String organizationName;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getCode() {
        return code;
    }
    public void setCode(String code) {
        this.code = code;
    }
    public Long getOrganizationId() {
        return organizationId;
    }
    public void setOrganizationId(Long organizationId) {
        this.organizationId = organizationId;
    }
    public String getImageUrl() {
        return imageUrl;
    }
    public void setImageUrl(String imageUrl) {
        this.imageUrl = imageUrl;
    }
    public Boolean getEnabled() {
        return enabled;
    }
    public void setEnabled(Boolean enabled) {
        this.enabled = enabled;
    }
    public String getType() {
        return type;
    }
    public void setType(String type) {
        this.type = type;
    }
    public String getCategory() {
        return category;
    }
    public void setCategory(String category) {
        this.category = category;
    }
    public List<RoleDTO> getRoles() {
        return roles;
    }
    public void setRoles(List<RoleDTO> roles) {
        this.roles = roles;
    }
    public String getTypeName() {
        return typeName;
    }
    public void setTypeName(String typeName) {
        this.typeName = typeName;
    }
    public String getOrganizationName() {
        return organizationName;
    }
    public void setOrganizationName(String organizationName) {
        this.organizationName = organizationName;
    }
    public List<ProjectDTO> getProjects() {
        return projects;
    }
    public void setProjects(List<ProjectDTO> projects) {
        this.projects = projects;
    }
    public List<ProjectCategoryDTO> getCategories() {
        return categories;
    }
    public void setCategories(List<ProjectCategoryDTO> categories) {
        this.categories = categories;
    }
    public List<Long> getCategoryIds() {
        return categoryIds;
    }
    public void setCategoryIds(List<Long> categoryIds) {
        this.categoryIds = categoryIds;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/AuditMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import java.util.List;
import io.choerodon.iam.infra.dto.AuditDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
/**
 * MyBatis mapper for audit records.
 *
 * Created by Eugen on 01/03/2019.
 */
public interface AuditMapper extends Mapper<AuditDTO> {
    /**
     * Queries audit records filtered by the given parameters.
     *
     * @param userId       operator user id filter
     * @param businessType business type filter
     * @param dataType     data type filter
     * @return matching audit records
     */
    List<AuditDTO> selectByParams(@Param("userId") Long userId,
                                  @Param("businessType") String businessType,
                                  @Param("dataType") String dataType);
}
<|start_filename|>src/test/groovy/io/choerodon/iam/app/service/impl/LookupServiceImplTestSpec.groovy<|end_filename|>
package io.choerodon.iam.app.service.impl
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.app.service.LookupService
import io.choerodon.iam.infra.asserts.AssertHelper
import io.choerodon.iam.infra.mapper.LookupMapper
import io.choerodon.iam.infra.mapper.LookupValueMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * Integration test for LookupServiceImpl#delete.
 *
 * @author dengyouquan
 */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class LookupServiceImplTestSpec extends Specification {
    // Earlier mock-based collaborators, kept for reference:
    //    private LookupRepository lookupRepository = Mock(LookupRepository)
    //    private LookupValueRepository lookupValueRepository = Mock(LookupValueRepository)
    @Autowired
    LookupMapper lookupMapper
    @Autowired
    LookupValueMapper lookupValueMapper
    @Autowired
    AssertHelper assertHelper
    def "Delete"() {
        given: "构造参数"
        def id = 1L
        LookupService lookupService = new LookupServiceImpl(lookupMapper,
                lookupValueMapper, assertHelper)
        when: "调用方法"
        lookupService.delete(id)
        then: "校验结果"
        // No interaction assertions against real mappers; the test only checks
        // that delete(id) completes without throwing.
        true
        //        1 * lookupRepository.deleteById(_)
        //        1 * lookupValueRepository.delete(_)
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/PermissionController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.constant.PageConstant;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.iam.api.dto.CheckPermissionDTO;
import io.choerodon.iam.app.service.PermissionService;
import io.choerodon.iam.infra.dto.PermissionDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.choerodon.swagger.annotation.CustomPageRequest;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import java.util.List;
import java.util.Set;
/**
 * REST endpoints for querying and maintaining permissions.
 *
 * @author wuguokai
 */
@RestController
@RequestMapping("/v1/permissions")
public class PermissionController {
    private PermissionService permissionService;
    public PermissionController(PermissionService permissionService) {
        this.permissionService = permissionService;
    }
    /**
     * Checks, per permission code, whether the current user may access it.
     */
    @PostMapping(value = "/checkPermission")
    @ApiOperation("通过permission code鉴权,判断用户是否有查看的权限")
    @Permission(permissionLogin = true)
    public ResponseEntity<List<CheckPermissionDTO>> checkPermission(@RequestBody List<CheckPermissionDTO> checkPermissions) {
        return new ResponseEntity<>(permissionService.checkPermission(checkPermissions), HttpStatus.OK);
    }
    /**
     * Pages permissions of the given resource level, optionally filtered by a fuzzy param.
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation("通过层级查询权限列表")
    @GetMapping
    @CustomPageRequest
    public ResponseEntity<PageInfo<PermissionDTO>> pagingQuery(@ApiIgnore
                                                               @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                               @RequestParam("level") String level,
                                                               @RequestParam(required = false) String param) {
        PermissionDTO dto = new PermissionDTO();
        dto.setResourceLevel(level);
        return new ResponseEntity<>(permissionService.pagingQuery(pageRequest, dto, param), HttpStatus.OK);
    }
    /**
     * Lists the union of permissions granted to the given roles (site level).
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation("通过角色查询权限列表")
    @PostMapping
    public ResponseEntity<Set<PermissionDTO>> queryByRoleIds(@RequestBody List<Long> roleIds) {
        return new ResponseEntity<>(permissionService.queryByRoleIds(roleIds), HttpStatus.OK);
    }
    /**
     * Organization-level variant of queryByRoleIds; delegates to the same service call.
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation("组织层通过角色查询权限列表")
    @PostMapping("/through_roles_at_org/{organization_id}")
    public ResponseEntity<Set<PermissionDTO>> queryByRoleIdsAtOrg(@PathVariable(name = "organization_id") Long organizationId, @RequestBody List<Long> roleIds) {
        return new ResponseEntity<>(permissionService.queryByRoleIds(roleIds), HttpStatus.OK);
    }
    /**
     * Lists permissions filtered by level, service name and code.
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation("通过层级,服务名,code查询Permission列表")
    @GetMapping("/permissionList")
    public ResponseEntity<List<PermissionDTO>> query(@RequestParam("level") String level,
                                                     @RequestParam(value = "service_name", required = false) String serviceName,
                                                     @RequestParam(value = "code", required = false) String code) {
        return new ResponseEntity<>(permissionService.query(level, serviceName, code), HttpStatus.OK);
    }
    /**
     * Compares the given permission code against the swagger json fetched from
     * the most recently updated instance; if the permission is deprecated it is
     * deleted, otherwise an exception is thrown.
     *
     * @param code the code of the permission
     * @return 204 No Content on success
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation("根据permission code删除permission, 只能删除废弃的permission")
    @DeleteMapping
    public ResponseEntity deleteByCode(@RequestParam String code) {
        permissionService.deleteByCode(code);
        return new ResponseEntity(HttpStatus.NO_CONTENT);
    }
}
<|start_filename|>src/main/resources/script/db/iam_application_exploration.groovy<|end_filename|>
package script.db
// Liquibase changelog for IAM_APPLICATION_EXPLORATION, the table that records
// application paths and parent/child relations used by combination applications.
databaseChangeLog(logicalFilePath: 'script/db/iam_application_exploration.groovy') {
    // 2018-03-12: initial table, plus a sequence on databases that support sequences.
    changeSet(author: 'superlee', id: '2018-03-12-iam-application-exploration') {
        if (helper.dbType().isSupportSequence()) {
            createSequence(sequenceName: 'IAM_APPLICATION_EXPLORATION_S', startValue: "1")
        }
        createTable(tableName: "IAM_APPLICATION_EXPLORATION") {
            column(name: 'ID', type: 'BIGINT UNSIGNED', autoIncrement: true, remarks: '表ID,主键,供其他表做外键,unsigned bigint、单表时自增、步长为 1') {
                constraints(primaryKey: true, primaryKeyName: 'PK_IAM_APPLICATION_EXPLO')
            }
            column(name: 'APPLICATION_ID', type: 'BIGINT UNSIGNED', remarks: '应用id') {
                constraints(nullable: false)
            }
            column(name: 'PATH', type: 'VARCHAR(4000)', remarks: '应用路径,从根节点到当前节点的application_id路径,实例:1/或1/2/3/或1/4/5/等') {
                constraints(nullable: false)
            }
            column(name: 'ROOT_ID', type: 'BIGINT UNSIGNED', remarks: '当前节点的根节点id,如果自己是根节点,则是自己的id') {
                constraints(nullable: false)
            }
            column(name: 'PARENT_ID', type: 'BIGINT UNSIGNED', remarks: '父节点id,没有父节点则为null')
            column(name: 'HASHCODE', type: 'VARCHAR(64)', remarks: 'path路径的hash值,可能存在hash碰撞,碰撞的情况下在比较path') {
                constraints(nullable: false)
            }
            column(name: 'IS_ENABLED', type: 'TINYINT UNSIGNED', defaultValue: "1", remarks: '是否启用。默认为1表示启用') {
                constraints(nullable: false)
            }
            // Standard audit columns.
            column(name: "OBJECT_VERSION_NUMBER", type: "BIGINT UNSIGNED", defaultValue: "1") {
                constraints(nullable: true)
            }
            column(name: "CREATED_BY", type: "BIGINT UNSIGNED", defaultValue: "0") {
                constraints(nullable: true)
            }
            column(name: "CREATION_DATE", type: "DATETIME", defaultValueComputed: "CURRENT_TIMESTAMP")
            column(name: "LAST_UPDATED_BY", type: "BIGINT UNSIGNED", defaultValue: "0") {
                constraints(nullable: true)
            }
            column(name: "LAST_UPDATE_DATE", type: "DATETIME", defaultValueComputed: "CURRENT_TIMESTAMP")
        }
        addUniqueConstraint(tableName: 'IAM_APPLICATION_EXPLORATION', columnNames: 'APPLICATION_ID, PARENT_ID', constraintName: 'PK_IAM_APPLICATION_EXPLO_U1')
    }
    // 2018-03-14: the (APPLICATION_ID, PARENT_ID) pair is no longer unique.
    changeSet(author: 'superlee', id: '2018-03-14-iam-application-exploration-remove-unique-constraint') {
        dropUniqueConstraint(tableName:'IAM_APPLICATION_EXPLORATION', constraintName:'PK_IAM_APPLICATION_EXPLO_U1')
    }
    // 2019-07-18: add a table-level remark.
    changeSet(author: 'superlee', id: '2019-07-18-iam-application-exploration-add-remark') {
        setTableRemarks(tableName:"IAM_APPLICATION_EXPLORATION",remarks: "应用探测表,用于记录应用的路径信息,父子关系等")
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/ApplicationMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.api.query.ApplicationQuery;
import io.choerodon.iam.infra.dto.ApplicationDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import java.util.Set;
/**
 * MyBatis mapper for application records.
 *
 * @author superlee
 * @since 0.15.0
 */
public interface ApplicationMapper extends Mapper<ApplicationDTO> {
    /**
     * Fuzzy query over applications by the given search criteria.
     * (Return type parameterized: was a raw {@code List}.)
     *
     * @param applicationSearchDTO search criteria
     * @return matching applications
     */
    List<ApplicationDTO> fuzzyQuery(@Param("applicationSearchDTO") ApplicationQuery applicationSearchDTO);
    /**
     * Returns the applications whose ids are in the given set.
     * (Return type parameterized: was a raw {@code List}.)
     *
     * @param idSet application ids to match
     * @return matching applications
     */
    List<ApplicationDTO> matchId(@Param("idSet") Set<Long> idSet);
    /**
     * Queries an organization's applications together with project information.
     *
     * @param organizationId organization id
     * @return applications with their project information
     */
    List<ApplicationDTO> selectWithProject(@Param("organizationId") Long organizationId);
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/common/utils/ImageUtils.java<|end_filename|>
package io.choerodon.iam.infra.common.utils;
import net.coobird.thumbnailator.Thumbnails;
import org.springframework.web.multipart.MultipartFile;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
/**
 * Static helpers for rotating and cropping uploaded multipart image files.
 *
 * @author dengyouquan
 **/
public class ImageUtils {
    private ImageUtils() {
        // Utility class: prevent instantiation (message typo fixed).
        throw new IllegalStateException("utility class, cannot be instantiated");
    }
    /**
     * Rotates and/or crops the given image.
     *
     * @param file   the uploaded image
     * @param rotate rotation in degrees, or null for no rotation
     * @param axisX  crop region x origin (crop applied only when all four crop params are non-null)
     * @param axisY  crop region y origin
     * @param width  crop region width
     * @param height crop region height
     * @return a new MultipartFile with the transformed bytes, or the original
     *         file unchanged when no transformation was requested
     * @throws java.io.IOException if reading or writing the image fails
     */
    public static MultipartFile cutImage(MultipartFile file, Double rotate, Integer axisX, Integer axisY, Integer width, Integer height) throws java.io.IOException {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        // Rotation is applied first, if requested.
        if (rotate != null) {
            Thumbnails.of(file.getInputStream()).scale(1.0, 1.0).rotate(rotate).toOutputStream(outputStream);
        }
        // Cropping uses the rotated bytes when a rotation was performed,
        // otherwise it reads the original upload.
        if (axisX != null && axisY != null && width != null && height != null) {
            if (outputStream.size() > 0) {
                final InputStream rotateInputStream = new ByteArrayInputStream(outputStream.toByteArray());
                outputStream.reset();
                Thumbnails.of(rotateInputStream).scale(1.0, 1.0).sourceRegion(axisX, axisY, width, height).toOutputStream(outputStream);
            } else {
                Thumbnails.of(file.getInputStream()).scale(1.0, 1.0).sourceRegion(axisX, axisY, width, height).toOutputStream(outputStream);
            }
        }
        // Wrap the transformed bytes back into a MultipartFile; if nothing was
        // written, the original file is returned as-is.
        if (outputStream.size() > 0) {
            file = new MockMultipartFile(file.getName(), file.getOriginalFilename(),
                    file.getContentType(), outputStream.toByteArray());
        }
        return file;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/common/utils/SagaTopic.java<|end_filename|>
package io.choerodon.iam.infra.common.utils;
/**
 * Saga topic / task code constants, grouped by business domain.
 */
public final class SagaTopic {
    private SagaTopic() {
    }
    public static class User {
        private User() {
        }
        // Create a user
        public static final String USER_CREATE = "iam-create-user";
        // Saga task code consumed by IAM for the create-user event
        // (original comment mentioned organizations — likely copy-paste; the code is task-create-user)
        public static final String TASK_USER_CREATE = "task-create-user";
        // Batch-create users (NOTE: same code string as USER_CREATE)
        public static final String USER_CREATE_BATCH = "iam-create-user";
        // Update a user
        public static final String USER_UPDATE = "iam-update-user";
        // Delete a user
        public static final String USER_DELETE = "iam-delete-user";
        // Enable a user
        public static final String USER_ENABLE = "iam-enable-user";
        // Disable a user
        public static final String USER_DISABLE = "iam-disable-user";
    }
    public static class Project {
        private Project() {
        }
        // Create a project
        public static final String PROJECT_CREATE = "iam-create-project";
        // Update a project
        public static final String PROJECT_UPDATE = "iam-update-project";
        // Disable a project
        public static final String PROJECT_DISABLE = "iam-disable-project";
        // Enable a project
        public static final String PROJECT_ENABLE = "iam-enable-project";
    }
    public static class MemberRole {
        private MemberRole() {
        }
        // Update a user's roles
        public static final String MEMBER_ROLE_UPDATE = "iam-update-memberRole";
        // Delete a user's roles
        public static final String MEMBER_ROLE_DELETE = "iam-delete-memberRole";
    }
    public static class Organization {
        private Organization() {
        }
        // Organization service creates an organization
        public static final String ORG_CREATE = "org-create-organization";
        // Organization service registers an organization
        public static final String ORG_REGISTER = "register-org";
        // Saga task code consumed by IAM for the organization-creation event
        public static final String TASK_ORG_CREATE = "iam-create-organization";
        // IAM handles organization registration: create the default password policy and default LDAP config
        public static final String TASK_ORG_REGISTER_INIT_ORG = "register-iam-init-org";
        // IAM handles organization registration: create the project
        public static final String TASK_ORG_REGISTER_INIT_PROJ = "register-iam-init-project";
        // Enable an organization
        public static final String ORG_ENABLE = "iam-enable-organization";
        // Disable an organization
        public static final String ORG_DISABLE = "iam-disable-organization";
        // Update an organization
        public static final String ORG_UPDATE = "iam-update-organization";
    }
    public static class SystemSetting {
        private SystemSetting() {
        }
        // Saga code triggered when IAM system settings change (add, update, reset)
        public static final String SYSTEM_SETTING_UPDATE = "iam-update-system-setting";
    }
    public static class Application {
        private Application() {
        }
        // Application lifecycle and IAM<->DevOps synchronization codes.
        public static final String APP_CREATE = "iam-create-application";
        public static final String APP_UPDATE = "iam-update-application";
        public static final String APP_DISABLE = "iam-disable-application";
        public static final String APP_ENABLE = "iam-enable-application";
        public static final String APP_SYNC = "devops-sync-application";
        public static final String IAM_SYNC_APP = "iam-sync-application";
        public static final String APP_DELETE = "iam-delete-application";
        public static final String APP_SYNC_DELETE = "iam-sync-app-delete";
        public static final String DEVOPS_APP_DELETE = "devops-app-delete";
        public static final String APP_SYNC_ACTIVE = "iam-sync-app-active";
        public static final String DEVOPS_SYNC_APP_ACTIVE = "devops-sync-app-active";
        public static final String APP_SYNC_NAME = "iam-sync-app-name";
        public static final String DEVOPS_SYNC_APP_NAME = "devops-update-iam-app";
        public static final String APP_UPDATE_ABNORMAL = "iam-update-application-abnormal";
        public static final String APP_DEVOPS_CREATE_FAIL = "devops-create-app-fail";
    }
    public static class ProjectRelationship {
        private ProjectRelationship() {
        }
        // IAM adds project relationships
        public static final String PROJECT_RELATIONSHIP_ADD = "iam-add-project-relationships";
        // IAM deletes project relationships
        public static final String PROJECT_RELATIONSHIP_DELETE = "iam-delete-project-relationships";
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/RoleLabelMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.RoleLabelDTO;
import io.choerodon.mybatis.common.Mapper;
/**
 * MyBatis mapper for the role-label relation; all CRUD operations are
 * inherited from {@link Mapper}.
 *
 * @author superlee
 */
public interface RoleLabelMapper extends Mapper<RoleLabelDTO> {
}
<|start_filename|>react/src/app/iam/containers/organization/application/ManageApplication.js<|end_filename|>
import React, { Component } from 'react';
import get from 'lodash/get';
import { Button, Form, Table, Tooltip, Icon } from 'choerodon-ui';
import { inject, observer } from 'mobx-react';
import { withRouter } from 'react-router-dom';
import { Content, Header, Page, axios } from '@choerodon/boot';
import { injectIntl, FormattedMessage } from 'react-intl';
import './Application.scss';
import MouseOverWrapper from '../../../components/mouseOverWrapper';
import StatusTag from '../../../components/statusTag';
import AddSider from './AddSider';
const intlPrefix = 'organization.application';
@Form.create({})
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class Application extends Component {
  constructor(props) {
    super(props);
    // Application id taken from the route params; undefined when absent.
    this.id = get(props, 'match.params.applicationId', undefined);
  }
  componentDidMount() {
    // Load the application detail as soon as the component mounts.
    this.refresh();
  }
refresh = () => {
if (!this.id) return;
const { ApplicationStore } = this.props;
ApplicationStore.getDetailById(this.id);
}
handleClickAddApplication = () => {
const { ApplicationStore } = this.props;
ApplicationStore.showSidebar();
}
handleopenTab = (record, operation) => {
const { ApplicationStore } = this.props;
ApplicationStore.setEditData(record);
ApplicationStore.showSidebar();
};
handleEnable = (record) => {
const { ApplicationStore } = this.props;
if (record.enabled) {
ApplicationStore.disableApplication(record.id).then(() => {
ApplicationStore.loadData();
});
} else {
ApplicationStore.enableApplication(record.id).then(() => {
ApplicationStore.loadData();
});
}
};
handlePageChange = (pagination, filters, sorter, params) => {
const { ApplicationStore } = this.props;
ApplicationStore.loadData(pagination, filters, sorter, params);
};
handleSaveMsg = (selections) => {
const { ApplicationStore } = this.props;
const { AppState: { currentMenuType: { organizationId } } } = this.props;
const unHandleData = ApplicationStore.applicationData.slice();
const originSelections = unHandleData.map(v => v.id);
const needDelete = originSelections.filter(v => !selections.includes(v));
const needAdd = selections.filter(v => !originSelections.includes(v));
if (needAdd.length && needDelete.length) {
Promise.all([ApplicationStore.addToCombination(this.id, needAdd), axios.post(`/iam/v1/organizations/${organizationId}/applications/${this.id}/delete_combination`, needDelete)])
.then(([res1, res2]) => {
ApplicationStore.closeSidebar();
this.forceUpdate();
this.refresh();
});
} else if (needAdd.length && !needDelete.length) {
ApplicationStore.addToCombination(this.id, selections)
.then(() => {
ApplicationStore.closeSidebar();
this.forceUpdate();
this.refresh();
});
} else if (!needAdd.length && needDelete.length) {
axios.post(`/iam/v1/organizations/${organizationId}/applications/${this.id}/delete_combination`, needDelete)
.then(() => {
ApplicationStore.closeSidebar();
this.forceUpdate();
this.refresh();
});
} else {
ApplicationStore.closeSidebar();
}
}
handleCancelSider = () => {
const { ApplicationStore } = this.props;
ApplicationStore.closeSidebar();
}
handleManage = (record) => {
const { AppState: { currentMenuType: { name, id } }, history } = this.props;
history.push(`/iam/application/manage/${record.id}?type=organization&id=${id}&name=${encodeURIComponent(name)}`);
}
handleDelete = (idArr) => {
const { AppState: { currentMenuType: { organizationId } } } = this.props;
axios.post(`/iam/v1/organizations/${organizationId}/applications/${this.id}/delete_combination`, idArr)
.then(() => {
this.refresh();
});
}
render() {
const { ApplicationStore: { filters, pagination, params, data }, AppState, intl, ApplicationStore, ApplicationStore: { applicationData } } = this.props;
const menuType = AppState.currentMenuType;
const orgId = menuType.id;
const unHandleData = ApplicationStore.applicationData.slice();
const selections = unHandleData.map(v => v.id);
const hasChild = unHandleData.some(v => v.applicationCategory === 'combination-application' && v.descendants && v.descendants.length);
const columns = [
{
title: <FormattedMessage id={`${intlPrefix}.name`} />,
dataIndex: 'name',
filters: [],
filteredValue: filters.name || [],
render: (text, record) => (
<span
style={{
borderLeft: record.isFirst && hasChild ? '1px solid rgba(0, 0, 0, 0.12)' : 'none',
paddingLeft: hasChild ? 20 : 'auto',
}}
>
<Icon type={record.applicationCategory === 'combination-application' ? 'grain' : 'predefine'} style={{ marginRight: 5, verticalAlign: 'text-top' }} />
{text}
</span>
),
},
{
title: <FormattedMessage id={`${intlPrefix}.code`} />,
dataIndex: 'code',
key: 'code',
width: '15%',
filters: [],
filteredValue: filters.code || [],
render: (text, record) => {
if (!record.isFirst) return null;
return <span>{text}</span>;
},
},
{
title: <FormattedMessage id={`${intlPrefix}.category`} />,
dataIndex: 'applicationCategory',
width: '10%',
render: (category, record) => (!record.isFirst ? null : <FormattedMessage id={`${intlPrefix}.category.${category.toLowerCase()}`} />),
filters: [{
text: '组合应用',
value: 'combination-application',
}, {
text: '普通应用',
value: 'application',
}],
filteredValue: filters.applicationCategory || [],
},
{
title: <FormattedMessage id={`${intlPrefix}.application-type`} />,
dataIndex: 'applicationType',
filters: [{
text: '开发应用',
value: 'normal',
}, {
text: '测试应用',
value: 'test',
}],
filteredValue: filters.applicationType || [],
width: '10%',
render: (text, record) => (
!record.isFirst ? null
: (
<MouseOverWrapper text={text} width={0.2}>
{text ? intl.formatMessage({ id: `${intlPrefix}.type.${text}` }) : ''}
</MouseOverWrapper>
)
),
},
{
title: <FormattedMessage id={`${intlPrefix}.project-name`} />,
dataIndex: 'projectName',
filters: [],
filteredValue: filters.projectName || [],
width: '20%',
render: (text, record) => (
!record.isFirst ? null
: (
<div>
{
text && (
<div className="c7n-iam-application-name-avatar">
{
record.imageUrl ? (
<img src={record.imageUrl} alt="avatar" style={{ width: '100%' }} />
) : (
<React.Fragment>{text.split('')[0]}</React.Fragment>
)
}
</div>
)
}
<MouseOverWrapper text={text} width={0.2}>
{text}
</MouseOverWrapper>
</div>
)
),
},
{
title: <FormattedMessage id="status" />,
dataIndex: 'enabled',
width: '10%',
filters: [{
text: intl.formatMessage({ id: 'enable' }),
value: 'true',
}, {
text: intl.formatMessage({ id: 'disable' }),
value: 'false',
}],
filteredValue: filters.enabled || [],
key: 'enabled',
render: (enabled, record) => (
!record.isFirst ? null
: (
<span style={{ marginRight: 8, fontSize: '12px', lineHeight: '18px', padding: '2px 6px', background: record.enabled ? 'rgba(0, 191, 165, 0.1)' : 'rgba(244, 67, 54, 0.1)', color: record.enabled ? '#009688' : '#D50000', borderRadius: '2px', border: '1px solid', borderColor: record.enabled ? '#009688' : '#D50000' }}>
{record.enabled ? '启用' : '停用'}
</span>
)
),
},
{
title: '',
key: 'action',
width: '120px',
align: 'right',
render: (text, record) => (
!record.isFirst ? null
: (
<Tooltip
title={<FormattedMessage id="delete" />}
placement="bottom"
>
<Button
shape="circle"
size="small"
onClick={e => this.handleDelete([record.id])}
icon="delete"
/>
</Tooltip>
)
),
},
];
return (
<Page>
<Header
title="应用配置"
backPath={`/iam/application?type=organization&id=${orgId}&name=${encodeURIComponent(menuType.name)}&organizationId=${orgId}`}
>
<Button
onClick={this.handleClickAddApplication}
icon="playlist_add"
>
添加子应用
</Button>
<Button
icon="refresh"
onClick={this.refresh}
>
<FormattedMessage id="refresh" />
</Button>
</Header>
<Content
title={`组合应用"${data.name || ''}"的子应用`}
description="您可以在此修改应用名称。如果此应用是组合应用,您可以在此查看此组合应用下子应用的信息,同时您还可以在此添加或删除此组合应用下的子应用。"
link="#"
>
<Table
pagination={pagination}
columns={columns}
dataSource={unHandleData}
rowKey={record => record.id}
filters={params.slice()}
onChange={this.handlePageChange}
loading={ApplicationStore.loading}
filterBarPlaceholder={intl.formatMessage({ id: 'filtertable' })}
childrenColumnName="descendants"
/>
</Content>
{
ApplicationStore.sidebarVisible ? (
<AddSider
onCancel={this.handleCancelSider}
onOk={this.handleSaveMsg}
id={this.id}
selections={selections}
/>
) : null
}
</Page>
);
}
}
<|start_filename|>src/main/resources/script/db/oauth_ldap_history.groovy<|end_filename|>
package script.db

// Liquibase changelog (Groovy DSL) for the OAUTH_LDAP_HISTORY table, which
// records the outcome of each LDAP user-synchronisation run (counts of
// inserted/updated/failed users plus start/end timestamps).
databaseChangeLog(logicalFilePath: 'script/db/oauth_ldap_history.groovy') {
    changeSet(author: '<EMAIL>', id: '2018-06-06-oauth-ldap-history') {
        // On databases with sequence support, create the id sequence;
        // elsewhere the autoIncrement primary key below is used instead.
        if(helper.dbType().isSupportSequence()){
            createSequence(sequenceName: 'OAUTH_LDAP_HISTORY_S', startValue:"1")
        }
        createTable(tableName: "OAUTH_LDAP_HISTORY") {
            column(name: 'ID', type: 'BIGINT UNSIGNED', autoIncrement: true, remarks: '表ID,主键,供其他表做外键,unsigned bigint、单表时自增、步长为 1') {
                constraints(primaryKey: true, primaryKeyName: 'PK_OAUTH_LDAP_HISTORY')
            }
            // Reference to the LDAP configuration this run belongs to.
            column(name: 'LDAP_ID', type: 'BIGINT UNSIGNED', remarks: 'ldap id') {
                constraints(nullable: false)
            }
            // Per-run sync statistics.
            column(name: 'NEW_USER_COUNT', type: "INTEGER UNSIGNED", remarks: '同步用户新增数量')
            column(name: 'UPDATE_USER_COUNT', type: "INTEGER UNSIGNED", remarks: '同步用户更新数量')
            column(name: 'ERROR_USER_COUNT', type: "INTEGER UNSIGNED", remarks: '同步用户失败数量')
            column(name: "SYNC_BEGIN_TIME", type: "DATETIME", remarks: '同步开始时间')
            column(name: "SYNC_END_TIME", type: "DATETIME", remarks: '同步结束时间')
            // Standard audit columns (optimistic-lock version + who/when).
            column(name: "OBJECT_VERSION_NUMBER", type: "BIGINT UNSIGNED", defaultValue: "1") {
                constraints(nullable: true)
            }
            column(name: "CREATED_BY", type: "BIGINT UNSIGNED", defaultValue: "0") {
                constraints(nullable: true)
            }
            column(name: "CREATION_DATE", type: "DATETIME", defaultValueComputed: "CURRENT_TIMESTAMP")
            column(name: "LAST_UPDATED_BY", type: "BIGINT UNSIGNED", defaultValue: "0") {
                constraints(nullable: true)
            }
            column(name: "LAST_UPDATE_DATE", type: "DATETIME", defaultValueComputed: "CURRENT_TIMESTAMP")
        }
    }
    changeSet(author: 'superlee', id: '2019-07-18-oauth-ldap-history-add-remark') {
        setTableRemarks(tableName:"OAUTH_LDAP_HISTORY",remarks: "ldap同步历史记录表")
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/common/utils/CollectionUtils.java<|end_filename|>
package io.choerodon.iam.infra.common.utils;
import java.util.*;
/**
 * Static helpers for partitioning collections into fixed-size chunks.
 * <p>
 * Note: {@link #subList} returns {@link java.util.List#subList} views backed
 * by the original list, so structural changes to the original are reflected
 * in the partitions.
 *
 * @author superlee
 */
public class CollectionUtils {

    private CollectionUtils() {
        throw new IllegalStateException("cann`t instantiation class");
    }

    /**
     * Partition {@code originalList} into consecutive sublist views of at
     * most {@code volume} elements; the final partition may be shorter.
     *
     * @param originalList the list to partition
     * @param volume       maximum size of each partition
     * @return the partitions in order; empty when {@code volume < 1} or the
     *         list is empty
     */
    public static <T> List<List<T>> subList(List<T> originalList, int volume) {
        List<List<T>> partitions = new ArrayList<>();
        if (volume < 1) {
            return partitions;
        }
        int total = originalList.size();
        // Walk the list in strides of `volume`, capping the last chunk.
        for (int from = 0; from < total; from += volume) {
            int to = Math.min(from + volume, total);
            partitions.add(originalList.subList(from, to));
        }
        return partitions;
    }

    /**
     * Partition {@code originalSet} into independent {@link HashSet}s of at
     * most {@code volume} elements, in the set's iteration order; the final
     * partition may be shorter.
     *
     * @param originalSet the set to partition
     * @param volume      maximum size of each partition
     * @return the partitions; empty when {@code volume < 1} or the set is empty
     */
    public static <T> List<Set<T>> subSet(Set<T> originalSet, int volume) {
        List<Set<T>> partitions = new ArrayList<>();
        if (volume < 1) {
            return partitions;
        }
        int total = originalSet.size();
        if (total == 0) {
            return partitions;
        }
        int chunks = (total % volume == 0) ? total / volume : total / volume + 1;
        Iterator<T> source = originalSet.iterator();
        for (int i = 0; i < chunks; i++) {
            Set<T> chunk = new HashSet<>();
            // Drain up to `volume` elements into this chunk.
            for (int taken = 0; taken < volume && source.hasNext(); taken++) {
                chunk.add(source.next());
            }
            partitions.add(chunk);
        }
        return partitions;
    }
}
<|start_filename|>react/src/app/iam/containers/IAMIndex.js<|end_filename|>
import React from 'react';
import { Route, Switch } from 'react-router-dom';
import { inject } from 'mobx-react';
import { asyncLocaleProvider, asyncRouter, nomatch } from '@choerodon/boot';
// global 对应目录
const systemSetting = asyncRouter(() => import('./global/system-setting'));
const memberRole = asyncRouter(() => import('./global/member-role'));
const menuSetting = asyncRouter(() => import('./global/menu-setting'));
const organization = asyncRouter(() => import('./global/organization'));
const role = asyncRouter(() => import('./global/role'));
const roleLabel = asyncRouter(() => import('./global/role-label'));
const rootUser = asyncRouter(() => import('./global/root-user'));
const dashboardSetting = asyncRouter(() => import('./global/dashboard-setting'));
const projectType = asyncRouter(() => import('./global/project-type'));
// organization
const client = asyncRouter(() => import('./organization/client'));
const orgRole = asyncRouter(() => import('./organization/role'));
const ldap = asyncRouter(() => import('./organization/ldap'));
const passwordPolicy = asyncRouter(() => import('./organization/organization-setting/password-policy'));
const project = asyncRouter(() => import('./organization/project'));
const user = asyncRouter(() => import('./organization/user'));
const organizationSetting = asyncRouter(() => import('./organization/organization-setting'));
const application = asyncRouter(() => import('./organization/application'));
// project
const projectSetting = asyncRouter(() => import('./project/project-setting'));
// user
const password = asyncRouter(() => import('./user/password'));
const organizationInfo = asyncRouter(() => import('./user/organization-info'));
const projectInfo = asyncRouter(() => import('./user/project-info'));
const tokenManager = asyncRouter(() => import('./user/token-manager'));
const userInfo = asyncRouter(() => import('./user/user-info'));
const permissionInfo = asyncRouter(() => import('./user/permission-info'));
@inject('AppState')
class IAMIndex extends React.Component {
render() {
const { match, AppState } = this.props;
const langauge = AppState.currentLanguage;
const IntlProviderAsync = asyncLocaleProvider(langauge, () => import(`../locale/${langauge}`));
return (
<IntlProviderAsync>
<Switch>
<Route path={`${match.url}/member-role`} component={memberRole} />
<Route path={`${match.url}/menu-setting`} component={menuSetting} />
<Route path={`${match.url}/system-setting`} component={systemSetting} />
<Route path={`${match.url}/organization`} component={organization} />
<Route path={`${match.url}/role`} component={role} />
<Route path={`${match.url}/org-role`} component={orgRole} />
<Route path={`${match.url}/role-label`} component={roleLabel} />
<Route path={`${match.url}/root-user`} component={rootUser} />
<Route path={`${match.url}/dashboard-setting`} component={dashboardSetting} />
<Route path={`${match.url}/client`} component={client} />
<Route path={`${match.url}/ldap`} component={ldap} />
<Route path={`${match.url}/password-policy`} component={passwordPolicy} />
<Route path={`${match.url}/project`} component={project} />
<Route path={`${match.url}/user`} component={user} />
<Route path={`${match.url}/project-setting`} component={projectSetting} />
<Route path={`${match.url}/password`} component={password} />
<Route path={`${match.url}/organization-info`} component={organizationInfo} />
<Route path={`${match.url}/project-info`} component={projectInfo} />
<Route path={`${match.url}/token-manager`} component={tokenManager} />
<Route path={`${match.url}/user-info`} component={userInfo} />
<Route path={`${match.url}/permission-info`} component={permissionInfo} />
<Route path={`${match.url}/organization-setting`} component={organizationSetting} />
<Route path={`${match.url}/project-type`} component={projectType} />
<Route path={`${match.url}/application`} component={application} />
<Route path="*" component={nomatch} />
</Switch>
</IntlProviderAsync>
);
}
}
export default IAMIndex;
<|start_filename|>src/test/groovy/io/choerodon/iam/api/controller/v1/UserDashboardControllerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.controller.v1
import io.choerodon.core.exception.ExceptionResponse
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.infra.dto.DashboardDTO
import io.choerodon.iam.infra.dto.UserDashboardDTO
import io.choerodon.iam.infra.mapper.DashboardMapper
import io.choerodon.iam.infra.mapper.UserDashboardMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.web.client.TestRestTemplate
import org.springframework.context.annotation.Import
import spock.lang.Shared
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * Integration test for the /v1/home/dashboard endpoints (list + update).
 *
 * @author <EMAIL>
 */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class UserDashboardControllerSpec extends Specification {

    private static String path = "/v1/home/dashboard"

    // Spock runs setup()/cleanup() around every feature method; these
    // @Shared flags make the fixture insert run exactly once, and the
    // delete run only after "List" has flipped sharedCleanupDone to false.
    @Shared
    boolean sharedSetupDone = false;
    @Shared
    boolean sharedCleanupDone = true;

    @Autowired
    private UserDashboardMapper userDashboardMapper
    @Autowired
    private DashboardMapper dashboardMapper
    @Autowired
    private TestRestTemplate restTemplate

    // Fixture dashboards shared by all feature methods:
    // 3 site-level, 4 project-level and 5 organization-level entries.
    @Shared
    List<DashboardDTO> dashboardList = new ArrayList<>()

    void setup() {
        if (!sharedSetupDone) {
            given: '初始化dashboard'
            // Site-level dashboards get explicit ids 1000..1002 (the other
            // levels rely on database-generated ids).
            for (int i = 0; i < 3; i++) {
                DashboardDTO dashboard = new DashboardDTO();
                dashboard.setId(1000 + i);
                dashboard.setCode("0" + i);
                dashboard.setDescription("site-test-desc-" + i);
                dashboard.setName("site-test-name-" + i)
                dashboard.setNamespace("iam")
                dashboard.setIcon("IAM")
                dashboard.setLevel("site")
                dashboard.setSort(i + 1)
                dashboard.setTitle("site-test-title-" + i)
                dashboardList.add(dashboard)
            }
            for (int i = 0; i < 4; i++) {
                DashboardDTO dashboard = new DashboardDTO();
                dashboard.setCode("1" + i);
                dashboard.setDescription("project-test-desc-" + i);
                dashboard.setName("project-test-name-" + i)
                dashboard.setNamespace("iam")
                dashboard.setIcon("IAM")
                dashboard.setLevel("project")
                dashboard.setSort(i + 1)
                dashboard.setTitle("project-test-title-" + i)
                dashboardList.add(dashboard)
            }
            for (int i = 0; i < 5; i++) {
                DashboardDTO dashboard = new DashboardDTO();
                dashboard.setCode("2" + i);
                dashboard.setDescription("org-test-desc-" + i);
                dashboard.setName("org-test-name-" + i)
                dashboard.setNamespace("iam")
                dashboard.setIcon("IAM")
                dashboard.setLevel("organization")
                dashboard.setSort(i + 1)
                dashboard.setTitle("org-test-title-" + i)
                dashboardList.add(dashboard)
            }
            when: '批量插入dashboard'
            def count = 0;
            for (DashboardDTO dashboard : dashboardList) {
                count = count + dashboardMapper.insert(dashboard)
            }
            sharedSetupDone = true
            then: '批量插入成功'
            // 3 + 4 + 5 = 12 fixture rows
            count == 12
        }
    }

    def cleanup() {
        if (!sharedCleanupDone) {
            when: '批量删除dashboard'
            def count = 0;
            for (DashboardDTO dashboard : dashboardList) {
                count = count + dashboardMapper.deleteByPrimaryKey(dashboard)
            }
            then: '批量删除成功'
            count == 12
        }
    }

    def "List"() {
        given: "单页查询dashboard list"
        Map<String, Object> paramMap = new HashMap();
        paramMap.put("level", "site")
        paramMap.put("sourceId", 0)
        when: "查询site层"
        def entity =
                restTemplate.getForEntity(path + '?level={level}&source_id={sourceId}', String, paramMap)
        // Arm the one-time cleanup now that the fixture has been exercised.
        sharedCleanupDone = false
        then: '查询site成功'
        entity.statusCode.is2xxSuccessful()
        when: "查询project层"
        paramMap.put("level", "project")
        entity =
                restTemplate.getForEntity(path + '?level={level}&source_id={sourceId}', String, paramMap)
        // NOTE(review): label says 'site' but this block verifies the
        // project-level query.
        then: '查询site成功'
        entity.statusCode.is2xxSuccessful()
    }

    def "Update"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("level", "project")
        paramsMap.put("source_id", 1)
        def userDashboard1 = dashboardList.get(1)
        def userDashboard2 = dashboardList.get(2)
        def userDashboardDTO1 = new UserDashboardDTO()
        userDashboardDTO1.setObjectVersionNumber(2)
        userDashboardDTO1.setDashboardId(userDashboard1.getId())
        userDashboardDTO1.setLevel(userDashboard1.getLevel())
        userDashboardDTO1.setDashboardCode(userDashboard1.getCode())
        userDashboardDTO1.setSort(userDashboard1.getSort())
        userDashboardDTO1.setDashboardDescription(userDashboard1.getDescription())
        userDashboardDTO1.setDashboardIcon(userDashboard1.getIcon())
        userDashboardDTO1.setDashboardTitle(userDashboard1.getTitle())
        // NOTE(review): reads getNeedRoles() from the DTO being built (always
        // null) rather than from userDashboard1, unlike the DTO2 block below —
        // confirm whether intentional.
        userDashboardDTO1.setNeedRoles(userDashboardDTO1.getNeedRoles())
        userDashboardDTO1.setDashboardName("update-1")
        def userDashboardDTO2 = new UserDashboardDTO()
        userDashboardDTO2.setObjectVersionNumber(2)
        userDashboardDTO2.setDashboardId(userDashboard2.getId())
        userDashboardDTO2.setLevel(userDashboard2.getLevel())
        userDashboardDTO2.setSort(userDashboard2.getSort())
        userDashboardDTO2.setDashboardCode(userDashboard2.getCode())
        userDashboardDTO2.setDashboardDescription(userDashboard2.getDescription())
        userDashboardDTO2.setDashboardIcon(userDashboard2.getIcon())
        userDashboardDTO2.setDashboardTitle(userDashboard2.getTitle())
        userDashboardDTO2.setNeedRoles(userDashboard2.getNeedRoles())
        userDashboardDTO2.setDashboardName("update-2")
        def userDashboardDTOs = new ArrayList<UserDashboardDTO>()
        userDashboardDTOs.add(userDashboardDTO1)
        userDashboardDTOs.add(userDashboardDTO2)
        when: "调用方法"
        // Invalid level should be rejected with error.level.illegal.
        paramsMap.put("level", "error")
        def entity = restTemplate.postForEntity(path + "?level={level}&source_id={source_id}", userDashboardDTOs, ExceptionResponse, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.level.illegal")
        when: "调用方法"
        // Valid level: both DTOs should be accepted.
        paramsMap.put("level", "site")
        entity = restTemplate.postForEntity(path + "?level={level}&source_id={source_id}", userDashboardDTOs, String, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().size() == 2
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/ProjectTypeDTO.java<|end_filename|>
package io.choerodon.iam.api.dto;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.Pattern;
import io.swagger.annotations.ApiModelProperty;
/**
 * Transfer object describing a project type: a validated code, a display
 * name, an optional description, and the optimistic-locking version number.
 *
 * @author superlee
 */
public class ProjectTypeDTO {

    /** Code must start with a letter; letters, digits, '-', '_', '.' and '/' allowed after. */
    private static final String CODE_REGULAR_EXPRESSION
            = "^[a-zA-Z][a-zA-Z0-9-_.//]*$";

    private Long id;

    @ApiModelProperty(value = "项目类型编码")
    @Pattern(regexp = CODE_REGULAR_EXPRESSION, message = "error.code.illegal")
    @NotEmpty(message = "error.code.empty")
    private String code;

    @ApiModelProperty(value = "项目类型名称")
    @NotEmpty(message = "error.name.empty")
    private String name;

    @ApiModelProperty(value = "项目类型描述")
    private String description;

    private Long objectVersionNumber;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public Long getObjectVersionNumber() {
        return objectVersionNumber;
    }

    public void setObjectVersionNumber(Long objectVersionNumber) {
        this.objectVersionNumber = objectVersionNumber;
    }

    @Override
    public String toString() {
        return "ProjectTypeDTO{" +
                "id=" + id +
                ", code='" + code + '\'' +
                ", name='" + name + '\'' +
                ", description='" + description + '\'' +
                ", objectVersionNumber=" + objectVersionNumber +
                '}';
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/OrganizationUserService.java<|end_filename|>
package io.choerodon.iam.app.service;
import java.util.List;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.api.dto.UserSearchDTO;
import io.choerodon.iam.infra.dto.LdapErrorUserDTO;
import io.choerodon.iam.infra.dto.UserDTO;
/**
 * Operations on the users belonging to an organization: CRUD, password
 * reset, enable/disable/unlock, and LDAP batch import.
 *
 * @author superlee
 * @since 2018/3/26
 */
public interface OrganizationUserService {

    /**
     * Create a user. {@code checkPassword} presumably toggles password-policy
     * validation before creation — confirm in the implementation.
     */
    UserDTO create(UserDTO userDTO, boolean checkPassword);

    /** Page through users matching the given search criteria. */
    PageInfo<UserDTO> pagingQuery(PageRequest pageRequest, UserSearchDTO user);

    /** Update an existing user. */
    UserDTO update(UserDTO userDTO);

    /** Reset the given user's password within the organization. */
    UserDTO resetUserPassword(Long organizationId, Long userId);

    /** Delete the user with the given id from the organization. */
    void delete(Long organizationId, Long id);

    /** Query a single user of the organization by id. */
    UserDTO query(Long organizationId, Long id);

    /** Unlock a locked user account. */
    UserDTO unlock(Long organizationId, Long userId);

    /** Enable the user account. */
    UserDTO enableUser(Long organizationId, Long userId);

    /** Disable the user account. */
    UserDTO disableUser(Long organizationId, Long userId);

    /**
     * Batch-create users from an LDAP sync, emitting an event.
     *
     * @param insertUsers users to insert
     * @return the users that failed to be created, as LDAP error records
     */
    List<LdapErrorUserDTO> batchCreateUsers(List<UserDTO> insertUsers);

    /** List the ids of every user in the organization. */
    List<Long> listUserIds(Long organizationId);
}
<|start_filename|>react/src/app/iam/stores/noLevel/register-org/RegisterOrgStore.js<|end_filename|>
import { action, computed, observable } from 'mobx';
import { axios, store } from '@choerodon/boot';
// Dedicated axios instance so the (pre-login) registration endpoints get
// JSON headers and the access token without touching the globally
// configured axios instance.
const instance = axios.create();
instance.interceptors.request.use(
  (config) => {
    const newConfig = config;
    newConfig.headers['Content-Type'] = 'application/json';
    newConfig.headers.Accept = 'application/json';
    // Attach the token only when one exists (registration may be anonymous).
    const accessToken = Choerodon.getAccessToken();
    if (accessToken) {
      newConfig.headers.Authorization = accessToken;
    }
    return newConfig;
  },
  (err) => {
    const error = err;
    return Promise.reject(error);
  },
);
// Unwrap the response body on success.
// NOTE(review): the error handler only logs and returns undefined, which
// turns a rejected request into a *resolved* promise with value undefined
// for callers — presumably deliberate best-effort handling; confirm before
// relying on .catch() anywhere downstream.
instance.interceptors.response.use(res => res.data,
  (error) => {
    window.console.log(error);
  });

// Store wrapping the organization self-registration API: availability
// checks (org code, login name, email), email captcha, and final submit.
@store('RegisterOrgStore')
class RegisterOrgStore {
  checkCode = value => instance.post('/org/v1/organizations/check', JSON.stringify({ code: value }));

  checkLoginname = loginName => instance.post('/iam/v1/users/check', JSON.stringify({ loginName }));

  checkEmailAddress = email => instance.post('/iam/v1/users/check', JSON.stringify({ email }));

  sendCaptcha = email => instance.get(`/org/v1/organizations/send/email_captcha?email=${email}`);

  registerOrg = body => instance.post('/org/v1/organizations/register', JSON.stringify(body));

  submitAccount = (email, captcha) => instance.post(`/org/v1/organizations/check/email_captcha?email=${email}&captcha=${captcha}`);
}

// Module-level singleton, matching the other stores in this project.
const registerOrgStore = new RegisterOrgStore();
export default registerOrgStore;
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/LookupController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.constant.PageConstant;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.base.BaseController;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.app.service.LookupService;
import io.choerodon.iam.infra.dto.LookupDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.choerodon.swagger.annotation.CustomPageRequest;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import javax.validation.Valid;
/**
 * REST endpoints for managing lookup codes ("fast codes") and their values.
 *
 * @author superlee
 */
@RestController
@RequestMapping(value = "/v1/lookups")
public class LookupController extends BaseController {

    private LookupService lookupService;

    public LookupController(LookupService lookupService) {
        this.lookupService = lookupService;
    }

    /**
     * Create a lookup code.
     *
     * @param lookupDTO the lookup to create
     * @return the created lookup
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "创建快码")
    @PostMapping
    public ResponseEntity<LookupDTO> create(@RequestBody @Valid LookupDTO lookupDTO) {
        return new ResponseEntity<>(lookupService.create(lookupDTO), HttpStatus.OK);
    }

    /**
     * Delete a lookup by id.
     *
     * @param id lookup id
     * @return empty 200 response
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "删除快码")
    @DeleteMapping(value = "/{id}")
    public ResponseEntity delete(@PathVariable Long id) {
        lookupService.delete(id);
        return new ResponseEntity(HttpStatus.OK);
    }

    /**
     * Update a lookup; the path id overrides any id in the body.
     *
     * @param id        lookup id
     * @param lookupDTO new field values
     * @return the updated lookup
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "修改快码")
    @PutMapping(value = "/{id}")
    public ResponseEntity<LookupDTO> update(@PathVariable Long id,
                                            @RequestBody @Valid LookupDTO lookupDTO) {
        lookupDTO.setId(id);
        return new ResponseEntity<>(lookupService.update(lookupDTO), HttpStatus.OK);
    }

    /**
     * Page through lookups, optionally filtered by the example DTO and a
     * full-text search param.
     *
     * @param pageRequest paging/sorting (defaults to id desc)
     * @param lookupDTO   example object used as a filter
     * @param param       optional full-text search parameter
     * @return a page of lookups
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "分页查询快码")
    @GetMapping
    @CustomPageRequest
    public ResponseEntity<PageInfo<LookupDTO>> list(@ApiIgnore
                                                    @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                    LookupDTO lookupDTO,
                                                    @RequestParam(required = false) String param) {
        return new ResponseEntity<>(lookupService.pagingQuery(pageRequest, lookupDTO, param), HttpStatus.OK);
    }

    /**
     * Query a lookup (including its values) by code.
     *
     * @param code the lookup code, passed as query parameter "value"
     * @return the matching lookup with its values
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "通过code查询快码")
    @GetMapping(value = "/code")
    public ResponseEntity<LookupDTO> listByCode(@RequestParam(name = "value") String code) {
        return new ResponseEntity<>(lookupService.listByCodeWithLookupValues(code), HttpStatus.OK);
    }

    /**
     * Query a lookup by id.
     *
     * @param id lookup id
     * @return the matching lookup
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "通过id查询快码")
    @GetMapping(value = "/{id}")
    public ResponseEntity<LookupDTO> queryById(@PathVariable Long id) {
        return new ResponseEntity<>(lookupService.queryById(id), HttpStatus.OK);
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/BookMarkService.java<|end_filename|>
package io.choerodon.iam.app.service;
import io.choerodon.iam.infra.dto.BookMarkDTO;
import java.util.List;
/**
 * Bookmark management for the current user.
 *
 * @author dengyouquan
 **/
public interface BookMarkService {

    /**
     * Create a bookmark.
     *
     * @param bookMarkDTO the bookmark to create
     * @return the created bookmark
     */
    BookMarkDTO create(BookMarkDTO bookMarkDTO);

    /**
     * Update every bookmark in the given list.
     *
     * @param bookMarkDTOS bookmarks to update
     * @return the updated bookmarks
     */
    List<BookMarkDTO> updateAll(List<BookMarkDTO> bookMarkDTOS);

    /**
     * List all bookmarks belonging to the current user.
     *
     * @return the user's bookmarks
     */
    List<BookMarkDTO> list();

    /** Delete the bookmark with the given id. */
    void delete(Long id);
}
<|start_filename|>react/src/app/iam/stores/organization/role/index.js<|end_filename|>
/**
 * create by Qyellow on 2018/4/3
 */
// Barrel file: forward RoleStore's default export directly.
export { default } from './RoleStore';
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/DashboardServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.api.dto.DashboardPositionDTO;
import io.choerodon.iam.app.service.DashboardService;
import io.choerodon.iam.infra.dto.DashboardDTO;
import io.choerodon.iam.infra.dto.DashboardRoleDTO;
import io.choerodon.iam.infra.dto.UserDashboardDTO;
import io.choerodon.iam.infra.exception.NotExistedException;
import io.choerodon.iam.infra.exception.UpdateExcetion;
import io.choerodon.iam.infra.mapper.DashboardMapper;
import io.choerodon.iam.infra.mapper.DashboardRoleMapper;
import io.choerodon.iam.infra.mapper.UserDashboardMapper;
import org.modelmapper.ModelMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import java.util.List;
/**
 * Dashboard management: update (with optional role rebinding), query by id,
 * paged listing, and resetting user customisations.
 *
 * @author <EMAIL>
 */
@Service("dashboardService")
public class DashboardServiceImpl implements DashboardService {

    // Fix: log under the implementation class. Previously this used
    // DashboardService.class, which mislabelled the origin of log lines.
    private static final Logger LOGGER = LoggerFactory.getLogger(DashboardServiceImpl.class);

    private final ObjectMapper objectMapper = new ObjectMapper();

    private DashboardMapper dashboardMapper;

    private DashboardRoleMapper dashboardRoleMapper;

    private UserDashboardMapper userDashboardMapper;

    private final ModelMapper modelMapper = new ModelMapper();

    public DashboardServiceImpl(DashboardMapper dashboardMapper,
                                DashboardRoleMapper dashboardRoleMapper,
                                UserDashboardMapper userDashboardMapper) {
        this.dashboardMapper = dashboardMapper;
        this.dashboardRoleMapper = dashboardRoleMapper;
        this.userDashboardMapper = userDashboardMapper;
    }

    /**
     * Update a dashboard and, unless skipped, rebind its role codes.
     *
     * @param dashboardId  id of the dashboard to update
     * @param dashboardDTO new field values; the position DTO is serialised to JSON
     * @param updateRole   when false AND the dashboard does not need roles,
     *                     role rebinding is skipped
     * @return the updated dashboard including its current role codes
     * @throws UpdateExcetion when no row was updated (dashboard missing)
     */
    @Override
    public DashboardDTO update(Long dashboardId, DashboardDTO dashboardDTO, Boolean updateRole) {
        dashboardDTO.setId(dashboardId);
        dashboardDTO.setPosition(convertPositionToJson(dashboardDTO.getPositionDTO()));
        if (dashboardMapper.updateByPrimaryKeySelective(dashboardDTO) != 1) {
            throw new UpdateExcetion("error.dashboard.not.exist");
        }
        DashboardDTO dashboard =
                modelMapper.map(dashboardMapper.selectByPrimaryKey(dashboardId), DashboardDTO.class);
        if (!updateRole && dashboard.getNeedRoles() != null && !dashboard.getNeedRoles()) {
            return dashboard;
        }
        List<String> roleCodes = dashboardDTO.getRoleCodes();
        if (roleCodes != null && !roleCodes.isEmpty()) {
            // Replace-all strategy: drop existing bindings, then re-insert.
            dashboardRoleMapper.deleteByDashboardCode(dashboard.getCode());
            for (String role : roleCodes) {
                DashboardRoleDTO dto = new DashboardRoleDTO();
                dto.setRoleCode(role);
                dto.setDashboardCode(dashboard.getCode());
                dashboardRoleMapper.insertSelective(dto);
            }
        }
        dashboard.setRoleCodes(dashboardRoleMapper.selectRoleCodes(dashboard.getCode()));
        return dashboard;
    }

    /**
     * Load a dashboard by id.
     *
     * @param dashboardId id of the dashboard
     * @return the dashboard
     * @throws NotExistedException when no dashboard with the id exists
     */
    @Override
    public DashboardDTO query(Long dashboardId) {
        // Fix: fetch directly. The previous implementation constructed a
        // throwaway DTO and set its id, then immediately overwrote the
        // reference with the mapper result (dead code).
        DashboardDTO dashboard = dashboardMapper.selectByPrimaryKey(dashboardId);
        if (dashboard == null) {
            throw new NotExistedException("error.dashboard.not.exist");
        }
        return modelMapper.map(dashboard, DashboardDTO.class);
    }

    /**
     * Page through dashboards matching the example DTO / full-text param,
     * attaching each dashboard's role codes.
     */
    @Override
    public PageInfo<DashboardDTO> list(DashboardDTO dashboardDTO, PageRequest pageRequest, String param) {
        PageInfo<DashboardDTO> pageInfo =
                PageHelper
                        .startPage(pageRequest.getPage(), pageRequest.getSize())
                        .doSelectPageInfo(() -> dashboardMapper.fulltextSearch(dashboardDTO, param));
        pageInfo.getList().forEach(dashboard -> {
            List<String> roleCodes = dashboardMapper.selectRoleCodesByDashboard(dashboard.getCode(), dashboard.getLevel());
            dashboard.setRoleCodes(roleCodes);
        });
        return pageInfo;
    }

    /**
     * Remove all user customisations of the given dashboard, so every user
     * falls back to the defaults.
     */
    @Override
    public void reset(Long dashboardId) {
        UserDashboardDTO deleteCondition = new UserDashboardDTO();
        deleteCondition.setSourceId(dashboardId);
        long num = userDashboardMapper.delete(deleteCondition);
        LOGGER.info("reset userDashboard by dashboardId: {}, delete num: {}", dashboardId, num);
    }

    /**
     * Serialise the position DTO to JSON, defaulting missing coordinates and
     * sizes to 0. Returns null when there is nothing meaningful to store
     * (no DTO, or neither width nor height set) or serialisation fails.
     */
    private String convertPositionToJson(DashboardPositionDTO positionDTO) {
        if (positionDTO == null ||
                (positionDTO.getHeight() == null && positionDTO.getWidth() == null)) {
            return null;
        }
        if (positionDTO.getPositionX() == null) {
            positionDTO.setPositionX(0);
        }
        if (positionDTO.getPositionY() == null) {
            positionDTO.setPositionY(0);
        }
        if (positionDTO.getHeight() == null) {
            positionDTO.setHeight(0);
        }
        if (positionDTO.getWidth() == null) {
            positionDTO.setWidth(0);
        }
        try {
            return objectMapper.writeValueAsString(positionDTO);
        } catch (JsonProcessingException e) {
            // Best effort: a failed serialisation only loses the layout.
            LOGGER.warn("error.userDashboardService.convertPositionToJson.JsonProcessingException", e);
            return null;
        }
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/payload/SystemSettingEventPayload.java<|end_filename|>
package io.choerodon.iam.api.dto.payload;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import java.io.Serializable;
/**
 * Payload carried through a saga to propagate system-setting changes;
 * field meanings mirror {@link io.choerodon.iam.api.dto.SystemSettingDTO}.
 *
 * @author zmf
 */
@Setter
@Getter
@NoArgsConstructor
@AllArgsConstructor
public class SystemSettingEventPayload implements Serializable {
    // Browser favicon URL
    private String favicon;
    // System logo image URL
    private String systemLogo;
    // Browser/page title
    private String systemTitle;
    // Display name of the platform
    private String systemName;
    // Default password assigned to newly created users
    private String defaultPassword;
    // Default UI language code
    private String defaultLanguage;
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/AccessTokenService.java<|end_filename|>
package io.choerodon.iam.app.service;
import java.util.List;
import java.util.Map;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.infra.dto.AccessTokenDTO;
/**
 * Service for managing OAuth access tokens (paged query, manual revocation,
 * batch revocation, and expired-token cleanup).
 *
 * @author Eugen
 **/
public interface AccessTokenService {
    /**
     * Pages through the current user's valid tokens.
     *
     * @param pageRequest  paging parameters
     * @param clientName   optional client-name filter
     * @param currentToken the caller's own token (so it can be marked/excluded)
     * @return page of tokens
     */
    PageInfo<AccessTokenDTO> pagedSearch(PageRequest pageRequest, String clientName, String currentToken);

    /**
     * Manually invalidates an existing token of the user.
     *
     * @param tokenId      id of the token to revoke
     * @param currentToken the caller's current token
     */
    void delete(String tokenId, String currentToken);

    /**
     * Invalidates a batch of the user's tokens.
     *
     * @param tokenIds     ids of the tokens to revoke
     * @param currentToken the caller's current token
     */
    void deleteList(List<String> tokenIds, String currentToken);

    /**
     * Deletes all expired tokens.
     *
     * @param map scheduler parameters — exact keys not visible here; confirm against the caller
     */
    void deleteAllExpiredToken(Map<String, Object> map);
}
<|start_filename|>react/src/app/iam/dashboard/OnlineUsers/index.js<|end_filename|>
import React, { Component } from 'react';
import { withRouter } from 'react-router-dom';
import { FormattedMessage, injectIntl } from 'react-intl';
import { inject, observer } from 'mobx-react';
import ReactEcharts from 'echarts-for-react';
import { WSHandler } from '@choerodon/boot';
import { Button, Icon, Select, Spin } from 'choerodon-ui';
import './index.scss';
const intlPrefix = 'dashboard.onlineusers';
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class OnlineUsers extends Component {
  constructor(props) {
    super(props);
    // `info` feeds the echarts line chart: parallel arrays of hour labels
    // and online-user counts pushed over the websocket.
    this.state = {
      loading: true,
      info: {
        time: [],
        data: [],
      },
    };
  }

  // Builds the echarts option for the online-users area chart.
  // Axes are hidden; the tooltip shows "<hour>:00  count persons".
  getOption() {
    const { info } = this.state;
    const { intl: { formatMessage } } = this.props;
    return {
      tooltip: {
        trigger: 'axis',
        confine: true,
        formatter: `{b}:00<br/>${formatMessage({ id: 'dashboard.onlineusers.count' })}: {c}${formatMessage({ id: 'dashboard.onlineusers.persons' })}`,
        backgroundColor: '#FFFFFF',
        borderWidth: 1,
        borderColor: '#DDDDDD',
        extraCssText: 'box-shadow: 0 2px 4px 0 rgba(0,0,0,0.20)',
        textStyle: {
          fontSize: 13,
          color: '#000000',
        },
      },
      legend: {
        show: false,
      },
      grid: {
        left: '-10',
        bottom: '0px',
        height: '80%',
        width: '100%',
        containLabel: true,
      },
      xAxis: [
        {
          type: 'category',
          show: false,
          boundaryGap: false,
          data: info ? info.time : [],
        },
      ],
      yAxis: [
        {
          type: 'value',
          show: false,
          minInterval: 1,
        },
      ],
      series: [
        {
          name: formatMessage({ id: 'dashboard.onlineusers.count' }),
          type: 'line',
          areaStyle: {
            color: 'rgba(82,102,212,0.80)',
          },
          smooth: true,
          symbolSize: 0,
          data: info ? info.data : [],
          lineStyle: {
            width: 0,
          },
        },
      ],
    };
  }

  // Websocket callback: store the latest time/count series for the chart.
  handleMessage = (data) => {
    this.setState({
      info: {
        time: data.time,
        data: data.data,
      },
    });
  }

  // Renders the dashboard body: current online count, the trend chart, and
  // today's total visits; shows a spinner until the first message arrives.
  getContent = (data) => {
    let content;
    if (data) {
      content = (
        <React.Fragment>
          <div className="c7n-iam-dashboard-onlineuser-main">
            <div className="c7n-iam-dashboard-onlineuser-main-current">
              <span>{data ? data.CurrentOnliners : 0}</span><span>人</span>
            </div>
            <ReactEcharts
              style={{ height: '60%', width: '100%' }}
              option={this.getOption()}
            />
          </div>
          <div className="c7n-iam-dashboard-onlineuser-bottom">
            日总访问量: {data ? data.numberOfVisitorsToday : 0}
          </div>
        </React.Fragment>
      );
    } else {
      content = <Spin spinning />;
    }
    return content;
  }

  render() {
    // NOTE(review): `loading` is read here but never updated after mount —
    // looks vestigial; confirm before removing.
    const { loading } = this.state;
    return (
      <WSHandler
        messageKey="choerodon:msg:online-info"
        onMessage={this.handleMessage}
      >
        {
          data => (
            <div className="c7n-iam-dashboard-onlineuser" ref={(e) => { this.chartRef = e; }}>
              {this.getContent(data)}
            </div>
          )
        }
      </WSHandler>
    );
  }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/api/controller/v1/ClientControllerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.controller.v1
import com.github.pagehelper.PageInfo
import io.choerodon.core.exception.ExceptionResponse
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.infra.dto.ClientDTO
import io.choerodon.iam.infra.mapper.ClientMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.web.client.TestRestTemplate
import org.springframework.context.annotation.Import
import org.springframework.http.HttpEntity
import org.springframework.http.HttpMethod
import spock.lang.Shared
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * Integration tests for the organization-level OAuth client endpoints
 * under /v1/organizations/{organization_id}/clients.
 *
 * @author dengyouquan
 */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class ClientControllerSpec extends Specification {
    private static String BASE_PATH = "/v1/organizations/{organization_id}/clients"
    @Autowired
    private TestRestTemplate restTemplate
    @Autowired
    private ClientMapper clientMapper
    // Fixture clients inserted once for the whole spec (see setup()).
    @Shared
    private List<ClientDTO> clientDOList = new ArrayList<>();
    @Shared
    def notExistOrganizationId = 300
    @Shared
    def organizationId = 1
    // Deliberately invalid payloads used by the error-path cases.
    @Shared
    def orgIdNotExistsClientDTO
    @Shared
    def nameExistsClientDTO
    // Poor-man's @Shared setup/cleanup guards: insert fixtures only on the
    // first setup() call and delete them only after the last feature flips isClear.
    @Shared
    def isInit = false
    @Shared
    def isClear = true

    def setup() {
        if (!isInit) {
            given: "初始化数据,切记client数据库有两条localhost和client数据,勿重合"
            // Five clients in org 1 ("choerodon0".."choerodon4") ...
            for (int i = 0; i < 5; i++) {
                ClientDTO clientDO = new ClientDTO()
                clientDO.setName("choerodon" + i)
                clientDO.setOrganizationId(1)
                clientDO.setAuthorizedGrantTypes("password,implicit,client_credentials,authorization_code,refresh_token")
                clientDO.setSecret("secret")
                clientDOList.add(clientDO)
            }
            // ... and five in org 2 ("client0".."client4").
            for (int i = 0; i < 5; i++) {
                ClientDTO clientDO = new ClientDTO()
                clientDO.setName("client" + i)
                clientDO.setOrganizationId(2)
                clientDO.setAuthorizedGrantTypes("password,implicit,client_credentials,authorization_code,refresh_token")
                clientDO.setSecret("secret")
                clientDOList.add(clientDO)
            }
            and: "构造异常插入数据"
            orgIdNotExistsClientDTO = new ClientDTO()
            orgIdNotExistsClientDTO.setName("error")
            orgIdNotExistsClientDTO.setOrganizationId(notExistOrganizationId)
            orgIdNotExistsClientDTO.setAuthorizedGrantTypes("password,implicit,client_credentials,authorization_code,refresh_token")
            orgIdNotExistsClientDTO.setSecret("secret")
            nameExistsClientDTO = new ClientDTO()
            nameExistsClientDTO.setName("client0")
            nameExistsClientDTO.setAuthorizedGrantTypes("password,implicit,client_credentials,authorization_code,refresh_token")
            nameExistsClientDTO.setSecret("secret")
            when: "批量插入"
            int count = 0
            for (ClientDTO dto : clientDOList) {
                clientMapper.insert(dto)
                count++
            }
            then: "校验是否插入成功"
            count == 10
            isInit = true
        }
    }

    def cleanup() {
        if (!isClear) {
            when: '批量删除dashboard'
            def count = 0;
            for (ClientDTO clientDO : clientDOList) {
                count = count + clientMapper.deleteByPrimaryKey(clientDO)
            }
            then: '批量删除成功'
            count == 10
        }
    }

    // POST /clients: happy path, unknown organization, duplicate name.
    def "Create"() {
        given: "构造请求参数"
        def paramMap = new HashMap<String, Object>()
        def tempOrganizationId = 1
        paramMap.put("organization_id", tempOrganizationId)
        def clientDTO = new ClientDTO()
        clientDTO.setName("insertclient")
        clientDTO.setOrganizationId(tempOrganizationId)
        clientDTO.setAuthorizedGrantTypes("password,implicit,client_credentials,authorization_code,refresh_token")
        clientDTO.setSecret("secret")
        when: "调用插入方法"
        def entity = restTemplate.postForEntity(BASE_PATH, clientDTO, ClientDTO, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getName().equals(clientDTO.getName())
        entity.getBody().getOrganizationId().equals(clientDTO.getOrganizationId())
        entity.getBody().getSecret().equals(clientDTO.getSecret())
        entity.getBody().getAuthorizedGrantTypes().equals(clientDTO.getAuthorizedGrantTypes())
        clientMapper.deleteByPrimaryKey(entity.getBody().getId())
        when: "调用插入方法[异常-组织id不存在]"
        paramMap.put("organization_id", notExistOrganizationId)
        entity = restTemplate.postForEntity(BASE_PATH, orgIdNotExistsClientDTO, ExceptionResponse, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.organization.not.exist")
        when: "调用插入方法[异常-name重复]"
        paramMap.put("organization_id", organizationId)
        entity = restTemplate.postForEntity(BASE_PATH, nameExistsClientDTO, ExceptionResponse, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.db.duplicateKey")
    }

    // GET /clients/createInfo: returns a pre-filled client template.
    def "CreateInfo"() {
        given: "准备参数"
        def paramMap = new HashMap<String, Object>()
        def organizationId = 1L
        paramMap.put("organization_id", organizationId)
        when: "调用createInfo方法"
        def entity = restTemplate.getForEntity(BASE_PATH + "/createInfo", ClientDTO, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
    }

    // POST /clients/{client_id}: every validation failure, then a successful update.
    def "Update"() {
        given: "构造参数"
        def updateClientDTO = clientDOList.get(4)
        def paramMap = new HashMap<String, Object>()
        paramMap.put("organization_id", organizationId)
        paramMap.put("client_id", updateClientDTO.getId())
        when: "调用更新方法[异常-名字为空]"
        updateClientDTO.setName(null)
        def entity = restTemplate.postForEntity(BASE_PATH + "/{client_id}", updateClientDTO, ExceptionResponse, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.clientName.empty")
        when: "调用更新方法[异常-client_id不存在]"
        updateClientDTO.setName("update-client")
        paramMap.put("client_id", 200)
        entity = restTemplate.postForEntity(BASE_PATH + "/{client_id}", updateClientDTO, ExceptionResponse, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.client.not.existed")
        when: "调用更新方法[异常-org_id不相同]"
        paramMap.put("organization_id", notExistOrganizationId)
        paramMap.put("client_id", updateClientDTO.getId())
        entity = restTemplate.postForEntity(BASE_PATH + "/{client_id}", updateClientDTO, ExceptionResponse, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        // entity.getBody().getCode().equals("error.organizationId.not.same")
        when: "调用更新方法[异常-client_name存在]"
        paramMap.put("organization_id", organizationId)
        updateClientDTO.setName("localhost")
        entity = restTemplate.postForEntity(BASE_PATH + "/{client_id}", updateClientDTO, ExceptionResponse, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.client.update")
        when: "调用更新方法[异常-AdditionalInformation格式异常]"
        updateClientDTO.setName("update-client")
        updateClientDTO.setAdditionalInformation("dfas")
        entity = restTemplate.postForEntity(BASE_PATH + "/{client_id}", updateClientDTO, ExceptionResponse, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.client.additionalInfo.notJson")
        when: "调用更新方法[异常-版本号没有]"
        updateClientDTO.setAdditionalInformation(null)
        updateClientDTO.setOrganizationId(organizationId)
        entity = restTemplate.postForEntity(BASE_PATH + "/{client_id}", updateClientDTO, ExceptionResponse, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        // entity.getBody().getCode().equals("error.client.update")
        when: "调用更新方法"
        updateClientDTO.setObjectVersionNumber(1)
        entity = restTemplate.postForEntity(BASE_PATH + "/{client_id}", updateClientDTO, ClientDTO, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        // entity.getBody().getId().equals(updateClientDTO.getId())
        // entity.getBody().getSecret().equals(updateClientDTO.getSecret())
        // entity.getBody().getAuthorizedGrantTypes().equals(updateClientDTO.getAuthorizedGrantTypes())
        // entity.getBody().getName().equals(updateClientDTO.getName())
    }

    // DELETE /clients/{client_id}: org mismatch error, then successful delete.
    def "Delete"() {
        given: "构造参数"
        def deleteClientDTO = clientDOList.get(0)
        def paramMap = new HashMap<String, Object>()
        paramMap.put("organization_id", 1)
        paramMap.put("client_id", deleteClientDTO.getId())
        def httpEntity = new HttpEntity<Object>()
        when: "调用方法-[异常-orgid不相同]"
        paramMap.put("organization_id", notExistOrganizationId)
        def entity = restTemplate.exchange(BASE_PATH + "/{client_id}", HttpMethod.DELETE, httpEntity, ExceptionResponse, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.organizationId.not.same")
        when: "调用方法"
        paramMap.put("organization_id", organizationId)
        entity = restTemplate.exchange(BASE_PATH + "/{client_id}", HttpMethod.DELETE, httpEntity, Boolean, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        // entity.getBody()
    }

    // GET /clients/{client_id}: not-found, org mismatch, then successful query.
    def "Query"() {
        given: "构造参数"
        def queryClientDTO = clientDOList.get(3)
        def paramMap = new HashMap<String, Object>()
        paramMap.put("organization_id", 1)
        paramMap.put("client_id", queryClientDTO.getId())
        when: "调用方法-[异常-client_id不存在]"
        paramMap.put("client_id", 200)
        def entity = restTemplate.getForEntity(BASE_PATH + "/{client_id}", ExceptionResponse, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.client.not.existed")
        when: "调用方法-[异常-org_id不相同]"
        paramMap.put("organization_id", notExistOrganizationId)
        paramMap.put("client_id", queryClientDTO.getId())
        entity = restTemplate.getForEntity(BASE_PATH + "/{client_id}", ExceptionResponse, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.organizationId.not.same")
        when: "调用方法"
        paramMap.put("organization_id", organizationId)
        entity = restTemplate.getForEntity(BASE_PATH + "/{client_id}", ClientDTO, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getId().equals(queryClientDTO.getId())
        entity.getBody().getSecret().equals(queryClientDTO.getSecret())
        entity.getBody().getAuthorizedGrantTypes().equals(queryClientDTO.getAuthorizedGrantTypes())
        entity.getBody().getName().equals(queryClientDTO.getName())
    }

    // GET /clients/query_by_name: not-found, org mismatch, then successful query.
    def "QueryByName"() {
        given: "构造参数"
        def queryClientDTO = clientDOList.get(3)
        def paramMap = new HashMap<String, Object>()
        paramMap.put("organization_id", organizationId)
        when: "调用方法-[异常-client_name不存在]"
        paramMap.put("client_name", "not_exist")
        def entity = restTemplate.getForEntity(BASE_PATH + "/query_by_name?client_name={client_name}", ExceptionResponse, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.client.not.existed")
        when: "调用方法[异常-组织id不同]"
        paramMap.put("client_name", queryClientDTO.getName())
        paramMap.put("organization_id", notExistOrganizationId)
        entity = restTemplate.getForEntity(BASE_PATH + "/query_by_name?client_name={client_name}", ExceptionResponse, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.organizationId.not.same")
        when: "调用方法"
        paramMap.put("organization_id", organizationId)
        entity = restTemplate.getForEntity(BASE_PATH + "/query_by_name?client_name={client_name}", ClientDTO, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getId().equals(queryClientDTO.getId())
        entity.getBody().getSecret().equals(queryClientDTO.getSecret())
        entity.getBody().getAuthorizedGrantTypes().equals(queryClientDTO.getAuthorizedGrantTypes())
        entity.getBody().getName().equals(queryClientDTO.getName())
    }

    // GET /clients: paged list with and without name/params filters.
    def "List"() {
        given: "构造参数"
        def paramMap = new HashMap<String, Object>()
        paramMap.put("organization_id", organizationId)
        when: "调用方法"
        def entity = restTemplate.getForEntity(BASE_PATH, PageInfo, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.body.pages == 1
        when: "调用方法-[带参数]"
        paramMap.put("name", "choerodon")
        paramMap.put("params", "choerodon")
        entity = restTemplate.getForEntity(BASE_PATH + "?name={name}&params={params}", PageInfo, paramMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.body.pages == 1
        entity.body.total == 3
    }

    // POST /clients/check: name-null error, then availability checks.
    // The last feature flips isClear so cleanup() removes the fixtures.
    def "Check"() {
        given: "构造参数"
        def clientDO = new ClientDTO()
        clientDO.setOrganizationId(1)
        when: "调用Check方法[异常-client_name为空]"
        clientDO.setName(null)
        def entity = restTemplate.postForEntity(BASE_PATH + "/check", clientDO, ExceptionResponse, organizationId)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.clientName.null")
        when: "调用Check方法[异常-client_name不存在]"
        clientDO.setName("not_exist")
        entity = restTemplate.postForEntity(BASE_PATH + "/check", clientDO, Void, organizationId)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        //entity.getBody().getCode().equals("error.clientName.exist")
        when: "调用Check方法[异常-client_name不存在]"
        clientDO.setId(null)
        clientDO.setName("not_exist")
        entity = restTemplate.postForEntity(BASE_PATH + "/check", clientDO, Void, organizationId)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        //entity.getBody().getCode().equals("error.clientName.exist")
        when: "调用Check方法"
        entity = restTemplate.postForEntity(BASE_PATH + "/check", clientDO, Void, organizationId)
        isClear = false
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
    }
}
<|start_filename|>react/src/app/iam/stores/global/role-label/index.js<|end_filename|>
/**
 * Barrel module: re-exports the RoleLabelStore as this directory's default.
 * Created by hulingfangzi on 2018/5/28.
 */
export { default } from './RoleLabelStore';
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/MenuPermissionMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.MenuPermissionDTO;
import io.choerodon.mybatis.common.Mapper;
/**
 * MyBatis mapper for the menu-permission association table. No custom
 * statements — inherits the generic CRUD operations from {@link Mapper}.
 *
 * @author wuguokai
 */
public interface MenuPermissionMapper extends Mapper<MenuPermissionDTO> {
}
<|start_filename|>react/src/app/iam/containers/global/organization/Organization.js<|end_filename|>
import React, { Component } from 'react';
import { runInAction } from 'mobx';
import { inject, observer } from 'mobx-react';
import { withRouter } from 'react-router-dom';
import { Button, Form, Input, Modal, Table, Tooltip, Row, Col, Select, Icon } from 'choerodon-ui';
import { Content, Header, Page, Permission } from '@choerodon/boot';
import { FormattedMessage, injectIntl } from 'react-intl';
import classnames from 'classnames';
import MouseOverWrapper from '../../../components/mouseOverWrapper';
import StatusTag from '../../../components/statusTag';
import './Organization.scss';
import AvatarUploader from '../../../components/avatarUploader';
const ORGANIZATION_TYPE = 'organization';
const PROJECT_TYPE = 'project';
const { Sidebar } = Modal;
const Option = Select.Option;
const FormItem = Form.Item;
const formItemLayout = {
labelCol: {
xs: { span: 24 },
sm: { span: 8 },
},
wrapperCol: {
xs: { span: 24 },
sm: { span: 16 },
},
};
const inputWidth = 512;
const intlPrefix = 'global.organization';
let timer;
@Form.create()
@withRouter
@injectIntl
@inject('AppState', 'HeaderStore')
@observer
export default class Organization extends Component {
  constructor(props) {
    super(props);
    // Refs used to focus the first input when the edit/create sidebar opens.
    this.editOrgFocusInput = React.createRef();
    this.creatOrgFocusInput = React.createRef();
    this.state = {
      selectLoading: true,
      isShowAvatar: false,
      imgUrl: null,
    };
  }

  componentWillMount() {
    this.loadOrganizations();
  }

  componentWillUnmount() {
    const { OrganizationStore } = this.props;
    // Cancel any pending debounced user search and reset table state.
    clearTimeout(timer);
    OrganizationStore.setFilters();
    OrganizationStore.setParams();
  }

  handleRefresh = () => {
    const { OrganizationStore } = this.props;
    OrganizationStore.refresh();
  };

  loadOrganizations(pagination, filters, sort, params) {
    const { OrganizationStore } = this.props;
    OrganizationStore.loadData(pagination, filters, sort, params);
  }

  // Open the sidebar in "create organization" mode and focus its code input.
  createOrg = () => {
    const { form, OrganizationStore } = this.props;
    form.resetFields();
    this.setState({
      imgUrl: null,
    });
    runInAction(() => {
      OrganizationStore.setEditData({});
      OrganizationStore.show = 'create';
      OrganizationStore.showSideBar();
    });
    setTimeout(() => {
      this.creatOrgFocusInput.input.focus();
    }, 10);
  };

  // Open the sidebar in "edit" mode pre-filled with the selected row.
  handleEdit = (data) => {
    const { form, OrganizationStore } = this.props;
    form.resetFields();
    this.setState({
      imgUrl: data.imageUrl,
    });
    runInAction(() => {
      OrganizationStore.show = 'edit';
      OrganizationStore.setEditData(data);
      OrganizationStore.showSideBar();
    });
    setTimeout(() => {
      this.editOrgFocusInput.input.focus();
    }, 10);
  };

  // Open the read-only detail sidebar, fetching extra detail fields.
  showDetail = (data) => {
    const { OrganizationStore } = this.props;
    runInAction(() => {
      OrganizationStore.setEditData(data);
      OrganizationStore.loadOrgDetail(data.id).then((message) => {
        if (message) {
          Choerodon.prompt(message);
        }
      });
      OrganizationStore.show = 'detail';
    });
  }

  // Sidebar OK handler: validates and submits create/edit; in detail mode
  // it simply closes the sidebar.
  handleSubmit = (e) => {
    e.preventDefault();
    const { form, intl, OrganizationStore, HeaderStore, AppState } = this.props;
    if (OrganizationStore.show !== 'detail') {
      form.validateFields((err, values, modify) => {
        Object.keys(values).forEach((key) => {
          // Trim leading/trailing whitespace from every submitted string.
          if (typeof values[key] === 'string') values[key] = values[key].trim();
        });
        const { loginName, realName, id } = AppState.getUserInfo;
        // The owner select defaults to the current user's display string;
        // if unchanged, flag it so the store treats it as "no owner change".
        if (values.userId === `${loginName}${realName}`) values.userId = false;
        if (OrganizationStore.editData.imageUrl !== this.state.imgUrl) modify = true;
        if (!err) {
          OrganizationStore.createOrUpdateOrg(values, modify, this.state.imgUrl, HeaderStore)
            .then((message) => {
              OrganizationStore.hideSideBar();
              Choerodon.prompt(intl.formatMessage({ id: message }));
            });
        }
      });
    } else {
      OrganizationStore.hideSideBar();
    }
  };

  handleCancelFun = () => {
    const { OrganizationStore } = this.props;
    OrganizationStore.hideSideBar();
  };

  // Toggle an organization's enabled state, then refresh the header's
  // org/project list for the current user.
  handleDisable = ({ enabled, id }) => {
    const { intl, OrganizationStore, HeaderStore, AppState } = this.props;
    const userId = AppState.getUserId;
    OrganizationStore.toggleDisable(id, enabled)
      .then(() => {
        Choerodon.prompt(intl.formatMessage({ id: enabled ? 'disable.success' : 'enable.success' }));
        HeaderStore.axiosGetOrgAndPro(sessionStorage.userId || userId);
      }).catch(Choerodon.handleResponseError);
  };

  /**
   * Async uniqueness validation for the organization code.
   * @param rule form validation rule
   * @param value organization code being checked
   * @param callback invoked with a message on failure, or no args on success
   */
  checkCode = (rule, value, callback) => {
    const { intl, OrganizationStore } = this.props;
    OrganizationStore.checkCode(value)
      .then(({ failed }) => {
        if (failed) {
          callback(intl.formatMessage({ id: 'global.organization.onlymsg' }));
        } else {
          callback();
        }
      });
  };

  // Debounced (300 ms) filter handler for the owner select.
  handleSelectFilter = (value) => {
    this.setState({
      selectLoading: true,
    });
    const queryObj = {
      param: value,
      sort: 'id',
      organization_id: 0,
    };
    if (timer) {
      clearTimeout(timer);
    }
    if (value) {
      timer = setTimeout(() => this.loadUsers(queryObj), 300);
    } else {
      return this.loadUsers(queryObj);
    }
  }

  // Load platform-wide users for the owner select.
  loadUsers = (queryObj) => {
    const { OrganizationStore } = this.props;
    OrganizationStore.loadUsers(queryObj).then((data) => {
      OrganizationStore.setUsersData((data.list || []).slice());
      this.setState({
        selectLoading: false,
      });
    });
  }

  /**
   * Builds the option list for the organization-owner select.
   * @returns {any[]}
   */
  getOption() {
    const { OrganizationStore } = this.props;
    const usersData = OrganizationStore.getUsersData;
    return usersData && usersData.length > 0 ? (
      usersData.map(({ id, loginName, realName }) => (
        <Option key={id} value={id}>{loginName}{realName}</Option>
      ))
    ) : null;
  }

  // Maps the sidebar mode to its i18n title key.
  renderSidebarTitle() {
    const { show } = this.props.OrganizationStore;
    switch (show) {
      case 'create':
        return 'global.organization.create';
      case 'edit':
        return 'global.organization.modify';
      case 'detail':
        return 'global.organization.detail';
      default:
        return '';
    }
  }

  // Maps the sidebar mode to its OK-button label.
  renderSidebarOkText() {
    const { OrganizationStore: { show } } = this.props;
    if (show === 'create') {
      return <FormattedMessage id="create" />;
    } else if (show === 'edit') {
      return <FormattedMessage id="save" />;
    } else {
      return <FormattedMessage id="close" />;
    }
  }

  // Read-only detail view: key/value rows plus the avatar preview.
  renderSidebarDetail() {
    const { intl: { formatMessage }, OrganizationStore: { editData, partDetail } } = this.props;
    const infoList = [{
      key: formatMessage({ id: `${intlPrefix}.name` }),
      value: editData.name,
    }, {
      key: formatMessage({ id: `${intlPrefix}.code` }),
      value: editData.code,
    }, {
      key: formatMessage({ id: `${intlPrefix}.region` }),
      value: editData.address ? editData.address : '无',
    }, {
      key: formatMessage({ id: `${intlPrefix}.project.creationDate` }),
      value: editData.creationDate,
    }, {
      key: formatMessage({ id: `${intlPrefix}.owner.login.name` }),
      value: partDetail.ownerLoginName,
    }, {
      key: formatMessage({ id: `${intlPrefix}.owner.user.name` }),
      value: partDetail.ownerRealName,
    }, {
      key: formatMessage({ id: `${intlPrefix}.home.page` }),
      value: partDetail.homePage,
    }, {
      key: formatMessage({ id: `${intlPrefix}.phone` }),
      value: partDetail.ownerPhone ? partDetail.ownerPhone : '无',
    }, {
      key: formatMessage({ id: `${intlPrefix}.mailbox` }),
      value: partDetail.ownerEmail,
    }, {
      key: formatMessage({ id: `${intlPrefix}.avatar` }),
      value: {
        imgUrl: editData.imageUrl,
        name: editData.name.charAt(0),
      },
    }];
    return (
      <Content
        className="sidebar-content"
        code="global.organization.detail"
        values={{ name: `${editData.code}` }}
      >
        {
          infoList.map(({ key, value }) => (
            <Row
              key={key}
              className={classnames('c7n-organization-detail-row', { 'c7n-organization-detail-row-hide': value === null })}
            >
              <Col span={3}>{key}:</Col>
              {
                key === formatMessage({ id: `${intlPrefix}.avatar` }) ? (
                  <div className="c7n-iam-organization-avatar">
                    <div
                      className="c7n-iam-organization-avatar-wrap"
                      style={{
                        backgroundColor: '#c5cbe8',
                        backgroundImage: value.imgUrl ? `url(${Choerodon.fileServer(value.imgUrl)})` : '',
                      }}
                    >
                      {!value.imgUrl && value.name}
                    </div>
                  </div>
                ) : (
                  <Col span={21}>{value}</Col>
                )
              }
            </Row>
          ))
        }
      </Content>
    );
  }

  // Create/edit form: code (create only), name, region, home page,
  // owner select (create only), and avatar uploader.
  renderSidebarContent() {
    const { intl, form: { getFieldDecorator }, OrganizationStore: { show, editData }, AppState } = this.props;
    const { loginName, realName } = AppState.getUserInfo;
    return (
      <Content
        className="sidebar-content"
        code={show === 'create' ? 'global.organization.create' : 'global.organization.modify'}
        values={{ name: show === 'create' ? `${AppState.getSiteInfo.systemName || 'Choerodon'}` : `${editData.code}` }}
      >
        <Form>
          {
            show === 'create' && (
              <FormItem
                {...formItemLayout}
              >
                {getFieldDecorator('code', {
                  rules: [{
                    required: true,
                    whitespace: true,
                    message: intl.formatMessage({ id: 'global.organization.coderequiredmsg' }),
                  }, {
                    max: 15,
                    message: intl.formatMessage({ id: 'global.organization.codemaxmsg' }),
                  }, {
                    pattern: /^[a-z](([a-z0-9]|-(?!-))*[a-z0-9])*$/,
                    message: intl.formatMessage({ id: 'global.organization.codepatternmsg' }),
                  }, {
                    validator: this.checkCode,
                  }],
                  validateTrigger: 'onBlur',
                  validateFirst: true,
                })(
                  <Input
                    ref={(e) => {
                      this.creatOrgFocusInput = e;
                    }}
                    label={<FormattedMessage id="global.organization.code" />}
                    autoComplete="off"
                    style={{ width: inputWidth }}
                    maxLength={15}
                    showLengthInfo={false}
                  />,
                )}
              </FormItem>
            )
          }
          <FormItem
            {...formItemLayout}
          >
            {getFieldDecorator('name', {
              rules: [{ required: true, message: intl.formatMessage({ id: 'global.organization.namerequiredmsg' }), whitespace: true }],
              validateTrigger: 'onBlur',
              initialValue: show === 'create' ? undefined : editData.name,
            })(
              <Input
                ref={(e) => {
                  this.editOrgFocusInput = e;
                }}
                label={<FormattedMessage id="global.organization.name" />}
                autoComplete="off"
                style={{ width: inputWidth }}
                maxLength={32}
                showLengthInfo={false}
              />,
            )}
          </FormItem>
          <FormItem
            {...formItemLayout}
          >
            {
              getFieldDecorator('address', {
                rules: [],
                initialValue: show === 'create' ? undefined : editData.address,
              })(
                <Input
                  label={<FormattedMessage id="global.organization.region" />}
                  autoComplete="off"
                  style={{ width: inputWidth }}
                />,
              )}
          </FormItem>
          <FormItem
            {...formItemLayout}
          >
            {
              getFieldDecorator('homePage', {
                rules: [
                  {
                    pattern: /(https?:\/\/)?(www\.)?[-a-zA-Z0-9@:%._+~#=]{2,256}\.[a-z]{2,4}\b([-a-zA-Z0-9@:%_+.~#?&//=]*)/,
                    message: intl.formatMessage({ id: `${intlPrefix}.homepage.pattern.msg` }),
                  },
                ],
                validateTrigger: 'onBlur',
                initialValue: show === 'create' ? undefined : editData.homePage,
              })(
                <Input
                  label={<FormattedMessage id="global.organization.home.page" />}
                  autoComplete="off"
                  style={{ width: inputWidth }}
                />,
              )}
          </FormItem>
          {
            show === 'create' && (
              <FormItem
                {...formItemLayout}
              >
                {getFieldDecorator('userId', {
                  initialValue: `${loginName}${realName}`,
                })(
                  <Select
                    style={{ width: 300 }}
                    label={<FormattedMessage id={`${intlPrefix}.owner`} />}
                    notFoundContent={intl.formatMessage({ id: 'memberrole.notfound.msg' })}
                    onFilterChange={this.handleSelectFilter}
                    getPopupContainer={() => document.getElementsByClassName('sidebar-content')[0].parentNode}
                    filterOption={false}
                    optionFilterProp="children"
                    loading={this.state.selectLoading}
                    filter
                  >
                    {this.getOption()}
                  </Select>,
                )}
              </FormItem>
            )
          }
          <div>
            <span style={{ color: 'rgba(0,0,0,.6)' }}>{intl.formatMessage({ id: `${intlPrefix}.avatar` })}</span>
            {this.getAvatar(editData)}
          </div>
        </Form>
      </Content>
    );
  }

  // Avatar preview with an overlay button that opens the upload dialog.
  getAvatar(data = {}) {
    const { isShowAvatar, imgUrl } = this.state;
    return (
      <div className="c7n-iam-organization-avatar">
        <div
          className="c7n-iam-organization-avatar-wrap"
          style={{
            backgroundColor: data.name ? ' #c5cbe8' : '#ccc',
            backgroundImage: imgUrl ? `url(${Choerodon.fileServer(imgUrl)})` : '',
          }}
        >
          {!imgUrl && data.name && data.name.charAt(0)}
          <Button className={classnames('c7n-iam-organization-avatar-button', { 'c7n-iam-organization-avatar-button-create': !data.name, 'c7n-iam-organization-avatar-button-edit': data.name })} onClick={this.openAvatarUploader}>
            <div className="c7n-iam-organization-avatar-button-icon">
              <Icon type="photo_camera" />
            </div>
          </Button>
          <AvatarUploader visible={isShowAvatar} intlPrefix="global.organization.avatar.edit" onVisibleChange={this.closeAvatarUploader} onUploadOk={this.handleUploadOk} />
        </div>
      </div>
    );
  }

  /**
   * Opens the avatar-upload modal.
   */
  openAvatarUploader = () => {
    this.setState({
      isShowAvatar: true,
    });
  }

  closeAvatarUploader = (visible) => {
    this.setState({
      isShowAvatar: visible,
    });
  };

  // Store the uploaded image URL and close the modal.
  handleUploadOk = (res) => {
    this.setState({
      imgUrl: res,
      isShowAvatar: false,
    });
  }

  handlePageChange = (pagination, filters, sorter, params) => {
    this.loadOrganizations(pagination, filters, sorter, params);
  };

  // Column definitions for the organization table.
  getTableColumns() {
    const { intl, OrganizationStore: { sort: { columnKey, order }, filters } } = this.props;
    return [{
      title: <FormattedMessage id="name" />,
      dataIndex: 'name',
      key: 'name',
      filters: [],
      width: '20%',
      render: (text, record) => (
        <React.Fragment>
          <div className="c7n-iam-organization-name-avatar">
            {
              record.imageUrl ? <img src={record.imageUrl} alt="avatar" style={{ width: '100%' }} />
                : <React.Fragment>{text.split('')[0]}</React.Fragment>
            }
          </div>
          <MouseOverWrapper text={text} width={0.3}>
            {text}
          </MouseOverWrapper>
        </React.Fragment>
      ),
      sortOrder: columnKey === 'name' && order,
      filteredValue: filters.name || [],
    }, {
      key: 'homePage',
      // width: '20%',
      title: <FormattedMessage id="global.organization.home.page" />,
      dataIndex: 'homePage',
      // filters: [],
      sortOrder: columnKey === 'homePage' && order,
      // filteredValue: filters.homePage || [],
      render: text => (
        <MouseOverWrapper text={text} width={0.3}>
          {text}
        </MouseOverWrapper>
      ),
    }, {
      title: <FormattedMessage id="code" />,
      dataIndex: 'code',
      key: 'code',
      filters: [],
      sortOrder: columnKey === 'code' && order,
      filteredValue: filters.code || [],
      width: '15%',
      render: text => (
        <MouseOverWrapper text={text} width={0.3}>
          {text}
        </MouseOverWrapper>
      ),
    }, {
      title: <FormattedMessage id="global.organization.project.count" />,
      width: '10%',
      dataIndex: 'projectCount',
      key: 'projectCount',
      align: 'center',
    }, {
      title: <FormattedMessage id="status" />,
      width: '15%',
      dataIndex: 'enabled',
      key: 'enabled',
      filters: [{
        text: intl.formatMessage({ id: 'enable' }),
        value: 'true',
      }, {
        text: intl.formatMessage({ id: 'disable' }),
        value: 'false',
      }],
      filteredValue: filters.enabled || [],
      render: enabled => (<StatusTag mode="icon" name={intl.formatMessage({ id: enabled ? 'enable' : 'disable' })} colorCode={enabled ? 'COMPLETED' : 'DISABLE'} />),
    }, {
      title: <FormattedMessage id="global.organization.project.creationDate" />,
      width: '15%',
      dataIndex: 'creationDate',
      key: 'creationDate',
    }, {
      title: '',
      width: 120,
      key: 'action',
      align: 'right',
      render: (text, record) => (
        <div className="operation">
          <Permission service={['iam-service.organization.update']}>
            <Tooltip
              title={<FormattedMessage id="modify" />}
              placement="bottom"
            >
              <Button
                size="small"
                icon="mode_edit"
                shape="circle"
                onClick={this.handleEdit.bind(this, record)}
              />
            </Tooltip>
          </Permission>
          <Permission service={['iam-service.organization.disableOrganization', 'iam-service.organization.enableOrganization']}>
            <Tooltip
              title={<FormattedMessage id={record.enabled ? 'disable' : 'enable'} />}
              placement="bottom"
            >
              <Button
                size="small"
                icon={record.enabled ? 'remove_circle_outline' : 'finished'}
                shape="circle"
                onClick={() => this.handleDisable(record)}
              />
            </Tooltip>
          </Permission>
          <Permission service={['iam-service.organization.query']}>
            <Tooltip
              title={<FormattedMessage id="detail" />}
              placement="bottom"
            >
              <Button
                shape="circle"
                icon="find_in_page"
                size="small"
                onClick={this.showDetail.bind(this, record)}
              />
            </Tooltip>
          </Permission>
        </div>
      ),
    }];
  }

  render() {
    const {
      intl, OrganizationStore: {
        params, loading, pagination, sidebarVisible, submitting, show, orgData,
      },
      AppState,
    } = this.props;
    return (
      <Page
        service={[
          'iam-service.organization.list',
          'iam-service.organization.query',
          'organization-service.organization.create',
          'iam-service.organization.update',
          'iam-service.organization.disableOrganization',
          'iam-service.organization.enableOrganization',
          'iam-service.role-member.queryAllUsers',
        ]}
      >
        <Header title={<FormattedMessage id="global.organization.header.title" />}>
          <Permission service={['organization-service.organization.create']}>
            <Button
              onClick={this.createOrg}
              icon="playlist_add"
            >
              <FormattedMessage id="global.organization.create" />
            </Button>
          </Permission>
          <Button
            onClick={this.handleRefresh}
            icon="refresh"
          >
            <FormattedMessage id="refresh" />
          </Button>
        </Header>
        <Content
          code="global.organization"
          values={{ name: AppState.getSiteInfo.systemName || 'Choerodon' }}
          className="c7n-iam-organization"
        >
          <Table
            columns={this.getTableColumns()}
            dataSource={orgData.slice()}
            pagination={pagination}
            onChange={this.handlePageChange}
            filters={params.slice()}
            loading={loading}
            rowKey="id"
            filterBarPlaceholder={intl.formatMessage({ id: 'filtertable' })}
            scroll={{ x: true }}
          />
          <Sidebar
            title={<FormattedMessage id={this.renderSidebarTitle()} />}
            visible={sidebarVisible}
            onOk={this.handleSubmit}
            onCancel={this.handleCancelFun}
            okCancel={show !== 'detail'}
            okText={this.renderSidebarOkText()}
            cancelText={<FormattedMessage id="cancel" />}
            confirmLoading={submitting}
            className={classnames('c7n-iam-organization-sidebar', { 'c7n-iam-organization-sidebar-create': show === 'create' })}
          >
            {show !== 'detail' ? this.renderSidebarContent() : this.renderSidebarDetail()}
          </Sidebar>
        </Content>
      </Page>
    );
  }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/eventhandler/ActuatorSagaHandler.java<|end_filename|>
package io.choerodon.iam.api.eventhandler;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.choerodon.actuator.util.MicroServiceInitData;
import io.choerodon.annotation.entity.PermissionDescription;
import io.choerodon.annotation.entity.PermissionEntity;
import io.choerodon.asgard.saga.annotation.SagaTask;
import io.choerodon.core.swagger.PermissionData;
import io.choerodon.iam.app.service.UploadHistoryService;
import io.choerodon.iam.infra.dto.RoleDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import javax.sql.DataSource;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
@Component
public class ActuatorSagaHandler {

    private static final Logger LOGGER = LoggerFactory.getLogger(ActuatorSagaHandler.class);
    // Shared mapper; ObjectMapper is thread-safe once configured.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
    private static final String ACTUATOR_REFRESH_SAGA_CODE = "mgmt-actuator-refresh";
    private static final String PERMISSION_REFRESH_TASK_SAGA_CODE = "iam-permission-task-refresh";
    private static final String INIT_DATA_REFRESH_TASK_SAGA_CODE = "iam-init-data-task-refresh";

    private UploadHistoryService.ParsePermissionService parsePermissionService;
    private DataSource dataSource;

    public ActuatorSagaHandler(UploadHistoryService.ParsePermissionService parsePermissionService,
                               DataSource dataSource) {
        this.parsePermissionService = parsePermissionService;
        this.dataSource = dataSource;
    }

    /**
     * Saga task: refreshes the permission table from a service's actuator payload.
     *
     * @param actuatorJson JSON payload containing "service" and "permission" nodes
     * @return the original payload, passed through for downstream saga tasks
     * @throws IOException if the payload cannot be parsed
     */
    @SagaTask(code = PERMISSION_REFRESH_TASK_SAGA_CODE, sagaCode = ACTUATOR_REFRESH_SAGA_CODE, seq = 1, description = "刷新权限表数据")
    public String refreshPermission(String actuatorJson) throws IOException {
        Map actuator = OBJECT_MAPPER.readValue(actuatorJson, Map.class);
        String service = (String) actuator.get("service");
        Map permissionNode = (Map) actuator.get("permission");
        LOGGER.info("start to refresh permission, service: {}", service);
        // Round-trip through JSON to convert the loosely-typed Map into
        // strongly-typed PermissionDescription values.
        String permissionJson = OBJECT_MAPPER.writeValueAsString(permissionNode);
        Map<String, PermissionDescription> descriptions = OBJECT_MAPPER.readValue(permissionJson, OBJECT_MAPPER.getTypeFactory().constructMapType(HashMap.class, String.class, PermissionDescription.class));
        Map<String, RoleDTO> initRoleMap = parsePermissionService.queryInitRoleByCode();
        for (Map.Entry<String, PermissionDescription> entry : descriptions.entrySet()) {
            processDescription(service, entry.getKey(), entry.getValue(), initRoleMap);
        }
        return actuatorJson;
    }

    /**
     * Saga task: imports menu/permission init data into the IAM tables inside a
     * single manually-committed transaction on a borrowed connection.
     *
     * @param actuatorJson JSON payload containing "service" and "init-data" nodes
     * @return the original payload, passed through for downstream saga tasks
     * @throws IOException  if the payload cannot be parsed
     * @throws SQLException if the import transaction fails
     */
    @SagaTask(code = INIT_DATA_REFRESH_TASK_SAGA_CODE, sagaCode = ACTUATOR_REFRESH_SAGA_CODE, seq = 1, description = "刷新菜单表数据")
    public String refreshInitData(String actuatorJson) throws IOException, SQLException {
        JsonNode root = OBJECT_MAPPER.readTree(actuatorJson);
        String service = root.get("service").asText();
        LOGGER.info("start to refresh init data, service: {}", service);
        JsonNode data = root.get("init-data");
        if (data == null || data.size() == 0) {
            LOGGER.info("actuator init-data is empty skip iam-init-data-task-refresh.");
            return actuatorJson;
        }
        try (Connection connection = dataSource.getConnection()) {
            connection.setAutoCommit(false);
            MicroServiceInitData.processInitData(data, connection, new HashSet<>(Arrays.asList("IAM_PERMISSION", "IAM_MENU_B", "IAM_MENU_PERMISSION", "IAM_DASHBOARD", "IAM_DASHBOARD_ROLE")));
            connection.commit();
        }
        return actuatorJson;
    }

    /**
     * Converts one permission description entry into a PermissionData record and
     * hands it to the permission parser. Entries without a permission body are
     * skipped.
     */
    private void processDescription(String service, String key, PermissionDescription description, Map<String, RoleDTO> initRoleMap) {
        String[] names = key.split("\\.");
        // Keys are expected to be fully qualified ("pkg.SomeController.action").
        // Guard against malformed keys which previously caused an
        // ArrayIndexOutOfBoundsException on names[names.length - 2].
        if (names.length < 2) {
            LOGGER.warn("skip malformed permission description key: {}", key);
            return;
        }
        String controllerClassName = names[names.length - 2];
        String action = names[names.length - 1];
        String resource = camelToHyphenLine(controllerClassName).replace("-controller", "");
        PermissionEntity permissionEntity = description.getPermission();
        if (permissionEntity == null) {
            return;
        }
        PermissionData permissionData = new PermissionData();
        permissionData.setAction(action);
        permissionData.setPermissionLevel(permissionEntity.getType());
        permissionData.setPermissionLogin(permissionEntity.isPermissionLogin());
        permissionData.setPermissionPublic(permissionEntity.isPermissionPublic());
        permissionData.setPermissionWithin(permissionEntity.isPermissionWithin());
        permissionData.setRoles(permissionEntity.getRoles());
        parsePermissionService.processPermission(permissionEntity.getRoles(), description.getPath(), description.getMethod(), description.getDescription(), permissionData, service, resource, initRoleMap);
    }

    /**
     * Converts a camel-case string to a hyphen-separated lower-case string.
     *
     * @param param camel-case input (eg. UserCodeController); null/blank yields ""
     * @return hyphenated lower-case form (eg. user-code-controller)
     */
    public static String camelToHyphenLine(String param) {
        if (param == null || "".equals(param.trim())) {
            return "";
        }
        int len = param.length();
        // Reserve a little extra room for inserted hyphens to avoid a regrow.
        StringBuilder sb = new StringBuilder(len + 8);
        for (int i = 0; i < len; i++) {
            char c = param.charAt(i);
            if (Character.isUpperCase(c)) {
                if (i > 0) {
                    sb.append('-');
                }
                sb.append(Character.toLowerCase(c));
            } else {
                sb.append(c);
            }
        }
        return sb.toString();
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/AccessTokenController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import java.util.List;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.iam.infra.dto.AccessTokenDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.choerodon.swagger.annotation.CustomPageRequest;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import io.choerodon.iam.app.service.AccessTokenService;
import springfox.documentation.annotations.ApiIgnore;
/**
* @author Eugen
**/
@RestController
@RequestMapping(value = "/v1/token")
public class AccessTokenController {

    /** Application service performing token queries and revocation. */
    private final AccessTokenService tokenService;

    public AccessTokenController(AccessTokenService accessTokenService) {
        this.tokenService = accessTokenService;
    }

    /**
     * Pages through the current user's access tokens, optionally filtered by
     * client name. The caller's own token is identified via {@code currentToken}.
     */
    @Permission(permissionLogin = true, type = ResourceType.SITE)
    @ApiOperation(value = "分页查询当前用户token")
    @CustomPageRequest
    @GetMapping
    public ResponseEntity<PageInfo<AccessTokenDTO>> list(@ApiIgnore
                                                         @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                         @RequestParam(value = "clientName", required = false) String clientName,
                                                         @RequestParam(value = "currentToken") String currentToken) {
        PageInfo<AccessTokenDTO> tokens = tokenService.pagedSearch(pageRequest, clientName, currentToken);
        return new ResponseEntity<>(tokens, HttpStatus.OK);
    }

    /** Revokes a single token identified by {@code tokenId}. */
    @Permission(permissionLogin = true, type = ResourceType.SITE)
    @ApiOperation(value = "根据tokenId删除token")
    @DeleteMapping
    public void delete(@RequestParam(name = "tokenId") String tokenId,
                       @RequestParam(value = "currentToken") String currentToken) {
        tokenService.delete(tokenId, currentToken);
    }

    /** Revokes every token in the given id list. */
    @Permission(permissionLogin = true, type = ResourceType.SITE)
    @ApiOperation(value = "根据tokenId列表批量删除token")
    @DeleteMapping("/batch")
    public void deleteList(@RequestBody List<String> tokenIds,
                           @RequestParam(value = "currentToken") String currentToken) {
        tokenService.deleteList(tokenIds, currentToken);
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/eventhandler/OrganizationListener.java<|end_filename|>
package io.choerodon.iam.api.eventhandler;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.choerodon.asgard.saga.annotation.SagaTask;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.ldap.DirectoryType;
import io.choerodon.iam.api.dto.ProjectCategoryDTO;
import io.choerodon.iam.api.dto.payload.OrganizationCreateEventPayload;
import io.choerodon.iam.api.dto.payload.OrganizationRegisterEventPayload;
import io.choerodon.iam.app.service.*;
import io.choerodon.iam.infra.dto.*;
import io.choerodon.iam.infra.mapper.ProjectCategoryMapper;
import io.choerodon.iam.infra.mapper.ProjectMapCategoryMapper;
import io.choerodon.iam.infra.mapper.ProjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.util.Random;
import static io.choerodon.iam.app.service.impl.OrganizationProjectServiceImpl.PROJECT_DEFAULT_CATEGORY;
import static io.choerodon.iam.infra.common.utils.SagaTopic.Organization.*;
/**
* @author wuguokai
*/
@Component
public class OrganizationListener {
private static final Logger LOGGER = LoggerFactory.getLogger(OrganizationListener.class);
private static final String ALPHANUMERIC = "abcdefghijklmnopqrstuvwxyz1234567890";
private PasswordPolicyService passwordPolicyService;
private OrganizationService organizationService;
private LdapService ldapService;
private ProjectMapper projectMapper;
private UserService userService;
private OrganizationProjectService organizationProjectService;
private ProjectCategoryMapper projectCategoryMapper;
private ProjectMapCategoryMapper projectMapCategoryMapper;
private final ObjectMapper mapper = new ObjectMapper();
@Value("${choerodon.devops.message:false}")
private boolean devopsMessage;
@Value("${lock.expireTime:3600}")
private Integer lockedExpireTime;
@Value("${max.checkCaptcha:3}")
private Integer maxCheckCaptcha;
@Value("${max.errorTime:5}")
private Integer maxErrorTime;
@Value("${choerodon.category.enabled:false}")
private Boolean categoryEnable;
public OrganizationListener(LdapService ldapService, PasswordPolicyService passwordPolicyService,
OrganizationService organizationService,
UserService userService, OrganizationProjectService organizationProjectService,
ProjectMapper projectMapper,
ProjectCategoryMapper projectCategoryMapper,
ProjectMapCategoryMapper projectMapCategoryMapper) {
this.passwordPolicyService = passwordPolicyService;
this.organizationService = organizationService;
this.ldapService = ldapService;
this.userService = userService;
this.organizationProjectService = organizationProjectService;
this.projectMapper = projectMapper;
this.projectCategoryMapper = projectCategoryMapper;
this.projectMapCategoryMapper = projectMapCategoryMapper;
}
@SagaTask(code = TASK_ORG_CREATE, sagaCode = ORG_CREATE, seq = 1, description = "iam接收org服务创建组织事件")
public OrganizationCreateEventPayload create(String message) throws IOException {
OrganizationCreateEventPayload organizationEventPayload = mapper.readValue(message, OrganizationCreateEventPayload.class);
LOGGER.info("iam create the organization trigger task,payload: {}", organizationEventPayload);
Long orgId = organizationEventPayload.getId();
OrganizationDTO organizationDTO = organizationService.queryOrganizationById(orgId);
if (organizationDTO == null) {
throw new CommonException("error.organization.not exist");
}
createLdap(orgId, organizationDTO.getName());
createPasswordPolicy(orgId, organizationDTO.getCode(), organizationDTO.getName());
return organizationEventPayload;
}
@SagaTask(code = TASK_ORG_REGISTER_INIT_ORG, sagaCode = ORG_REGISTER, seq = 20, description = "创建默认密码策略,创建默认ldap配置")
public OrganizationRegisterEventPayload registerInitOrg(String message) throws IOException {
OrganizationRegisterEventPayload organizationRegisterEventPayload =
mapper.readValue(message, OrganizationRegisterEventPayload.class);
LOGGER.info("Iam receives Saga event '{}' and triggers task: {},payload: {}",
ORG_REGISTER, TASK_ORG_REGISTER_INIT_ORG, organizationRegisterEventPayload);
Long orgId = organizationRegisterEventPayload.getOrganization().getId();
OrganizationDTO organizationDTO = organizationService.queryOrganizationById(orgId);
if (organizationDTO == null) {
throw new CommonException("error.organization.not exist");
}
userService.updateUserDisabled(organizationRegisterEventPayload.getUser().getId());
createLdap(orgId, organizationDTO.getName());
createPasswordPolicy(orgId, organizationDTO.getCode(), organizationDTO.getName());
return organizationRegisterEventPayload;
}
@SagaTask(code = TASK_ORG_REGISTER_INIT_PROJ, sagaCode = ORG_REGISTER, seq = 80, description = "创建项目")
public OrganizationRegisterEventPayload registerInitProj(String message) throws IOException {
OrganizationRegisterEventPayload organizationRegisterEventPayload =
mapper.readValue(message, OrganizationRegisterEventPayload.class);
LOGGER.info("Iam receives Saga event '{}' and triggers task: {},payload: {}",
ORG_REGISTER, TASK_ORG_REGISTER_INIT_PROJ, organizationRegisterEventPayload);
ProjectDTO dto = new ProjectDTO();
dto.setName("公司内销平台");
dto.setType("type/develop-platform");
dto.setOrganizationId(organizationRegisterEventPayload.getOrganization().getId());
dto.setCode(randomProjCode());
dto.setEnabled(true);
dto = organizationProjectService.create(dto);
if (categoryEnable) {
assignDefaultCategoriesToProjects(dto.getId());
}
organizationRegisterEventPayload.setProject(
new OrganizationRegisterEventPayload.Project(dto.getId(), dto.getCode(), dto.getName()));
return organizationRegisterEventPayload;
}
private ProjectMapCategoryDTO assignDefaultCategoriesToProjects(Long projectId) {
ProjectMapCategoryDTO projectMapCategoryDTO = new ProjectMapCategoryDTO();
ProjectCategoryDTO projectCategoryDTO = new ProjectCategoryDTO();
projectCategoryDTO.setCode(PROJECT_DEFAULT_CATEGORY);
projectCategoryDTO = projectCategoryMapper.selectOne(projectCategoryDTO);
if (projectCategoryDTO != null) {
projectMapCategoryDTO.setCategoryId(projectCategoryDTO.getId());
}
projectMapCategoryDTO.setProjectId(projectId);
if (projectMapCategoryMapper.insert(projectMapCategoryDTO) != 1) {
throw new CommonException("error.project.map.category.insert");
}
return projectMapCategoryDTO;
}
private void createPasswordPolicy(Long orgId, String code, String name) {
try {
LOGGER.info("### begin create password policy of organization {} ", orgId);
PasswordPolicyDTO passwordPolicyDTO = new PasswordPolicyDTO();
passwordPolicyDTO.setOrganizationId(orgId);
passwordPolicyDTO.setCode(code);
passwordPolicyDTO.setName(name);
passwordPolicyDTO.setMaxCheckCaptcha(maxCheckCaptcha);
passwordPolicyDTO.setMaxErrorTime(maxErrorTime);
passwordPolicyDTO.setLockedExpireTime(lockedExpireTime);
//默认开启登陆安全策略,设置为
passwordPolicyDTO.setEnableSecurity(true);
passwordPolicyDTO.setEnableCaptcha(true);
passwordPolicyDTO.setMaxCheckCaptcha(3);
passwordPolicyDTO.setEnableLock(true);
passwordPolicyDTO.setMaxErrorTime(5);
passwordPolicyDTO.setLockedExpireTime(600);
passwordPolicyService.create(orgId, passwordPolicyDTO);
} catch (Exception e) {
LOGGER.error("create password policy error of organizationId: {}, exception: {}", orgId, e);
}
}
private void createLdap(Long orgId, String name) {
try {
LOGGER.info("### begin create ldap of organization {} ", orgId);
LdapDTO ldapDTO = new LdapDTO();
ldapDTO.setOrganizationId(orgId);
ldapDTO.setName(name);
ldapDTO.setServerAddress("");
ldapDTO.setPort("389");
ldapDTO.setDirectoryType(DirectoryType.OPEN_LDAP.value());
ldapDTO.setEnabled(true);
ldapDTO.setUseSSL(false);
ldapDTO.setObjectClass("person");
ldapDTO.setSagaBatchSize(500);
ldapDTO.setConnectionTimeout(10);
ldapDTO.setAccount("test");
ldapDTO.setPassword("<PASSWORD>");
ldapDTO.setUuidField("entryUUID");
ldapService.create(orgId, ldapDTO);
} catch (Exception e) {
LOGGER.error("create ldap error of organization, organizationId: {}, exception: {}", orgId, e);
}
}
/**
* 生成随机项目编码
* 1.数据库中未存在
* 2.格式:"proj-"+8位随机小写字母或数字
*
* @return 符合条件的项目编码
*/
private String randomProjCode() {
String projectCode = "";
boolean flag = false;
while (!flag) {
projectCode = "proj-" + generateString(false, 8);
ProjectDTO projectDTO = new ProjectDTO();
projectDTO.setCode(projectCode);
ProjectDTO projByCode = projectMapper.selectOne(projectDTO);
if (projByCode == null) {
flag = true;
}
}
return projectCode;
}
private String generateString(Boolean isChinese, int length) {
char[] text = new char[length];
for (int i = 0; i < length; i++) {
if (isChinese) {
text[i] = (char) (0x4e00 + (int) (Math.random() * (0x5ea5 - 0x4ea5 + 1)));
} else {
text[i] = ALPHANUMERIC.charAt(new Random().nextInt(ALPHANUMERIC.length()));
}
}
return new String(text);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/UserPasswordDTO.java<|end_filename|>
package io.choerodon.iam.api.dto;
import javax.validation.constraints.NotEmpty;
import io.swagger.annotations.ApiModelProperty;
/**
* @author superlee
* @since 2018/4/12
*/
public class UserPasswordDTO {

    // Both fields are mandatory: the new password to set and the current
    // password used to authorize the change.

    @ApiModelProperty(value = "新密码/必填")
    @NotEmpty
    private String password;

    @ApiModelProperty(value = "原始密码/必填")
    @NotEmpty
    private String originalPassword;

    public String getPassword() { return password; }

    public void setPassword(String password) { this.password = password; }

    public String getOriginalPassword() { return originalPassword; }

    public void setOriginalPassword(String originalPassword) { this.originalPassword = originalPassword; }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/BookMarkServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.oauth.CustomUserDetails;
import io.choerodon.iam.app.service.BookMarkService;
import io.choerodon.iam.infra.asserts.BookMarkAssertHelper;
import io.choerodon.iam.infra.asserts.DetailsHelperAssert;
import io.choerodon.iam.infra.dto.BookMarkDTO;
import io.choerodon.iam.infra.mapper.BookMarkMapper;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;
import java.util.Collections;
import java.util.List;
/**
* @author dengyouquan
**/
@Service
public class BookMarkServiceImpl implements BookMarkService {

    private BookMarkMapper mapper;
    private BookMarkAssertHelper assertHelper;

    public BookMarkServiceImpl(BookMarkMapper bookMarkMapper,
                               BookMarkAssertHelper bookMarkAssertHelper) {
        this.mapper = bookMarkMapper;
        this.assertHelper = bookMarkAssertHelper;
    }

    /** Inserts a bookmark owned by the currently logged-in user. */
    @Override
    public BookMarkDTO create(BookMarkDTO bookMarkDTO) {
        CustomUserDetails userDetails = DetailsHelperAssert.userDetailNotExisted();
        bookMarkDTO.setUserId(userDetails.getUserId());
        mapper.insert(bookMarkDTO);
        return bookMarkDTO;
    }

    /**
     * Updates a batch of bookmarks; any single failure rolls back the whole
     * batch. Entries without an id are silently skipped, and each entry must
     * belong to the current user.
     *
     * @param bookMarks bookmarks to update (may be empty)
     * @return the updated bookmarks
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public List<BookMarkDTO> updateAll(List<BookMarkDTO> bookMarks) {
        if (CollectionUtils.isEmpty(bookMarks)) {
            return Collections.emptyList();
        }
        for (BookMarkDTO bookMark : bookMarks) {
            Long bookMarkId = bookMark.getId();
            if (bookMarkId == null) {
                continue;
            }
            BookMarkDTO existing = assertHelper.bookMarkNotExisted(bookMarkId);
            Long ownerId = existing.getUserId();
            DetailsHelperAssert.notCurrentUser(ownerId);
            // Keep the original owner; callers cannot reassign a bookmark.
            bookMark.setUserId(ownerId);
            if (mapper.updateByPrimaryKey(bookMark) != 1) {
                throw new CommonException("error.bookMark.update");
            }
        }
        return bookMarks;
    }

    /** Lists all bookmarks owned by the current user. */
    @Override
    public List<BookMarkDTO> list() {
        CustomUserDetails userDetails = DetailsHelperAssert.userDetailNotExisted();
        BookMarkDTO probe = new BookMarkDTO();
        probe.setUserId(userDetails.getUserId());
        return mapper.select(probe);
    }

    /** Deletes a bookmark after verifying it belongs to the current user. */
    @Override
    public void delete(Long id) {
        BookMarkDTO existing = assertHelper.bookMarkNotExisted(id);
        DetailsHelperAssert.notCurrentUser(existing.getUserId());
        mapper.deleteByPrimaryKey(id);
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/AuditServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.infra.dto.AuditDTO;
import org.springframework.stereotype.Service;
import io.choerodon.iam.app.service.AuditService;
import io.choerodon.iam.infra.mapper.AuditMapper;
/**
* Created by Eugen on 01/03/2019.
*/
@Service
public class AuditServiceImpl implements AuditService {

    private final AuditMapper auditMapper;

    public AuditServiceImpl(AuditMapper auditMapper) {
        this.auditMapper = auditMapper;
    }

    /** Inserts an audit record and re-reads it so generated columns are populated. */
    @Override
    public AuditDTO create(AuditDTO auditDTO) {
        auditMapper.insert(auditDTO);
        return auditMapper.selectByPrimaryKey(auditDTO);
    }

    /** Pages through audit records filtered by user, business type and data type. */
    @Override
    public PageInfo<AuditDTO> pagingQuery(Long userId, String businessType, String dataType, PageRequest pageRequest) {
        return PageHelper.startPage(pageRequest.getPage(), pageRequest.getSize())
                .doSelectPageInfo(() -> auditMapper.selectByParams(userId, businessType, dataType));
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/dto/LookupDTO.java<|end_filename|>
package io.choerodon.iam.infra.dto;
import io.choerodon.mybatis.annotation.MultiLanguage;
import io.choerodon.mybatis.annotation.MultiLanguageField;
import io.choerodon.mybatis.entity.BaseDTO;
import io.swagger.annotations.ApiModelProperty;
import javax.persistence.*;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.Size;
import java.util.List;
/**
* @author superlee
* @since 2019-04-23
*/
@MultiLanguage
@Table(name = "fd_lookup")
public class LookupDTO extends BaseDTO {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @ApiModelProperty(value = "快码id", hidden = true)
    private Long id;

    @ApiModelProperty(value = "快码code")
    @NotEmpty(message = "error.code.empty")
    @Size(max = 32, min = 1, message = "error.code.length")
    private String code;

    // Translated per-language via @MultiLanguageField.
    @MultiLanguageField
    @ApiModelProperty(value = "描述")
    private String description;

    // Not persisted on this table; populated from the lookup-value table.
    @Transient
    @ApiModelProperty(value = "快码值")
    private List<LookupValueDTO> lookupValues;

    public Long getId() { return id; }

    public void setId(Long id) { this.id = id; }

    public String getCode() { return code; }

    public void setCode(String code) { this.code = code; }

    public String getDescription() { return description; }

    public void setDescription(String description) { this.description = description; }

    public List<LookupValueDTO> getLookupValues() { return lookupValues; }

    public void setLookupValues(List<LookupValueDTO> lookupValues) { this.lookupValues = lookupValues; }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/dto/ApplicationExplorationDTO.java<|end_filename|>
package io.choerodon.iam.infra.dto;
import io.choerodon.mybatis.entity.BaseDTO;
import javax.persistence.*;
/**
* @author superlee
* @since 2019-04-23
*/
@Table(name = "iam_application_exploration")
public class ApplicationExplorationDTO extends BaseDTO {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    // Tree bookkeeping for the application hierarchy.
    private Long applicationId;
    private String path;
    private Long rootId;
    private Long parentId;
    private String hashcode;

    @Column(name = "is_enabled")
    private Boolean enabled;

    // The fields below are not persisted on this table; they are filled in by
    // join queries with application/project data.
    @Transient
    private String applicationName;
    @Transient
    private String applicationCode;
    @Transient
    private String applicationCategory;
    @Transient
    private String applicationType;
    @Transient
    private Boolean applicationEnabled;
    @Transient
    private Long projectId;
    @Transient
    private String projectCode;
    @Transient
    private String projectName;
    @Transient
    private String projectImageUrl;

    public Long getId() { return id; }

    public void setId(Long id) { this.id = id; }

    public Long getApplicationId() { return applicationId; }

    public void setApplicationId(Long applicationId) { this.applicationId = applicationId; }

    public String getPath() { return path; }

    public void setPath(String path) { this.path = path; }

    public Long getRootId() { return rootId; }

    public void setRootId(Long rootId) { this.rootId = rootId; }

    public Long getParentId() { return parentId; }

    public void setParentId(Long parentId) { this.parentId = parentId; }

    public String getHashcode() { return hashcode; }

    public void setHashcode(String hashcode) { this.hashcode = hashcode; }

    public Boolean getEnabled() { return enabled; }

    public void setEnabled(Boolean enabled) { this.enabled = enabled; }

    public String getApplicationName() { return applicationName; }

    public void setApplicationName(String applicationName) { this.applicationName = applicationName; }

    public String getApplicationCode() { return applicationCode; }

    public void setApplicationCode(String applicationCode) { this.applicationCode = applicationCode; }

    public String getApplicationCategory() { return applicationCategory; }

    public void setApplicationCategory(String applicationCategory) { this.applicationCategory = applicationCategory; }

    public String getApplicationType() { return applicationType; }

    public void setApplicationType(String applicationType) { this.applicationType = applicationType; }

    public Boolean getApplicationEnabled() { return applicationEnabled; }

    public void setApplicationEnabled(Boolean applicationEnabled) { this.applicationEnabled = applicationEnabled; }

    public Long getProjectId() { return projectId; }

    public void setProjectId(Long projectId) { this.projectId = projectId; }

    public String getProjectCode() { return projectCode; }

    public void setProjectCode(String projectCode) { this.projectCode = projectCode; }

    public String getProjectName() { return projectName; }

    public void setProjectName(String projectName) { this.projectName = projectName; }

    public String getProjectImageUrl() { return projectImageUrl; }

    public void setProjectImageUrl(String projectImageUrl) { this.projectImageUrl = projectImageUrl; }
}
<|start_filename|>react/src/app/iam/stores/organization/project/index.js<|end_filename|>
/**
 * Created by mading on 2018/4/2.
 * Re-exports the organization ProjectStore as this module's default.
 */
export { default } from './ProjectStore';
<|start_filename|>src/test/groovy/io/choerodon/iam/api/controller/v1/LanguageControllerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.controller.v1
import com.github.pagehelper.PageInfo
import io.choerodon.core.exception.ExceptionResponse
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.infra.dto.LanguageDTO
import io.choerodon.iam.infra.mapper.LanguageMapper
import org.springframework.beans.BeanUtils
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.web.client.TestRestTemplate
import org.springframework.context.annotation.Import
import org.springframework.http.HttpEntity
import org.springframework.http.HttpMethod
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
* @author dengyouquan
* */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class LanguageControllerSpec extends Specification {
// Base URL of the language endpoints under test.
private static String BASE_PATH = "/v1/languages"
@Autowired
private TestRestTemplate restTemplate
// Used to load a real language row (id=1) as the update fixture.
@Autowired
private LanguageMapper languageMapper
// Exercises PUT /v1/languages/{id}: version-missing, bad id, missing code,
// over-long name, and the happy path.
def "Update"() {
given: "构造请求参数"
def paramsMap = new HashMap<String, Object>()
def languageDO = languageMapper.selectByPrimaryKey(1L);
def httpEntity = null;
paramsMap.put("id", languageDO.getId())
when: "调用方法[异常-版本号为空]"
// NOTE(review): the label says the version number is missing, but the
// fixture is copied as-is and the expected "code" is 'zh_CN' rather than
// an error code — confirm this case actually triggers the intended error.
def language = new LanguageDTO()
BeanUtils.copyProperties(languageDO, language)
httpEntity = new HttpEntity<LanguageDTO>(language)
def entity = restTemplate.exchange(BASE_PATH + "/{id}", HttpMethod.PUT, httpEntity, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("zh_CN")
when: "调用方法[异常-id不存在]"
def languageDTO2 = new LanguageDTO()
BeanUtils.copyProperties(languageDO, languageDTO2)
paramsMap.put("id", 1000)
httpEntity = new HttpEntity<LanguageDTO>(languageDTO2)
entity = restTemplate.exchange(BASE_PATH + "/{id}", HttpMethod.PUT, httpEntity, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.language.update")
when: "调用方法[异常-code不存在]"
paramsMap.put("id", 1L)
def languageDTO3 = new LanguageDTO()
BeanUtils.copyProperties(languageDO, languageDTO3)
languageDTO3.setCode(null)
httpEntity = new HttpEntity<LanguageDTO>(languageDTO3)
entity = restTemplate.exchange(BASE_PATH + "/{id}", HttpMethod.PUT, httpEntity, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.code.empty")
when: "调用方法[异常-name长度太长]"
def languageDTO4 = new LanguageDTO()
BeanUtils.copyProperties(languageDO, languageDTO4)
languageDTO4.setName("namenamenamenamenamenamenamenamenamenamenamenamename")
httpEntity = new HttpEntity<LanguageDTO>(languageDTO4)
entity = restTemplate.exchange(BASE_PATH + "/{id}", HttpMethod.PUT, httpEntity, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.name.length")
when: "调用方法"
def languageDTO5 = new LanguageDTO()
BeanUtils.copyProperties(languageDO, languageDTO5)
httpEntity = new HttpEntity<LanguageDTO>(languageDTO5)
entity = restTemplate.exchange(BASE_PATH + "/{id}", HttpMethod.PUT, httpEntity, LanguageDTO, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
// NOTE(review): the detailed field assertions for the success case are
// disabled below — re-enable once the fixture data is stable.
// entity.getBody().getId().equals(languageDO.getId())
// entity.getBody().getCode().equals(languageDO.getCode())
// entity.getBody().getName().equals(languageDO.getName())
// entity.getBody().getDescription().equals(languageDO.getDescription())
}
// Exercises GET /v1/languages with and without a code filter; counts assume
// the seed data contains exactly two languages.
def "PagingQuery"() {
given: "构造参数列表"
def paramsMap = new HashMap<String, Object>()
when: "调用方法[全查询]"
def entity = restTemplate.getForEntity(BASE_PATH, PageInfo, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.body.pages == 1
entity.body.total == 2
entity.getBody().list.size() == 2
when: "调用方法[带参数查询]"
paramsMap.put("code", "zh_CN")
entity = restTemplate.getForEntity(BASE_PATH + "?code={code}", PageInfo, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.body.pages == 1
entity.body.total == 1
entity.getBody().list.size() == 1
}
// Exercises GET /v1/languages/list; expects the two seeded languages.
def "ListAll"() {
when: "调用方法"
def entity = restTemplate.getForEntity(BASE_PATH + "/list", List)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().size() == 2
}
// Exercises GET /v1/languages/code for an existing and a missing code.
def "QueryByCode"() {
given: "构造参数列表"
def paramsMap = new HashMap<String, Object>()
when: "调用方法[存在code]"
paramsMap.put("value", "zh_CN")
def entity = restTemplate.getForEntity(BASE_PATH + "/code?value={value}", LanguageDTO, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals(paramsMap.get("value"))
when: "调用方法[不存在code]"
paramsMap.put("value", "zh_US")
entity = restTemplate.getForEntity(BASE_PATH + "/code?value={value}", ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
// NOTE(review): the expected error code assertion is disabled — confirm
// the controller's not-found behavior before re-enabling.
// entity.getBody().getCode().equals("error.resource.notExist")
}
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/LanguageServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.app.service.LanguageService;
import io.choerodon.iam.infra.asserts.AssertHelper;
import io.choerodon.iam.infra.dto.LanguageDTO;
import io.choerodon.iam.infra.mapper.LanguageMapper;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
/**
 * Default implementation of {@link LanguageService}: paging queries, updates
 * and lookups for the supported languages.
 *
 * @author superlee
 */
@Service
public class LanguageServiceImpl implements LanguageService {

    // Injected collaborators are final: they are assigned once in the
    // constructor and must never be rebound afterwards.
    private final LanguageMapper languageMapper;

    private final AssertHelper assertHelper;

    public LanguageServiceImpl(LanguageMapper languageMapper,
                               AssertHelper assertHelper) {
        this.languageMapper = languageMapper;
        this.assertHelper = assertHelper;
    }

    /**
     * Pages through languages matching the example entity and the fulltext
     * search parameter.
     */
    @Override
    public PageInfo<LanguageDTO> pagingQuery(PageRequest pageRequest, LanguageDTO languageDTO, String param) {
        return PageHelper
                .startPage(pageRequest.getPage(), pageRequest.getSize())
                .doSelectPageInfo(() -> languageMapper.fulltextSearch(languageDTO, param));
    }

    /**
     * Updates a language row. Requires an object version number (optimistic
     * locking) and re-reads the row after the update so the caller gets the
     * persisted state.
     *
     * @throws CommonException error.language.update when no row was updated
     */
    @Override
    @Transactional(rollbackFor = CommonException.class)
    public LanguageDTO update(LanguageDTO languageDTO) {
        assertHelper.objectVersionNumberNotNull(languageDTO.getObjectVersionNumber());
        if (languageMapper.updateByPrimaryKeySelective(languageDTO) != 1) {
            throw new CommonException("error.language.update");
        }
        return languageMapper.selectByPrimaryKey(languageDTO.getId());
    }

    /**
     * Finds a language by its code, or returns null when none matches.
     */
    @Override
    public LanguageDTO queryByCode(String code) {
        LanguageDTO dto = new LanguageDTO();
        dto.setCode(code);
        return languageMapper.selectOne(dto);
    }

    /**
     * Lists every language row.
     */
    @Override
    public List<LanguageDTO> listAll() {
        return languageMapper.selectAll();
    }
}
<|start_filename|>react/src/app/iam/stores/organization/ldap/LDAPStore.js<|end_filename|>
/**
* Created by song on 2017/6/26.
*/
import { action, computed, observable } from 'mobx';
import { axios, store } from '@choerodon/boot';
import querystring from 'query-string';
@store('LDAPStore')
class LDAPStore {
@observable ldapData = null;
@observable testData = null;
@observable syncData = null;
@observable isLoading = true;
@observable isConnectLoading = true;
@observable isShowResult = false;
@observable confirmLoading = false;
@observable isSyncLoading = false;
@observable syncRecord = []; // 同步记录
@observable detailRecord = []; // 失败记录
@action setIsLoading(flag) {
this.isLoading = flag;
}
@computed get getIsLoading() {
return this.isLoading;
}
@action setIsConnectLoading(flag) {
this.isConnectLoading = flag;
}
@computed get getIsConnectLoading() {
return this.isConnectLoading;
}
@action setIsSyncLoading(flag) {
this.isSyncLoading = flag;
}
@computed get getIsSyncLoading() {
return this.isSyncLoading;
}
@action setIsConfirmLoading(flag) {
this.confirmLoading = flag;
}
@computed get getIsConfirmLoading() {
return this.confirmLoading;
}
@action setIsShowResult(data) {
this.isShowResult = data;
}
@computed get getIsShowResult() {
return this.isShowResult;
}
@action setLDAPData(data) {
this.ldapData = data;
}
@computed get getLDAPData() {
return this.ldapData;
}
@action setTestData(data) {
this.testData = data;
}
@computed get getTestData() {
return this.testData;
}
@action setSyncData(data) {
this.syncData = data;
}
@computed get getSyncData() {
return this.syncData;
}
@action cleanData() {
this.ldapData = {};
}
@action setSyncRecord(data) {
this.syncRecord = data;
}
@computed get getSyncRecord() {
return this.syncRecord;
}
@action setDetailRecord(data) {
this.detailRecord = data;
}
@computed get getDetailRecord() {
return this.detailRecord;
}
// 加载同步记录
loadSyncRecord(
{ current, pageSize },
{ columnKey = 'id', order = 'descend' },
organizationId, id) {
const queryObj = {
page: current,
size: pageSize,
};
if (columnKey) {
const sorter = [];
sorter.push(columnKey);
if (order === 'descend') {
sorter.push('desc');
}
queryObj.sort = sorter.join(',');
}
return axios.get(`/iam/v1/organizations/${organizationId}/ldaps/${id}/history?${querystring.stringify(queryObj)}`);
}
// 加载失败详情
loadDetail(
{ current, pageSize },
{ uuid, loginName, realName, email },
{ columnKey = 'id', order = 'descend' },
params, organizationId, id) {
const queryObj = {
page: current,
size: pageSize,
uuid,
loginName,
realName,
email,
params,
};
if (columnKey) {
const sorter = [];
sorter.push(columnKey);
if (order === 'descend') {
sorter.push('desc');
}
queryObj.sort = sorter.join(',');
}
return axios.get(`/iam/v1/organizations/${organizationId}/ldap_histories/${id}/error_users?${querystring.stringify(queryObj)}`);
}
loadLDAP = (organizationId) => {
this.cleanData();
this.setIsLoading(true);
return axios.get(`/iam/v1/organizations/${organizationId}/ldaps`).then((data) => {
if (data) {
this.setLDAPData(data);
}
this.setIsLoading(false);
});
};
loadOrganization(organizationId) {
this.setIsLoading(true);
axios.get(`/uaa/v1/organizations/${organizationId}`).then((data) => {
if (data) {
this.setOrganization(data);
}
this.setIsLoading(false);
});
}
updateLDAP = (organizationId, id, ldap) =>
axios.post(`/iam/v1/organizations/${organizationId}/ldaps/${id}`, JSON.stringify(ldap));
testConnect = (organizationId, id, ldap) =>
axios.post(`/iam/v1/organizations/${organizationId}/ldaps/${id}/test_connect`, JSON.stringify(ldap));
getSyncInfo = (organizationId, id) =>
axios.get(`/iam/v1/organizations/${organizationId}/ldaps/${id}/latest_history`);
SyncUsers = (organizationId, id) =>
axios.post(`/iam/v1/organizations/${organizationId}/ldaps/${id}/sync_users`);
enabledLdap = (organizationId, id) =>
axios.put(`/iam/v1/organizations/${organizationId}/ldaps/${id}/enable`);
disabledLdap = (organizationId, id) =>
axios.put(`/iam/v1/organizations/${organizationId}/ldaps/${id}/disable`);
}
// Module-level singleton shared by every consumer of the store.
const ldapStoreSingleton = new LDAPStore();
export default ldapStoreSingleton;
<|start_filename|>src/main/java/io/choerodon/iam/app/service/ProjectRelationshipService.java<|end_filename|>
package io.choerodon.iam.app.service;
import java.util.Date;
import java.util.List;
import java.util.Map;
import io.choerodon.iam.api.dto.RelationshipCheckDTO;
import io.choerodon.iam.infra.dto.ProjectRelationshipDTO;
/**
 * Application service managing the relationships between a program (project
 * group) and its child projects.
 *
 * @author Eugen
 */
public interface ProjectRelationshipService {

    /**
     * Lists the child projects under a program; by default all children are
     * returned, optionally only the enabled ones.
     *
     * @param parentId         id of the parent (program)
     * @param onlySelectEnable whether to return only enabled projects
     * @return the child projects of the program
     */
    List<ProjectRelationshipDTO> getProjUnderGroup(Long parentId, Boolean onlySelectEnable);

    /**
     * Removes a project from a program.
     *
     * @param orgId   organization id
     * @param groupId id of the project-relationship row to remove
     */
    void removesAProjUnderGroup(Long orgId, Long groupId);

    /**
     * Returns the time ranges during which a project is unavailable under the
     * given program.
     *
     * @param projectId project id
     * @param parentId  program id
     * @return list of maps describing the unavailable time ranges
     */
    List<Map<String, Date>> getUnavailableTime(Long projectId, Long parentId);

    /**
     * Batch creates/updates/enables/disables project relationships under a
     * program.
     *
     * @param orgId organization id
     * @param list  relationship rows to apply
     * @return the persisted relationship rows
     */
    List<ProjectRelationshipDTO> batchUpdateRelationShipUnderProgram(Long orgId, List<ProjectRelationshipDTO> list);

    /**
     * Checks whether a project relationship can be enabled.
     *
     * @param id relationship id
     * @return the check result
     */
    RelationshipCheckDTO checkRelationshipCanBeEnabled(Long id);
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/ProjectService.java<|end_filename|>
package io.choerodon.iam.app.service;
import java.util.List;
import java.util.Set;
import com.github.pagehelper.Page;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.infra.dto.ProjectDTO;
import io.choerodon.iam.infra.dto.UserDTO;
/**
 * Application service for querying and updating projects.
 *
 * @author flyleft
 */
public interface ProjectService {

    /** Returns the project with the given id. */
    ProjectDTO queryProjectById(Long projectId);

    /** Pages through the users of a project, optionally filtered by user id, email or a fulltext param. */
    PageInfo<UserDTO> pagingQueryTheUsersOfProject(Long id, Long userId, String email, PageRequest pageRequest, String param);

    /** Updates a project and returns the persisted state. */
    ProjectDTO update(ProjectDTO projectDTO);

    /** Disables a project by id. */
    ProjectDTO disableProject(Long id);

    /** Checks whether a project code is available. */
    Boolean checkProjCode(String code);

    /** Lists the ids of the users belonging to a project. */
    List<Long> listUserIds(Long projectId);

    /** Returns the projects matching the given id set. */
    List<ProjectDTO> queryByIds(Set<Long> ids);
}
<|start_filename|>react/src/app/iam/stores/organization/client/index.js<|end_filename|>
// Barrel file: re-export the store as this directory's default export.
export { default } from './ClientStore';
<|start_filename|>src/main/java/io/choerodon/iam/app/service/UserService.java<|end_filename|>
package io.choerodon.iam.app.service;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Future;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.iam.api.dto.*;
import io.choerodon.iam.infra.dto.OrganizationDTO;
import io.choerodon.iam.infra.dto.ProjectDTO;
import io.choerodon.iam.infra.dto.UserDTO;
import org.springframework.web.multipart.MultipartFile;
/**
 * Application service exposing user-related operations: self info, role-based
 * paging queries on site/organization/project level, admin management, photo
 * upload and notification sending.
 *
 * @author superlee
 * @author wuguokai
 */
public interface UserService {

    /** Returns the currently authenticated user. */
    UserDTO querySelf();

    /** Lists the organizations a user belongs to, optionally including disabled ones. */
    List<OrganizationDTO> queryOrganizations(Long userId, Boolean includedDisabled);

    /** Lists a user's projects inside one organization. */
    List<ProjectDTO> queryProjectsByOrganizationId(Long userId, Long organizationId);

    /** Lists the current user's organizations together with their projects. */
    List<OrganizationDTO> queryOrganizationWithProjects();

    /** Finds a user by login name. */
    UserDTO queryByLoginName(String loginName);

    /** Lets a user change their own password, optionally validating the old password and login. */
    void selfUpdatePassword(Long userId, UserPasswordDTO userPasswordDTO, Boolean checkPassword, Boolean checkLogin);

    /** Locks a user for the given expiry time. */
    UserDTO lockUser(Long userId, Integer lockExpireTime);

    /** Returns a user's profile information. */
    UserDTO queryInfo(Long userId);

    /** Returns registrant info and the admin for an organization code. */
    RegistrantInfoDTO queryRegistrantInfoAndAdmin(String orgCode);

    /** Updates a user's profile information. */
    UserDTO updateInfo(UserDTO user, Boolean checkLogin);

    /** Validates user fields (e.g. uniqueness checks). */
    void check(UserDTO user);

    /** Lists a user's projects, optionally including disabled ones. */
    List<ProjectDTO> queryProjects(Long id, Boolean includedDisabled);

    /** Pages through users together with their roles at the given resource level. */
    PageInfo<UserDTO> pagingQueryUsersWithRoles(PageRequest pageRequest,
                                                RoleAssignmentSearchDTO roleAssignmentSearchDTO, Long sourceId, ResourceType resourceType);

    /** Pages through users holding a role on site level. */
    PageInfo<UserDTO> pagingQueryUsersByRoleIdOnSiteLevel(PageRequest pageRequest,
                                                          RoleAssignmentSearchDTO roleAssignmentSearchDTO, Long roleId, boolean doPage);

    /** Pages through users holding a role on organization level. */
    PageInfo<UserDTO> pagingQueryUsersByRoleIdOnOrganizationLevel(PageRequest pageRequest,
                                                                  RoleAssignmentSearchDTO roleAssignmentSearchDTO,
                                                                  Long roleId, Long sourceId, boolean doPage);

    /** Pages through users holding a role on project level. */
    PageInfo<UserDTO> pagingQueryUsersByRoleIdOnProjectLevel(PageRequest pageRequest,
                                                             RoleAssignmentSearchDTO roleAssignmentSearchDTO,
                                                             Long roleId, Long sourceId, boolean doPage);

    /** Uploads a user photo and returns its URL. */
    String uploadPhoto(Long id, MultipartFile file);

    /** Crops (rotate + rectangle) and saves a user photo, returning its URL. */
    String savePhoto(Long id, MultipartFile file, Double rotate, Integer axisX, Integer axisY, Integer width, Integer height);

    /** Pages through admin users. */
    PageInfo<UserDTO> pagingQueryAdminUsers(PageRequest pageRequest, UserDTO userDTO, String params);

    /** Grants the admin flag to the given user ids. */
    void addAdminUsers(long[] ids);

    /** Revokes the admin flag from one user. */
    void deleteAdminUser(long id);

    /**
     * Returns the users matching a set of user ids.
     *
     * @param ids         user id array
     * @param onlyEnabled defaults to true; when true only enabled users are returned
     * @return List<UserDTO> matching users
     */
    List<UserDTO> listUsersByIds(Long[] ids, Boolean onlyEnabled);

    /**
     * Returns the users matching a set of email addresses.
     *
     * @param emails user email array
     * @return List<UserDTO> matching users
     */
    List<UserDTO> listUsersByEmails(String[] emails);

    /** Pages through a user's organizations together with the roles held in each. */
    PageInfo<OrganizationDTO> pagingQueryOrganizationsWithRoles(PageRequest pageRequest,
                                                                Long id, String params);

    /** Pages through a user's projects together with the roles held in each. */
    PageInfo<ProjectDTO> pagingQueryProjectAndRolesById(PageRequest pageRequest,
                                                        Long id, String params);

    /** Creates a user and assigns the given roles in one call. */
    UserDTO createUserAndAssignRoles(CreateUserWithRolesDTO userWithRoles);

    /** Pages through the current user's projects. */
    PageInfo<ProjectDTO> pagingQueryProjectsSelf(ProjectDTO projectDTO,
                                                 PageRequest pageRequest, String params);

    /** Pages through the current user's organizations. */
    PageInfo<OrganizationDTO> pagingQueryOrganizationsSelf(OrganizationDTO organizationDTO,
                                                           PageRequest pageRequest, String params);

    /** Returns all user ids. */
    Long[] listUserIds();

    /** Returns the organization id owning the given email, if any. */
    Long queryOrgIdByEmail(String email);

    /** Returns the organizations and projects on which the user holds roles. */
    OrganizationProjectDTO queryByUserIdWithRoleOrganizationAndProject(Long userId);

    /** Pages through all users in simplified form (for pickers etc.). */
    PageInfo<SimplifiedUserDTO> pagingQueryAllUser(PageRequest pageRequest, String param, Long organizationId);

    /** Pages through users on site level, optionally filtered by user id/email/param. */
    PageInfo<UserDTO> pagingQueryUsersOnSiteLevel(Long userId, String email, PageRequest pageRequest, String param);

    /** Returns aggregate statistics: all users and newly registered users. */
    Map<String, Object> queryAllAndNewUsers();

    /** Pages through the roles a user holds inside an organization. */
    PageInfo<UserRoleDTO> pagingQueryRole(PageRequest pageRequest, String param, Long userId, Long organizationId);

    /**
     * Returns the users matching a set of login names.
     *
     * @param loginNames  login names to look up
     * @param onlyEnabled when true only enabled users are returned
     * @return matching users
     */
    List<UserDTO> listUsersByLoginNames(String[] loginNames, Boolean onlyEnabled);

    /**
     * Asynchronously sends a notification (email and site message) to users.
     *
     * @param fromUserId user sending the notification
     * @param userIds    target users receiving the notification
     * @param code       business code of the notification
     * @param params     rendering parameters
     * @param sourceId   organization/project id that triggered the notification; 0 or null on site level
     */
    Future<String> sendNotice(Long fromUserId, List<Long> userIds, String code, Map<String, Object> params, Long sourceId);

    /** Variant of {@link #sendNotice} that can broadcast to all users. */
    Future<String> sendNotice(Long fromUserId, List<Long> userIds, String code, Map<String, Object> params, Long sourceId, boolean sendAll);

    /** Toggles a user's enabled/disabled state. */
    UserDTO updateUserDisabled(Long userId);

    /** Updates a user's account info. */
    UserInfoDTO updateUserInfo(Long id, UserInfoDTO userInfoDTO);
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/ClientController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import javax.validation.Valid;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.iam.infra.dto.ClientDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.choerodon.swagger.annotation.CustomPageRequest;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import io.choerodon.core.base.BaseController;
import io.choerodon.iam.app.service.ClientService;
import io.choerodon.iam.infra.common.utils.ParamUtils;
import springfox.documentation.annotations.ApiIgnore;
/**
 * REST endpoints for managing OAuth clients under an organization.
 *
 * @author wuguokai
 */
@RestController
@RequestMapping(value = "/v1/organizations/{organization_id}/clients")
public class ClientController extends BaseController {

    private ClientService clientService;

    public ClientController(ClientService clientService) {
        this.clientService = clientService;
    }

    /**
     * Creates a new client from the given payload.
     *
     * @param organizationId organization id
     * @param clientDTO      client payload
     * @return the created client
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "创建客户端")
    @PostMapping
    public ResponseEntity<ClientDTO> create(@PathVariable("organization_id") Long organizationId,
                                            @RequestBody @Valid ClientDTO clientDTO) {
        return new ResponseEntity<>(clientService.create(organizationId, clientDTO), HttpStatus.OK);
    }

    /**
     * Generates random default client info suitable for a creation form.
     *
     * @param organizationId organization id
     * @return default client creation info
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "随机的客户端创建信息生成")
    @GetMapping(value = "/createInfo")
    public ResponseEntity<ClientDTO> createInfo(@PathVariable("organization_id") Long organizationId) {
        return new ResponseEntity<>(clientService.getDefaultCreateData(organizationId), HttpStatus.OK);
    }

    /**
     * Updates a client by id.
     * NOTE(review): exposed via POST rather than PUT — presumably kept for
     * existing callers; confirm before changing the mapping.
     *
     * @param organizationId organization id
     * @param clientId       client id
     * @param clientDTO      client payload
     * @return the updated client
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "修改客户端")
    @PostMapping(value = "/{client_id}")
    public ResponseEntity<ClientDTO> update(@PathVariable("organization_id") Long organizationId,
                                            @PathVariable("client_id") Long clientId,
                                            @RequestBody ClientDTO clientDTO) {
        // Path variables win over whatever ids the payload carries.
        clientDTO.setId(clientId);
        clientDTO.setOrganizationId(organizationId);
        return new ResponseEntity<>(clientService.update(clientDTO), HttpStatus.OK);
    }

    /**
     * Deletes a client by id.
     *
     * @param organizationId organization id
     * @param clientId       client id
     * @return 200 when the deletion succeeded
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "删除客户端")
    @DeleteMapping(value = "/{client_id}")
    public ResponseEntity delete(@PathVariable("organization_id") Long organizationId, @PathVariable("client_id") Long clientId) {
        clientService.delete(organizationId, clientId);
        return new ResponseEntity(HttpStatus.OK);
    }

    /**
     * Fetches a client by id.
     *
     * @param organizationId organization id
     * @param clientId       client id
     * @return the matching client
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "通过id查询客户端")
    @GetMapping(value = "/{client_id}")
    public ResponseEntity<ClientDTO> query(@PathVariable("organization_id") Long organizationId, @PathVariable("client_id") Long clientId) {
        return new ResponseEntity<>(clientService.query(organizationId, clientId), HttpStatus.OK);
    }

    /**
     * Fetches a client by name.
     *
     * @param organizationId organization id
     * @param clientName     client name
     * @return the matching client
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "通过名称查询客户端")
    @GetMapping("/query_by_name")
    public ResponseEntity<ClientDTO> queryByName(@PathVariable("organization_id") Long organizationId, @RequestParam(value = "client_name") String clientName) {
        return new ResponseEntity<>(clientService.queryByName(organizationId, clientName), HttpStatus.OK);
    }

    /**
     * Pages through clients with fuzzy matching on name and fulltext params.
     *
     * @param organizationId organization id
     * @param name           client name filter
     * @param params         fuzzy-search parameters
     * @return one page of matching clients
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "分页模糊查询客户端")
    @GetMapping
    @CustomPageRequest
    public ResponseEntity<PageInfo<ClientDTO>> list(@PathVariable("organization_id") Long organizationId,
                                                    @ApiIgnore
                                                    @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                    @RequestParam(required = false) String name,
                                                    @RequestParam(required = false) String[] params) {
        ClientDTO clientDTO = new ClientDTO();
        clientDTO.setOrganizationId(organizationId);
        clientDTO.setName(name);
        return new ResponseEntity<>(clientService.list(clientDTO, pageRequest, ParamUtils.arrToStr(params)), HttpStatus.OK);
    }

    /**
     * Validates client name uniqueness: omit the id when checking for
     * creation, include it when checking for update.
     *
     * @param organizationId organization id
     * @param client         client payload to validate
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "客户端信息校验")
    @PostMapping(value = "/check")
    public void check(@PathVariable(name = "organization_id") Long organizationId,
                      @RequestBody ClientDTO client) {
        clientService.check(client);
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/LanguageService.java<|end_filename|>
package io.choerodon.iam.app.service;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.infra.dto.LanguageDTO;
import java.util.List;
/**
 * Application service for querying and updating the supported languages.
 *
 * @author superlee
 */
public interface LanguageService {

    /** Pages through languages matching the example entity and fulltext param. */
    PageInfo<LanguageDTO> pagingQuery(PageRequest pageRequest, LanguageDTO languageDTO, String param);

    /** Updates a language; requires objectVersionNumber for optimistic locking. */
    LanguageDTO update(LanguageDTO languageDTO);

    /** Finds a language by code, or null when absent. */
    LanguageDTO queryByCode(String code);

    /** Lists all languages. */
    List<LanguageDTO> listAll();
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/feign/NotifyFeignClient.java<|end_filename|>
package io.choerodon.iam.infra.feign;
import javax.validation.Valid;
import io.choerodon.core.notify.NoticeSendDTO;
import io.choerodon.iam.api.dto.SystemAnnouncementDTO;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import io.choerodon.iam.infra.feign.fallback.NotifyFeignClientFallback;
/**
 * Feign client for notify-service: posts notices and creates system
 * announcements. Degrades to {@link NotifyFeignClientFallback} on failure.
 */
@FeignClient(value = "notify-service", path = "/v1", fallback = NotifyFeignClientFallback.class)
public interface NotifyFeignClient {

    /** Sends a notice (email and site message) to its receivers. */
    @PostMapping("/notices")
    void postNotice(@RequestBody @Valid NoticeSendDTO dto);

    /** Creates a system announcement. */
    @PostMapping("/announcements")
    ResponseEntity<SystemAnnouncementDTO> create(@RequestBody @Validated SystemAnnouncementDTO dto);
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/common/utils/ldap/LdapSyncUserTask.java<|end_filename|>
package io.choerodon.iam.infra.common.utils.ldap;
import java.lang.reflect.Field;
import java.util.*;
import java.util.stream.Collectors;
import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import javax.naming.directory.SearchControls;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.app.service.OrganizationUserService;
import io.choerodon.iam.infra.common.utils.CollectionUtils;
import io.choerodon.iam.infra.dto.*;
import io.choerodon.iam.infra.enums.LdapErrorUserCause;
import io.choerodon.iam.infra.mapper.LdapErrorUserMapper;
import io.choerodon.iam.infra.mapper.LdapHistoryMapper;
import io.choerodon.iam.infra.mapper.UserMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.ldap.control.PagedResultsDirContextProcessor;
import org.springframework.ldap.core.AttributesMapper;
import org.springframework.ldap.core.LdapOperations;
import org.springframework.ldap.core.LdapTemplate;
import org.springframework.ldap.core.support.LdapOperationsCallback;
import org.springframework.ldap.core.support.SingleContextSource;
import org.springframework.ldap.filter.AndFilter;
import org.springframework.ldap.filter.EqualsFilter;
import org.springframework.ldap.filter.HardcodedFilter;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;
/**
* @author wuguokai
*/
@RefreshScope
@Component
public class LdapSyncUserTask {
private static final Logger logger = LoggerFactory.getLogger(LdapSyncUserTask.class);
private static final String OBJECT_CLASS = "objectclass";
private OrganizationUserService organizationUserService;
private LdapErrorUserMapper ldapErrorUserMapper;
private UserMapper userMapper;
private LdapHistoryMapper ldapHistoryMapper;
    /**
     * Constructor injection of the collaborators used during LDAP user sync.
     *
     * @param organizationUserService service used to create/enable/disable users
     * @param ldapErrorUserMapper     mapper persisting users that failed to sync
     * @param userMapper              mapper used to look up existing users
     * @param ldapHistoryMapper       mapper recording sync-history rows
     */
    public LdapSyncUserTask(OrganizationUserService organizationUserService,
                            LdapErrorUserMapper ldapErrorUserMapper,
                            UserMapper userMapper,
                            LdapHistoryMapper ldapHistoryMapper) {
        this.organizationUserService = organizationUserService;
        this.ldapErrorUserMapper = ldapErrorUserMapper;
        this.userMapper = userMapper;
        this.ldapHistoryMapper = ldapHistoryMapper;
    }
    /**
     * Entry point of an asynchronous LDAP user synchronization run. Executes
     * on the "ldap-executor" pool: records a history row, syncs the users and
     * finally reports the result through the fallback.
     *
     * @param ldapTemplate Spring LDAP template bound to the target server
     * @param ldap         LDAP configuration (field mappings, filter, batch size)
     * @param syncType     "sync" to insert/enable users, "disable" to disable them
     * @param fallback     callback invoked once the run has finished
     */
    @Async("ldap-executor")
    public void syncLDAPUser(LdapTemplate ldapTemplate, LdapDTO ldap, String syncType, FinishFallback fallback) {
        logger.info("@@@ start to sync users from ldap server, sync type: {}", syncType);
        LdapSyncReport ldapSyncReport = initLdapSyncReport(ldap);
        LdapHistoryDTO ldapHistory = initLdapHistory(ldap.getId());
        syncUsersFromLdapServer(ldapTemplate, ldap, ldapSyncReport, ldapHistory.getId(), syncType);
        logger.info("@@@ syncing users has been finished, sync type: {}, ldapSyncReport: {}", syncType, ldapSyncReport);
        fallback.callback(ldapSyncReport, ldapHistory);
    }
    /**
     * Pages through the LDAP server using a paged-results control and, per
     * page, converts the returned attributes into users and persists them
     * according to the sync type ("sync" inserts/enables, "disable" disables).
     *
     * @param ldapTemplate   template bound to the target LDAP server
     * @param ldap           LDAP configuration (field mappings, filter, page size)
     * @param ldapSyncReport counters accumulated across all pages
     * @param ldapHistoryId  id of the history row error users are attached to
     * @param syncType       "sync" or "disable"
     */
    private void syncUsersFromLdapServer(LdapTemplate ldapTemplate, LdapDTO ldap,
                                         LdapSyncReport ldapSyncReport, Long ldapHistoryId,
                                         String syncType) {
        // Search controls: search the whole subtree.
        final SearchControls searchControls = new SearchControls();
        searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE);
        // Filter: objectclass filters AND-combined with the custom filter.
        AndFilter andFilter = getFilter(ldap);
        // Paged-results processor; page size comes from the LDAP configuration.
        final PagedResultsDirContextProcessor processor =
                new PagedResultsDirContextProcessor(ldap.getSagaBatchSize());
        AttributesMapper attributesMapper = getDefaultAttributesMapper();
        // Read ldapTemplate's private ignorePartialResultException and
        // ignoreNameNotFoundException flags via reflection so the same
        // settings can be forwarded to doWithSingleContext below.
        boolean ignorePartialResultException = false;
        boolean ignoreNameNotFoundException = false;
        try {
            Field ignorePartialResultExceptionField = ldapTemplate.getClass().getDeclaredField("ignorePartialResultException");
            Field ignoreNameNotFoundExceptionField = ldapTemplate.getClass().getDeclaredField("ignoreNameNotFoundException");
            ignorePartialResultExceptionField.setAccessible(true);
            ignoreNameNotFoundExceptionField.setAccessible(true);
            ignorePartialResultException = (boolean) ignorePartialResultExceptionField.get(ldapTemplate);
            ignoreNameNotFoundException = (boolean) ignoreNameNotFoundExceptionField.get(ldapTemplate);
        } catch (NoSuchFieldException e) {
            logger.warn("reflect to get field failed, exception: {}", e);
        } catch (IllegalAccessException e) {
            logger.warn("reflect to get field value failed, exception: {}", e);
        }
        // A single context is reused across pages so the paging cookie stays valid.
        SingleContextSource.doWithSingleContext(
                ldapTemplate.getContextSource(), new LdapOperationsCallback<List<UserDTO>>() {
                    @Override
                    public List<UserDTO> doWithLdapOperations(LdapOperations operations) {
                        Integer page = 1;
                        do {
                            List<UserDTO> users = new ArrayList<>();
                            List<LdapErrorUserDTO> errorUsers = new ArrayList<>();
                            List<Attributes> attributesList =
                                    operations.search("", andFilter.toString(), searchControls,
                                            attributesMapper, processor);
                            // Convert the attributes of the current page into users.
                            if (attributesList.isEmpty()) {
                                logger.warn("can not find any attributes while filter is {}, page is {}", andFilter, page);
                                break;
                            } else {
                                processUserFromAttributes(ldap, attributesList, users, ldapSyncReport, errorUsers);
                                attributesList.clear();
                            }
                            // Persist the current page.
                            if (!users.isEmpty()) {
                                switch (syncType) {
                                    case "sync":
                                        compareWithDbAndInsert(users, ldapSyncReport, errorUsers, ldapHistoryId);
                                        break;
                                    case "disable":
                                        disable(users, ldapSyncReport, errorUsers, ldapHistoryId);
                                        break;
                                    default:
                                        break;
                                }
                            }
                            users.clear();
                            errorUsers.clear();
                            page++;
                        } while (processor.hasMore());
                        // } while (processor.getCookie().getCookie() != null);
                        return null;
                    }
                }, false, ignorePartialResultException, ignoreNameNotFoundException);
    }
private AndFilter getFilter(LdapDTO ldap) {
AndFilter andFilter = getAndFilterByObjectClass(ldap);
HardcodedFilter hardcodedFilter = new HardcodedFilter(ldap.getCustomFilter());
andFilter.and(hardcodedFilter);
return andFilter;
}
private AttributesMapper getDefaultAttributesMapper() {
return new AttributesMapper() {
@Override
public Object mapFromAttributes(Attributes attributes) {
return attributes;
}
};
}
    /**
     * Converts one page of raw LDAP attribute sets into {@link UserDTO}s.
     * Entries missing the uuid attribute are only counted as errors; entries
     * missing login name or email are additionally recorded as error users.
     * Real name and phone are optional and failures to read them are only
     * logged.
     *
     * @param ldap           LDAP configuration holding the attribute-name mappings
     * @param attributesList raw attribute sets returned by the search
     * @param users          output list of successfully converted users
     * @param ldapSyncReport counters updated for every skipped entry
     * @param errorUsers     output list of entries that could not be converted
     */
    private void processUserFromAttributes(LdapDTO ldap, List<Attributes> attributesList,
                                           List<UserDTO> users, LdapSyncReport ldapSyncReport,
                                           List<LdapErrorUserDTO> errorUsers) {
        Long organizationId = ldap.getOrganizationId();
        String loginNameFiled = ldap.getLoginNameField();
        String emailFiled = ldap.getEmailField();
        String uuidField = ldap.getUuidField();
        attributesList.forEach(attributes -> {
            Attribute uuidAttribute = attributes.get(uuidField);
            Attribute loginNameAttribute = attributes.get(loginNameFiled);
            Attribute emailAttribute = attributes.get(emailFiled);
            // Real name and phone mappings are optional in the configuration.
            Attribute realNameAttribute = null;
            if (ldap.getRealNameField() != null) {
                realNameAttribute = attributes.get(ldap.getRealNameField());
            }
            Attribute phoneAttribute = null;
            if (ldap.getPhoneField() != null) {
                phoneAttribute = attributes.get(ldap.getPhoneField());
            }
            String uuid;
            String loginName;
            String email;
            String realName = null;
            String phone = null;
            // Without a uuid the entry cannot even be recorded as an error
            // user, so it is only counted and logged.
            if (uuidAttribute == null) {
                ldapSyncReport.incrementError();
                logger.error("the uuid {} of attributes {} can not be null, skip the user", ldap.getUuidField(), attributes);
                return;
            }
            try {
                uuid = uuidAttribute.get().toString();
            } catch (NamingException e) {
                ldapSyncReport.incrementError();
                logger.error("attributes {} get uuid attribute exception {}, skip the user", attributes, e);
                return;
            }
            // Missing/unreadable login name: record an error user with a cause.
            if (loginNameAttribute == null) {
                ldapSyncReport.incrementError();
                LdapErrorUserDTO errorUser = new LdapErrorUserDTO();
                errorUser.setUuid(uuid);
                errorUser.setCause(LdapErrorUserCause.LOGIN_NAME_FIELD_NULL.value());
                errorUsers.add(errorUser);
                return;
            }
            try {
                loginName = loginNameAttribute.get().toString();
            } catch (NamingException e) {
                ldapSyncReport.incrementError();
                LdapErrorUserDTO errorUser = new LdapErrorUserDTO();
                errorUser.setUuid(uuid);
                errorUser.setCause(LdapErrorUserCause.LOGIN_NAME_GET_EXCEPTION.value());
                errorUsers.add(errorUser);
                return;
            }
            // Missing/unreadable email: record an error user with a cause.
            if (emailAttribute == null) {
                ldapSyncReport.incrementError();
                LdapErrorUserDTO errorUser = new LdapErrorUserDTO();
                errorUser.setUuid(uuid);
                errorUser.setCause(LdapErrorUserCause.EMAIL_FIELD_NULL.value());
                errorUsers.add(errorUser);
                return;
            }
            try {
                email = emailAttribute.get().toString();
            } catch (NamingException e) {
                ldapSyncReport.incrementError();
                LdapErrorUserDTO errorUser = new LdapErrorUserDTO();
                errorUser.setUuid(uuid);
                errorUser.setCause(LdapErrorUserCause.EMAIL_GET_EXCEPTION.value());
                errorUser.setLoginName(loginName);
                errorUsers.add(errorUser);
                return;
            }
            // Optional fields: failures are logged but do not skip the user.
            try {
                if (realNameAttribute != null) {
                    realName = realNameAttribute.get().toString();
                }
                if (phoneAttribute != null) {
                    phone = phoneAttribute.get().toString();
                }
            } catch (NamingException e) {
                logger.warn("realName or phone field attribute get exception {}", e);
            }
            // Build the user with the defaults applied to every synced account.
            UserDTO user = new UserDTO();
            user.setUuid(uuid);
            user.setOrganizationId(organizationId);
            user.setLanguage("zh_CN");
            user.setTimeZone("CTT");
            user.setEnabled(true);
            user.setLocked(false);
            user.setLdap(true);
            user.setAdmin(false);
            user.setPassword("<PASSWORD>");
            user.setLastPasswordUpdatedAt(new Date(System.currentTimeMillis()));
            user.setLoginName(loginName);
            user.setEmail(email);
            user.setRealName(realName);
            user.setPhone(phone);
            users.add(user);
        });
    }
private AndFilter getAndFilterByObjectClass(LdapDTO ldapDTO) {
String objectClass = ldapDTO.getObjectClass();
String[] arr = objectClass.split(",");
AndFilter andFilter = new AndFilter();
for (String str : arr) {
andFilter.and(new EqualsFilter(OBJECT_CLASS, str));
}
return andFilter;
}
private LdapSyncReport initLdapSyncReport(LdapDTO ldap) {
Long organizationId = ldap.getOrganizationId();
LdapSyncReport ldapSyncReport = new LdapSyncReport(organizationId);
ldapSyncReport.setLdapId(ldap.getId());
ldapSyncReport.setStartTime(new Date(System.currentTimeMillis()));
return ldapSyncReport;
}
    /**
     * Compares one page of LDAP users with the database, then inserts the new
     * ones and re-enables users previously disabled by a sync run. Users whose
     * email already exists under another account are recorded as error users.
     *
     * @param users          users converted from the current LDAP page
     * @param ldapSyncReport counters updated while processing the page
     * @param errorUsers     collector for users that could not be inserted
     * @param ldapHistoryId  history row id the error users are attached to
     */
    private void compareWithDbAndInsert(List<UserDTO> users, LdapSyncReport ldapSyncReport,
                                        List<LdapErrorUserDTO> errorUsers, Long ldapHistoryId) {
        ldapSyncReport.incrementCount(Long.valueOf(users.size()));
        List<UserDTO> insertUsers = new ArrayList<>();
        Set<String> nameSet = users.stream().map(UserDTO::getLoginName).collect(Collectors.toSet());
        Set<String> emailSet = users.stream().map(UserDTO::getEmail).collect(Collectors.toSet());
        // Oracle limits IN-lists to 1000 entries, so split the sets into
        // chunks smaller than 1000 before querying.
        List<Set<String>> subNameSet = CollectionUtils.subSet(nameSet, 999);
        List<Set<String>> subEmailSet = CollectionUtils.subSet(emailSet, 999);
        Set<String> existedNames = new HashSet<>();
        Set<String> existedEmails = new HashSet<>();
        subNameSet.forEach(set -> existedNames.addAll(userMapper.matchLoginName(set)));
        subEmailSet.forEach(set -> existedEmails.addAll(userMapper.matchEmail(set)));
        users.forEach(user -> {
            String loginName = user.getLoginName();
            if (!existedNames.contains(loginName)) {
                if (existedEmails.contains(user.getEmail())) {
                    // Duplicate email under a different login: record an error user.
                    ldapSyncReport.incrementError();
                    LdapErrorUserDTO errorUser = new LdapErrorUserDTO();
                    errorUser.setUuid(user.getUuid());
                    errorUser.setLoginName(loginName);
                    errorUser.setEmail(user.getEmail());
                    errorUser.setRealName(user.getRealName());
                    errorUser.setPhone(user.getPhone());
                    errorUser.setCause(LdapErrorUserCause.EMAIL_ALREADY_EXISTED.value());
                    errorUsers.add(errorUser);
                } else {
                    insertUsers.add(user);
                    ldapSyncReport.incrementNewInsert();
                }
            } else {
                UserDTO userDTO = selectByLoginName(loginName);
                // lastUpdatedBy == 0 means the user was disabled by a previous
                // sync run, so re-enable it; users disabled manually in the UI
                // are skipped.
                if (userDTO.getLastUpdatedBy().equals(0L) && !userDTO.getEnabled()) {
                    organizationUserService.enableUser(ldapSyncReport.getOrganizationId(), userDTO.getId());
                    ldapSyncReport.incrementUpdate();
                }
            }
        });
        insertUser(ldapSyncReport, errorUsers, insertUsers);
        insertErrorUser(errorUsers, ldapHistoryId);
        cleanAfterDataPersistence(insertUsers, nameSet, emailSet, subNameSet, subEmailSet, existedNames, existedEmails);
    }
private UserDTO selectByLoginName(String loginName) {
UserDTO dto = new UserDTO();
dto.setLoginName(loginName);
return userMapper.selectOne(dto);
}
    /**
     * Disables every matched LDAP user that is currently enabled. Users whose
     * disable call fails are recorded as error users; users that were never
     * synced into the database are skipped.
     *
     * @param users          users converted from the current LDAP page
     * @param ldapSyncReport counters updated per disabled user
     * @param errorUsers     collector for users whose disable call failed
     * @param ldapHistoryId  history row id the error users are attached to
     */
    private void disable(List<UserDTO> users, LdapSyncReport ldapSyncReport,
                         List<LdapErrorUserDTO> errorUsers, Long ldapHistoryId) {
        users.forEach(user -> {
            UserDTO userDTO = selectByLoginName(user.getLoginName());
            // Skip users that do not exist in the database.
            if (userDTO == null) {
                return;
            }
            if (userDTO.getEnabled()) {
                try {
                    organizationUserService.disableUser(userDTO.getOrganizationId(), userDTO.getId());
                    ldapSyncReport.incrementUpdate();
                } catch (CommonException e) {
                    // Disabling failed (cause recorded as SEND_MESSAGE_FAILED);
                    // keep going with the remaining users.
                    LdapErrorUserDTO errorUser = new LdapErrorUserDTO();
                    errorUser.setUuid(user.getUuid());
                    errorUser.setLoginName(user.getLoginName());
                    errorUser.setEmail(user.getEmail());
                    errorUser.setRealName(user.getRealName());
                    errorUser.setPhone(user.getPhone());
                    errorUser.setCause(LdapErrorUserCause.SEND_MESSAGE_FAILED.value());
                    errorUsers.add(errorUser);
                }
            }
        });
        insertErrorUser(errorUsers, ldapHistoryId);
    }
/**
 * Stamps each error row with the sync-history id and persists it one by one.
 * A no-op when there are no error rows.
 */
private void insertErrorUser(List<LdapErrorUserDTO> errorUsers, Long ldapHistoryId) {
    if (errorUsers.isEmpty()) {
        return;
    }
    for (LdapErrorUserDTO errorUser : errorUsers) {
        errorUser.setLdapHistoryId(ldapHistoryId);
        ldapErrorUserMapper.insert(errorUser);
    }
}
/**
 * Batch-creates the pending users. Rows the batch call rejects are appended
 * to {@code errorUsers}, and the sync report's insert/error counters are
 * adjusted by the number of failures. A no-op when there is nothing to insert.
 */
private void insertUser(LdapSyncReport ldapSyncReport, List<LdapErrorUserDTO> errorUsers, List<UserDTO> insertUsers) {
    if (insertUsers.isEmpty()) {
        return;
    }
    List<LdapErrorUserDTO> failures = organizationUserService.batchCreateUsers(insertUsers);
    errorUsers.addAll(failures);
    Long failureCount = Long.valueOf(failures.size());
    ldapSyncReport.reduceInsert(failureCount);
    ldapSyncReport.incrementError(failureCount);
}
/**
 * Creates the sync-history row for this run, stamped with the current time as
 * the sync start, and returns the freshly re-selected record (so DB-generated
 * columns are populated).
 *
 * @throws CommonException when the insert does not affect exactly one row
 */
private LdapHistoryDTO initLdapHistory(Long ldapId) {
    LdapHistoryDTO history = new LdapHistoryDTO();
    history.setLdapId(ldapId);
    history.setSyncBeginTime(new Date(System.currentTimeMillis()));
    if (ldapHistoryMapper.insertSelective(history) != 1) {
        throw new CommonException("error.ldapHistory.insert");
    }
    return ldapHistoryMapper.selectByPrimaryKey(history);
}
/**
 * Releases all per-run working collections after the sync results have been
 * persisted, so a long-lived task instance does not retain stale data.
 */
private void cleanAfterDataPersistence(List<UserDTO> insertUsers, Set<String> nameSet, Set<String> emailSet,
                                       List<Set<String>> subNameSet, List<Set<String>> subEmailSet,
                                       Set<String> existedNames, Set<String> existedEmails) {
    // Lookup caches of already-existing identities.
    existedNames.clear();
    existedEmails.clear();
    // De-duplication sets built from the LDAP payload.
    nameSet.clear();
    emailSet.clear();
    subNameSet.clear();
    subEmailSet.clear();
    // Users staged for batch insertion.
    insertUsers.clear();
}
// Callback contract invoked once an LDAP synchronization run has finished.
public interface FinishFallback {
/**
* Invoked after synchronization completes.
*
* @param ldapSyncReport the accumulated result counters of the sync run
* @param ldapHistoryDTO the history row created at sync start, to be finalized
* @return the persisted, up-to-date history record
*/
LdapHistoryDTO callback(LdapSyncReport ldapSyncReport, LdapHistoryDTO ldapHistoryDTO);
}
/**
 * Default {@link FinishFallback}: stamps the sync end time and the result
 * counters onto the history row, persists the update, and returns the
 * re-selected record.
 */
@Component
public class FinishFallbackImpl implements FinishFallback {
    // Constructor-injected collaborator; final so it cannot be reassigned.
    private final LdapHistoryMapper ldapHistoryMapper;
    public FinishFallbackImpl(LdapHistoryMapper ldapHistoryMapper) {
        this.ldapHistoryMapper = ldapHistoryMapper;
    }
    @Override
    public LdapHistoryDTO callback(LdapSyncReport ldapSyncReport, LdapHistoryDTO ldapHistoryDTO) {
        ldapHistoryDTO.setSyncEndTime(new Date(System.currentTimeMillis()));
        ldapHistoryDTO.setNewUserCount(ldapSyncReport.getInsert());
        ldapHistoryDTO.setUpdateUserCount(ldapSyncReport.getUpdate());
        ldapHistoryDTO.setErrorUserCount(ldapSyncReport.getError());
        ldapHistoryMapper.updateByPrimaryKeySelective(ldapHistoryDTO);
        // Re-select so the caller observes DB-side values (e.g. audit columns).
        return ldapHistoryMapper.selectByPrimaryKey(ldapHistoryDTO);
    }
}
}
<|start_filename|>src/main/resources/script/db/oauth_access_token.groovy<|end_filename|>
package script.db
// Liquibase changelog (Groovy DSL) for OAUTH_ACCESS_TOKEN, the table backing
// the Spring Security OAuth JDBC token store.
databaseChangeLog(logicalFilePath: 'oauth_access_token.groovy') {
// Initial table creation: token id as primary key, serialized token and
// authentication objects stored as BLOBs.
changeSet(author: '<EMAIL>', id: '2018-03-26-oauth_access_token') {
createTable(tableName: "OAUTH_ACCESS_TOKEN") {
column(name: 'TOKEN_ID', type: 'VARCHAR(128)', remarks: 'Access Token ID') {
constraints(primaryKey: true, primaryKeyName: 'PK_OAUTH_ACCESS_TOKEN')
}
column(name: 'TOKEN', type: 'BLOB', remarks: 'Token对象')
column(name: 'AUTHENTICATION_ID', type: 'VARCHAR(255)', remarks: '授权ID,用于索引授权对象')
column(name: 'USER_NAME', type: 'VARCHAR(32)', remarks: '用户名')
column(name: 'CLIENT_ID', type: 'VARCHAR(32)', remarks: 'Client ID')
column(name: 'AUTHENTICATION', type: 'BLOB', remarks: '授权对象')
column(name: 'REFRESH_TOKEN', type: 'VARCHAR(128)', remarks: 'Refresh Token ID')
}
}
// Adds the standard Choerodon audit columns (nullable, with defaults, so the
// migration is safe on already-populated tables).
changeSet(author: 'superlee', id: '2019-04-24-oauth-access-token-add-column') {
addColumn(tableName: 'OAUTH_ACCESS_TOKEN') {
column(name: "OBJECT_VERSION_NUMBER", type: "BIGINT UNSIGNED", defaultValue: "1") {
constraints(nullable: true)
}
column(name: "CREATED_BY", type: "BIGINT UNSIGNED", defaultValue: "0") {
constraints(nullable: true)
}
column(name: "CREATION_DATE", type: "DATETIME", defaultValueComputed: "CURRENT_TIMESTAMP")
column(name: "LAST_UPDATED_BY", type: "BIGINT UNSIGNED", defaultValue: "0") {
constraints(nullable: true)
}
column(name: "LAST_UPDATE_DATE", type: "DATETIME", defaultValueComputed: "CURRENT_TIMESTAMP")
}
}
// Adds a table-level remark for documentation purposes.
changeSet(author: 'superlee', id: '2019-07-18-oauth-access-token-add-remark') {
setTableRemarks(tableName:"OAUTH_ACCESS_TOKEN",remarks: "oauth认证access token表")
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/annotation/NamingRuleTransHandler.java<|end_filename|>
package io.choerodon.iam.infra.annotation;
import io.choerodon.core.exception.CommonException;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.PropertyAccessorFactory;
import org.springframework.core.MethodParameter;
import org.springframework.http.MediaType;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.web.bind.support.WebDataBinderFactory;
import org.springframework.web.context.request.NativeWebRequest;
import org.springframework.web.method.support.HandlerMethodArgumentResolver;
import org.springframework.web.method.support.ModelAndViewContainer;
import javax.servlet.http.HttpServletRequest;
import java.lang.reflect.Field;
import java.util.Map;
/**
* @author dengyouquan
* 根据指定的命名规则解析请求参数,只能用在get方法自定义类上,不能使用在json数据转换中
**/
public class NamingRuleTransHandler implements HandlerMethodArgumentResolver {
private static MappingJackson2HttpMessageConverter converter = new MappingJackson2HttpMessageConverter();
@Override
public boolean supportsParameter(MethodParameter parameter) {
return parameter.hasParameterAnnotation(NamingRuleTrans.class);
}
@Override
public Object resolveArgument(MethodParameter methodParameter, ModelAndViewContainer mavContainer, NativeWebRequest nativeWebRequest, WebDataBinderFactory binderFactory) {
HttpServletRequest servletRequest = nativeWebRequest.getNativeRequest(HttpServletRequest.class);
String contentType = servletRequest.getContentType();
if (!"GET".equals(servletRequest.getMethod()) || MediaType.APPLICATION_JSON_VALUE.equals(contentType)) {
throw new CommonException("error.parse.json.methodArgumentResolver.notSupportJson");
}
if (methodParameter.getParameterType().getDeclaredFields().length == 0) {
throw new CommonException("error.parse.json.methodArgumentResolver.notEntity");
}
Object result = null;
try {
NamingRuleTrans namingRuleTrans = methodParameter.getParameterAnnotation(NamingRuleTrans.class);
NamingRuleTransStrategy strategy = namingRuleTrans.value();
Object obj = BeanUtils.instantiate(methodParameter.getParameterType());
BeanWrapper wrapper = PropertyAccessorFactory.forBeanPropertyAccess(obj);
Map<String, String[]> parameterMap = nativeWebRequest.getParameterMap();
for (Map.Entry<String, String[]> map : parameterMap.entrySet()) {
String paramName = map.getKey();
String[] paramValue = map.getValue();
Field[] declaredFields = obj.getClass().getDeclaredFields();
for (Field declaredField : declaredFields) {
String transParamName = null;
switch (strategy) {
case UNDER_LINE:
transParamName = camelToUnderLine(paramName);
break;
case CAMEL:
transParamName = underLineToCamel(paramName);
break;
default:
transParamName = underLineToCamel(paramName);
break;
}
if (declaredField.getName().equals(transParamName)) {
wrapper.setPropertyValue(transParamName, paramValue);
}
}
}
result = obj;
} catch (Exception e) {
throw new CommonException("error.parse.json.methodArgumentResolver");
}
return result;
}
private String underLineToCamel(String name) {
StringBuilder result = new StringBuilder();
if (name == null || name.isEmpty()) {
return "";
} else if (!name.contains("_")) {
// 不含下划线,仅将首字母小写
return name.substring(0, 1).toLowerCase() + name.substring(1);
}
String[] camels = name.split("_");
for (String camel : camels) {
if (camel.isEmpty()) {
continue;
}
if (result.length() == 0) {
result.append(camel.toLowerCase());
} else {
result.append(camel.substring(0, 1).toUpperCase());
result.append(camel.substring(1).toLowerCase());
}
}
return result.toString();
}
private String camelToUnderLine(String name) {
StringBuilder sb = new StringBuilder(name);
int temp = 0;
for (int i = 0; i < name.length(); i++) {
if (Character.isUpperCase(name.charAt(i))) {
sb.insert(i + temp, "_");
temp += 1;
}
}
return sb.toString().toLowerCase();
}
}
<|start_filename|>react/src/app/iam/stores/global/service/index.js<|end_filename|>
// Barrel module: re-export the store's default export.
export { default } from './ServiceStore';
<|start_filename|>src/main/java/io/choerodon/iam/app/service/PermissionService.java<|end_filename|>
package io.choerodon.iam.app.service;
import com.github.pagehelper.Page;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.api.dto.CheckPermissionDTO;
import io.choerodon.iam.infra.dto.PermissionDTO;
import java.util.List;
import java.util.Set;
/**
* @author wuguokai
*/
// Application-service contract for querying and maintaining permissions.
public interface PermissionService {
// Pages through permissions matching the example DTO and a fuzzy search param.
PageInfo<PermissionDTO> pagingQuery(PageRequest pageRequest, PermissionDTO permissionDTO, String param);
// Checks each entry of the given list and returns it annotated with the
// caller's access result.
List<CheckPermissionDTO> checkPermission(List<CheckPermissionDTO> checkPermissionDTOList);
// Collects the distinct permissions granted through any of the given roles.
Set<PermissionDTO> queryByRoleIds(List<Long> roleIds);
// Queries permissions filtered by resource level, owning service and code;
// null arguments presumably act as wildcards — TODO confirm against impl.
List<PermissionDTO> query(String level, String serviceName, String code);
// Deletes the permission identified by its unique code.
void deleteByCode(String code);
// Pages the permissions attached to a single role.
PageInfo<PermissionDTO> listPermissionsByRoleId(PageRequest pageRequest, Long id, String params);
}
<|start_filename|>react/src/app/iam/stores/global/dashboard-setting/index.js<|end_filename|>
// Barrel module: re-export the store's default export.
export { default } from './DashboardSettingStore';
<|start_filename|>src/test/groovy/io/choerodon/iam/app/service/impl/OrganizationProjectServiceImplSpec.groovy<|end_filename|>
package io.choerodon.iam.app.service.impl
import io.choerodon.asgard.saga.dto.StartInstanceDTO
import io.choerodon.asgard.saga.feign.SagaClient
import io.choerodon.core.oauth.CustomUserDetails
import io.choerodon.core.oauth.DetailsHelper
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.app.service.ProjectTypeService
import io.choerodon.iam.app.service.OrganizationProjectService
import io.choerodon.iam.app.service.RoleMemberService
import io.choerodon.iam.app.service.UserService
import io.choerodon.iam.infra.asserts.OrganizationAssertHelper
import io.choerodon.iam.infra.asserts.ProjectAssertHelper
import io.choerodon.iam.infra.asserts.UserAssertHelper
import io.choerodon.iam.infra.dto.ProjectDTO
import io.choerodon.iam.infra.feign.AsgardFeignClient
import io.choerodon.iam.infra.mapper.LabelMapper
import io.choerodon.iam.infra.mapper.ProjectCategoryMapper
import io.choerodon.iam.infra.mapper.ProjectMapCategoryMapper
import io.choerodon.iam.infra.mapper.ProjectMapper
import io.choerodon.iam.infra.mapper.ProjectRelationshipMapper
import io.choerodon.iam.infra.mapper.ProjectTypeMapper
import io.choerodon.iam.infra.mapper.RoleMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import org.springframework.transaction.annotation.Transactional
import spock.lang.Specification
import java.lang.reflect.Field
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
* @author dengyouquan* */
// Spock specification for OrganizationProjectServiceImpl. The service under
// test is rebuilt in setup() with mocked SagaClient/AsgardFeignClient so saga
// interactions can be verified.
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class OrganizationProjectServiceImplSpec extends Specification {
    private SagaClient sagaClient = Mock(SagaClient)
    private AsgardFeignClient asgardFeignClient = Mock(AsgardFeignClient)
    @Autowired
    private UserService userService
    @Autowired
    private ProjectTypeService projectTypeService
    @Autowired
    private OrganizationProjectService organizationProjectService
    @Autowired
    private ProjectMapCategoryMapper projectMapCategoryMapper
    @Autowired
    private ProjectCategoryMapper projectCategoryMapper
    @Autowired
    private ProjectMapper projectMapper
    @Autowired
    ProjectAssertHelper projectAssertHelper
    @Autowired
    ProjectTypeMapper projectTypeMapper
    @Autowired
    OrganizationAssertHelper organizationAssertHelper
    @Autowired
    UserAssertHelper userAssertHelper
    @Autowired
    RoleMapper roleMapper
    @Autowired
    LabelMapper labelMapper
    @Autowired
    ProjectRelationshipMapper projectRelationshipMapper
    @Autowired
    RoleMemberService roleMemberService
    def setup() {
        given: "构造organizationProjectService"
        organizationProjectService = new OrganizationProjectServiceImpl(sagaClient, userService, asgardFeignClient, projectMapCategoryMapper,
                projectCategoryMapper, projectMapper, projectAssertHelper, projectTypeMapper, organizationAssertHelper, userAssertHelper,
                roleMapper, labelMapper, projectRelationshipMapper, roleMemberService)
        // Force the saga path (devopsMessage=true) and disable categories via
        // reflection, since both flags are normally injected from config.
        Field field = organizationProjectService.getClass().getDeclaredField("devopsMessage")
        field.setAccessible(true)
        field.set(organizationProjectService, true)
        Field field1 = organizationProjectService.getClass().getDeclaredField("categoryEnable")
        field1.setAccessible(true)
        field1.set(organizationProjectService, false)
        DetailsHelper.setCustomUserDetails(1, "zh_CN")
    }
    @Transactional
    def "CreateProject"() {
        given: "构造请求参数"
        ProjectDTO projectDTO = new ProjectDTO()
        projectDTO.setName("name")
        projectDTO.setCode("code")
        projectDTO.setEnabled(true)
        projectDTO.setOrganizationId(1L)
        when: "调用方法"
        organizationProjectService.createProject(projectDTO)
        then: "校验结果"
        noExceptionThrown()
    }
    def "QueryAll"() {
        when: "调用方法"
        def result = organizationProjectService.queryAll(new ProjectDTO())
        then: "校验结果"
        result.isEmpty()
    }
    @Transactional
    def "Update"() {
        given: "mock"
        ProjectDTO projectDTO = new ProjectDTO()
        projectDTO.setName("name")
        projectDTO.setCode("code")
        projectDTO.setEnabled(true)
        projectDTO.setOrganizationId(1L)
        long id = organizationProjectService.create(projectDTO).getId()
        ProjectDTO dto = projectMapper.selectByPrimaryKey(id)
        dto.setName("name1")
        CustomUserDetails customUserDetails = new CustomUserDetails("admin", "admin")
        customUserDetails.setUserId(1L)
        customUserDetails.setLanguage("zh_CN")
        customUserDetails.setTimeZone("zkk")
        DetailsHelper.setCustomUserDetails(customUserDetails)
        when: "调用方法"
        def entity = organizationProjectService.update(1L, dto)
        then: "校验结果"
        entity.objectVersionNumber == 2
    }
    @Transactional
    def "EnableProject"() {
        given: ""
        ProjectDTO projectDTO = new ProjectDTO()
        projectDTO.setName("name")
        projectDTO.setCode("code")
        projectDTO.setEnabled(true)
        projectDTO.setOrganizationId(1L)
        long id = organizationProjectService.create(projectDTO).getId()
        when: "调用方法"
        organizationProjectService.enableProject(1L, id, 1L)
        then: "校验结果"
        1 * sagaClient.startSaga(_ as String, _ as StartInstanceDTO)
    }
    @Transactional
    def "DisableProject"() {
        given: ""
        ProjectDTO projectDTO = new ProjectDTO()
        projectDTO.setName("name")
        projectDTO.setCode("code")
        projectDTO.setEnabled(true)
        projectDTO.setOrganizationId(1L)
        long id = organizationProjectService.create(projectDTO).getId()
        when: "调用方法"
        // FIX: this feature previously invoked enableProject (copy-paste from
        // the EnableProject spec above); it must exercise disableProject.
        organizationProjectService.disableProject(1L, id, 1L)
        then: "校验结果"
        1 * sagaClient.startSaga(_ as String, _ as StartInstanceDTO)
    }
}
<|start_filename|>react/config.js<|end_filename|>
const config = {
server: 'http://api.staging.saas.hand-china.com',
// server: 'http://api.c7nf.choerodon.staging.saas.hand-china.com',
master: '@choerodon/master',
projectType: 'choerodon',
buildType: 'single',
dashboard: {
iam: {
components: 'react/src/app/iam/dashboard/*',
locale: 'react/src/app/iam/locale/dashboard/*',
},
},
resourcesLevel: ['site', 'organization', 'project', 'user'],
};
module.exports = config;
<|start_filename|>react/src/app/iam/stores/user/announcement-info/index.js<|end_filename|>
// Barrel module: re-export the store's default export.
export { default } from './AnnouncementInfoStore';
<|start_filename|>src/main/java/io/choerodon/iam/app/service/UploadHistoryService.java<|end_filename|>
package io.choerodon.iam.app.service;
import io.choerodon.core.swagger.PermissionData;
import io.choerodon.eureka.event.EurekaEventPayload;
import io.choerodon.iam.infra.dto.RoleDTO;
import io.choerodon.iam.infra.dto.UploadHistoryDTO;
import java.util.Map;
/**
* @author superlee
*/
// Service contract for querying upload histories, plus the nested contract
// for parsing swagger docs into permissions.
public interface UploadHistoryService {
// Returns the most recent upload history row for the given user, upload type
// and source scope (e.g. organization/project).
UploadHistoryDTO latestHistory(Long userId, String type, Long sourceId, String sourceType);
/**
* Parses service swagger documentation into permission records.
*
* @author superlee
*/
interface ParsePermissionService {
/**
* Parses the swagger documentation tree of the service referenced by the
* Eureka event payload.
*
* @param payload the received registration event
*/
void parser(EurekaEventPayload payload);
// Processes a single swagger-declared permission; returns the resulting
// permission code. Exact semantics live in the implementation — see impl.
String processPermission(String[] roles, String path, String method, String description, PermissionData permission, String serviceName, String resourceCode, Map<String, RoleDTO> initRoleMap);
// Loads the built-in roles keyed by role code.
Map<String, RoleDTO> queryInitRoleByCode();
}
}
<|start_filename|>src/main/resources/script/db/fd_project_relationship.groovy<|end_filename|>
package script.db
// Liquibase changelog (Groovy DSL) for FD_PROJECT_RELATIONSHIP, which stores
// parent/child (program) relationships between projects with validity periods.
databaseChangeLog(logicalFilePath: 'script/db/fd_project_relationship.groovy') {
// Initial table creation; a sequence is created first on databases that
// support sequences instead of auto-increment.
changeSet(author: '<EMAIL>', id: '2019-03-05-fd_project_relationship') {
if (helper.dbType().isSupportSequence()) {
createSequence(sequenceName: 'FD_PROJECT_RELATIONSHIP_S', startValue: "1")
}
createTable(tableName: "FD_PROJECT_RELATIONSHIP") {
column(name: 'ID', type: 'BIGINT UNSIGNED', autoIncrement: true, remarks: '表ID,主键,供其他表做外键,unsigned bigint、单表时自增、步长为 1。') {
constraints(primaryKey: true, primaryKeyName: 'PK_FD_PROJECT_RELATIONSHIP')
}
column(name: 'PROJECT_ID', type: 'BIGINT UNSIGNED', remarks: '项目ID。') {
constraints(nullable: false)
}
column(name: 'PARENT_ID', type: 'BIGINT UNSIGNED', remarks: '父亲ID。', defaultValue: "0") {
constraints(nullable: false)
}
column(name: "START_DATE", type: "DATETIME", remarks: '开始时间')
column(name: "END_DATE", type: "DATETIME", remarks: '结束时间')
column(name: 'IS_ENABLED', type: 'TINYINT UNSIGNED', defaultValue: "1", remarks: '是否启用。1启用,0未启用') {
constraints(nullable: false)
}
column(name: "LAST_UPDATE_DATE", type: "DATETIME", defaultValueComputed: "CURRENT_TIMESTAMP", remarks: "更新时间")
column(name: "OBJECT_VERSION_NUMBER", type: "BIGINT UNSIGNED", defaultValue: "1")
column(name: "CREATED_BY", type: "BIGINT UNSIGNED", defaultValue: "0")
column(name: "CREATION_DATE", type: "DATETIME", defaultValueComputed: "CURRENT_TIMESTAMP")
column(name: "LAST_UPDATED_BY", type: "BIGINT UNSIGNED", defaultValue: "0")
}
}
// Adds the owning program column after PARENT_ID.
changeSet(author: '<EMAIL>', id: '2019-03-05-fd_project_relationship-add-program_id') {
addColumn(tableName: 'FD_PROJECT_RELATIONSHIP') {
column(name: 'PROGRAM_ID', type: 'BIGINT UNSIGNED', remarks: '所属项目ID(只包含PROGRAM类型)', afterColumn: 'PARENT_ID')
}
}
// Adds a table-level remark for documentation purposes.
changeSet(author: 'superlee', id: '2019-07-18-fd-project-relationship-add-remark') {
setTableRemarks(tableName:"FD_PROJECT_RELATIONSHIP",remarks: "项目关系表")
}
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/RelationshipCheckDTO.java<|end_filename|>
package io.choerodon.iam.api.dto;
import io.swagger.annotations.ApiModelProperty;
/**
 * Result payload for checking whether a proposed project relationship period
 * conflicts with an existing occupied period. When a conflict exists, the
 * occupying project's code and name are carried along.
 */
public class RelationshipCheckDTO {
    @ApiModelProperty(value = "是否与项目其余被占用时间冲突")
    private Boolean result;
    @ApiModelProperty(value = "占用的项目code")
    private String projectCode;
    @ApiModelProperty(value = "占用的项目name")
    private String projectName;

    /** Default constructor for (de)serialization frameworks. */
    public RelationshipCheckDTO() {
    }

    public RelationshipCheckDTO(Boolean result, String projectCode, String projectName) {
        this.result = result;
        this.projectCode = projectCode;
        this.projectName = projectName;
    }

    public Boolean getResult() {
        return result;
    }

    public void setResult(Boolean result) {
        this.result = result;
    }

    public String getProjectCode() {
        return projectCode;
    }

    public void setProjectCode(String projectCode) {
        this.projectCode = projectCode;
    }

    public String getProjectName() {
        return projectName;
    }

    public void setProjectName(String projectName) {
        this.projectName = projectName;
    }
}
<|start_filename|>react/src/app/iam/stores/global/token/index.js<|end_filename|>
// Barrel module: re-export the store's default export.
export { default } from './SaasTokenStore';
<|start_filename|>react/src/app/iam/stores/global/system-setting/SystemSettingStore.js<|end_filename|>
import { action, computed, observable } from 'mobx';
import { axios, store } from '@choerodon/boot';
@store('SystemSettingStore')
class SystemSettingStore {
@observable logoLoadingStatus = false;
@observable userSetting = {};
@observable logo = '';
@observable favicon = '';
@observable submitting = false;
@computed
get getUserSetting() {
return this.userSetting ? this.userSetting : {};
}
@action
setUserSetting(data) {
this.userSetting = data;
}
@computed
get getFavicon() {
return this.favicon ? this.favicon : '';
}
@action
setFavicon(favicon) {
this.favicon = favicon;
}
@computed
get getLogo() {
return this.logo ? this.logo : '';
}
@action
setLogo(logo) {
this.logo = logo;
}
@action
putUserSetting(data) {
return axios.put('/iam/v1/system/setting', data);
}
@action
postUserSetting(data) {
return axios.post('/iam/v1/system/setting', data);
}
@action
resetUserSetting() {
return axios.delete('/iam/v1/system/setting');
}
loadUserSetting() {
return axios.get('/iam/v1/system/setting');
}
}
const systemSettingStore = new SystemSettingStore();
export default systemSettingStore;
<|start_filename|>react/src/app/iam/stores/dashboard/failedSaga/index.js<|end_filename|>
// Barrel module: re-export the store's default export.
export { default } from './FailedSagaStore';
<|start_filename|>react/src/app/iam/containers/organization/organization-setting/password-policy/PasswordPolicy.js<|end_filename|>
import React, { Component } from 'react';
import { inject, observer } from 'mobx-react';
import {
Button,
Form,
TextField,
TextArea,
NumberField,
SelectBox,
} from 'choerodon-ui/pro';
import { injectIntl, FormattedMessage } from 'react-intl';
import { Content, Page, Permission } from '@choerodon/boot';
import PasswordPolicyStore from '../../../../stores/organization/password-policy/index';
import LoadingBar from '../../../../components/loadingBar/index';
import './PasswordPolicy.scss';
const inputPrefix = 'organization.pwdpolicy';
const Option = SelectBox.Option;
// Organization-level password/login security policy page. Loads the current
// policy from PasswordPolicyStore, renders two editable sections (password
// rules, login security) and saves both together.
@injectIntl
@inject('AppState')
@observer
export default class PasswordPolicy extends Component {
// Refs to the two <Form> instances, assigned in render().
passwordForm;
securityForm;
constructor(props) {
super(props);
this.state = {
loading: false,
showPwd: true, // whether the password security section is shown
showLogin: true, // whether the login security section is shown
lockStatus: false, // whether account locking is enabled in the login policy
codeStatus: false, // whether captcha is enabled in the login policy
submitting: false,
organizationId: this.props.AppState.currentMenuType.id,
};
}
componentDidMount() {
this.loadData();
}
/**
* Refresh handler: re-fetches the policy.
*/
reload = () => {
this.loadData();
};
/**
* Loads the current organization's password policy into the store and mirrors
* the captcha/lock flags into local state.
*/
loadData() {
const { organizationId } = this.state;
this.setState({
loading: true,
});
PasswordPolicyStore.loadData(organizationId)
.then((data) => {
if (data.failed) {
Choerodon.prompt(data.message);
} else {
PasswordPolicyStore.setPasswordPolicy(data);
const codeStatus = data.enableCaptcha; // captcha enabled in login policy
const lockStatus = data.enableLock; // locking enabled in login policy
this.setState({
loading: false,
codeStatus,
lockStatus,
});
}
})
.catch((error) => {
Choerodon.handleResponseError(error);
this.setState({
loading: false,
});
});
}
// Validates both forms, then PUTs the store's current policy object and
// reloads on success.
handleSubmit = async (e) => {
const { AppState, intl } = this.props;
const { passwordForm, securityForm } = this;
const [result1, result2] = await Promise.all([
passwordForm.checkValidity(),
securityForm.checkValidity(),
]);
const isBothFormValid = result1 && result2;
if (isBothFormValid) {
const oldPolicy = PasswordPolicyStore.passwordPolicy;
this.setState({
submitting: true,
showPwd: true,
showLogin: true,
});
PasswordPolicyStore.updatePasswordPolicy(
AppState.currentMenuType.id,
oldPolicy.id,
{
...oldPolicy,
}
)
.then((data) => {
this.setState({ submitting: false });
Choerodon.prompt(intl.formatMessage({ id: 'save.success' }));
PasswordPolicyStore.setPasswordPolicy(data);
this.loadData();
})
.catch((error) => {
this.setState({ submitting: false });
Choerodon.handleResponseError(error);
});
}
};
// Writes a single field into the store's policy object (immutably).
updatePasswordPolicyStoreField(fieldName, value) {
const oldPolicy = PasswordPolicyStore.passwordPolicy;
PasswordPolicyStore.setPasswordPolicy({
...oldPolicy,
[fieldName]: value,
});
}
// Returns an onChange handler bound to one policy field.
fieldValueChangeHandlerMaker = (fieldName) => {
return (value) => {
this.updatePasswordPolicyStoreField(fieldName, value);
};
};
// The four toggle handlers below update the store field and flip the
// corresponding local visibility/enable flag.
handleEnablePasswordChange = (value) => {
this.updatePasswordPolicyStoreField('enablePassword', value);
this.setState((prevState) => ({
showPwd: !prevState.showPwd,
}));
};
handleEnableSecurityChange = (value) => {
this.updatePasswordPolicyStoreField('enableSecurity', value);
this.setState((prevState) => ({
showLogin: !prevState.showLogin,
}));
};
handleEnableLockChange = (value) => {
this.updatePasswordPolicyStoreField('enableLock', value);
this.setState((prevState) => ({
lockStatus: !prevState.lockStatus,
}));
};
handleEnableCaptchaChange = (value) => {
this.updatePasswordPolicyStoreField('enableCaptcha', value);
this.setState((prevState) => ({
codeStatus: !prevState.codeStatus,
}));
};
render() {
const { AppState, form, intl } = this.props;
// const { getFieldDecorator } = form;
const { loading, submitting, showPwd, codeStatus, lockStatus } = this.state;
const inputHalfWidth = '236px';
const inputWidth = '512px';
// Policy values come straight from the store; fall back to {} before load.
const passwordPolicy = PasswordPolicyStore.passwordPolicy || {};
const {
originalPassword,
enablePassword,
notUsername,
enableSecurity,
enableCaptcha,
enableLock,
minLength,
maxLength,
digitsCount,
lowercaseCount,
uppercaseCount,
specialCharCount,
notRecentCount,
regularExpression,
maxCheckCaptcha,
maxErrorTime,
lockedExpireTime,
} = passwordPolicy;
// Main body: spinner while loading, otherwise the two policy sections.
const mainContent = loading ? (
<LoadingBar />
) : (
<div>
<div className="strategy-switch-container">
<h3 className="strategy-switch-title">
<FormattedMessage id={`${inputPrefix}.password`} />
</h3>
<div className="strategy-switch">
<span className="strategy-switch-label">是否启用:</span>
<SelectBox
// label={<FormattedMessage id={`${inputPrefix}.enabled.password`} />}
value={enablePassword}
onChange={this.handleEnablePasswordChange}
colSpan={6}
>
<Option value={true}>
<FormattedMessage id="yes" />
</Option>
<Option value={false}>
<FormattedMessage id="no" />
</Option>
</SelectBox>
</div>
</div>
<Form
labelLayout="float"
className="strategy-form"
onSubmit={this.handleSubmit}
columns={6}
style={{ display: showPwd ? 'block' : 'none' }}
ref={(node) => (this.passwordForm = node)}
>
<SelectBox
label={<FormattedMessage id={`${inputPrefix}.notusername`} />}
value={notUsername}
onChange={this.fieldValueChangeHandlerMaker('notUsername')}
colSpan={6}
>
<Option value={true}>
<FormattedMessage id="yes" />
</Option>
<Option value={false}>
<FormattedMessage id="no" />
</Option>
</SelectBox>
<TextField
label={<FormattedMessage id={`${inputPrefix}.originalpassword`} />}
value={originalPassword || ''}
onChange={this.fieldValueChangeHandlerMaker('originalPassword')}
colSpan={6}
/>
<NumberField
label={<FormattedMessage id={`${inputPrefix}.minlength`} />}
min={0}
value={minLength || 0}
onChange={this.fieldValueChangeHandlerMaker('minLength')}
colSpan={3}
step={1}
/>
<NumberField
label={<FormattedMessage id={`${inputPrefix}.maxlength`} />}
min={0}
value={maxLength || 0}
onChange={this.fieldValueChangeHandlerMaker('maxLength')}
colSpan={3}
step={1}
/>
<NumberField
min={0}
label={<FormattedMessage id={`${inputPrefix}.digitscount`} />}
value={digitsCount}
onChange={this.fieldValueChangeHandlerMaker('digitsCount')}
colSpan={2}
step={1}
/>
<NumberField
label={<FormattedMessage id={`${inputPrefix}.lowercasecount`} />}
min={0}
value={lowercaseCount}
onChange={this.fieldValueChangeHandlerMaker('lowercaseCount')}
colSpan={2}
step={1}
/>
<NumberField
label={<FormattedMessage id={`${inputPrefix}.uppercasecount`} />}
min={0}
value={uppercaseCount}
onChange={this.fieldValueChangeHandlerMaker('uppercaseCount')}
colSpan={2}
step={1}
/>
<NumberField
label={<FormattedMessage id={`${inputPrefix}.specialcharcount`} />}
min={0}
value={specialCharCount}
onChange={this.fieldValueChangeHandlerMaker('specialCharCount')}
colSpan={3}
step={1}
/>
<NumberField
label={<FormattedMessage id={`${inputPrefix}.notrecentcount`} />}
min={0}
value={notRecentCount}
onChange={this.fieldValueChangeHandlerMaker('notRecentCount')}
colSpan={3}
step={1}
/>
<TextArea
label={<FormattedMessage id={`${inputPrefix}.regularexpression`} />}
rows={2}
value={regularExpression}
onChange={this.fieldValueChangeHandlerMaker('regularExpression')}
colSpan={6}
/>
</Form>
<div className="strategy-switch-container">
<h3 className="strategy-switch-title">
<FormattedMessage id={`${inputPrefix}.login`} />
</h3>
<div className="strategy-switch">
<span>是否启用:</span>
<SelectBox
// label={
//   <FormattedMessage id={`${inputPrefix}.enabled.security`} />
// }
value={enableSecurity}
onChange={this.handleEnableSecurityChange}
>
<Option value={true}>
<FormattedMessage id="yes" />
</Option>
<Option value={false}>
<FormattedMessage id="no" />
</Option>
</SelectBox>
</div>
</div>
<Form
labelLayout="float"
columns={6}
className="strategy-form"
style={{
display: enableSecurity ? 'block' : 'none',
}}
ref={(node) => (this.securityForm = node)}
>
<SelectBox
label={<FormattedMessage id={`${inputPrefix}.enabled.captcha`} />}
value={enableCaptcha}
onChange={this.handleEnableCaptchaChange}
colSpan={6}
>
<Option value={true}>
<FormattedMessage id="yes" />
</Option>
<Option value={false}>
<FormattedMessage id="no" />
</Option>
</SelectBox>
{codeStatus ? (
<NumberField
label={<FormattedMessage id={`${inputPrefix}.maxerror.count`} />}
min={0}
value={enableCaptcha ? maxCheckCaptcha : 3}
onChange={this.fieldValueChangeHandlerMaker('maxCheckCaptcha')}
colSpan={6}
step={1}
/>
) : (
''
)}
<SelectBox
label={<FormattedMessage id={`${inputPrefix}.enabled.lock`} />}
value={enableLock}
onChange={this.handleEnableLockChange}
colSpan={6}
>
<Option value={true}>
<FormattedMessage id="yes" />
</Option>
<Option value={false}>
<FormattedMessage id="no" />
</Option>
</SelectBox>
{lockStatus
? [
<NumberField
label={<FormattedMessage id={`${inputPrefix}.maxerror.count`} />}
min={0}
value={enableLock ? maxErrorTime : 5}
onChange={this.fieldValueChangeHandlerMaker('maxErrorTime')}
colSpan={6}
key="maxErrorTime"
step={1}
/>,
<NumberField
label={<FormattedMessage id={`${inputPrefix}.locktime`} />}
min={0}
value={lockedExpireTime}
onChange={this.fieldValueChangeHandlerMaker('lockedExpireTime')}
colSpan={6}
key="lockExpireTime"
step={1}
/>,
]
: ''}
</Form>
<div className="divider" />
<div className="button-group">
<Permission service={['iam-service.password-policy.update']}>
<Button
funcType="raised"
type="submit"
color="blue"
loading={submitting}
onClick={this.handleSubmit}
>
<FormattedMessage id="save" />
</Button>
</Permission>
<Button
funcType="raised"
onClick={this.reload}
disabled={submitting}
style={{ color: '#3F51B5' }}
>
<FormattedMessage id="cancel" />
</Button>
</div>
</div>
);
// Page shell: gates on the two password-policy permissions.
return (
<Page
className="password-policy"
service={[
'iam-service.password-policy.update',
'iam-service.password-policy.queryByOrganizationId',
]}
>
<Content values={{ name: AppState.currentMenuType.name }}>
<div className="policy-container">{mainContent}</div>
</Content>
</Page>
);
}
}
<|start_filename|>react/src/app/iam/stores/global/project-type/index.js<|end_filename|>
// Barrel module: re-export the store's default export.
export { default } from './ProjectTypeStore';
<|start_filename|>src/test/groovy/io/choerodon/iam/api/controller/v1/RoleMemberControllerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.controller.v1
import com.github.pagehelper.PageInfo
import io.choerodon.base.domain.PageRequest
import io.choerodon.core.exception.ExceptionResponse
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.api.query.ClientRoleQuery
import io.choerodon.iam.api.dto.RoleAssignmentDeleteDTO
import io.choerodon.iam.api.dto.RoleAssignmentSearchDTO
import io.choerodon.iam.api.dto.UploadHistoryDTO
import io.choerodon.iam.app.service.UserService
import io.choerodon.iam.infra.dto.ClientDTO
import io.choerodon.iam.infra.dto.MemberRoleDTO
import io.choerodon.iam.infra.dto.ProjectDTO
import io.choerodon.iam.infra.dto.RoleDTO
import io.choerodon.iam.infra.dto.UserDTO
import io.choerodon.iam.infra.enums.MemberType
import io.choerodon.iam.infra.mapper.*
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.web.client.TestRestTemplate
import org.springframework.context.annotation.Import
import org.springframework.core.io.Resource
import org.springframework.web.multipart.MultipartFile
import spock.lang.Shared
import spock.lang.Specification
import spock.lang.Stepwise
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * Integration tests for RoleMemberController.
 * <p>
 * Fixture data (3 roles, 3 users, 3 member-role rows, 1 project, 3 clients) is
 * inserted once via the {@code needInit} flag in setup() and removed once via
 * the {@code needClean} flag in cleanup(); @Stepwise keeps feature-method order
 * deterministic so the shared state is valid across methods.
 *
 * @author dengyouquan
 */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
@Stepwise
class RoleMemberControllerSpec extends Specification {
    private static final String BASE_PATH = "/v1"
    @Autowired
    private TestRestTemplate restTemplate
    @Autowired
    private UserService userService
    @Autowired
    private MemberRoleMapper memberRoleMapper
    @Autowired
    private RoleMapper roleMapper
    @Autowired
    private UserMapper userMapper
    @Autowired
    private ProjectMapper projectMapper
    @Autowired
    private ClientMapper clientMapper
    // one-shot guards: fixtures are created on the first setup() call and
    // deleted on the cleanup() that follows the last feature method
    @Shared
    def needInit = true
    @Shared
    def needClean = false
    @Shared
    def memberRoleDOList = new ArrayList<MemberRoleDTO>()
    @Shared
    def roleDOList = new ArrayList<RoleDTO>()
    @Shared
    def userDOList = new ArrayList<UserDTO>()
    @Shared
    def clientDOList
    @Shared
    def projectDO = new ProjectDTO()

    def setup() {
        if (needInit) {
            given: "构造参数"
            needInit = false
            for (int i = 0; i < 3; i++) {
                RoleDTO roleDO = new RoleDTO()
                roleDO.setCode("role/site/default/rolemember" + i)
                roleDO.setName("权限管理员")
                roleDO.setResourceLevel("site")
                roleDOList.add(roleDO)
            }
            projectDO.setCode("hand")
            projectDO.setName("汉得")
            projectDO.setOrganizationId(1L)
            when: "插入记录"
            def count = 0
            count += projectMapper.insert(projectDO)
            for (RoleDTO roleDO : roleDOList) {
                count += roleMapper.insert(roleDO)
            }
            for (int i = 0; i < 3; i++) {
                UserDTO user = new UserDTO()
                user.setLoginName("dengyouquan" + i)
                user.setRealName("dengyouquan" + i)
                user.setEmail("dengyouquan" + i + "@qq.com")
                user.setSourceId(projectDO.getId())
                user.setOrganizationId(1L)
                userDOList.add(user)
            }
            for (UserDTO dto : userDOList) {
                count++
                userMapper.insert(dto)
            }
            for (int i = 0; i < 3; i++) {
                MemberRoleDTO memberRoleDO = new MemberRoleDTO()
                memberRoleDO.setMemberType("user")
                memberRoleDO.setRoleId(roleDOList.get(i).getId())
                memberRoleDO.setSourceType("site")
                memberRoleDO.setSourceId(0)
                // fix: setMemberId() was previously called with no argument,
                // leaving memberId unset; each member-role row must reference
                // the matching fixture user
                memberRoleDO.setMemberId(userDOList.get(i).getId())
                memberRoleDOList.add(memberRoleDO)
            }
            for (MemberRoleDTO dto : memberRoleDOList) {
                count++
                memberRoleMapper.insert(dto)
            }
            clientDOList = initClient()
            then: "校验结果"
            // 1 project + 3 roles + 3 users + 3 member-roles (clients not counted)
            count == 10
        }
    }

    def cleanup() {
        if (needClean) {
            given: ""
            def count = 0
            needClean = false
            when: "删除记录"
            for (MemberRoleDTO memberRoleDO : memberRoleDOList) {
                count += memberRoleMapper.deleteByPrimaryKey(memberRoleDO)
            }
            for (UserDTO userDO : userDOList) {
                count += userMapper.deleteByPrimaryKey(userDO)
            }
            for (RoleDTO roleDO : roleDOList) {
                count += roleMapper.deleteByPrimaryKey(roleDO)
            }
            for (ClientDTO clientDO : clientDOList) {
                clientMapper.deleteByPrimaryKey(clientDO)
            }
            count += projectMapper.deleteByPrimaryKey(projectDO)
            then: "校验结果"
            count == 10
        }
    }

    /** Inserts three client fixtures under organization 1 and returns them. */
    List<ClientDTO> initClient() {
        List<ClientDTO> clientDOList = new ArrayList<>()
        for (int i = 0; i < 3; i++) {
            ClientDTO clientDO = new ClientDTO()
            clientDO.setName("client" + i)
            clientDO.setOrganizationId(1L)
            clientMapper.insertSelective(clientDO)
            clientDOList.add(clientDO)
        }
        return clientDOList
    }

    def "CreateOrUpdateOnSiteLevel"() {
        given: "构造参数列表"
        def paramsMap = new HashMap<String, Object>()
        Long[] memberIds = new Long[2]
        memberIds[0] = userDOList.get(0).getId()
        memberIds[1] = userDOList.get(1).getId()
        paramsMap.put("is_edit", true)
        paramsMap.put("member_ids", memberIds)
        when: "调用方法[异常-role id为空]"
        def memberRoleDO = new MemberRoleDTO()
        def memberRoleDOList1 = new ArrayList<MemberRoleDTO>()
        memberRoleDOList1.add(memberRoleDO)
        def entity = restTemplate.postForEntity(BASE_PATH + "/site/role_members?is_edit={is_edit}&member_ids={member_ids}", memberRoleDOList1, ExceptionResponse, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.roleId.null")
        when: "调用方法[异常-role不存在]"
        memberRoleDO.setRoleId(1000L)
        entity = restTemplate.postForEntity(BASE_PATH + "/site/role_members?is_edit={is_edit}&member_ids={member_ids}", memberRoleDOList1, ExceptionResponse, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.role.not.exist")
        when: "调用方法"
        entity = restTemplate.postForEntity(BASE_PATH + "/site/role_members?is_edit={is_edit}&member_ids={member_ids}", memberRoleDOList, List, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        !entity.getBody().isEmpty()
    }

    def "CreateOrUpdateClientRoleOnSiteLevel"() {
        given: "构造参数列表"
        def paramsMap = new HashMap<String, Object>()
        Long[] memberIds = new Long[2]
        memberIds[0] = clientDOList.get(0).getId()
        memberIds[1] = clientDOList.get(1).getId()
        paramsMap.put("is_edit", true)
        paramsMap.put("member_ids", memberIds)
        when: "调用方法[异常-role id为空]"
        def memberRoleDO = new MemberRoleDTO()
        memberRoleDO.setMemberType(MemberType.CLIENT.value())
        def memberRoleDOList1 = new ArrayList<MemberRoleDTO>()
        memberRoleDOList1.add(memberRoleDO)
        def entity = restTemplate.postForEntity(BASE_PATH + "/site/role_members?is_edit={is_edit}&member_ids={member_ids}", memberRoleDOList1, ExceptionResponse, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.roleId.null")
        when: "调用方法[异常-role不存在]"
        memberRoleDO.setRoleId(1000L)
        entity = restTemplate.postForEntity(BASE_PATH + "/site/role_members?is_edit={is_edit}&member_ids={member_ids}", memberRoleDOList1, ExceptionResponse, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.role.not.exist")
        when: "调用方法"
        memberRoleDO.setRoleId(roleDOList.get(0).getId())
        memberRoleDO.setMemberType(MemberType.CLIENT.value())
        entity = restTemplate.postForEntity(BASE_PATH + "/site/role_members?is_edit={is_edit}&member_ids={member_ids}", memberRoleDOList, String, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody() != null
    }

    def "CreateOrUpdateOnOrganizationLevel"() {
        given: "构造参数列表"
        def paramsMap = new HashMap<String, Object>()
        Long[] memberIds = new Long[1]
        memberIds[0] = 1L
        paramsMap.put("organization_id", 1L)
        paramsMap.put("is_edit", true)
        paramsMap.put("member_ids", memberIds)
        MemberRoleDTO memberRoleDO = new MemberRoleDTO()
        memberRoleDO.setSourceType("organization")
        def memberRoleDOList1 = memberRoleMapper.select(memberRoleDO)
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/organizations/{organization_id}/role_members?is_edit={is_edit}&member_ids={member_ids}", memberRoleDOList, ExceptionResponse, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.roles.in.same.level")
        // when: "调用方法"
        // def memberIds1 = new Long[1]
        // memberIds1[0] = 1L
        // paramsMap.put("member_ids", memberIds1)
        // entity = restTemplate.postForEntity(BASE_PATH + "/organizations/{organization_id}/role_members?is_edit={is_edit}&member_ids={member_ids}", memberRoleDOList1, ExceptionResponse, paramsMap)
        //
        // then: "校验结果"
        // entity.statusCode.is2xxSuccessful()
        // entity.getBody().getCode().equals("error.roles.in.same.level")
        //// !entity.getBody().isEmpty()
        when: "调用方法"
        def memberIds2 = new Long[1]
        memberIds2[0] = 1L
        paramsMap.put("member_ids", memberIds2)
        paramsMap.put("organization_id", 1L)
        memberRoleDOList1 = new ArrayList<MemberRoleDTO>()
        MemberRoleDTO memberRoleDO1 = new MemberRoleDTO()
        memberRoleDO1.setMemberId(1L)
        memberRoleDO1.setMemberType(MemberType.CLIENT.value())
        memberRoleDO1.setRoleId(2L)
        memberRoleDO1.setSourceId(1L)
        memberRoleDO1.setSourceType("organization")
        memberRoleDOList1.add(memberRoleDO1)
        // fix: the request body must be the member-role list that was just
        // built above, not the ExceptionResponse class literal (the list was
        // previously constructed but never sent)
        entity = restTemplate.postForEntity(BASE_PATH + "/organizations/{organization_id}/role_members?is_edit={is_edit}&member_ids={member_ids}", memberRoleDOList1, ExceptionResponse, paramsMap)
        then: "校验结果"
        noExceptionThrown()
    }

    def "CreateOrUpdateOnProjectLevel"() {
        given: "构造参数列表"
        def paramsMap = new HashMap<String, Object>()
        Long[] memberIds = new Long[1]
        memberIds[0] = 1L
        paramsMap.put("project_id", 1L)
        paramsMap.put("is_edit", true)
        paramsMap.put("member_ids", memberIds)
        MemberRoleDTO memberRoleDO = new MemberRoleDTO()
        memberRoleDO.setSourceType("project")
        // select returns an empty result set here; the endpoint accepts it
        def memberRoleDOList1 = memberRoleMapper.select(memberRoleDO)
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/projects/{project_id}/role_members?is_edit={is_edit}&member_ids={member_ids}", memberRoleDOList1, String, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        when: "调用方法"
        memberRoleDO.setMemberType(MemberType.CLIENT.value())
        entity = restTemplate.postForEntity(BASE_PATH + "/projects/{project_id}/role_members?is_edit={is_edit}&member_ids={member_ids}", memberRoleDOList1, String, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
    }

    def "PagingQueryUsersByRoleIdOnSiteLevel"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        def roleId = roleDOList.get(0).getId()
        paramsMap.put("role_id", roleId)
        def roleAssignmentSearchDTO = new RoleAssignmentSearchDTO()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/site/role_members/users?role_id={role_id}", roleAssignmentSearchDTO, PageInfo, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().list.size() != 0
    }

    def "PagingQueryClientsByRoleIdOnSiteLevel"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        def roleId = roleDOList.get(0).getId()
        paramsMap.put("role_id", roleId)
        def clientRoleSearchDTO = new ClientRoleQuery()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/site/role_members/clients?role_id={role_id}", clientRoleSearchDTO, PageInfo, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody() != null
    }

    def "PagingQueryUsersByRoleIdOnOrganizationLevel"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        def roleId = roleDOList.get(0).getId()
        paramsMap.put("role_id", roleId)
        paramsMap.put("organization_id", 1L)
        def roleAssignmentSearchDTO = new RoleAssignmentSearchDTO()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/organizations/{organization_id}/role_members/users?role_id={role_id}", roleAssignmentSearchDTO, PageInfo, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().list.isEmpty()
    }

    def "PagingQueryClientsByRoleIdOnOrganizationLevel"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        def roleId = roleDOList.get(0).getId()
        paramsMap.put("role_id", roleId)
        paramsMap.put("organization_id", 1L)
        def clientRoleSearchDTO = new ClientRoleQuery()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/organizations/{organization_id}/role_members/clients?role_id={role_id}", clientRoleSearchDTO, PageInfo, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().list.isEmpty()
    }

    def "PagingQueryUsersByRoleIdOnProjectLevel"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        def roleId = roleDOList.get(0).getId()
        paramsMap.put("role_id", roleId)
        paramsMap.put("project_id", projectDO.getId())
        def roleAssignmentSearchDTO = new RoleAssignmentSearchDTO()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/projects/{project_id}/role_members/users?role_id={role_id}", roleAssignmentSearchDTO, PageInfo, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        // fixture users only hold site-level roles, so the project page is empty
        entity.getBody().list.isEmpty()
    }

    def "PagingQueryClientsByRoleIdOnProjectLevel"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        def roleId = roleDOList.get(0).getId()
        paramsMap.put("role_id", roleId)
        paramsMap.put("project_id", projectDO.getId())
        def clientRoleSearchDTO = new ClientRoleQuery()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/projects/{project_id}/role_members/clients?role_id={role_id}", clientRoleSearchDTO, PageInfo, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        // fixture members only hold site-level roles, so the project page is empty
        entity.getBody().list.isEmpty()
    }

    def "DeleteOnSiteLevel"() {
        given: "构造参数列表"
        def paramsMap = new HashMap<String, Object>()
        def roleAssignmentDeleteDTO = new RoleAssignmentDeleteDTO()
        roleAssignmentDeleteDTO.setSourceId(0)
        roleAssignmentDeleteDTO.setMemberType("user")
        roleAssignmentDeleteDTO.setView("userId")
        List<Long> roleIds = new ArrayList<>()
        roleIds.add(roleDOList.get(0).getId())
        roleIds.add(roleDOList.get(1).getId())
        roleIds.add(roleDOList.get(2).getId())
        def map = new HashMap<Long, List<Long>>()
        map.put(userDOList.get(0).getId(), roleIds)
        roleAssignmentDeleteDTO.setData(map)
        when: "调用方法[异常-view不合法]"
        def entity = restTemplate.postForEntity(BASE_PATH + "/site/role_members/delete", roleAssignmentDeleteDTO, ExceptionResponse, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.member_role.view.illegal")
        when: "调用方法"
        roleAssignmentDeleteDTO.setView("userView")
        entity = restTemplate.postForEntity(BASE_PATH + "/site/role_members/delete", roleAssignmentDeleteDTO, Void, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
    }

    def "DeleteOnOrganizationLevel"() {
        given: "构造参数列表"
        def paramsMap = new HashMap<String, Object>()
        def roleAssignmentDeleteDTO = new RoleAssignmentDeleteDTO()
        roleAssignmentDeleteDTO.setSourceId(0)
        roleAssignmentDeleteDTO.setMemberType("user")
        roleAssignmentDeleteDTO.setView("userView")
        List<Long> roleIds = new ArrayList<>()
        roleIds.add(roleDOList.get(0).getId())
        roleIds.add(roleDOList.get(1).getId())
        roleIds.add(roleDOList.get(2).getId())
        def map = new HashMap<Long, List<Long>>()
        map.put(userDOList.get(0).getId(), roleIds)
        roleAssignmentDeleteDTO.setData(map)
        paramsMap.put("organization_id", 1L)
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/organizations/{organization_id}/role_members/delete", roleAssignmentDeleteDTO, Void, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
    }

    def "DeleteOnProjectLevel"() {
        given: "构造参数列表"
        def paramsMap = new HashMap<String, Object>()
        def roleAssignmentDeleteDTO = new RoleAssignmentDeleteDTO()
        roleAssignmentDeleteDTO.setSourceId(0)
        roleAssignmentDeleteDTO.setMemberType("user")
        roleAssignmentDeleteDTO.setView("userView")
        List<Long> roleIds = new ArrayList<>()
        roleIds.add(roleDOList.get(0).getId())
        roleIds.add(roleDOList.get(1).getId())
        roleIds.add(roleDOList.get(2).getId())
        def map = new HashMap<Long, List<Long>>()
        map.put(userDOList.get(0).getId(), roleIds)
        roleAssignmentDeleteDTO.setData(map)
        paramsMap.put("project_id", 1L)
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/projects/{project_id}/role_members/delete", roleAssignmentDeleteDTO, Void, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
    }

    def "ListRolesWithUserCountOnSiteLevel"() {
        given: "构造请求参数"
        def roleAssignmentSearchDTO = new RoleAssignmentSearchDTO()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/site/role_members/users/count", roleAssignmentSearchDTO, List)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        !entity.getBody().isEmpty()
    }

    def "ListRolesWithClientCountOnSiteLevel"() {
        given: "构造请求参数"
        def roleAssignmentSearchDTO = new RoleAssignmentSearchDTO()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/site/role_members/clients/count", roleAssignmentSearchDTO, List)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        !entity.getBody().isEmpty()
    }

    def "ListRolesWithUserCountOnOrganizationLevel"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("organization_id", 1L)
        def roleAssignmentSearchDTO = new RoleAssignmentSearchDTO()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/organizations/{organization_id}/role_members/users/count", roleAssignmentSearchDTO, List, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().size() == 2
    }

    def "ListRolesWithClientCountOnOrganizationLevel"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("organization_id", 1L)
        def roleAssignmentSearchDTO = new RoleAssignmentSearchDTO()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/organizations/{organization_id}/role_members/clients/count", roleAssignmentSearchDTO, List, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody() != null
    }

    def "ListRolesWithUserCountOnProjectLevel"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("project_id", projectDO.getId())
        def roleAssignmentSearchDTO = new RoleAssignmentSearchDTO()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/projects/{project_id}/role_members/users/count", roleAssignmentSearchDTO, List, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().size() == 4
    }

    def "ListRolesWithClientCountOnProjectLevel"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("project_id", projectDO.getId())
        def roleAssignmentSearchDTO = new RoleAssignmentSearchDTO()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/projects/{project_id}/role_members/clients/count", roleAssignmentSearchDTO, List, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody() != null
    }

    def "PagingQueryUsersWithSiteLevelRoles"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        def roleAssignmentSearchDTO = new RoleAssignmentSearchDTO()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/site/role_members/users/roles", roleAssignmentSearchDTO, PageInfo, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().list.size() != 0
    }

    def "PagingQueryClientsWithSiteLevelRoles"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        def roleAssignmentSearchDTO = new RoleAssignmentSearchDTO()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/site/role_members/clients/roles", roleAssignmentSearchDTO, PageInfo, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody() != null
    }

    def "PagingQueryUsersWithOrganizationLevelRoles"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("organization_id", 1L)
        def roleAssignmentSearchDTO = new RoleAssignmentSearchDTO()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/organizations/{organization_id}/role_members/users/roles", roleAssignmentSearchDTO, PageInfo, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        // the userDO records inserted by setup() belong to this organization
        entity.getBody().list.size() != 0
    }

    def "pagingQueryClientsWithOrganizationLevelRoles"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("organization_id", 1L)
        def clientSearch = new ClientRoleQuery()
        clientSearch.setClientName("client")
        clientSearch.setRoleName("管理")
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/organizations/{organization_id}/role_members/clients/roles", clientSearch, PageInfo, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().list.size() == 0
    }

    def "PagingQueryUsersWithProjectLevelRoles"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("project_id", projectDO.getId())
        def roleAssignmentSearchDTO = new RoleAssignmentSearchDTO()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/projects/{project_id}/role_members/users/roles", roleAssignmentSearchDTO, String, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody() != null
    }

    def "PagingQueryClientsWithProjectLevelRoles"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("project_id", projectDO.getId())
        def roleAssignmentSearchDTO = new RoleAssignmentSearchDTO()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/projects/{project_id}/role_members/clients/roles", roleAssignmentSearchDTO, String, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody() != null
    }

    def "GetUserWithOrgLevelRolesByUserId"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("user_id", userDOList.get(0).getId())
        paramsMap.put("organization_id", 1L)
        when: "调用方法"
        def entity = restTemplate.getForEntity(BASE_PATH + "/organizations/{organization_id}/role_members/users/{user_id}", List, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        //!entity.getBody().isEmpty()
    }

    def "GetUserWithProjLevelRolesByUserId"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("user_id", userDOList.get(0).getId())
        paramsMap.put("project_id", projectDO.getId())
        when: "调用方法"
        def entity = restTemplate.getForEntity(BASE_PATH + "/projects/{project_id}/role_members/users/{user_id}", List, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().isEmpty()
    }

    def "DownloadTemplatesOnSite"() {
        when: "调用方法"
        def entity = restTemplate.getForEntity(BASE_PATH + "/site/role_members/download_templates", Resource)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().exists()
    }

    def "DownloadTemplatesOnOrganization"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("organization_id", 1L)
        when: "调用方法"
        def entity = restTemplate.getForEntity(BASE_PATH + "/organizations/{organization_id}/role_members/download_templates", Resource, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().exists()
    }

    def "DownloadTemplatesOnProject"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("project_id", projectDO.getId())
        when: "调用方法"
        def entity = restTemplate.getForEntity(BASE_PATH + "/projects/{project_id}/role_members/download_templates", Resource, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().exists()
    }

    def "Import2MemberRoleOnSite"() {
        given: "构造请求参数"
        // a null multipart file still exercises the endpoint's 2xx path
        MultipartFile file = null
        def paramsMap = new HashMap<String, Object>()
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/site/role_members/batch_import", file, Void, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
    }

    def "Import2MemberRoleOnOrganization"() {
        given: "构造请求参数"
        MultipartFile file = null
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("organization_id", 1L)
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/organizations/{organization_id}/role_members/batch_import", file, Void, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
    }

    def "Import2MemberRoleOnProject"() {
        given: "构造请求参数"
        MultipartFile file = null
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("project_id", projectDO.getId())
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH + "/projects/{project_id}/role_members/batch_import", file, Void, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
    }

    def "LatestHistoryOnSite"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("user_id", userDOList.get(0).getId())
        when: "调用方法"
        def entity = restTemplate.getForEntity(BASE_PATH + "/site/member_role/users/{user_id}/upload/history", UploadHistoryDTO, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
    }

    def "LatestHistoryOnOrganization"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("user_id", userDOList.get(0).getId())
        paramsMap.put("organization_id", 1L)
        when: "调用方法"
        def entity = restTemplate.getForEntity(BASE_PATH + "/organizations/{organization_id}/member_role/users/{user_id}/upload/history", UploadHistoryDTO, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
    }

    def "LatestHistoryOnProject"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("user_id", userDOList.get(0).getId())
        paramsMap.put("project_id", projectDO.getId())
        when: "调用方法"
        // arm cleanup() so the shared fixtures are removed after this method
        needClean = true
        def entity = restTemplate.getForEntity(BASE_PATH + "/projects/{project_id}/member_role/users/{user_id}/upload/history", UploadHistoryDTO, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
    }

    def "queryAllClients"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("size", 10)
        paramsMap.put("page", 0)
        when: "调用方法"
        needClean = true
        def entity = restTemplate.getForEntity(BASE_PATH + "/all/clients", PageInfo, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
    }

    def "queryAllUsers"() {
        given: "构造请求参数"
        // direct controller invocation (no HTTP round-trip): only userService is needed
        RoleMemberController controller = new RoleMemberController(null, userService, null, null, null, null)
        PageRequest pageRequest = new PageRequest(1, 20)
        when: "调用方法"
        def result = controller.queryAllUsers(pageRequest, 1L, "param")
        then: "校验结果"
        result.statusCode.is2xxSuccessful()
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/MenuController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.app.service.MenuService;
import io.choerodon.iam.infra.dto.MenuDTO;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import javax.validation.Valid;
import java.util.List;
/**
 * REST endpoints for querying and maintaining menus at site / organization /
 * project level. All write operations require SITE-level permission; the
 * organization-scoped query is restricted to the two whitelisted top-menu codes.
 *
 * @author wuguokai
 * @author superlee
 */
@RestController
@RequestMapping("/v1/menus")
public class MenuController {

    // NOTE(review): these are mutable public statics; if no external code
    // reassigns them they should be declared final — confirm before changing.
    public static String ORG_TOP_MENU_CODE = "choerodon.code.top.organization";
    public static String PROJ_TOP_MENU_CODE = "choerodon.code.top.project";

    // assigned once in the constructor, never rebound
    private final MenuService menuService;

    public MenuController(MenuService menuService) {
        this.menuService = menuService;
    }

    /**
     * Fetches a single menu by its primary key.
     *
     * @param id menu id
     * @return the menu wrapped in a 200 response
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation("根据id查询目录")
    @GetMapping("/{id}")
    public ResponseEntity<MenuDTO> query(@PathVariable("id") Long id) {
        return new ResponseEntity<>(menuService.query(id), HttpStatus.OK);
    }

    /**
     * Returns the menu tree the current user can access for the given top-menu
     * code and source (organization/project) id. Any logged-in user may call this.
     */
    @Permission(type = ResourceType.SITE, permissionLogin = true)
    @ApiOperation("获取可以访问的菜单列表")
    @GetMapping
    public ResponseEntity<MenuDTO> menus(@RequestParam String code,
                                         @RequestParam(name = "source_id") Long sourceId) {
        return new ResponseEntity<>(menuService.menus(code, sourceId), HttpStatus.OK);
    }

    /**
     * Menu-configuration screen: returns the full menu tree for a level code.
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation("菜单配置界面根据层级查菜单")
    @GetMapping("/menu_config")
    public ResponseEntity<MenuDTO> menuConfig(@RequestParam String code) {
        return new ResponseEntity<>(menuService.menuConfig(code), HttpStatus.OK);
    }

    /**
     * Organization-scoped variant of {@link #menuConfig(String)}. Only the two
     * top-menu codes may be queried from this endpoint; anything else is
     * rejected with {@code error.menu.code.cannot.query}.
     *
     * @throws CommonException if {@code code} is not a whitelisted top-menu code
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation("查询组织层菜单")
    @GetMapping("/org/{organization_id}/menu_config")
    public ResponseEntity<MenuDTO> orgMenuConfig(@PathVariable(name = "organization_id") Long organizationId,
                                                 @RequestParam String code) {
        if (!(PROJ_TOP_MENU_CODE.equalsIgnoreCase(code) || ORG_TOP_MENU_CODE.equalsIgnoreCase(code))) {
            throw new CommonException("error.menu.code.cannot.query");
        }
        return new ResponseEntity<>(menuService.menuConfig(code), HttpStatus.OK);
    }

    /** Persists the menu tree edited on the configuration screen. */
    @Permission(type = ResourceType.SITE)
    @ApiOperation("菜单配置保存")
    @PostMapping("/menu_config")
    public ResponseEntity saveMenuConfig(@RequestParam String code, @RequestBody List<MenuDTO> menus) {
        menuService.saveMenuConfig(code, menus);
        return new ResponseEntity<>(HttpStatus.OK);
    }

    /** Creates a new menu entry; the payload is bean-validated. */
    @Permission(type = ResourceType.SITE)
    @ApiOperation("创建目录")
    @PostMapping
    public ResponseEntity<MenuDTO> create(@RequestBody @Valid MenuDTO menuDTO) {
        return new ResponseEntity<>(menuService.create(menuDTO), HttpStatus.OK);
    }

    /** Updates the menu identified by {@code id}. */
    @Permission(type = ResourceType.SITE)
    @ApiOperation("更新目录")
    @PutMapping("/{id}")
    public ResponseEntity<MenuDTO> update(@PathVariable("id") Long id,
                                          @RequestBody @Valid MenuDTO menuDTO) {
        return new ResponseEntity<>(menuService.update(id, menuDTO), HttpStatus.OK);
    }

    /** Deletes a menu by id; the service layer only allows non-default menus. */
    @Permission(type = ResourceType.SITE)
    @ApiOperation("根据id删除菜单,只能删非默认菜单")
    @DeleteMapping("/{id}")
    public ResponseEntity delete(@PathVariable("id") Long id) {
        menuService.delete(id);
        return new ResponseEntity<>(HttpStatus.OK);
    }

    /** Validates that the given menu's code is not already taken. */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "菜单code是否重复")
    @PostMapping(value = "/check")
    public ResponseEntity check(@RequestBody MenuDTO menu) {
        menuService.check(menu);
        return new ResponseEntity(HttpStatus.OK);
    }

    /** Lists every menu; internal (service-to-service) calls only. */
    @Permission(type = ResourceType.SITE, permissionWithin = true)
    @ApiOperation(value = "查询所有菜单")
    @GetMapping(value = "/list")
    public ResponseEntity<List<MenuDTO>> list() {
        return new ResponseEntity<>(menuService.list(), HttpStatus.OK);
    }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/api/controller/v1/OrganizationControllerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.controller.v1
import com.github.pagehelper.PageInfo
import io.choerodon.core.exception.ExceptionResponse
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.app.service.OrganizationService
import io.choerodon.iam.infra.dto.OrganizationDTO
import io.choerodon.iam.infra.mapper.OrganizationMapper
import org.springframework.beans.BeanUtils
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.web.client.TestRestTemplate
import org.springframework.context.annotation.Import
import org.springframework.http.HttpEntity
import org.springframework.http.HttpMethod
import spock.lang.Shared
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
* @author dengyouquan* */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class OrganizationControllerSpec extends Specification {
private static final String BASE_PATH = "/v1/organizations"
@Autowired
private TestRestTemplate restTemplate
@Autowired
private OrganizationMapper organizationMapper
@Autowired
private OrganizationService organizationService
@Shared
def needInit = true
@Shared
def needClean = false
@Shared
def organizationDOList = new ArrayList<OrganizationDTO>()
def setup() {
if (needInit) {
given: "构造参数"
for (int i = 0; i < 3; i++) {
def organizationDO = new OrganizationDTO()
organizationDO.setCode("hand" + i)
organizationDO.setName("汉得" + i)
organizationDOList.add(organizationDO)
}
when: "调用方法"
needInit = false
int count = 0
for (OrganizationDTO dto : organizationDOList) {
organizationMapper.insert(dto)
count++
}
then: "校验结果"
count == 3
}
}
def cleanup() {
if (needClean) {
when: "调用方法"
needClean = false
def count = 0
for (OrganizationDTO organizationDO : organizationDOList) {
count += organizationMapper.deleteByPrimaryKey(organizationDO)
}
then: "校验结果"
count == 3
}
}
def "Update"() {
given: "构造请求参数"
def updateDto = organizationDOList.get(0)
updateDto.setCode("update-hand")
updateDto.setName("汉得更新")
updateDto.setObjectVersionNumber(1)
def httpEntity = new HttpEntity<Object>(updateDto)
when: "调用对应方法"
def entity = restTemplate.exchange(BASE_PATH + "/{organization_id}", HttpMethod.PUT, httpEntity, OrganizationDTO, updateDto.getId())
then: "校验结果"
entity.statusCode.is2xxSuccessful()
//code不可更新
!entity.getBody().getCode().equals(updateDto.getCode())
entity.getBody().getId().equals(updateDto.getId())
entity.getBody().getName().equals(updateDto.getName())
}
def "Query"() {
given: "构造请求参数"
def organizationId = 1L
def organizationDO = organizationMapper.selectByPrimaryKey(organizationId)
when: "调用对应方法[异常-组织id不存在]"
def entity = restTemplate.getForEntity(BASE_PATH + "/{organization_id}", ExceptionResponse, 1000L)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.organization.not.exist")
when: "调用对应方法"
entity = restTemplate.getForEntity(BASE_PATH + "/{organization_id}", OrganizationDTO, organizationId)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getId().equals(organizationId)
entity.getBody().getCode().equals(organizationDO.getCode())
entity.getBody().getName().equals(organizationDO.getName())
}
def "QueryOrgLevel"() {
given: "构造请求参数"
def organizationId = 1L
def organizationDO = organizationMapper.selectByPrimaryKey(organizationId)
when: "调用对应方法[异常-组织id不存在]"
def entity = restTemplate.getForEntity(BASE_PATH + "/{organization_id}/org_level", ExceptionResponse, 1000L)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.organization.not.exist")
when: "调用对应方法"
entity = restTemplate.getForEntity(BASE_PATH + "/{organization_id}/org_level", OrganizationDTO, organizationId)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getId().equals(organizationId)
entity.getBody().getCode().equals(organizationDO.getCode())
entity.getBody().getName().equals(organizationDO.getName())
}
def "List"() {
    given: "构造请求参数"
    def queryName = "汉得"
    def queryCode = "hand"

    when: "调用对应方法[全查询]"
    def response = restTemplate.getForEntity(BASE_PATH, PageInfo)

    then: "校验结果"
    response.statusCode.is2xxSuccessful()
    // total count is not pinned here (fixture data may grow); only non-emptiness is asserted
    !response.getBody().list.isEmpty()

    when: "调用对应方法"
    response = restTemplate.getForEntity(BASE_PATH + "?code={code}&name={name}", PageInfo, queryCode, queryName)

    then: "校验结果"
    // filtered query: exactly three fixtures match code "hand" + name "汉得"
    response.statusCode.is2xxSuccessful()
    response.getBody().total == 3
}
def "EnableOrganization"() {
    given: "构造请求参数"
    def targetOrgId = 1L
    // PUT with an empty body; the id travels in the path
    def requestEntity = new HttpEntity<Object>()

    when: "调用对应方法[异常-组织id不存在]"
    def response = restTemplate.exchange(BASE_PATH + "/{organization_id}/enable", HttpMethod.PUT, requestEntity, ExceptionResponse, 1000L)

    then: "校验结果"
    response.statusCode.is2xxSuccessful()
    response.getBody().getCode().equals("error.organization.not.exist")

    when: "调用对应方法"
    response = restTemplate.exchange(BASE_PATH + "/{organization_id}/enable", HttpMethod.PUT, requestEntity, OrganizationDTO, targetOrgId)

    then: "校验结果"
    // the returned DTO reflects the new enabled state
    response.statusCode.is2xxSuccessful()
    response.getBody().getEnabled()
}
def "DisableOrganization"() {
    given: "构造请求参数"
    def targetOrgId = 1L
    // PUT with an empty body; the id travels in the path
    def requestEntity = new HttpEntity<Object>()

    when: "调用对应方法[异常-组织id不存在]"
    def response = restTemplate.exchange(BASE_PATH + "/{organization_id}/disable", HttpMethod.PUT, requestEntity, ExceptionResponse, 1000L)

    then: "校验结果"
    response.statusCode.is2xxSuccessful()
    response.getBody().getCode().equals("error.organization.not.exist")

    when: "调用对应方法"
    response = restTemplate.exchange(BASE_PATH + "/{organization_id}/disable", HttpMethod.PUT, requestEntity, OrganizationDTO, targetOrgId)

    then: "校验结果"
    // the returned DTO reflects the new disabled state
    response.statusCode.is2xxSuccessful()
    !response.getBody().getEnabled()
}
def "Check"() {
    given: "构造请求参数"
    // an existing organization used as a template for the check payloads
    def template = organizationDOList.get(1)

    when: "调用对应方法[异常-组织code为空]"
    def dtoWithoutCode = new OrganizationDTO()
    BeanUtils.copyProperties(template, dtoWithoutCode)
    dtoWithoutCode.setCode(null)
    def response = restTemplate.postForEntity(BASE_PATH + "/check", dtoWithoutCode, ExceptionResponse)

    then: "校验结果"
    response.statusCode.is2xxSuccessful()
    response.getBody().getCode().equals("error.organization.code.empty")

    when: "调用对应方法[异常-组织id存在]"
    def dtoWithDuplicateCode = new OrganizationDTO()
    BeanUtils.copyProperties(template, dtoWithDuplicateCode)
    // "operation" is a code already present in the fixture data
    dtoWithDuplicateCode.setCode("operation")
    response = restTemplate.postForEntity(BASE_PATH + "/check", dtoWithDuplicateCode, ExceptionResponse)

    then: "校验结果"
    response.statusCode.is2xxSuccessful()
    response.getBody().getCode().equals("error.organization.code.exist")

    when: "调用对应方法"
    def freshDto = new OrganizationDTO()
    freshDto.setCode("test")
    response = restTemplate.postForEntity(BASE_PATH + "/check", freshDto, Void)
    // flag picked up by cleanup so the "test" code does not leak into other specs
    needClean = true

    then: "校验结果"
    response.statusCode.is2xxSuccessful()
}
def "queryByIds"() {
    given:
    // call the controller directly, bypassing the HTTP layer
    OrganizationController controller = new OrganizationController(organizationService)
    Set<Long> idSet = new HashSet<>()
    idSet.add(1L)

    when:
    def result = controller.queryByIds(idSet)

    then:
    // at least the fixture organization with id 1 comes back
    result.body.size() > 0
}
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/LdapController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.iam.api.dto.LdapAccountDTO;
import io.choerodon.iam.api.dto.LdapConnectionDTO;
import io.choerodon.iam.app.service.LdapService;
import io.choerodon.iam.infra.dto.LdapDTO;
import io.choerodon.iam.infra.dto.LdapErrorUserDTO;
import io.choerodon.iam.infra.dto.LdapHistoryDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.choerodon.swagger.annotation.CustomPageRequest;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
/**
* @author wuguokai
*/
@RestController
@RequestMapping("/v1/organizations/{organization_id}")
public class LdapController {

    private LdapService ldapService;

    public LdapController(LdapService ldapService) {
        this.ldapService = ldapService;
    }

    /**
     * Creates an LDAP configuration under the organization.
     *
     * @param organizationId the owning organization id
     * @param ldapDTO        the LDAP configuration to create
     * @return the created LdapDTO
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "创建Ldap")
    @PostMapping(value = "/ldaps")
    public ResponseEntity<LdapDTO> create(@PathVariable("organization_id") Long organizationId,
                                          @RequestBody @Validated LdapDTO ldapDTO) {
        return new ResponseEntity<>(ldapService.create(organizationId, ldapDTO), HttpStatus.OK);
    }

    /**
     * Updates the LDAP configuration with the given id.
     * NOTE(review): mapped as POST rather than PUT — this matches the update
     * convention used elsewhere in this project (see PasswordPolicyController);
     * confirm before changing the verb, clients depend on it.
     *
     * @param organizationId the owning organization id
     * @param id             the LDAP configuration id
     * @param ldapDTO        the new configuration values
     * @return the updated LdapDTO
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "修改Ldap")
    @PostMapping(value = "/ldaps/{id}")
    public ResponseEntity<LdapDTO> update(@PathVariable("organization_id") Long organizationId,
                                          @PathVariable("id") Long id, @RequestBody @Validated LdapDTO ldapDTO) {
        return new ResponseEntity<>(ldapService.update(organizationId, id, ldapDTO), HttpStatus.OK);
    }

    /**
     * Enables the LDAP configuration.
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "启用ldap")
    @PutMapping(value = "/ldaps/{id}/enable")
    public ResponseEntity<LdapDTO> enableLdap(@PathVariable(name = "organization_id") Long organizationId,
                                              @PathVariable Long id) {
        return new ResponseEntity<>(ldapService.enableLdap(organizationId, id), HttpStatus.OK);
    }

    /**
     * Disables the LDAP configuration.
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "禁用ldap")
    @PutMapping(value = "/ldaps/{id}/disable")
    public ResponseEntity<LdapDTO> disableLdap(@PathVariable(name = "organization_id") Long organizationId,
                                               @PathVariable Long id) {
        return new ResponseEntity<>(ldapService.disableLdap(organizationId, id), HttpStatus.OK);
    }

    /**
     * Queries the LDAP configuration of the organization.
     *
     * @param organizationId the organization id
     * @return the organization's LdapDTO
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "查询组织下的Ldap")
    @GetMapping(value = "/ldaps")
    public ResponseEntity<LdapDTO> queryByOrgId(@PathVariable("organization_id") Long organizationId) {
        return new ResponseEntity<>(ldapService.queryByOrganizationId(organizationId), HttpStatus.OK);
    }

    /**
     * Deletes the LDAP configuration with the given id from the organization.
     *
     * @param organizationId the organization id
     * @param id             the LDAP configuration id
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "删除组织下的Ldap")
    @DeleteMapping("/ldaps/{id}")
    public ResponseEntity delete(@PathVariable("organization_id") Long organizationId,
                                 @PathVariable("id") Long id) {
        ldapService.delete(organizationId, id);
        return new ResponseEntity(HttpStatus.OK);
    }

    /**
     * Tests whether a connection to the LDAP server can be established
     * using the supplied account credentials.
     *
     * @return connection test result
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "测试ldap连接")
    @PostMapping("/ldaps/{id}/test_connect")
    public ResponseEntity<LdapConnectionDTO> testConnect(@PathVariable("organization_id") Long organizationId,
                                                         @PathVariable("id") Long id,
                                                         @RequestBody LdapAccountDTO ldapAccount) {
        return new ResponseEntity<>(ldapService.testConnect(organizationId, id, ldapAccount), HttpStatus.OK);
    }

    /**
     * Triggers a synchronization of users from the LDAP server into the organization.
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "同步ldap用户")
    @PostMapping("/ldaps/{id}/sync_users")
    public ResponseEntity syncUsers(@PathVariable("organization_id") Long organizationId,
                                    @PathVariable Long id) {
        ldapService.syncLdapUser(organizationId, id);
        return new ResponseEntity(HttpStatus.NO_CONTENT);
    }

    /**
     * Returns the most recent sync history record for the given ldap id.
     * Note: organizationId is only used for permission scoping here — the
     * service lookup is by ldap id alone.
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "根据ldap id查询最新一条历史记录")
    @GetMapping("/ldaps/{id}/latest_history")
    public ResponseEntity<LdapHistoryDTO> latestHistory(@PathVariable("organization_id") Long organizationId,
                                                        @PathVariable Long id) {
        return new ResponseEntity<>(ldapService.queryLatestHistory(id), HttpStatus.OK);
    }

    /**
     * Pages through the sync history records of the LDAP configuration,
     * sorted newest first.
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "根据ldap id查询历史记录")
    @GetMapping("/ldaps/{id}/history")
    @CustomPageRequest
    public ResponseEntity<PageInfo<LdapHistoryDTO>> pagingQueryHistories(@ApiIgnore
                                                                         @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                                         @PathVariable("organization_id") Long organizationId,
                                                                         @PathVariable Long id) {
        return new ResponseEntity<>(ldapService.pagingQueryHistories(pageRequest, id), HttpStatus.OK);
    }

    /**
     * Pages through the per-user error details of one sync run.
     * Note: the {id} in this path is an ldap *history* id, not an ldap id.
     * The optional ldapErrorUserDTO acts as a query-by-example filter.
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "根据ldap history id查询同步用户错误详情")
    @GetMapping("/ldap_histories/{id}/error_users")
    @CustomPageRequest
    public ResponseEntity<PageInfo<LdapErrorUserDTO>> pagingQueryErrorUsers(@ApiIgnore
                                                                            @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                                            @PathVariable("organization_id") Long organizationId,
                                                                            @PathVariable Long id,
                                                                            LdapErrorUserDTO ldapErrorUserDTO) {
        return new ResponseEntity<>(ldapService.pagingQueryErrorUsers(pageRequest, id, ldapErrorUserDTO), HttpStatus.OK);
    }

    /**
     * Repair endpoint: when a sync is interrupted by factors outside our control
     * (e.g. the iam service dies) the history row's endTime stays null and the
     * sync appears to run forever. This endpoint merely stamps endTime.
     *
     * @param organizationId organization id (permission scoping only)
     * @param id             ldap id
     * @return the updated history record
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "根据ldap id更新历史记录的endTime")
    @PutMapping("/ldaps/{id}/stop")
    public ResponseEntity<LdapHistoryDTO> stop(@PathVariable("organization_id") Long organizationId, @PathVariable Long id) {
        return new ResponseEntity<>(ldapService.stop(id), HttpStatus.OK);
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/LdapService.java<|end_filename|>
package io.choerodon.iam.app.service;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.api.dto.LdapAccountDTO;
import io.choerodon.iam.api.dto.LdapConnectionDTO;
import io.choerodon.iam.infra.dto.LdapDTO;
import io.choerodon.iam.infra.dto.LdapErrorUserDTO;
import io.choerodon.iam.infra.dto.LdapHistoryDTO;
import java.util.Map;
/**
* @author wuguokai
*/
public interface LdapService {

    /** Creates an LDAP configuration under the given organization. */
    LdapDTO create(Long organizationId, LdapDTO ldapDTO);

    /** Updates the LDAP configuration with the given id. */
    LdapDTO update(Long organizationId, Long id, LdapDTO ldapDTO);

    /** Returns the organization's LDAP configuration. */
    LdapDTO queryByOrganizationId(Long organizationId);

    /** Deletes the LDAP configuration with the given id. */
    void delete(Long organizationId, Long id);

    /**
     * Tests whether the LDAP server can be reached with the given account.
     *
     * @param organizationId organization id
     * @param id             ldap id
     * @return LdapConnectionDTO connection test result
     */
    LdapConnectionDTO testConnect(Long organizationId, Long id, LdapAccountDTO ldapAccountDTO);

    /**
     * Overload taking the configuration object directly; returns the test
     * outcome as a result map (keys defined by the implementation).
     */
    Map<String, Object> testConnect(LdapDTO ldap);

    /**
     * Synchronizes users according to the stored LDAP configuration.
     *
     * @param organizationId organization id
     * @param id             ldap id
     */
    void syncLdapUser(Long organizationId, Long id);

    /**
     * Validates the ldap/organization pair and returns the configuration
     * (presumably throwing on mismatch — verify against the implementation).
     */
    LdapDTO validateLdap(Long organizationId, Long id);

    /**
     * Returns the latest sync history record for the given ldap id.
     *
     * @param ldapId ldapId
     */
    LdapHistoryDTO queryLatestHistory(Long ldapId);

    /** Enables the LDAP configuration. */
    LdapDTO enableLdap(Long organizationId, Long id);

    /** Disables the LDAP configuration. */
    LdapDTO disableLdap(Long organizationId, Long id);

    /**
     * Stamps endTime on the sync history of the given ldap id — used to
     * unstick syncs interrupted by a service crash (see LdapController#stop).
     */
    LdapHistoryDTO stop(Long id);

    /**
     * Pages over the sync history records of the given ldap id.
     */
    PageInfo<LdapHistoryDTO> pagingQueryHistories(PageRequest pageRequest, Long ldapId);

    /**
     * Pages over the per-user error details of one sync history record,
     * filtered by the example DTO.
     */
    PageInfo<LdapErrorUserDTO> pagingQueryErrorUsers(PageRequest pageRequest, Long id, LdapErrorUserDTO ldapErrorUserDTO);
}
<|start_filename|>react/src/app/iam/containers/organization/application/AddSider.js<|end_filename|>
import React, { Component } from 'react';
import { Modal, Table } from 'choerodon-ui';
import { Content, axios } from '@choerodon/boot';
import remove from 'lodash/remove';
import { injectIntl } from 'react-intl';
import { inject, observer } from 'mobx-react';
import MouseOverWrapper from '../../../components/mouseOverWrapper';
// Sidebar is choerodon-ui's Modal variant rendered as a side drawer.
const { Sidebar } = Modal;
// i18n message-key prefix shared by this page's formatMessage calls.
const intlPrefix = 'organization.application';
@injectIntl
@inject('AppState')
@observer
export default class Application extends Component {
constructor(props) {
super(props);
this.state = {
selections: props.selections || [],
};
this.data = props.data || [];
}
componentDidMount() {
this.loadChildApp();
}
loadChildApp = () => {
const { data, AppState: { currentMenuType: { organizationId } }, id } = this.props;
if (!data) {
// 根据接口获得全部可以给添加的应用
axios.get(`/iam/v1/organizations/${organizationId}/applications/${id}/enabled_app`)
.then((res) => {
if (!res.failed) {
this.data = res;
this.forceUpdate();
} else {
Choerodon.prompt(data.message);
}
});
}
}
handleSelect = (record, selected, selectedRows) => {
const { selections } = this.state;
if (selected) {
if (!selections.includes(record.id)) {
selections.push(record.id);
}
} else {
remove(selections, p => p === record.id);
}
this.setState({
selections,
});
}
handleSelectAll = (selected, selectedRows, changeRows) => {
let { selections } = this.state;
if (selected) {
selections = selections.concat(selectedRows.map(r => r.id));
selections = [...new Set(selections)];
} else {
remove(selections, p => changeRows.map(r => r.id).includes(p));
}
this.setState({
selections,
});
}
handleOk = () => {
const { onOk } = this.props;
const { selections } = this.state;
if (onOk) {
onOk(selections);
}
}
renderContent() {
const { intl } = this.props;
const { data, state: { selections } } = this;
const columns = [
{
title: '应用名称',
dataIndex: 'name',
width: '20%',
render: text => (
<MouseOverWrapper text={text} width={0.2}>
{text}
</MouseOverWrapper>
),
},
{
title: '应用编码',
dataIndex: 'code',
width: '20%',
render: text => (
<MouseOverWrapper text={text} width={0.2}>
{text}
</MouseOverWrapper>
),
},
{
title: '应用分类',
dataIndex: 'applicationType',
width: '15%',
render: text => (
<span>
{text ? intl.formatMessage({ id: `${intlPrefix}.type.${text}` }) : ''}
</span>
),
},
{
title: '开发项目',
dataIndex: 'projectName',
render: (text, record) => (
<div>
{
text && (
<div className="c7n-iam-application-name-avatar">
{
record.imageUrl ? (
<img src={record.imageUrl} alt="avatar" style={{ width: '100%' }} />
) : (
<React.Fragment>{text.split('')[0]}</React.Fragment>
)
}
</div>
)
}
<MouseOverWrapper text={text} width={0.2}>
{text}
</MouseOverWrapper>
</div>
),
},
];
const rowSelection = {
selectedRowKeys: selections,
onSelect: (record, selected, selectedRows) => {
this.handleSelect(record, selected, selectedRows);
},
onSelectAll: (selected, selectedRows, changeRows) => {
this.handleSelectAll(selected, selectedRows, changeRows);
},
};
return (
<Table
columns={columns}
dataSource={data}
rowKey={record => record.id}
rowSelection={rowSelection}
filterBarPlaceholder={intl.formatMessage({ id: 'filtertable' })}
scroll={{ x: true }}
/>
);
}
render() {
const { onCancel } = this.props;
return (
<Sidebar
visible
title="添加子应用"
bodyStyle={{ padding: 0 }}
onCancel={onCancel}
onOk={this.handleOk}
onText="添加"
>
<Content
title="向组合应用添加子应用"
description="您可以在此修改组合应用下的子应用信息。"
link="#"
>
{this.renderContent()}
</Content>
</Sidebar>
);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/UploadHistoryMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.UploadHistoryDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
/**
* @author superlee
*/
public interface UploadHistoryMapper extends Mapper<UploadHistoryDTO> {

    /**
     * Returns the latest upload-history record for the given user, upload type
     * and source (presumably ordered by time in the mapper XML — verify there).
     *
     * @param userId     the uploading user's id
     * @param type       the upload type discriminator
     * @param sourceId   id of the owning source (e.g. organization/project)
     * @param sourceType type discriminator of the owning source
     */
    UploadHistoryDTO latestHistory(@Param("userId") Long userId,
                                   @Param("type") String type,
                                   @Param("sourceId") Long sourceId,
                                   @Param("sourceType") String sourceType);
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/PasswordPolicyController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.iam.infra.dto.PasswordPolicyDTO;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import io.choerodon.iam.api.validator.PasswordPolicyValidator;
import io.choerodon.iam.app.service.PasswordPolicyService;
/**
* @author wuguokai
*/
@RestController
@RequestMapping("/v1/organizations/{organization_id}/password_policies")
public class PasswordPolicyController {

    private PasswordPolicyService passwordPolicyService;
    private PasswordPolicyValidator passwordPolicyValidator;

    public PasswordPolicyController(PasswordPolicyService passwordPolicyService, PasswordPolicyValidator passwordPolicyValidator) {
        this.passwordPolicyService = passwordPolicyService;
        this.passwordPolicyValidator = passwordPolicyValidator;
    }

    /**
     * Queries the password policy of the target organization.
     *
     * @param organizationId the organization whose policy is requested
     * @return the organization's password policy
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "查询组织的密码策略")
    @GetMapping
    public ResponseEntity<PasswordPolicyDTO> queryByOrganizationId(@PathVariable("organization_id") Long organizationId) {
        return new ResponseEntity<>(passwordPolicyService.queryByOrgId(organizationId), HttpStatus.OK);
    }

    /**
     * Updates the password policy of the currently selected organization.
     * Validation runs first; only then is the update applied.
     * NOTE(review): mapped as POST rather than PUT — matches this project's
     * update convention (see LdapController#update); confirm before changing.
     *
     * @param passwordPolicyDTO the policy values to apply
     * @return the updated password policy
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "修改组织的密码策略")
    @PostMapping("/{id}")
    public ResponseEntity<PasswordPolicyDTO> update(@PathVariable("organization_id") Long organizationId,
                                                    @PathVariable("id") Long id,
                                                    @RequestBody @Validated PasswordPolicyDTO passwordPolicyDTO) {
        passwordPolicyValidator.update(organizationId, id, passwordPolicyDTO);
        return new ResponseEntity<>(passwordPolicyService.update(organizationId, id, passwordPolicyDTO), HttpStatus.OK);
    }
}
<|start_filename|>react/src/app/iam/containers/organization/application/AddApplication.js<|end_filename|>
import React, { Component } from 'react';
import remove from 'lodash/remove';
import { Button, Form, Table, Tooltip, Radio, Select, Input } from 'choerodon-ui';
import { inject, observer } from 'mobx-react';
import { withRouter } from 'react-router-dom';
import { Content, Header, Page } from '@choerodon/boot';
import { injectIntl, FormattedMessage } from 'react-intl';
import './Application.scss';
import MouseOverWrapper from '../../../components/mouseOverWrapper';
import StatusTag from '../../../components/statusTag';
import AddSider from './AddSider';
// Aliases for the choerodon-ui sub-components used below.
const RadioGroup = Radio.Group;
const { Option } = Select;
const FormItem = Form.Item;
// i18n message-key prefix shared by this page's formatMessage calls.
const intlPrefix = 'organization.application';
// Shared label/wrapper grid applied to every form item on this page.
const formItemLayout = {
  labelCol: {
    xs: { span: 24 },
    sm: { span: 8 },
  },
  wrapperCol: {
    xs: { span: 24 },
    sm: { span: 16 },
  },
};
@Form.create({})
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class Application extends Component {
  // Page for creating an application (plain or combination) inside an
  // organization. Combination applications additionally carry a list of
  // sub-application ids chosen through the AddSider dialog.

  componentDidMount() {
    this.refresh();
  }

  refresh = () => {
    const { ApplicationStore } = this.props;
    ApplicationStore.loadApplications();
  };

  // Async form validator: rejects codes that already exist in the organization.
  checkCode = (rule, value, callback) => {
    const { ApplicationStore, intl } = this.props;
    const params = { code: value };
    ApplicationStore.checkApplicationCode(params)
      .then((mes) => {
        if (mes.failed) {
          callback(intl.formatMessage({ id: `${intlPrefix}.code.exist.msg` }));
        } else {
          callback();
        }
      }).catch((err) => {
        callback('校验超时');
        Choerodon.handleResponseError(err);
      });
  };

  // Async form validator for the name field.
  // NOTE(review): this calls checkApplicationCode with a { name } param —
  // presumably the store method should be checkApplicationName; verify
  // against ApplicationStore before changing.
  checkName = (rule, value, callback) => {
    const { ApplicationStore, intl, ApplicationStore: { editData } } = this.props;
    const params = { name: value };
    ApplicationStore.checkApplicationCode(params)
      .then((mes) => {
        if (mes.failed) {
          callback(intl.formatMessage({ id: `${intlPrefix}.name.exist.msg` }));
        } else {
          callback();
        }
      }).catch((err) => {
        callback('校验超时');
        Choerodon.handleResponseError(err);
      });
  };

  // Validates the form, builds the create payload (combination apps get
  // descendantIds instead of type/project), submits, and navigates back to
  // the application list on success.
  handleSubmit = (e) => {
    e.preventDefault();
    const { ApplicationStore, AppState: { currentMenuType } } = this.props;
    const orgId = currentMenuType.id;
    const orgName = currentMenuType.name;
    const { validateFields } = this.props.form;
    validateFields((err, value) => {
      if (!err) {
        const { ApplicationStore: { selectedRowKeys }, history, intl } = this.props;
        const { applicationCategory, code, name, applicationType, projectId } = value;
        const isCombine = applicationCategory === 'combination-application';
        const data = {
          applicationCategory,
          code,
          name: name.trim(),
          // type/project apply only to plain applications; sub-apps only to combinations
          applicationType: !isCombine ? applicationType : undefined,
          projectId: !isCombine ? projectId : undefined,
          descendantIds: isCombine ? selectedRowKeys : undefined,
        };
        ApplicationStore.setSubmitting(true);
        ApplicationStore.createApplication(data)
          .then((res) => {
            ApplicationStore.setSubmitting(false);
            if (!res.failed) {
              history.push(`/iam/application?type=organization&id=${orgId}&name=${encodeURIComponent(orgName)}`);
              Choerodon.prompt(intl.formatMessage({ id: 'create.success' }));
              ApplicationStore.loadData();
            } else {
              Choerodon.prompt(res.message);
            }
          }).catch((error) => {
            ApplicationStore.setSubmitting(false);
            Choerodon.handleResponseError(error);
          });
      }
    });
  };

  // Opens the AddSider dialog for picking sub-applications.
  handleAddApplication = () => {
    const { ApplicationStore } = this.props;
    ApplicationStore.showSidebar();
  };

  // Dialog confirmed: persist the chosen ids into the store and close.
  handleSiderOk = (selections) => {
    const { ApplicationStore } = this.props;
    ApplicationStore.setSelectedRowKeys(selections);
    ApplicationStore.closeSidebar();
  }

  handleSiderCancel = () => {
    const { ApplicationStore } = this.props;
    ApplicationStore.closeSidebar();
  }

  // Removes one sub-application from the current selection (mutates the
  // observable array in place, then writes it back).
  handleDelete = (record) => {
    const { ApplicationStore } = this.props;
    const { selectedRowKeys } = ApplicationStore;
    remove(selectedRowKeys, v => v === record.id);
    ApplicationStore.setSelectedRowKeys(selectedRowKeys);
  }

  // Renders the create form: category radio, code/name inputs, and — for
  // plain applications only — type radio and development-project select.
  renderForm() {
    const { intl, ApplicationStore, form } = this.props;
    const { getFieldDecorator } = form;
    const { projectData, editData, submitting } = ApplicationStore;
    const inputWidth = 512;
    return (
      <React.Fragment>
        <Form layout="vertical" className="rightForm" style={{ width: 512 }}>
          <FormItem
            {...formItemLayout}
          >
            {getFieldDecorator('applicationCategory', {
              initialValue: 'application',
            })(
              <RadioGroup label={<FormattedMessage id={`${intlPrefix}.category`} />} className="c7n-iam-application-radiogroup">
                {
                  ['application', 'combination-application'].map(value => <Radio value={value} key={value}>{intl.formatMessage({ id: `${intlPrefix}.category.${value.toLowerCase()}` })}</Radio>)
                }
              </RadioGroup>,
            )}
          </FormItem>
          <FormItem
            {...formItemLayout}
          >
            {getFieldDecorator('code', {
              rules: [{
                required: true,
                whitespace: true,
                message: intl.formatMessage({ id: `${intlPrefix}.code.require.msg` }),
              }, {
                // lowercase alphanumeric with hyphens, must start with a letter
                pattern: /^[a-z]([-a-z0-9]*[a-z0-9])?$/,
                message: intl.formatMessage({ id: `${intlPrefix}.code.format.msg` }),
              }, {
                validator: this.checkCode,
              }],
              validateTrigger: 'onBlur',
              validateFirst: true,
            })(
              <Input
                autoComplete="off"
                label={<FormattedMessage id={`${intlPrefix}.code`} />}
                style={{ width: inputWidth }}
                ref={(e) => { this.createFocusInput = e; }}
                maxLength={14}
                showLengthInfo={false}
              />,
            )}
          </FormItem>
          <FormItem
            {...formItemLayout}
          >
            {getFieldDecorator('name', {
              rules: [{
                required: true,
                message: intl.formatMessage({ id: `${intlPrefix}.name.require.msg` }),
              }, {
                // no whitespace characters anywhere in the name
                pattern: /^[^\s]*$/,
                message: intl.formatMessage({ id: `${intlPrefix}.whitespace.msg` }),
              }, {
                validator: this.checkName,
              }],
              validateTrigger: 'onBlur',
              validateFirst: true,
            })(
              <Input
                autoComplete="off"
                label={<FormattedMessage id={`${intlPrefix}.name`} />}
                style={{ width: inputWidth }}
                ref={(e) => { this.editFocusInput = e; }}
                maxLength={14}
                showLengthInfo={false}
              />,
            )}
          </FormItem>
          {
            this.props.form.getFieldValue('applicationCategory') !== 'combination-application' && (
              <React.Fragment>
                <FormItem
                  {...formItemLayout}
                >
                  {getFieldDecorator('applicationType', {
                    initialValue: 'normal',
                  })(
                    <RadioGroup label={<FormattedMessage id={`${intlPrefix}.type`} />} className="c7n-iam-application-radiogroup">
                      {
                        ['normal', 'test'].map(value => <Radio value={value} key={value}>{intl.formatMessage({ id: `${intlPrefix}.type.${value.toLowerCase()}` })}</Radio>)
                      }
                    </RadioGroup>,
                  )}
                </FormItem>
                <FormItem
                  {...formItemLayout}
                >
                  {getFieldDecorator('projectId', {
                  })(
                    <Select
                      label={<FormattedMessage id={`${intlPrefix}.assignment`} />}
                      className="c7n-iam-application-radiogroup"
                      getPopupContainer={that => that}
                      filterOption={(input, option) => {
                        // options render a Tooltip-wrapped span; dig down to the text
                        const childNode = option.props.children;
                        if (childNode && React.isValidElement(childNode)) {
                          return childNode.props.children.props.children.toLowerCase().indexOf(input.toLowerCase()) >= 0;
                        }
                        return false;
                      }}
                      allowClear
                      filter
                    >
                      {
                        projectData.map(({ id, name, code }) => (
                          <Option value={id} key={id} title={name}>
                            <Tooltip title={code} placement="right" align={{ offset: [20, 0] }}>
                              <span style={{ display: 'inline-block', width: '100%' }}>{name}</span>
                            </Tooltip>
                          </Option>
                        ))
                      }
                    </Select>,
                  )}
                </FormItem>
              </React.Fragment>
            )
          }
        </Form>
      </React.Fragment>
    );
  }

  // Renders the table of currently selected sub-applications, with a
  // per-row delete button.
  renderTable = () => {
    const columns = [{
      title: <FormattedMessage id={`${intlPrefix}.name`} />,
      dataIndex: 'name',
      width: '160px',
    }, {
      title: <FormattedMessage id={`${intlPrefix}.code`} />,
      dataIndex: 'code',
      key: 'applicationCode',
      render: text => (
        <MouseOverWrapper text={text} width={0.1}>
          {text}
        </MouseOverWrapper>
      ),
    }, {
      dataIndex: 'id',
      width: '50px',
      key: 'id',
      render: (text, record) => (
        <div>
          <Tooltip
            title={<FormattedMessage id="delete" />}
            placement="bottom"
          >
            <Button
              shape="circle"
              size="small"
              onClick={e => this.handleDelete(record)}
              icon="delete"
            />
          </Tooltip>
        </div>
      ),
    }];
    const { ApplicationStore, intl } = this.props;
    const { listParams, listLoading, applicationTreeData, getAddListDataSource, selectedRowKeys } = ApplicationStore;
    const data = getAddListDataSource;
    // only show candidates that have actually been selected
    const filteredData = data.filter(v => selectedRowKeys.includes(v.id));
    return (
      <Table
        columns={columns}
        dataSource={filteredData}
        rowKey="path"
        className="c7n-iam-application-tree-table"
        filters={false}
        filterBar={false}
        loading={listLoading}
        filterBarPlaceholder={intl.formatMessage({ id: 'filtertable' })}
      />
    );
  };

  // The sub-application table is only relevant for combination applications.
  renderTableBlock = () => {
    if (this.props.form.getFieldValue('applicationCategory') === 'combination-application') {
      return (
        <div style={{ width: 512, paddingBottom: 42, marginBottom: 12, borderBottom: '1px solid rgba(0, 0, 0, 0.12)' }}>
          <h3 style={{ marginTop: 24, marginBottom: 10 }}>子应用</h3>
          {this.renderTable()}
        </div>
      );
    }
    return null;
  };

  // Renders the AddSider selection dialog while the store flags it visible.
  renderSider = () => {
    const { ApplicationStore } = this.props;
    const { sidebarVisible, submitting, getAddListDataSource, selectedRowKeys } = ApplicationStore;
    if (!sidebarVisible) return null;
    return (
      <AddSider
        data={getAddListDataSource}
        selections={selectedRowKeys.slice()}
        onOk={this.handleSiderOk}
        onCancel={this.handleSiderCancel}
      />
    );
  };

  render() {
    const { AppState, ApplicationStore: { submitting } } = this.props;
    const menuType = AppState.currentMenuType;
    const orgId = menuType.id;
    return (
      <Page>
        <Header
          title="创建应用"
          backPath={`/iam/application?type=organization&id=${orgId}&name=${encodeURIComponent(menuType.name)}&organizationId=${orgId}`}
        />
        <Content
          title="创建应用"
          description="请在此输入应用的名称、编码,选择项目类型。同时您可以为应用分配开发项目,平台会为您在对应项目下创建git代码库。注意:一旦您分配了开发项目就不能再次修改开发项目,请谨慎操作。"
          link="#"
          className="c7n-iam-application"
        >
          {this.renderForm()}
          {
            this.props.form.getFieldValue('applicationCategory') === 'combination-application' && (
              <Button
                onClick={this.handleAddApplication}
                icon="playlist_add"
                funcType="raised"
                style={{ color: '#3f51b5' }}
              >
                添加子应用
              </Button>
            )
          }
          {this.renderTableBlock()}
          <Button style={{ marginRight: 10 }} loading={submitting} onClick={this.handleSubmit} type="primary" funcType="raised"><FormattedMessage id="create" /></Button>
          <Button onClick={() => this.props.history.push(`/iam/application?type=organization&id=${orgId}&name=${encodeURIComponent(menuType.name)}&organizationId=${orgId}`)} funcType="raised">取消</Button>
          {this.renderSider()}
        </Content>
      </Page>
    );
  }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/SystemSettingMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.SystemSettingDTO;
import io.choerodon.mybatis.common.Mapper;
/**
* @author zmf
* @since 2018-10-15
*/
/** Plain CRUD mapper for the system-setting table; no custom statements. */
public interface SystemSettingMapper extends Mapper<SystemSettingDTO> {
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/ProjectCategoryDTO.java<|end_filename|>
package io.choerodon.iam.api.dto;
import io.choerodon.mybatis.entity.BaseDTO;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
/**
* @author jiameng.cao
* @date 2019/6/4
*/
/**
 * Row object for the fd_project_category table: a project category owned by an
 * organization, with display and built-in flags. Accessors are grouped per
 * property in field-declaration order.
 */
@Table(name = "fd_project_category")
public class ProjectCategoryDTO extends BaseDTO {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;
    private String name;
    private String description;
    private String code;
    private Long organizationId;
    private Boolean displayFlag;
    private Boolean builtInFlag;

    public Long getId() {
        return this.id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return this.description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getCode() {
        return this.code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public Long getOrganizationId() {
        return this.organizationId;
    }

    public void setOrganizationId(Long organizationId) {
        this.organizationId = organizationId;
    }

    public Boolean getDisplayFlag() {
        return this.displayFlag;
    }

    public void setDisplayFlag(Boolean displayFlag) {
        this.displayFlag = displayFlag;
    }

    public Boolean getBuiltInFlag() {
        return this.builtInFlag;
    }

    public void setBuiltInFlag(Boolean builtInFlag) {
        this.builtInFlag = builtInFlag;
    }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/api/eventhandler/OrganizationListenerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.eventhandler
import com.fasterxml.jackson.databind.ObjectMapper
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.api.dto.payload.OrganizationCreateEventPayload
import io.choerodon.iam.app.service.LdapService
import io.choerodon.iam.app.service.OrganizationProjectService
import io.choerodon.iam.app.service.OrganizationService
import io.choerodon.iam.app.service.PasswordPolicyService
import io.choerodon.iam.app.service.UserService
import io.choerodon.iam.infra.dto.OrganizationDTO
import io.choerodon.iam.infra.mapper.ProjectCategoryMapper
import io.choerodon.iam.infra.mapper.ProjectMapCategoryMapper
import io.choerodon.iam.infra.mapper.ProjectMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import org.springframework.transaction.annotation.Transactional
import spock.lang.Specification
import java.lang.reflect.Field
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
* @author dengyouquan* */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class OrganizationListenerSpec extends Specification {
    @Autowired
    private LdapService ldapService
    @Autowired
    private PasswordPolicyService passwordPolicyService
    @Autowired
    private OrganizationService organizationService
    @Autowired
    UserService userService
    @Autowired
    OrganizationProjectService organizationProjectService
    @Autowired
    ProjectMapper projectMapper
    @Autowired
    ProjectCategoryMapper projectCategoryMapper
    @Autowired
    ProjectMapCategoryMapper projectMapCategoryMapper

    // Listener under test; constructed manually in setup() (not autowired) so
    // its private devopsMessage flag can be forced on via reflection.
    private OrganizationListener organizationListener
    private final ObjectMapper mapper = new ObjectMapper()

    def setup() {
        organizationListener = new OrganizationListener(ldapService, passwordPolicyService,
                organizationService, userService, organizationProjectService, projectMapper, projectCategoryMapper, projectMapCategoryMapper)
        // flip the private boolean so the listener exercises its devops-message branch
        Field field = organizationListener.getClass().getDeclaredField("devopsMessage")
        field.setAccessible(true)
        field.set(organizationListener, true)
    }

    @Transactional
    def "Create"() {
        given: "构造请求参数"
        // the listener consumes the payload as a JSON string, so serialize it first
        OrganizationCreateEventPayload payload = new OrganizationCreateEventPayload()
        payload.setId(1L)
        payload.setCode("code")
        payload.setName("name")
        String message = mapper.writeValueAsString(payload)

        when: "调用方法"
        organizationListener.create(message)

        then: "校验结果"
        // smoke test: passes as long as create() does not throw
        true
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/MemberRoleMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.api.query.ClientRoleQuery;
import io.choerodon.iam.infra.dto.ClientDTO;
import io.choerodon.iam.infra.dto.MemberRoleDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
* @author carllhw
*/
public interface MemberRoleMapper extends Mapper<MemberRoleDTO> {

    /**
     * Selects ids related to deleting the given candidate role list for a
     * member within a source (presumably the assignment rows to remove —
     * verify against the mapper XML).
     */
    List<Long> selectDeleteList(@Param("memberId") long memberId, @Param("sourceId") long sourceId, @Param("memberType") String memberType, @Param("sourceType") String sourceType, @Param("list") List<Long> deleteList);

    /**
     * Deletes all member-role rows for the given member id and member type.
     *
     * @return number of rows deleted
     */
    int deleteMemberRoleByMemberIdAndMemberType(@Param("memberId") Long memberId,
                                                @Param("memberType") String memberType);

    /** Counts member-role rows for the given source id and type. */
    int selectCountBySourceId(@Param("id") Long id, @Param("type") String type);

    /**
     * Counts clients matching the role-based search within a source;
     * companion to {@link #selectClientsWithRoles} for pagination totals.
     */
    int selectCountClients(@Param("sourceId") Long sourceId,
                           @Param("sourceType") String sourceType,
                           @Param("clientRoleSearchDTO") ClientRoleQuery clientRoleSearchDTO,
                           @Param("param") String param);

    /**
     * Selects one page (start/size) of clients with their roles within a
     * source, filtered by the search DTO and free-text param.
     */
    List<ClientDTO> selectClientsWithRoles(
            @Param("sourceId") Long sourceId,
            @Param("sourceType") String sourceType,
            @Param("clientRoleSearchDTO") ClientRoleQuery clientRoleSearchDTO,
            @Param("param") String param,
            @Param("start") Integer start,
            @Param("size") Integer size);
}
<|start_filename|>src/test/groovy/io/choerodon/iam/infra/common/utils/CollectionUtilsSpec.groovy<|end_filename|>
package io.choerodon.iam.infra.common.utils
import io.choerodon.iam.IntegrationTestConfiguration
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
* @author dengyouquan
* */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class CollectionUtilsSpec extends Specification {
    // Number of elements fed into each partitioning test.
    private int count = 100

    def "SubList"() {
        given: "构造请求参数"
        // 100 sequential integers; expect 10 partitions of size 10.
        List<Integer> list = new ArrayList<>()
        (0..<count).each { list.add(it) }
        when: "调用方法"
        List<List<Integer>> result = CollectionUtils.subList(list, 10)
        then: "校验结果"
        result.size() == 10
    }

    def "SubSet"() {
        given: "构造请求参数"
        // Same partitioning contract, but over a Set.
        Set<Integer> set = new HashSet<>()
        (0..<count).each { set.add(it) }
        when: "调用方法"
        Set<Set<Integer>> result = CollectionUtils.subSet(set, 10)
        then: "校验结果"
        result.size() == 10
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/ProjectTypeController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.iam.app.service.ProjectTypeService;
import io.choerodon.iam.infra.dto.ProjectTypeDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.choerodon.swagger.annotation.CustomPageRequest;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import javax.validation.Valid;
import java.util.List;
@RestController
@RequestMapping("/v1/projects/types")
public class ProjectTypeController {

    private ProjectTypeService projectTypeService;

    public ProjectTypeController(ProjectTypeService projectTypeService) {
        this.projectTypeService = projectTypeService;
    }

    public void setProjectTypeService(ProjectTypeService projectTypeService) {
        this.projectTypeService = projectTypeService;
    }

    /**
     * List every project type.
     */
    @Permission(type = ResourceType.SITE, permissionLogin = true)
    // Fix: this was the only endpoint in the controller without a swagger
    // @ApiOperation description.
    @ApiOperation(value = "查询所有项目类型")
    @GetMapping
    public List<ProjectTypeDTO> list() {
        return projectTypeService.list();
    }

    /**
     * Page through project types, optionally fuzzy-matching name/code/param.
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "分页模糊查询项目类型")
    @GetMapping(value = "/paging_query")
    @CustomPageRequest
    public ResponseEntity<PageInfo<ProjectTypeDTO>> pagingQuery(@ApiIgnore
                                                                @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                                @RequestParam(required = false) String name,
                                                                @RequestParam(required = false) String code,
                                                                @RequestParam(required = false) String param) {
        return new ResponseEntity<>(projectTypeService.pagingQuery(pageRequest, name, code, param), HttpStatus.OK);
    }

    /**
     * Create a new project type.
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "创建项目类型")
    @PostMapping
    public ResponseEntity<ProjectTypeDTO> create(@RequestBody @Valid ProjectTypeDTO projectTypeDTO) {
        return new ResponseEntity<>(projectTypeService.create(projectTypeDTO), HttpStatus.OK);
    }

    /**
     * Update the project type with the given id.
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "更新项目类型")
    @PostMapping("/{id}")
    public ResponseEntity<ProjectTypeDTO> update(@PathVariable Long id,
                                                 @RequestBody @Valid ProjectTypeDTO projectTypeDTO) {
        return new ResponseEntity<>(projectTypeService.update(id, projectTypeDTO), HttpStatus.OK);
    }

    /**
     * Duplicate-name/code check; the service throws on conflict, so a bare
     * 200 is returned on success.
     *
     * @param projectTypeDTO candidate project type
     * @return 200 OK when no duplicate exists
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "重名校验")
    @PostMapping("/check")
    public ResponseEntity check(@RequestBody ProjectTypeDTO projectTypeDTO) {
        projectTypeService.check(projectTypeDTO);
        return new ResponseEntity(HttpStatus.OK);
    }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/api/controller/v1/PasswordPolicyControllerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.controller.v1
import io.choerodon.core.exception.CommonException
import io.choerodon.core.exception.ExceptionResponse
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.api.validator.PasswordPolicyValidator
import io.choerodon.iam.infra.dto.PasswordPolicyDTO
import io.choerodon.iam.infra.mapper.PasswordPolicyMapper
import org.springframework.beans.BeanUtils
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.web.client.TestRestTemplate
import org.springframework.context.annotation.Import
import spock.lang.Specification
import spock.lang.Stepwise
import java.lang.reflect.Field
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
* @author dengyouquan
* */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
@Stepwise
class PasswordPolicyControllerSpec extends Specification {
    private static final String BASE_PATH = "/v1/organizations/{organization_id}/password_policies"
    @Autowired
    private TestRestTemplate restTemplate
    @Autowired
    private PasswordPolicyMapper policyMapper

    // NOTE: the spec is @Stepwise — features run in declaration order and the
    // update test relies on seed data (policy id 1 for organization 1).

    def "QueryByOrganizationId"() {
        given: "构造请求参数"
        def organizationId = 1L
        when: "调用方法"
        def entity = restTemplate.getForEntity(BASE_PATH, PasswordPolicyDTO, organizationId)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getId().equals(organizationId)
    }

    def "Update"() {
        given: "构造请求参数"
        def organizationId = 1L
        def passwordPolicyId = 1L
        // Start from the persisted policy so object version numbers match.
        def passwordPolicyDTO = policyMapper.selectByPrimaryKey(passwordPolicyId)
        passwordPolicyDTO.setDigitsCount(1)
        passwordPolicyDTO.setLowercaseCount(1)
        passwordPolicyDTO.setUppercaseCount(1)
        passwordPolicyDTO.setSpecialCharCount(1)
        passwordPolicyDTO.setMinLength(1)
        passwordPolicyDTO.setMaxLength(10)
        when: "调用方法[异常-密码策略不存在]"
        // Controller wraps errors in a 2xx ExceptionResponse; assertions check the code.
        def entity = restTemplate.postForEntity(BASE_PATH + "/{id}", passwordPolicyDTO, ExceptionResponse, organizationId, 1000L)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.passwordPolicy.not.exist")
        when: "调用方法[异常-组织id不存在]"
        entity = restTemplate.postForEntity(BASE_PATH + "/{id}", passwordPolicyDTO, ExceptionResponse, 1000L, passwordPolicyId)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.passwordPolicy.organizationId.not.same")
        when: "调用方法[异常-密码超过最大长度]"
        // Required char counts (4) exceed maxLength (3).
        def passwordPolicyDTO3 = new PasswordPolicyDTO()
        BeanUtils.copyProperties(passwordPolicyDTO, passwordPolicyDTO3)
        passwordPolicyDTO3.setMaxLength(3)
        entity = restTemplate.postForEntity(BASE_PATH + "/{id}", passwordPolicyDTO3, ExceptionResponse, organizationId, passwordPolicyId)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.allLeastRequiredLength.greaterThan.maxLength")
        when: "调用方法[异常-密码最小长度大于最大长度]"
        def passwordPolicyDTO4 = new PasswordPolicyDTO()
        BeanUtils.copyProperties(passwordPolicyDTO, passwordPolicyDTO4)
        passwordPolicyDTO4.setMinLength(20)
        entity = restTemplate.postForEntity(BASE_PATH + "/{id}", passwordPolicyDTO4, ExceptionResponse, organizationId, passwordPolicyId)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.maxLength.lessThan.minLength")
        when: "调用方法"
        // Happy path: a clean copy of the valid policy updates successfully.
        def passwordPolicyDTO5 = new PasswordPolicyDTO()
        BeanUtils.copyProperties(passwordPolicyDTO, passwordPolicyDTO5)
        entity = restTemplate.postForEntity(BASE_PATH + "/{id}", passwordPolicyDTO5, PasswordPolicyDTO, organizationId, passwordPolicyId)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getId().equals(passwordPolicyId)
    }

    def "Create"() {
        given: "构造请求参数"
        Long orgId = 1L
        PasswordPolicyDTO passwordPolicyDTO = new PasswordPolicyDTO()
        List<PasswordPolicyDTO> policyDOList = new ArrayList<>()
        policyDOList << new PasswordPolicyDTO()
        // Exercises the validator directly with a mocked mapper injected by reflection.
        PasswordPolicyMapper passwordPolicyMapper = Mock(PasswordPolicyMapper)
        PasswordPolicyValidator passwordPolicyValidator =
                new PasswordPolicyValidator()
        Field field = passwordPolicyValidator.getClass().getDeclaredField("passwordPolicyMapper")
        field.setAccessible(true)
        field.set(passwordPolicyValidator, passwordPolicyMapper)
        when: "测试PasswordPolicyValidator create"
        passwordPolicyValidator.create(orgId, passwordPolicyDTO)
        then: "校验结果"
        def exception = thrown(CommonException)
        exception.message.equals("error.passwordPolicy.organizationId.exist")
        1 * passwordPolicyMapper.select(_) >> { policyDOList }
        when: "测试PasswordPolicyValidator create"
        passwordPolicyValidator.create(orgId, passwordPolicyDTO)
        then: "校验结果"
        exception = thrown(CommonException)
        exception.message.equals("error.passwordPolicy.code.exist")
        passwordPolicyMapper.select(_) >>> [new ArrayList<PasswordPolicyDTO>(), policyDOList]
    }
}
<|start_filename|>react/src/app/iam/containers/global/root-user/RootUser.js<|end_filename|>
import React, { Component } from 'react';
import { inject, observer } from 'mobx-react';
import { Button, Form, Modal, Table, Tooltip, Select } from 'choerodon-ui';
import { Content, Header, Page, Permission } from '@choerodon/boot';
import { withRouter } from 'react-router-dom';
import { injectIntl, FormattedMessage } from 'react-intl';
import { handleFiltersParams } from '../../../common/util';
import RootUserStore from '../../../stores/global/root-user/RootUserStore';
import StatusTag from '../../../components/statusTag';
import '../../../common/ConfirmModal.scss';
import './RootUser.scss';
// Debounce handle for the user-search Select (see handleSelectFilter).
let timer;
const { Sidebar } = Modal;
// i18n message-key prefix shared by all texts on this page.
const intlPrefix = 'global.rootuser';
const FormItem = Form.Item;
const Option = Select.Option;
// Layout for the single form item inside the "add root user" sidebar.
const FormItemNumLayout = {
  labelCol: {
    xs: { span: 24 },
    sm: { span: 100 },
  },
  wrapperCol: {
    xs: { span: 24 },
    sm: { span: 10 },
  },
};
@Form.create({})
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class RootUser extends Component {
  state = this.getInitState();

  // Initial (and reset) state: first page, id-descending sort, no filters,
  // sidebar closed. The refresh button restores this exact state.
  getInitState() {
    return {
      visible: false,
      pagination: {
        current: 1,
        pageSize: 10,
        total: 0,
      },
      sort: {
        columnKey: 'id',
        order: 'descend',
      },
      filters: {},
      params: [],
      onlyRootUser: false,
      submitting: false,
      selectLoading: true,
    };
  }

  componentWillMount() {
    this.reload();
  }

  // True when none of the four table-column filters carries a value.
  isEmptyFilters = ({ loginName, realName, enabled, locked }) => {
    if ((loginName && loginName.length) ||
      (realName && realName.length) ||
      (enabled && enabled.length) ||
      (locked && locked.length)
    ) {
      return false;
    }
    return true;
  }

  // Load the root-user table. Each argument falls back to the value kept in
  // state, so reload() with no arguments re-fetches the current view.
  reload = (paginationIn, filtersIn, sortIn, paramsIn) => {
    const {
      pagination: paginationState,
      sort: sortState,
      filters: filtersState,
      params: paramsState,
    } = this.state;
    const pagination = paginationIn || paginationState;
    const sort = sortIn || sortState;
    const filters = filtersIn || filtersState;
    const params = paramsIn || paramsState;
    this.setState({
      loading: true,
      filters,
    });
    // If params or filters contain special characters, clear the table data
    const isIncludeSpecialCode = handleFiltersParams(params, filters);
    if (isIncludeSpecialCode) {
      RootUserStore.setRootUserData([]);
      this.setState({
        loading: false,
        sort,
        params,
        pagination: {
          total: 0,
        },
      });
      return;
    }
    RootUserStore.loadRootUserData(pagination, filters, sort, params).then((data) => {
      // Only when the view is unfiltered does the total reflect the real
      // number of root users; <=1 disables the "remove" button (onlyRootUser).
      if (this.isEmptyFilters(filters) && !params.length) {
        this.setState({
          onlyRootUser: data.total <= 1,
        });
      }
      RootUserStore.setRootUserData(data.list || []);
      this.setState({
        pagination: {
          current: data.pageNum,
          pageSize: data.pageSize,
          total: data.total,
        },
        loading: false,
        sort,
        params,
      });
    });
  }

  tableChange = (pagination, filters, sort, params) => {
    this.reload(pagination, filters, sort, params);
  }

  // Open the "add root user" sidebar with a clean form.
  openSidebar = () => {
    const { resetFields } = this.props.form;
    resetFields();
    this.setState({
      visible: true,
    });
  }

  closeSidebar = () => {
    this.setState({
      submitting: false,
      visible: false,
    });
  }

  // Confirm-then-delete a root user; reloads the table on success.
  handleDelete = (record) => {
    const { intl } = this.props;
    Modal.confirm({
      className: 'c7n-iam-confirm-modal',
      title: intl.formatMessage({ id: `${intlPrefix}.remove.title` }),
      content: intl.formatMessage({ id: `${intlPrefix}.remove.content` }, {
        name: record.realName,
      }),
      onOk: () => RootUserStore.deleteRootUser(record.id).then(({ failed, message }) => {
        if (failed) {
          Choerodon.prompt(message);
        } else {
          Choerodon.prompt(intl.formatMessage({ id: 'remove.success' }));
          this.reload();
        }
      }),
    });
  }

  // Sidebar OK: validate the member select, then grant root to the chosen users.
  handleOk = (e) => {
    const { intl } = this.props;
    const { validateFields } = this.props.form;
    e.preventDefault();
    validateFields((err, { member }) => {
      if (!err) {
        this.setState({
          submitting: true,
        });
        RootUserStore.addRootUser(member).then(({ failed, message }) => {
          if (failed) {
            Choerodon.prompt(message);
          } else {
            Choerodon.prompt(intl.formatMessage({ id: 'add.success' }));
            this.closeSidebar();
            this.reload();
          }
        });
      }
    });
  };

  // Render the Select options for the user search; avatar falls back to the
  // first character of the real name.
  getUserOption = () => {
    const usersData = RootUserStore.getUsersData;
    return usersData && usersData.length > 0 ? (
      usersData.map(({ id, imageUrl, loginName, realName }) => (
        <Option key={id} value={id} label={`${loginName}${realName}`}>
          <div className="c7n-iam-rootuser-user-option">
            <div className="c7n-iam-rootuser-user-option-avatar">
              {
                imageUrl ? <img src={imageUrl} alt="userAvatar" style={{ width: '100%' }} /> :
                <span className="c7n-iam-rootuser-user-option-avatar-noavatar">{realName && realName.split('')[0]}</span>
              }
            </div>
            <span>{loginName}{realName}</span>
          </div>
        </Option>
      ))
    ) : null;
  }

  // Debounced (300 ms) remote search for the user Select; an empty query
  // loads immediately.
  handleSelectFilter = (value) => {
    this.setState({
      selectLoading: true,
    });
    const queryObj = {
      param: value,
      sort: 'id',
      organization_id: 0,
    };
    if (timer) {
      clearTimeout(timer);
    }
    if (value) {
      timer = setTimeout(() => (this.loadUsers(queryObj)), 300);
    } else {
      return this.loadUsers(queryObj);
    }
  }

  // Load user info across the whole platform
  loadUsers = (queryObj) => {
    RootUserStore.loadUsers(queryObj).then((data) => {
      RootUserStore.setUsersData(data.list.slice());
      this.setState({
        selectLoading: false,
      });
    });
  }

  // Build the root-user table: login/real name, enabled & locked status, and
  // a delete action that is disabled while only one root user remains.
  renderTable() {
    const { AppState, intl } = this.props;
    const { type } = AppState.currentMenuType;
    const { filters, sort: { columnKey, order } } = this.state;
    const rootUserData = RootUserStore.getRootUserData.slice();
    const columns = [
      {
        title: <FormattedMessage id={`${intlPrefix}.loginname`} />,
        key: 'loginName',
        dataIndex: 'loginName',
        filters: [],
        filteredValue: filters.loginName || [],
      },
      {
        title: <FormattedMessage id={`${intlPrefix}.realname`} />,
        key: 'realName',
        dataIndex: 'realName',
        filters: [],
        filteredValue: filters.realName || [],
      },
      {
        title: <FormattedMessage id={`${intlPrefix}.status.enabled`} />,
        key: 'enabled',
        dataIndex: 'enabled',
        render: enabled => (<StatusTag mode="icon" name={intl.formatMessage({ id: enabled ? 'enable' : 'disable' })} colorCode={enabled ? 'COMPLETED' : 'DISABLE'} />),
        filters: [{
          text: intl.formatMessage({ id: 'enable' }),
          value: 'true',
        }, {
          text: intl.formatMessage({ id: 'disable' }),
          value: 'false',
        }],
        filteredValue: filters.enabled || [],
      },
      {
        title: <FormattedMessage id={`${intlPrefix}.status.locked`} />,
        key: 'locked',
        dataIndex: 'locked',
        filters: [{
          text: intl.formatMessage({ id: `${intlPrefix}.normal` }),
          value: 'false',
        }, {
          text: intl.formatMessage({ id: `${intlPrefix}.locked` }),
          value: 'true',
        }],
        filteredValue: filters.locked || [],
        render: lock => intl.formatMessage({ id: lock ? `${intlPrefix}.locked` : `${intlPrefix}.normal` }),
      },
      {
        title: '',
        width: 100,
        align: 'right',
        render: (text, record) => {
          const { onlyRootUser } = this.state;
          return (
            <div>
              <Permission
                service={['iam-service.user.deleteDefaultUser']}
                type={type}
              >
                <Tooltip
                  title={onlyRootUser ? <FormattedMessage id={`${intlPrefix}.remove.disable.tooltip`} /> : <FormattedMessage id="remove" />}
                  placement={onlyRootUser ? 'bottomRight' : 'bottom'}
                  overlayStyle={{ maxWidth: '300px' }}
                >
                  <Button
                    size="small"
                    disabled={onlyRootUser}
                    onClick={this.handleDelete.bind(this, record)}
                    shape="circle"
                    icon="delete_forever"
                  />
                </Tooltip>
              </Permission>
            </div>
          );
        },
      },
    ];
    return (
      <Table
        loading={this.state.loading}
        pagination={this.state.pagination}
        columns={columns}
        indentSize={0}
        dataSource={rootUserData}
        filters={this.state.params}
        rowKey="id"
        onChange={this.tableChange}
        filterBarPlaceholder={intl.formatMessage({ id: 'filtertable' })}
      />
    );
  }

  render() {
    const { AppState, form, intl } = this.props;
    const { type } = AppState.currentMenuType;
    const { getFieldDecorator } = form;
    return (
      <Page
        className="root-user-setting"
        service={[
          'iam-service.user.pagingQueryAdminUsers',
          'iam-service.user.addDefaultUsers',
          'iam-service.user.deleteDefaultUser',
          'iam-service.role-member.queryAllUsers',
        ]}
      >
        <Header title={<FormattedMessage id={`${intlPrefix}.header.title`} />}>
          <Permission
            service={['iam-service.user.addDefaultUsers']}
            type={type}
          >
            <Button
              onClick={this.openSidebar}
              icon="playlist_add"
            >
              <FormattedMessage id="add" />
            </Button>
          </Permission>
          <Button
            icon="refresh"
            onClick={() => {
              this.setState(this.getInitState(), () => {
                this.reload();
              });
            }}
          >
            <FormattedMessage id="refresh" />
          </Button>
        </Header>
        <Content
          code={intlPrefix}
          values={{ name: AppState.getSiteInfo.systemName || 'Choerodon' }}
        >
          {this.renderTable()}
          <Sidebar
            title={<FormattedMessage id={`${intlPrefix}.add`} />}
            onOk={this.handleOk}
            okText={<FormattedMessage id="add" />}
            cancelText={<FormattedMessage id="cancel" />}
            onCancel={this.closeSidebar}
            visible={this.state.visible}
            confirmLoading={this.state.submitting}
          >
            <Content
              className="sidebar-content"
              code={`${intlPrefix}.add`}
              values={{ name: AppState.getSiteInfo.systemName || 'Choerodon' }}
            >
              <FormItem
                {...FormItemNumLayout}
              >
                {getFieldDecorator('member', {
                  rules: [{
                    required: true,
                    message: intl.formatMessage({ id: `${intlPrefix}.require.msg` }),
                  }],
                  initialValue: [],
                })(
                  <Select
                    label={<FormattedMessage id={`${intlPrefix}.user`} />}
                    optionLabelProp="label"
                    allowClear
                    style={{ width: 512 }}
                    mode="multiple"
                    optionFilterProp="children"
                    filterOption={false}
                    filter
                    onFilterChange={this.handleSelectFilter}
                    notFoundContent={intl.formatMessage({ id: `${intlPrefix}.notfound.msg` })}
                    loading={this.state.selectLoading}
                  >
                    {this.getUserOption()}
                  </Select>,
                )}
              </FormItem>
            </Content>
          </Sidebar>
        </Content>
      </Page>
    );
  }
}
<|start_filename|>react/src/app/iam/stores/user/organization-info/OrganizationInfoStore.js<|end_filename|>
import { action, computed, observable } from 'mobx';
import { axios, store } from '@choerodon/boot';
import queryString from 'query-string';
@store('OrganizationInfoStore')
class OrganizationInfoStore {
  // Organizations the current user belongs to (sidebar list).
  @observable myOrganizationData = [];
  // Role rows of one user within organizations (main table).
  @observable organizationRolesData = [];
  @observable loading = false;
  @observable sidebarVisible = false;
  @observable pagination = {
    current: 1,
    pageSize: 10,
    total: 0,
  };
  // Free-text search params for loadData.
  @observable params = [];
  @observable showSize = 10;

  // Reset to page 1 with no search params and refetch.
  refresh(id) {
    this.loadData(id, { current: 1, pageSize: 10 }, []);
  }

  @action
  setShowSize(size) {
    this.showSize = size;
  }

  @action
  showSideBar() {
    this.sidebarVisible = true;
  }

  @action
  hideSideBar() {
    this.sidebarVisible = false;
  }

  // Fetch the first 20 enabled organizations of the current user.
  // On success the payload's `list` (or the raw result when `list` is
  // absent) becomes myOrganizationData.
  @action
  loadMyOrganizations() {
    this.loading = true;
    return axios.get(`/iam/v1/users/self/organizations/paging_query?${queryString.stringify({
      page: 1,
      size: 20,
      enabled: true,
    })}`).then(action((result) => {
      const { failed, list } = result;
      if (!failed) {
        this.myOrganizationData = list || result;
      }
      this.loading = false;
    }))
      .catch(action((error) => {
        Choerodon.handleResponseError(error);
        this.loading = false;
      }));
  }

  // Fetch one page of organization roles for user `id`; updates table data
  // and pagination total together.
  @action
  loadData(id, pagination = this.pagination, params = this.params) {
    this.loading = true;
    this.params = params;
    return axios.get(`/iam/v1/users/${id}/organization_roles?${queryString.stringify({
      page: pagination.current,
      size: pagination.pageSize,
      params: params.join(','),
    })}`)
      .then(action(({ failed, list, total }) => {
        if (!failed) {
          this.organizationRolesData = list;
          this.pagination = {
            ...pagination,
            total,
          };
        }
        this.loading = false;
      }))
      .catch(action((error) => {
        Choerodon.handleResponseError(error);
        this.loading = false;
      }));
  }
}

// Module-level singleton shared by all consumers of this store.
const organizationInfoStore = new OrganizationInfoStore();
export default organizationInfoStore;
<|start_filename|>src/main/java/io/choerodon/iam/app/service/ProjectTypeService.java<|end_filename|>
package io.choerodon.iam.app.service;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.infra.dto.ProjectTypeDTO;
import java.util.List;
/**
 * Application service for project types: list, page, create, update and
 * duplicate check. Implemented elsewhere; contract mirrors
 * {@code ProjectTypeController}.
 */
public interface ProjectTypeService {

    /** All project types. */
    List<ProjectTypeDTO> list();

    /** One page of project types fuzzy-matched by name/code/param. */
    PageInfo<ProjectTypeDTO> pagingQuery(PageRequest pageRequest, String name, String code, String param);

    /** Create and return the persisted project type. */
    ProjectTypeDTO create(ProjectTypeDTO projectTypeDTO);

    /** Update the project type with the given id and return the result. */
    ProjectTypeDTO update(Long id, ProjectTypeDTO projectTypeDTO);

    /** Duplicate check; expected to throw on conflict — TODO confirm in impl. */
    void check(ProjectTypeDTO projectTypeDTO);
}
<|start_filename|>docker/Dockerfile<|end_filename|>
# Java base image from the Choerodon tools registry.
FROM registry.cn-hangzhou.aliyuncs.com/choerodon-tools/javabase:0.8.0
# The build context must provide the service jar as app.jar.
COPY app.jar /iam-service.jar
# JAVA_OPTS / SKYWALKING_OPTS let deployments inject JVM and SkyWalking
# agent flags without rebuilding the image.
CMD java $JAVA_OPTS $SKYWALKING_OPTS -jar /iam-service.jar
<|start_filename|>src/main/resources/script/db/iam_book_mark.groovy<|end_filename|>
package script.db
// Liquibase (Groovy DSL) changelog for the IAM_BOOK_MARK table: per-user UI
// bookmarks (name, url, icon, color, order).
databaseChangeLog(logicalFilePath: 'script/db/iam_book_mark.groovy') {
    changeSet(author: '<EMAIL>', id: '2018-11-05-iam-book-mark') {
        // Databases without auto-increment (e.g. Oracle) get an explicit sequence.
        if (helper.dbType().isSupportSequence()) {
            createSequence(sequenceName: 'IAM_BOOK_MARK_S', startValue: "1")
        }
        createTable(tableName: "IAM_BOOK_MARK") {
            column(name: 'ID', type: 'BIGINT UNSIGNED', autoIncrement: true, remarks: '表ID,主键,供其他表做外键,unsigned bigint、单表时自增、步长为 1') {
                constraints(primaryKey: true, primaryKeyName: 'PK_IAM_BOOK_MARK')
            }
            column(name: 'NAME', type: 'VARCHAR(64)', remarks: '书签名称') {
                constraints(nullable: false)
            }
            column(name: 'URL', type: 'VARCHAR(255)', remarks: '书签url') {
                constraints(nullable: false)
            }
            column(name: 'ICON', type: 'VARCHAR(128)', remarks: '图标的code值') {
                constraints(nullable: false)
            }
            column(name: 'COLOR', type: 'VARCHAR(32)', remarks: '图标的颜色')
            column(name: 'SORT', type: 'BIGINT UNSIGNED', remarks: '书签顺序') {
                constraints(nullable: false)
            }
            column(name: 'USER_ID', type: 'BIGINT UNSIGNED', remarks: '用户ID') {
                constraints(nullable: false)
            }
            // Standard Choerodon audit columns.
            column(name: "OBJECT_VERSION_NUMBER", type: "BIGINT UNSIGNED", defaultValue: "1") {
                constraints(nullable: true)
            }
            column(name: "CREATED_BY", type: "BIGINT UNSIGNED", defaultValue: "0") {
                constraints(nullable: true)
            }
            column(name: "CREATION_DATE", type: "DATETIME", defaultValueComputed: "CURRENT_TIMESTAMP")
            column(name: "LAST_UPDATED_BY", type: "BIGINT UNSIGNED", defaultValue: "0") {
                constraints(nullable: true)
            }
            column(name: "LAST_UPDATE_DATE", type: "DATETIME", defaultValueComputed: "CURRENT_TIMESTAMP")
        }
    }
    changeSet(author: 'superlee', id: '2019-07-18-iam-book-mark-add-remark') {
        setTableRemarks(tableName:"IAM_BOOK_MARK",remarks: "书签表")
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/asserts/MenuAssertHelper.java<|end_filename|>
package io.choerodon.iam.infra.asserts;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.infra.dto.MenuDTO;
import io.choerodon.iam.infra.mapper.MenuMapper;
import org.springframework.stereotype.Component;
/**
* 菜单断言帮助类
*
* @author superlee
* @since 0.16.0
*/
@Component
public class MenuAssertHelper extends AssertHelper {

    private MenuMapper menuMapper;

    public MenuAssertHelper(MenuMapper menuMapper) {
        this.menuMapper = menuMapper;
    }

    /**
     * Fail when a menu with the given code is already present.
     */
    public void codeExisted(String code) {
        MenuDTO probe = new MenuDTO();
        probe.setCode(code);
        boolean duplicated = !menuMapper.select(probe).isEmpty();
        if (duplicated) {
            throw new CommonException("error.menu.code.existed");
        }
    }

    /**
     * Load the menu by id, failing with the default message when absent.
     */
    public MenuDTO menuNotExisted(Long id) {
        return menuNotExisted(id, "error.menu.not.exist");
    }

    /**
     * Load the menu by id, failing with the supplied message code when absent.
     */
    public MenuDTO menuNotExisted(Long id, String message) {
        MenuDTO menu = menuMapper.selectByPrimaryKey(id);
        if (menu == null) {
            throw new CommonException(message, id);
        }
        return menu;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/enums/ExcelSuffix.java<|end_filename|>
package io.choerodon.iam.infra.enums;
/**
* @author superlee
*/
public enum ExcelSuffix {
    /**
     * File suffix for Excel 2003 workbooks
     */
    XLS("xls"),
    /**
     * File suffix for Excel 2007+ workbooks
     */
    XLSX("xlsx");

    // Fix: enum constants are shared singletons, so the backing value must be
    // immutable; the field was previously mutable.
    private final String value;

    ExcelSuffix(String value) {
        this.value = value;
    }

    /**
     * @return the lowercase file suffix without the leading dot
     */
    public String value() {
        return value;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/ClientMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import java.util.List;
import io.choerodon.iam.infra.dto.ClientDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import io.choerodon.iam.api.query.ClientRoleQuery;
import io.choerodon.iam.api.dto.SimplifiedClientDTO;
/**
* @author wuguokai
*/
public interface ClientMapper extends Mapper<ClientDTO> {
    /**
     * 分页模糊查询客户端
     *
     * @param clientDTO 客户端对象
     * @param param 客户端模糊查询参数
     * @return 客户端集合
     */
    List<ClientDTO> fulltextSearch(@Param("clientDTO") ClientDTO clientDTO,
                                   @Param("param") String param);

    /**
     * Count clients holding the given role under a source; pairs with
     * {@link #selectClientsByRoleIdAndOptions}. Exact semantics live in
     * ClientMapper.xml — TODO confirm against that SQL.
     */
    Integer selectClientCountFromMemberRoleByOptions(
            @Param("roleId") Long roleId,
            @Param("sourceType") String sourceType,
            @Param("sourceId") Long sourceId,
            @Param("clientRoleSearchDTO") ClientRoleQuery clientRoleSearchDTO,
            @Param("param") String param);

    /**
     * Clients holding the given role under a source, filtered by the role
     * query and fuzzy param.
     */
    List<ClientDTO> selectClientsByRoleIdAndOptions(
            @Param("roleId") Long roleId,
            @Param("sourceId") Long sourceId,
            @Param("sourceType") String sourceType,
            @Param("clientRoleSearchDTO") ClientRoleQuery clientRoleSearchDTO,
            @Param("param") String param);

    /**
     * Minimal id/name projection of all clients matching {@code params}.
     */
    List<SimplifiedClientDTO> selectAllClientSimplifiedInfo(@Param("params") String params);
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/BookMarkController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import io.choerodon.base.annotation.Permission;
import io.choerodon.core.validator.ValidList;
import io.choerodon.iam.app.service.BookMarkService;
import io.choerodon.iam.infra.dto.BookMarkDTO;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import javax.validation.Valid;
import java.util.List;
/**
* @author dengyouquan
**/
@RestController
@RequestMapping(value = "/v1/bookmarks")
public class BookMarkController {

    private BookMarkService bookMarkService;

    public BookMarkController(BookMarkService bookMarkService) {
        this.bookMarkService = bookMarkService;
    }

    /**
     * Create a bookmark for the current user.
     */
    @Permission(permissionLogin = true)
    @ApiOperation(value = "创建书签")
    @PostMapping
    public ResponseEntity<BookMarkDTO> create(@RequestBody @Valid BookMarkDTO bookMarkDTO) {
        return new ResponseEntity<>(bookMarkService.create(bookMarkDTO), HttpStatus.OK);
    }

    /**
     * Batch-update bookmarks (e.g. reordering).
     */
    @Permission(permissionLogin = true)
    // Fix: the swagger description said "修改客户端" ("update client") — a
    // copy-paste from ClientController; this endpoint updates bookmarks.
    @ApiOperation(value = "修改书签")
    @PutMapping
    public ResponseEntity<List<BookMarkDTO>> update(@RequestBody @Validated ValidList<BookMarkDTO> bookMarkDTOS) {
        return new ResponseEntity<>(bookMarkService.updateAll(bookMarkDTOS), HttpStatus.OK);
    }

    /**
     * Delete one bookmark by id.
     */
    @Permission(permissionLogin = true)
    @ApiOperation(value = "删除书签")
    @DeleteMapping(value = "/{id}")
    public void delete(@PathVariable("id") Long id) {
        bookMarkService.delete(id);
    }

    /**
     * List every bookmark of the current user.
     */
    @Permission(permissionLogin = true)
    @ApiOperation(value = "查询当前用户全部书签")
    @GetMapping
    public ResponseEntity<List<BookMarkDTO>> list() {
        return new ResponseEntity<>(bookMarkService.list(), HttpStatus.OK);
    }
}
<|start_filename|>react/src/app/iam/stores/dashboard/failedSaga/FailedSagaStore.js<|end_filename|>
import { action, computed, observable, toJS } from 'mobx';
import { axios, store } from '@choerodon/boot';
import moment from 'moment';
@store('FailedSagaStore')
class FailedSagaStore {
  // Chart payload for the failed-saga dashboard (null until first load).
  @observable data = null;
  @observable loading = true;
  // Default window: the last 7 days (inclusive of today).
  @observable startTime = moment().subtract(6, 'days');
  @observable endTime = moment();
  @observable showSize = 220;

  @action
  setShowSize(size) {
    this.showSize = size;
  }

  @action setChartData(data) {
    this.data = data;
  }

  @computed get getChartData() {
    return this.data;
  }

  @action setLoading(flag) {
    this.loading = flag;
  }

  @action setStartTime(data) {
    this.startTime = data;
  }

  @computed get getStartTime() {
    return this.startTime;
  }

  @action setEndTime(data) {
    this.endTime = data;
  }

  @computed get getEndTime() {
    return this.endTime;
  }

  // Fetch failed-saga counts for [beginDate, endDate]; stores the payload on
  // success, surfaces failures via prompt, and always clears the loading flag.
  loadData = (beginDate, endDate) => axios.get(`/asgard/v1/sagas/instances/failed/count?begin_date=${beginDate}&end_date=${endDate}`)
    .then((res) => {
      if (res.failed) {
        Choerodon.prompt(res.message);
      } else {
        this.setChartData(res);
      }
      this.setLoading(false);
    }).catch((error) => {
      this.setLoading(false);
      Choerodon.handleResponseError(error);
    })
}

// Module-level singleton shared by all consumers of this store.
const failedSagaStore = new FailedSagaStore();
export default failedSagaStore;
<|start_filename|>src/test/groovy/io/choerodon/iam/api/controller/v1/ApplicationControllerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.controller.v1
import io.choerodon.asgard.saga.producer.TransactionalProducer
import io.choerodon.core.exception.CommonException
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.api.query.ApplicationQuery
import io.choerodon.iam.app.service.ApplicationService
import io.choerodon.iam.app.service.impl.ApplicationServiceImpl
import io.choerodon.iam.infra.asserts.ApplicationAssertHelper
import io.choerodon.iam.infra.asserts.OrganizationAssertHelper
import io.choerodon.iam.infra.asserts.ProjectAssertHelper
import io.choerodon.iam.infra.dto.ApplicationDTO
import io.choerodon.iam.infra.enums.ApplicationCategory
import io.choerodon.iam.infra.enums.ApplicationType
import io.choerodon.iam.infra.mapper.ApplicationExplorationMapper
import io.choerodon.iam.infra.mapper.ApplicationMapper
import org.modelmapper.ModelMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import spock.lang.Shared
import spock.lang.Specification
import spock.lang.Stepwise
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
@Stepwise
class ApplicationControllerSpec extends Specification {
    @Autowired
    ApplicationMapper applicationMapper
    @Autowired
    ApplicationExplorationMapper applicationExplorationMapper
    @Autowired
    OrganizationAssertHelper organizationAssertHelper
    @Autowired
    ProjectAssertHelper projectAssertHelper
    @Autowired
    ApplicationAssertHelper applicationAssertHelper
    // Controller is instantiated by hand (not @Autowired) so the saga
    // producer can be mocked out.
    ApplicationController controller
    TransactionalProducer producer
    // Id of the application created in "Create"; the spec is @Stepwise, so
    // later features reuse it in declaration order.
    @Shared
    Long id

    def "setup"() {
        producer = Mock(TransactionalProducer)
        ApplicationService service = new ApplicationServiceImpl(applicationMapper, producer, applicationExplorationMapper,
                organizationAssertHelper, projectAssertHelper, applicationAssertHelper)
        controller = new ApplicationController(service)
    }

    def "Create"() {
        given:
        ApplicationDTO dto = new ApplicationDTO()
        dto.setCode("code")
        dto.setName("name")
        dto.setApplicationCategory("application")
        dto.setApplicationType("test")
        dto.setOrganizationId(1L)
        dto.setEnabled(true)
        when:
        def result = controller.create(1, dto)
        id = result.getBody().getId()
        then:
        result.statusCode.is2xxSuccessful()
        result.body.code == 'code'
    }

    def "Update"() {
        given:
        // Re-read the persisted row so the version number matches for update.
        ApplicationDTO app = applicationAssertHelper.applicationNotExisted(id)
        when:
        def result = controller.update(1, id, app)
        then:
        result.statusCode.is2xxSuccessful()
    }

    def "PagingQuery"() {
        given:
        // PageRequest pageRequest = new PageRequest(0, 10)
        when:
        def result = controller.pagingQuery(1L, 0, 10,false, new ApplicationQuery())
        then:
        result.statusCode.is2xxSuccessful()
        result.body.list.size() > 0
    }

    def "Enabled"() {
        when:
        controller.disable(id)
        then:
        noExceptionThrown()
    }

    def "Disable"() {
        when:
        controller.enabled(id)
        then:
        noExceptionThrown()
    }

    def "Types"() {
        when:
        def result = controller.types()
        then:
        result.body.contains("test")
    }

    def "Check"() {
        given:
        ApplicationDTO dto = new ApplicationDTO()
        dto.setName("nnn")
        dto.setOrganizationId(1L)
        when: "插入校验name"
        controller.check(1L, dto)
        then:
        noExceptionThrown()
        when: "更新校验name"
        dto.setId(1)
        controller.check(1L, dto)
        then:
        noExceptionThrown()
        when: "更新校验code"
        dto.setName(null)
        dto.setCode("ccc")
        controller.check(1L, dto)
        then:
        noExceptionThrown()
        when: "插入校验code"
        dto.setId(null)
        controller.check(1L, dto)
        then:
        noExceptionThrown()
    }

    def "addToCombination"() {
        given: "初始化5个组合应用和一个普通应用"
        // Five COMBINATION apps (ids 100..500) plus one plain APPLICATION (600).
        ApplicationDTO dto = new ApplicationDTO()
        dto.setOrganizationId(1L)
        dto.setProjectId(0L)
        dto.setEnabled(true)
        dto.setApplicationType(ApplicationType.DEVELOPMENT.code())
        for (int i = 0; i < 5; i++) {
            dto.setId((i + 1) * 100)
            dto.setName(i + "")
            dto.setCode(i + "")
            dto.setApplicationCategory(ApplicationCategory.COMBINATION.code())
            controller.create(1L, dto)
        }
        dto.setName("n123")
        dto.setCode("c123")
        dto.setId(600L)
        dto.setApplicationCategory(ApplicationCategory.APPLICATION.code())
        controller.create(1L, dto)
        when: "添加组"
        def ids = [200L, 300L] as Long[]
        controller.addToCombination(1L, 100L, ids)
        then:
        noExceptionThrown()
        when: "移除300"
        // Passing only 200 implicitly removes 300 from the combination.
        ids = [200L] as Long[]
        controller.addToCombination(1L, 100L, ids)
        then:
        noExceptionThrown()
        when: "添加自己"
        // Self-reference must be rejected.
        ids = [100L] as Long[]
        controller.addToCombination(1L, 100L, ids)
        then:
        thrown(CommonException)
    }

    def "queryDescendant"() {
        when:
        def result = controller.queryDescendant(1L, 100L)
        then:
        result.statusCode.is2xxSuccessful()
        // App 100 plus its remaining descendant 200.
        result.body.size() == 2
    }

    def "queryEnabledApplication"() {
        when:
        def result = controller.queryEnabledApplication(1, 100)
        then:
        result.statusCode.is2xxSuccessful()
    }

    def "queryApplicationList"() {
        given:
        // PageRequest pageRequest = new PageRequest(0, 10)
        when:
        def result = controller.queryApplicationList(0, 10, 1L, 100L, null, null)
        then:
        result.statusCode.is2xxSuccessful()
    }

    def "query"() {
        when:
        def result = controller.query(1, 100, false)
        then:
        result.statusCode.is2xxSuccessful()
        result.body.getName() == "0"
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/ResetPasswordDTO.java<|end_filename|>
package io.choerodon.iam.api.dto;
/**
 * Payload describing whether a GitLab password reset is enabled and,
 * if so, where the user should go to perform it.
 *
 * @author jiameng.cao
 * @date 2019/6/11
 */
public class ResetPasswordDTO {
    // NOTE(review): snake_case name violates Java conventions but presumably
    // matches the serialized JSON field name expected by consumers — confirm
    // with the API contract before renaming.
    private Boolean enable_reset;
    // Absolute URL the user visits to reset the GitLab password.
    private String resetGitlabPasswordUrl;
    public Boolean getEnable_reset() {
        return enable_reset;
    }
    public void setEnable_reset(Boolean enable_reset) {
        this.enable_reset = enable_reset;
    }
    public String getResetGitlabPasswordUrl() {
        return resetGitlabPasswordUrl;
    }
    public void setResetGitlabPasswordUrl(String resetGitlabPasswordUrl) {
        this.resetGitlabPasswordUrl = resetGitlabPasswordUrl;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/LabelServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import io.choerodon.iam.app.service.LabelService;
import io.choerodon.iam.infra.dto.LabelDTO;
import io.choerodon.iam.infra.mapper.LabelMapper;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;
import java.util.List;
/**
* @author superlee
*/
@Service
public class LabelServiceImpl implements LabelService {
    /** Data-access mapper for label records; injected via the constructor. */
    private final LabelMapper labelMapper;
    /**
     * Constructor injection of the label mapper.
     *
     * @param labelMapper mapper used for all label queries
     */
    public LabelServiceImpl(LabelMapper labelMapper) {
        this.labelMapper = labelMapper;
    }
    /**
     * Lists labels matching the given example/filter object by delegating
     * straight to the mapper.
     *
     * @param label filter criteria
     * @return matching labels
     */
    @Override
    public List<LabelDTO> listByOption(LabelDTO label) {
        return labelMapper.listByOption(label);
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/dto/ProjectTypeDTO.java<|end_filename|>
package io.choerodon.iam.infra.dto;
import io.choerodon.mybatis.entity.BaseDTO;
import io.swagger.annotations.ApiModelProperty;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.Pattern;
/**
* @author superlee
* @since 2019-04-23
*/
@Table(name = "fd_project_type")
public class ProjectTypeDTO extends BaseDTO {
    private static final String CODE_REGULAR_EXPRESSION = "^[a-zA-Z][a-zA-Z0-9-_.//]*$";
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;
    // FIX: the @ApiModelProperty labels and validation annotations for "name"
    // and "code" were attached to the wrong fields (the code pattern and
    // error.code.* messages were on "name", and vice versa). They are now on
    // the fields they describe.
    @ApiModelProperty(value = "项目类型名称")
    @NotEmpty(message = "error.name.empty")
    private String name;
    @ApiModelProperty(value = "项目类型编码")
    @Pattern(regexp = CODE_REGULAR_EXPRESSION, message = "error.code.illegal")
    @NotEmpty(message = "error.code.empty")
    private String code;
    @ApiModelProperty(value = "项目类型描述")
    private String description;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getCode() {
        return code;
    }
    public void setCode(String code) {
        this.code = code;
    }
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/ApplicationServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import io.choerodon.asgard.saga.annotation.Saga;
import io.choerodon.asgard.saga.producer.StartSagaBuilder;
import io.choerodon.asgard.saga.producer.TransactionalProducer;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.iam.ResourceLevel;
import io.choerodon.iam.api.query.ApplicationQuery;
import io.choerodon.iam.app.service.ApplicationService;
import io.choerodon.iam.infra.asserts.ApplicationAssertHelper;
import io.choerodon.iam.infra.asserts.OrganizationAssertHelper;
import io.choerodon.iam.infra.asserts.ProjectAssertHelper;
import io.choerodon.iam.infra.common.utils.CollectionUtils;
import io.choerodon.iam.infra.dto.ApplicationDTO;
import io.choerodon.iam.infra.dto.ApplicationExplorationDTO;
import io.choerodon.iam.infra.enums.ApplicationCategory;
import io.choerodon.iam.infra.enums.ApplicationType;
import io.choerodon.iam.infra.exception.EmptyParamException;
import io.choerodon.iam.infra.mapper.ApplicationExplorationMapper;
import io.choerodon.iam.infra.mapper.ApplicationMapper;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import static io.choerodon.iam.infra.common.utils.SagaTopic.Application.APP_CREATE;
import static io.choerodon.iam.infra.common.utils.SagaTopic.Application.APP_DELETE;
import static io.choerodon.iam.infra.common.utils.SagaTopic.Application.APP_DISABLE;
import static io.choerodon.iam.infra.common.utils.SagaTopic.Application.APP_ENABLE;
import static io.choerodon.iam.infra.common.utils.SagaTopic.Application.APP_UPDATE;
/**
* @author superlee
* @since 0.15.0
*/
@Service
public class ApplicationServiceImpl implements ApplicationService {
private static final Long PROJECT_DOES_NOT_EXIST_ID = 0L;
private static final String SEPARATOR = "/";
private ApplicationMapper applicationMapper;
private ApplicationExplorationMapper applicationExplorationMapper;
private OrganizationAssertHelper organizationAssertHelper;
private ProjectAssertHelper projectAssertHelper;
private ApplicationAssertHelper applicationAssertHelper;
private TransactionalProducer producer;
@Value("${choerodon.devops.message:false}")
private boolean devopsMessage;
public ApplicationServiceImpl(ApplicationMapper applicationMapper,
TransactionalProducer producer,
ApplicationExplorationMapper applicationExplorationMapper,
OrganizationAssertHelper organizationAssertHelper,
ProjectAssertHelper projectAssertHelper,
ApplicationAssertHelper applicationAssertHelper) {
this.applicationMapper = applicationMapper;
this.producer = producer;
this.applicationExplorationMapper = applicationExplorationMapper;
this.organizationAssertHelper = organizationAssertHelper;
this.projectAssertHelper = projectAssertHelper;
this.applicationAssertHelper = applicationAssertHelper;
}
@Override
@Saga(code = APP_CREATE, description = "iam创建应用", inputSchemaClass = ApplicationDTO.class)
@Transactional(rollbackFor = Exception.class)
public ApplicationDTO create(ApplicationDTO applicationDTO) {
organizationAssertHelper.organizationNotExisted(applicationDTO.getOrganizationId());
applicationAssertHelper.applicationExisted(applicationDTO);
validate(applicationDTO);
//combination-application不能选项目
if (ObjectUtils.isEmpty(applicationDTO.getProjectId())) {
applicationDTO.setProjectId(0L);
}
Long projectId = applicationDTO.getProjectId();
if (!PROJECT_DOES_NOT_EXIST_ID.equals(projectId)) {
projectAssertHelper.projectNotExisted(projectId);
}
String combination = ApplicationCategory.COMBINATION.code();
boolean isCombination = combination.equals(applicationDTO.getApplicationCategory());
ApplicationDTO result;
boolean sendMessage =
(!isCombination
&& !PROJECT_DOES_NOT_EXIST_ID.equals(projectId)
&& devopsMessage);
if (sendMessage) {
result =
producer.applyAndReturn(
StartSagaBuilder
.newBuilder()
.withLevel(ResourceLevel.ORGANIZATION)
.withSourceId(applicationDTO.getOrganizationId())
.withRefType("application")
.withSagaCode(APP_CREATE),
builder -> {
doInsert(applicationDTO);
//关系表插入路径
insertExploration(applicationDTO.getId());
ApplicationDTO dto = applicationMapper.selectByPrimaryKey(applicationDTO.getId());
dto.setFrom(applicationDTO.getFrom());
builder
.withPayloadAndSerialize(dto)
.withRefId(String.valueOf(applicationDTO.getId()));
return applicationDTO;
});
} else {
doInsert(applicationDTO);
//关系表插入路径
insertExploration(applicationDTO.getId());
result = applicationDTO;
}
result.setObjectVersionNumber(1L);
if (isCombination) {
processDescendants(applicationDTO, result);
}
return result;
}
@Override
@Saga(code = APP_UPDATE, description = "iam更新应用", inputSchemaClass = ApplicationDTO.class)
@Transactional(rollbackFor = Exception.class)
public ApplicationDTO update(ApplicationDTO applicationDTO) {
Long originProjectId =
ObjectUtils.isEmpty(applicationDTO.getProjectId()) ? PROJECT_DOES_NOT_EXIST_ID : applicationDTO.getProjectId();
applicationDTO.setProjectId(originProjectId);
validate(applicationDTO);
ApplicationDTO dto = applicationAssertHelper.applicationNotExisted(applicationDTO.getId());
Long targetProjectId = dto.getProjectId();
preUpdate(applicationDTO, dto);
ApplicationDTO result;
String combination = ApplicationCategory.COMBINATION.code();
boolean isCombination = combination.equals(dto.getApplicationCategory());
if (devopsMessage && !isCombination) {
if (PROJECT_DOES_NOT_EXIST_ID.equals(targetProjectId)) {
if (!PROJECT_DOES_NOT_EXIST_ID.equals(originProjectId)) {
//send create event
result = sendEvent(applicationDTO, APP_CREATE);
} else {
//do not send event
result = doUpdate(applicationDTO);
}
} else {
//send update event
result = sendEvent(applicationDTO, APP_UPDATE);
}
} else {
//do not sent event
result = doUpdate(applicationDTO);
}
if (isCombination) {
processDescendants(applicationDTO, result);
}
return result;
}
@Override
@Transactional(rollbackFor = Exception.class)
@Saga(code = APP_DELETE, description = "iam删除应用", inputSchemaClass = ApplicationDTO.class)
public void delete(Long organizationId, Long id) {
organizationAssertHelper.organizationNotExisted(organizationId);
ApplicationDTO applicationDTO = applicationAssertHelper.applicationNotExisted(id);
applicationExplorationMapper.deleteDescendantByApplicationId(id);
if (devopsMessage) {
deleteAndSendEvent(applicationDTO, APP_DELETE);
} else {
doDelete(applicationDTO);
}
}
@Override
public PageInfo<ApplicationDTO> pagingQuery(int page, int size, ApplicationQuery applicationSearchDTO, Boolean withDescendants) {
PageInfo<ApplicationDTO> result = PageHelper.startPage(page, size).doSelectPageInfo(() -> applicationMapper.fuzzyQuery(applicationSearchDTO));
if (withDescendants) {
result.getList().forEach(app -> {
//组合应用查询所有后代
if (ApplicationCategory.isCombination(app.getApplicationCategory())) {
List<ApplicationExplorationDTO> applicationExplorations = applicationExplorationMapper.selectDescendants(generatePath(app.getId()));
//todo dfs算法优化
processTreeData(app, applicationExplorations);
}
});
}
return result;
}
@Override
@Saga(code = APP_ENABLE, description = "iam启用应用", inputSchemaClass = ApplicationDTO.class)
public ApplicationDTO enable(Long id) {
return enable(id, true);
}
@Override
@Saga(code = APP_DISABLE, description = "iam禁用应用", inputSchemaClass = ApplicationDTO.class)
public ApplicationDTO disable(Long id) {
return enable(id, false);
}
private ApplicationDTO enable(Long id, boolean enabled) {
ApplicationDTO applicationDTO = applicationAssertHelper.applicationNotExisted(id);
applicationDTO.setEnabled(enabled);
String sagaCode = enabled ? APP_ENABLE : APP_DISABLE;
String combination = ApplicationCategory.COMBINATION.code();
boolean sendMessage =
(!combination.equals(applicationDTO.getApplicationCategory())
&& !PROJECT_DOES_NOT_EXIST_ID.equals(applicationDTO.getProjectId())
&& devopsMessage);
if (sendMessage) {
return sendEvent(applicationDTO, sagaCode);
} else {
return doUpdate(applicationDTO);
}
}
@Override
public List<String> types() {
List<String> types = new ArrayList<>();
for (ApplicationType applicationType : ApplicationType.values()) {
types.add(applicationType.code());
}
return types;
}
@Override
public String getToken(Long id) {
return applicationMapper.selectByPrimaryKey(id).getApplicationToken();
}
@Override
@Transactional(rollbackFor = Exception.class)
public String createToken(Long id) {
String token = UUID.randomUUID().toString();
ApplicationDTO applicationDTO = applicationMapper.selectByPrimaryKey(id);
applicationDTO.setApplicationToken(token);
if (applicationMapper.updateByPrimaryKey(applicationDTO) != 1) {
throw new CommonException("error.application.update");
}
return token;
}
@Override
public ApplicationDTO getApplicationByToken(String applicationToken) {
if (StringUtils.isEmpty(applicationToken)) {
throw new EmptyParamException("error.application.token.empty");
}
ApplicationDTO applicationDTO = new ApplicationDTO();
applicationDTO.setApplicationToken(applicationToken);
return applicationMapper.selectOne(applicationDTO);
}
@Override
public Long getIdByCode(String code, Long projectId) {
ApplicationDTO applicationDTO = new ApplicationDTO();
applicationDTO.setCode(code);
applicationDTO.setProjectId(projectId);
return applicationMapper.selectOne(applicationDTO).getId();
}
@Override
public void check(ApplicationDTO applicationDTO) {
if (!StringUtils.isEmpty(applicationDTO.getName())) {
//name是组织下唯一
checkName(applicationDTO);
}
if (!StringUtils.isEmpty((applicationDTO.getCode()))) {
//如果选的有项目,code是项目下唯一;如果没选项目,code是组织下唯一
checkCode(applicationDTO);
}
}
/**
* id为目标应用,ids为子应用
*
* @param organizationId 组织id
* @param id 应用id,applicationCategory为combination-application {@link ApplicationCategory#COMBINATION}
* @param ids 需要被分配的应用或组合应用
*/
@Override
@Transactional(rollbackFor = Exception.class)
public void addToCombination(Long organizationId, Long id, Long[] ids) {
Set<Long> idSet = preValidate(organizationId, id, ids, "error.application.addToCombination.not.support");
//查询直接儿子
List<ApplicationExplorationDTO> originDirectDescendant =
applicationExplorationMapper.selectDirectDescendantByApplicationId(id);
//筛选哪些儿子不变,哪些要删除,哪些要新增
List<Long> originDirectDescendantIds =
originDirectDescendant.stream().map(ApplicationExplorationDTO::getApplicationId).collect(Collectors.toList());
List<Long> intersection = originDirectDescendantIds.stream().filter(idSet::contains).collect(Collectors.toList());
List<Long> insertList = idSet.stream().filter(item ->
!intersection.contains(item)).collect(Collectors.toList());
List<Long> deleteList = originDirectDescendantIds.stream().filter(item ->
!intersection.contains(item)).collect(Collectors.toList());
//校验组合应用或应用 idSet 是否能放到 组合应用=id 下面。
//查询到达目标应用的所有路径,key为rootId,value为在该root节点下的所有路径
Map<Long, Set<String>> rootIdMap = getRootIdMap(id);
if (!insertList.isEmpty()) {
//查询子应用的所有后代,并校验是否构成环
Map<Long, List<ApplicationExplorationDTO>> descendantMap = getDescendantMap(new HashSet<>(insertList));
canAddToCombination(id, idSet, descendantMap);
for (Map.Entry<Long, Set<String>> entry : rootIdMap.entrySet()) {
Long rootId = entry.getKey();
Set<String> paths = entry.getValue();
paths.forEach(path -> addTreeNode(id, descendantMap, rootId, path));
}
}
deleteDescendants(deleteList, rootIdMap);
}
@Override
@Transactional(rollbackFor = Exception.class)
public void deleteCombination(Long organizationId, Long id, Long[] ids) {
Set<Long> idSet = preValidate(organizationId, id, ids, "error.application.deleteCombination.not.support");
Map<Long, Set<String>> rootIdMap = getRootIdMap(id);
deleteDescendants(idSet, rootIdMap);
}
@Override
public List<ApplicationExplorationDTO> queryDescendant(Long id) {
if (!ApplicationCategory.isCombination(
applicationAssertHelper.applicationNotExisted(id).getApplicationCategory())) {
throw new CommonException("error.application.queryDescendant.not.support");
}
return applicationExplorationMapper.selectDescendants(generatePath(id));
}
@Override
public PageInfo<ApplicationDTO> queryApplicationList(int page, int size, Long id, String name, String code) {
return PageHelper.startPage(page, size).doSelectPageInfo(() ->
applicationExplorationMapper.selectDescendantApplications(generatePath(id), ApplicationCategory.APPLICATION.code(), name, code));
}
@Override
public List<ApplicationDTO> queryEnabledApplication(Long organizationId, Long id) {
if (!ApplicationCategory.isCombination(applicationAssertHelper.applicationNotExisted(id).getApplicationCategory())) {
throw new CommonException("error.application.query.not.support");
}
List<ApplicationDTO> applications = applicationMapper.selectWithProject(organizationId);
List<ApplicationExplorationDTO> ancestors = applicationExplorationMapper.selectAncestorByApplicationId(id);
Set<Long> ancestorIds = ancestors.stream().map(ApplicationExplorationDTO::getApplicationId).collect(Collectors.toSet());
if (ancestorIds.isEmpty()) {
ancestorIds.add(id);
}
List<ApplicationDTO> apps =
applications.stream().filter(app -> !ancestorIds.contains(app.getId())).collect(Collectors.toList());
return apps;
}
@Override
public ApplicationDTO query(Long id, Boolean withDescendants) {
ApplicationDTO app = applicationAssertHelper.applicationNotExisted(id);
if (withDescendants &&
ApplicationCategory.isCombination(app.getApplicationCategory())) {
List<ApplicationExplorationDTO> applicationExplorations = applicationExplorationMapper.selectDescendants(generatePath(app.getId()));
//todo dfs算法优化
processTreeData(app, applicationExplorations);
}
return app;
}
private Map<Long, Set<String>> getRootIdMap(Long id) {
ApplicationExplorationDTO example = new ApplicationExplorationDTO();
example.setApplicationId(id);
List<ApplicationExplorationDTO> pathNodes = applicationExplorationMapper.select(example);
Map<Long, Set<String>> map = new HashMap<>();
pathNodes.forEach(node -> {
Long rootId = node.getRootId();
Set<String> paths = map.get(rootId);
if (paths == null) {
paths = new HashSet<>();
map.put(rootId, paths);
}
paths.add(node.getPath());
});
return map;
}
private Map<Long, List<ApplicationExplorationDTO>> getDescendantMap(Set<Long> idSet) {
Map<Long, List<ApplicationExplorationDTO>> map = new HashMap<>(idSet.size());
idSet.forEach(currentId -> {
List<ApplicationExplorationDTO> list =
applicationExplorationMapper.selectDescendantByPath(generatePath(currentId));
map.put(currentId, list);
});
return map;
}
private void isApplicationsIllegal(Long organizationId, Set<Long> idSet) {
////oracle In-list上限为1000,这里List size要小于1000
List<Set<Long>> list = CollectionUtils.subSet(idSet, 999);
List<ApplicationDTO> applications = new ArrayList<>();
list.forEach(set -> applications.addAll(applicationMapper.matchId(set)));
//校验是不是在组织下面
List<Long> illegalIds =
applications.stream().filter(
app -> !organizationId.equals(app.getOrganizationId()))
.map(ApplicationDTO::getId).collect(Collectors.toList());
if (!illegalIds.isEmpty()) {
throw new CommonException("error.application.add2combination.target.not.belong2organization",
Arrays.toString(illegalIds.toArray()), organizationId);
}
//校验应用是否都存在
if (idSet.size() != applications.size()) {
List<Long> existedIds =
applications.stream().map(ApplicationDTO::getId).collect(Collectors.toList());
illegalIds = idSet.stream().filter(id -> !existedIds.contains(id)).collect(Collectors.toList());
throw new CommonException("error.application.add2combination.not.existed", Arrays.toString(illegalIds.toArray()));
}
}
private void checkCode(ApplicationDTO applicationDTO) {
String code = applicationDTO.getCode();
ApplicationDTO example = new ApplicationDTO();
example.setCode(code);
example.setOrganizationId(applicationDTO.getOrganizationId());
Long id = applicationDTO.getId();
check(example, id, "error.application.code.duplicate");
}
private void check(ApplicationDTO example, Long id, String message) {
boolean check4Insert = (id == null);
if (check4Insert) {
if (!applicationMapper.select(example).isEmpty()) {
throw new CommonException(message);
}
} else {
List<ApplicationDTO> applications = applicationMapper.select(example);
if (applications.size() > 2) {
throw new CommonException(message);
}
if (applications.size() == 1 && !applications.get(0).getId().equals(id)) {
throw new CommonException(message);
}
}
}
private void checkName(ApplicationDTO applicationDTO) {
String name = applicationDTO.getName();
ApplicationDTO example = new ApplicationDTO();
example.setName(name);
example.setOrganizationId(applicationDTO.getOrganizationId());
Long id = applicationDTO.getId();
check(example, id, "error.application.name.duplicate");
}
private void preUpdate(ApplicationDTO applicationDTO, ApplicationDTO application) {
boolean canUpdateProject = PROJECT_DOES_NOT_EXIST_ID.equals(application.getProjectId());
if (!canUpdateProject) {
//为空的情况下,调用updateByPrimaryKeySelective这一列不会被更新
applicationDTO.setProjectId(null);
} else if (!PROJECT_DOES_NOT_EXIST_ID.equals(applicationDTO.getProjectId())) {
projectAssertHelper.projectNotExisted(applicationDTO.getProjectId());
}
applicationDTO.setOrganizationId(null);
applicationDTO.setApplicationCategory(null);
applicationDTO.setCode(null);
applicationAssertHelper.objectVersionNumberNotNull(applicationDTO.getObjectVersionNumber());
}
private void validate(ApplicationDTO applicationDTO) {
String category = applicationDTO.getApplicationCategory();
if (!ApplicationCategory.matchCode(category)) {
throw new CommonException("error.application.applicationCategory.illegal");
}
}
private void addTreeNode(Long id, Map<Long, List<ApplicationExplorationDTO>> descendantMap, Long rootId, String parentPath) {
for (Map.Entry<Long, List<ApplicationExplorationDTO>> entry : descendantMap.entrySet()) {
Long key = entry.getKey();
List<ApplicationExplorationDTO> applicationExplorations = entry.getValue();
applicationExplorations.forEach(ae -> {
StringBuilder builder =
new StringBuilder().append(parentPath).append(ae.getPath().substring(1));
String path = builder.toString();
ApplicationExplorationDTO example = new ApplicationExplorationDTO();
example.setApplicationId(ae.getApplicationId());
example.setPath(path);
example.setHashcode(String.valueOf(path.hashCode()));
example.setRootId(rootId);
if (ae.getApplicationId().equals(key)) {
example.setParentId(id);
} else {
example.setParentId(ae.getParentId());
}
example.setId(null);
example.setEnabled(true);
applicationExplorationMapper.insertSelective(example);
});
}
}
private void deleteTreeNode(Map<Long, List<ApplicationExplorationDTO>> descendantMap, String parentPath) {
for (Map.Entry<Long, List<ApplicationExplorationDTO>> entry : descendantMap.entrySet()) {
List<ApplicationExplorationDTO> applicationExplorations = entry.getValue();
applicationExplorations.forEach(ae -> {
StringBuilder builder =
new StringBuilder().append(parentPath).append(ae.getPath().substring(1));
String path = builder.toString();
ApplicationExplorationDTO example = new ApplicationExplorationDTO();
example.setPath(path);
applicationExplorationMapper.delete(example);
});
}
}
private void canAddToCombination(Long id, Set<Long> idSet, Map<Long, List<ApplicationExplorationDTO>> descendantMap) {
if (idSet.contains(id)) {
throw new CommonException("error.application.add2combination.circle", id, id);
}
Set<ApplicationExplorationDTO> set = new HashSet<>();
for (Map.Entry<Long, List<ApplicationExplorationDTO>> entry : descendantMap.entrySet()) {
set.addAll(entry.getValue());
}
List<Long> illegalIds =
set
.stream()
.filter(ae -> ae.getApplicationId().equals(id))
.map(ApplicationExplorationDTO::getRootId)
.collect(Collectors.toList());
if (!illegalIds.isEmpty()) {
throw new CommonException("error.application.add2combination.circle", Arrays.toString(illegalIds.toArray()), id);
}
}
private void deleteDescendants(Collection<Long> deleteList, Map<Long, Set<String>> rootIdMap) {
if (!deleteList.isEmpty()) {
Map<Long, List<ApplicationExplorationDTO>> descendantMap = getDescendantMap(new HashSet<>(deleteList));
for (Map.Entry<Long, Set<String>> entry : rootIdMap.entrySet()) {
Set<String> paths = entry.getValue();
paths.forEach(path -> deleteTreeNode(descendantMap, path));
}
}
}
private Set<Long> preValidate(Long organizationId, Long id, Long[] ids, String message) {
organizationAssertHelper.organizationNotExisted(organizationId);
if (!ApplicationCategory.isCombination(applicationAssertHelper.applicationNotExisted(id).getApplicationCategory())) {
throw new CommonException(message);
}
Set<Long> idSet = new HashSet<>(Arrays.asList(ids));
if (!idSet.isEmpty()) {
isApplicationsIllegal(organizationId, idSet);
}
return idSet;
}
private void processTreeData(ApplicationDTO app, List<ApplicationExplorationDTO> applicationExplorations) {
Long appId = app.getId();
List<ApplicationDTO> applications = new ArrayList<>();
applicationExplorations.forEach(ae -> {
if (appId.equals(ae.getParentId())) {
ApplicationDTO dto = new ApplicationDTO();
dto.setId(ae.getApplicationId());
dto.setName(ae.getApplicationName());
dto.setCode(ae.getApplicationCode());
dto.setApplicationCategory(ae.getApplicationCategory());
dto.setApplicationType(ae.getApplicationType());
dto.setEnabled(ae.getApplicationEnabled());
dto.setProjectId(ae.getProjectId());
dto.setProjectCode(ae.getProjectCode());
dto.setProjectName(ae.getProjectName());
dto.setImageUrl(ae.getProjectImageUrl());
dto.setParentId(appId);
applications.add(dto);
processTreeData(dto, applicationExplorations);
}
});
app.setDescendants(applications.isEmpty() ? null : applications);
}
/**
* 删除应用并发送saga消息通知.
*
* @param application 应用DTO
* @param sagaCode saga编码
*/
private void deleteAndSendEvent(ApplicationDTO application, String sagaCode) {
producer.apply(
StartSagaBuilder
.newBuilder()
.withLevel(ResourceLevel.ORGANIZATION)
.withRefType("application")
.withSagaCode(sagaCode),
builder -> {
doDelete(application);
builder
.withPayloadAndSerialize(application)
.withRefId(String.valueOf(application.getId()))
.withSourceId(application.getOrganizationId());
});
}
private ApplicationDTO sendEvent(ApplicationDTO applicationDTO, String sagaCode) {
return producer.applyAndReturn(
StartSagaBuilder
.newBuilder()
.withLevel(ResourceLevel.ORGANIZATION)
.withRefType("application")
.withSagaCode(sagaCode),
builder -> {
ApplicationDTO application = doUpdate(applicationDTO);
builder
.withPayloadAndSerialize(application)
.withRefId(String.valueOf(application.getId()))
.withSourceId(application.getOrganizationId());
return application;
});
}
private ApplicationDTO doUpdate(ApplicationDTO applicationDTO) {
if (applicationMapper.updateByPrimaryKeySelective(applicationDTO) != 1) {
throw new CommonException("error.application.update");
}
return applicationMapper.selectByPrimaryKey(applicationDTO.getId());
}
private void processDescendants(ApplicationDTO applicationDTO, ApplicationDTO result) {
List<Long> descendantIds = applicationDTO.getDescendantIds();
if (descendantIds != null) {
Long[] array = new Long[descendantIds.size()];
addToCombination(result.getOrganizationId(), result.getId(), descendantIds.toArray(array));
}
}
private void insertExploration(Long appId) {
ApplicationExplorationDTO example = new ApplicationExplorationDTO();
example.setApplicationId(appId);
String path = generatePath(appId);
example.setPath(path);
example.setApplicationEnabled(true);
example.setRootId(appId);
example.setHashcode(String.valueOf(path.hashCode()));
applicationExplorationMapper.insertSelective(example);
}
private String generatePath(Long appId) {
return new StringBuilder().append(SEPARATOR).append(appId).append(SEPARATOR).toString();
}
private void doInsert(ApplicationDTO applicationDTO) {
if (applicationMapper.insertSelective(applicationDTO) != 1) {
throw new CommonException("error.application.insert");
}
}
private void doDelete(ApplicationDTO applicationDTO) {
if (applicationMapper.deleteByPrimaryKey(applicationDTO) != 1) {
throw new CommonException("error.application.delete");
}
}
}
<|start_filename|>react/src/app/iam/containers/global/menu-setting/util.js<|end_filename|>
// Attach a non-enumerable (but writable/configurable) property to an object,
// so bookkeeping fields don't show up in Object.keys / JSON serialization.
function defineProperty(target, key, value) {
  const descriptor = {
    value,
    writable: true,
    enumerable: false,
    configurable: true,
  };
  Object.defineProperty(target, key, descriptor);
}
// True when the node has at least one child that is not a leaf menu item.
export function hasDirChild({ subMenus }) {
  if (!subMenus) {
    return false;
  }
  return subMenus.some(({ type }) => type !== 'menu_item');
}
// True when `child` appears anywhere in `parent`'s subtree (identity compare).
export function isChild(parent, child) {
  const { subMenus } = parent;
  if (!subMenus) {
    return false;
  }
  return subMenus.some(node => node === child || isChild(node, child));
}
// Locate `record` in the menu tree. Returns { parent, index } where `parent`
// is the array containing the record; for nested matches, `parentData` is
// additionally set to the node owning that array. Returns null if not found.
export function findParent(menus, record) {
  const index = menus.indexOf(record);
  if (index !== -1) {
    return { parent: menus, index };
  }
  let result = null;
  menus.some((data) => {
    const children = data.subMenus;
    if (!children) {
      return false;
    }
    const found = findParent(children, record);
    if (!found) {
      return false;
    }
    result = found;
    if (!found.parentData) {
      // Only the nearest ancestor sets parentData.
      result.parentData = data;
    }
    return true;
  });
  return result;
}
// Remove `record` from the tree; if that empties a nested subMenus array,
// reset the owner's subMenus to null so it reads as a leaf again.
export function deleteNode(menus, record) {
  const location = findParent(menus, record);
  const { parent, index, parentData } = location;
  parent.splice(index, 1);
  if (parentData && parent.length === 0) {
    parentData.subMenus = null;
  }
}
// A node is deletable when its entire subtree consists of type 'menu' nodes
// (a node with no children is trivially deletable).
export function canDelete({ subMenus }) {
  if (!subMenus) {
    return true;
  }
  return subMenus.every(node => node.type === 'menu' && canDelete(node));
}
// Record the node's depth as a hidden (non-enumerable) `__level__` property.
export function defineLevel(obj, level) {
  Object.defineProperty(obj, '__level__', {
    value: level,
    writable: true,
    enumerable: false,
    configurable: true,
  });
}
// Record the parent's display name as a hidden (non-enumerable) property.
export function defineParentName(obj, name) {
  Object.defineProperty(obj, '__parent_name__', {
    value: name,
    writable: true,
    enumerable: false,
    configurable: true,
  });
}
// Walk the menu tree, stamping each node with its depth (__level__) and,
// when known, its parent's name (__parent_name__). Mutates and returns `menus`.
export function normalizeMenus(menus, level = -1, parentName) {
  const depth = level + 1;
  menus.forEach((menu) => {
    defineLevel(menu, depth);
    if (parentName) {
      defineParentName(menu, parentName);
    }
    if (menu.subMenus) {
      normalizeMenus(menu.subMenus, depth, menu.name);
    }
  });
  return menus;
}
// Rewrite each node's `sort` field to its position within its sibling list,
// recursing into subMenus. Mutates and returns `data`.
export function adjustSort(data) {
  let position = 0;
  for (const node of data) {
    node.sort = position;
    position += 1;
    if (node.subMenus) {
      adjustSort(node.subMenus);
    }
  }
  return data;
}
<|start_filename|>src/test/groovy/io/choerodon/iam/infra/common/utils/ParamUtilsSpec.groovy<|end_filename|>
package io.choerodon.iam.infra.common.utils
import io.choerodon.iam.IntegrationTestConfiguration
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
* @author dengyouquan
* */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class ParamUtilsSpec extends Specification {

    def "ArrToStr"() {
        given: "构造请求参数"
        String[] params = ["param", "param1"] as String[]

        when: "调用方法"
        String joined = ParamUtils.arrToStr(params)

        then: "校验结果"
        // every element is appended followed by a trailing comma
        joined == params[0] + "," + params[1] + ","
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/enums/ApplicationCategory.java<|end_filename|>
package io.choerodon.iam.infra.enums;
/**
* 应用被划分为哪些类别
*
* @author superlee
* @since 0.15.0
*/
public enum ApplicationCategory {
    /**
     * Plain application.
     */
    APPLICATION("普通应用", "application"),
    /**
     * Combined (composite) application.
     */
    COMBINATION("组合应用", "combination-application");

    /** Human-readable display name; final so enum constants stay immutable. */
    private final String value;
    /** Machine-readable category code; final so enum constants stay immutable. */
    private final String code;

    ApplicationCategory(String value, String code) {
        this.value = value;
        this.code = code;
    }

    /**
     * @return the display name of this category
     */
    public String value() {
        return value;
    }

    /**
     * @return the machine-readable code of this category
     */
    public String code() {
        return code;
    }

    /**
     * Whether the given code matches any category.
     *
     * @param code candidate code (null-safe: never matches)
     * @return true if some category has exactly this code
     */
    public static boolean matchCode(String code) {
        for (ApplicationCategory applicationCategory : ApplicationCategory.values()) {
            if (applicationCategory.code.equals(code)) {
                return true;
            }
        }
        return false;
    }

    /**
     * @return true if the code denotes a plain application
     */
    public static boolean isApplication(String code) {
        return APPLICATION.code.equals(code);
    }

    /**
     * @return true if the code denotes a combined application
     */
    public static boolean isCombination(String code) {
        return COMBINATION.code.equals(code);
    }
}
<|start_filename|>react/src/app/iam/containers/global/role/RoleCreate.js<|end_filename|>
import React, { Component } from 'react';
import { withRouter } from 'react-router-dom';
import { inject, observer } from 'mobx-react';
import { Observable } from 'rxjs';
import _ from 'lodash';
import { Button, Col, Form, Input, Modal, Row, Select, Table, Tooltip } from 'choerodon-ui';
import { injectIntl, FormattedMessage } from 'react-intl';
import { Content, Header, Page, axios } from '@choerodon/boot';
import RoleStore from '../../../stores/global/role/RoleStore';
import MouseOverWrapper from '../../../components/mouseOverWrapper';
import { handleFiltersParams } from '../../../common/util';
import './Role.scss';
// Shorthand aliases for frequently used choerodon-ui components.
const { Option } = Select;
const { confirm, Sidebar } = Modal;
const FormItem = Form.Item;
// i18n message-key prefix shared by every string on this page.
const intlPrefix = 'global.role';
@Form.create({})
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class CreateRole extends Component {
  constructor(props) {
    super(props);
    // A non-empty chosen level in the store means we arrived from
    // "create based on existing roles": the level selector is then locked.
    const level = RoleStore.getChosenLevel !== '';
    this.state = {
      visible: false,                // permission-picker sidebar visibility
      selectedLevel: 'site',
      code: '',
      description: '',
      page: 1,
      pageSize: 10,
      alreadyPage: 1,
      errorName: '',
      errorDescription: '',
      submitting: false,             // create-request in flight
      selectedRowKeys: [],
      selectedSideBar: [],
      currentPermission: [],         // ids of permissions chosen for the new role
      firstLoad: true,               // suppress "no permission" error before first submit
      initLevel: level,              // true => disable the level Select
      permissionParams: [],          // free-text filter params of the sidebar table
    };
  }

  // NOTE(review): componentWillMount is deprecated in React 16.3+;
  // consider moving this to the constructor or componentDidMount.
  componentWillMount() {
    this.setCanPermissionCanSee();
    const permissions = RoleStore.getSelectedRolesPermission || [];
    this.setState({
      currentPermission: permissions.map(item => item.id),
    });
  }

  // Reset all role-creation state in the store when leaving the page.
  componentWillUnmount() {
    RoleStore.setCanChosePermission('site', []);
    RoleStore.setCanChosePermission('organization', []);
    RoleStore.setCanChosePermission('project', []);
    RoleStore.setChosenLevel('');
    RoleStore.setSelectedRolesPermission([]);
  }

  // 获取权限管理数据
  // Load the choosable permissions for every level into the store.
  setCanPermissionCanSee() {
    const levels = ['organization', 'project', 'site'];
    for (let c = 0; c < levels.length; c += 1) {
      Observable.fromPromise(axios.get(`iam/v1/permissions?level=${levels[c]}`))
        .subscribe((data) => {
          RoleStore.handleCanChosePermission(levels[c], data);
        });
    }
  }

  // Async form validator: reject codes whose full role code already exists.
  checkCode = (rule, value, callback) => {
    const validValue = `role/${RoleStore.getChosenLevel}/custom/${value}`;
    const params = { code: validValue };
    axios.post('/iam/v1/roles/check', JSON.stringify(params)).then((mes) => {
      if (mes.failed) {
        const { intl } = this.props;
        callback(intl.formatMessage({ id: `${intlPrefix}.code.exist.msg` }));
      } else {
        callback();
      }
    });
  };

  // Open the permission-picker sidebar, resetting its paging and
  // pre-selecting the permissions already attached to the role.
  showModal = () => {
    const { currentPermission } = this.state;
    RoleStore.setPermissionPage(RoleStore.getChosenLevel, {
      current: 1,
      pageSize: 10,
      total: '',
    });
    this.setState({
      permissionParams: [],
    }, () => {
      this.setCanPermissionCanSee(RoleStore.getChosenLevel);
      const selected = RoleStore.getSelectedRolesPermission
        .filter(item => currentPermission.indexOf(item.id) !== -1);
      RoleStore.setInitSelectedPermission(selected);
      this.setState({
        visible: true,
      });
    });
  };

  linkToChange = (url) => {
    const { history } = this.props;
    history.push(url);
  };

  // Keep the sidebar's selection (in the store) in sync when rows are
  // (de)selected; `ids` are the ids touched by this interaction.
  handleChangePermission = (selected, ids, permissions) => {
    const initPermission = RoleStore.getInitSelectedPermission;
    if (selected) {
      const newPermission = initPermission.concat(permissions);
      RoleStore.setInitSelectedPermission(_.uniqBy(newPermission, 'code'));
    } else {
      const centerPermission = initPermission.slice();
      _.remove(centerPermission, item => ids.indexOf(item.id) !== -1);
      RoleStore.setInitSelectedPermission(centerPermission);
    }
  };

  // Confirm the sidebar selection: copy it into component state and close.
  handleOk = () => {
    const selected = RoleStore.getInitSelectedPermission;
    const selectedIds = selected.map(item => item.id);
    // NOTE(review): _.uniqBy without an iteratee behaves like _.uniq
    // (identity); presumably intended — confirm.
    RoleStore.setSelectedRolesPermission(_.uniqBy(selected));
    this.setState({
      currentPermission: selectedIds,
      visible: false,
      alreadyPage: 1,
    });
  };

  handleCancel = () => {
    this.setState({
      visible: false,
      firstLoad: false,
    });
  };

  // Validate the form and POST the new role; on success navigate back
  // to the role list.
  handleCreate = (e) => {
    e.preventDefault();
    this.setState({
      firstLoad: false,
    });
    this.props.form.validateFieldsAndScroll((err) => {
      if (!err) {
        const { intl } = this.props;
        const { currentPermission } = this.state;
        const rolePermissionss = [];
        currentPermission.forEach(id =>
          rolePermissionss.push({ id }));
        // At least one permission is required; otherwise the inline
        // error under the table (rendered via firstLoad) is shown.
        if (rolePermissionss.length > 0) {
          const labelValues = this.props.form.getFieldValue('label');
          const labelIds = labelValues && labelValues.map(labelId => ({ id: labelId }));
          const role = {
            name: this.props.form.getFieldValue('name').trim(),
            modified: this.props.form.getFieldValue('modified'),
            enabled: this.props.form.getFieldValue('enabled'),
            code: `role/${RoleStore.getChosenLevel}/custom/${this.props.form.getFieldValue('code').trim()}`,
            level: RoleStore.getChosenLevel,
            permissions: rolePermissionss,
            labels: labelIds,
          };
          this.setState({ submitting: true });
          RoleStore.createRole(role)
            .then((data) => {
              this.setState({ submitting: false });
              if (data && !data.failed) {
                Choerodon.prompt(intl.formatMessage({ id: 'create.success' }));
                this.linkToChange('/iam/role');
              } else {
                Choerodon.prompt(data.message);
              }
            })
            .catch((errors) => {
              this.setState({ submitting: false });
              if (errors.response.data.message === 'error.role.roleNameExist') {
                Choerodon.prompt(intl.formatMessage({ id: `${intlPrefix}.name.exist.msg` }));
              } else {
                Choerodon.prompt(intl.formatMessage({ id: 'create.error' }));
              }
            });
        }
      }
    });
  };

  handleReset = () => {
    this.linkToChange('/iam/role');
  };

  // Level selector change handler. If the user already entered data
  // (code/label/permissions), ask for confirmation before clearing it.
  handleModal = (value) => {
    const { form, intl } = this.props;
    const that = this;
    const { getFieldValue, setFieldsValue } = form;
    const { currentPermission } = this.state;
    const level = getFieldValue('level');
    const code = getFieldValue('code');
    const label = getFieldValue('label');
    if (level && (currentPermission.length || code || label.length)) {
      confirm({
        title: intl.formatMessage({ id: `${intlPrefix}.modify.level.title` }),
        content: intl.formatMessage({ id: `${intlPrefix}.modify.level.content` }),
        onOk() {
          RoleStore.setChosenLevel(value);
          RoleStore.setSelectedRolesPermission([]);
          RoleStore.loadRoleLabel(value);
          setFieldsValue({ code: '', label: [] });
          that.setState({
            currentPermission: [],
          });
        },
        onCancel() {
          // Restore the previous level in the form field.
          setFieldsValue({ level });
        },
      });
    } else {
      RoleStore.setChosenLevel(value);
      RoleStore.setSelectedRolesPermission([]);
      RoleStore.loadRoleLabel(value);
      setFieldsValue({ code: '', label: [] });
      this.setState({
        currentPermission: [],
      });
    }
  };

  // Sidebar table change handler: paging / filtering of choosable permissions.
  handlePageChange = (pagination, filters, sorter, params) => {
    const level = RoleStore.getChosenLevel;
    this.setState({
      permissionParams: params,
    });
    // 若params或filters含特殊字符表格数据置空
    // (If params/filters contain special characters, empty the table.)
    const isIncludeSpecialCode = handleFiltersParams(params, filters);
    if (isIncludeSpecialCode) {
      RoleStore.setCanChosePermission(level, []);
      RoleStore.setPermissionPage(level, {
        current: 1,
        total: 0,
        size: 10,
      });
      return;
    }
    const newFilters = {
      params: (params && params.join(',')) || '',
    };
    RoleStore.getWholePermission(level, pagination, newFilters).subscribe((data) => {
      RoleStore.handleCanChosePermission(level, data);
    });
  };

  // Render the <Option> list for the label multi-select.
  renderRoleLabel = () => {
    const labels = RoleStore.getLabel;
    return labels.map(item =>
      <Option key={item.id} value={`${item.id}`}>{item.name}</Option>);
  };

  render() {
    const { currentPermission, firstLoad, submitting, initLevel } = this.state;
    const { intl, AppState } = this.props;
    const { getFieldDecorator } = this.props.form;
    const formItemLayout = {
      labelCol: {
        xs: { span: 24 },
        sm: { span: 100 },
      },
      wrapperCol: {
        xs: { span: 24 },
        sm: { span: 10 },
      },
    };
    const origin = RoleStore.getCanChosePermission;
    const data = RoleStore.getChosenLevel !== '' ? origin[RoleStore.getChosenLevel].slice() : [];
    const pagination = RoleStore.getPermissionPage[RoleStore.getChosenLevel];
    const selectedPermission = RoleStore.getSelectedRolesPermission || [];
    const changePermission = RoleStore.getInitSelectedPermission || [];
    const level = RoleStore.getChosenLevel;
    const codePrefix = `role/${level || 'level'}/custom/`;
    return (
      <Page className="choerodon-roleCreate">
        <Header
          title={<FormattedMessage id={`${intlPrefix}.create`} />}
          backPath="/iam/role"
        />
        <Content
          code={`${intlPrefix}.create`}
          values={{ name: AppState.getSiteInfo.systemName || 'Choerodon' }}
        >
          <div>
            <Form layout="vertical">
              <FormItem
                {...formItemLayout}
              >
                {getFieldDecorator('level', {
                  rules: [{
                    required: true,
                    message: intl.formatMessage({ id: `${intlPrefix}.level.require.msg` }),
                  }],
                  initialValue: level !== '' ? level : undefined,
                })(
                  <Select
                    label={<FormattedMessage id={`${intlPrefix}.level`} />}
                    ref={this.saveSelectRef}
                    size="default"
                    style={{
                      width: '512px',
                    }}
                    getPopupContainer={() => document.getElementsByClassName('page-content')[0]}
                    onChange={this.handleModal}
                    disabled={initLevel}
                  >
                    <Option value="site">{intl.formatMessage({ id: 'global' })}</Option>
                    <Option value="organization">{intl.formatMessage({ id: 'organization' })}</Option>
                    <Option value="project">{intl.formatMessage({ id: 'project' })}</Option>
                  </Select>,
                )}
              </FormItem>
              <FormItem
                {...formItemLayout}
              >
                {getFieldDecorator('code', {
                  rules: [{
                    required: true,
                    whitespace: true,
                    message: intl.formatMessage({ id: `${intlPrefix}.code.require.msg` }),
                  }, {
                    pattern: /^[a-z]([-a-z0-9]*[a-z0-9])?$/,
                    message: intl.formatMessage({ id: `${intlPrefix}.code.pattern.msg` }),
                  }, {
                    validator: this.checkCode,
                  }],
                  validateFirst: true,
                  initialValue: this.state.roleName,
                })(
                  <Input
                    autoComplete="off"
                    label={<FormattedMessage id={`${intlPrefix}.code`} />}
                    prefix={codePrefix}
                    size="default"
                    style={{
                      width: '512px',
                    }}
                    disabled={level === ''}
                    maxLength={64}
                    showLengthInfo={false}
                  />,
                )}
              </FormItem>
              <FormItem
                {...formItemLayout}
              >
                {getFieldDecorator('name', {
                  rules: [{
                    required: true,
                    whitespace: true,
                    message: intl.formatMessage({ id: `${intlPrefix}.name.require.msg` }),
                  }],
                  initialValue: this.state.name,
                })(
                  <Input
                    autoComplete="off"
                    label={<FormattedMessage id={`${intlPrefix}.name`} />}
                    type="textarea"
                    rows={1}
                    style={{
                      width: '512px',
                    }}
                    maxLength={64}
                    showLengthInfo={false}
                  />,
                )}
              </FormItem>
              <FormItem
                {...formItemLayout}
              >
                {getFieldDecorator('label')(
                  <Select
                    mode="multiple"
                    label={<FormattedMessage id={`${intlPrefix}.label`} />}
                    size="default"
                    getPopupContainer={() => document.getElementsByClassName('page-content')[0]}
                    style={{
                      width: '512px',
                    }}
                    disabled={RoleStore.getChosenLevel === ''}
                  >
                    {this.renderRoleLabel()}
                  </Select>,
                )}
              </FormItem>
              <FormItem
                {...formItemLayout}
              >
                <Tooltip
                  placement="top"
                  title={<FormattedMessage id={RoleStore.getChosenLevel ? `${intlPrefix}.add.permission` : `${intlPrefix}.level.nothing.msg`} />}
                >
                  <Button
                    funcType="raised"
                    onClick={this.showModal.bind(this)}
                    disabled={RoleStore.getChosenLevel === ''}
                    className="addPermission"
                    icon="add"
                  >
                    <FormattedMessage id={`${intlPrefix}.add.permission`} />
                  </Button>
                </Tooltip>
              </FormItem>
              <FormItem>
                {currentPermission.length > 0 ? (
                  <span className="alreadyDes">
                    <FormattedMessage id={`${intlPrefix}.permission.count.msg`} values={{ count: currentPermission.length }} />
                  </span>
                ) : (
                  <span className="alreadyDes">
                    <FormattedMessage id={`${intlPrefix}.permission.nothing.msg`} />
                  </span>
                )}
              </FormItem>
              <FormItem
                {...formItemLayout}
              >
                <Table
                  className="c7n-role-permission-table"
                  columns={[{
                    title: <FormattedMessage id={`${intlPrefix}.permission.code`} />,
                    width: '50%',
                    dataIndex: 'code',
                    key: 'code',
                    render: text => (
                      <MouseOverWrapper text={text} width={0.5}>
                        {text}
                      </MouseOverWrapper>
                    ),
                  }, {
                    title: <FormattedMessage id={`${intlPrefix}.permission.desc`} />,
                    width: '50%',
                    dataIndex: 'description',
                    key: 'description',
                    render: text => (
                      <MouseOverWrapper text={text} width={0.5}>
                        {text}
                      </MouseOverWrapper>
                    ),
                  }]}
                  dataSource={selectedPermission || []}
                  filterBarPlaceholder={intl.formatMessage({ id: 'filtertable' })}
                  rowSelection={{
                    selectedRowKeys: currentPermission,
                    onChange: (selectedRowKeys, selectedRows) => {
                      this.setState({
                        currentPermission: selectedRowKeys,
                      });
                    },
                  }}
                  rowKey="id"
                />
                {!firstLoad && !currentPermission.length ? (
                  <div style={{ color: '#d50000' }} className="c7n-form-explain">
                    <FormattedMessage id={`${intlPrefix}.permission.require.msg`} />
                  </div>
                ) : null}
              </FormItem>
              <FormItem>
                <Row className="mt-md">
                  <Col className="choerodon-btn-create">
                    <Button
                      funcType="raised"
                      type="primary"
                      onClick={this.handleCreate}
                      loading={submitting}
                    >
                      <FormattedMessage id="create" />
                    </Button>
                  </Col>
                  <Col span={5}>
                    <Button
                      funcType="raised"
                      onClick={this.handleReset}
                      disabled={submitting}
                      style={{ color: '#3F51B5' }}
                    >
                      <FormattedMessage id="cancel" />
                    </Button>
                  </Col>
                </Row>
              </FormItem>
            </Form>
            <Sidebar
              title={<FormattedMessage id={`${intlPrefix}.add.permission`} />}
              visible={this.state.visible}
              onOk={this.handleOk.bind(this)}
              onCancel={this.handleCancel.bind(this)}
              okText={intl.formatMessage({ id: 'ok' })}
              cancelText={intl.formatMessage({ id: 'cancel' })}
            >
              <Content
                className="sidebar-content"
                code={`${intlPrefix}.create.addpermission`}
              >
                <Table
                  className="c7n-role-permission-table"
                  columns={[{
                    title: <FormattedMessage id={`${intlPrefix}.permission.code`} />,
                    width: '50%',
                    dataIndex: 'code',
                    key: 'code',
                    render: text => (
                      <MouseOverWrapper text={text} width={0.4}>
                        {text}
                      </MouseOverWrapper>
                    ),
                  }, {
                    title: <FormattedMessage id={`${intlPrefix}.permission.desc`} />,
                    width: '50%',
                    dataIndex: 'description',
                    key: 'description',
                    render: text => (
                      <MouseOverWrapper text={text} width={0.4}>
                        {text}
                      </MouseOverWrapper>
                    ),
                  }]}
                  rowKey="id"
                  dataSource={data}
                  pagination={pagination}
                  onChange={this.handlePageChange}
                  filters={this.state.permissionParams}
                  filterBarPlaceholder={intl.formatMessage({ id: 'filtertable' })}
                  rowSelection={{
                    selectedRowKeys: (changePermission
                      && changePermission.map(item => item.id)) || [],
                    onSelect: (record, selected, selectedRows) => {
                      this.handleChangePermission(selected, [record.id], selectedRows);
                    },
                    onSelectAll: (selected, selectedRows, changeRows) => {
                      const ids = _.map(changeRows, item => item.id);
                      this.handleChangePermission(selected, ids, selectedRows);
                    },
                  }}
                />
              </Content>
            </Sidebar>
          </div>
        </Content>
      </Page>
    );
  }
}
<|start_filename|>react/src/app/iam/stores/user/organization-info/index.js<|end_filename|>
// Barrel file: re-export the store so consumers can import the
// directory ('.../organization-info') directly.
import OrganizationInfoStore from './OrganizationInfoStore';
export default OrganizationInfoStore;
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/UserMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import java.util.List;
import java.util.Set;
import io.choerodon.iam.api.dto.UserSearchDTO;
import io.choerodon.iam.infra.dto.UserDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import io.choerodon.iam.api.dto.RoleAssignmentSearchDTO;
import io.choerodon.iam.api.dto.SimplifiedUserDTO;
import io.choerodon.iam.api.dto.UserRoleDTO;
/**
* @author wuguokai
* @author superlee
*/
public interface UserMapper extends Mapper<UserDTO> {

    /**
     * Full-text style search over users with the given search criteria
     * plus a free-text parameter.
     */
    List<UserDTO> fulltextSearch(@Param("userSearchDTO") UserSearchDTO userSearchDTO,
                                 @Param("param") String param);

    /**
     * Query users together with their roles under a source
     * (organization/project), with manual paging via start/size.
     */
    List<UserDTO> selectUserWithRolesByOption(
            @Param("roleAssignmentSearchDTO") RoleAssignmentSearchDTO roleAssignmentSearchDTO,
            @Param("sourceId") Long sourceId,
            @Param("sourceType") String sourceType,
            @Param("start") Integer start,
            @Param("size") Integer size,
            @Param("param") String param);

    /**
     * Count of users matching {@link #selectUserWithRolesByOption}.
     */
    int selectCountUsers(@Param("roleAssignmentSearchDTO")
                                 RoleAssignmentSearchDTO roleAssignmentSearchDTO,
                         @Param("sourceId") Long sourceId,
                         @Param("sourceType") String sourceType,
                         @Param("param") String param);

    /**
     * Query users scoped to a source level with optional user id / email /
     * free-text filters.
     */
    List<UserDTO> selectUsersByLevelAndOptions(@Param("sourceType") String sourceType,
                                               @Param("sourceId") Long sourceId,
                                               @Param("userId") Long userId,
                                               @Param("email") String email,
                                               @Param("param") String param);

    /**
     * Count of member-role rows for a role under a source, with search options.
     */
    Integer selectUserCountFromMemberRoleByOptions(@Param("roleId") Long roleId,
                                                   @Param("memberType") String memberType,
                                                   @Param("sourceId") Long sourceId,
                                                   @Param("sourceType") String sourceType,
                                                   @Param("roleAssignmentSearchDTO")
                                                           RoleAssignmentSearchDTO roleAssignmentSearchDTO,
                                                   @Param("param") String param);

    /**
     * Users holding a given role under a source, with search options.
     */
    List<UserDTO> selectUsersFromMemberRoleByOptions(@Param("roleId") Long roleId,
                                                     @Param("memberType") String memberType,
                                                     @Param("sourceId") Long sourceId,
                                                     @Param("sourceType") String sourceType,
                                                     @Param("roleAssignmentSearchDTO")
                                                             RoleAssignmentSearchDTO roleAssignmentSearchDTO,
                                                     @Param("param") String param);

    /**
     * Batch lookup by ids; optionally restricted to enabled users.
     */
    List<UserDTO> listUsersByIds(@Param("ids") Long[] ids, @Param("onlyEnabled") Boolean onlyEnabled);

    /**
     * Batch lookup by email addresses.
     */
    List<UserDTO> listUsersByEmails(@Param("emails") String[] emails);

    /**
     * Paged query over admin users with an example object and free-text params.
     */
    List<UserDTO> selectAdminUserPage(@Param("userDTO") UserDTO userDTO, @Param("params") String params);

    /**
     * Returns the subset of the given login names that already exist.
     */
    Set<String> matchLoginName(@Param("nameSet") Set<String> nameSet);

    /**
     * Returns the ids of users whose login names match the given set.
     */
    Set<Long> getIdsByMatchLoginName(@Param("nameSet") Set<String> nameSet);

    /**
     * Disables (soft-deactivates) all users with the given ids.
     */
    void disableListByIds(@Param("idSet") Set<Long> ids);

    /**
     * Returns the subset of the given emails that already exist.
     */
    Set<String> matchEmail(@Param("emailSet") Set<String> emailSet);

    /**
     * All user ids on the platform.
     */
    Long[] listUserIds();

    /**
     * Simplified info for all users matching the free-text params.
     */
    List<SimplifiedUserDTO> selectAllUsersSimplifiedInfo(@Param("params") String params);

    /**
     * Optional user query: fuzzy match when the user belongs to the
     * organization, exact match otherwise.
     *
     * @param param          free-text filter
     * @param organizationId organization scope
     * @return simplified user list
     */
    List<SimplifiedUserDTO> selectUsersOptional(@Param("params") String param, @Param("organizationId") Long organizationId);

    /**
     * Total number of users on the platform (including disabled ones).
     *
     * @return platform-wide user count
     */
    Integer totalNumberOfUsers();

    /**
     * Number of users created platform-wide in a date range
     * (including disabled ones).
     *
     * @return count of new users in the period
     */
    Integer newUsersByDate(@Param("begin") String begin,
                           @Param("end") String end);

    /**
     * Roles of a user, optionally filtered by params and organization.
     */
    List<UserRoleDTO> selectRoles(@Param("userId") long id, @Param("params") String params, @Param("organizationId") Long organizationId);

    /**
     * Query all users by a set of login names.
     *
     * @param loginNames  login names to look up
     * @param onlyEnabled restrict to enabled users when true
     * @return matching users
     */
    List<UserDTO> listUsersByLoginNames(@Param("loginNames") String[] loginNames,
                                        @Param("onlyEnabled") Boolean onlyEnabled);
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/UserInfoDTO.java<|end_filename|>
package io.choerodon.iam.api.dto;
import io.swagger.annotations.ApiModelProperty;
public class UserInfoDTO extends UserPasswordDTO {

    // Optional user name carried alongside the password payload.
    @ApiModelProperty(value = "用户名/非必填")
    private String userName;

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/RoleController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import java.util.List;
import com.github.pagehelper.PageInfo;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.base.BaseController;
import io.choerodon.iam.api.query.RoleQuery;
import io.choerodon.iam.app.service.PermissionService;
import io.choerodon.iam.app.service.RoleService;
import io.choerodon.iam.infra.common.utils.ParamUtils;
import io.choerodon.iam.infra.dto.PermissionDTO;
import io.choerodon.iam.infra.dto.RoleDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.choerodon.swagger.annotation.CustomPageRequest;
/**
* @author superlee
* @author wuguokai
*/
@RestController
@RequestMapping(value = "/v1/roles")
public class RoleController extends BaseController {

    private RoleService roleService;

    private PermissionService permissionService;

    public RoleController(RoleService roleService, PermissionService permissionService) {
        this.roleService = roleService;
        this.permissionService = permissionService;
    }

    /**
     * Paged role search.
     *
     * @return search result page
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "分页查询角色")
    @PostMapping(value = "/search")
    @CustomPageRequest
    public ResponseEntity<PageInfo<RoleDTO>> pagedSearch(@ApiIgnore
                                                         @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                         @RequestBody RoleQuery roleQuery) {
        return new ResponseEntity<>(roleService.pagingSearch(pageRequest, roleQuery), HttpStatus.OK);
    }

    // Internal-only endpoint (permissionWithin): role ids linked to a label.
    @Permission(permissionWithin = true)
    @ApiOperation(value = "通过label查询关联角色列表")
    @GetMapping(value = "/id")
    public ResponseEntity<List<Long>> queryIdsByLabelNameAndLabelType(@RequestParam(value = "label_name") String labelName,
                                                                      @RequestParam(value = "label_type") String labelType) {
        return new ResponseEntity<>(roleService.queryIdsByLabelNameAndLabelType(labelName, labelType), HttpStatus.OK);
    }

    /**
     * Query a role by id, including its permissions and labels.
     *
     * @return the role
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "通过id查询角色")
    @GetMapping(value = "/{id}")
    public ResponseEntity<RoleDTO> queryWithPermissionsAndLabels(@PathVariable Long id) {
        return new ResponseEntity<>(roleService.queryWithPermissionsAndLabels(id), HttpStatus.OK);
    }

    /**
     * Query a role by its code (internal-only endpoint).
     *
     * @return the role
     */
    @Permission(permissionWithin = true)
    @ApiOperation(value = "通过code查询角色")
    @GetMapping
    public ResponseEntity<RoleDTO> queryByCode(@RequestParam String code) {
        return new ResponseEntity<>(roleService.queryByCode(code), HttpStatus.OK);
    }

    /**
     * Query a role id by its code (internal-only endpoint).
     *
     * @return the role id, or null body when no role matches
     */
    @Permission(permissionWithin = true)
    @ApiOperation(value = "通过code查询角色Id")
    @GetMapping("/idByCode")
    public ResponseEntity<Long> queryIdByCode(@RequestParam String code) {
        RoleDTO dto = roleService.queryByCode(code);
        Long roleId = dto == null ? null : dto.getId();
        return new ResponseEntity<>(roleId, HttpStatus.OK);
    }

    // Create a role.
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "创建角色")
    @PostMapping
    public ResponseEntity<RoleDTO> create(@RequestBody @Validated RoleDTO roleDTO) {
        return new ResponseEntity<>(roleService.create(roleDTO), HttpStatus.OK);
    }

    // Create a role derived from existing roles.
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "基于已有角色创建角色")
    @PostMapping("/base_on_roles")
    public ResponseEntity<RoleDTO> createBaseOnRoles(@RequestBody @Validated RoleDTO roleDTO) {
        return new ResponseEntity<>(roleService.createBaseOnRoles(roleDTO), HttpStatus.OK);
    }

    // Update a role; the path id wins over any id in the body.
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "修改角色")
    @PutMapping(value = "/{id}")
    public ResponseEntity<RoleDTO> update(@PathVariable Long id,
                                          @RequestBody RoleDTO roleDTO) {
        roleDTO.setId(id);
        return new ResponseEntity<>(roleService.update(roleDTO), HttpStatus.OK);
    }

    // Enable a role.
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "启用角色")
    @PutMapping(value = "/{id}/enable")
    public ResponseEntity<RoleDTO> enableRole(@PathVariable Long id) {
        return new ResponseEntity<>(roleService.enableRole(id), HttpStatus.OK);
    }

    // Disable a role.
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "禁用角色")
    @PutMapping(value = "/{id}/disable")
    public ResponseEntity<RoleDTO> disableRole(@PathVariable Long id) {
        return new ResponseEntity<>(roleService.disableRole(id), HttpStatus.OK);
    }

    // Validate role fields (e.g. code uniqueness); throws on failure.
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "角色信息校验")
    @PostMapping(value = "/check")
    public ResponseEntity check(@RequestBody RoleDTO role) {
        roleService.check(role);
        return new ResponseEntity(HttpStatus.OK);
    }

    // Paged permissions of a role, with optional free-text params.
    @Permission(type = ResourceType.SITE, permissionLogin = true)
    @ApiOperation("根据角色id查看角色对应的权限")
    @GetMapping("/{id}/permissions")
    @CustomPageRequest
    public ResponseEntity<PageInfo<PermissionDTO>> listPermissionById(@ApiIgnore
                                                                      @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                                      @PathVariable("id") Long id,
                                                                      @RequestParam(value = "params", required = false) String[] params) {
        return new ResponseEntity<>(permissionService.listPermissionsByRoleId(pageRequest, id, ParamUtils.arrToStr(params)), HttpStatus.OK);
    }

    /**
     * Query roles by label.
     *
     * @return matching roles
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "根据标签查询角色")
    @GetMapping(value = "/selectByLabel")
    public ResponseEntity<List<RoleDTO>> selectByLabel(@RequestParam String label) {
        return ResponseEntity.ok(roleService.selectByLabel(label, null));
    }
}
<|start_filename|>react/src/app/iam/containers/user/password/Password.js<|end_filename|>
import React, { Component } from 'react';
import { Button, Col, Form, Input, Modal, Row } from 'choerodon-ui';
import { FormattedMessage, injectIntl } from 'react-intl';
import { withRouter } from 'react-router-dom';
import { inject, observer } from 'mobx-react';
import { axios, Content, Header, Page, Permission } from '@choerodon/boot';
import UserInfoStore from '../../../stores/user/user-info/UserInfoStore';
import './Password.scss';
const FormItem = Form.Item;
// i18n message-key prefix for this page.
const intlPrefix = 'user.changepwd';
// Shared antd Form layout used by every FormItem on this page.
const formItemLayout = {
  labelCol: {
    xs: { span: 24 },
    sm: { span: 100 },
  },
  wrapperCol: {
    xs: { span: 24 },
    sm: { span: 9 },
  },
};
// Fixed pixel width applied to all password inputs.
const inputWidth = 512;
@Form.create({})
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class Password extends Component {
constructor(props) {
super(props);
this.editFocusInput = React.createRef();
}
state = {
submitting: false,
confirmDirty: null,
res: {},
};
componentWillMount() {
this.loadUserInfo();
this.loadEnablePwd();
}
loadUserInfo = () => {
UserInfoStore.setUserInfo(this.props.AppState.getUserInfo);
};
loadEnablePwd = () => {
axios.get(`/iam/v1/system/setting/enable_resetPassword`)
.then((response) => {
this.setState({
res: response,
});
});
};
compareToFirstPassword = (rule, value, callback) => {
const { intl, form } = this.props;
if (value && value !== form.getFieldValue('password')) {
callback(intl.formatMessage({ id: `${intlPrefix}.twopwd.pattern.msg` }));
} else {
callback();
}
};
validateToNextPassword = (rule, value, callback) => {
const { form } = this.props;
if (value && this.state.confirmDirty) {
form.validateFields(['confirm'], { force: true });
}
if (value.indexOf(' ') !== -1) {
callback('密码不能包含空格');
}
callback();
};
handleConfirmBlur = (e) => {
const value = e.target.value;
this.setState({ confirmDirty: this.state.confirmDirty || !!value });
};
handleSubmit = (e) => {
const { getFieldValue } = this.props.form;
const user = UserInfoStore.getUserInfo;
const body = {
originalPassword: getFieldValue('<PASSWORD>'),
password: getFieldValue('<PASSWORD>'),
};
e.preventDefault();
this.props.form.validateFields((err, values) => {
if (!err) {
this.setState({ submitting: true });
UserInfoStore.updatePassword(user.id, body)
.then(({ failed, message }) => {
this.setState({ submitting: false });
if (failed) {
Choerodon.prompt(message);
} else {
Choerodon.logout();
}
})
.catch((error) => {
this.setState({ submitting: false });
Choerodon.handleResponseError(error);
});
}
});
};
reload = () => {
const { resetFields } = this.props.form;
resetFields();
};
showModal = () => {
this.setState({
visible: true,
});
Modal.confirm({
className: 'c7n-iam-confirm-modal',
title: '修改仓库密码',
content: '确定要修改您的gitlab仓库密码吗?点击确定后,您将跳转至GitLab仓库克隆密码的修改页面。',
okText: '修改',
width: 560,
onOk: () => {
const { res: { enable_reset, resetGitlabPasswordUrl } } = this.state;
if (enable_reset) {
window.open(resetGitlabPasswordUrl);
}
},
});
};
render() {
const { intl, form } = this.props;
const { getFieldDecorator } = form;
const { submitting, res: { enable_reset } } = this.state;
const user = UserInfoStore.getUserInfo;
return (
<Page
service={[
'iam-service.user.selfUpdatePassword',
]}
>
<Header title={<FormattedMessage id={`${intlPrefix}.header.title`} />}>
{
enable_reset ? (
<Button onClick={this.showModal} icon="vpn_key">
<FormattedMessage id={`${intlPrefix}.gitlab`} />
</Button>
) : null
}
<Button onClick={this.reload} icon="refresh">
<FormattedMessage id="refresh" />
</Button>
</Header>
<Content
code={intlPrefix}
values={{ name: user.realName }}
>
<div className="ldapContainer">
<Form onSubmit={this.handleSubmit} layout="vertical">
<FormItem
{...formItemLayout}
>
{getFieldDecorator('oldpassword', {
rules: [{
required: true,
message: intl.formatMessage({ id: `${intlPrefix}.oldpassword.require.msg` }),
}, {
validator: this.validateToNextPassword,
}],
validateTrigger: 'onBlur',
})(
<Input
autoComplete="off"
label={<FormattedMessage id={`${intlPrefix}.oldpassword`} />}
type="password"
style={{ width: inputWidth }}
ref={(e) => {
this.editFocusInput = e;
}}
disabled={user.ldap}
/>,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('password', {
rules: [{
required: true,
message: intl.formatMessage({ id: `${intlPrefix}.newpassword.require.msg` }),
}, {
validator: this.validateToNextPassword,
}],
validateTrigger: 'onBlur',
validateFirst: true,
})(
<Input
autoComplete="off"
label={<FormattedMessage id={`${intlPrefix}.newpassword`} />}
type="password"
style={{ width: inputWidth }}
showPasswordEye
disabled={user.ldap}
/>,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('confirm', {
rules: [{
required: true,
message: intl.formatMessage({ id: `${intlPrefix}.confirmpassword.require.msg` }),
}, {
validator: this.compareToFirstPassword,
}],
validateTrigger: 'onBlur',
validateFirst: true,
})(
<Input
autoComplete="off"
label={<FormattedMessage id={`${intlPrefix}.confirmpassword`} />}
type="password"
style={{ width: inputWidth }}
onBlur={this.handleConfirmBlur}
showPasswordEye
disabled={user.ldap}
/>,
)}
</FormItem>
<FormItem>
<Permission
service={['iam-service.user.selfUpdatePassword']}
type={'site'}
onAccess={() => {
setTimeout(() => {
this.editFocusInput.input.focus();
}, 10);
}}
>
<Row>
<hr className="hrLine" />
<Col span={5} style={{ marginRight: 16 }}>
<Button
funcType="raised"
type="primary"
htmlType="submit"
loading={submitting}
disabled={user.ldap}
><FormattedMessage id="save" /></Button>
<Button
funcType="raised"
onClick={this.reload}
style={{ marginLeft: 16 }}
disabled={submitting || user.ldap}
><FormattedMessage id="cancel" /></Button>
</Col>
</Row>
</Permission>
</FormItem>
</Form>
</div>
</Content>
</Page>
);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/LdapMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.LdapDTO;
import io.choerodon.mybatis.common.Mapper;
/**
* @author wuguokai
*/
public interface LdapMapper extends Mapper<LdapDTO> {
    // Marker mapper: inherits all generic CRUD operations from Mapper<LdapDTO>.
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/OrganizationProjectController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.base.BaseController;
import io.choerodon.core.iam.InitRoleCode;
import io.choerodon.core.oauth.DetailsHelper;
import io.choerodon.iam.app.service.OrganizationProjectService;
import io.choerodon.iam.infra.common.utils.ParamUtils;
import io.choerodon.iam.infra.dto.ProjectDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.choerodon.swagger.annotation.CustomPageRequest;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import springfox.documentation.annotations.ApiIgnore;
import javax.validation.Valid;
import java.util.List;
import java.util.Map;
/**
 * REST endpoints for the projects owned by an organization: creation,
 * paged queries, update, enable/disable, field validation, and
 * program/agile relationship lookups.
 *
 * @author flyleft
 * @author superlee
 */
@RestController
@RequestMapping(value = "/v1/organizations/{organization_id}/projects")
public class OrganizationProjectController extends BaseController {

    private OrganizationProjectService organizationProjectService;

    public OrganizationProjectController(OrganizationProjectService organizationProjectService) {
        this.organizationProjectService = organizationProjectService;
    }

    /**
     * Creates a project under the given organization.
     *
     * @param organizationId id of the owning organization (from the path)
     * @param projectDTO     project payload
     * @return the created project
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "创建项目")
    @PostMapping
    public ResponseEntity<ProjectDTO> create(@PathVariable(name = "organization_id") Long organizationId,
                                             @RequestBody @Valid ProjectDTO projectDTO) {
        // The path variable is authoritative for ownership, overriding the body.
        projectDTO.setOrganizationId(organizationId);
        ProjectDTO created = organizationProjectService.createProject(projectDTO);
        return new ResponseEntity<>(created, HttpStatus.OK);
    }

    /**
     * Pages through the organization's projects, filtered by the optional
     * query parameters.
     *
     * @return one page of matching projects
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @GetMapping
    @ApiOperation(value = "分页查询项目")
    @CustomPageRequest
    public ResponseEntity<PageInfo<ProjectDTO>> list(@PathVariable(name = "organization_id") Long organizationId,
                                                     @ApiIgnore
                                                     @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                     @RequestParam(required = false) String name,
                                                     @RequestParam(required = false) String code,
                                                     @RequestParam(required = false) String typeName,
                                                     @RequestParam(required = false) Boolean enabled,
                                                     @RequestParam(required = false) String category,
                                                     @RequestParam(required = false) String[] params) {
        // Collect the filter criteria into an example object for the query.
        ProjectDTO filter = new ProjectDTO();
        filter.setOrganizationId(organizationId);
        filter.setName(name);
        filter.setCode(code);
        filter.setEnabled(enabled);
        filter.setTypeName(typeName);
        filter.setCategory(category);
        PageInfo<ProjectDTO> page =
                organizationProjectService.pagingQuery(filter, pageRequest, ParamUtils.arrToStr(params));
        return new ResponseEntity<>(page, HttpStatus.OK);
    }

    /**
     * Lists the organization's projects assigned for agile development.
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @GetMapping("/list")
    @ApiOperation(value = "查询分配开发的项目")
    public ResponseEntity<List<ProjectDTO>> getAgileProjects(@PathVariable(name = "organization_id") Long organizationId,
                                                             @RequestParam(required = false) String[] param) {
        List<ProjectDTO> projects =
                organizationProjectService.getAgileProjects(organizationId, ParamUtils.arrToStr(param));
        return new ResponseEntity<>(projects, HttpStatus.OK);
    }

    /**
     * Updates a project; the ids taken from the path win over any ids
     * carried in the request body.
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @PutMapping(value = "/{project_id}")
    @ApiOperation(value = "修改项目")
    public ResponseEntity<ProjectDTO> update(@PathVariable(name = "organization_id") Long organizationId,
                                             @PathVariable(name = "project_id") Long projectId,
                                             @RequestBody @Valid ProjectDTO projectDTO) {
        projectDTO.setOrganizationId(organizationId);
        projectDTO.setId(projectId);
        ProjectDTO updated = organizationProjectService.update(organizationId, projectDTO);
        return new ResponseEntity<>(updated, HttpStatus.OK);
    }

    /**
     * Enables a project (together with its program relationships).
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "启用项目(同时启用项目关联的项目群关系)")
    @PutMapping(value = "/{project_id}/enable")
    public ResponseEntity<ProjectDTO> enableProject(@PathVariable(name = "organization_id") Long organizationId,
                                                    @PathVariable(name = "project_id") Long projectId) {
        // The acting user comes from the security context, not the request.
        Long userId = DetailsHelper.getUserDetails().getUserId();
        ProjectDTO enabled = organizationProjectService.enableProject(organizationId, projectId, userId);
        return new ResponseEntity<>(enabled, HttpStatus.OK);
    }

    /**
     * Disables a project (together with its program relationships).
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "禁用项目(同时禁用项目关联的项目群关系)")
    @PutMapping(value = "/{project_id}/disable")
    public ResponseEntity<ProjectDTO> disableProject(@PathVariable(name = "organization_id") Long organizationId,
                                                     @PathVariable(name = "project_id") Long projectId) {
        Long userId = DetailsHelper.getUserDetails().getUserId();
        ProjectDTO disabled = organizationProjectService.disableProject(
                organizationId, projectId, userId);
        return new ResponseEntity<>(disabled, HttpStatus.OK);
    }

    /**
     * Validates project fields; the service throws on a failed check,
     * so reaching the return statement means the payload is valid.
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "项目信息校验")
    @PostMapping(value = "/check")
    public ResponseEntity check(@PathVariable(name = "organization_id") Long organizationId,
                                @RequestBody ProjectDTO projectDTO) {
        projectDTO.setOrganizationId(organizationId);
        organizationProjectService.check(projectDTO);
        return new ResponseEntity(HttpStatus.OK);
    }

    /**
     * Groups the organization's projects by type.
     */
    @Permission(type = ResourceType.SITE, roles = {InitRoleCode.SITE_ADMINISTRATOR})
    @ApiOperation(value = "查询组织下的项目类型及类下项目数及项目")
    @GetMapping("/under_the_type")
    public ResponseEntity<Map<String, Object>> getProjectsByType(@PathVariable(name = "organization_id") Long organizationId) {
        Map<String, Object> grouped = organizationProjectService.getProjectsByType(organizationId);
        return new ResponseEntity<>(grouped, HttpStatus.OK);
    }

    /**
     * Lists the agile projects still selectable by the given program project
     * (excluding agile projects already claimed by it).
     *
     * @param organizationId organization id
     * @param projectId      program project id
     */
    @Permission(type = ResourceType.ORGANIZATION, roles = {InitRoleCode.ORGANIZATION_ADMINISTRATOR})
    @ApiOperation(value = "查询项目群下可选的敏捷项目")
    @GetMapping("/{project_id}/agile")
    public ResponseEntity<List<ProjectDTO>> getProjectsNotGroup(@PathVariable(name = "organization_id") Long organizationId,
                                                                @PathVariable(name = "project_id") Long projectId) {
        List<ProjectDTO> available = organizationProjectService.getAvailableAgileProj(organizationId, projectId);
        return new ResponseEntity<>(available, HttpStatus.OK);
    }

    /**
     * Returns the currently effective program info for the given project
     * (project enabled and current time inside its validity window).
     *
     * @param organizationId organization id
     * @param projectId      project id
     */
    @Permission(type = ResourceType.PROJECT, roles = {InitRoleCode.PROJECT_OWNER, InitRoleCode.PROJECT_MEMBER})
    @ApiOperation(value = "查询当前项目生效的普通项目群信息(项目为启用状态且当前时间在其有效期内)")
    @GetMapping(value = "/{project_id}/program")
    public ResponseEntity<ProjectDTO> getGroupInfoByEnableProject(@PathVariable(name = "organization_id") Long organizationId,
                                                                  @PathVariable(name = "project_id") Long projectId) {
        ProjectDTO program = organizationProjectService.getGroupInfoByEnableProject(organizationId, projectId);
        return new ResponseEntity<>(program, HttpStatus.OK);
    }
}
<|start_filename|>react/src/app/iam/dashboard/Announcement/index.js<|end_filename|>
import React, { Component } from 'react';
import { Modal, Timeline, Button } from 'choerodon-ui';
import { withRouter, Link } from 'react-router-dom';
import { inject, observer } from 'mobx-react';
import { DashBoardNavBar } from '@choerodon/boot';
import AnnouncementInfoStore from '../../stores/user/announcement-info';
import './index.scss';
const Item = Timeline.Item; // local alias for timeline entries used in render()
@withRouter
@inject('AppState', 'HeaderStore')
@observer
export default class Announcement extends Component {
componentWillMount() {
AnnouncementInfoStore.loadData();
}
handleCancel = () => {
AnnouncementInfoStore.closeDetail();
};
render() {
const { visible, title, content, announcementData } = AnnouncementInfoStore;
let containerStyle = {
display: 'block',
};
if (announcementData.length !== 0) {
containerStyle = {
display: 'flex',
justifyContent: 'center',
};
}
return (
<div className="c7n-iam-dashboard-announcement" style={containerStyle}>
{announcementData.length === 0 ? (
<React.Fragment>
<div className="c7n-iam-dashboard-announcement-empty" />
<div className="c7n-iam-dashboard-announcement-empty-text">暂无公告</div>
</React.Fragment>
) : (
<React.Fragment>
<Timeline className="c7n-iam-dashboard-announcement-timeline">
{announcementData.map(data => (
<Item className="item" key={`${data.id}`}>
<div className="time"><p>{data.sendDate.split(' ')[0]}</p><p>{data.sendDate.split(' ')[1]}</p></div>
<div className="title"><a onClick={() => AnnouncementInfoStore.showDetail(data)}>{data.title}</a></div>
</Item>
))}
<Item>null</Item>
</Timeline>
</React.Fragment>
)}
<Modal
visible={visible}
width={800}
title={title}
onCancel={this.handleCancel}
footer={[
<Button key="back" onClick={this.handleCancel}>返回</Button>,
]}
>
<div
className="c7n-iam-dashboard-announcement-detail-content"
dangerouslySetInnerHTML={{ __html: `${content}` }}
/>
</Modal>
<DashBoardNavBar>
<Link to="/notify/user-msg?type=site&msgType=announcement">转至所有公告</Link>
</DashBoardNavBar>
</div>
);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/asserts/OrganizationAssertHelper.java<|end_filename|>
package io.choerodon.iam.infra.asserts;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.infra.dto.OrganizationDTO;
import io.choerodon.iam.infra.mapper.OrganizationMapper;
import org.springframework.stereotype.Component;
import org.springframework.util.ObjectUtils;
/**
* 组织断言帮助类
*
* @author superlee
* @since 2019-05-13
*/
@Component
public class OrganizationAssertHelper extends AssertHelper {

    private OrganizationMapper organizationMapper;

    public OrganizationAssertHelper(OrganizationMapper organizationMapper) {
        this.organizationMapper = organizationMapper;
    }

    /**
     * Asserts that an organization with the given id exists.
     *
     * @param id organization primary key
     * @return the organization
     * @throws CommonException if no organization with that id exists
     */
    public OrganizationDTO organizationNotExisted(Long id) {
        return organizationNotExisted(id, "error.organization.not.exist");
    }

    public OrganizationDTO organizationNotExisted(Long id, String message) {
        OrganizationDTO dto = organizationMapper.selectByPrimaryKey(id);
        if (ObjectUtils.isEmpty(dto)) {
            throw new CommonException(message, id);
        }
        return dto;
    }

    /**
     * Asserts that an organization with the given code exists.
     *
     * @param code organization code
     * @return the organization
     * @throws CommonException if no organization with that code exists
     */
    public OrganizationDTO organizationNotExisted(String code) {
        return organizationNotExisted(code, "error.organization.not.exist");
    }

    public OrganizationDTO organizationNotExisted(String code, String message) {
        OrganizationDTO example = new OrganizationDTO();
        example.setCode(code);
        OrganizationDTO result = organizationMapper.selectOne(example);
        // BUG FIX: the original tested the query example object (never null,
        // it was constructed just above) instead of the query result, so the
        // not-found exception could never be thrown.
        if (ObjectUtils.isEmpty(result)) {
            throw new CommonException(message);
        }
        return result;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/enums/ProjectCategory.java<|end_filename|>
package io.choerodon.iam.infra.enums;
/**
 * Categories a project can belong to.
 *
 * @author Eugen
 **/
public enum ProjectCategory {
    AGILE("AGILE"),
    PROGRAM("PROGRAM"),
    ANALYTICAL("ANALYTICAL");

    // Wire value of the category, as persisted/transferred.
    private final String value;

    ProjectCategory(String value) {
        this.value = value;
    }

    /**
     * @return the category's wire value
     */
    public String value() {
        return value;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/annotation/NamingRuleTransStrategy.java<|end_filename|>
package io.choerodon.iam.infra.annotation;
/**
 * Naming-convention translation strategy between entity fields and
 * front-end URL parameters.
 * UNDER_LINE: entity uses under_line naming, URL values use camel naming.
 * CAMEL: entity uses camel naming, URL values use under_line naming.
 *
 * @author dengyouquan
 **/
public enum NamingRuleTransStrategy {
    CAMEL("CAMEL"),
    UNDER_LINE("UNDER_LINE");

    // Wire value of the strategy.
    private final String value;

    NamingRuleTransStrategy(String value) {
        this.value = value;
    }

    /**
     * @return the strategy's wire value
     */
    public String value() {
        return value;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/UserSearchDTO.java<|end_filename|>
package io.choerodon.iam.api.dto;
import io.choerodon.iam.infra.dto.UserDTO;
import io.swagger.annotations.ApiModelProperty;
/**
 * Search criteria for user queries: every {@link UserDTO} field plus a
 * free-form parameter array for fuzzy matching.
 *
 * @author superlee
 */
public class UserSearchDTO extends UserDTO {
    // Additional free-form search parameters.
    @ApiModelProperty(value = "其他参数")
    private String[] param;
    public String[] getParam() {
        return param;
    }
    public void setParam(String[] param) {
        this.param = param;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/LoginAttemptTimesMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.LoginAttemptTimesDTO;
import io.choerodon.mybatis.common.Mapper;
/**
 * MyBatis mapper exposing the framework's generic CRUD operations
 * for the login-attempt-times table ({@link LoginAttemptTimesDTO}).
 *
 * @author wuguokai
 */
public interface LoginAttemptTimesMapper extends Mapper<LoginAttemptTimesDTO> {
}
<|start_filename|>react/src/app/iam/containers/organization/application/EditSider.js<|end_filename|>
import React, { Component } from 'react';
import get from 'lodash/get';
import { Form, Modal, Tooltip, Select, Input } from 'choerodon-ui';
import { inject, observer } from 'mobx-react';
import { withRouter } from 'react-router-dom';
import { Content } from '@choerodon/boot';
import { injectIntl, FormattedMessage } from 'react-intl';
import './Application.scss';
import ApplicationStore from '../../../stores/organization/application/ApplicationStore';
const { Option } = Select;
const FormItem = Form.Item;
const { Sidebar } = Modal;
// i18n message-key prefix shared by all labels in this sider.
const intlPrefix = 'organization.application';
// Standard label/control column split for the edit form.
const formItemLayout = {
  labelCol: {
    xs: { span: 24 },
    sm: { span: 8 },
  },
  wrapperCol: {
    xs: { span: 24 },
    sm: { span: 16 },
  },
};
@withRouter
@injectIntl
@inject('AppState')
@observer
@Form.create({})
export default class Application extends Component {
/**
* 校验应用名称唯一性
* @param value 应用编码
* @param callback 回调函数
*/
checkName = (rule, value, callback) => {
const { editData } = ApplicationStore;
const { intl } = this.props;
const params = { name: value };
if (editData && editData.name === value) callback();
ApplicationStore.checkApplicationCode(params)
.then((mes) => {
if (mes.failed) {
callback(intl.formatMessage({ id: `${intlPrefix}.name.exist.msg` }));
} else {
callback();
}
}).catch((err) => {
callback('校验超时');
Choerodon.handleResponseError(err);
});
};
handleOk = () => {
const { onOk } = this.props;
const { editData } = ApplicationStore;
const { validateFields } = this.props.form;
validateFields((err, validated) => {
if (!err) {
const data = {
...editData,
name: validated.name.trim(),
projectId: validated.projectId || undefined,
};
ApplicationStore.updateApplication(data, editData.id)
.then((value) => {
if (!value.failed) {
Choerodon.prompt(this.props.intl.formatMessage({ id: 'save.success' }));
if (onOk) {
onOk();
}
} else {
Choerodon.prompt(value.message);
}
}).catch((error) => {
Choerodon.handleResponseError(error);
});
}
});
}
renderContent() {
const { intl, form } = this.props;
const { getFieldDecorator } = form;
const { projectData, editData } = ApplicationStore;
const inputWidth = 512;
const isCombina = get(editData, 'applicationCategory', undefined) === 'combination-application';
return (
<Form layout="vertical" className="rightForm" style={{ width: 512 }}>
{
!isCombina && (
<FormItem
{...formItemLayout}
>
{getFieldDecorator('applicationType', {
initialValue: intl.formatMessage({ id: `${intlPrefix}.type.${editData.applicationType.toLowerCase()}` }),
})(
<Input
disabled
label={<FormattedMessage id={`${intlPrefix}.type`} />}
style={{ width: inputWidth }}
ref={(e) => { this.createFocusInput = e; }}
/>,
)}
</FormItem>
)
}
<FormItem
{...formItemLayout}
>
{getFieldDecorator('code', {
initialValue: editData.code,
})(
<Input
disabled
label={<FormattedMessage id={`${intlPrefix}.code`} />}
style={{ width: inputWidth }}
ref={(e) => { this.createFocusInput = e; }}
/>,
)}
</FormItem>
<FormItem
{...formItemLayout}
>
{getFieldDecorator('name', {
initialValue: editData.name,
rules: [{
required: true,
message: intl.formatMessage({ id: `${intlPrefix}.name.require.msg` }),
}, {
pattern: /^[^\s]*$/,
message: intl.formatMessage({ id: `${intlPrefix}.whitespace.msg` }),
}, {
validator: this.checkName,
}],
validateTrigger: 'onBlur',
validateFirst: true,
})(
<Input
autoComplete="off"
label={<FormattedMessage id={`${intlPrefix}.name`} />}
style={{ width: inputWidth }}
ref={(e) => { this.editFocusInput = e; }}
maxLength={14}
showLengthInfo={false}
/>,
)}
</FormItem>
{
!isCombina && (
<FormItem
{...formItemLayout}
>
{getFieldDecorator('projectId', {
initialValue: editData.projectId || undefined,
})(
<Select
label={<FormattedMessage id={`${intlPrefix}.assignment`} />}
className="c7n-iam-application-radiogroup"
getPopupContainer={that => that}
filterOption={(input, option) => {
const childNode = option.props.children;
if (childNode && React.isValidElement(childNode)) {
return childNode.props.children.props.children.toLowerCase().indexOf(input.toLowerCase()) >= 0;
}
return false;
}}
disabled={(editData && !!editData.projectId)}
allowClear
filter
>
{
projectData.map(({ id, name, code }) => (
<Option value={id} key={id} title={name}>
<Tooltip title={code} placement="right" align={{ offset: [20, 0] }}>
<span style={{ display: 'inline-block', width: '100%' }}>{name}</span>
</Tooltip>
</Option>
))
}
</Select>,
)}
</FormItem>
)
}
</Form>
);
}
render() {
const { onCancel } = this.props;
const { editData } = ApplicationStore;
const isCombina = get(editData, 'applicationCategory', undefined) === 'combination-application';
return (
<Sidebar
visible
title="修改应用"
bodyStyle={{ padding: 0 }}
onCancel={onCancel}
onOk={this.handleOk}
onText="保存"
>
<Content
title={`修改${isCombina ? '组合' : '普通'}应用"${editData.name}"`}
description="您可以在此修改应用名称。如果此应用是组合应用,您可以在此查看此组合应用下子应用的信息,同时您还可以在此添加或删除此组合应用下的子应用。"
link="#"
>
{this.renderContent()}
</Content>
</Sidebar>
);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/RoleMemberController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import java.util.List;
import javax.validation.Valid;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.validator.ValidList;
import io.choerodon.iam.api.dto.*;
import io.choerodon.iam.api.query.ClientRoleQuery;
import io.choerodon.iam.infra.dto.*;
import io.choerodon.iam.infra.dto.UploadHistoryDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.choerodon.swagger.annotation.CustomPageRequest;
import io.swagger.annotations.ApiOperation;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import io.choerodon.core.base.BaseController;
import io.choerodon.core.iam.InitRoleCode;
import io.choerodon.core.iam.ResourceLevel;
import io.choerodon.iam.api.validator.MemberRoleValidator;
import io.choerodon.iam.api.validator.RoleAssignmentViewValidator;
import io.choerodon.iam.app.service.*;
import io.choerodon.iam.infra.enums.ExcelSuffix;
import springfox.documentation.annotations.ApiIgnore;
/**
* @author superlee
* @author wuguokai
*/
@RestController
@RequestMapping(value = "/v1")
public class RoleMemberController extends BaseController {
// Member type discriminator for user-role assignment records.
public static final String MEMBER_ROLE = "member-role";
// Collaborating services, injected through the constructor below.
private RoleMemberService roleMemberService;
private UserService userService;
private ClientService clientService;
private RoleService roleService;
private UploadHistoryService uploadHistoryService;
private MemberRoleValidator memberRoleValidator;
public RoleMemberController(RoleMemberService roleMemberService,
                            UserService userService,
                            RoleService roleService,
                            ClientService clientService,
                            UploadHistoryService uploadHistoryService,
                            MemberRoleValidator memberRoleValidator) {
    // Wire collaborators in field-declaration order for readability.
    this.roleMemberService = roleMemberService;
    this.userService = userService;
    this.clientService = clientService;
    this.roleService = roleService;
    this.uploadHistoryService = uploadHistoryService;
    this.memberRoleValidator = memberRoleValidator;
}
/**
 * Batch-assigns roles to users/clients at site level.
 * <p>
 * is_edit: false means a fresh assignment, true means editing an existing one.
 */
@Permission(type = ResourceType.SITE)
@ApiOperation(value = "全局层批量分配给用户/客户端角色")
@PostMapping(value = "/site/role_members")
public ResponseEntity<List<MemberRoleDTO>> createOrUpdateOnSiteLevel(@RequestParam(value = "is_edit", required = false) Boolean isEdit,
                                                                     @RequestParam(name = "member_type", required = false) String memberType,
                                                                     @RequestParam(name = "member_ids") List<Long> memberIds,
                                                                     @RequestBody ValidList<MemberRoleDTO> memberRoleDTOList) {
    // Reject roles that are not assignable at site level before touching data.
    memberRoleValidator.distributionRoleValidator(ResourceLevel.SITE.value(), memberRoleDTOList);
    List<MemberRoleDTO> assigned = roleMemberService.createOrUpdateRolesByMemberIdOnSiteLevel(
            isEdit, memberIds, memberRoleDTOList, memberType);
    return new ResponseEntity<>(assigned, HttpStatus.OK);
}
/**
 * Batch-assigns roles to users/clients at organization level.
 */
@Permission(type = ResourceType.ORGANIZATION)
@ApiOperation(value = "组织层批量分配给用户角色/客户端")
@PostMapping(value = "/organizations/{organization_id}/role_members")
public ResponseEntity<List<MemberRoleDTO>> createOrUpdateOnOrganizationLevel(@RequestParam(value = "is_edit", required = false) Boolean isEdit,
                                                                             @PathVariable(name = "organization_id") Long sourceId,
                                                                             @RequestParam(name = "member_type", required = false) String memberType,
                                                                             @RequestParam(name = "member_ids") List<Long> memberIds,
                                                                             @RequestBody ValidList<MemberRoleDTO> memberRoleDTOList) {
    // Reject roles that are not assignable at organization level.
    memberRoleValidator.distributionRoleValidator(ResourceLevel.ORGANIZATION.value(), memberRoleDTOList);
    List<MemberRoleDTO> assigned = roleMemberService.createOrUpdateRolesByMemberIdOnOrganizationLevel(
            isEdit, sourceId, memberIds, memberRoleDTOList, memberType);
    return new ResponseEntity<>(assigned, HttpStatus.OK);
}
/**
 * Batch-assigns roles to users/clients at project level.
 */
@Permission(type = ResourceType.PROJECT, roles = InitRoleCode.PROJECT_OWNER)
@ApiOperation(value = "项目层批量分配给用户/客户端角色")
@PostMapping(value = "/projects/{project_id}/role_members")
public ResponseEntity<List<MemberRoleDTO>> createOrUpdateOnProjectLevel(@RequestParam(value = "is_edit", required = false) Boolean isEdit,
                                                                        @PathVariable(name = "project_id") Long sourceId,
                                                                        @RequestParam(name = "member_type", required = false) String memberType,
                                                                        @RequestParam(name = "member_ids") List<Long> memberIds,
                                                                        @RequestBody ValidList<MemberRoleDTO> memberRoleDTOList) {
    // Reject roles that are not assignable at project level.
    memberRoleValidator.distributionRoleValidator(ResourceLevel.PROJECT.value(), memberRoleDTOList);
    List<MemberRoleDTO> assigned = roleMemberService.createOrUpdateRolesByMemberIdOnProjectLevel(
            isEdit, sourceId, memberIds, memberRoleDTOList, memberType);
    return new ResponseEntity<>(assigned, HttpStatus.OK);
}
/**
 * Batch-removes role assignments from users/clients at site level,
 * driven by the member ids and role ids in the delete DTO.
 */
@Permission(type = ResourceType.SITE)
@ApiOperation(value = "全局层批量移除用户/客户端的角色")
@PostMapping(value = "/site/role_members/delete")
public ResponseEntity deleteOnSiteLevel(@RequestBody @Valid RoleAssignmentDeleteDTO roleAssignmentDeleteDTO) {
    // Validate the "view" discriminator before processing the payload.
    RoleAssignmentViewValidator.validate(roleAssignmentDeleteDTO.getView());
    // Site level has no owning source; 0L is the conventional site source id.
    roleAssignmentDeleteDTO.setSourceId(0L);
    roleMemberService.deleteOnSiteLevel(roleAssignmentDeleteDTO);
    return new ResponseEntity(HttpStatus.NO_CONTENT);
}
/**
 * Batch-removes role assignments from users/clients at organization level,
 * driven by the member ids and role ids in the delete DTO.
 */
@Permission(type = ResourceType.ORGANIZATION)
@ApiOperation(value = "组织层批量移除用户/客户端的角色")
@PostMapping(value = "/organizations/{organization_id}/role_members/delete")
public ResponseEntity deleteOnOrganizationLevel(@PathVariable(name = "organization_id") Long sourceId,
                                                @RequestBody @Valid RoleAssignmentDeleteDTO roleAssignmentDeleteDTO) {
    // Validate the "view" discriminator before processing the payload.
    RoleAssignmentViewValidator.validate(roleAssignmentDeleteDTO.getView());
    // The path variable is authoritative for the owning organization.
    roleAssignmentDeleteDTO.setSourceId(sourceId);
    roleMemberService.deleteOnOrganizationLevel(roleAssignmentDeleteDTO);
    return new ResponseEntity(HttpStatus.NO_CONTENT);
}
/**
 * Batch-removes role assignments from users/clients at project level,
 * driven by the member ids and role ids in the delete DTO.
 */
@Permission(type = ResourceType.PROJECT, roles = InitRoleCode.PROJECT_OWNER)
@ApiOperation(value = "项目层批量移除用户/客户端的角色")
@PostMapping(value = "/projects/{project_id}/role_members/delete")
public ResponseEntity deleteOnProjectLevel(@PathVariable(name = "project_id") Long sourceId,
                                           @RequestBody @Valid RoleAssignmentDeleteDTO roleAssignmentDeleteDTO) {
    // Validate the "view" discriminator before processing the payload.
    RoleAssignmentViewValidator.validate(roleAssignmentDeleteDTO.getView());
    // The path variable is authoritative for the owning project.
    roleAssignmentDeleteDTO.setSourceId(sourceId);
    roleMemberService.deleteOnProjectLevel(roleAssignmentDeleteDTO);
    return new ResponseEntity(HttpStatus.NO_CONTENT);
}
/**
 * Pages through the users assigned to the given role at site level.
 *
 * @param roleAssignmentSearchDTO optional search filters
 * @param doPage                  when false the full result is returned unpaged
 */
@Permission(type = ResourceType.SITE)
@ApiOperation(value = "全局层分页查询角色下的用户")
@PostMapping(value = "/site/role_members/users")
public ResponseEntity<PageInfo<UserDTO>> pagingQueryUsersByRoleIdOnSiteLevel(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @RequestParam(name = "role_id") Long roleId,
        @RequestBody(required = false) @Valid RoleAssignmentSearchDTO roleAssignmentSearchDTO,
        @RequestParam(defaultValue = "true") boolean doPage) {
    PageInfo<UserDTO> users = userService.pagingQueryUsersByRoleIdOnSiteLevel(
            pageRequest, roleAssignmentSearchDTO, roleId, doPage);
    return new ResponseEntity<>(users, HttpStatus.OK);
}
/**
 * Pages through the clients assigned to the given role at site level.
 */
@Permission(type = ResourceType.SITE)
@ApiOperation(value = "全局层分页查询角色下的客户端")
@PostMapping(value = "/site/role_members/clients")
@CustomPageRequest
public ResponseEntity<PageInfo<ClientDTO>> pagingQueryClientsByRoleIdOnSiteLevel(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @RequestParam(name = "role_id") Long roleId,
        @RequestBody(required = false) @Valid ClientRoleQuery clientRoleSearchDTO) {
    // Site level has no owning source; 0L is the conventional site source id.
    PageInfo<ClientDTO> clients =
            clientService.pagingQueryUsersByRoleId(pageRequest, ResourceType.SITE, 0L, clientRoleSearchDTO, roleId);
    return new ResponseEntity<>(clients, HttpStatus.OK);
}
/**
 * Pages through the users assigned to the given role at organization level.
 *
 * @param doPage when false the full result is returned unpaged
 */
@CustomPageRequest
@Permission(type = ResourceType.ORGANIZATION)
@ApiOperation(value = "组织层分页查询角色下的用户")
@PostMapping(value = "/organizations/{organization_id}/role_members/users")
public ResponseEntity<PageInfo<UserDTO>> pagingQueryUsersByRoleIdOnOrganizationLevel(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @RequestParam(name = "role_id") Long roleId,
        @PathVariable(name = "organization_id") Long sourceId,
        @RequestBody(required = false) @Valid RoleAssignmentSearchDTO roleAssignmentSearchDTO,
        @RequestParam(defaultValue = "true") boolean doPage) {
    PageInfo<UserDTO> users = userService.pagingQueryUsersByRoleIdOnOrganizationLevel(
            pageRequest, roleAssignmentSearchDTO, roleId, sourceId, doPage);
    return new ResponseEntity<>(users, HttpStatus.OK);
}
/**
 * Pages through the clients assigned to the given role at organization level.
 */
@Permission(type = ResourceType.ORGANIZATION)
@ApiOperation(value = "组织层分页查询角色下的客户端")
@PostMapping(value = "/organizations/{organization_id}/role_members/clients")
public ResponseEntity<PageInfo<ClientDTO>> pagingQueryClientsByRoleIdOnOrganizationLevel(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @RequestParam(name = "role_id") Long roleId,
        @PathVariable(name = "organization_id") Long sourceId,
        @RequestBody(required = false) @Valid ClientRoleQuery clientRoleSearchDTO) {
    PageInfo<ClientDTO> clients =
            clientService.pagingQueryUsersByRoleId(pageRequest, ResourceType.ORGANIZATION, sourceId, clientRoleSearchDTO, roleId);
    return new ResponseEntity<>(clients, HttpStatus.OK);
}
/**
 * Pages through the users assigned to the given role at project level.
 *
 * @param roleId                  role id
 * @param sourceId                project id
 * @param roleAssignmentSearchDTO optional search filters
 * @param doPage                  when false the full result is returned unpaged
 */
@Permission(type = ResourceType.PROJECT, roles = InitRoleCode.PROJECT_OWNER)
@ApiOperation(value = "项目层分页查询角色下的用户")
@CustomPageRequest
@PostMapping(value = "/projects/{project_id}/role_members/users")
public ResponseEntity<PageInfo<UserDTO>> pagingQueryUsersByRoleIdOnProjectLevel(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @RequestParam(name = "role_id") Long roleId,
        @PathVariable(name = "project_id") Long sourceId,
        @RequestBody(required = false) @Valid RoleAssignmentSearchDTO roleAssignmentSearchDTO,
        @RequestParam(defaultValue = "true") boolean doPage) {
    PageInfo<UserDTO> users = userService.pagingQueryUsersByRoleIdOnProjectLevel(
            pageRequest, roleAssignmentSearchDTO, roleId, sourceId, doPage);
    return new ResponseEntity<>(users, HttpStatus.OK);
}
/**
 * Pages through the clients assigned to the given role at project level.
 */
@Permission(type = ResourceType.PROJECT, roles = InitRoleCode.PROJECT_OWNER)
@ApiOperation(value = "项目层分页查询角色下的客户端")
@CustomPageRequest
@PostMapping(value = "/projects/{project_id}/role_members/clients")
public ResponseEntity<PageInfo<ClientDTO>> pagingQueryClientsByRoleIdOnProjectLevel(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @RequestParam(name = "role_id") Long roleId,
        @PathVariable(name = "project_id") Long sourceId,
        @RequestBody(required = false) @Valid ClientRoleQuery clientRoleSearchDTO) {
    PageInfo<ClientDTO> clients =
            clientService.pagingQueryUsersByRoleId(pageRequest, ResourceType.PROJECT, sourceId, clientRoleSearchDTO, roleId);
    return new ResponseEntity<>(clients, HttpStatus.OK);
}
/**
 * Lists site-level roles together with the number of users assigned
 * to each role.
 *
 * @return the role list with user counts
 */
@Permission(type = ResourceType.SITE)
@ApiOperation(value = "全局层查询角色列表以及该角色下的用户数量")
@PostMapping(value = "/site/role_members/users/count")
public ResponseEntity<List<RoleDTO>> listRolesWithUserCountOnSiteLevel(
        @RequestBody(required = false) @Valid RoleAssignmentSearchDTO roleAssignmentSearchDTO) {
    List<RoleDTO> roles = roleService.listRolesWithUserCountOnSiteLevel(roleAssignmentSearchDTO);
    return new ResponseEntity<>(roles, HttpStatus.OK);
}
/**
 * Lists site-level roles together with the number of clients assigned
 * to each role.
 *
 * @return the role list with client counts
 */
@Permission(type = ResourceType.SITE)
@ApiOperation(value = "全局层查询角色列表以及该角色下的客户端数量")
@PostMapping(value = "/site/role_members/clients/count")
public ResponseEntity<List<RoleDTO>> listRolesWithClientCountOnSiteLevel(
        @RequestBody(required = false) @Valid ClientRoleQuery clientRoleSearchDTO) {
    List<RoleDTO> roles = roleService.listRolesWithClientCountOnSiteLevel(clientRoleSearchDTO);
    return new ResponseEntity<>(roles, HttpStatus.OK);
}
/**
 * Pages through the users that hold at least one site-level role.
 *
 * @return one page of matching users
 */
@Permission(type = ResourceType.SITE)
@ApiOperation(value = "全局层分页查询site层有角色的用户")
@GetMapping(value = "/site/role_members/users")
@CustomPageRequest
public ResponseEntity<PageInfo<UserDTO>> pagingQueryUsersOnSiteLevel(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @RequestParam(required = false, name = "id") Long userId,
        @RequestParam(required = false) String email,
        @RequestParam(required = false) String param) {
    PageInfo<UserDTO> users = userService.pagingQueryUsersOnSiteLevel(userId, email, pageRequest, param);
    return new ResponseEntity<>(users, HttpStatus.OK);
}
/**
 * Lists organization-level roles together with the number of users
 * assigned to each role.
 *
 * @return the role list with user counts
 */
@Permission(type = ResourceType.ORGANIZATION)
@ApiOperation(value = "组织层查询角色列表以及该角色下的用户数量")
@PostMapping(value = "/organizations/{organization_id}/role_members/users/count")
public ResponseEntity<List<RoleDTO>> listRolesWithUserCountOnOrganizationLevel(
        @PathVariable(name = "organization_id") Long sourceId,
        @RequestBody(required = false) @Valid RoleAssignmentSearchDTO roleAssignmentSearchDTO) {
    List<RoleDTO> roles = roleService.listRolesWithUserCountOnOrganizationLevel(
            roleAssignmentSearchDTO, sourceId);
    return new ResponseEntity<>(roles, HttpStatus.OK);
}
/**
 * Lists organization-level roles together with the number of clients
 * assigned to each role.
 *
 * @return the role list with client counts
 */
@Permission(type = ResourceType.ORGANIZATION)
@ApiOperation(value = "组织层查询角色列表以及该角色下的客户端数量")
@PostMapping(value = "/organizations/{organization_id}/role_members/clients/count")
public ResponseEntity<List<RoleDTO>> listRolesWithClientCountOnOrganizationLevel(
        @PathVariable(name = "organization_id") Long sourceId,
        @RequestBody(required = false) @Valid ClientRoleQuery clientRoleSearchDTO) {
    List<RoleDTO> roles = roleService.listRolesWithClientCountOnOrganizationLevel(
            clientRoleSearchDTO, sourceId);
    return new ResponseEntity<>(roles, HttpStatus.OK);
}
/**
 * Lists project-level roles together with the number of users assigned
 * to each role.
 *
 * @return the role list with user counts
 */
@Permission(type = ResourceType.PROJECT, roles = InitRoleCode.PROJECT_OWNER)
@ApiOperation(value = "项目层查询角色列表以及该角色下的用户数量")
@PostMapping(value = "/projects/{project_id}/role_members/users/count")
public ResponseEntity<List<RoleDTO>> listRolesWithUserCountOnProjectLevel(
        @PathVariable(name = "project_id") Long sourceId,
        @RequestBody(required = false) @Valid RoleAssignmentSearchDTO roleAssignmentSearchDTO) {
    List<RoleDTO> roles = roleService.listRolesWithUserCountOnProjectLevel(
            roleAssignmentSearchDTO, sourceId);
    return new ResponseEntity<>(roles, HttpStatus.OK);
}
/**
 * Lists project-level roles together with the number of clients assigned
 * to each role.
 *
 * @return the role list with client counts
 */
@Permission(type = ResourceType.PROJECT, roles = InitRoleCode.PROJECT_OWNER)
@ApiOperation(value = "项目层查询角色列表以及该角色下的客户端数量")
@PostMapping(value = "/projects/{project_id}/role_members/clients/count")
public ResponseEntity<List<RoleDTO>> listRolesWithClientCountOnProjectLevel(
        @PathVariable(name = "project_id") Long sourceId,
        @RequestBody(required = false) @Valid ClientRoleQuery clientRoleSearchDTO) {
    List<RoleDTO> roles = roleService.listRolesWithClientCountOnProjectLevel(
            clientRoleSearchDTO, sourceId);
    return new ResponseEntity<>(roles, HttpStatus.OK);
}
/**
 * Pages through users at site level, each carrying the site-level roles
 * they hold.
 *
 * @param roleAssignmentSearchDTO search filters
 */
@Permission(type = ResourceType.SITE, roles = {InitRoleCode.SITE_ADMINISTRATOR})
@ApiOperation(value = "全局层查询用户列表以及该用户拥有的角色")
@PostMapping(value = "/site/role_members/users/roles")
public ResponseEntity<PageInfo<UserDTO>> pagingQueryUsersWithSiteLevelRoles(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @RequestBody(required = false) @Valid RoleAssignmentSearchDTO roleAssignmentSearchDTO) {
    // Site level has no owning source; 0L is the conventional site source id.
    PageInfo<UserDTO> users = userService.pagingQueryUsersWithRoles(
            pageRequest, roleAssignmentSearchDTO, 0L, ResourceType.SITE);
    return new ResponseEntity<>(users, HttpStatus.OK);
}
/**
 * Same query as pagingQueryUsersWithSiteLevelRoles, exposed on a separate
 * route whose permission also admits platform developers.
 *
 * @param roleAssignmentSearchDTO search filters
 */
@Permission(type = ResourceType.SITE, roles = {InitRoleCode.SITE_ADMINISTRATOR, InitRoleCode.SITE_DEVELOPER})
@ApiOperation(value = "全局层查询用户列表以及该用户拥有的角色")
@PostMapping(value = "/site/role_members/users/roles/for_all")
public ResponseEntity<PageInfo<UserDTO>> pagingQueryUsersWithSiteLevelRolesWithDeveloper(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @RequestBody(required = false) @Valid RoleAssignmentSearchDTO roleAssignmentSearchDTO) {
    PageInfo<UserDTO> users = userService.pagingQueryUsersWithRoles(
            pageRequest, roleAssignmentSearchDTO, 0L, ResourceType.SITE);
    return new ResponseEntity<>(users, HttpStatus.OK);
}
/**
 * Pages through clients at site level, each carrying the site-level roles
 * they hold.
 *
 * @param clientRoleSearchDTO search filters
 */
@Permission(type = ResourceType.SITE)
@ApiOperation(value = "全局层查询客户端列表以及该客户端拥有的角色")
@PostMapping(value = "/site/role_members/clients/roles")
public ResponseEntity<PageInfo<ClientDTO>> pagingQueryClientsWithSiteLevelRoles(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @RequestBody(required = false) @Valid ClientRoleQuery clientRoleSearchDTO) {
    PageInfo<ClientDTO> clients =
            roleMemberService.pagingQueryClientsWithRoles(pageRequest, clientRoleSearchDTO, 0L, ResourceType.SITE);
    return new ResponseEntity<>(clients, HttpStatus.OK);
}
/**
 * Pages users within an organization, each enriched with the organization-level
 * roles they hold there.
 *
 * @param pageRequest             paging/sorting info (defaults to id desc)
 * @param sourceId                the organization id taken from the path
 * @param roleAssignmentSearchDTO optional search criteria
 * @return one page of users with their roles
 */
@Permission(type = ResourceType.ORGANIZATION)
@ApiOperation(value = "组织层查询用户列表以及该用户拥有的角色")
@PostMapping(value = "/organizations/{organization_id}/role_members/users/roles")
public ResponseEntity<PageInfo<UserDTO>> pagingQueryUsersWithOrganizationLevelRoles(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @PathVariable(name = "organization_id") Long sourceId,
        @RequestBody(required = false) @Valid RoleAssignmentSearchDTO roleAssignmentSearchDTO) {
    PageInfo<UserDTO> page = userService.pagingQueryUsersWithRoles(
            pageRequest, roleAssignmentSearchDTO, sourceId, ResourceType.ORGANIZATION);
    return new ResponseEntity<>(page, HttpStatus.OK);
}
/**
 * Pages clients within an organization, each enriched with the
 * organization-level roles they hold there.
 *
 * @param pageRequest         paging/sorting info (defaults to id desc)
 * @param sourceId            the organization id taken from the path
 * @param clientRoleSearchDTO optional search criteria
 * @return one page of clients with their roles
 */
@Permission(type = ResourceType.ORGANIZATION)
@ApiOperation(value = "组织层查询客户端列表以及该客户端拥有的角色")
@PostMapping(value = "/organizations/{organization_id}/role_members/clients/roles")
public ResponseEntity<PageInfo<ClientDTO>> pagingQueryClientsWithOrganizationLevelRoles(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @PathVariable(name = "organization_id") Long sourceId,
        @RequestBody(required = false) @Valid ClientRoleQuery clientRoleSearchDTO) {
    PageInfo<ClientDTO> page = roleMemberService.pagingQueryClientsWithRoles(
            pageRequest, clientRoleSearchDTO, sourceId, ResourceType.ORGANIZATION);
    return new ResponseEntity<>(page, HttpStatus.OK);
}
/**
 * Pages users within a project, each enriched with the project-level roles
 * they hold there.
 *
 * @param pageRequest             paging/sorting info (defaults to id desc)
 * @param sourceId                the project id taken from the path
 * @param roleAssignmentSearchDTO search criteria; pass {} when no filter is wanted
 * @return one page of users with their roles
 */
@Permission(type = ResourceType.PROJECT, roles = InitRoleCode.PROJECT_OWNER)
@ApiOperation(value = "项目层查询用户列表以及该用户拥有的角色")
@PostMapping(value = "/projects/{project_id}/role_members/users/roles")
public ResponseEntity<PageInfo<UserDTO>> pagingQueryUsersWithProjectLevelRoles(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @PathVariable(name = "project_id") Long sourceId,
        @RequestBody(required = false) @Valid RoleAssignmentSearchDTO roleAssignmentSearchDTO) {
    PageInfo<UserDTO> page = userService.pagingQueryUsersWithRoles(
            pageRequest, roleAssignmentSearchDTO, sourceId, ResourceType.PROJECT);
    return new ResponseEntity<>(page, HttpStatus.OK);
}
/**
 * Pages clients within a project, each enriched with the project-level roles
 * they hold there.
 *
 * @param pageRequest         paging/sorting info (defaults to id desc)
 * @param sourceId            the project id taken from the path
 * @param clientRoleSearchDTO optional search criteria
 * @return one page of clients with their roles
 */
@Permission(type = ResourceType.PROJECT, roles = InitRoleCode.PROJECT_OWNER)
@ApiOperation(value = "项目层查询客户端列表以及该客户端拥有的角色")
@PostMapping(value = "/projects/{project_id}/role_members/clients/roles")
public ResponseEntity<PageInfo<ClientDTO>> pagingQueryClientsWithProjectLevelRoles(
        @ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @PathVariable(name = "project_id") Long sourceId,
        @RequestBody(required = false) @Valid ClientRoleQuery clientRoleSearchDTO) {
    PageInfo<ClientDTO> page = roleMemberService.pagingQueryClientsWithRoles(
            pageRequest, clientRoleSearchDTO, sourceId, ResourceType.PROJECT);
    return new ResponseEntity<>(page, HttpStatus.OK);
}
/**
 * Looks up the roles a given user holds inside a given organization.
 *
 * @param organizationId the organization id taken from the path
 * @param userId         the user id taken from the path
 * @return the user's organization-level roles
 */
@Permission(type = ResourceType.ORGANIZATION, permissionLogin = true)
@ApiOperation(value = "组织层根据用户Id及组织Id查询用户及该用户拥有的角色")
@GetMapping(value = "/organizations/{organization_id}/role_members/users/{user_id}")
public ResponseEntity<List<RoleDTO>> getUserWithOrgLevelRolesByUserId(@PathVariable(name = "organization_id") Long organizationId,
                                                                      @PathVariable(name = "user_id") Long userId) {
    List<RoleDTO> roles = roleService.listRolesBySourceIdAndTypeAndUserId(
            ResourceLevel.ORGANIZATION.value(), organizationId, userId);
    return new ResponseEntity<>(roles, HttpStatus.OK);
}
/**
 * Looks up the roles a given user holds inside a given project.
 *
 * @param projectId the project id taken from the path
 * @param userId    the user id taken from the path
 * @return the user's project-level roles
 */
@Permission(type = ResourceType.PROJECT, permissionLogin = true)
@ApiOperation(value = "项目层根据用户Id及项目Id查询用户及该用户拥有的角色")
@GetMapping(value = "/projects/{project_id}/role_members/users/{user_id}")
public ResponseEntity<List<RoleDTO>> getUserWithProjLevelRolesByUserId(@PathVariable(name = "project_id") Long projectId,
                                                                       @PathVariable(name = "user_id") Long userId) {
    List<RoleDTO> roles = roleService.listRolesBySourceIdAndTypeAndUserId(
            ResourceLevel.PROJECT.value(), projectId, userId);
    return new ResponseEntity<>(roles, HttpStatus.OK);
}
/**
 * Downloads the excel import template at site level.
 * Delegates to the service, which builds the XLSX workbook and streams it back.
 *
 * @return the template file as a downloadable resource
 */
@Permission(type = ResourceType.SITE)
@ApiOperation(value = "全局层下载excel导入模板")
@GetMapping(value = "/site/role_members/download_templates")
public ResponseEntity<Resource> downloadTemplatesOnSite() {
    return roleMemberService.downloadTemplates(ExcelSuffix.XLSX.value());
}
/**
 * Downloads the excel import template at organization level.
 * The organization id only scopes the permission check; the template itself is
 * the same XLSX produced for every level.
 *
 * @param organizationId the organization id taken from the path (unused by the service call)
 * @return the template file as a downloadable resource
 */
@Permission(type = ResourceType.ORGANIZATION)
@ApiOperation(value = "组织层下载excel导入模板")
@GetMapping(value = "/organizations/{organization_id}/role_members/download_templates")
public ResponseEntity<Resource> downloadTemplatesOnOrganization(@PathVariable(name = "organization_id") Long organizationId) {
    return roleMemberService.downloadTemplates(ExcelSuffix.XLSX.value());
}
/**
 * Downloads the excel import template at project level.
 * The project id only scopes the permission check; the template itself is the
 * same XLSX produced for every level.
 *
 * @param projectId the project id taken from the path (unused by the service call)
 * @return the template file as a downloadable resource
 */
@Permission(type = ResourceType.PROJECT, roles = InitRoleCode.PROJECT_OWNER)
@ApiOperation(value = "项目层下载excel导入模板")
@GetMapping(value = "/projects/{project_id}/role_members/download_templates")
public ResponseEntity<Resource> downloadTemplatesOnProject(@PathVariable(name = "project_id") Long projectId) {
    return roleMemberService.downloadTemplates(ExcelSuffix.XLSX.value());
}
/**
 * Bulk-imports user/role assignments at site level from an uploaded excel file.
 *
 * @param file the uploaded excel workbook
 * @return 204 No Content once the import has been accepted
 */
@Permission(type = ResourceType.SITE)
@ApiOperation("site层从excel里面批量导入用户角色关系")
@PostMapping("/site/role_members/batch_import")
public ResponseEntity import2MemberRoleOnSite(@RequestPart MultipartFile file) {
    // Source id 0L denotes the site (global) scope.
    roleMemberService.import2MemberRole(0L, ResourceLevel.SITE.value(), file);
    return ResponseEntity.noContent().build();
}
/**
 * Bulk-imports user/role assignments at organization level from an uploaded
 * excel file.
 *
 * @param organizationId the organization id taken from the path
 * @param file           the uploaded excel workbook
 * @return 204 No Content once the import has been accepted
 */
@Permission(type = ResourceType.ORGANIZATION)
@ApiOperation("组织层从excel里面批量导入用户角色关系")
@PostMapping("/organizations/{organization_id}/role_members/batch_import")
public ResponseEntity import2MemberRoleOnOrganization(@PathVariable(name = "organization_id") Long organizationId,
                                                      @RequestPart MultipartFile file) {
    roleMemberService.import2MemberRole(organizationId, ResourceLevel.ORGANIZATION.value(), file);
    return ResponseEntity.noContent().build();
}
/**
 * Bulk-imports user/role assignments at project level from an uploaded excel
 * file.
 *
 * @param projectId the project id taken from the path
 * @param file      the uploaded excel workbook
 * @return 204 No Content once the import has been accepted
 */
@Permission(type = ResourceType.PROJECT, roles = InitRoleCode.PROJECT_OWNER)
@ApiOperation("项目层从excel里面批量导入用户角色关系")
@PostMapping("/projects/{project_id}/role_members/batch_import")
public ResponseEntity import2MemberRoleOnProject(@PathVariable(name = "project_id") Long projectId,
                                                 @RequestPart MultipartFile file) {
    roleMemberService.import2MemberRole(projectId, ResourceLevel.PROJECT.value(), file);
    return ResponseEntity.noContent().build();
}
/**
 * Returns the latest member-role excel upload record for a user at site level.
 *
 * @param userId the user id taken from the path
 * @return the most recent upload history entry
 */
@Permission(type = ResourceType.SITE)
@ApiOperation("查site层的历史")
@GetMapping("/site/member_role/users/{user_id}/upload/history")
public ResponseEntity<UploadHistoryDTO> latestHistoryOnSite(@PathVariable(name = "user_id") Long userId) {
    // Source id 0L denotes the site (global) scope.
    UploadHistoryDTO history = uploadHistoryService.latestHistory(
            userId, MEMBER_ROLE, 0L, ResourceLevel.SITE.value());
    return new ResponseEntity<>(history, HttpStatus.OK);
}
/**
 * Returns the latest member-role excel upload record for a user within an
 * organization.
 *
 * @param organizationId the organization id taken from the path
 * @param userId         the user id taken from the path
 * @return the most recent upload history entry
 */
@Permission(type = ResourceType.ORGANIZATION)
@ApiOperation("查组织层的历史")
@GetMapping("/organizations/{organization_id}/member_role/users/{user_id}/upload/history")
public ResponseEntity<UploadHistoryDTO> latestHistoryOnOrganization(@PathVariable(name = "organization_id") Long organizationId,
                                                                    @PathVariable(name = "user_id") Long userId) {
    UploadHistoryDTO history = uploadHistoryService.latestHistory(
            userId, MEMBER_ROLE, organizationId, ResourceLevel.ORGANIZATION.value());
    return new ResponseEntity<>(history, HttpStatus.OK);
}
/**
 * Returns the latest member-role excel upload record for a user within a
 * project.
 *
 * @param projectId the project id taken from the path
 * @param userId    the user id taken from the path
 * @return the most recent upload history entry
 */
@Permission(type = ResourceType.PROJECT, roles = InitRoleCode.PROJECT_OWNER)
@ApiOperation("查项目层的历史")
@GetMapping("/projects/{project_id}/member_role/users/{user_id}/upload/history")
public ResponseEntity<UploadHistoryDTO> latestHistoryOnProject(@PathVariable(name = "project_id") Long projectId,
                                                               @PathVariable(name = "user_id") Long userId) {
    UploadHistoryDTO history = uploadHistoryService.latestHistory(
            userId, MEMBER_ROLE, projectId, ResourceLevel.PROJECT.value());
    return new ResponseEntity<>(history, HttpStatus.OK);
}
/**
 * Pages all non-disabled users across the platform (public endpoint).
 *
 * @param pageRequest    paging/sorting info (defaults to id desc)
 * @param organizationId organization used to scope/rank the results
 * @param param          optional fuzzy-search parameter
 * @return one page of simplified user records
 */
@Permission(permissionPublic = true)
@ApiOperation(value = "分页查询全平台层用户(未禁用)")
@GetMapping(value = "/all/users")
@CustomPageRequest
public ResponseEntity<PageInfo<SimplifiedUserDTO>> queryAllUsers(@ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @RequestParam(value = "organization_id") Long organizationId,
        @RequestParam(value = "param", required = false) String param) {
    PageInfo<SimplifiedUserDTO> page =
            userService.pagingQueryAllUser(pageRequest, param, organizationId);
    return new ResponseEntity<>(page, HttpStatus.OK);
}
/**
 * Pages all clients across the platform (public endpoint).
 *
 * @param pageRequest paging/sorting info (defaults to id desc)
 * @param param       optional fuzzy-search parameter
 * @return one page of simplified client records
 */
@Permission(permissionPublic = true)
@ApiOperation(value = "分页查询全平台层客户端")
@GetMapping(value = "/all/clients")
@CustomPageRequest
public ResponseEntity<PageInfo<SimplifiedClientDTO>> queryAllClients(@ApiIgnore
        @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
        @RequestParam(value = "param", required = false) String param) {
    PageInfo<SimplifiedClientDTO> page = clientService.pagingQueryAllClients(pageRequest, param);
    return new ResponseEntity<>(page, HttpStatus.OK);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/SystemAnnouncementDTO.java<|end_filename|>
package io.choerodon.iam.api.dto;
import java.util.Date;
/**
* @author dengyouquan
**/
public class SystemAnnouncementDTO {
private Long id;
private String title;
private String content;
private Date sendDate;
private Long objectVersionNumber;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getContent() {
return content;
}
public void setContent(String content) {
this.content = content;
}
public Date getSendDate() {
return sendDate;
}
public void setSendDate(Date sendDate) {
this.sendDate = sendDate;
}
public Long getObjectVersionNumber() {
return objectVersionNumber;
}
public void setObjectVersionNumber(Long objectVersionNumber) {
this.objectVersionNumber = objectVersionNumber;
}
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/DashboardService.java<|end_filename|>
package io.choerodon.iam.app.service;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.infra.dto.DashboardDTO;
/**
 * Application service for dashboard definitions.
 *
 * @author <EMAIL>
 */
public interface DashboardService {

    /**
     * Updates the dashboard identified by {@code dashboardId}.
     *
     * @param dashboardId  id of the dashboard to update
     * @param dashboardDTO new field values
     * @param updateRole   whether the dashboard's role bindings are updated as well
     * @return the updated dashboard
     */
    DashboardDTO update(Long dashboardId, DashboardDTO dashboardDTO, Boolean updateRole);

    /**
     * Fetches a single dashboard by id.
     */
    DashboardDTO query(Long dashboardId);

    /**
     * Pages dashboards matching the example object and the fuzzy-search param.
     */
    PageInfo<DashboardDTO> list(DashboardDTO dashboardDTO, PageRequest pageRequest, String param);

    /**
     * Resets the given dashboard to its default configuration.
     */
    void reset(Long dashboardId);
}
<|start_filename|>react/src/app/iam/stores/organization/organization-setting/OrganizationSettingStore.js<|end_filename|>
import { action, computed, observable } from 'mobx';
import { axios, store } from '@choerodon/boot';
// MobX store backing the organization-setting page: holds the organization's
// basic info and the uploaded logo image URL, plus the axios calls that load
// and save them.
// NOTE(review): fields and some method names say "project*" but the endpoints
// are /organizations — presumably copied from a project store; confirm intent.
@store('OrganizationSetting')
class OrganizationSettingStore {
  // Organization info object (historically named projectInfo).
  @observable projectInfo = {};
  @observable projectTypes = [];
  // URL of the uploaded organization logo/avatar image.
  @observable imageUrl = null;

  @action setImageUrl(data) {
    this.imageUrl = data;
  }

  @computed get getImageUrl() {
    return this.imageUrl;
  }

  @action setOrganizationInfo(data) {
    this.projectInfo = data;
  }

  @computed get organizationInfo() {
    return this.projectInfo;
  }

  // Loads the organization's info; returns the axios promise.
  axiosGetOrganizationInfo(id) {
    return axios.get(`/iam/v1/organizations/${id}/org_level`);
  }

  // Saves the organization's info; returns the axios promise.
  // NOTE(review): save uses /organization_level while load uses /org_level —
  // verify both paths against the backend controller.
  axiosSaveProjectInfo(data) {
    return axios.put(
      `/iam/v1/organizations/${data.id}/organization_level`,
      data
    );
  }
}

const organizationSetting = new OrganizationSettingStore();
export default organizationSetting;
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/SystemSettingServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.choerodon.asgard.saga.annotation.Saga;
import io.choerodon.asgard.saga.dto.StartInstanceDTO;
import io.choerodon.asgard.saga.feign.SagaClient;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.api.dto.payload.SystemSettingEventPayload;
import io.choerodon.iam.app.service.SystemSettingService;
import io.choerodon.iam.infra.common.utils.ImageUtils;
import io.choerodon.iam.infra.common.utils.MockMultipartFile;
import io.choerodon.iam.infra.common.utils.SagaTopic;
import io.choerodon.iam.infra.dto.SystemSettingDTO;
import io.choerodon.iam.infra.exception.AlreadyExsitedException;
import io.choerodon.iam.infra.exception.UpdateExcetion;
import io.choerodon.iam.infra.feign.FileFeignClient;
import io.choerodon.iam.infra.mapper.SystemSettingMapper;
import net.coobird.thumbnailator.Thumbnails;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.List;
/**
* @author zmf
* @since 2018-10-15
*/
/**
 * Manages the single, platform-wide system-setting record (logo, favicon,
 * password-length policy, ...). Every successful write triggers the
 * SYSTEM_SETTING_UPDATE saga so downstream services can react.
 *
 * Note the deliberate statement order in the write paths: validate, write via
 * the mapper, re-select the row, then publish the saga with the persisted
 * state.
 */
@Service
@Saga(code = SagaTopic.SystemSetting.SYSTEM_SETTING_UPDATE, description = "iam更改系统设置", inputSchemaClass = SystemSettingEventPayload.class)
public class SystemSettingServiceImpl implements SystemSettingService {
    private final FileFeignClient fileFeignClient;
    private final SagaClient sagaClient;
    private final ObjectMapper objectMapper = new ObjectMapper();
    private static final String ERROR_UPDATE_SYSTEM_SETTING_EVENT_SEND = "error.system.setting.update.send.event";
    // Feature flag injected from configuration (choerodon.category.enabled).
    private final Boolean enableCategory;
    private SystemSettingMapper systemSettingMapper;

    public SystemSettingServiceImpl(FileFeignClient fileFeignClient,
                                    SagaClient sagaClient,
                                    SystemSettingMapper systemSettingMapper,
                                    @Value("${choerodon.category.enabled:false}") Boolean enableCategory) {
        this.fileFeignClient = fileFeignClient;
        this.sagaClient = sagaClient;
        this.enableCategory = enableCategory;
        this.systemSettingMapper = systemSettingMapper;
    }

    /**
     * Crops the uploaded favicon image and stores it via the file service.
     *
     * @return the stored file's URL
     */
    @Override
    public String uploadFavicon(MultipartFile file, Double rotate, Integer axisX, Integer axisY, Integer width, Integer height) {
        try {
            file = ImageUtils.cutImage(file, rotate, axisX, axisY, width, height);
        } catch (IOException e) {
            throw new CommonException("error.image.cut");
        }
        return uploadFile(file);
    }

    /**
     * Crops the uploaded logo, scales it to a fixed 80x80 thumbnail, and
     * stores it via the file service.
     *
     * @return the stored file's URL
     */
    @Override
    public String uploadSystemLogo(MultipartFile file, Double rotate, Integer axisX, Integer axisY, Integer width, Integer height) {
        try {
            file = ImageUtils.cutImage(file, rotate, axisX, axisY, width, height);
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            Thumbnails.of(file.getInputStream()).forceSize(80, 80).toOutputStream(outputStream);
            // Re-wrap the scaled bytes so the upload client still sees a MultipartFile.
            file = new MockMultipartFile(file.getName(), file.getOriginalFilename(), file.getContentType(), outputStream.toByteArray());
            return uploadFile(file);
        } catch (Exception e) {
            throw new CommonException("error.setting.logo.save.failure");
        }
    }

    /**
     * Creates the one-and-only system-setting row, then publishes the update saga.
     * Fails if a row already exists.
     */
    @Override
    public SystemSettingDTO addSetting(SystemSettingDTO systemSettingDTO) {
        addDefaultLengthValue(systemSettingDTO);
        validateLength(systemSettingDTO);
        // business logic: at most one setting record may exist
        List<SystemSettingDTO> records = systemSettingMapper.selectAll();
        if (!records.isEmpty()) {
            throw new AlreadyExsitedException("error.setting.already.one");
        }
        if (systemSettingMapper.insertSelective(systemSettingDTO) != 1) {
            throw new CommonException("error.setting.insert.failed");
        }
        // Re-select so the saga payload carries the fully persisted state.
        SystemSettingDTO dto = systemSettingMapper.selectByPrimaryKey(systemSettingDTO.getId());
        // trigger the saga flow
        triggerSagaFlow(dto);
        return dto;
    }

    /**
     * Updates the existing system-setting row, then publishes the update saga.
     * Fails if no row exists yet.
     */
    @Override
    public SystemSettingDTO updateSetting(SystemSettingDTO systemSettingDTO) {
        addDefaultLengthValue(systemSettingDTO);
        validateLength(systemSettingDTO);
        // business logic: the row to update must already exist
        List<SystemSettingDTO> records = systemSettingMapper.selectAll();
        if (records.isEmpty()) {
            throw new CommonException("error.setting.update.invalid");
        }
        // Target the single existing row regardless of the id the caller sent.
        systemSettingDTO.setId(records.get(0).getId());
        if (systemSettingMapper.updateByPrimaryKeySelective(systemSettingDTO) != 1) {
            throw new UpdateExcetion("error.setting.update");
        }
        // Re-select so the saga payload carries the fully persisted state.
        SystemSettingDTO dto = systemSettingMapper.selectByPrimaryKey(systemSettingDTO.getId());
        // trigger the saga flow
        triggerSagaFlow(dto);
        return dto;
    }

    /**
     * Publishes the SYSTEM_SETTING_UPDATE saga with the given state.
     *
     * @param dto the persisted setting to broadcast
     */
    private void triggerSagaFlow(final SystemSettingDTO dto) {
        try {
            SystemSettingEventPayload payload = new SystemSettingEventPayload();
            BeanUtils.copyProperties(dto, payload);
            sagaClient.startSaga(SagaTopic.SystemSetting.SYSTEM_SETTING_UPDATE, new StartInstanceDTO(objectMapper.writeValueAsString(payload)));
        } catch (Exception e) {
            throw new CommonException(ERROR_UPDATE_SYSTEM_SETTING_EVENT_SEND, e);
        }
    }

    /**
     * Deletes every system-setting row and broadcasts an empty payload so
     * consumers fall back to their defaults.
     */
    @Override
    public void resetSetting() {
        // business logic: remove all existing rows
        List<SystemSettingDTO> records = systemSettingMapper.selectAll();
        for (SystemSettingDTO domain : records) {
            systemSettingMapper.deleteByPrimaryKey(domain.getId());
        }
        // trigger the saga flow with an empty payload (settings cleared)
        try {
            sagaClient.startSaga(SagaTopic.SystemSetting.SYSTEM_SETTING_UPDATE, new StartInstanceDTO(objectMapper.writeValueAsString(new SystemSettingEventPayload())));
        } catch (Exception e) {
            throw new CommonException(ERROR_UPDATE_SYSTEM_SETTING_EVENT_SEND, e);
        }
    }

    /**
     * Returns the current setting row, or {@code null} when none exists.
     */
    @Override
    public SystemSettingDTO getSetting() {
        List<SystemSettingDTO> records = systemSettingMapper.selectAll();
        return records.isEmpty() ? null : records.get(0);
    }

    // Uploads the file through the file service and returns its URL.
    private String uploadFile(MultipartFile file) {
        return fileFeignClient.uploadFile("iam-service", file.getOriginalFilename(), file).getBody();
    }

    /**
     * If the value is empty, default value is to be set.
     *
     * @param systemSettingDTO the dto
     */
    private void addDefaultLengthValue(SystemSettingDTO systemSettingDTO) {
        if (systemSettingDTO.getMinPasswordLength() == null) {
            systemSettingDTO.setMinPasswordLength(0);
        }
        if (systemSettingDTO.getMaxPasswordLength() == null) {
            systemSettingDTO.setMaxPasswordLength(65535);
        }
    }

    /**
     * validate the value of min length and max length
     *
     * @param systemSettingDTO dto
     */
    private void validateLength(SystemSettingDTO systemSettingDTO) {
        if (systemSettingDTO.getMinPasswordLength() > systemSettingDTO.getMaxPasswordLength()) {
            throw new CommonException("error.maxLength.lessThan.minLength");
        }
    }

    /**
     * Exposes the injected category feature flag.
     */
    @Override
    public Boolean getEnabledStateOfTheCategory() {
        return enableCategory;
    }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/infra/common/utils/excel/ExcelImportUserTaskSpec.groovy<|end_filename|>
package io.choerodon.iam.infra.common.utils.excel
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.api.dto.ExcelMemberRoleDTO
import io.choerodon.iam.api.validator.UserPasswordValidator
import io.choerodon.iam.app.service.OrganizationUserService
import io.choerodon.iam.app.service.RoleMemberService
import io.choerodon.iam.app.service.UserService
import io.choerodon.iam.infra.dto.UploadHistoryDTO
import io.choerodon.iam.infra.dto.UserDTO
import io.choerodon.iam.infra.feign.FileFeignClient
import io.choerodon.iam.infra.mapper.MemberRoleMapper
import io.choerodon.iam.infra.mapper.RoleMapper
import io.choerodon.iam.infra.mapper.UploadHistoryMapper
import io.choerodon.iam.infra.mapper.UserMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import org.springframework.http.HttpStatus
import org.springframework.http.ResponseEntity
import org.springframework.transaction.annotation.Transactional
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * Integration spec for ExcelImportUserTask: importing users and member-role
 * rows parsed from excel, and recording an upload history row.
 *
 * @author dengyouquan
 */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class ExcelImportUserTaskSpec extends Specification {
    @Autowired
    private RoleMemberService roleMemberService
    @Autowired
    private OrganizationUserService organizationUserService
    // Mocked so no real file-service call happens during the import.
    private FileFeignClient fileFeignClient = Mock(FileFeignClient)
    @Autowired
    private UserService userService
    @Autowired
    private UserPasswordValidator userPasswordValidator
    private int count = 3
    // Built by hand in setup() so the mocked FileFeignClient is injected.
    private ExcelImportUserTask excelImportUserTask
    @Autowired
    UserMapper userMapper
    @Autowired
    RoleMapper roleMapper
    @Autowired
    private MemberRoleMapper memberRoleMapper
    @Autowired
    UploadHistoryMapper uploadHistoryMapper
    @Autowired
    ExcelImportUserTask.FinishFallback finishFallback

    def setup() {
        excelImportUserTask = new ExcelImportUserTask(roleMemberService, organizationUserService,
                fileFeignClient, userService, userPasswordValidator, userMapper, roleMapper, memberRoleMapper)
    }

    @Transactional
    def "ImportUsers"() {
        given: "构造请求参数"
        long userId = 1L
        UserDTO dto = new UserDTO()
        dto.setLoginName("login")
        dto.setPassword("password")
        dto.setPhone("110")
        dto.setEmail("<EMAIL>")
        dto.setRealName("real")
        List<UserDTO> users = new ArrayList<>()
        users << dto
        // A history row must pre-exist: the task updates it when finished.
        UploadHistoryDTO uploadHistoryDTO = new UploadHistoryDTO()
        uploadHistoryDTO.setUserId(1L)
        uploadHistoryDTO.setType("user")
        uploadHistoryMapper.insert(uploadHistoryDTO)
        UploadHistoryDTO history = uploadHistoryMapper.selectByPrimaryKey(uploadHistoryDTO.getId())
        when: "调用方法"
        excelImportUserTask.importUsers(userId, users, 1L, history, finishFallback)
        then: "校验结果"
        noExceptionThrown()
        // The task uploads the result/error workbook exactly once.
        1 * fileFeignClient.uploadFile(_, _, _) >> { new ResponseEntity<String>(HttpStatus.OK) }
    }

    @Transactional
    def "ImportMemberRole"() {
        given: "构造请求参数"
        ExcelMemberRoleDTO excelMemberRole = new ExcelMemberRoleDTO()
        excelMemberRole.setLoginName("admin")
        excelMemberRole.setRoleCode("role/site/default/site-user")
        List<ExcelMemberRoleDTO> excelMemberRoles = new ArrayList<>()
        excelMemberRoles << excelMemberRole
        // Pre-existing history row scoped to the site level (sourceId 0).
        UploadHistoryDTO uploadHistoryDTO = new UploadHistoryDTO()
        uploadHistoryDTO.setUserId(1L)
        uploadHistoryDTO.setType("member-role")
        uploadHistoryDTO.setSourceId(0L)
        uploadHistoryDTO.setSourceType("site")
        uploadHistoryMapper.insert(uploadHistoryDTO)
        UploadHistoryDTO history = uploadHistoryMapper.selectByPrimaryKey(uploadHistoryDTO.getId())
        when: "调用方法"
        excelImportUserTask.importMemberRole(excelMemberRoles, history, finishFallback)
        then: "校验结果"
        noExceptionThrown()
    }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/api/controller/v1/ProjectTypeControllerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.controller.v1
import io.choerodon.base.domain.PageRequest
import io.choerodon.core.exception.CommonException
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.app.service.ProjectTypeService
import io.choerodon.iam.infra.dto.ProjectTypeDTO
import io.choerodon.iam.infra.mapper.ProjectTypeMapper
import org.modelmapper.ModelMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.web.client.TestRestTemplate
import org.springframework.context.annotation.Import
import org.springframework.transaction.annotation.Transactional
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
// Integration spec for ProjectTypeController covering list, paging, create,
// update and the code-uniqueness check.
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class ProjectTypeControllerSpec extends Specification {
    @Autowired
    private TestRestTemplate restTemplate
    @Autowired
    private ProjectTypeController projectTypeController
    @Autowired
    ProjectTypeMapper projectTypeMapper
    @Autowired
    ProjectTypeService service

    def "test list"() {
        given:
        // NOTE(review): swaps a mock into the shared controller bean and never
        // restores the real service — may leak into later tests; confirm.
        def projectTypeService = Mock(ProjectTypeService)
        projectTypeController.setProjectTypeService(projectTypeService)
        when:
        def entity = restTemplate.getForEntity("/v1/projects/types", String)
        then:
        entity.statusCode.is2xxSuccessful()
        1 * projectTypeService.list()
    }

    def "pagingQuery"() {
        given:
        // Calls the controller directly (no HTTP) with the real service.
        ProjectTypeController controller = new ProjectTypeController(service)
        PageRequest pageRequest = new PageRequest(1, 20)
        when:
        def result = controller.pagingQuery(pageRequest, null, null, null)
        then:
        result.statusCode.is2xxSuccessful()
        !result.body.list.isEmpty()
    }

    @Transactional
    def "create"() {
        given:
        ProjectTypeController controller = new ProjectTypeController(service)
        ProjectTypeDTO dto = new ProjectTypeDTO()
        dto.setCode("test-code")
        dto.setName("name")
        dto.setDescription("desc")
        when:
        def result = controller.create(dto)
        then:
        result.statusCode.is2xxSuccessful()
        result.getBody().getCode() == "test-code"
    }

    @Transactional
    def "update"() {
        given:
        ProjectTypeController controller = new ProjectTypeController(service)
        // Load an existing seeded row and change only its description.
        ProjectTypeDTO example = new ProjectTypeDTO()
        example.setCode("type/others")
        ProjectTypeDTO projectType = projectTypeMapper.selectOne(example)
        ModelMapper modelMapper = new ModelMapper()
        ProjectTypeDTO dto = modelMapper.map(projectType, ProjectTypeDTO.class)
        dto.setDescription("desc")
        when:
        def result = controller.update(dto.getId(), dto)
        then:
        result.statusCode.is2xxSuccessful()
        result.getBody().description == "desc"
    }

    def "check"() {
        given:
        ProjectTypeController controller = new ProjectTypeController(service)
        ProjectTypeDTO dto = new ProjectTypeDTO()
        dto.setCode("type/research-technology")
        when:
        // Duplicate code with no id -> rejected.
        controller.check(dto)
        then:
        thrown(CommonException)
        when:
        // Same code owned by id 1 -> allowed (updating itself).
        dto.setId(1L)
        controller.check(dto)
        then:
        noExceptionThrown()
        when:
        // Same code claimed by a different id -> rejected.
        dto.setId(2L)
        controller.check(dto)
        then:
        thrown(CommonException)
    }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/api/controller/v1/BookMarkControllerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.controller.v1
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.infra.dto.BookMarkDTO
import io.choerodon.iam.infra.mapper.BookMarkMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.web.client.TestRestTemplate
import org.springframework.context.annotation.Import
import org.springframework.http.HttpEntity
import org.springframework.http.HttpMethod
import spock.lang.Shared
import spock.lang.Specification
import spock.lang.Stepwise
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * Stepwise integration spec for the bookmark endpoints (create, update,
 * delete, list). Shared fixtures are lazily inserted by setup() and removed
 * by cleanup() once the last test has flagged needClean.
 *
 * @author dengyouquan
 */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
@Stepwise
class BookMarkControllerSpec extends Specification {
    private static String BASE_PATH = "/v1/bookmarks"
    @Autowired
    private TestRestTemplate restTemplate
    @Autowired
    private BookMarkMapper bookMarkMapper
    // Shared across the @Stepwise-ordered tests.
    @Shared
    def bookMarkList = new ArrayList<BookMarkDTO>()
    @Shared
    def needInit = true
    @Shared
    def needClean = false
    def count = 3

    def setup() {
        // Insert the shared fixtures exactly once, before the first test.
        if (needInit) {
            given: "构造参数"
            needInit = false
            for (int i = 0; i < count; i++) {
                BookMarkDTO bookMarkDO = new BookMarkDTO()
                bookMarkDO.setUserId(i)
                bookMarkDO.setName("name")
                bookMarkDO.setUrl("url")
                bookMarkDO.setIcon("icon")
                bookMarkDO.setSort(i)
                bookMarkDO.setColor("color")
                bookMarkList.add(bookMarkDO)
            }
            when: "插入数据"
            def result = 0
            for(BookMarkDTO dto :bookMarkList ){
                bookMarkMapper.insert(dto)
                result++
            }
            then: "校验参数"
            result == count
        }
    }

    def cleanup() {
        // Remove the shared fixtures once the final test has run.
        if (needClean) {
            given: ""
            needClean = false
            when: "删除数据"
            int result = 0
            for (BookMarkDTO bookMarkDO : bookMarkList) {
                // NOTE(review): passes the entity rather than its id; tk.mybatis
                // accepts an entity whose PK is set — confirm that is the case here.
                result += bookMarkMapper.deleteByPrimaryKey(bookMarkDO)
            }
            then: "校验参数"
            result == count
        }
    }

    def "Create"() {
        given: "构造请求参数"
        BookMarkDTO bookMarkDTO = new BookMarkDTO()
        bookMarkDTO.setUserId(1L)
        bookMarkDTO.setName("name")
        bookMarkDTO.setUrl("url")
        bookMarkDTO.setIcon("icon")
        bookMarkDTO.setSort(1L)
        bookMarkDTO.setColor("color")
        when: "调用方法"
        def entity = restTemplate.postForEntity(BASE_PATH, bookMarkDTO, BookMarkDTO)
        then: "校验参数"
        entity.statusCode.is2xxSuccessful()
        // The server overrides userId with the (anonymous) caller's id 0.
        entity.getBody().getUserId().equals(0L)
        entity.getBody().getName().equals(bookMarkDTO.getName())
        entity.getBody().getUrl().equals(bookMarkDTO.getUrl())
        entity.getBody().getSort().equals(bookMarkDTO.getSort())
        entity.getBody().getIcon().equals(bookMarkDTO.getIcon())
        entity.getBody().getColor().equals(bookMarkDTO.getColor())
    }

    def "Update"() {
        given: "构造请求参数"
        List<BookMarkDTO> bookMarkDTOList = new ArrayList<>()
        BookMarkDTO bookMarkDTO = bookMarkList.get(0)
        bookMarkDTO.setObjectVersionNumber(1L)
        bookMarkDTOList.add(bookMarkDTO)
        HttpEntity<Object> httpEntity = new HttpEntity<>(bookMarkDTOList)
        when: "调用方法"
        def entity = restTemplate.exchange(BASE_PATH, HttpMethod.PUT, httpEntity, List)
        then: "校验参数"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().size() == bookMarkDTOList.size()
    }

    def "Delete"() {
        HttpEntity<Object> httpEntity = new HttpEntity<>()
        when: "调用方法"
        def entity = restTemplate.exchange(BASE_PATH + "/{id}", HttpMethod.DELETE, httpEntity, Void, bookMarkList.get(0).getId())
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
    }

    def "List"() {
        given: "构造请求参数"
        // Last test in @Stepwise order: ask cleanup() to drop the fixtures.
        needClean = true
        when: "调用方法"
        def entity = restTemplate.getForEntity(BASE_PATH, List, 1L)
        then: "校验参数"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().size() == 1
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/ClientService.java<|end_filename|>
package io.choerodon.iam.app.service;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.iam.api.query.ClientRoleQuery;
import io.choerodon.iam.api.dto.SimplifiedClientDTO;
import io.choerodon.iam.infra.dto.ClientDTO;
/**
 * Application service for OAuth client management within an organization.
 *
 * @author wuguokai
 */
public interface ClientService {

    /**
     * Creates a client under the given organization.
     */
    ClientDTO create(Long orgId, ClientDTO clientDTO);

    /**
     * Returns a pre-filled template for the client-creation form.
     */
    ClientDTO getDefaultCreateData(Long orgId);

    /**
     * Updates a client.
     *
     * @param clientDTO new field values (id identifies the client)
     * @return the updated client
     */
    ClientDTO update(ClientDTO clientDTO);

    /**
     * Deletes the client by id and removes its member_role relationship rows
     * at the same time.
     *
     * @param orgId    the owning organization's id
     * @param clientId id of the client to delete
     */
    void delete(Long orgId, Long clientId);

    /**
     * Fetches a client by id within an organization.
     */
    ClientDTO query(Long orgId, Long clientId);

    /**
     * Fetches a client by name within an organization.
     */
    ClientDTO queryByName(Long orgId, String clientName);

    /**
     * Pages clients matching the example object and the fuzzy-search param.
     *
     * @param clientDTO   example object used as the filter
     * @param pageRequest paging/sorting info
     * @param param       fuzzy-search parameter
     * @return one page of clients
     */
    PageInfo<ClientDTO> list(ClientDTO clientDTO, PageRequest pageRequest, String param);

    /**
     * Validates a client (e.g. name uniqueness); throws on failure.
     */
    void check(ClientDTO client);

    /**
     * Pages the clients that hold a given role at the given source/level.
     */
    PageInfo<ClientDTO> pagingQueryUsersByRoleId(PageRequest pageRequest, ResourceType resourceType, Long sourceId, ClientRoleQuery clientRoleSearchDTO, Long roleId);

    /**
     * Pages all clients across the platform in simplified form.
     */
    PageInfo<SimplifiedClientDTO> pagingQueryAllClients(PageRequest pageRequest, String params);
}
<|start_filename|>src/test/groovy/io/choerodon/iam/api/controller/v1/UserControllerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.controller.v1
import com.github.pagehelper.PageInfo
import io.choerodon.core.domain.Page
import io.choerodon.core.exception.ExceptionResponse
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.api.dto.CreateUserWithRolesDTO
import io.choerodon.iam.api.dto.UserPasswordDTO
import io.choerodon.iam.infra.dto.UserDTO
import io.choerodon.iam.infra.mapper.UserMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.web.client.TestRestTemplate
import org.springframework.context.annotation.Import
import org.springframework.http.HttpEntity
import org.springframework.http.HttpMethod
import org.springframework.web.multipart.MultipartFile
import spock.lang.Shared
import spock.lang.Specification
import spock.lang.Stepwise
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
* @author dengyouquan
* */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
@Stepwise
class UserControllerSpec extends Specification {
// Base path of the user endpoints under test.
private static final String BASE_PATH = "/v1/users"
@Autowired
private TestRestTemplate restTemplate
@Autowired
private UserMapper userMapper
@Shared
def needInit = true
@Shared
def needClean = false

// No per-test fixture needed; kept as hooks for symmetry with sibling specs.
def setup() {
}

def cleanup() {
}
def "QuerySelf"() {
when: "调用方法"
//default用户
def entity = restTemplate.getForEntity(BASE_PATH + "/self", UserDTO)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody() == null
}
def "QueryInfo"() {
when: "调用方法"
def entity = restTemplate.getForEntity(BASE_PATH + "/{id}/info", ExceptionResponse, 1L)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.user.id.not.equals")
// when: "调用方法"
// entity = restTemplate.getForEntity(BASE_PATH + "/{id}/info", ExceptionResponse, 0L)
//
// then: "校验结果"
// entity.statusCode.is2xxSuccessful()
}
def "UpdateInfo"() {
given: "构造请求参数"
def userDTO = userMapper.selectAll().get(0)
def userId = userDTO.getId()
when: "调用方法[异常-用户版本号为null]"
userDTO.setObjectVersionNumber(null)
def httpEntity = new HttpEntity<Object>(userDTO)
def entity = restTemplate.exchange(BASE_PATH + "/{id}/info", HttpMethod.PUT, httpEntity, ExceptionResponse, userId)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.user.objectVersionNumber.null")
when: "调用方法"
userDTO.setObjectVersionNumber(1)
httpEntity = new HttpEntity<Object>(userDTO)
entity = restTemplate.exchange(BASE_PATH + "/{id}/info", HttpMethod.PUT, httpEntity, UserDTO, userId)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
}
def "UploadPhoto"() {
when: "调用方法"
MultipartFile file = null;
def entity = restTemplate.postForEntity(BASE_PATH + "/{id}/upload_photo", null, ExceptionResponse, 0L)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.upload.multipartSize")
}
def "SavePhoto"() {
when: "调用方法"
MultipartFile file = null;
def entity = restTemplate.postForEntity(BASE_PATH + "/{id}/save_photo", null, ExceptionResponse, 0L)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.upload.multipartSize")
}
def "QueryOrganizations"() {
when: "调用方法"
def entity = restTemplate.getForEntity(BASE_PATH + "/{id}/organizations", List, 0L)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().size() == 0
}
def "QueryProjects"() {
when: "调用方法"
def entity = restTemplate.getForEntity(BASE_PATH + "/{id}/projects", List, 0L)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().size() == 0
}
def "PagingQueryProjectsSelf"() {
when: "调用方法"
def entity = restTemplate.getForEntity(BASE_PATH + "/self/projects/paging_query", Page)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
}
def "PagingQueryOrganizationsSelf"() {
when: "调用方法"
def entity = restTemplate.getForEntity(BASE_PATH + "/self/organizations/paging_query", Page)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
}
def "QueryProjectsByOrganizationId"() {
when: "调用方法"
def entity = restTemplate.getForEntity(BASE_PATH + "/{id}/organizations/{organization_id}/projects", List, 0L, 1L)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
}
def "QueryOrganizationWithProjects"() {
when: "调用方法"
def entity = restTemplate.getForEntity(BASE_PATH + "/self/organizations_projects", List)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
}
def "Query"() {
given: "构造请求参数"
def userDTO = userMapper.selectAll().get(0)
def userId = userDTO.getId()
when: "调用方法"
def entity = restTemplate.getForEntity(BASE_PATH + "?login_name={login_name}", UserDTO, userDTO.getLoginName())
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getId().equals(userDTO.getId())
entity.getBody().getRealName().equals(userDTO.getRealName())
entity.getBody().getLoginName().equals(userDTO.getLoginName())
}
def "SelfUpdatePassword"() {
given: "构造请求参数"
def userDTO = userMapper.selectAll().get(0)
def userId = userDTO.getId()
def userPasswordDTO = new UserPasswordDTO()
userPasswordDTO.setOriginalPassword("<PASSWORD>")
userPasswordDTO.setPassword("<PASSWORD>")
when: "调用方法[异常-原始密码错误]"
def httpEntity = new HttpEntity<Object>(userPasswordDTO)
def entity = restTemplate.exchange(BASE_PATH + "/{id}/password", HttpMethod.PUT, httpEntity, ExceptionResponse, userId)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.user.id.not.equals")
}
def "Check"() {
given: "构造请求参数"
def userDTO = userMapper.selectAll().get(0)
when: "调用方法[异常-校验属性为空]"
def userDTO1 = new UserDTO()
userDTO1.setLoginName(null)
userDTO1.setEmail(null)
def entity = restTemplate.postForEntity(BASE_PATH + "/check", userDTO1, ExceptionResponse)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.user.validation.fields.empty")
when: "调用方法[异常-用户存在,id不同,字段相同]"
//id不能相同
userDTO.setId(null)
entity = restTemplate.postForEntity(BASE_PATH + "/check", userDTO, ExceptionResponse)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.user.loginName.exist")
when: "调用方法"
userDTO1.setEmail("email")
userDTO1.setLoginName("login")
userDTO1.setId(1L)
entity = restTemplate.postForEntity(BASE_PATH + "/check", userDTO1, Void)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
}
def "PagingQueryAdminUsers"() {
when: "调用方法"
def entity = restTemplate.getForEntity(BASE_PATH + "/admin", PageInfo)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().total == 1
}
def "DeleteDefaultUser"() {
given: "构造请求参数"
def userDTO = userMapper.selectAll().get(0)
def userId = userDTO.getId()
when: "调用方法[异常-用户不存在]"
def httpEntity = new HttpEntity<Object>()
def entity = restTemplate.exchange(BASE_PATH + "/admin/{id}", HttpMethod.DELETE, httpEntity, ExceptionResponse, 1000L)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.user.not.exist")
when: "调用方法[异常-用户大小不对]"
httpEntity = new HttpEntity<Object>()
entity = restTemplate.exchange(BASE_PATH + "/admin/{id}", HttpMethod.DELETE, httpEntity, ExceptionResponse, userId)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().getCode().equals("error.user.admin.size")
}
def "AddDefaultUsers"() {
given: "构造请求参数"
def paramsMap = new HashMap<String, Object>()
long[] ids = new long[2]
ids[0] = 1
ids[1] = 2
paramsMap.put("id", ids)
when: "调用方法"
def entity = restTemplate.postForEntity(BASE_PATH + "/admin?id={id}", Void, ExceptionResponse, paramsMap)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
}
def "ListUsersByIds"() {
given: "构造请求参数"
long[] ids = new long[1]
ids[0] = 1
when: "调用方法"
//id数组
def entity = restTemplate.postForEntity(BASE_PATH + "/ids", ids, List)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().size() == 1
}
def "PagingQueryOrganizationAndRolesById"() {
when: "调用方法"
def entity = restTemplate.getForEntity(BASE_PATH + "/{id}/organization_roles", PageInfo, 1L)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().list.size() == 1
}
def "PagingQueryProjectAndRolesById"() {
when: "调用方法"
def entity = restTemplate.getForEntity(BASE_PATH + "/{id}/project_roles", PageInfo, 1L)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
}
def "CreateUserAndAssignRoles"() {
given: "构造请求参数"
def userDO = new UserDTO()
userDO.setLoginName("dengyouquan")
userDO.setRealName("dengyouquan")
userDO.setEmail("<EMAIL>")
def userWithRoles = new CreateUserWithRolesDTO()
userWithRoles.setSourceId(0)
userWithRoles.setSourceType("site")
userWithRoles.setMemberType("user")
userWithRoles.setUser(userDO)
when: "调用方法"
def entity = restTemplate.postForEntity(BASE_PATH + "/init_role", userWithRoles, UserDTO)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
}
def "GetUserIds"() {
when: "调用方法"
def entity = restTemplate.getForEntity(BASE_PATH + "/ids", Long[], 1L)
then: "校验结果"
entity.statusCode.is2xxSuccessful()
entity.getBody().size() == 1
}
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/ProjectMapCategorySimpleDTO.java<|end_filename|>
package io.choerodon.iam.api.dto;
/**
 * Lightweight projection pairing a project id with one of its category codes.
 */
public class ProjectMapCategorySimpleDTO {

    private Long projectId;
    private String category;

    /**
     * @return the category code mapped to the project
     */
    public String getCategory() {
        return category;
    }

    /**
     * @param category the category code mapped to the project
     */
    public void setCategory(String category) {
        this.category = category;
    }

    /**
     * @return the id of the project this mapping belongs to
     */
    public Long getProjectId() {
        return projectId;
    }

    /**
     * @param projectId the id of the project this mapping belongs to
     */
    public void setProjectId(Long projectId) {
        this.projectId = projectId;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/MenuServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import io.choerodon.base.enums.MenuType;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.oauth.CustomUserDetails;
import io.choerodon.iam.api.validator.MenuValidator;
import io.choerodon.iam.app.service.MenuService;
import io.choerodon.iam.infra.asserts.DetailsHelperAssert;
import io.choerodon.iam.infra.asserts.MenuAssertHelper;
import io.choerodon.iam.infra.dto.MenuDTO;
import io.choerodon.iam.infra.dto.OrganizationDTO;
import io.choerodon.iam.infra.dto.ProjectDTO;
import io.choerodon.iam.infra.mapper.MenuMapper;
import io.choerodon.iam.infra.mapper.OrganizationMapper;
import io.choerodon.iam.infra.mapper.ProjectMapCategoryMapper;
import io.choerodon.iam.infra.mapper.ProjectMapper;
import io.choerodon.mybatis.entity.Criteria;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import java.util.*;
import java.util.stream.Collectors;
/**
* @author wuguokai
* @author superlee
*/
@Service
public class MenuServiceImpl implements MenuService {

    // Whether category-based menu control is enabled (choerodon.category.enabled).
    private boolean enableCategory;
    private OrganizationMapper organizationMapper;
    private MenuMapper menuMapper;
    private ProjectMapCategoryMapper projectMapCategoryMapper;
    private MenuAssertHelper menuAssertHelper;
    private ProjectMapper projectMapper;

    public MenuServiceImpl(@Value("${choerodon.category.enabled:false}") Boolean enableCategory,
                           OrganizationMapper organizationMapper,
                           MenuMapper menuMapper,
                           MenuAssertHelper menuAssertHelper,
                           ProjectMapCategoryMapper projectMapCategoryMapper,
                           ProjectMapper projectMapper) {
        this.enableCategory = enableCategory;
        this.organizationMapper = organizationMapper;
        this.menuMapper = menuMapper;
        this.menuAssertHelper = menuAssertHelper;
        this.projectMapCategoryMapper = projectMapCategoryMapper;
        this.projectMapper = projectMapper;
    }

    /**
     * Queries a menu by primary key; returns null when it does not exist.
     */
    @Override
    public MenuDTO query(Long id) {
        return menuMapper.selectByPrimaryKey(id);
    }

    /**
     * Creates a menu after validating code uniqueness, level and type,
     * filling in defaults for sort and parentCode.
     */
    @Override
    public MenuDTO create(MenuDTO menuDTO) {
        preCreate(menuDTO);
        menuMapper.insertSelective(menuDTO);
        return menuDTO;
    }

    /**
     * Pre-insert validation and defaulting: code must be unused, sort defaults
     * to 0, parentCode defaults to "", level and type must be known enums.
     */
    private void preCreate(MenuDTO menuDTO) {
        menuAssertHelper.codeExisted(menuDTO.getCode());
        if (menuDTO.getSort() == null) {
            menuDTO.setSort(0);
        }
        if (menuDTO.getParentCode() == null) {
            menuDTO.setParentCode("");
        }
        String level = menuDTO.getResourceLevel();
        if (!ResourceType.contains(level)) {
            throw new CommonException("error.illegal.level");
        }
        String type = menuDTO.getType();
        if (!MenuType.contains(type)) {
            throw new CommonException("error.menu.illegal.type", type);
        }
    }

    /**
     * Deletes a menu by id; built-in (default) menus cannot be deleted.
     */
    @Override
    public void delete(Long id) {
        MenuDTO dto = menuAssertHelper.menuNotExisted(id);
        if (dto.getDefault()) {
            throw new CommonException("error.menu.default");
        }
        menuMapper.deleteByPrimaryKey(id);
    }

    /**
     * Updates a non-default menu; only the whitelisted columns are written.
     */
    @Override
    public MenuDTO update(Long id, MenuDTO menuDTO) {
        MenuDTO dto = menuAssertHelper.menuNotExisted(id);
        if (dto.getDefault()) {
            throw new CommonException("error.menu.default");
        }
        menuDTO.setId(id);
        Criteria criteria = new Criteria();
        // restrict the update to these columns only
        criteria.update("name", "icon", "page_permission_code", "search_condition", "category");
        menuMapper.updateByPrimaryKeyOptions(menuDTO, criteria);
        return menuMapper.selectByPrimaryKey(id);
    }

    /**
     * Builds the menu tree visible to the current user under the top menu
     * identified by {@code code}, honoring admin status, resource level,
     * and (optionally) category control.
     */
    @Override
    public MenuDTO menus(String code, Long sourceId) {
        MenuDTO topMenu = getTopMenuByCode(code);
        String level = topMenu.getResourceLevel();
        CustomUserDetails userDetails = DetailsHelperAssert.userDetailNotExisted();
        Long userId = userDetails.getUserId();
        boolean isAdmin = userDetails.getAdmin();
        Set<MenuDTO> menus;
        boolean isProjectOrOrganization = (ResourceType.isProject(level) || ResourceType.isOrganization(level));
        if (isProjectOrOrganization && enableCategory) {
            menus = menusByCategory(isAdmin, userId, level, sourceId);
        } else {
            if (isAdmin) {
                if (ResourceType.isProject(level)) {
                    menus = new LinkedHashSet<>(
                            menuMapper.queryProjectMenusWithCategoryByRootUser(getProjectCategory(level, sourceId)));
                } else {
                    menus = menuMapper.selectByLevelWithPermissionType(level);
                }
            } else {
                menus = new HashSet<>(
                        menuMapper.selectMenusAfterCheckPermission(userId, level, sourceId, getProjectCategory(level, sourceId), "user"));
            }
        }
        // query all entries of type "menu" (directories) at this level
        MenuDTO dto = new MenuDTO();
        dto.setType(MenuType.MENU.value());
        dto.setResourceLevel(level);
        List<MenuDTO> allMenus = menuMapper.select(dto);
        // filter out directories that are already in the permission-checked set
        Set<Long> menuIds = menus.stream().map(MenuDTO::getId).collect(Collectors.toSet());
        Set<MenuDTO> menuCollect = allMenus.stream().filter(m -> !menuIds.contains(m.getId())).collect(Collectors.toSet());
        // add the remaining directories so the tree structure is complete
        menus.addAll(menuCollect);
        toTreeMenu(topMenu, menus, true);
        return topMenu;
    }

    /**
     * Menus for project/organization when category control is enabled.
     *
     * @param isAdmin  whether the current user is a root/admin user
     * @param userId   the current user's id
     * @param level    resource level (project or organization)
     * @param sourceId project or organization id
     * @return the menus the user may see
     */
    private Set<MenuDTO> menusByCategory(Boolean isAdmin, Long userId, String level, Long sourceId) {
        Set<MenuDTO> menus;
        List<String> categories = getCategories(level, sourceId);
        if (CollectionUtils.isEmpty(categories)) {
            throw new CommonException("error.category.not.exist");
        }
        if (isAdmin) {
            menus = new LinkedHashSet<>(
                    menuMapper.queryMenusWithCategoryAndLevelByRootUser(getCategories(level, sourceId), level));
        } else {
            menus = new HashSet<>(
                    menuMapper.selectMenusAfterPassingThePermissionCheck(userId, level, sourceId, getCategories(level, sourceId), "user"));
        }
        return menus;
    }

    /**
     * Open-source edition category lookup:
     * a project maps one-to-one to a category stored on FD_PROJECT;
     * organizations have no category here.
     *
     * @param level    resource level
     * @param sourceId project id when level is project
     * @return the project's category, or null
     */
    private String getProjectCategory(String level, Long sourceId) {
        String category = null;
        if (ResourceType.isProject(level)) {
            ProjectDTO project = projectMapper.selectByPrimaryKey(sourceId);
            if (project != null) {
                category = project.getCategory();
            }
        }
        return category;
    }

    /**
     * Non-open-source edition category lookup:
     * an organization maps one-to-one to a category on FD_ORGANIZATION;
     * a project maps one-to-many via FD_PROJECT_MAP_CATEGORY.
     *
     * @param level    resource level
     * @param sourceId project or organization id
     * @return all categories for the source (possibly empty)
     */
    private List<String> getCategories(String level, Long sourceId) {
        List<String> categories = new ArrayList<>();
        if (ResourceType.isProject(level)) {
            categories.addAll(projectMapCategoryMapper.selectProjectCategories(sourceId));
        }
        if (ResourceType.isOrganization(level)) {
            OrganizationDTO organizationDTO = organizationMapper.selectByPrimaryKey(sourceId);
            if (organizationDTO != null) {
                categories.add(organizationDTO.getCategory());
            }
        }
        return categories;
    }

    /**
     * Builds the full (permission-annotated) menu tree for configuration
     * screens; empty directories are kept visible.
     */
    @Override
    public MenuDTO menuConfig(String code) {
        MenuDTO menu = getTopMenuByCode(code);
        String level = menu.getResourceLevel();
        Set<MenuDTO> menus = new HashSet<>(menuMapper.selectMenusWithPermission(level));
        toTreeMenu(menu, menus, true);
        return menu;
    }

    /**
     * Looks up the top menu by its unique code; fails if it does not exist.
     */
    private MenuDTO getTopMenuByCode(String code) {
        MenuDTO dto = new MenuDTO();
        dto.setCode(code);
        MenuDTO menu = menuMapper.selectOne(dto);
        if (menu == null) {
            throw new CommonException("error.menu.top.not.existed");
        }
        return menu;
    }

    /**
     * Persists a menu-configuration tree: inserts new menus, updates the ones
     * still present, and deletes custom directories that were removed.
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public void saveMenuConfig(String code, List<MenuDTO> menus) {
        MenuDTO topMenu = getTopMenuByCode(code);
        validate(code, menus);
        String level = topMenu.getResourceLevel();
        // the menu list submitted by the caller, flattened from the tree
        List<MenuDTO> submitMenuList = menuTreeToList(menus);
        // the menus currently persisted at this level
        List<MenuDTO> existMenus = getMenuByResourceLevel(level);
        // menus to actually insert (no id yet)
        List<MenuDTO> insertMenus = submitMenuList.stream().filter(item -> item.getId() == null).collect(Collectors.toList());
        // submitted menus that carry an id, i.e. update candidates
        List<MenuDTO> submitUpdateMenus = submitMenuList.stream().filter(item -> item.getId() != null).collect(Collectors.toList());
        // menus to actually update
        List<MenuDTO> updateMenus = new ArrayList<>();
        // menus to actually delete
        List<MenuDTO> deleteMenus = new ArrayList<>();
        // Compare persisted menus with the submitted update candidates: a
        // persisted menu missing from the submission has been removed by the
        // user; otherwise it is an update.
        if (!CollectionUtils.isEmpty(existMenus)) {
            for (MenuDTO existMenu : existMenus) {
                boolean needToDelete = true;
                for (MenuDTO submitMenu : submitUpdateMenus) {
                    if (existMenu.getId().equals(submitMenu.getId())) {
                        updateMenus.add(submitMenu);
                        needToDelete = false;
                        break;
                    }
                }
                if (needToDelete && MenuType.isMenu(existMenu.getType())) {
                    boolean isNotDefaultMenu = existMenu.getDefault() != null && !existMenu.getDefault();
                    // Trace the custom directory back to its root; only delete it
                    // when that root is the top menu being saved.
                    if (isNotDefaultMenu) {
                        MenuDTO deleteTopMenu = getTopMenu(existMenu);
                        if (topMenu.getCode().equalsIgnoreCase(deleteTopMenu.getCode())) {
                            deleteMenus.add(existMenu);
                        }
                    }
                }
            }
        }
        // do insert
        if (!CollectionUtils.isEmpty(insertMenus)) {
            for (MenuDTO insertMenu : insertMenus) {
                MenuValidator.insertValidate(insertMenu, level);
                menuMapper.insertSelective(insertMenu);
            }
        }
        // do update
        if (!CollectionUtils.isEmpty(updateMenus)) {
            for (MenuDTO updateMenu : updateMenus) {
                boolean isNotDefault = MenuType.isMenu(updateMenu.getType()) && updateMenu.getDefault() != null && !updateMenu.getDefault();
                // only self menu can update name and icon
                MenuDTO menuDTO = new MenuDTO();
                if (isNotDefault) {
                    menuDTO.setName(updateMenu.getName());
                    menuDTO.setIcon(updateMenu.getIcon());
                }
                menuDTO.setSort(updateMenu.getSort());
                menuDTO.setParentCode(updateMenu.getParentCode());
                menuDTO.setId(updateMenu.getId());
                menuDTO.setObjectVersionNumber(updateMenu.getObjectVersionNumber());
                menuMapper.updateByPrimaryKeySelective(menuDTO);
            }
        }
        // do delete
        if (!CollectionUtils.isEmpty(deleteMenus)) {
            for (MenuDTO deleteMenu : deleteMenus) {
                MenuValidator.deleteValidate(deleteMenu);
                // NOTE(review): the whole entity is passed as the key here —
                // presumably the mapper extracts the pk; confirm against the
                // tk.mybatis Mapper contract.
                menuMapper.deleteByPrimaryKey(deleteMenu);
            }
        }
    }

    /**
     * Menus are currently limited to a two-level structure, so every
     * directory's parent must be the top menu being saved.
     */
    private void validate(String code, List<MenuDTO> menus) {
        menus.forEach(m -> {
            String parentCode = m.getParentCode();
            if (MenuType.isMenu(m.getType()) && !code.equals(parentCode)) {
                throw new CommonException("error.menu.illegal.parent.code", m.getCode());
            }
        });
    }

    /**
     * Lists every menu row.
     */
    @Override
    public List<MenuDTO> list() {
        return menuMapper.selectAll();
    }

    /**
     * Traces a custom directory back to its root (top) menu.
     * NOTE(review): if a parentCode chain is broken, selectOne returns null and
     * the subsequent getType() call would throw — verify data integrity upstream.
     *
     * @param menuDTO the custom directory
     * @return the root menu
     */
    private MenuDTO getTopMenu(MenuDTO menuDTO) {
        if (MenuType.isTop(menuDTO.getType())) {
            return menuDTO;
        }
        MenuDTO result = new MenuDTO();
        result.setCode(menuDTO.getParentCode());
        result = menuMapper.selectOne(result);
        if (!MenuType.isTop(result.getType())) {
            result = getTopMenu(result);
        }
        return result;
    }

    /**
     * Queries the menu list for a resource level.
     *
     * @param level the resource level
     * @return the menu list
     */
    private List<MenuDTO> getMenuByResourceLevel(String level) {
        MenuDTO menuDTO = new MenuDTO();
        menuDTO.setResourceLevel(level);
        return menuMapper.select(menuDTO);
    }

    /**
     * Flattens a menu tree into a list.
     *
     * @param menus the menu tree
     * @return the flattened list
     */
    private List<MenuDTO> menuTreeToList(List<MenuDTO> menus) {
        List<MenuDTO> menuList = new ArrayList<>();
        if (!CollectionUtils.isEmpty(menus)) {
            doProcessMenu(menus, menuList);
        }
        return menuList;
    }

    /**
     * Recursively collects tree nodes into the flat list.
     *
     * @param menus    the (sub)tree being walked
     * @param menuList the accumulator
     */
    private void doProcessMenu(List<MenuDTO> menus, List<MenuDTO> menuList) {
        for (MenuDTO menuDTO : menus) {
            menuList.add(menuDTO);
            if (menuDTO.getSubMenus() != null) {
                doProcessMenu(menuDTO.getSubMenus(), menuList);
            }
        }
    }

    /**
     * Converts the flat menu set into a tree rooted at parentMenu.
     * Case 1: user menus hide empty directories.
     * Case 2: menu configuration shows empty directories.
     *
     * @param parentMenu      the parent menu node
     * @param menus           the complete flat menu set
     * @param isShowEmptyMenu whether to keep empty directories
     */
    private void toTreeMenu(MenuDTO parentMenu, Set<MenuDTO> menus, Boolean isShowEmptyMenu) {
        String code = parentMenu.getCode();
        List<MenuDTO> subMenus = new ArrayList<>();
        for (MenuDTO menu : menus) {
            if (code.equalsIgnoreCase(menu.getParentCode())) {
                // a leaf menu item goes straight into the parent's children
                if (MenuType.isMenuItem(menu.getType())) {
                    subMenus.add(menu);
                }
                if (MenuType.isMenu(menu.getType())) {
                    toTreeMenu(menu, menus, isShowEmptyMenu);
                    if (isShowEmptyMenu) {
                        subMenus.add(menu);
                    } else {
                        // keep a directory only if it still has children (drop empty ones)
                        if (!CollectionUtils.isEmpty(menu.getSubMenus())) {
                            subMenus.add(menu);
                        }
                    }
                }
            }
        }
        if (CollectionUtils.isEmpty(subMenus)) {
            parentMenu.setSubMenus(null);
        } else {
            subMenus.sort(Comparator.comparing(MenuDTO::getSort));
            parentMenu.setSubMenus(subMenus);
        }
    }

    /**
     * Validates a menu payload: a code is required and must be unique.
     */
    @Override
    public void check(MenuDTO menu) {
        if (StringUtils.isEmpty(menu.getCode())) {
            throw new CommonException("error.menu.code.empty");
        }
        checkCode(menu);
    }

    /**
     * Uniqueness check on the menu code — on create, no row may exist with the
     * code; on update, a row with the code may only be the menu itself.
     */
    private void checkCode(MenuDTO menu) {
        boolean createCheck = menu.getId() == null;
        MenuDTO dto = new MenuDTO();
        dto.setCode(menu.getCode());
        if (createCheck) {
            if (!menuMapper.select(dto).isEmpty()) {
                throw new CommonException("error.menu.code-level-type.exist");
            }
        } else {
            Long id = menu.getId();
            MenuDTO menuDTO = menuMapper.selectOne(dto);
            Boolean existed = menuDTO != null && !id.equals(menuDTO.getId());
            if (existed) {
                throw new CommonException("error.menu.code-level-type.exist");
            }
        }
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/SimplifiedUserDTO.java<|end_filename|>
package io.choerodon.iam.api.dto;
import io.swagger.annotations.ApiModelProperty;
/**
* @author Eugen
*/
/**
 * Minimal user projection exposing only id, login name, display name and avatar.
 *
 * @author Eugen
 */
public class SimplifiedUserDTO {

    @ApiModelProperty(value = "userId")
    private Long id;
    @ApiModelProperty(value = "登录名")
    private String loginName;
    @ApiModelProperty(value = "用户名")
    private String realName;
    @ApiModelProperty(value = "头像")
    private String imageUrl;

    /** @return the user's primary key */
    public Long getId() {
        return id;
    }

    /** @param id the user's primary key */
    public void setId(Long id) {
        this.id = id;
    }

    /** @return the avatar image URL */
    public String getImageUrl() {
        return imageUrl;
    }

    /** @param imageUrl the avatar image URL */
    public void setImageUrl(String imageUrl) {
        this.imageUrl = imageUrl;
    }

    /** @return the login name */
    public String getLoginName() {
        return loginName;
    }

    /** @param loginName the login name */
    public void setLoginName(String loginName) {
        this.loginName = loginName;
    }

    /** @return the user's display name */
    public String getRealName() {
        return realName;
    }

    /** @param realName the user's display name */
    public void setRealName(String realName) {
        this.realName = realName;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/asserts/LabelAssertHelper.java<|end_filename|>
package io.choerodon.iam.infra.asserts;
import io.choerodon.iam.infra.dto.LabelDTO;
import io.choerodon.iam.infra.exception.NotExistedException;
import io.choerodon.iam.infra.mapper.LabelMapper;
import org.springframework.stereotype.Component;
/**
* label断言类
*
* @author superlee
* @since 2019-07-15
*/
@Component
public class LabelAssertHelper extends AssertHelper {

    private LabelMapper labelMapper;

    public LabelAssertHelper(LabelMapper labelMapper) {
        this.labelMapper = labelMapper;
    }

    /**
     * Asserts that the label exists, using the default error message.
     *
     * @param id the label's primary key
     * @return the label
     */
    public LabelDTO labelNotExisted(Long id) {
        return labelNotExisted(id, "error.label.not.exist");
    }

    /**
     * Asserts that the label exists.
     *
     * @param id      the label's primary key
     * @param message the error code to raise when the label is missing
     * @return the label
     * @throws NotExistedException when no label exists for the id
     */
    public LabelDTO labelNotExisted(Long id, String message) {
        final LabelDTO label = labelMapper.selectByPrimaryKey(id);
        if (label != null) {
            return label;
        }
        throw new NotExistedException(message);
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/common/utils/LocaleUtils.java<|end_filename|>
package io.choerodon.iam.infra.common.utils;
import io.choerodon.core.oauth.CustomUserDetails;
import io.choerodon.core.oauth.DetailsHelper;
import java.util.Locale;
/**
 * Utility for resolving the current user's {@link Locale}.
 */
public class LocaleUtils {

    private LocaleUtils() {
    }

    /**
     * Resolves the Locale of the current user.
     * <p>
     * The user's language must follow the "language_COUNTRY" format
     * (e.g. en_US, zh_CN), separated by an underscore.
     *
     * @return the user's Locale, or {@code Locale.SIMPLIFIED_CHINESE} when no
     *         user details are available or the language string is missing
     *         or malformed
     */
    public static Locale locale() {
        CustomUserDetails details = DetailsHelper.getUserDetails();
        if (details == null) {
            return Locale.SIMPLIFIED_CHINESE;
        }
        String language = details.getLanguage();
        // Guard against a null language to avoid a NullPointerException on split().
        if (language == null) {
            return Locale.SIMPLIFIED_CHINESE;
        }
        String[] arr = language.split("_");
        if (arr.length != 2) {
            return Locale.SIMPLIFIED_CHINESE;
        } else {
            String lang = arr[0];
            String country = arr[1];
            return new Locale.Builder().setLanguage(lang).setRegion(country).build();
        }
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/LdapServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import java.net.MalformedURLException;
import java.net.UnknownHostException;
import java.util.*;
import java.util.regex.Pattern;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.core.ldap.DirectoryType;
import io.choerodon.iam.api.dto.LdapAccountDTO;
import io.choerodon.iam.api.dto.LdapConnectionDTO;
import io.choerodon.iam.infra.asserts.LdapAssertHelper;
import io.choerodon.iam.infra.asserts.OrganizationAssertHelper;
import io.choerodon.iam.infra.dto.LdapDTO;
import io.choerodon.iam.infra.dto.LdapErrorUserDTO;
import io.choerodon.iam.infra.dto.LdapHistoryDTO;
import io.choerodon.iam.infra.enums.LdapSyncType;
import io.choerodon.iam.infra.exception.InsertException;
import io.choerodon.iam.infra.exception.NotExistedException;
import io.choerodon.iam.infra.exception.UpdateExcetion;
import io.choerodon.iam.infra.mapper.LdapHistoryMapper;
import io.choerodon.iam.infra.mapper.LdapMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ldap.AuthenticationException;
import org.springframework.ldap.CommunicationException;
import org.springframework.ldap.InvalidNameException;
import org.springframework.ldap.UncategorizedLdapException;
import org.springframework.ldap.core.AttributesMapper;
import org.springframework.ldap.core.DirContextOperations;
import org.springframework.ldap.core.LdapTemplate;
import org.springframework.ldap.core.support.AbstractContextMapper;
import org.springframework.ldap.core.support.LdapContextSource;
import org.springframework.ldap.filter.AndFilter;
import org.springframework.ldap.filter.EqualsFilter;
import org.springframework.ldap.filter.Filter;
import org.springframework.ldap.query.SearchScope;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.api.validator.LdapValidator;
import io.choerodon.iam.app.service.LdapService;
import io.choerodon.iam.infra.common.utils.LocaleUtils;
import io.choerodon.iam.infra.common.utils.ldap.LdapSyncUserTask;
import io.choerodon.iam.infra.factory.MessageSourceFactory;
import io.choerodon.iam.infra.mapper.LdapErrorUserMapper;
import org.springframework.context.MessageSource;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.directory.Attributes;
import static io.choerodon.iam.infra.asserts.LdapAssertHelper.WhichColumn;
import static org.springframework.ldap.query.LdapQueryBuilder.query;
/**
* @author wuguokai
*/
@Service
public class LdapServiceImpl implements LdapService {
    // Resource-bundle base name used to localize LDAP sync error causes.
    private static final String LDAP_ERROR_USER_MESSAGE_DIR = "classpath:messages/messages";
    // A custom filter must be fully wrapped in parentheses, e.g. "(mail=*)".
    private static final String REGEX = "\\(.*\\)";
    // Keys of the map returned by testConnect(LdapDTO).
    public static final String LDAP_CONNECTION_DTO = "ldapConnectionDTO";
    public static final String LDAP_TEMPLATE = "ldapTemplate";
    private static final String OBJECT_CLASS = "objectclass";
    private static final Logger LOGGER = LoggerFactory.getLogger(LdapServiceImpl.class);
    private LdapSyncUserTask ldapSyncUserTask;
    // Callback invoked when an LDAP synchronization run completes.
    private LdapSyncUserTask.FinishFallback finishFallback;
    private LdapErrorUserMapper ldapErrorUserMapper;
    private OrganizationAssertHelper organizationAssertHelper;
    private LdapAssertHelper ldapAssertHelper;
    private LdapMapper ldapMapper;
    private LdapHistoryMapper ldapHistoryMapper;

    public LdapServiceImpl(OrganizationAssertHelper organizationAssertHelper,
                           LdapAssertHelper ldapAssertHelper,
                           LdapMapper ldapMapper,
                           LdapSyncUserTask ldapSyncUserTask,
                           LdapSyncUserTask.FinishFallback finishFallback,
                           LdapErrorUserMapper ldapErrorUserMapper,
                           LdapHistoryMapper ldapHistoryMapper) {
        this.ldapSyncUserTask = ldapSyncUserTask;
        this.finishFallback = finishFallback;
        this.ldapErrorUserMapper = ldapErrorUserMapper;
        this.organizationAssertHelper = organizationAssertHelper;
        this.ldapMapper = ldapMapper;
        this.ldapAssertHelper = ldapAssertHelper;
        this.ldapHistoryMapper = ldapHistoryMapper;
    }
@Override
public LdapDTO create(Long orgId, LdapDTO ldapDTO) {
organizationAssertHelper.organizationNotExisted(orgId);
ldapDTO.setOrganizationId(orgId);
validateLdap(ldapDTO);
if (ldapMapper.insertSelective(ldapDTO) != 1) {
throw new InsertException("error.ldap.insert");
}
return ldapMapper.selectByPrimaryKey(ldapDTO);
}
private void validateLdap(LdapDTO ldapDTO) {
String customFilter = ldapDTO.getCustomFilter();
if (!StringUtils.isEmpty(customFilter) && !Pattern.matches(REGEX, customFilter)) {
throw new CommonException("error.ldap.customFilter");
}
if (ldapDTO.getSagaBatchSize() < 1) {
ldapDTO.setSagaBatchSize(1);
}
if (ldapDTO.getConnectionTimeout() < 1) {
throw new CommonException("error.ldap.connectionTimeout");
}
}
    /**
     * Updates an LDAP configuration after validating the payload and asserting
     * that both the organization and the configuration exist.
     *
     * @param organizationId the owning organization's id
     * @param id             the configuration's primary key
     * @param ldapDTO        the new values
     * @return the updated configuration, re-read from the database
     */
    @Override
    public LdapDTO update(Long organizationId, Long id, LdapDTO ldapDTO) {
        ldapDTO.setId(id);
        validateLdap(ldapDTO);
        organizationAssertHelper.organizationNotExisted(organizationId);
        ldapAssertHelper.ldapNotExisted(WhichColumn.ID, id);
        return doUpdate(ldapDTO);
    }
private LdapDTO doUpdate(LdapDTO ldapDTO) {
if (ldapMapper.updateByPrimaryKey(ldapDTO) != 1) {
throw new UpdateExcetion("error.ldap.update");
}
return ldapMapper.selectByPrimaryKey(ldapDTO.getId());
}
    /**
     * Returns the LDAP configuration of an organization; both the organization
     * and its configuration must exist.
     *
     * @param orgId the organization's id
     * @return the organization's LDAP configuration
     */
    @Override
    public LdapDTO queryByOrganizationId(Long orgId) {
        organizationAssertHelper.organizationNotExisted(orgId);
        return ldapAssertHelper.ldapNotExisted(WhichColumn.ORGANIZATION_ID, orgId);
    }
    /**
     * Deletes an LDAP configuration; both the organization and the
     * configuration must exist.
     *
     * @param orgId the owning organization's id
     * @param id    the configuration's primary key
     */
    @Override
    public void delete(Long orgId, Long id) {
        organizationAssertHelper.organizationNotExisted(orgId);
        ldapAssertHelper.ldapNotExisted(WhichColumn.ID, id);
        ldapMapper.deleteByPrimaryKey(id);
    }
    /**
     * Tests connectivity/authentication of an existing LDAP configuration
     * using the supplied account credentials.
     *
     * @param organizationId the organization that must own the configuration
     * @param id             the configuration's primary key
     * @param ldapAccount    the account/password to test with
     * @return the connection test outcome
     */
    @Override
    public LdapConnectionDTO testConnect(Long organizationId, Long id, LdapAccountDTO ldapAccount) {
        organizationAssertHelper.organizationNotExisted(organizationId);
        LdapDTO ldap = ldapAssertHelper.ldapNotExisted(WhichColumn.ID, id);
        if (!organizationId.equals(ldap.getOrganizationId())) {
            throw new CommonException("error.organization.not.has.ldap", organizationId, id);
        }
        ldap.setAccount(ldapAccount.getAccount());
        ldap.setPassword(ldapAccount.getPassword());
        return (LdapConnectionDTO) testConnect(ldap).get(LDAP_CONNECTION_DTO);
    }
    /**
     * Tests an LDAP configuration and returns both the outcome and the
     * {@link LdapTemplate} used, keyed by {@link #LDAP_CONNECTION_DTO} and
     * {@link #LDAP_TEMPLATE}.
     *
     * @param ldapDTO the configuration to test
     * @return a two-entry map with the connection result and the template
     */
    @Override
    public Map<String, Object> testConnect(LdapDTO ldapDTO) {
        LdapValidator.validate(ldapDTO);
        boolean anonymous = StringUtils.isEmpty(ldapDTO.getAccount()) || StringUtils.isEmpty(ldapDTO.getPassword());
        LdapConnectionDTO ldapConnectionDTO = new LdapConnectionDTO();
        Map<String, Object> returnMap = new HashMap<>(2);
        LdapTemplate ldapTemplate = initLdapTemplate(ldapDTO, anonymous);
        returnMap.put(LDAP_TEMPLATE, ldapTemplate);
        // By default the account is used as the userDn. If that login fails, fetch
        // from the LDAP server the userDn whose loginNameField equals the account,
        // then authenticate with the returned userDn instead.
        accountAsUserDn(ldapDTO, ldapConnectionDTO, ldapTemplate);
        // The supplied account could not log in: fetch the userDn from the server
        // (handles e.g. hand-rolled LDAP setups).
        if (!anonymous && ldapConnectionDTO.getCanConnectServer() && !ldapConnectionDTO.getCanLogin()) {
            returnMap.put(LDAP_TEMPLATE, fetchUserDn2Authenticate(ldapDTO, ldapConnectionDTO));
        }
        returnMap.put(LDAP_CONNECTION_DTO, ldapConnectionDTO);
        return returnMap;
    }
    /**
     * Kicks off an asynchronous LDAP user synchronization after verifying that
     * the server is reachable, the account can log in, and the attribute
     * mapping matches.
     *
     * @param organizationId the owning organization's id
     * @param id             the LDAP configuration's primary key
     */
    @Override
    public void syncLdapUser(Long organizationId, Long id) {
        LdapDTO ldap = validateLdap(organizationId, id);
        Map<String, Object> map = testConnect(ldap);
        LdapConnectionDTO ldapConnectionDTO =
                (LdapConnectionDTO) map.get(LDAP_CONNECTION_DTO);
        if (!ldapConnectionDTO.getCanConnectServer()) {
            throw new CommonException("error.ldap.connect");
        }
        if (!ldapConnectionDTO.getCanLogin()) {
            throw new CommonException("error.ldap.authenticate");
        }
        if (!ldapConnectionDTO.getMatchAttribute()) {
            throw new CommonException("error.ldap.attribute.match");
        }
        LdapTemplate ldapTemplate = (LdapTemplate) map.get(LdapServiceImpl.LDAP_TEMPLATE);
        ldapSyncUserTask.syncLDAPUser(ldapTemplate, ldap, LdapSyncType.SYNC.value(), finishFallback);
    }
    /**
     * Loads and validates the LDAP configuration of an organization.
     *
     * @param organizationId the organization's id (must exist)
     * @param id             the configuration's primary key (must exist)
     * @return the validated configuration
     */
    @Override
    public LdapDTO validateLdap(Long organizationId, Long id) {
        organizationAssertHelper.organizationNotExisted(organizationId);
        LdapDTO ldap = ldapAssertHelper.ldapNotExisted(WhichColumn.ID, id);
        LdapValidator.validate(ldap);
        return ldap;
    }
@Override
public LdapHistoryDTO queryLatestHistory(Long ldapId) {
LdapHistoryDTO example = new LdapHistoryDTO();
example.setLdapId(ldapId);
List<LdapHistoryDTO> ldapHistoryList = ldapHistoryMapper.select(example);
if (ldapHistoryList.isEmpty()) {
return null;
} else {
ldapHistoryList.sort(Comparator.comparing(LdapHistoryDTO::getId).reversed());
return ldapHistoryList.get(0);
}
}
@Override
public LdapDTO enableLdap(Long organizationId, Long id) {
return updateEnabled(organizationId, id, true);
}
private LdapDTO updateEnabled(Long organizationId, Long id, Boolean enabled) {
LdapDTO dto = ldapAssertHelper.ldapNotExisted(WhichColumn.ID, id);
if (!dto.getOrganizationId().equals(organizationId)) {
throw new CommonException("error.ldap.organizationId.not.match");
}
dto.setEnabled(enabled);
return doUpdate(dto);
}
@Override
public LdapDTO disableLdap(Long organizationId, Long id) {
return updateEnabled(organizationId, id, false);
}
@Override
public LdapHistoryDTO stop(Long id) {
LdapHistoryDTO ldapHistoryDTO = queryLatestHistory(id);
if (ldapHistoryDTO == null) {
throw new NotExistedException("error.ldapHistory.not.exist");
}
ldapHistoryDTO.setSyncEndTime(new Date(System.currentTimeMillis()));
if (ldapHistoryMapper.updateByPrimaryKeySelective(ldapHistoryDTO) != 1) {
throw new UpdateExcetion("error.ldapHistory.update");
}
return ldapHistoryMapper.selectByPrimaryKey(ldapHistoryDTO.getId());
}
@Override
public PageInfo<LdapHistoryDTO> pagingQueryHistories(PageRequest pageRequest, Long ldapId) {
return PageHelper
.startPage(pageRequest.getPage(), pageRequest.getSize())
.doSelectPageInfo(() -> ldapHistoryMapper.selectAllEnd(ldapId));
}
@Override
public PageInfo<LdapErrorUserDTO> pagingQueryErrorUsers(PageRequest pageRequest, Long ldapHistoryId, LdapErrorUserDTO ldapErrorUserDTO) {
int page = pageRequest.getPage();
int size = pageRequest.getSize();
PageInfo<LdapErrorUserDTO> result =
PageHelper
.startPage(page, size)
.doSelectPageInfo(() -> ldapErrorUserMapper.fuzzyQuery(ldapHistoryId, ldapErrorUserDTO));
//cause国际化处理
List<LdapErrorUserDTO> errorUsers = result.getList();
MessageSource messageSource = MessageSourceFactory.create(LDAP_ERROR_USER_MESSAGE_DIR);
Locale locale = LocaleUtils.locale();
errorUsers.forEach(errorUser -> {
String cause = errorUser.getCause();
errorUser.setCause(messageSource.getMessage(cause, null, locale));
});
return result;
}
    private LdapTemplate fetchUserDn2Authenticate(LdapDTO ldapDTO, LdapConnectionDTO ldapConnectionDTO) {
        // Bind anonymously first to search for the DN whose login-name attribute
        // matches the configured account; if exactly one DN is found, re-bind
        // with that DN + password and verify the attribute mapping.
        // Returns the authenticated template, or null on any failure.
        LdapContextSource contextSource = new LdapContextSource();
        String url = ldapDTO.getServerAddress() + ":" + ldapDTO.getPort();
        int connectionTimeout = ldapDTO.getConnectionTimeout();
        contextSource.setUrl(url);
        contextSource.setBase(ldapDTO.getBaseDn());
        contextSource.setAnonymousReadOnly(true);
        setConnectionTimeout(contextSource, connectionTimeout);
        contextSource.afterPropertiesSet();
        LdapTemplate ldapTemplate = new LdapTemplate(contextSource);
        // Active Directory referrals would otherwise surface as
        // PartialResultException and abort the search.
        if (DirectoryType.MICROSOFT_ACTIVE_DIRECTORY.value().equals(ldapDTO.getDirectoryType())) {
            ldapTemplate.setIgnorePartialResultException(true);
        }
        String userDn = null;
        Filter filter = getFilterByObjectClassAndAttribute(ldapDTO);
        try {
            // Collect the distinguished names of all entries matching the filter.
            List<String> names =
                    ldapTemplate.search(
                            query()
                                    .searchScope(SearchScope.SUBTREE)
                                    .filter(filter),
                            new AbstractContextMapper() {
                                @Override
                                protected Object doMapFromContext(DirContextOperations ctx) {
                                    return ctx.getNameInNamespace();
                                }
                            });
            // Only an unambiguous (single) match is usable as the bind DN.
            if (names.size() == 1) {
                userDn = names.get(0);
            }
        } catch (UncategorizedLdapException e) {
            if (e.getRootCause() instanceof NamingException) {
                LOGGER.warn("baseDn or userDn may be wrong!");
            }
            LOGGER.warn("uncategorized ldap exception {}", e);
        } catch (Exception e) {
            LOGGER.warn("can not find anything while filter is {}, exception {}", filter, e);
        }
        if (userDn == null) {
            LOGGER.error("can not find anything or find more than one userDn while filter is {}, login failed", filter);
            return null;
        } else {
            // Switch the context source to authenticated mode and try to read
            // attributes, which both proves the login and checks the mapping.
            contextSource.setAnonymousReadOnly(false);
            contextSource.setUserDn(userDn);
            contextSource.setPassword(ldapDTO.getPassword());
            ldapConnectionDTO.setCanLogin(false);
            ldapConnectionDTO.setMatchAttribute(false);
            try {
                LdapTemplate newLdapTemplate = new LdapTemplate(contextSource);
                matchAttributes(ldapDTO, ldapConnectionDTO, newLdapTemplate);
                ldapConnectionDTO.setCanLogin(true);
                return newLdapTemplate;
            } catch (InvalidNameException | AuthenticationException e) {
                LOGGER.error("userDn = {} or password is invalid, login failed, exception: {}", userDn, e);
                return null;
            } catch (Exception e) {
                LOGGER.error("unexpected exception: {} ", e);
                return null;
            }
        }
    }
private Filter getFilterByObjectClassAndAttribute(LdapDTO ldapDTO) {
String account = ldapDTO.getAccount();
AndFilter andFilter = getAndFilterByObjectClass(ldapDTO);
andFilter.and(new EqualsFilter(ldapDTO.getLoginNameField(), account));
return andFilter;
}
private AndFilter getAndFilterByObjectClass(LdapDTO ldapDO) {
String objectClass = ldapDO.getObjectClass();
String[] arr = objectClass.split(",");
AndFilter andFilter = new AndFilter();
for (String str : arr) {
andFilter.and(new EqualsFilter(OBJECT_CLASS, str));
}
return andFilter;
}
private void setConnectionTimeout(LdapContextSource contextSource, int connectionTimeout) {
Map<String, Object> environment = new HashMap<>(1);
//设置ldap服务器连接超时时间为10s
environment.put("com.sun.jndi.ldap.connect.timeout", String.valueOf(connectionTimeout * 1000));
contextSource.setBaseEnvironmentProperties(environment);
}
private LdapTemplate initLdapTemplate(LdapDTO ldapDTO, boolean anonymous) {
LdapContextSource contextSource = new LdapContextSource();
String url = ldapDTO.getServerAddress() + ":" + ldapDTO.getPort();
int connectionTimeout = ldapDTO.getConnectionTimeout();
contextSource.setUrl(url);
contextSource.setBase(ldapDTO.getBaseDn());
setConnectionTimeout(contextSource, connectionTimeout);
if (!anonymous) {
contextSource.setUserDn(ldapDTO.getAccount());
contextSource.setPassword(ldapDTO.getPassword());
} else {
contextSource.setAnonymousReadOnly(true);
}
contextSource.afterPropertiesSet();
return new LdapTemplate(contextSource);
}
    private void accountAsUserDn(LdapDTO ldapDTO, LdapConnectionDTO ldapConnectionDTO, LdapTemplate ldapTemplate) {
        // First attempt: treat the configured account directly as the bind DN.
        // Flags on ldapConnectionDTO are set pessimistically up front and
        // upgraded as each stage succeeds; the catch blocks refine them based
        // on the root cause of the failure.
        try {
            if (DirectoryType.MICROSOFT_ACTIVE_DIRECTORY.value().equals(ldapDTO.getDirectoryType())) {
                // AD referrals would otherwise raise PartialResultException.
                ldapTemplate.setIgnorePartialResultException(true);
            }
            ldapConnectionDTO.setCanConnectServer(false);
            ldapConnectionDTO.setCanLogin(false);
            ldapConnectionDTO.setMatchAttribute(false);
            // Log in with the admin account and query objects of the configured
            // objectClass to verify the attribute mapping.
            matchAttributes(ldapDTO, ldapConnectionDTO, ldapTemplate);
            ldapConnectionDTO.setCanConnectServer(true);
            ldapConnectionDTO.setCanLogin(true);
        } catch (InvalidNameException | AuthenticationException e) {
            // Server was reachable but the DN or credentials were rejected.
            if (e.getRootCause() instanceof javax.naming.InvalidNameException
                    || e.getRootCause() instanceof javax.naming.AuthenticationException) {
                ldapConnectionDTO.setCanConnectServer(true);
                ldapConnectionDTO.setCanLogin(false);
            }
            LOGGER.warn("can not login when using account as userDn, so fetch userDn from ldap server, exception {}", e);
        } catch (UncategorizedLdapException | CommunicationException e) {
            if (e.getRootCause() instanceof MalformedURLException
                    || e.getRootCause() instanceof UnknownHostException) {
                // The ldap server itself could not be reached.
                ldapConnectionDTO.setCanConnectServer(false);
                ldapConnectionDTO.setCanLogin(false);
            }
            LOGGER.error("connect to ldap server failed, exception: {}", e);
        } catch (Exception e) {
            ldapConnectionDTO.setCanConnectServer(false);
            ldapConnectionDTO.setCanLogin(false);
            LOGGER.error("connect to ldap server failed, exception: {}", e);
        }
    }
private void matchAttributes(LdapDTO ldapDTO, LdapConnectionDTO ldapConnectionDTO, LdapTemplate ldapTemplate) {
Map<String, String> attributeMap = initAttributeMap(ldapDTO);
Filter filter = getAndFilterByObjectClass(ldapDTO);
List<Attributes> attributesList =
ldapTemplate.search(
query()
.searchScope(SearchScope.SUBTREE)
.countLimit(100).filter(filter),
new AttributesMapper<Attributes>() {
@Override
public Attributes mapFromAttributes(Attributes attributes) throws NamingException {
return attributes;
}
});
if (attributesList.isEmpty()) {
LOGGER.warn("can not get any attributes while the filter is {}", filter);
ldapConnectionDTO.setLoginNameField(ldapDTO.getLoginNameField());
ldapConnectionDTO.setRealNameField(ldapDTO.getRealNameField());
ldapConnectionDTO.setPhoneField(ldapDTO.getPhoneField());
ldapConnectionDTO.setEmailField(ldapDTO.getEmailField());
} else {
Set<String> keySet = new HashSet<>();
for (Attributes attributes : attributesList) {
NamingEnumeration<String> attributesIDs = attributes.getIDs();
while (attributesIDs != null && attributesIDs.hasMoreElements()) {
keySet.add(attributesIDs.nextElement());
}
}
fullMathAttribute(ldapConnectionDTO, attributeMap, keySet);
}
}
private Map<String, String> initAttributeMap(LdapDTO ldap) {
Map<String, String> attributeMap = new HashMap<>(10);
attributeMap.put(LdapDTO.GET_LOGIN_NAME_FIELD, ldap.getLoginNameField());
attributeMap.put(LdapDTO.GET_REAL_NAME_FIELD, ldap.getRealNameField());
attributeMap.put(LdapDTO.GET_EMAIL_FIELD, ldap.getEmailField());
attributeMap.put(LdapDTO.GET_PHONE_FIELD, ldap.getPhoneField());
attributeMap.put(LdapDTO.GET_UUID_FIELD, ldap.getUuidField());
return attributeMap;
}
private void fullMathAttribute(LdapConnectionDTO ldapConnectionDTO, Map<String, String> attributeMap, Set<String> keySet) {
boolean match = true;
for (Map.Entry<String, String> entry : attributeMap.entrySet()) {
String key = entry.getKey();
String value = entry.getValue();
if (value != null && !keySet.contains(value)) {
match = false;
ldapConnectionDTO.fullFields(key, value);
}
}
ldapConnectionDTO.setMatchAttribute(match);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/OrganizationMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import java.util.List;
import java.util.Set;
import io.choerodon.iam.api.dto.OrgSharesDTO;
import io.choerodon.iam.infra.dto.OrganizationDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import io.choerodon.iam.api.dto.OrganizationSimplifyDTO;
/**
 * Mapper for the fd_organization table plus custom multi-table queries.
 *
 * @author wuguokai
 */
public interface OrganizationMapper extends Mapper<OrganizationDTO> {

    /**
     * Fuzzy search organizations by the example DTO's fields and a free-text param.
     */
    List<OrganizationDTO> fulltextSearch(@Param("organization") OrganizationDTO organization,
                                         @Param("param") String param);

    /**
     * Organizations in which the member has a role (via member-role records).
     */
    List<OrganizationDTO> selectFromMemberRoleByMemberId(@Param("memberId") Long memberId,
                                                         @Param("includedDisabled") Boolean includedDisabled);

    /**
     * Organizations reachable by the member either directly or through projects.
     */
    List<OrganizationDTO> selectOrgByUserAndPros(@Param("memberId") Long memberId,
                                                 @Param("includedDisabled") Boolean includedDisabled);

    /** All organizations, each carrying its enabled projects. */
    List<OrganizationDTO> selectAllWithEnabledProjects();

    /**
     * Organizations with the user's roles attached, windowed by start/size.
     */
    List<OrganizationDTO> selectOrganizationsWithRoles(
            @Param("id") Long id,
            @Param("start") Integer start,
            @Param("size") Integer size,
            @Param("params") String params);

    /** Organizations of one user, filtered by an example DTO and fuzzy params. */
    List<OrganizationDTO> selectOrganizationsByUserId(@Param("userId") Long userId,
                                                      @Param("organizationDTO") OrganizationDTO organizationDTO,
                                                      @Param("params") String params);

    /** Ids of the members of an organization (by id and/or name). */
    List<Long> listMemberIds(@Param("orgId") Long orgId,
                             @Param("orgName") String orgName);

    /** Whether the organization with the given id is enabled. */
    Boolean organizationEnabled(@Param("sourceId") Long sourceId);

    /** Organizations whose ids are in the given set. */
    List<OrganizationDTO> selectByIds(@Param("ids") Set<Long> ids);

    /**
     * Fetch {id, name} of all organizations.
     *
     * @return list of organization {id, name} pairs
     */
    List<OrganizationSimplifyDTO> selectAllOrgIdAndName();

    /**
     * Fetch brief info of the organizations within the specified id range.
     *
     * @param orgIds  the organization ids to restrict to
     * @param name    organization name query parameter
     * @param code    organization code query parameter
     * @param enabled enabled/disabled query parameter
     * @param params  global fuzzy-search query parameter
     * @return the matching organizations
     */
    List<OrgSharesDTO> selectSpecified(@Param("orgIds") Set<Long> orgIds,
                                       @Param("name") String name,
                                       @Param("code") String code,
                                       @Param("enabled") Boolean enabled,
                                       @Param("params") String params);
}
<|start_filename|>react/src/app/iam/stores/organization/application/index.js<|end_filename|>
/**
 * Created by <NAME> on 2019/3/7.
 * Re-exports the organization ApplicationStore as this module's default export.
 */
import ApplicationStore from './ApplicationStore';

export default ApplicationStore;
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/LoginHistoryMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.LoginHistoryDTO;
import io.choerodon.mybatis.common.Mapper;
/**
 * Plain CRUD mapper for login-history records; no custom queries.
 *
 * @author wuguokai
 */
public interface LoginHistoryMapper extends Mapper<LoginHistoryDTO> {
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/RolePermissionMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.RolePermissionDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * Mapper for role-permission associations.
 *
 * @author wuguokai
 */
public interface RolePermissionMapper extends Mapper<RolePermissionDTO> {

    /**
     * Ids of the permissions already attached to any of the given roles.
     */
    List<Long> queryExistingPermissionIdsByRoleIds(@Param("list") List<Long> roleIds);
}
<|start_filename|>react/src/app/iam/stores/global/system-setting/index.js<|end_filename|>
// Re-exports the global SystemSettingStore as this module's default export.
import SystemSettingStore from './SystemSettingStore';
export default SystemSettingStore;
<|start_filename|>src/main/java/io/choerodon/iam/infra/dto/OrganizationDTO.java<|end_filename|>
package io.choerodon.iam.infra.dto;
import java.util.Date;
import java.util.List;
import javax.persistence.*;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.Pattern;
import javax.validation.constraints.Size;
import io.swagger.annotations.ApiModelProperty;
import io.choerodon.mybatis.entity.BaseDTO;
/**
 * Entity for the fd_organization table, with transient fields used to carry
 * query results (projects, roles, owner info) that are not persisted.
 *
 * @author superlee
 * @since 2019-04-22
 */
@Table(name = "fd_organization")
public class OrganizationDTO extends BaseDTO {

    // Lowercase alphanumeric code; hyphens allowed but not doubled or trailing.
    private static final String CODE_REGULAR_EXPRESSION = "^[a-z](([a-z0-9]|-(?!-))*[a-z0-9])*$";

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @ApiModelProperty(value = "主键/非必填")
    private Long id;

    @ApiModelProperty(value = "组织名/必填")
    @NotEmpty(message = "error.organization.name.empty")
    @Size(min = 1, max = 32, message = "error.organization.name.size")
    private String name;

    @ApiModelProperty(value = "组织编码/必填")
    @NotEmpty(message = "error.code.empty")
    @Pattern(regexp = CODE_REGULAR_EXPRESSION, message = "error.code.illegal")
    @Size(min = 1, max = 15, message = "error.organization.code.size")
    private String code;

    // Id of the user who owns/created the organization.
    private Long userId;

    private String address;

    @ApiModelProperty(value = "组织类别")
    private String category;

    @ApiModelProperty(value = "组织图标url")
    private String imageUrl;

    @Column(name = "is_enabled")
    @ApiModelProperty(value = "是否启用/非必填/默认:true")
    private Boolean enabled;

    @ApiModelProperty(value = "组织官网地址")
    private String homePage;

    @ApiModelProperty(value = "组织规模")
    private Integer scale;

    @ApiModelProperty(value = "组织所在行业")
    private String businessType;

    @ApiModelProperty(value = "邮箱后缀,唯一。注册时必输,数据库非必输")
    private String emailSuffix;

    // ---- Transient, query-result-only fields (not persisted) ----

    @Transient
    private List<ProjectDTO> projects;

    @Transient
    @ApiModelProperty(value = "项目数量")
    private Integer projectCount;

    @Transient
    private List<RoleDTO> roles;

    @Transient
    private String ownerLoginName;

    @Transient
    private String ownerRealName;

    @Transient
    private String ownerPhone;

    @Transient
    private String ownerEmail;

    // NOTE(review): accessor pair is getInto()/setInto() while the field is
    // named isInto — presumably intentional bean naming; confirm before renaming.
    @Transient
    private Boolean isInto = true;

    // Shadows BaseDTO's creationDate so query results can populate it directly.
    @Transient
    private Date creationDate;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public Long getUserId() {
        return userId;
    }

    public void setUserId(Long userId) {
        this.userId = userId;
    }

    public String getAddress() {
        return address;
    }

    public void setAddress(String address) {
        this.address = address;
    }

    public String getImageUrl() {
        return imageUrl;
    }

    public void setImageUrl(String imageUrl) {
        this.imageUrl = imageUrl;
    }

    public Boolean getEnabled() {
        return enabled;
    }

    public void setEnabled(Boolean enabled) {
        this.enabled = enabled;
    }

    public List<ProjectDTO> getProjects() {
        return projects;
    }

    public void setProjects(List<ProjectDTO> projects) {
        this.projects = projects;
    }

    public Integer getProjectCount() {
        return projectCount;
    }

    public void setProjectCount(Integer projectCount) {
        this.projectCount = projectCount;
    }

    public List<RoleDTO> getRoles() {
        return roles;
    }

    public void setRoles(List<RoleDTO> roles) {
        this.roles = roles;
    }

    public String getOwnerLoginName() {
        return ownerLoginName;
    }

    public void setOwnerLoginName(String ownerLoginName) {
        this.ownerLoginName = ownerLoginName;
    }

    public String getOwnerRealName() {
        return ownerRealName;
    }

    public void setOwnerRealName(String ownerRealName) {
        this.ownerRealName = ownerRealName;
    }

    public String getOwnerPhone() {
        return ownerPhone;
    }

    public void setOwnerPhone(String ownerPhone) {
        this.ownerPhone = ownerPhone;
    }

    public String getOwnerEmail() {
        return ownerEmail;
    }

    public void setOwnerEmail(String ownerEmail) {
        this.ownerEmail = ownerEmail;
    }

    public Boolean getInto() {
        return isInto;
    }

    public void setInto(Boolean into) {
        isInto = into;
    }

    @Override
    public Date getCreationDate() {
        return creationDate;
    }

    @Override
    public void setCreationDate(Date creationDate) {
        this.creationDate = creationDate;
    }

    public String getHomePage() {
        return homePage;
    }

    public void setHomePage(String homePage) {
        this.homePage = homePage;
    }

    public String getCategory() {
        return category;
    }

    public void setCategory(String category) {
        this.category = category;
    }

    public Integer getScale() {
        return scale;
    }

    public void setScale(Integer scale) {
        this.scale = scale;
    }

    public String getBusinessType() {
        return businessType;
    }

    public void setBusinessType(String businessType) {
        this.businessType = businessType;
    }

    public String getEmailSuffix() {
        return emailSuffix;
    }

    public void setEmailSuffix(String emailSuffix) {
        this.emailSuffix = emailSuffix;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/ProjectController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import java.util.List;
import java.util.Set;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.infra.dto.ProjectDTO;
import io.choerodon.iam.infra.dto.UserDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.choerodon.swagger.annotation.CustomPageRequest;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.*;
import io.choerodon.core.base.BaseController;
import io.choerodon.core.iam.InitRoleCode;
import io.choerodon.iam.app.service.ProjectService;
import springfox.documentation.annotations.ApiIgnore;
/**
 * REST endpoints for querying and maintaining projects.
 *
 * @author flyleft
 */
@RestController
@RequestMapping(value = "/v1/projects")
public class ProjectController extends BaseController {

    private ProjectService projectService;

    public ProjectController(ProjectService projectService) {
        this.projectService = projectService;
    }

    /**
     * Query a project by id.
     *
     * @param id the id of the project to query
     * @return the matching project
     */
    @Permission(type = ResourceType.PROJECT, roles = {InitRoleCode.PROJECT_OWNER})
    @GetMapping(value = "/{project_id}")
    @ApiOperation(value = "通过id查询项目")
    public ResponseEntity<ProjectDTO> query(@PathVariable(name = "project_id") Long id) {
        return new ResponseEntity<>(projectService.queryProjectById(id), HttpStatus.OK);
    }

    /**
     * Query projects by a set of ids (deduplicated by the Set).
     *
     * @param ids project ids
     * @return the matching projects
     */
    @Permission(permissionWithin = true)
    @ApiOperation(value = "根据id集合查询项目")
    @PostMapping("/ids")
    public ResponseEntity<List<ProjectDTO>> queryByIds(@RequestBody Set<Long> ids) {
        return new ResponseEntity<>(projectService.queryByIds(ids), HttpStatus.OK);
    }

    /**
     * Fuzzy-search the users of a project by loginName/realName, paged.
     */
    @Permission(type = ResourceType.PROJECT, roles = {InitRoleCode.PROJECT_OWNER, InitRoleCode.PROJECT_MEMBER})
    @ApiOperation(value = "分页模糊查询项目下的用户")
    @GetMapping(value = "/{project_id}/users")
    @CustomPageRequest
    public ResponseEntity<PageInfo<UserDTO>> list(@PathVariable(name = "project_id") Long id,
                                                  @ApiIgnore
                                                  @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
                                                  @RequestParam(required = false, name = "id") Long userId,
                                                  @RequestParam(required = false) String email,
                                                  @RequestParam(required = false) String param) {
        return new ResponseEntity<>(projectService.pagingQueryTheUsersOfProject(id, userId, email, pageRequest, param), HttpStatus.OK);
    }

    /**
     * Project-level update. Neither code nor organizationId may be changed,
     * so both are nulled out before delegating to the service.
     */
    @Permission(type = ResourceType.PROJECT, roles = InitRoleCode.PROJECT_OWNER)
    @ApiOperation(value = "修改项目")
    @PutMapping(value = "/{project_id}")
    public ResponseEntity<ProjectDTO> update(@PathVariable(name = "project_id") Long id,
                                             @RequestBody ProjectDTO projectDTO) {
        if (StringUtils.isEmpty(projectDTO.getName())) {
            throw new CommonException("error.project.name.empty");
        }
        // NOTE(review): this checks the NAME length but reports the message key
        // "error.project.code.size" — looks like a copy-paste slip; confirm the
        // key against the message bundles before changing it.
        if (projectDTO.getName().length() < 1 || projectDTO.getName().length() > 32) {
            throw new CommonException("error.project.code.size");
        }
        // Optimistic locking requires the version number from the client.
        if (projectDTO.getObjectVersionNumber() == null) {
            throw new CommonException("error.objectVersionNumber.null");
        }
        projectDTO.setId(id);
        // Project code is not editable.
        projectDTO.setCode(null);
        // Organization id is not editable.
        projectDTO.setOrganizationId(null);
        return new ResponseEntity<>(projectService.update(projectDTO), HttpStatus.OK);
    }

    /**
     * Disable a project.
     */
    @Permission(type = ResourceType.PROJECT, roles = InitRoleCode.PROJECT_OWNER)
    @ApiOperation(value = "禁用项目")
    @PutMapping(value = "/{project_id}/disable")
    public ResponseEntity<ProjectDTO> disableProject(@PathVariable(name = "project_id") Long id) {
        return new ResponseEntity<>(projectService.disableProject(id), HttpStatus.OK);
    }

    /**
     * Internal check on whether a project code is available.
     */
    @Permission(permissionWithin = true)
    @GetMapping(value = "/check/{code}")
    public ResponseEntity<Boolean> checkProjCode(@PathVariable(name = "code") String code) {
        return new ResponseEntity<>(projectService.checkProjCode(code), HttpStatus.OK);
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/eventhandler/ParsePermissionListener.java<|end_filename|>
package io.choerodon.iam.api.eventhandler;
import io.choerodon.eureka.event.AbstractEurekaEventObserver;
import io.choerodon.eureka.event.EurekaEventPayload;
import io.choerodon.iam.app.service.UploadHistoryService;
import org.springframework.stereotype.Component;
/**
 * Parses service permissions from Eureka instance events.
 *
 * @author superlee
 */
@Component
public class ParsePermissionListener extends AbstractEurekaEventObserver {

    private UploadHistoryService.ParsePermissionService parsePermissionService;

    public ParsePermissionListener(UploadHistoryService.ParsePermissionService parsePermissionService) {
        this.parsePermissionService = parsePermissionService;
    }

    @Override
    public void receiveUpEvent(EurekaEventPayload payload) {
        // NOTE(review): the parser call is commented out, so service-up events
        // are currently ignored — confirm whether this is intentional.
        //parsePermissionService.parser(payload);
    }

    @Override
    public void receiveDownEvent(EurekaEventPayload payload) {
        // do nothing
    }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/app/service/impl/RoleServiceImplSpec.groovy<|end_filename|>
package io.choerodon.iam.app.service.impl
import com.github.pagehelper.PageInfo
import io.choerodon.base.domain.PageRequest
import io.choerodon.core.domain.Page
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.api.dto.RoleSearchDTO
import io.choerodon.iam.api.query.RoleQuery
import io.choerodon.iam.app.service.RoleService
import io.choerodon.iam.infra.dto.RoleDTO
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * Spock spec for RoleService paged role search.
 *
 * @author dengyouquan
 * */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class RoleServiceImplSpec extends Specification {
    @Autowired
    private RoleService roleService

    def "PagingQuery"() {
        given: "构造请求参数"
        Boolean needUsers = true
        Long sourceId = 0L
        String sourceType = "site"
        // NOTE(review): 'role' is built but never passed to the service —
        // presumably left over from an older API; confirm before removing.
        RoleSearchDTO role = new RoleSearchDTO()

        when: "调用方法[site层]"
        // The same RoleQuery instance is reused by all three invocations below,
        // so the org/project cases exercise identical parameters.
        RoleQuery roleQuery = new RoleQuery()
        roleQuery.setSourceId(sourceId)
        roleQuery.setWithUser(needUsers)
        roleQuery.setSourceType(sourceType)
        PageRequest pageRequest = new PageRequest(1,20)
        PageInfo<RoleDTO> page = roleService.pagingSearch(pageRequest, roleQuery)

        then: "校验参数"
        page.pages != 0
        page.total != 0

        when: "调用方法[organization层]"
        page = roleService.pagingSearch(pageRequest, roleQuery)

        then: "校验参数"
        page.pages != 0
        page.total != 0

        when: "调用方法[project层]"
        page = roleService.pagingSearch(pageRequest, roleQuery)

        then: "校验参数"
        page.pages != 0
        page.total != 0
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/annotation/NamingRuleTrans.java<|end_filename|>
package io.choerodon.iam.infra.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a method parameter whose field-naming rule should be translated
 * during binding; the strategy defaults to
 * {@code NamingRuleTransStrategy.CAMEL} (strategy enum declared elsewhere).
 */
@Target({ElementType.PARAMETER})
@Retention(RetentionPolicy.RUNTIME)
public @interface NamingRuleTrans {
    /**
     * The naming translation strategy to apply; defaults to CAMEL.
     */
    public NamingRuleTransStrategy value() default NamingRuleTransStrategy.CAMEL;
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/PasswordPolicyService.java<|end_filename|>
package io.choerodon.iam.app.service;
import io.choerodon.iam.infra.dto.PasswordPolicyDTO;
/**
 * Service for managing an organization's password policy.
 *
 * @author wuguokai
 */
public interface PasswordPolicyService {

    /** Create a password policy for the given organization. */
    PasswordPolicyDTO create(Long orgId, PasswordPolicyDTO passwordPolicyDTO);

    /** Query the password policy attached to the given organization. */
    PasswordPolicyDTO queryByOrgId(Long orgId);

    /** Query a password policy by its own id. */
    PasswordPolicyDTO query(Long id);

    /** Update the password policy with the given id under the organization. */
    PasswordPolicyDTO update(Long orgId, Long id, PasswordPolicyDTO passwordPolicyDTO);
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/LookupService.java<|end_filename|>
package io.choerodon.iam.app.service;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.infra.dto.LookupDTO;
/**
 * Service for lookup (code/value dictionary) records.
 *
 * @author superlee
 */
public interface LookupService {

    /** Create a lookup record. */
    LookupDTO create(LookupDTO lookupDTO);

    /** Page lookups filtered by an example DTO plus a fuzzy param. */
    PageInfo<LookupDTO> pagingQuery(PageRequest pageRequest, LookupDTO lookupDTO, String param);

    /** Delete a lookup by id. */
    void delete(Long id);

    /** Update a lookup record. */
    LookupDTO update(LookupDTO lookupDTO);

    /** Query a lookup by id. */
    LookupDTO queryById(Long id);

    /** Query a lookup by code, with its values attached. */
    LookupDTO listByCodeWithLookupValues(String code);
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/AccessTokenServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import java.util.*;
import java.util.stream.Collectors;
import com.github.pagehelper.Page;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.core.oauth.CustomUserDetails;
import io.choerodon.iam.infra.asserts.DetailsHelperAssert;
import io.choerodon.iam.infra.asserts.UserAssertHelper;
import io.choerodon.iam.infra.common.utils.PageUtils;
import io.choerodon.iam.infra.dto.AccessTokenDTO;
import io.choerodon.iam.infra.dto.UserDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.oauth2.common.DefaultOAuth2AccessToken;
import org.springframework.security.oauth2.common.util.SerializationUtils;
import org.springframework.stereotype.Service;
import io.choerodon.asgard.schedule.annotation.JobTask;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.iam.ResourceLevel;
import io.choerodon.iam.app.service.AccessTokenService;
import io.choerodon.iam.infra.feign.OauthTokenFeignClient;
import io.choerodon.iam.infra.mapper.AccessTokenMapper;
import io.choerodon.iam.infra.mapper.RefreshTokenMapper;
/**
* @author Eugen
**/
@Service
public class AccessTokenServiceImpl implements AccessTokenService {
private static final Logger logger = LoggerFactory.getLogger(AccessTokenServiceImpl.class);
private AccessTokenMapper accessTokenMapper;
private RefreshTokenMapper refreshTokenMapper;
private OauthTokenFeignClient oauthTokenFeignClient;
private UserAssertHelper userAssertHelper;
public AccessTokenServiceImpl(AccessTokenMapper accessTokenMapper, RefreshTokenMapper refreshTokenMapper,
OauthTokenFeignClient oauthTokenFeignClient,
UserAssertHelper userAssertHelper) {
this.accessTokenMapper = accessTokenMapper;
this.refreshTokenMapper = refreshTokenMapper;
this.oauthTokenFeignClient = oauthTokenFeignClient;
this.userAssertHelper = userAssertHelper;
}
@Override
public PageInfo<AccessTokenDTO> pagedSearch(PageRequest pageRequest, String clientName, String currentToken) {
CustomUserDetails userDetails = DetailsHelperAssert.userDetailNotExisted();
UserDTO userDTO = userAssertHelper.userNotExisted(userDetails.getUserId());
List<AccessTokenDTO> result = searchAndOrderBy(clientName, currentToken, userDTO.getLoginName());
return doPage(pageRequest, result);
}
@Override
public void delete(String tokenId, String currentToken) {
AccessTokenDTO accessTokenDTO = accessTokenMapper.selectByPrimaryKey(tokenId);
if (accessTokenDTO == null) {
throw new CommonException("error.token.not.exist");
}
if (((DefaultOAuth2AccessToken) SerializationUtils.deserialize(accessTokenDTO.getToken())).getValue().equalsIgnoreCase(currentToken)) {
throw new CommonException("error.delete.current.token");
}
oauthTokenFeignClient.deleteToken(tokenId);
logger.info("iam delete token,tokenId:{}", tokenId);
}
@Override
public void deleteList(List<String> tokenIds, String currentToken) {
List<AccessTokenDTO> accessTokens = accessTokenMapper.selectTokenList(tokenIds);
List<String> tokens = accessTokens.stream().map(t -> ((DefaultOAuth2AccessToken) SerializationUtils.deserialize(t.getToken())).getValue()).collect(Collectors.toList());
if (tokens != null && !tokens.isEmpty() && tokens.contains(currentToken)) {
throw new CommonException("error.delete.current.token");
}
if (tokens != null && tokens.size() != tokenIds.size()) {
tokenIds = accessTokens.stream().map(AccessTokenDTO::getTokenId).collect(Collectors.toList());
}
oauthTokenFeignClient.deleteTokenList(tokenIds);
}
@JobTask(maxRetryCount = 2, code = "deleteAllExpiredToken", level = ResourceLevel.SITE, description = "删除所有失效token")
@Override
public void deleteAllExpiredToken(Map<String, Object> map) {
List<AccessTokenDTO> accessTokens = accessTokenMapper.selectAll();
//过滤出所有失效token
List<AccessTokenDTO> allExpired = accessTokens.stream().filter(t -> ((DefaultOAuth2AccessToken) SerializationUtils.deserialize(t.getToken())).isExpired()).collect(Collectors.toList());
allExpired.forEach(t -> {
accessTokenMapper.deleteByPrimaryKey(t.getTokenId());
refreshTokenMapper.deleteByPrimaryKey(t.getRefreshToken());
});
logger.info("All expired tokens have been cleared.");
}
private PageInfo<AccessTokenDTO> doPage(PageRequest pageRequest, List<AccessTokenDTO> result) {
int page = pageRequest.getPage();
int size = pageRequest.getSize();
Page<AccessTokenDTO> pageResult = new Page<>(page, size);
int total = result.size();
pageResult.setTotal(total);
if (size == 0) {
pageResult.addAll(result);
} else {
int start = PageUtils.getBegin(page, size);
int end = page * size > total ? total : page * size;
pageResult.addAll(result.subList(start, end));
}
return pageResult.toPageInfo();
}
private List<AccessTokenDTO> searchAndOrderBy(String clientName, String currentToken, String loginName) {
List<AccessTokenDTO> userAccessTokens = accessTokenMapper.selectTokens(loginName, clientName);
List<AccessTokenDTO> result = new ArrayList<>();
List<AccessTokenDTO> tokensWithoutCreateTime = new ArrayList<>();
List<AccessTokenDTO> tokensWithCreateTime = new ArrayList<>();
userAccessTokens.forEach(token -> {
DefaultOAuth2AccessToken defaultToken = SerializationUtils.deserialize(token.getToken());
String tokenValue = defaultToken.getValue();
token.setAccesstoken(tokenValue);
token.setExpirationTime(defaultToken.getExpiration());
token.setExpire(defaultToken.isExpired());
boolean isCurrentToken = tokenValue.equalsIgnoreCase(currentToken);
token.setCurrentToken(isCurrentToken);
Object createTime = defaultToken.getAdditionalInformation().get("createTime");
if (isCurrentToken) {
//当前token置顶
result.add(token);
} else {
if (createTime == null) {
tokensWithoutCreateTime.add(token);
} else {
token.setCreateTime((Date) createTime);
tokensWithCreateTime.add(token);
}
}
token.setCreateTime((Date) createTime);
});
//有createTime的排序,没有的不排序
tokensWithCreateTime.sort(Comparator.comparing(AccessTokenDTO::getCreateTime).reversed());
result.addAll(tokensWithCreateTime);
result.addAll(tokensWithoutCreateTime);
return result;
}
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/DashboardController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.domain.Sort;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.iam.infra.dto.DashboardDTO;
import io.choerodon.mybatis.annotation.SortDefault;
import io.choerodon.swagger.annotation.CustomPageRequest;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import io.choerodon.core.base.BaseController;
import io.choerodon.iam.app.service.DashboardService;
import io.choerodon.iam.infra.common.utils.ParamUtils;
import springfox.documentation.annotations.ApiIgnore;
/**
* @author <EMAIL>
*/
@RestController
@RequestMapping(value = "/v1/dashboards")
public class DashboardController extends BaseController {

    private DashboardService dashboardService;

    public DashboardController(DashboardService dashboardService) {
        this.dashboardService = dashboardService;
    }

    /**
     * Updates the dashboard identified by {@code dashboard_id}.
     *
     * @param dashboardId  id of the dashboard to update
     * @param updateRole   whether the attached role list should be refreshed as well (defaults to false)
     * @param dashboardDto payload carrying the new dashboard values
     * @return the updated dashboard
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "修改dashboard")
    @PostMapping(value = "/{dashboard_id}")
    public ResponseEntity<DashboardDTO> update(
            @PathVariable("dashboard_id") Long dashboardId,
            @RequestParam(value = "update_role", required = false, defaultValue = "false")
            @ApiParam("是否更新角色列表/默认false") Boolean updateRole,
            @RequestBody DashboardDTO dashboardDto) {
        DashboardDTO updated = dashboardService.update(dashboardId, dashboardDto, updateRole);
        return new ResponseEntity<>(updated, HttpStatus.OK);
    }

    /**
     * Fetches a single dashboard by its id.
     *
     * @param dashboardId id of the dashboard to look up
     * @return the matching dashboard
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "通过id查询Dashboard")
    @GetMapping(value = "/{dashboard_id}")
    public ResponseEntity<DashboardDTO> query(@PathVariable("dashboard_id") Long dashboardId) {
        DashboardDTO found = dashboardService.query(dashboardId);
        return new ResponseEntity<>(found, HttpStatus.OK);
    }

    /**
     * Pages through dashboards matching the given (all optional) filter fields
     * plus free-text {@code params}.
     *
     * @param pageRequest paging/sorting information (defaults to id desc)
     * @return one page of matching dashboards
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "分页模糊查询Dashboard")
    @GetMapping
    @CustomPageRequest
    public ResponseEntity<PageInfo<DashboardDTO>> list(
            @ApiIgnore
            @SortDefault(value = "id", direction = Sort.Direction.DESC) PageRequest pageRequest,
            @RequestParam(required = false) String name,
            @RequestParam(required = false) String code,
            @RequestParam(required = false) String level,
            @RequestParam(required = false) String namespace,
            @RequestParam(required = false) Boolean enable,
            @RequestParam(required = false) Boolean needRoles,
            @RequestParam(required = false) String[] params) {
        // Collect the optional query parameters into an example DTO used as the filter.
        DashboardDTO filter = new DashboardDTO();
        filter.setName(name);
        filter.setCode(code);
        filter.setEnabled(enable);
        filter.setLevel(level);
        filter.setNamespace(namespace);
        filter.setNeedRoles(needRoles);
        PageInfo<DashboardDTO> page = dashboardService.list(filter, pageRequest, ParamUtils.arrToStr(params));
        return new ResponseEntity<>(page, HttpStatus.OK);
    }

    /**
     * Resets dashboard configuration data; when {@code dashboard_id} is omitted
     * the service decides the reset scope.
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation("重置仪表盘配置数据")
    @PutMapping("/reset")
    public void reset(@RequestParam(value = "dashboard_id", required = false) Long dashboardId) {
        dashboardService.reset(dashboardId);
    }
}
<|start_filename|>react/src/app/iam/stores/noLevel/register-org/index.js<|end_filename|>
// Barrel file: re-export the store's default export unchanged.
export { default } from './RegisterOrgStore';
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/OrganizationUserServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import static io.choerodon.iam.infra.common.utils.SagaTopic.User.*;
import java.util.*;
import java.util.stream.Collectors;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.iam.app.service.UserService;
import io.choerodon.iam.infra.asserts.OrganizationAssertHelper;
import io.choerodon.iam.infra.asserts.UserAssertHelper;
import io.choerodon.iam.infra.dto.LdapErrorUserDTO;
import io.choerodon.iam.infra.dto.SystemSettingDTO;
import io.choerodon.iam.infra.dto.UserDTO;
import io.choerodon.iam.infra.exception.InsertException;
import io.choerodon.iam.infra.exception.UpdateExcetion;
import io.choerodon.iam.infra.mapper.OrganizationMapper;
import io.choerodon.iam.infra.mapper.UserMapper;
import io.choerodon.oauth.core.password.domain.BasePasswordPolicyDTO;
import io.choerodon.oauth.core.password.domain.BaseUserDTO;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
import io.choerodon.asgard.saga.annotation.Saga;
import io.choerodon.asgard.saga.dto.StartInstanceDTO;
import io.choerodon.asgard.saga.feign.SagaClient;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.iam.ResourceLevel;
import io.choerodon.core.oauth.DetailsHelper;
import io.choerodon.iam.api.dto.UserSearchDTO;
import io.choerodon.iam.api.dto.payload.UserEventPayload;
import io.choerodon.iam.api.validator.UserPasswordValidator;
import io.choerodon.iam.app.service.OrganizationUserService;
import io.choerodon.iam.app.service.SystemSettingService;
import io.choerodon.iam.infra.common.utils.ParamUtils;
import io.choerodon.iam.infra.enums.LdapErrorUserCause;
import io.choerodon.iam.infra.feign.OauthTokenFeignClient;
import io.choerodon.oauth.core.password.PasswordPolicyManager;
import io.choerodon.oauth.core.password.mapper.BasePasswordPolicyMapper;
import io.choerodon.oauth.core.password.record.PasswordRecord;
/**
* @author superlee
*/
@Component
@RefreshScope
public class OrganizationUserServiceImpl implements OrganizationUserService {
@Value("${choerodon.devops.message:false}")
private boolean devopsMessage;
@Value("${spring.application.name:default}")
private String serviceName;
private PasswordRecord passwordRecord;
private SagaClient sagaClient;
private final ObjectMapper mapper = new ObjectMapper();
private PasswordPolicyManager passwordPolicyManager;
private UserPasswordValidator userPasswordValidator;
private OauthTokenFeignClient oauthTokenFeignClient;
private BasePasswordPolicyMapper basePasswordPolicyMapper;
@Value("${choerodon.site.default.password:<PASSWORD>}")
private String siteDefaultPassword;
private SystemSettingService systemSettingService;
private static final BCryptPasswordEncoder ENCODER = new BCryptPasswordEncoder();
private OrganizationAssertHelper organizationAssertHelper;
private OrganizationMapper organizationMapper;
private UserAssertHelper userAssertHelper;
private UserMapper userMapper;
private UserService userService;
public OrganizationUserServiceImpl(PasswordRecord passwordRecord,
PasswordPolicyManager passwordPolicyManager,
BasePasswordPolicyMapper basePasswordPolicyMapper,
OauthTokenFeignClient oauthTokenFeignClient,
UserPasswordValidator userPasswordValidator,
SystemSettingService systemSettingService,
SagaClient sagaClient,
OrganizationAssertHelper organizationAssertHelper,
OrganizationMapper organizationMapper,
UserAssertHelper userAssertHelper,
UserMapper userMapper,
UserService userService) {
this.passwordPolicyManager = passwordPolicyManager;
this.basePasswordPolicyMapper = basePasswordPolicyMapper;
this.sagaClient = sagaClient;
this.userPasswordValidator = userPasswordValidator;
this.passwordRecord = passwordRecord;
this.systemSettingService = systemSettingService;
this.oauthTokenFeignClient = oauthTokenFeignClient;
this.organizationAssertHelper = organizationAssertHelper;
this.organizationMapper = organizationMapper;
this.userAssertHelper = userAssertHelper;
this.userMapper = userMapper;
this.userService = userService;
}
@Transactional(rollbackFor = CommonException.class)
@Override
@Saga(code = USER_CREATE, description = "iam创建用户", inputSchemaClass = UserEventPayload.class)
public UserDTO create(UserDTO userDTO, boolean checkPassword) {
String password =
Optional.ofNullable(userDTO.getPassword())
.orElseThrow(() -> new CommonException("error.user.password.empty"));
Long organizationId = userDTO.getOrganizationId();
organizationAssertHelper.organizationNotExisted(organizationId);
if (checkPassword) {
validatePasswordPolicy(userDTO, password, organizationId);
// 校验用户密码
userPasswordValidator.validate(password, organizationId, true);
}
UserDTO user = createUser(userDTO);
if (devopsMessage) {
try {
UserEventPayload userEventPayload = new UserEventPayload();
userEventPayload.setEmail(user.getEmail());
userEventPayload.setId(user.getId().toString());
userEventPayload.setName(user.getRealName());
userEventPayload.setUsername(user.getLoginName());
userEventPayload.setFromUserId(DetailsHelper.getUserDetails().getUserId());
userEventPayload.setOrganizationId(organizationId);
//devop处理接受的是list
List<UserEventPayload> payloads = new ArrayList<>();
payloads.add(userEventPayload);
String input = mapper.writeValueAsString(payloads);
sagaClient.startSaga(USER_CREATE, new StartInstanceDTO(input, "user", userEventPayload.getId(), ResourceLevel.ORGANIZATION.value(), organizationId));
} catch (Exception e) {
throw new CommonException("error.organizationUserService.createUser.event", e);
}
}
return user;
}
private UserDTO createUser(UserDTO userDTO) {
userAssertHelper.loginNameExisted(userDTO.getLoginName());
userDTO.setLocked(false);
userDTO.setEnabled(true);
userDTO.setPassword(ENCODER.encode(userDTO.getPassword()));
if (userMapper.insertSelective(userDTO) != 1) {
throw new InsertException("error.user.create");
}
passwordRecord.updatePassword(userDTO.getId(), userDTO.getPassword());
return userMapper.selectByPrimaryKey(userDTO.getId());
}
private UserDTO insertSelective(UserDTO user) {
if (userMapper.insertSelective(user) != 1) {
throw new InsertException("error.user.create");
}
return userMapper.selectByPrimaryKey(user.getId());
}
@Override
@Transactional(rollbackFor = CommonException.class)
@Saga(code = USER_CREATE_BATCH, description = "iam批量创建用户", inputSchemaClass = List.class)
public List<LdapErrorUserDTO> batchCreateUsers(List<UserDTO> insertUsers) {
List<LdapErrorUserDTO> errorUsers = new ArrayList<>();
List<UserEventPayload> payloads = new ArrayList<>();
insertUsers.forEach(user -> {
UserDTO userDTO = null;
try {
userDTO = insertSelective(user);
} catch (Exception e) {
LdapErrorUserDTO errorUser = new LdapErrorUserDTO();
errorUser.setUuid(user.getUuid());
errorUser.setLoginName(user.getLoginName());
errorUser.setEmail(user.getEmail());
errorUser.setRealName(user.getRealName());
errorUser.setPhone(user.getPhone());
errorUser.setCause(LdapErrorUserCause.USER_INSERT_ERROR.value());
errorUsers.add(errorUser);
}
if (devopsMessage && userDTO != null && userDTO.getEnabled()) {
UserEventPayload payload = new UserEventPayload();
payload.setEmail(userDTO.getEmail());
payload.setId(userDTO.getId().toString());
payload.setName(userDTO.getRealName());
payload.setUsername(userDTO.getLoginName());
payload.setOrganizationId(userDTO.getOrganizationId());
payloads.add(payload);
}
});
if (!payloads.isEmpty()) {
try {
String input = mapper.writeValueAsString(payloads);
String refIds = payloads.stream().map(UserEventPayload::getId).collect(Collectors.joining(","));
sagaClient.startSaga(USER_CREATE_BATCH, new StartInstanceDTO(input, "users", refIds, ResourceLevel.ORGANIZATION.value(), insertUsers.get(0).getOrganizationId()));
} catch (Exception e) {
throw new CommonException("error.organizationUserService.batchCreateUser.event", e);
} finally {
payloads.clear();
}
}
return errorUsers;
}
private void validatePasswordPolicy(UserDTO userDTO, String password, Long organizationId) {
BaseUserDTO baseUserDTO = new BaseUserDTO();
BeanUtils.copyProperties(userDTO, baseUserDTO);
BasePasswordPolicyDTO example = new BasePasswordPolicyDTO();
example.setOrganizationId(organizationId);
BasePasswordPolicyDTO basePasswordPolicyDTO = basePasswordPolicyMapper.selectOne(example);
Optional.ofNullable(basePasswordPolicyDTO)
.map(passwordPolicy -> {
if (!password.equals(passwordPolicy.getOriginalPassword())) {
passwordPolicyManager.passwordValidate(password, baseUserDTO, passwordPolicy);
}
return null;
});
}
@Override
public PageInfo<UserDTO> pagingQuery(PageRequest pageRequest, UserSearchDTO user) {
return PageHelper
.startPage(pageRequest.getPage(), pageRequest.getSize())
.doSelectPageInfo(() -> userMapper.fulltextSearch(user, ParamUtils.arrToStr(user.getParam())));
}
@Transactional(rollbackFor = CommonException.class)
@Override
@Saga(code = USER_UPDATE, description = "iam更新用户", inputSchemaClass = UserEventPayload.class)
public UserDTO update(UserDTO userDTO) {
organizationAssertHelper.organizationNotExisted(userDTO.getOrganizationId());
UserDTO dto;
if (devopsMessage) {
UserEventPayload userEventPayload = new UserEventPayload();
dto = updateUser(userDTO);
userEventPayload.setEmail(dto.getEmail());
userEventPayload.setId(dto.getId().toString());
userEventPayload.setName(dto.getRealName());
userEventPayload.setUsername(dto.getLoginName());
try {
String input = mapper.writeValueAsString(userEventPayload);
sagaClient.startSaga(USER_UPDATE, new StartInstanceDTO(input, "user", userEventPayload.getId(), ResourceLevel.ORGANIZATION.value(), userDTO.getOrganizationId()));
} catch (Exception e) {
throw new CommonException("error.organizationUserService.updateUser.event", e);
}
} else {
dto = updateUser(userDTO);
}
return dto;
}
private UserDTO updateUser(UserDTO userDTO) {
if (userDTO.getPassword() != null) {
userDTO.setPassword(ENCODER.encode(userDTO.getPassword()));
}
return updateSelective(userDTO);
}
private UserDTO updateSelective(UserDTO userDTO) {
userAssertHelper.objectVersionNumberNotNull(userDTO.getObjectVersionNumber());
if (userMapper.updateByPrimaryKeySelective(userDTO) != 1) {
throw new UpdateExcetion("error.user.update");
}
return userMapper.selectByPrimaryKey(userDTO.getId());
}
@Transactional
@Override
public UserDTO resetUserPassword(Long organizationId, Long userId) {
UserDTO user = userAssertHelper.userNotExisted(userId);
if (user.getLdap()) {
throw new CommonException("error.ldap.user.can.not.update.password");
}
String defaultPassword = getDefaultPassword(organizationId);
user.setPassword(ENCODER.encode(defaultPassword));
updateSelective(user);
passwordRecord.updatePassword(user.getId(), user.getPassword());
// delete access tokens, refresh tokens and sessions of the user after resetting his password
oauthTokenFeignClient.deleteTokens(user.getLoginName());
// send siteMsg
Map<String, Object> paramsMap = new HashMap<>();
paramsMap.put("userName", user.getRealName());
paramsMap.put("defaultPassword", <PASSWORD>);
List<Long> userIds = Collections.singletonList(userId);
userService.sendNotice(userId, userIds, "resetOrganizationUserPassword", paramsMap, organizationId);
return user;
}
/**
* get password to reset
*
* @param organizationId organization id
* @return the password
*/
private String getDefaultPassword(Long organizationId) {
BasePasswordPolicyDTO basePasswordPolicyDTO = new BasePasswordPolicyDTO();
basePasswordPolicyDTO.setOrganizationId(organizationId);
basePasswordPolicyDTO = basePasswordPolicyMapper.selectOne(basePasswordPolicyDTO);
if (basePasswordPolicyDTO != null && basePasswordPolicyDTO.getEnablePassword() && !StringUtils.isEmpty(basePasswordPolicyDTO.getOriginalPassword())) {
return basePasswordPolicyDTO.getOriginalPassword();
}
SystemSettingDTO setting = systemSettingService.getSetting();
if (setting != null && !StringUtils.isEmpty(setting.getDefaultPassword())) {
return setting.getDefaultPassword();
}
return siteDefaultPassword;
}
@Transactional(rollbackFor = CommonException.class)
@Override
@Saga(code = USER_DELETE, description = "iam删除用户", inputSchemaClass = UserEventPayload.class)
public void delete(Long organizationId, Long id) {
organizationAssertHelper.organizationNotExisted(organizationId);
UserDTO user = userAssertHelper.userNotExisted(id);
UserEventPayload userEventPayload = new UserEventPayload();
userEventPayload.setUsername(user.getLoginName());
userMapper.deleteByPrimaryKey(id);
if (devopsMessage) {
try {
String input = mapper.writeValueAsString(userEventPayload);
sagaClient.startSaga(USER_DELETE, new StartInstanceDTO(input, "user", userEventPayload.getId()));
} catch (Exception e) {
throw new CommonException("error.organizationUserService.deleteUser.event", e);
}
}
}
@Override
public UserDTO query(Long organizationId, Long id) {
organizationAssertHelper.organizationNotExisted(organizationId);
return userMapper.selectByPrimaryKey(id);
}
@Override
@Transactional(rollbackFor = Exception.class)
public UserDTO unlock(Long organizationId, Long userId) {
organizationAssertHelper.organizationNotExisted(organizationId);
return unlockUser(userId);
}
private UserDTO unlockUser(Long userId) {
UserDTO userDTO = userAssertHelper.userNotExisted(userId);
userDTO.setLocked(false);
passwordRecord.unLockUser(userDTO.getId());
return updateSelective(userDTO);
}
@Transactional(rollbackFor = CommonException.class)
@Override
@Saga(code = USER_ENABLE, description = "iam启用用户", inputSchemaClass = UserEventPayload.class)
public UserDTO enableUser(Long organizationId, Long userId) {
organizationAssertHelper.organizationNotExisted(organizationId);
UserDTO user = updateStatus(userId, true);
if (devopsMessage) {
UserEventPayload userEventPayload = new UserEventPayload();
userEventPayload.setUsername(user.getLoginName());
userEventPayload.setId(userId.toString());
try {
String input = mapper.writeValueAsString(userEventPayload);
sagaClient.startSaga(USER_ENABLE, new StartInstanceDTO(input, "user", userEventPayload.getId(), ResourceLevel.ORGANIZATION.value(), organizationId));
} catch (Exception e) {
throw new CommonException("error.organizationUserService.enableUser.event", e);
}
}
return user;
}
private UserDTO updateStatus(Long userId, boolean enabled) {
UserDTO dto = userAssertHelper.userNotExisted(userId);
dto.setEnabled(enabled);
if (userMapper.updateByPrimaryKeySelective(dto) != 1) {
throw new UpdateExcetion("error.user.update");
}
return dto;
}
@Transactional(rollbackFor = CommonException.class)
@Override
@Saga(code = USER_DISABLE, description = "iam停用用户", inputSchemaClass = UserEventPayload.class)
public UserDTO disableUser(Long organizationId, Long userId) {
organizationAssertHelper.organizationNotExisted(organizationId);
UserDTO user = updateStatus(userId, false);
if (devopsMessage) {
UserEventPayload userEventPayload = new UserEventPayload();
userEventPayload.setUsername(user.getLoginName());
userEventPayload.setId(userId.toString());
try {
String input = mapper.writeValueAsString(userEventPayload);
sagaClient.startSaga(USER_DISABLE, new StartInstanceDTO(input, "user", userEventPayload.getId(), ResourceLevel.ORGANIZATION.value(), organizationId));
} catch (Exception e) {
throw new CommonException("error.organizationUserService.disableUser.event", e);
}
}
return user;
}
@Override
public List<Long> listUserIds(Long organizationId) {
return organizationMapper.listMemberIds(organizationId, "organization");
}
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/ApplicationService.java<|end_filename|>
package io.choerodon.iam.app.service;
import java.util.List;
import com.github.pagehelper.PageInfo;
import io.choerodon.iam.api.query.ApplicationQuery;
import io.choerodon.iam.infra.dto.ApplicationDTO;
import io.choerodon.iam.infra.dto.ApplicationExplorationDTO;
/**
* @author superlee
* @since 0.15.0
*/
public interface ApplicationService {
    /**
     * Creates a new application.
     *
     * @param applicationDTO the application to create
     * @return the created application
     */
    ApplicationDTO create(ApplicationDTO applicationDTO);
    /**
     * Updates an application by primary key.
     * {@code code} and {@code organizationId} are immutable; {@code projectId}
     * is also immutable once it is non-null.
     *
     * @param applicationDTO the application carrying the new values
     * @return the updated application
     */
    ApplicationDTO update(ApplicationDTO applicationDTO);
    /**
     * Deletes an application.
     *
     * @param organizationId id of the owning organization
     * @param id             id of the application to delete
     */
    void delete(Long organizationId, Long id);
    /**
     * Pages through applications matching a fuzzy search.
     *
     * @param page                 page number
     * @param size                 page size
     * @param applicationSearchDTO search criteria
     * @param withDescendants      whether descendant applications are included
     * @return one page of matching applications
     */
    PageInfo<ApplicationDTO> pagingQuery(int page, int size, ApplicationQuery applicationSearchDTO, Boolean withDescendants);
    /**
     * Enables an application.
     *
     * @param id application id
     * @return the enabled application
     */
    ApplicationDTO enable(Long id);
    /**
     * Disables an application.
     *
     * @param id application id
     * @return the disabled application
     */
    ApplicationDTO disable(Long id);
    /**
     * Lists every code of {@link io.choerodon.iam.infra.enums.ApplicationType}.
     *
     * @return the application type codes
     */
    List<String> types();
    /**
     * Validates that the application's {@code code} and {@code name} are unique.
     *
     * @param applicationDTO the application to check
     */
    void check(ApplicationDTO applicationDTO);
    /**
     * Within an organization, adds applications (plain or combined) to the given
     * combined application.
     *
     * @param organizationId organization id
     * @param id             id of a combined application, i.e. its category is
     *                       {@link io.choerodon.iam.infra.enums.ApplicationCategory#COMBINATION}
     * @param ids            ids of the applications to attach
     */
    void addToCombination(Long organizationId, Long id, Long[] ids);
    /**
     * Lists every node underneath the given combined application.
     *
     * @param id combined application id
     * @return the descendant exploration records
     */
    List<ApplicationExplorationDTO> queryDescendant(Long id);
    /**
     * Pages through the plain applications
     * ({@link io.choerodon.iam.infra.enums.ApplicationCategory#APPLICATION})
     * underneath a combined application, optionally filtered by name/code.
     *
     * @param id combined application id
     * @return one page of plain applications
     */
    PageInfo<ApplicationDTO> queryApplicationList(int page, int size, Long id, String name, String code);
    /**
     * Lists the applications that may still be added as descendants of the given
     * combined application — i.e. those that would not introduce a cycle.
     *
     * @param id             combined application id
     * @param organizationId organization id
     * @return the candidate applications
     */
    List<ApplicationDTO> queryEnabledApplication(Long organizationId, Long id);
    /**
     * Fetches application details by id.
     *
     * @param id              application id
     * @param withDescendants whether descendants are loaded as well
     * @return the application
     */
    ApplicationDTO query(Long id, Boolean withDescendants);
    /**
     * Removes applications from a combined application.
     *
     * @param organizationId organization id
     * @param id             combined application id
     * @param ids            ids of the applications to detach
     */
    void deleteCombination(Long organizationId, Long id, Long[] ids);
    // Returns the application's current access token.
    String getToken(Long id);
    // Generates (and stores) a fresh access token for the application.
    String createToken(Long id);
    /**
     * Resolves an application from its token.
     *
     * @param applicationToken the application token
     * @return the matching application
     */
    ApplicationDTO getApplicationByToken(String applicationToken);
    // Resolves an application id from its code within a project.
    Long getIdByCode(String code, Long projectId);
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/DashboardRoleMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import java.util.List;
import io.choerodon.iam.infra.dto.DashboardRoleDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
/**
* @author <EMAIL>
*/
public interface DashboardRoleMapper extends Mapper<DashboardRoleDTO> {
    // Role codes bound to the dashboard identified by its code.
    List<String> selectRoleCodes(@Param("dashboardCode") String dashboardCode);
    // Removes every role binding of the given dashboard.
    void deleteByDashboardCode(@Param("dashboardCode") String dashboardCode);
    /**
     * Ids of the dashboards visible to a user through his roles.
     *
     * @param userId   the user id
     * @param sourceId id of the source (scope) the roles are granted on
     * @param level    resource level the dashboards belong to
     * @return the visible dashboard ids
     */
    List<Long> selectDashboardByUserId(@Param("userId") Long userId,
                                       @Param("sourceId") Long sourceId,
                                       @Param("level") String level);
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/ProjectRelationshipServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import io.choerodon.asgard.saga.annotation.Saga;
import io.choerodon.asgard.saga.producer.StartSagaBuilder;
import io.choerodon.asgard.saga.producer.TransactionalProducer;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.iam.ResourceLevel;
import io.choerodon.iam.api.dto.ProjectCategoryDTO;
import io.choerodon.iam.api.dto.RelationshipCheckDTO;
import io.choerodon.iam.api.dto.payload.ProjectRelationshipInsertPayload;
import io.choerodon.iam.app.service.OrganizationProjectService;
import io.choerodon.iam.app.service.ProjectRelationshipService;
import io.choerodon.iam.infra.asserts.ProjectAssertHelper;
import io.choerodon.iam.infra.dto.ProjectDTO;
import io.choerodon.iam.infra.dto.ProjectMapCategoryDTO;
import io.choerodon.iam.infra.dto.ProjectRelationshipDTO;
import io.choerodon.iam.infra.enums.ProjectCategory;
import io.choerodon.iam.infra.mapper.ProjectCategoryMapper;
import io.choerodon.iam.infra.mapper.ProjectMapCategoryMapper;
import io.choerodon.iam.infra.mapper.ProjectMapper;
import io.choerodon.iam.infra.mapper.ProjectRelationshipMapper;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.stream.Collectors;
import static io.choerodon.iam.infra.common.utils.SagaTopic.ProjectRelationship.PROJECT_RELATIONSHIP_ADD;
import static io.choerodon.iam.infra.common.utils.SagaTopic.ProjectRelationship.PROJECT_RELATIONSHIP_DELETE;
/**
* @author Eugen
*/
@Service
public class ProjectRelationshipServiceImpl implements ProjectRelationshipService {
private static final Logger logger = LoggerFactory.getLogger(ProjectRelationshipServiceImpl.class);
private static final String PROGRAM_CANNOT_BE_CONFIGURA_SUBPROJECTS = "error.program.cannot.be.configured.subprojects";
private static final String AGILE_CANNOT_CONFIGURA_SUBPROJECTS = "error.agile.projects.cannot.configure.subprojects";
private static final String RELATIONSHIP_NOT_EXIST_EXCEPTION = "error.project.relationship.not.exist";
public static final String STATUS_ADD = "add";
public static final String STATUS_UPDATE = "update";
public static final String STATUS_DELETE = "delete";
private TransactionalProducer producer;
private ProjectRelationshipMapper relationshipMapper;
private ProjectCategoryMapper projectCategoryMapper;
private ProjectMapCategoryMapper projectMapCategoryMapper;
private OrganizationProjectService organizationProjectService;
@Value("${choerodon.category.enabled:false}")
private Boolean categoryEnable;
private ProjectRelationshipMapper projectRelationshipMapper;
private ProjectAssertHelper projectAssertHelper;
private ProjectMapper projectMapper;
// Constructor injection of every collaborator; no additional wiring happens here.
public ProjectRelationshipServiceImpl(TransactionalProducer producer, ProjectRelationshipMapper relationshipMapper,
                                      ProjectCategoryMapper projectCategoryMapper, ProjectMapCategoryMapper projectMapCategoryMapper,
                                      OrganizationProjectService organizationProjectService,
                                      ProjectRelationshipMapper projectRelationshipMapper,
                                      ProjectAssertHelper projectAssertHelper,
                                      ProjectMapper projectMapper) {
    this.producer = producer;
    this.relationshipMapper = relationshipMapper;
    this.projectCategoryMapper = projectCategoryMapper;
    this.projectMapCategoryMapper = projectMapCategoryMapper;
    this.organizationProjectService = organizationProjectService;
    this.projectRelationshipMapper = projectRelationshipMapper;
    this.projectAssertHelper = projectAssertHelper;
    this.projectMapper = projectMapper;
}
/**
 * Lists the relationships of the sub-projects under a program or analytical project.
 *
 * @param projectId        id of the parent (program/analytical) project
 * @param onlySelectEnable whether only enabled relationships are returned
 * @return the sub-project relationships
 */
@Override
public List<ProjectRelationshipDTO> getProjUnderGroup(Long projectId, Boolean onlySelectEnable) {
    // With categories enabled the category must be resolved via the category table;
    // otherwise the plain project row (with an existence assertion) suffices.
    ProjectDTO project = categoryEnable
            ? organizationProjectService.selectCategoryByPrimaryKey(projectId)
            : projectAssertHelper.projectNotExisted(projectId);
    String category = project.getCategory();
    boolean isProgram = category.equalsIgnoreCase(ProjectCategory.PROGRAM.value());
    boolean isAnalytical = category.equalsIgnoreCase(ProjectCategory.ANALYTICAL.value());
    // Only programs and analytical projects may own sub-projects.
    if (!isProgram && !isAnalytical) {
        throw new CommonException(AGILE_CANNOT_CONFIGURA_SUBPROJECTS);
    }
    return projectRelationshipMapper.selectProjectsByParentId(projectId, onlySelectEnable);
}
/**
 * Removes a sub-project relationship from a program and publishes a
 * PROJECT_RELATIONSHIP_DELETE saga describing the removal.
 *
 * @param orgId   id of the organization the saga is scoped to
 * @param groupId primary key of the relationship row to remove
 */
@Override
@Saga(code = PROJECT_RELATIONSHIP_DELETE, description = "项目群下移除项目", inputSchemaClass = ProjectRelationshipInsertPayload.class)
public void removesAProjUnderGroup(Long orgId, Long groupId) {
    ProjectRelationshipDTO projectRelationshipDTO = projectRelationshipMapper.selectByPrimaryKey(groupId);
    if (projectRelationshipDTO == null) {
        throw new CommonException(RELATIONSHIP_NOT_EXIST_EXCEPTION);
    }
    // When categories are enabled, an enabled relationship also implies a
    // program-category mapping that must be cleaned up.
    if (categoryEnable && projectRelationshipDTO.getEnabled()) {
        removeProgramProject(projectRelationshipDTO.getProjectId());
    }
    // Build the saga payload (parent program info + the single relationship
    // being deleted) BEFORE the row disappears.
    ProjectRelationshipInsertPayload sagaPayload = new ProjectRelationshipInsertPayload();
    ProjectDTO parent = projectAssertHelper.projectNotExisted(projectRelationshipDTO.getParentId());
    sagaPayload.setCategory(parent.getCategory());
    sagaPayload.setParentCode(parent.getCode());
    sagaPayload.setParentId(parent.getId());
    ProjectDTO project = projectAssertHelper.projectNotExisted(projectRelationshipDTO.getProjectId());
    ProjectRelationshipInsertPayload.ProjectRelationship relationship
            = new ProjectRelationshipInsertPayload.ProjectRelationship(project.getId(), project.getCode(),
            projectRelationshipDTO.getStartDate(), projectRelationshipDTO.getEndDate(), projectRelationshipDTO.getEnabled(), STATUS_DELETE);
    sagaPayload.setRelationships(Collections.singletonList(relationship));
    // The delete itself runs inside the saga's local transaction so that the
    // event is only published if the row was actually removed.
    producer.applyAndReturn(
            StartSagaBuilder
                    .newBuilder()
                    .withLevel(ResourceLevel.ORGANIZATION)
                    .withRefType("organization")
                    .withSagaCode(PROJECT_RELATIONSHIP_DELETE),
            builder -> {
                // Re-check existence inside the transaction to guard against a
                // concurrent delete between the read above and this point.
                if (projectRelationshipMapper.selectByPrimaryKey(groupId) == null) {
                    throw new CommonException("error.delete.project.group.not.exist");
                }
                if (projectRelationshipMapper.deleteByPrimaryKey(groupId) != 1) {
                    throw new CommonException("error.delete.project.group");
                }
                builder
                        .withPayloadAndSerialize(sagaPayload)
                        .withRefId(String.valueOf(orgId))
                        .withSourceId(orgId);
                return sagaPayload;
            });
}
/**
 * Checks whether a currently-disabled project relationship may be enabled
 * (i.e. its date range does not clash with another enabled relationship).
 *
 * @param id primary key of the relationship to check
 * @return the check result with availability information
 */
@Override
public RelationshipCheckDTO checkRelationshipCanBeEnabled(Long id) {
    ProjectRelationshipDTO relationship = projectRelationshipMapper.selectByPrimaryKey(id);
    // Guard: the relationship must exist ...
    if (relationship == null) {
        throw new CommonException(RELATIONSHIP_NOT_EXIST_EXCEPTION);
    }
    // ... and must not already be enabled.
    if (relationship.getEnabled()) {
        throw new CommonException("error.check.relationship.is.already.enabled");
    }
    return checkDate(relationship);
}
/**
 * Computes the date windows during which the given agile project is already
 * occupied by some OTHER enabled program, and therefore cannot be attached to
 * the target parent program.
 *
 * @param projectId id of the agile project being attached
 * @param parentId  id of the target parent program (relationships with this
 *                  parent are excluded from the result)
 * @return a list of {"start": Date, "end": Date} windows that are unavailable
 */
@Override
public List<Map<String, Date>> getUnavailableTime(Long projectId, Long parentId) {
    // The child must be an agile project; programs cannot be sub-projects.
    ProjectDTO child = projectAssertHelper.projectNotExisted(projectId);
    if (!child.getCategory().equalsIgnoreCase(ProjectCategory.AGILE.value())) {
        throw new CommonException(PROGRAM_CANNOT_BE_CONFIGURA_SUBPROJECTS);
    }
    // The parent must NOT be an agile project.
    ProjectDTO parent = projectAssertHelper.projectNotExisted(parentId);
    if (parent.getCategory().equalsIgnoreCase(ProjectCategory.AGILE.value())) {
        throw new CommonException(AGILE_CANNOT_CONFIGURA_SUBPROJECTS);
    }
    // Every relationship this project participates in.
    ProjectRelationshipDTO example = new ProjectRelationshipDTO();
    example.setProjectId(projectId);
    List<Map<String, Date>> occupiedWindows = new ArrayList<>();
    for (ProjectRelationshipDTO rel : projectRelationshipMapper.select(example)) {
        // Skip relationships already under the target parent.
        if (rel.getParentId().equals(parentId)) {
            continue;
        }
        ProjectDTO relParent = projectMapper.selectByPrimaryKey(rel.getParentId());
        boolean enabledUnderProgram = relParent != null
                && relParent.getCategory().equalsIgnoreCase(ProjectCategory.PROGRAM.value())
                && rel.getEnabled();
        if (enabledUnderProgram) {
            Map<String, Date> window = new HashMap<>();
            window.put("start", rel.getStartDate());
            window.put("end", rel.getEndDate());
            occupiedWindows.add(window);
        }
    }
    return occupiedWindows;
}
/**
 * Batch create/update project relationships under one program, then publish a
 * PROJECT_RELATIONSHIP_ADD saga describing every change.
 * <p>
 * Entries without an id are inserted; entries with an id are updated. Each
 * processed entry is re-read from the DB and copied back into the input DTO,
 * and also appended to the saga payload with STATUS_ADD / STATUS_UPDATE.
 *
 * @param orgId organization id used as the saga ref/source id
 * @param list  relationships to create or update; must all share one parent
 * @return the processed relationships, refreshed from the database
 */
@Saga(code = PROJECT_RELATIONSHIP_ADD, description = "iam组合项目中新增子项目", inputSchemaClass = ProjectRelationshipInsertPayload.class)
@Override
@Transactional
public List<ProjectRelationshipDTO> batchUpdateRelationShipUnderProgram(Long orgId, List<ProjectRelationshipDTO> list) {
    // check list
    if (CollectionUtils.isEmpty(list)) {
        logger.info("The array for batch update relationships cannot be empty");
        return Collections.emptyList();
    }
    checkUpdateList(list);
    // partition into updates (id present) and creates (id absent)
    List<ProjectRelationshipDTO> updateNewList = new ArrayList<>();
    List<ProjectRelationshipDTO> insertNewList = new ArrayList<>();
    list.forEach(g -> {
        if (g.getId() == null) {
            insertNewList.add(g);
        } else {
            updateNewList.add(g);
        }
    });
    List<ProjectRelationshipDTO> returnList = new ArrayList<>();
    // build project relationship saga payload
    ProjectRelationshipInsertPayload sagaPayload = new ProjectRelationshipInsertPayload();
    // checkUpdateList guarantees all entries share the same parent, so the
    // first entry's parent describes the whole batch
    ProjectDTO parent = projectAssertHelper.projectNotExisted(list.get(0).getParentId());
    sagaPayload.setCategory(parent.getCategory());
    sagaPayload.setParentCode(parent.getCode());
    sagaPayload.setParentId(parent.getId());
    List<ProjectRelationshipInsertPayload.ProjectRelationship> relationships = new ArrayList<>();
    // batch insert
    insertNewList.forEach(relationshipDTO -> {
        checkGroupIsLegal(relationshipDTO);
        checkCategoryEnable(relationshipDTO);
        // insert
        if (projectRelationshipMapper.insertSelective(relationshipDTO) != 1) {
            throw new CommonException("error.create.project.group");
        }
        // refresh the DTO with DB-generated fields (id, audit columns)
        BeanUtils.copyProperties(projectRelationshipMapper.selectByPrimaryKey(relationshipDTO.getId()), relationshipDTO);
        returnList.add(relationshipDTO);
        if (categoryEnable && relationshipDTO.getEnabled()) {
            addProgramProject(relationshipDTO.getProjectId());
        }
        // fill the saga payload
        ProjectDTO project = projectAssertHelper.projectNotExisted(relationshipDTO.getProjectId());
        ProjectRelationshipInsertPayload.ProjectRelationship relationship
                = new ProjectRelationshipInsertPayload.ProjectRelationship(project.getId(), project.getCode(),
                relationshipDTO.getStartDate(), relationshipDTO.getEndDate(), relationshipDTO.getEnabled(), STATUS_ADD);
        relationships.add(relationship);
    });
    // batch update
    updateNewList.forEach(relationshipDTO -> {
        checkGroupIsLegal(relationshipDTO);
        // recompute the relationship's effective end date from its enabled flag
        updateProjectRelationshipEndDate(relationshipDTO);
        if (projectRelationshipMapper.selectByPrimaryKey(relationshipDTO.getId()) == null) {
            // unknown id: log and skip rather than fail the whole batch
            logger.warn("Batch update project relationship exists Nonexistent relationship,id is{}:{}", relationshipDTO.getId(), relationshipDTO);
        } else {
            checkCategoryEnable(relationshipDTO);
            ProjectRelationshipDTO projectRelationship = new ProjectRelationshipDTO();
            BeanUtils.copyProperties(relationshipDTO, projectRelationship);
            // update
            if (projectRelationshipMapper.updateByPrimaryKey(projectRelationship) != 1) {
                throw new CommonException("error.project.group.update");
            }
            projectRelationship = projectRelationshipMapper.selectByPrimaryKey(projectRelationship.getId());
            BeanUtils.copyProperties(projectRelationship, relationshipDTO);
            returnList.add(relationshipDTO);
            if (categoryEnable) {
                // keep the PROGRAM_PROJECT category mapping in sync with the enabled flag
                if (relationshipDTO.getEnabled()) {
                    addProgramProject(relationshipDTO.getProjectId());
                } else {
                    removeProgramProject(relationshipDTO.getProjectId());
                }
            }
            // fill the saga payload
            ProjectDTO project =
                    projectAssertHelper.projectNotExisted(relationshipDTO.getProjectId());
            ProjectRelationshipInsertPayload.ProjectRelationship relationship
                    = new ProjectRelationshipInsertPayload.ProjectRelationship(project.getId(), project.getCode(),
                    relationshipDTO.getStartDate(), relationshipDTO.getEndDate(), relationshipDTO.getEnabled(), STATUS_UPDATE);
            relationships.add(relationship);
        }
    });
    sagaPayload.setRelationships(relationships);
    // publish the saga at organization level, keyed by orgId
    producer.applyAndReturn(
            StartSagaBuilder
                    .newBuilder()
                    .withLevel(ResourceLevel.ORGANIZATION)
                    .withRefType("organization")
                    .withSagaCode(PROJECT_RELATIONSHIP_ADD),
            builder -> {
                builder
                        .withPayloadAndSerialize(sagaPayload)
                        .withRefId(String.valueOf(orgId))
                        .withSourceId(orgId);
                return sagaPayload;
            });
    return returnList;
}
/**
 * Maps the given project into the "PROGRAM_PROJECT" category.
 *
 * @param projectId id of the project joining a program
 * @throws CommonException if the mapping row cannot be inserted
 */
private void addProgramProject(Long projectId) {
    ProjectCategoryDTO categoryQuery = new ProjectCategoryDTO();
    categoryQuery.setCode("PROGRAM_PROJECT");
    // resolve the category row once; its id keys the mapping table
    ProjectCategoryDTO programCategory = projectCategoryMapper.selectOne(categoryQuery);
    ProjectMapCategoryDTO mapping = new ProjectMapCategoryDTO();
    mapping.setProjectId(projectId);
    mapping.setCategoryId(programCategory.getId());
    if (projectMapCategoryMapper.insert(mapping) != 1) {
        throw new CommonException("error.project.map.category.insert");
    }
}
/**
 * Removes the "PROGRAM_PROJECT" category mapping from the given project.
 *
 * @param projectId id of the project leaving a program
 * @throws CommonException if exactly one mapping row is not deleted
 */
private void removeProgramProject(Long projectId) {
    ProjectCategoryDTO categoryQuery = new ProjectCategoryDTO();
    categoryQuery.setCode("PROGRAM_PROJECT");
    // resolve the category row once; its id keys the mapping table
    ProjectCategoryDTO programCategory = projectCategoryMapper.selectOne(categoryQuery);
    ProjectMapCategoryDTO mapping = new ProjectMapCategoryDTO();
    mapping.setProjectId(projectId);
    mapping.setCategoryId(programCategory.getId());
    if (projectMapCategoryMapper.delete(mapping) != 1) {
        throw new CommonException("error.project.map.category.delete");
    }
}
/**
 * Adjusts the effective end date of a project relationship from its enabled
 * flag: enabling clears the end date; disabling stamps it with today's date
 * (time truncated to midnight via a yyyy-MM-dd format/parse round trip).
 *
 * @param projectRelationshipDTO relationship mutated in place
 */
private void updateProjectRelationshipEndDate(ProjectRelationshipDTO projectRelationshipDTO) {
    // enable: an active relationship has no end date
    if (projectRelationshipDTO.getEnabled()) {
        projectRelationshipDTO.setEndDate(null);
    } else {
        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd");
        try {
            // disable: end date is the moment of disabling, day precision
            projectRelationshipDTO.setEndDate(simpleDateFormat.parse(simpleDateFormat.format(new Date())));
        } catch (ParseException e) {
            // practically unreachable (parsing our own formatted output);
            // log at warn with the cause instead of a stack-less info line
            logger.warn("Relationship end time format failed", e);
        }
    }
}
/**
 * If the relationship's parent is a PROGRAM, records the parent as the
 * relationship's programId. The parent is looked up through a different
 * service depending on whether the category feature is enabled.
 *
 * @param relationshipDTO relationship whose programId may be set in place
 */
private void checkCategoryEnable(ProjectRelationshipDTO relationshipDTO) {
    if (categoryEnable) {
        // category feature on: resolve parent category via organizationProjectService
        if (organizationProjectService.selectCategoryByPrimaryKey(relationshipDTO.getParentId()).getCategory()
                .equalsIgnoreCase(ProjectCategory.PROGRAM.value())) {
            relationshipDTO.setProgramId(relationshipDTO.getParentId());
        }
    } else if (projectAssertHelper.projectNotExisted(relationshipDTO.getParentId()).getCategory()
            .equalsIgnoreCase(ProjectCategory.PROGRAM.value())) {
        // category feature off: resolve parent via the project assert helper
        relationshipDTO.setProgramId(relationshipDTO.getParentId());
    }
}
/**
 * Validates a batch-update list of project relationships:
 * <ul>
 *   <li>the list must not be empty (logged and skipped, not an error);</li>
 *   <li>missing start dates are filled with today (day precision);</li>
 *   <li>a disabled project cannot be added to or updated in a program;</li>
 *   <li>a project can be added to at most one ordinary program;</li>
 *   <li>a project can be (enabled-)updated in at most one ordinary program;</li>
 *   <li>all entries must belong to the same parent program.</li>
 * </ul>
 *
 * @param list project relationships to validate; may be mutated (start dates)
 * @throws CommonException when any of the above rules is violated
 */
private void checkUpdateList(List<ProjectRelationshipDTO> list) {
    // empty batch: nothing to validate
    if (list == null || list.isEmpty()) {
        logger.info("The array for batch update relationships cannot be empty");
        return;
    }
    // hoisted out of the loop: the formatter is loop-invariant
    SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd");
    list.forEach(r -> {
        // default a missing start date to today, truncated to day precision
        if (r.getStartDate() == null) {
            try {
                r.setStartDate(simpleDateFormat.parse(simpleDateFormat.format(new Date())));
            } catch (ParseException e) {
                logger.info("Relationship start time format failed");
            }
        }
        // a disabled project cannot be added to or updated in a program
        ProjectDTO project = projectAssertHelper.projectNotExisted(r.getProjectId());
        if (!project.getEnabled()) {
            throw new CommonException("error.insertOrUpdate.project.relationships.when.project.disabled", project.getName());
        }
        if (r.getId() == null) {
            // inserting: the project must not already belong to a program
            List<ProjectDTO> projectDTOS = relationshipMapper.selectProgramsByProjectId(r.getProjectId(), true);
            if (projectDTOS != null && !projectDTOS.isEmpty()) {
                throw new CommonException("error.insert.project.relationships.exists.one.program", projectDTOS.get(0).getName());
            }
        } else if (r.getEnabled()) {
            // enabling an update: the project must not belong to other programs
            List<ProjectDTO> projectDTOS = relationshipMapper.selectProgramsByProjectId(r.getProjectId(), true);
            if (projectDTOS != null && !projectDTOS.isEmpty()) {
                List<String> programs = projectDTOS.stream()
                        .map(ProjectDTO::getName)
                        .collect(Collectors.toList());
                throw new CommonException("error.update.project.relationships.exists.multiple.program", StringUtils.join(programs, ","));
            }
        }
    });
    // every entry must reference the same parent program
    Set<Long> collect = list.stream().map(ProjectRelationshipDTO::getParentId).collect(Collectors.toSet());
    if (collect.size() != 1) {
        throw new CommonException("error.update.project.relationships.must.be.under.the.same.program");
    }
}
/**
 * Validates that a relationship links a legal parent/child pair:
 * the parent must be a PROGRAM or ANALYTICAL project (not an agile project),
 * and the child must be an AGILE project (a program cannot be a sub-project).
 * Both sides are asserted to exist during lookup.
 *
 * @param projectRelationshipDTO relationship to validate
 * @throws CommonException when either side has an illegal category
 */
private void checkGroupIsLegal(ProjectRelationshipDTO projectRelationshipDTO) {
    ProjectDTO parent;
    // lookup path differs depending on whether the category feature is enabled
    if (categoryEnable) {
        parent = organizationProjectService.selectCategoryByPrimaryKey(projectRelationshipDTO.getParentId());
    } else {
        parent = projectAssertHelper.projectNotExisted(projectRelationshipDTO.getParentId());
    }
    // parent must be a program (ordinary or analytical); agile projects cannot have children
    if (!parent.getCategory().equalsIgnoreCase(ProjectCategory.PROGRAM.value()) &&
            !parent.getCategory().equalsIgnoreCase(ProjectCategory.ANALYTICAL.value())) {
        throw new CommonException(AGILE_CANNOT_CONFIGURA_SUBPROJECTS);
    }
    ProjectDTO son;
    if (categoryEnable) {
        son = organizationProjectService.selectCategoryByPrimaryKey(projectRelationshipDTO.getProjectId());
    } else {
        son = projectAssertHelper.projectNotExisted(projectRelationshipDTO.getProjectId());
    }
    // child must be an agile project; a program cannot be nested as a sub-project
    if (!son.getCategory().equalsIgnoreCase(ProjectCategory.AGILE.value())) {
        throw new CommonException(PROGRAM_CANNOT_BE_CONFIGURA_SUBPROJECTS);
    }
}
/**
 * Checks whether the candidate relationship's [startDate, endDate) interval
 * overlaps any other enabled relationship of the same project under a
 * PROGRAM parent. A null end date is treated as open-ended.
 *
 * @param needCheckDTO relationship whose dates are being validated
 * @return result with result=true when no conflict; on conflict, result=false
 *         plus the conflicting program's code and name (last conflict wins
 *         since iteration continues over all DB rows)
 */
private RelationshipCheckDTO checkDate(ProjectRelationshipDTO needCheckDTO) {
    // db list: all existing relationships for the same project
    ProjectRelationshipDTO checkDTO = new ProjectRelationshipDTO();
    checkDTO.setProjectId(needCheckDTO.getProjectId());
    List<ProjectRelationshipDTO> dbList = projectRelationshipMapper.select(checkDTO);
    long start = needCheckDTO.getStartDate().getTime();
    // build result, optimistically assuming no conflict
    RelationshipCheckDTO result = new RelationshipCheckDTO();
    result.setResult(true);
    // check each existing row for interval overlap
    dbList.forEach(r -> {
        ProjectDTO parent = projectAssertHelper.projectNotExisted(r.getParentId());
        // only compare against other enabled rows under a PROGRAM parent
        if (!r.getId().equals(needCheckDTO.getId()) && r.getEnabled()
                && parent.getCategory().equalsIgnoreCase(ProjectCategory.PROGRAM.value())) {
            long min = r.getStartDate().getTime();
            Boolean flag = true;
            if (needCheckDTO.getEndDate() != null) {
                long end = needCheckDTO.getEndDate().getTime();
                if (r.getEndDate() != null) {
                    // both bounded: conflict unless candidate lies fully before or after
                    long max = r.getEndDate().getTime();
                    if (!(start >= max || end <= min)) {
                        flag = false;
                    }
                } else {
                    // existing open-ended: conflict when candidate ends after it starts
                    if (end > min) {
                        flag = false;
                    }
                }
            } else {
                if (r.getEndDate() != null) {
                    // candidate open-ended: conflict when it starts before existing ends
                    long max = r.getEndDate().getTime();
                    if (start < max) {
                        flag = false;
                    }
                } else {
                    // both open-ended: always a conflict
                    flag = false;
                }
            }
            if (!flag) {
                result.setResult(false);
                result.setProjectCode(parent.getCode());
                result.setProjectName(parent.getName());
                logger.warn("Project associated time is not legal,relationship:{},conflict project name:{},code:{}",
                        needCheckDTO, result.getProjectName(), result.getProjectCode());
                // 'return' only exits this lambda iteration; the loop continues
                return;
            }
        }
    });
    // return
    return result;
}
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/OrganizationServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import io.choerodon.asgard.saga.annotation.Saga;
import io.choerodon.asgard.saga.dto.StartInstanceDTO;
import io.choerodon.asgard.saga.feign.SagaClient;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.iam.ResourceLevel;
import io.choerodon.core.oauth.CustomUserDetails;
import io.choerodon.iam.api.dto.OrgSharesDTO;
import io.choerodon.iam.api.dto.OrganizationSimplifyDTO;
import io.choerodon.iam.api.dto.payload.OrganizationEventPayload;
import io.choerodon.iam.api.dto.payload.OrganizationPayload;
import io.choerodon.iam.app.service.OrganizationService;
import io.choerodon.iam.app.service.UserService;
import io.choerodon.iam.infra.asserts.DetailsHelperAssert;
import io.choerodon.iam.infra.asserts.OrganizationAssertHelper;
import io.choerodon.iam.infra.dto.OrganizationDTO;
import io.choerodon.iam.infra.dto.ProjectDTO;
import io.choerodon.iam.infra.dto.RoleDTO;
import io.choerodon.iam.infra.dto.UserDTO;
import io.choerodon.iam.infra.exception.UpdateExcetion;
import io.choerodon.iam.infra.feign.AsgardFeignClient;
import io.choerodon.iam.infra.mapper.OrganizationMapper;
import io.choerodon.iam.infra.mapper.ProjectMapper;
import io.choerodon.iam.infra.mapper.RoleMapper;
import io.choerodon.iam.infra.mapper.UserMapper;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;
import java.util.*;
import static io.choerodon.iam.infra.common.utils.SagaTopic.Organization.*;
/**
* @author wuguokai
*/
@Component
public class OrganizationServiceImpl implements OrganizationService {

    private AsgardFeignClient asgardFeignClient;
    // when true, organization changes are broadcast to devops via sagas
    private boolean devopsMessage;
    private SagaClient sagaClient;
    private final ObjectMapper mapper = new ObjectMapper();
    private UserService userService;
    private OrganizationAssertHelper organizationAssertHelper;
    private ProjectMapper projectMapper;
    private UserMapper userMapper;
    private OrganizationMapper organizationMapper;
    private RoleMapper roleMapper;

    public OrganizationServiceImpl(@Value("${choerodon.devops.message:false}") Boolean devopsMessage,
                                   SagaClient sagaClient,
                                   UserService userService,
                                   AsgardFeignClient asgardFeignClient,
                                   OrganizationAssertHelper organizationAssertHelper,
                                   ProjectMapper projectMapper,
                                   UserMapper userMapper,
                                   OrganizationMapper organizationMapper,
                                   RoleMapper roleMapper) {
        this.devopsMessage = devopsMessage;
        this.sagaClient = sagaClient;
        this.userService = userService;
        this.asgardFeignClient = asgardFeignClient;
        this.organizationAssertHelper = organizationAssertHelper;
        this.projectMapper = projectMapper;
        this.userMapper = userMapper;
        this.organizationMapper = organizationMapper;
        this.roleMapper = roleMapper;
    }

    /**
     * Loads an organization with its projects, project count, and (when the
     * owner user exists) the owner's contact details.
     */
    @Override
    public OrganizationDTO queryOrganizationById(Long organizationId) {
        OrganizationDTO organizationDTO = organizationAssertHelper.organizationNotExisted(organizationId);
        ProjectDTO example = new ProjectDTO();
        example.setOrganizationId(organizationId);
        List<ProjectDTO> projects = projectMapper.select(example);
        organizationDTO.setProjects(projects);
        organizationDTO.setProjectCount(projects.size());
        Long userId = organizationDTO.getUserId();
        UserDTO user = userMapper.selectByPrimaryKey(userId);
        if (user != null) {
            organizationDTO.setOwnerLoginName(user.getLoginName());
            organizationDTO.setOwnerRealName(user.getRealName());
            organizationDTO.setOwnerPhone(user.getPhone());
            organizationDTO.setOwnerEmail(user.getEmail());
        }
        return organizationDTO;
    }

    /**
     * Updates an organization and, when devops messaging is enabled, fires the
     * ORG_UPDATE saga with the refreshed organization data.
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    @Saga(code = ORG_UPDATE, description = "iam更新组织", inputSchemaClass = OrganizationPayload.class)
    public OrganizationDTO updateOrganization(Long organizationId, OrganizationDTO organizationDTO, String resourceLevel, Long sourceId) {
        preUpdate(organizationId, organizationDTO);
        organizationDTO = doUpdate(organizationDTO);
        if (devopsMessage) {
            OrganizationPayload payload = new OrganizationPayload();
            payload
                    .setId(organizationDTO.getId())
                    .setName(organizationDTO.getName())
                    .setCode(organizationDTO.getCode())
                    .setUserId(organizationDTO.getUserId())
                    .setAddress(organizationDTO.getAddress())
                    .setImageUrl(organizationDTO.getImageUrl());
            try {
                String input = mapper.writeValueAsString(payload);
                sagaClient.startSaga(ORG_UPDATE, new StartInstanceDTO(input, "organization", organizationId + "", resourceLevel, sourceId));
            } catch (JsonProcessingException e) {
                throw new CommonException("error.organization.update.payload.to.string");
            } catch (Exception e) {
                throw new CommonException("error.organization.update.event", e);
            }
        }
        return organizationDTO;
    }

    // Persists the update and returns the re-read row, failing fast if no row changed.
    private OrganizationDTO doUpdate(OrganizationDTO organizationDTO) {
        if (organizationMapper.updateByPrimaryKeySelective(organizationDTO) != 1) {
            throw new UpdateExcetion("error.organization.update");
        }
        return organizationMapper.selectByPrimaryKey(organizationDTO);
    }

    // Normalizes the incoming DTO before update; asserts the org exists.
    private void preUpdate(Long organizationId, OrganizationDTO organizationDTO) {
        OrganizationDTO organization = organizationAssertHelper.organizationNotExisted(organizationId);
        organizationDTO.setId(organizationId);
        // code and creator are immutable: restore them from the stored row
        organizationDTO.setUserId(organization.getUserId());
        organizationDTO.setCode(organization.getCode());
        if (ObjectUtils.isEmpty(organizationDTO.getEnabled())) {
            organizationDTO.setEnabled(true);
        }
    }

    /**
     * Same as {@link #queryOrganizationById} but scoped to the current user:
     * projects are restricted to the user's and the user's org roles are attached.
     */
    @Override
    public OrganizationDTO queryOrganizationWithRoleById(Long organizationId) {
        CustomUserDetails customUserDetails = DetailsHelperAssert.userDetailNotExisted();
        OrganizationDTO dto = queryOrganizationById(organizationId);
        long userId = customUserDetails.getUserId();
        List<ProjectDTO> projects = projectMapper.selectUserProjectsUnderOrg(userId, organizationId, null);
        dto.setProjects(projects);
        dto.setProjectCount(projects.size());
        List<RoleDTO> roles =
                roleMapper.queryRolesInfoByUser(ResourceType.ORGANIZATION.value(), organizationId, userId);
        dto.setRoles(roles);
        return dto;
    }

    /** Pages organizations matched by example fields plus a fulltext param. */
    @Override
    public PageInfo<OrganizationDTO> pagingQuery(OrganizationDTO organizationDTO, PageRequest pageRequest, String param) {
        return PageHelper.startPage(pageRequest.getPage(), pageRequest.getSize()).doSelectPageInfo(() -> organizationMapper.fulltextSearch(organizationDTO, param));
    }

    /** Enables an organization and broadcasts the ORG_ENABLE event. */
    @Override
    @Saga(code = ORG_ENABLE, description = "iam启用组织", inputSchemaClass = OrganizationEventPayload.class)
    public OrganizationDTO enableOrganization(Long organizationId, Long userId) {
        OrganizationDTO organization = organizationAssertHelper.organizationNotExisted(organizationId);
        organization.setEnabled(true);
        return updateAndSendEvent(organization, ORG_ENABLE, userId);
    }

    /** Disables an organization and broadcasts the ORG_DISABLE event. */
    @Override
    @Saga(code = ORG_DISABLE, description = "iam停用组织", inputSchemaClass = OrganizationEventPayload.class)
    public OrganizationDTO disableOrganization(Long organizationId, Long userId) {
        OrganizationDTO organizationDTO = organizationAssertHelper.organizationNotExisted(organizationId);
        organizationDTO.setEnabled(false);
        return updateAndSendEvent(organizationDTO, ORG_DISABLE, userId);
    }

    // Shared enable/disable tail: persist, then (if devops messaging is on)
    // start the saga, tell asgard, and notify every member of the org.
    private OrganizationDTO updateAndSendEvent(OrganizationDTO organization, String consumerType, Long userId) {
        OrganizationDTO organizationDTO = doUpdate(organization);
        if (devopsMessage) {
            OrganizationEventPayload payload = new OrganizationEventPayload();
            payload.setOrganizationId(organization.getId());
            //saga
            try {
                String input = mapper.writeValueAsString(payload);
                sagaClient.startSaga(consumerType, new StartInstanceDTO(input, "organization", payload.getOrganizationId() + ""));
            } catch (Exception e) {
                throw new CommonException("error.organizationService.enableOrDisable.event", e);
            }
            // notify asgard to disable the org's scheduled tasks
            asgardFeignClient.disableOrg(organization.getId());
            // send a notice to every user under the organization
            List<Long> userIds = organizationMapper.listMemberIds(organization.getId(), "organization");
            Map<String, Object> params = new HashMap<>();
            params.put("organizationName", organizationDTO.getName());
            if (ORG_DISABLE.equals(consumerType)) {
                userService.sendNotice(userId, userIds, "disableOrganization", params, organization.getId());
            } else if (ORG_ENABLE.equals(consumerType)) {
                userService.sendNotice(userId, userIds, "enableOrganization", params, organization.getId());
            }
        }
        return organizationDTO;
    }

    /** Validates that the org code is present and not already taken. */
    @Override
    public void check(OrganizationDTO organization) {
        Boolean checkCode = !StringUtils.isEmpty(organization.getCode());
        if (!checkCode) {
            throw new CommonException("error.organization.code.empty");
        } else {
            checkCode(organization);
        }
    }

    /** Pages users under an organization filtered by id/email/param. */
    @Override
    public PageInfo<UserDTO> pagingQueryUsersInOrganization(Long organizationId, Long userId, String email, PageRequest pageRequest, String param) {
        return PageHelper
                .startPage(pageRequest.getPage(), pageRequest.getSize())
                .doSelectPageInfo(() -> userMapper.selectUsersByLevelAndOptions(ResourceLevel.ORGANIZATION.value(), organizationId, userId, email, param));
    }

    /** Bulk lookup by id; an empty id set yields an empty list (no SQL). */
    @Override
    public List<OrganizationDTO> queryByIds(Set<Long> ids) {
        if (ids.isEmpty()) {
            return new ArrayList<>();
        } else {
            return organizationMapper.selectByIds(ids);
        }
    }

    // Uniqueness check for the org code. On create, any row with the code is a
    // conflict; on update, a row with the code owned by a DIFFERENT id is.
    private void checkCode(OrganizationDTO organization) {
        Boolean createCheck = StringUtils.isEmpty(organization.getId());
        String code = organization.getCode();
        OrganizationDTO organizationDTO = new OrganizationDTO();
        organizationDTO.setCode(code);
        if (createCheck) {
            Boolean existed = organizationMapper.selectOne(organizationDTO) != null;
            if (existed) {
                throw new CommonException("error.organization.code.exist");
            }
        } else {
            Long id = organization.getId();
            OrganizationDTO dto = organizationMapper.selectOne(organizationDTO);
            Boolean existed = dto != null && !id.equals(dto.getId());
            if (existed) {
                throw new CommonException("error.organization.code.exist");
            }
        }
    }

    /** Pages a lightweight id+name projection of every organization. */
    @Override
    public PageInfo<OrganizationSimplifyDTO> getAllOrgs(PageRequest pageRequest) {
        return PageHelper
                .startPage(pageRequest.getPage(), pageRequest.getSize())
                .doSelectPageInfo(() -> organizationMapper.selectAllOrgIdAndName());
    }

    /** Pages the given org ids with name/code/enabled filters; empty ids short-circuit. */
    @Override
    public PageInfo<OrgSharesDTO> pagingSpecified(Set<Long> orgIds, String name, String code, Boolean enabled, String params, PageRequest pageRequest) {
        if (CollectionUtils.isEmpty(orgIds)) {
            return new PageInfo<>();
        }
        return PageHelper.startPage(pageRequest.getPage(), pageRequest.getSize(), pageRequest.getSort().toSql())
                .doSelectPageInfo(() -> organizationMapper.selectSpecified(orgIds, name, code, enabled, params));
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/payload/UserMemberEventPayload.java<|end_filename|>
package io.choerodon.iam.api.dto.payload;
import java.util.Set;
/**
* @author flyleft
* @date 2018/4/10
*/
public class UserMemberEventPayload {
private Long userId;
private String username;
private Long resourceId;
private String resourceType;
private Set<String> roleLabels;
private String uuid;
public Set<String> getRoleLabels() {
return roleLabels;
}
public void setRoleLabels(Set<String> roleLabels) {
this.roleLabels = roleLabels;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public Long getResourceId() {
return resourceId;
}
public void setResourceId(Long resourceId) {
this.resourceId = resourceId;
}
public String getResourceType() {
return resourceType;
}
public void setResourceType(String resourceType) {
this.resourceType = resourceType;
}
public String getUuid() {
return uuid;
}
public void setUuid(String uuid) {
this.uuid = uuid;
}
public Long getUserId() {
return userId;
}
public void setUserId(Long userId) {
this.userId = userId;
}
}
<|start_filename|>src/main/resources/script/db/fd_project.groovy<|end_filename|>
package script.db
// Liquibase changelog for the FD_PROJECT table. Changeset ids and bodies are
// checksummed by Liquibase and must never be edited in place — evolve the
// schema by appending new changesets only.
databaseChangeLog(logicalFilePath: 'script/db/fd_project.groovy') {
    // Initial table: id, name, unique code, owning org, enabled flag, audit columns.
    changeSet(author: '<EMAIL>', id: '2018-03-21-fd-project') {
        // Sequence-based DBs (e.g. Oracle) get an explicit sequence for the PK.
        if (helper.dbType().isSupportSequence()) {
            createSequence(sequenceName: 'FD_PROJECT_S', startValue: "1")
        }
        createTable(tableName: "FD_PROJECT") {
            column(name: 'ID', type: 'BIGINT UNSIGNED', autoIncrement: true, remarks: '表ID,主键,供其他表做外键,unsigned bigint、单表时自增、步长为 1') {
                constraints(primaryKey: true, primaryKeyName: 'PK_FD_PROJECT')
            }
            column(name: 'NAME', type: 'VARCHAR(32)', remarks: '项目名') {
                constraints(nullable: false)
            }
            column(name: 'CODE', type: 'VARCHAR(14)', remarks: '项目code') {
                constraints(nullable: false, unique: true, uniqueConstraintName: 'UK_FD_PROJECT_U1')
            }
            column(name: 'ORGANIZATION_ID', type: 'BIGINT UNSIGNED', remarks: '组织ID')
            column(name: 'IS_ENABLED', type: 'TINYINT UNSIGNED', defaultValue: "1", remarks: '是否启用。1启用,0未启用') {
                constraints(nullable: false)
            }
            column(name: "OBJECT_VERSION_NUMBER", type: "BIGINT UNSIGNED", defaultValue: "1") {
                constraints(nullable: true)
            }
            column(name: "CREATED_BY", type: "BIGINT UNSIGNED", defaultValue: "0") {
                constraints(nullable: true)
            }
            column(name: "CREATION_DATE", type: "DATETIME", defaultValueComputed: "CURRENT_TIMESTAMP")
            column(name: "LAST_UPDATED_BY", type: "BIGINT UNSIGNED", defaultValue: "0") {
                constraints(nullable: true)
            }
            column(name: "LAST_UPDATE_DATE", type: "DATETIME", defaultValueComputed: "CURRENT_TIMESTAMP")
        }
        // Code is unique per organization (composite), in addition to the global U1 above.
        addUniqueConstraint(tableName: 'FD_PROJECT', columnNames: 'CODE, ORGANIZATION_ID', constraintName: 'UK_FD_PROJECT_U2')
    }
    // Drop the global code uniqueness; uniqueness is per-organization only (U2 remains).
    changeSet(author: '<EMAIL>', id: '2018-05-24-drop-unique') {
        dropUniqueConstraint(constraintName: "UK_FD_PROJECT_U1", tableName: "FD_PROJECT")
    }
    // Optional project type.
    changeSet(author: '<EMAIL>', id: '2018-11-27-fd-project-add-type') {
        addColumn(tableName: 'FD_PROJECT') {
            column(name: 'TYPE', type: 'VARCHAR(64)', remarks: '项目类型')
        }
    }
    // Project icon URL.
    changeSet(author: '<EMAIL>', id: '2018-12-18-fd-project-add') {
        addColumn(tableName: 'FD_PROJECT') {
            column(name: 'IMAGE_URL', type: 'VARCHAR(255)', remarks: '项目图标url', afterColumn: 'IS_ENABLED')
        }
    }
    // Project category (AGILE / PROGRAM / ANALYTICAL), defaults to AGILE.
    changeSet(author: '<EMAIL>', id: '2019-03-04-fd-project-add-category') {
        addColumn(tableName: 'FD_PROJECT') {
            column(name: 'CATEGORY', type: 'VARCHAR(64)', remarks: '项目类别:AGILE(敏捷项目),PROGRAM(普通项目组),ANALYTICAL(分析型项目群)', afterColumn: 'TYPE', defaultValue: 'AGILE')
        }
    }
    // Table-level remark.
    changeSet(author: 'superlee', id: '2019-07-18-fd-project-add-remark') {
        setTableRemarks(tableName:"FD_PROJECT",remarks: "项目表")
    }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/api/eventhandler/NotifyListenerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.eventhandler
import com.fasterxml.jackson.databind.ObjectMapper
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.api.dto.payload.UserEventPayload
import io.choerodon.iam.app.service.UserService
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
* @author dengyouquan* */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class NotifyListenerSpec extends Specification {
    // listener under test wired with a mocked UserService so we can verify interactions
    private UserService userService=Mock(UserService)
    private NotifyListener notifyListener = new NotifyListener(userService)
    private final ObjectMapper mapper = new ObjectMapper()

    // Verifies that consuming a serialized user-created event triggers exactly
    // one notification through UserService.
    def "Create"() {
        given: "build the request payload"
        UserEventPayload eventPayload = new UserEventPayload()
        eventPayload.setFromUserId(1L)
        eventPayload.setOrganizationId(1L)
        List<UserEventPayload> userEventPayloads = new ArrayList<>()
        userEventPayloads.add(eventPayload)
        // the listener consumes the JSON form, as it would from the message bus
        String message = mapper.writeValueAsString(userEventPayloads)
        when: "invoke the listener"
        notifyListener.create(message)
        then: "verify the interaction"
        1 * userService.sendNotice(_, _, _, _, _)
    }
}
<|start_filename|>react/src/app/iam/containers/global/member-role/MemberRole.js<|end_filename|>
import React, { Component } from 'react';
import get from 'lodash/get';
import { findDOMNode } from 'react-dom';
import { inject, observer } from 'mobx-react';
import { Button, Form, Modal, Progress, Select, Table, Tooltip, Upload, Spin, Radio } from 'choerodon-ui';
import { withRouter } from 'react-router-dom';
import { Content, Header, Page, Permission } from '@choerodon/boot';
import { FormattedMessage, injectIntl } from 'react-intl';
import classnames from 'classnames';
import MemberRoleType, { pageSize } from './MemberRoleType';
import './MemberRole.scss';
import '../../../common/ConfirmModal.scss';
// Module-level debounce handle shared by the component's async filtering.
let timer;
// Tracks whether the role-select filter box is currently empty.
let selectFilterEmpty = true;
// Shorthand aliases for choerodon-ui sub-components.
const { Sidebar } = Modal;
const FormItem = Form.Item;
const Option = Select.Option;
const RadioGroup = Radio.Group;
// Form layout used by numeric form items (label above wide control).
const FormItemNumLayout = {
  labelCol: {
    xs: { span: 24 },
    sm: { span: 100 },
  },
  wrapperCol: {
    xs: { span: 24 },
    sm: { span: 10 },
  },
};
// i18n message-id prefix for this page.
const intlPrefix = 'memberrole';
@Form.create({})
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class MemberRole extends Component {
state = this.getInitState();

// Builds the full initial state; also re-used by reload() to reset the page.
getInitState() {
  const { MemberRoleStore, AppState } = this.props;
  MemberRoleStore.loadCurrentMenuType(AppState.currentMenuType, AppState.getUserId);
  return {
    selectLoading: true,
    loading: true,
    submitting: false,
    sidebar: false,
    selectType: '',
    showMember: true,
    expandedKeys: [], // expanded role rows
    roleIds: [],
    overflow: false,
    fileLoading: false,
    createMode: 'user',
    selectRoleMemberKeys: [],
    roleData: MemberRoleStore.getRoleData, // all roles
    roleMemberDatas: MemberRoleStore.getRoleMemberDatas, // user/role table datasource
    memberDatas: [], // user/member table datasource
    currentMemberData: [], // role assignments of the current member
    selectMemberRoles: {},
    selectRoleMembers: [],
    roleMemberFilters: {}, // user/role table filters
    roleMemberParams: [], // user/role table params
    memberRoleFilters: {}, // user/member table filters
    params: [], // user/member table params
    memberRolePageInfo: { // user/member table pagination
      current: 1,
      total: 0,
      pageSize,
    },
    roleMemberFilterRole: [],
    clientMemberDatas: [],
    cilentRoleMemberDatas: MemberRoleStore.getClientRoleMemberDatas,
    clientMemberRolePageInfo: { // client/member table pagination
      current: 1,
      total: 0,
      pageSize,
    },
    clientMemberRoleFilters: {},
    clientMemberParams: [],
    clientRoleMemberFilters: {},
    clientParams: [],
    clientRoleMemberParams: [],
    selectClientMemberRoles: {},
    selectClientRoleMembers: [],
    clientRoleMemberFilterRole: [],
  };
}

// Fetches data for whichever mode (user/client) the store is currently in.
init() {
  const { MemberRoleStore } = this.props;
  this.initMemberRole();
  if (MemberRoleStore.currentMode === 'user') {
    this.roles.fetch();
  } else {
    this.roles.fetchClient();
  }
}

// load data before first render
componentWillMount() {
  this.init();
}

componentDidMount() {
  this.updateSelectContainer();
}

componentDidUpdate() {
  const { MemberRoleStore } = this.props;
  this.updateSelectContainer();
  // mirror local table state back into the store after every render
  MemberRoleStore.setRoleMemberDatas(this.state.roleMemberDatas);
  MemberRoleStore.setRoleData(this.state.roleData);
}

componentWillUnmount() {
  // clear both the interval and the module-level debounce timer
  clearInterval(this.timer);
  clearTimeout(timer);
  const { MemberRoleStore } = this.props;
  MemberRoleStore.setRoleMemberDatas([]);
  MemberRoleStore.setRoleData([]);
  MemberRoleStore.setCurrentMode('user');
}

// Creates the MemberRoleType helper that owns all data-fetch logic.
initMemberRole() {
  this.roles = new MemberRoleType(this);
}
/**
 * Switch between user mode and client mode, then reload the page data.
 * @param value target mode ('user' | 'client')
 */
changeMode = (value) => {
  const { MemberRoleStore } = this.props;
  MemberRoleStore.setCurrentMode(value);
  this.reload();
}

// Syncs state.overflow with whether the sidebar body actually overflows,
// avoiding a state update (and re-render) when nothing changed.
updateSelectContainer() {
  const body = this.sidebarBody;
  if (body) {
    const { overflow } = this.state;
    const bodyOverflow = body.clientHeight < body.scrollHeight;
    if (bodyOverflow !== overflow) {
      this.setState({
        overflow: bodyOverflow,
      });
    }
  }
}

// Resets the whole page to its initial state and refetches.
reload = () => {
  this.setState(this.getInitState(), () => {
    this.init();
  });
};
// Thin shortcut around react-intl's formatMessage: message id plus optional values.
formatMessage = (id, values = {}) => this.props.intl.formatMessage({ id }, values);
// Opens the sidebar with a fresh form and the role ids for the current mode.
openSidebar = () => {
  this.props.form.resetFields();
  this.setState({
    roleIds: this.initFormRoleIds(),
    sidebar: true,
  });
};

closeSidebar = () => {
  this.setState({ sidebar: false });
};

// Initial role-id list for the sidebar form: the member's current roles when
// editing, otherwise a single empty slot.
initFormRoleIds() {
  const { selectType, currentMemberData } = this.state;
  let roleIds = [undefined];
  if (selectType === 'edit') {
    roleIds = currentMemberData.roles.map(({ id }) => id);
  }
  return roleIds;
}
/**
 * Batch-remove roles for the current selection, after a confirm dialog whose
 * wording depends on the mode (user/client) and view (member/role).
 */
deleteRoleByMultiple = () => {
  const { selectMemberRoles, showMember, selectRoleMembers } = this.state;
  const { MemberRoleStore } = this.props;
  let content;
  if (MemberRoleStore.currentMode === 'user' && showMember) {
    content = 'memberrole.remove.select.all.content';
  } else if (MemberRoleStore.currentMode === 'user' && !showMember) {
    content = 'memberrole.remove.select.content';
  } else if (MemberRoleStore.currentMode === 'client' && showMember) {
    content = 'memberrole.remove.select.all.client.content';
  } else {
    content = 'memberrole.remove.select.client.content';
  }
  Modal.confirm({
    className: 'c7n-iam-confirm-modal',
    title: this.formatMessage('memberrole.remove.title'),
    content: this.formatMessage(content),
    onOk: () => {
      if (showMember) {
        // member view: selection is already keyed by member id
        return this.deleteRolesByIds(selectMemberRoles);
      } else {
        // role view: regroup the flat selection into { roleId: [memberIds] }
        const data = {};
        selectRoleMembers.forEach(({ id, roleId }) => {
          if (!data[roleId]) {
            data[roleId] = [];
          }
          data[roleId].push(id);
        });
        return this.deleteRolesByIds(data);
      }
    },
  });
};

/**
 * Remove ALL roles of a single member (user or client) after confirmation.
 * @param record member row being deleted
 */
handleDelete = (record) => {
  const { MemberRoleStore } = this.props;
  const isUsersMode = MemberRoleStore.currentMode === 'user';
  let content;
  if (isUsersMode) {
    content = this.formatMessage('memberrole.remove.all.content', { name: record.loginName });
  } else {
    content = this.formatMessage('memberrole.remove.all.client.content', { name: record.name });
  }
  Modal.confirm({
    className: 'c7n-iam-confirm-modal',
    title: this.formatMessage('memberrole.remove.title'),
    content,
    onOk: () => this.deleteRolesByIds({
      [record.id]: record.roles.map(({ id }) => id),
    }),
  });
};

// Remove ONE role from one member (role-view row) after confirmation.
deleteRoleByRole = (record) => {
  const { MemberRoleStore } = this.props;
  const isUsersMode = MemberRoleStore.currentMode === 'user';
  let content;
  if (isUsersMode) {
    content = this.formatMessage('memberrole.remove.content', {
      member: record.loginName,
      role: record.roleName,
    });
  } else {
    content = this.formatMessage('memberrole.remove.client.content', {
      member: record.name,
      role: record.roleName,
    });
  }
  Modal.confirm({
    className: 'c7n-iam-confirm-modal',
    title: this.formatMessage('memberrole.remove.title'),
    content,
    onOk: () => this.deleteRolesByIds({ [record.roleId]: [record.id] }),
  });
};

// Shared deletion tail: posts the grouped ids, prompts the outcome, clears
// the selection, and refetches data for the current mode.
deleteRolesByIds = (data) => {
  const { showMember } = this.state;
  const { MemberRoleStore } = this.props;
  const isUsersMode = MemberRoleStore.currentMode === 'user';
  const body = {
    view: showMember ? 'userView' : 'roleView',
    memberType: isUsersMode ? 'user' : 'client',
    data,
  };
  return this.roles.deleteRoleMember(body).then(({ failed, message }) => {
    if (failed) {
      Choerodon.prompt(message);
    } else {
      Choerodon.prompt(this.formatMessage('remove.success'));
      this.setState({
        selectRoleMemberKeys: [],
        selectMemberRoles: {},
      });
      if (isUsersMode) {
        this.roles.fetch();
      } else {
        this.roles.fetchClient();
      }
    }
  });
};
// Sidebar title keyed on the current action (create / edit / upload).
getSidebarTitle() {
  const { selectType } = this.state;
  if (selectType === 'create') {
    return <FormattedMessage id="memberrole.add" />;
  } else if (selectType === 'edit') {
    return <FormattedMessage id="memberrole.modify" />;
  } else if (selectType === 'upload') {
    return <FormattedMessage id="memberrole.upload" />;
  }
}
getUploadOkText = () => {
  // OK-button caption for the upload sidebar: file uploading → server-side
  // import running → idle.
  const { fileLoading } = this.state;
  const { MemberRoleStore } = this.props;
  if (fileLoading === true) {
    return '上传中';
  }
  return MemberRoleStore.getUploading ? '导入中' : '上传';
};
// Sidebar body for the Excel batch-import mode: the import summary panel plus
// a hidden <Upload> input that is clicked programmatically (see `upload`).
renderUpload = () => (
  <Content
    {...this.getHeader()}
  >
    <div>
      <div style={{ width: '512px' }}>
        {this.getUploadInfo()}
      </div>
      <div style={{ display: 'none' }}>
        <Upload {...this.getUploadProps()}>
          <Button className="c7n-user-upload-hidden" />
        </Upload>
      </div>
    </div>
  </Content>
);
getSidebarContent() {
const { roleData = [], roleIds, selectType } = this.state;
const disabled = roleIds.findIndex((id, index) => id === undefined) !== -1
|| !roleData.filter(({ enabled, id }) => enabled && roleIds.indexOf(id) === -1).length;
return (
<Content
{...this.getHeader()}
>
{this.getForm()}
{this.getAddOtherBtn(disabled)}
</Content>);
}
getHeader() {
const { selectType, currentMemberData } = this.state;
const { values } = this.roles;
const modify = selectType === 'edit';
return {
className: 'sidebar-content',
ref: this.saveSideBarRef,
code: this.getHeaderCode(),
values: modify ? { name: currentMemberData.loginName || currentMemberData.name } : values,
};
}
getHeaderCode = () => {
  // Message code for the sidebar header, derived from the operation type.
  // Editing a client gets a dedicated ".client" suffix.
  const { selectType } = this.state;
  const { code } = this.roles;
  const { MemberRoleStore } = this.props;
  const suffixByType = { edit: 'modify', create: 'add' };
  const codeType = suffixByType[selectType] || 'upload';
  if (selectType === 'edit' && MemberRoleStore.currentMode !== 'user') {
    return `${code}.${codeType}.client`;
  }
  return `${code}.${codeType}`;
};
// Keeps a handle on the sidebar's DOM parent so dropdown popups can be
// mounted inside it when the sidebar overflows (see the getPopupContainer
// callbacks in the form selects).
saveSideBarRef = (node) => {
  if (node) {
    /* eslint-disable-next-line */
    this.sidebarBody = findDOMNode(node).parentNode;
  }
};
/**
 * Renders the create / edit form: create mode shows the member-type radio,
 * the member picker and the role selects; edit mode only the role selects.
 * @returns {*}
 */
getForm = () => {
  const { selectType } = this.state;
  return selectType === 'create' ? (
    <Form layout="vertical">
      {this.getModeDom()}
      {this.getProjectNameDom()}
      {this.getRoleFormItems()}
    </Form>
  ) : (
    <Form layout="vertical">
      {this.getRoleFormItems()}
    </Form>
  );
};
/**
 * Renders the member-type radio group (user / client), shown only in create
 * mode; switching it resets the form via changeCreateMode.
 * @returns {null}
 */
getModeDom() {
  const { selectType } = this.state;
  const { form, MemberRoleStore, intl } = this.props;
  const { getFieldDecorator } = form;
  return selectType === 'create' ? (
    <FormItem
      {...FormItemNumLayout}
    >
      {getFieldDecorator('mode', {
        initialValue: MemberRoleStore.currentMode,
      })(
        <RadioGroup label={<FormattedMessage id="memberrole.member.type" />} className="c7n-iam-memberrole-radiogroup" onChange={this.changeCreateMode}>
          <Radio value={'user'}>{intl.formatMessage({ id: 'memberrole.type.user' })}</Radio>
          <Radio value={'client'}>{intl.formatMessage({ id: 'memberrole.client' })}</Radio>
        </RadioGroup>,
      )}
    </FormItem>
  ) : null;
}
/**
 * Renders the user / client multi-select of the create form.
 * @returns {*}
 */
getProjectNameDom() {
  const { selectType, currentMemberData, createMode, overflow } = this.state;
  const { form, MemberRoleStore, intl } = this.props;
  const { getFieldDecorator } = form;
  const member = [];
  const style = {
    marginTop: '-15px',
  };
  if (selectType === 'edit') {
    // NOTE(review): dead code — the function returns null right after, so
    // `member` and `style` computed here are never used anywhere.
    member.push(MemberRoleStore.currentMode === 'user' ? currentMemberData.loginName : currentMemberData.id);
    style.display = 'none';
    return null;
  }
  if (createMode === 'user') {
    return (
      selectType === 'create' && <FormItem
        {...FormItemNumLayout}
      >
        {getFieldDecorator('member', {
          rules: [{
            required: true,
            message: intl.formatMessage({ id: 'memberrole.user.require.msg' }),
          }],
          initialValue: selectType === 'create' ? [] : member,
        })(
          <Select
            label={<FormattedMessage id="memberrole.type.user" />}
            optionLabelProp="label"
            allowClear
            style={{ width: 512 }}
            mode="multiple"
            optionFilterProp="children"
            filterOption={false}
            filter
            getPopupContainer={() => (overflow ? this.sidebarBody : document.body)}
            onFilterChange={this.handleSelectFilter}
            notFoundContent={selectFilterEmpty ? intl.formatMessage({ id: 'memberrole.noFilter.msg' }) : intl.formatMessage({ id: 'memberrole.notfound.msg' })}
            loading={this.state.selectLoading}
          >
            {this.getUserOption()}
          </Select>,
        )}
      </FormItem>
    );
  } else {
    return (
      selectType === 'create' && <FormItem
        {...FormItemNumLayout}
      >
        {getFieldDecorator('member', {
          rules: [{
            required: true,
            message: intl.formatMessage({ id: 'memberrole.client.require.msg' }),
          }],
          initialValue: selectType === 'create' ? [] : member,
        })(
          <Select
            label={<FormattedMessage id="memberrole.client" />}
            allowClear
            style={{ width: 512 }}
            mode="multiple"
            optionFilterProp="children"
            filterOption={false}
            filter
            getPopupContainer={() => (overflow ? this.sidebarBody : document.body)}
            onFilterChange={this.handleSelectFilter}
            notFoundContent={selectFilterEmpty ? intl.formatMessage({ id: 'memberrole.noFilter.msg' }) : intl.formatMessage({ id: 'memberrole.notfound.msg' })}
            loading={this.state.selectLoading}
          >
            {this.getClientOption()}
          </Select>,
        )}
      </FormItem>
    );
  }
}
/**
 * Renders one FormItem (role select + delete button) per entry in roleIds.
 * @returns {any[]}
 */
getRoleFormItems = () => {
  const { selectType, roleIds, overflow } = this.state;
  const { getFieldDecorator } = this.props.form;
  const formItems = roleIds.map((id, index) => {
    // unfilled selects are keyed by position, chosen ones by role id
    const key = id === undefined ? `role-index-${index}` : String(id);
    return (<FormItem
      {...FormItemNumLayout}
      key={key}
    >
      {getFieldDecorator(key, {
        rules: [
          {
            // only the last remaining select in create mode is mandatory
            required: roleIds.length === 1 && selectType === 'create',
            message: this.formatMessage('memberrole.role.require.msg'),
          },
        ],
        initialValue: id,
      })(
        <Select
          className="member-role-select"
          style={{ width: 300 }}
          label={<FormattedMessage id="memberrole.role.label" />}
          getPopupContainer={() => (overflow ? this.sidebarBody : document.body)}
          filterOption={(input, option) => {
            // NOTE(review): digs Option > Tooltip > span to read the role
            // name — tightly coupled to the markup built by getOption()
            const childNode = option.props.children;
            if (childNode && React.isValidElement(childNode)) {
              return childNode.props.children.props.children.toLowerCase().indexOf(input.toLowerCase()) >= 0;
            }
            return false;
          }}
          onChange={(value) => {
            // mutates the array held in state in place; a later render reads it
            roleIds[index] = value;
          }}
          filter
        >
          {this.getOption(id)}
        </Select>,
      )}
      <Button
        size="small"
        icon="delete"
        shape="circle"
        onClick={() => this.removeRole(index)}
        disabled={roleIds.length === 1 && selectType === 'create'}
        className={'delete-role'}
      />
    </FormItem>);
  });
  return formItems;
};
changeCreateMode = (e) => {
  // Switching the member type resets the member picker and the role selects.
  const mode = e.target.value;
  this.setState({
    createMode: mode,
    selectLoading: true,
    roleIds: [undefined],
  });
  this.props.form.setFields({
    member: { values: [] },
    'role-index-0': { values: undefined },
  });
}
// Debounced remote search for the member picker. `timer` and
// `selectFilterEmpty` are module-level so the debounce handle and the
// empty-query hint survive re-renders.
handleSelectFilter = (value) => {
  selectFilterEmpty = !value;
  this.setState({
    selectLoading: true,
  });
  const { createMode } = this.state;
  const queryObj = {
    param: value,
    sort: 'id',
    organization_id: get(this.props.AppState, 'currentMenuType.organizationId', 0),
  };
  if (timer) {
    clearTimeout(timer);
  }
  if (value) {
    // debounce remote lookups while the user is typing
    timer = setTimeout(() => (createMode === 'user' ? this.loadUsers(queryObj) : this.loadClients(queryObj)), 300);
  } else {
    // empty query: load the unfiltered list immediately
    return createMode === 'user' ? this.loadUsers(queryObj) : this.loadClients(queryObj);
  }
}
// Fetches platform-wide users matching queryObj and stores them for the
// member picker, then clears the select's loading flag.
loadUsers = (queryObj) => {
  const { MemberRoleStore } = this.props;
  MemberRoleStore.loadUsers(queryObj).then((result) => {
    MemberRoleStore.setUsersData(result.list.slice());
    this.setState({ selectLoading: false });
  });
}
// Fetches platform-wide clients matching queryObj and stores them for the
// member picker, then clears the select's loading flag.
loadClients = (queryObj) => {
  const { MemberRoleStore } = this.props;
  MemberRoleStore.loadClients(queryObj).then((result) => {
    MemberRoleStore.setClientsData(result.list.slice());
    this.setState({ selectLoading: false });
  });
}
// <Option> list for the user picker: avatar (or first character of the real
// name) plus the real name; label and tooltip combine login and real name.
getUserOption = () => {
  const { MemberRoleStore } = this.props;
  const usersData = MemberRoleStore.getUsersData;
  return usersData && usersData.length > 0 ? (
    usersData.map(({ id, imageUrl, loginName, realName }) => (
      <Option key={id} value={id} label={`${loginName}${realName}`}>
        <Tooltip title={`${loginName}${realName}`} placement="topLeft">
          <div className="c7n-iam-memberrole-user-option">
            <div className="c7n-iam-memberrole-user-option-avatar">
              {
                imageUrl ? <img src={imageUrl} alt="userAvatar" style={{ width: '100%' }} /> :
                <span className="c7n-iam-memberrole-user-option-avatar-noavatar">{realName && realName.split('')[0]}</span>
              }
            </div>
            <span>{realName}</span>
          </div>
        </Tooltip>
      </Option>
    ))
  ) : null;
}
getClientOption = () => {
const { MemberRoleStore } = this.props;
const clientsData = MemberRoleStore.getClientsData;
return clientsData && clientsData.length > 0 ? (
clientsData.map(({ id, clientName }) => (
<Option key={id} value={id}>{clientName}</Option>
))
) : null;
}
// Builds the <Option> list for one role select: roles already chosen in
// other selects are excluded (except `current`, the value of this select),
// and disabled roles are kept but hidden so an existing value still resolves.
getOption = (current) => {
  const { roleData = [], roleIds } = this.state;
  return roleData.reduce((options, { id, name, enabled, code }) => {
    if (roleIds.indexOf(id) === -1 || id === current) {
      if (enabled === false) {
        options.push(<Option style={{ display: 'none' }} disabled value={id} key={id}>{name}</Option>);
      } else {
        options.push(
          <Option value={id} key={id} title={name}>
            <Tooltip title={code} placement="right" align={{ offset: [20, 0] }}>
              <span style={{ display: 'inline-block', width: '100%' }}>{name}</span>
            </Tooltip>
          </Option>,
        );
      }
    }
    return options;
  }, []);
};
// Sidebar: removes the role select at `index` from the form.
removeRole = (index) => {
  // Build a new array instead of splicing the one held in state: mutating
  // state in place can hide the update from React's change detection.
  const roleIds = this.state.roleIds.filter((item, i) => i !== index);
  this.setState({ roleIds });
};
// "Add another role" button under the role selects; `disabled` is computed
// by getSidebarContent().
getAddOtherBtn(disabled) {
  return (
    <Button type="primary" disabled={disabled} className="add-other-role" icon="add" onClick={this.addRoleList}>
      <FormattedMessage id="memberrole.add.other" />
    </Button>
  );
}
// Appends an empty role select to the sidebar form.
addRoleList = () => {
  // Copy-then-set instead of pushing into the array already held in state,
  // so React sees a fresh reference on this update.
  this.setState({ roleIds: [...this.state.roleIds, undefined] });
};
/**
 * Human-readable elapsed time between two timestamps (e.g. "1天2小时3分钟4秒"
 * with localized unit labels); zero-valued units are omitted.
 * @param startTime start timestamp (anything Date can parse)
 * @param endTime end timestamp
 * @returns {string}
 */
getSpentTime = (startTime, endTime) => {
  const { intl } = this.props;
  const units = [
    { ms: 24 * 3600 * 1000, label: intl.formatMessage({ id: 'day' }) },
    { ms: 3600 * 1000, label: intl.formatMessage({ id: 'hour' }) },
    { ms: 60 * 1000, label: intl.formatMessage({ id: 'minute' }) },
  ];
  // millisecond difference; successively divided into day/hour/minute
  let remain = new Date(endTime).getTime() - new Date(startTime).getTime();
  let result = '';
  units.forEach(({ ms, label }) => {
    const count = Math.floor(remain / ms);
    remain %= ms;
    if (count) {
      result += count + label;
    }
  });
  // seconds are rounded, matching the original behavior
  const seconds = Math.round(remain / 1000);
  if (seconds) {
    result += seconds + intl.formatMessage({ id: 'second' });
  }
  return result;
};
// Renders the body of the upload sidebar: a spinner while uploading or
// importing, otherwise the summary of the last import (or a "no record"
// hint).
getUploadInfo = () => {
  const { MemberRoleStore } = this.props;
  const { fileLoading } = this.state;
  const uploadInfo = MemberRoleStore.getUploadInfo || {};
  const uploading = MemberRoleStore.getUploading;
  const container = [];
  if (uploading) { // server-side import in progress
    container.push(this.renderLoading());
    this.handleUploadInfo();
    // NOTE(review): setState from a render helper schedules another render
    // pass — confirm this is intentional.
    if (fileLoading) {
      this.setState({
        fileLoading: false,
      });
    }
  } else if (fileLoading) { // file upload still running
    container.push(this.renderLoading());
  } else if (!uploadInfo.noData) {
    // link style depends on whether the last import finished cleanly
    const failedStatus = uploadInfo.finished ? 'detail' : 'error';
    container.push(
      <p key={'upload.lasttime'}>
        <FormattedMessage id={'upload.lasttime'} />
        {uploadInfo.beginTime}
        (<FormattedMessage id={'upload.spendtime'} />
        {this.getSpentTime(uploadInfo.beginTime, uploadInfo.endTime)})
      </p>,
      <p key={'upload.time'}>
        <FormattedMessage
          id={'upload.time'}
          values={{
            successCount: <span className="success-count">{uploadInfo.successfulCount || 0}</span>,
            failedCount: <span className="failed-count">{uploadInfo.failedCount || 0}</span>,
          }}
        />
        {uploadInfo.url && (
          <span className={`download-failed-${failedStatus}`}>
            <a href={uploadInfo.url}>
              <FormattedMessage id={`download.failed.${failedStatus}`} />
            </a>
          </span>
        )}
      </p>,
    );
  } else {
    container.push(<p key={'upload.norecord'}><FormattedMessage id={'upload.norecord'} /></p>);
  }
  return (
    <div className="c7n-user-upload-container">
      {container}
    </div>
  );
};
/**
 * Props for the hidden <Upload>: accepts Excel files only
 * (application/vnd.ms-excel 2003-2007,
 * application/vnd.openxmlformats-officedocument.spreadsheetml.sheet 2010)
 * and posts to the batch_import endpoint with the OAuth bearer token.
 */
getUploadProps = () => {
  const { intl, MemberRoleStore } = this.props;
  return {
    multiple: false,
    name: 'file',
    accept: 'application/vnd.ms-excel, application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
    action: `${process.env.API_HOST}${MemberRoleStore.urlRoleMember}/batch_import`,
    headers: {
      Authorization: `bearer ${Choerodon.getCookie('access_token')}`,
    },
    showUploadList: false,
    onChange: ({ file }) => {
      const { status, response } = file;
      const { fileLoading } = this.state;
      if (status === 'done') {
        // upload finished — fetch the import status right away
        this.handleUploadInfo(true);
      } else if (status === 'error') {
        Choerodon.prompt(`${response.message}`);
        this.setState({
          fileLoading: false,
        });
      }
      // the server may flag failure in the body even on a successful HTTP call
      if (response && response.failed === true) {
        Choerodon.prompt(`${response.message}`);
        this.setState({
          fileLoading: false,
        });
      }
      if (!fileLoading) {
        this.setState({
          fileLoading: status === 'uploading',
        });
      }
    },
  };
};
isModify = () => {
  // True when the edited member's existing role set differs from the current
  // selection (different size, or any existing role no longer selected).
  const { roleIds, currentMemberData } = this.state;
  const existing = currentMemberData.roles;
  return existing.length !== roleIds.length
    || existing.some(role => !roleIds.includes(role.id));
};
// Downloads the member-import Excel template through a hidden anchor element.
handleDownLoad = () => {
  const { MemberRoleStore } = this.props;
  MemberRoleStore.downloadTemplate().then((result) => {
    const blob = new Blob([result], {
      type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;charset=utf-8' });
    const url = window.URL.createObjectURL(blob);
    const linkElement = document.getElementById('c7n-user-download-template');
    linkElement.setAttribute('href', url);
    linkElement.click();
    // Release the object URL once the download has been triggered; otherwise
    // the blob stays referenced for the lifetime of the document.
    window.URL.revokeObjectURL(url);
  });
};
// Save handler for the sidebar OK button (create or edit).
handleOk = (e) => {
  const { selectType, roleIds } = this.state;
  const { MemberRoleStore } = this.props;
  e.preventDefault();
  this.props.form.validateFields((err, values) => {
    // create mode: member type from the radio; edit mode: from the store
    const memberType = selectType === 'create' ? values.mode : MemberRoleStore.currentMode;
    if (!err) {
      // one entry per chosen role; blank selects are dropped.
      // NOTE(review): sessionStorage values are strings, so
      // `sessionStorage.selectData.id` is presumably always undefined and
      // sourceId falls back to 0 — verify how selectData is written.
      const body = roleIds.filter(roleId => roleId).map((roleId, index) => ({
        memberType,
        roleId,
        sourceId: sessionStorage.selectData.id || 0,
        sourceType: sessionStorage.type,
      }));
      // reset pagination to the first page after a successful save
      const pageInfo = {
        current: 1,
        total: 0,
        pageSize,
      };
      this.setState({ submitting: true });
      if (selectType === 'create') {
        this.roles.fetchRoleMember(values.member, body, memberType)
          .then(({ failed, message }) => {
            this.setState({ submitting: false });
            if (failed) {
              Choerodon.prompt(message);
            } else {
              Choerodon.prompt(this.formatMessage('add.success'));
              this.closeSidebar();
              if (MemberRoleStore.currentMode === 'user') {
                this.setState({
                  memberRolePageInfo: pageInfo,
                }, () => {
                  this.roles.fetch();
                });
              } else {
                this.setState({
                  clientMemberRolePageInfo: pageInfo,
                }, () => {
                  this.roles.fetchClient();
                });
              }
            }
          })
          .catch((error) => {
            this.setState({ submitting: false });
            Choerodon.handleResponseError(error);
          });
      } else if (selectType === 'edit') {
        // nothing changed: skip the request and close immediately
        if (!this.isModify()) {
          this.setState({ submitting: false });
          Choerodon.prompt(this.formatMessage('modify.success'));
          this.closeSidebar();
          return;
        }
        const { currentMemberData } = this.state;
        const memberIds = [currentMemberData.id];
        this.roles.fetchRoleMember(memberIds, body, memberType, true)
          .then(({ failed, message }) => {
            this.setState({ submitting: false });
            if (failed) {
              Choerodon.prompt(message);
            } else {
              Choerodon.prompt(this.formatMessage('modify.success'));
              this.closeSidebar();
              if (MemberRoleStore.currentMode === 'user') {
                // only reset the pager when roles remain on the member
                if (body.length) {
                  this.setState({
                    memberRolePageInfo: pageInfo,
                  }, () => {
                    this.roles.fetch();
                  });
                } else {
                  this.roles.fetch();
                }
              } else if (MemberRoleStore.currentMode === 'client') {
                if (body.length) {
                  this.setState({
                    clientMemberRolePageInfo: pageInfo,
                  }, () => {
                    this.roles.fetchClient();
                  });
                } else {
                  this.roles.fetchClient();
                }
              }
            }
          })
          .catch((error) => {
            this.setState({ submitting: false });
            Choerodon.handleResponseError(error);
          });
      }
    }
  });
};
createRole = () => {
  // Opens the sidebar in create mode, defaulting to the current member type.
  const { MemberRoleStore } = this.props;
  this.setState(
    { selectType: 'create', createMode: MemberRoleStore.currentMode },
    () => this.openSidebar(),
  );
};
editRole = (memberData) => {
  // Opens the sidebar in edit mode for the given member row.
  const nextState = { selectType: 'edit', currentMemberData: memberData };
  this.setState(nextState, () => {
    this.openSidebar();
  });
};
handleEditRole = ({ id: memberId, loginName }) => {
  // Edit a user's roles; when the row isn't in the cached member list,
  // reload it by login name before opening the sidebar.
  const cached = this.state.memberDatas.find(({ id }) => id === memberId);
  if (cached) {
    this.editRole(cached);
    return;
  }
  this.roles.loadMemberDatas({ current: 1, pageSize }, { loginName: [loginName] })
    .then(({ list }) => {
      this.editRole(list.find(memberData => memberData.loginName === loginName));
    });
};
handleEditClientRole = ({ id: memberId, clientName }) => {
  // Edit a client's roles; when the row isn't in the cached client list,
  // reload it by client name before opening the sidebar.
  const cached = this.state.clientMemberDatas.find(({ id }) => id === memberId);
  if (cached) {
    this.editRole(cached);
    return;
  }
  this.roles.loadClientMemberDatas({ current: 1, pageSize }, { clientName })
    .then(({ list }) => {
      this.editRole(list.find(memberData => memberData.name === clientName));
    });
};
showMemberTable(show) {
this.reload();
this.setState({
showMember: show,
});
}
// Table-change handler for the user member view: persists the new pager and
// filters, reloads the page, then syncs pagination info from the response.
memberRoleTableChange = (memberRolePageInfo, memberRoleFilters, sort, params) => {
  this.setState({
    memberRolePageInfo,
    memberRoleFilters,
    params,
    loading: true,
  });
  this.roles.loadMemberDatas(memberRolePageInfo, memberRoleFilters, params).then(({ list, total, pageNum, pageSize }) => {
    this.setState({
      loading: false,
      memberDatas: list,
      memberRolePageInfo: {
        current: pageNum,
        total,
        pageSize,
      },
      params,
      memberRoleFilters,
    });
  });
};
// Client-mode counterpart of memberRoleTableChange: reloads the client
// member list for the new pager/filters and syncs pagination back to state.
clientMemberRoleTableChange = (clientMemberRolePageInfo, clientMemberRoleFilters, sort, clientParams) => {
  this.setState({
    clientMemberRolePageInfo,
    clientMemberRoleFilters,
    clientParams,
    loading: true,
  });
  this.roles.loadClientMemberDatas(clientMemberRolePageInfo, clientMemberRoleFilters, clientParams).then(({ list, total, pageNum, pageSize }) => {
    this.setState({
      loading: false,
      clientMemberDatas: list,
      clientMemberRolePageInfo: {
        current: pageNum,
        total,
        pageSize,
      },
      clientParams,
      clientMemberRoleFilters,
    });
  });
}
// Table-change handler for the user role view: reloads the role list and,
// for roles whose row is currently expanded, re-fetches their first page of
// members so the children match the new filters.
roleMemberTableChange = (pageInfo, { name, ...roleMemberFilters }, sort, params) => {
  const newState = {
    roleMemberFilterRole: name,
    roleMemberFilters,
    roleMemberParams: params,
  };
  newState.loading = true;
  const { expandedKeys } = this.state;
  this.roles.loadRoleMemberDatas({ ...roleMemberFilters }, params)
    .then((roleData) => {
      // keep only roles that have members; expanded ones get their member
      // page reloaded as a side effect of this filter pass
      const roleMemberDatas = roleData.filter((role) => {
        role.users = role.users || [];
        if (role.userCount > 0) {
          // expanded keys are "<roleId>-<id>" pairs; compare the role part
          if (expandedKeys.find(expandedKey => expandedKey.split('-')[1] === String(role.id))) {
            this.roles.loadRoleMemberData(role, {
              current: 1,
              pageSize,
            }, roleMemberFilters, params);
          }
          return true;
        }
        return false;
      });
      this.setState({
        loading: false,
        expandedKeys,
        roleMemberDatas,
      });
    });
  this.setState(newState);
};
// Client-mode counterpart of roleMemberTableChange.
// NOTE: the state key `cilentRoleMemberDatas` is misspelled but read under
// that exact name elsewhere in the component, so it must not be renamed here
// in isolation.
clientRoleMemberTableChange = (pageInfo, { name, ...clientRoleMemberFilters }, sort, params) => {
  const newState = {
    clientRoleMemberFilterRole: name,
    clientRoleMemberFilters,
    clientRoleMemberParams: params,
  };
  newState.loading = true;
  const { expandedKeys } = this.state;
  this.roles.loadClientRoleMemberDatas({ name: params, ...clientRoleMemberFilters })
    .then((roleData) => {
      const cilentRoleMemberDatas = roleData.filter((role) => {
        role.users = role.users || [];
        if (role.userCount > 0) {
          // reload members of roles whose row is currently expanded
          if (expandedKeys.find(expandedKey => expandedKey.split('-')[1] === String(role.id))) {
            this.roles.loadClientRoleMemberData(role, {
              current: 1,
              pageSize,
            }, clientRoleMemberFilters);
          }
          return true;
        }
        return false;
      });
      this.setState({
        loading: false,
        expandedKeys,
        cilentRoleMemberDatas,
      });
    });
  this.setState(newState);
}
renderSimpleColumn = (text, { enabled }) => {
if (enabled === false) {
return (
<Tooltip title={<FormattedMessage id="memberrole.member.disabled.tip" />}>
<span className="text-disabled">
{text}
</span>
</Tooltip>
);
}
return text;
};
renderRoleColumn = text => text.map(({ id, name, enabled }) => {
let item = <span className={classnames('role-wrapper', { 'role-wrapper-enabled': enabled, 'role-wrapper-disabled': !enabled })} key={id}>{name}</span>;
if (enabled === false) {
item = (
<Tooltip title={<FormattedMessage id="memberrole.role.disabled.tip" />}>
{item}
</Tooltip>
);
}
return item;
});
// Renders the name column of the user role view: leaf (user) rows show the
// login name; role rows show "name (count)" plus a "更多" link that loads the
// next page of members under that role.
renderRoleLoginNameColumn = (text, data) => {
  const { roleMemberFilters, roleMemberParams } = this.state;
  const { loginName, name } = data;
  if (loginName) {
    return loginName;
  } else if (name) {
    const { userCount, users: { length }, loading: isLoading, enabled } = data;
    const more = isLoading ? (<Progress type="loading" width={12} />) : (length > 0 && userCount > length && (
      <a onClick={() => {
        // fetch the page after the `length` members already loaded
        this.roles.loadRoleMemberData(data, {
          current: Math.floor(length / pageSize) + 1,
          pageSize,
        }, roleMemberFilters, roleMemberParams);
        this.forceUpdate();
      }}
      >更多</a>
    ));
    const item = <span className={classnames({ 'text-disabled': !enabled })}>{name} ({userCount}) {more}</span>;
    return enabled ? item : (<Tooltip title={<FormattedMessage id="memberrole.role.disabled.tip" />}>{item}</Tooltip>);
  }
};
renderRoleClientNameColumn = (text, data) => {
const { clientRoleMemberFilters } = this.state;
const { clientName, name } = data;
if (clientName) {
return clientName;
} else if (name) {
const { userCount, users: { length }, loading: isLoading, enabled } = data;
const more = isLoading ? (<Progress type="loading" width={12} />) : (length > 0 && userCount > length && (
<a onClick={() => {
this.roles.loadClientRoleMemberData(data, {
current: (length / pageSize),
pageSize,
}, clientRoleMemberFilters);
this.forceUpdate();
}}
>更多</a>
));
const item = <span className={classnames({ 'text-disabled': !enabled })}>{name} ({userCount}) {more}</span>;
return enabled ? item : (<Tooltip title={<FormattedMessage id="memberrole.role.disabled.tip" />}>{item}</Tooltip>);
}
}
/**
 * Renders the row action column (edit / remove buttons). Only leaf rows get
 * actions: user rows ('roleId'/'email' present) and client rows ('secret'
 * present); role group rows render nothing.
 * NOTE(review): organizationId, projectId and type are destructured from
 * getPermission(), which only returns the service arrays — these Permission
 * props are presumably undefined; verify.
 * @param text
 * @param record
 * @returns {*}
 */
renderActionColumn = (text, record) => {
  const { organizationId, projectId, createService, deleteService, type } = this.getPermission();
  const { MemberRoleStore } = this.props;
  if ('roleId' in record || 'email' in record || 'secret' in record) {
    return (
      <div>
        <Permission
          service={createService}
        >
          <Tooltip
            title={<FormattedMessage id="modify" />}
            placement="bottom"
          >
            {
              MemberRoleStore.currentMode === 'user' ? (
                <Button
                  onClick={() => {
                    this.handleEditRole(record);
                  }}
                  size="small"
                  shape="circle"
                  icon="mode_edit"
                />
              ) : (
                <Button
                  onClick={() => {
                    this.handleEditClientRole(record);
                  }}
                  size="small"
                  shape="circle"
                  icon="mode_edit"
                />
              )
            }
          </Tooltip>
        </Permission>
        <Permission
          service={deleteService}
          type={type}
          organizationId={organizationId}
          projectId={projectId}
        >
          <Tooltip
            title={<FormattedMessage id="remove" />}
            placement="bottom"
          >
            <Button
              size="small"
              shape="circle"
              onClick={this.state.showMember ? this.handleDelete.bind(this, record) : this.deleteRoleByRole.bind(this, record)}
              icon="delete"
            />
          </Tooltip>
        </Permission>
      </div>
    );
  }
};
// User member view: one row per user with login name, real name and role
// tags; rows are multi-selectable for bulk role removal.
renderMemberTable() {
  const { selectMemberRoles, roleMemberDatas, memberRolePageInfo, memberDatas, memberRoleFilters, loading } = this.state;
  // role-name filter options come from the role-view dataset
  const filtersRole = [...new Set(roleMemberDatas.map(({ name }) => (name)))].map(value => ({ value, text: value }));
  const columns = [
    {
      title: <FormattedMessage id="memberrole.loginname" />,
      dataIndex: 'loginName',
      key: 'loginName',
      width: '15%',
      filters: [],
      filteredValue: memberRoleFilters.loginName || [],
      render: this.renderSimpleColumn,
    },
    {
      title: <FormattedMessage id="memberrole.realname" />,
      dataIndex: 'realName',
      key: 'realName',
      width: '15%',
      filters: [],
      filteredValue: memberRoleFilters.realName || [],
      render: this.renderSimpleColumn,
    },
    {
      title: <FormattedMessage id="memberrole.role" />,
      dataIndex: 'roles',
      key: 'roles',
      filters: filtersRole,
      filteredValue: memberRoleFilters.roles || [],
      className: 'memberrole-roles',
      width: '50%',
      render: this.renderRoleColumn,
    },
    {
      title: '',
      width: '20%',
      align: 'right',
      render: this.renderActionColumn,
    },
  ];
  const rowSelection = {
    selectedRowKeys: Object.keys(selectMemberRoles).map(key => Number(key)),
    onChange: (selectedRowkeys, selectedRecords) => {
      this.setState({
        // map each selected user id to the ids of the roles it holds
        selectMemberRoles: selectedRowkeys.reduce((data, key, index) => {
          const currentRecord = selectedRecords.find(r => r.id === key) || { roles: [] };
          data[key] = currentRecord.roles.map(({ id }) => id);
          return data;
        }, {}),
      });
    },
  };
  return (
    <Table
      key="member-role"
      className="member-role-table"
      loading={loading}
      rowSelection={rowSelection}
      pagination={memberRolePageInfo}
      columns={columns}
      filters={this.state.params}
      onChange={this.memberRoleTableChange}
      dataSource={memberDatas}
      filterBarPlaceholder={this.formatMessage('filtertable')}
      rowKey={({ id }) => id}
      noFilter
    />
  );
}
// User role view: a tree table with one expandable row per role and its
// member users as children; the role-name filter is applied client-side.
renderRoleTable() {
  const { roleMemberDatas, roleMemberFilterRole, selectRoleMemberKeys, expandedKeys, roleMemberParams, roleMemberFilters, loading } = this.state;
  const filtersData = [...new Set(roleMemberDatas.map(({ name }) => (name)))].map(value => ({ value, text: value }));
  let dataSource = roleMemberDatas;
  // narrow the role rows locally by the selected role-name filters
  if (roleMemberFilterRole && roleMemberFilterRole.length) {
    dataSource = roleMemberDatas.filter(({ name }) => roleMemberFilterRole.some(role => name.indexOf(role) !== -1));
  }
  const columns = [
    {
      // hidden column: only contributes the login-name filter to the bar
      title: <FormattedMessage id="memberrole.loginname" />,
      key: 'loginName',
      hidden: true,
      filters: [],
      filteredValue: roleMemberFilters.loginName || [],
    },
    {
      title: <FormattedMessage id="memberrole.rolemember" />,
      filterTitle: <FormattedMessage id="memberrole.role" />,
      key: 'name',
      dataIndex: 'name',
      filters: filtersData,
      filteredValue: roleMemberFilterRole || [],
      render: this.renderRoleLoginNameColumn,
    },
    {
      title: <FormattedMessage id="memberrole.realname" />,
      key: 'realName',
      dataIndex: 'realName',
      filteredValue: roleMemberFilters.realName || [],
      filters: [],
    },
    {
      title: '',
      width: 100,
      align: 'right',
      render: this.renderActionColumn,
    },
  ];
  const rowSelection = {
    type: 'checkbox',
    selectedRowKeys: selectRoleMemberKeys,
    // only leaf (user) rows are selectable; role rows have no loginName
    getCheckboxProps: ({ loginName }) => ({
      disabled: !loginName,
    }),
    onChange: (newSelectRoleMemberKeys, newSelectRoleMembers) => {
      this.setState({
        selectRoleMemberKeys: newSelectRoleMemberKeys,
        selectRoleMembers: newSelectRoleMembers,
      });
    },
  };
  return (
    <Table
      key="role-member"
      loading={loading}
      rowSelection={rowSelection}
      expandedRowKeys={expandedKeys}
      className="role-member-table"
      pagination={false}
      columns={columns}
      filters={roleMemberParams}
      indentSize={0}
      dataSource={dataSource}
      rowKey={({ roleId = '', id }) => [roleId, id].join('-')}
      childrenColumnName="users"
      onChange={this.roleMemberTableChange}
      onExpand={this.handleExpand}
      onExpandedRowsChange={this.handleExpandedRowsChange}
      filterBarPlaceholder={this.formatMessage('filtertable')}
      noFilter
    />
  );
}
// Client member view: one row per client with its role tags; rows are
// multi-selectable for bulk role removal.
renderClientMemberTable() {
  const { selectMemberRoles, cilentRoleMemberDatas, clientMemberRolePageInfo, clientMemberDatas, clientMemberRoleFilters, loading } = this.state;
  const filtersRole = [...new Set(cilentRoleMemberDatas.map(({ name }) => (name)))].map(value => ({ value, text: value }));
  const columns = [
    {
      title: <FormattedMessage id="memberrole.client" />,
      dataIndex: 'name',
      key: 'name',
      filters: [],
      filteredValue: clientMemberRoleFilters.name || [],
    },
    {
      title: <FormattedMessage id="memberrole.role" />,
      dataIndex: 'roles',
      key: 'roles',
      filters: filtersRole,
      filteredValue: clientMemberRoleFilters.roles || [],
      className: 'memberrole-roles',
      width: '60%',
      render: this.renderRoleColumn,
    },
    {
      title: '',
      width: 100,
      align: 'right',
      render: this.renderActionColumn,
    },
  ];
  const rowSelection = {
    selectedRowKeys: Object.keys(selectMemberRoles).map(key => Number(key)),
    onChange: (selectedRowkeys, selectedRecords) => {
      this.setState({
        selectMemberRoles: selectedRowkeys.reduce((data, key, index) => {
          // look up by id rather than by index: selection order need not
          // match record order
          const currentRecord = selectedRecords.find(r => r.id === key) || { roles: [] };
          data[key] = currentRecord.roles.map(({ id }) => id);
          return data;
        }, {}),
      });
    },
  };
  return (
    <Table
      key="client-member-role"
      className="member-role-table"
      loading={loading}
      rowSelection={rowSelection}
      pagination={clientMemberRolePageInfo}
      columns={columns}
      filters={this.state.clientParams}
      onChange={this.clientMemberRoleTableChange}
      dataSource={clientMemberDatas}
      filterBarPlaceholder={this.formatMessage('filtertable')}
      rowKey={({ id }) => id}
      noFilter
    />
  );
}
// Client role view: a tree table with one expandable row per role and its
// member clients as children; the role-name filter is applied client-side.
renderClientRoleTable() {
  const { cilentRoleMemberDatas, clientRoleMemberFilterRole, selectRoleMemberKeys, expandedKeys, clientRoleMemberParams, clientRoleMemberFilters, loading } = this.state;
  const filtersData = [...new Set(cilentRoleMemberDatas.map(({ name }) => (name)))].map(value => ({ value, text: value }));
  let dataSource = cilentRoleMemberDatas;
  if (clientRoleMemberFilterRole && clientRoleMemberFilterRole.length) {
    dataSource = cilentRoleMemberDatas.filter(({ name }) => clientRoleMemberFilterRole.some(role => name.indexOf(role) !== -1));
  }
  const columns = [
    {
      // hidden column: only contributes the client-name filter to the bar
      title: <FormattedMessage id="memberrole.client" />,
      key: 'clientName',
      hidden: true,
      filters: [],
      filteredValue: clientRoleMemberFilters.clientName || [],
    },
    {
      title: <FormattedMessage id="memberrole.roleclient" />,
      filterTitle: <FormattedMessage id="memberrole.role" />,
      key: 'name',
      dataIndex: 'name',
      filters: filtersData,
      filteredValue: clientRoleMemberFilterRole || [],
      render: this.renderRoleClientNameColumn,
    },
    {
      title: '',
      width: 100,
      align: 'right',
      render: this.renderActionColumn,
    },
  ];
  const rowSelection = {
    type: 'checkbox',
    selectedRowKeys: selectRoleMemberKeys,
    // only leaf (client) rows are selectable; role rows have no secret
    getCheckboxProps: ({ secret }) => ({
      disabled: !secret,
    }),
    onChange: (newSelectRoleMemberKeys, newSelectRoleMembers) => {
      this.setState({
        selectRoleMemberKeys: newSelectRoleMemberKeys,
        selectRoleMembers: newSelectRoleMembers,
      });
    },
  };
  return (
    <Table
      key="client-role-member"
      loading={loading}
      rowSelection={rowSelection}
      className="role-member-table"
      pagination={false}
      columns={columns}
      filters={clientRoleMemberParams}
      indentSize={0}
      dataSource={dataSource}
      rowKey={({ roleId = '', id }) => [roleId, id].join('-')}
      childrenColumnName="users"
      onChange={this.clientRoleMemberTableChange}
      onExpand={this.handleExpand}
      onExpandedRowsChange={this.handleExpandedRowsChange}
      filterBarPlaceholder={this.formatMessage('filtertable')}
      noFilter
    />
  );
}
handleExpandedRowsChange = (expandedKeys) => {
  // Tracks which role rows are expanded in the role-view tables.
  this.setState({ expandedKeys });
};
/**
 * Expansion handler for the role-view tables: on first expand of a role row
 * (no members loaded yet), fetch the first page of its members.
 * @param expand Boolean — whether the row is being expanded
 * @param data the row being expanded
 */
handleExpand = (expand, data) => {
  const { users = [] } = data;
  const { MemberRoleStore } = this.props;
  if (!expand || users.length) {
    return;
  }
  const firstPage = { current: 1, pageSize };
  if (MemberRoleStore.currentMode === 'user') {
    this.roles.loadRoleMemberData(data, firstPage, this.state.roleMemberFilters, this.state.roleMemberParams);
  } else {
    this.roles.loadClientRoleMemberData(data, firstPage, this.state.clientRoleMemberFilters, this.state.clientRoleMemberParams);
  }
};
/**
 * Fired when the upload button is clicked: refreshes the latest import
 * status and opens the sidebar in upload mode.
 */
handleUpload = () => {
  this.handleUploadInfo(true);
  this.setState({ selectType: 'upload', sidebar: true });
};
/**
 * When `immediately` is falsy, poll the import status every 2 seconds; once
 * the import reports finished, the timer is cleared and the list refreshed.
 * @param immediately fetch the status once right away instead of polling
 */
handleUploadInfo = (immediately) => {
  const { MemberRoleStore } = this.props;
  const { fileLoading } = this.state;
  const uploadInfo = MemberRoleStore.getUploadInfo || {};
  // once a finished flag is present the file-upload spinner can be cleared
  if (uploadInfo.finished !== null && fileLoading) {
    this.setState({
      fileLoading: false,
    });
  }
  if (immediately) {
    MemberRoleStore.handleUploadInfo();
    return;
  }
  if (uploadInfo.finished !== null) {
    clearInterval(this.timer);
    return;
  }
  // import still running: (re)start the 2-second polling loop
  clearInterval(this.timer);
  this.timer = setInterval(() => {
    MemberRoleStore.handleUploadInfo();
    this.init();
  }, 2000);
};
upload = (e) => {
  // Proxies a toolbar click to the hidden <Upload> input; ignored while an
  // upload or an import is already in progress.
  e.stopPropagation();
  const { MemberRoleStore } = this.props;
  const { fileLoading } = this.state;
  if (MemberRoleStore.getUploading || fileLoading) {
    return;
  }
  document.getElementsByClassName('c7n-user-upload-hidden')[0].click();
};
// Spinner panel shown in the upload sidebar; the caption switches between
// the "file uploading" and "importing" messages based on fileLoading.
renderLoading() {
  const { intl: { formatMessage } } = this.props;
  const { fileLoading } = this.state;
  return (
    <div className="c7n-user-uploading-container" key="c7n-user-uploading-container">
      <div className="loading">
        <Spin size="large" />
      </div>
      <p className="text">{formatMessage({
        id: `${intlPrefix}.${fileLoading ? 'fileloading' : 'uploading'}.text` })}
      </p>
      {!fileLoading && (<p className="tip">{formatMessage({ id: `${intlPrefix}.uploading.tip` })}</p>)}
    </div>
  );
}
getMemberRoleClass(name) {
const { showMember } = this.state;
return classnames({ active: name === 'role' ^ showMember });
}
getPermission() {
const { AppState } = this.props;
const { type } = AppState.currentMenuType;
let createService = ['iam-service.role-member.createOrUpdateOnSiteLevel'];
let deleteService = ['iam-service.role-member.deleteOnSiteLevel'];
let importService = ['iam-service.role-member.import2MemberRoleOnSite'];
if (type === 'organization') {
createService = ['iam-service.role-member.createOrUpdateOnOrganizationLevel'];
deleteService = ['iam-service.role-member.deleteOnOrganizationLevel'];
importService = ['iam-service.role-member.import2MemberRoleOnOrganization'];
} else if (type === 'project') {
createService = ['iam-service.role-member.createOrUpdateOnProjectLevel'];
deleteService = ['iam-service.role-member.deleteOnProjectLevel'];
importService = ['iam-service.role-member.import2MemberRoleOnProject'];
}
return {
createService,
deleteService,
importService,
};
}
/**
 * Pick the table to render from the view toggle (member vs role)
 * and the store's current mode (user vs client).
 */
renderTable = () => {
  const { showMember } = this.state;
  const { MemberRoleStore: { currentMode } } = this.props;
  if (showMember && currentMode === 'user') {
    return this.renderMemberTable();
  }
  if (showMember && currentMode === 'client') {
    return this.renderClientMemberTable();
  }
  if (!showMember && currentMode === 'user') {
    return this.renderRoleTable();
  }
  return this.renderClientRoleTable();
};
render() {
const { MemberRoleStore, intl } = this.props;
const { sidebar, selectType, roleData, showMember, selectMemberRoles, selectRoleMemberKeys, submitting, fileLoading } = this.state;
const uploading = MemberRoleStore.getUploading;
const okText = selectType === 'create' ? this.formatMessage('add') : this.formatMessage('save');
const { createService, deleteService, importService } = this.getPermission();
return (
<Page
service={[
'iam-service.role-member.createOrUpdateOnSiteLevel',
'iam-service.role-member.deleteOnSiteLevel',
'iam-service.role-member.createOrUpdateOnOrganizationLevel',
'iam-service.role-member.deleteOnOrganizationLevel',
'iam-service.role-member.createOrUpdateOnProjectLevel',
'iam-service.role-member.deleteOnProjectLevel',
'iam-service.role-member.pagingQueryUsersByRoleIdOnOrganizationLevel',
'iam-service.role-member.listRolesWithUserCountOnOrganizationLevel',
'iam-service.role-member.pagingQueryUsersWithOrganizationLevelRoles',
'iam-service.role-member.pagingQueryUsersByRoleIdOnProjectLevel',
'iam-service.role-member.listRolesWithUserCountOnProjectLevel',
'iam-service.role-member.pagingQueryUsersWithProjectLevelRoles',
'iam-service.role-member.pagingQueryUsersByRoleIdOnSiteLevel',
'iam-service.role-member.listRolesWithUserCountOnSiteLevel',
'iam-service.role-member.pagingQueryUsersWithSiteLevelRoles',
'iam-service.role-member.listRolesWithClientCountOnSiteLevel',
'iam-service.role-member.listRolesWithClientCountOnSiteLevel',
'iam-service.role-member.pagingQueryClientsWithSiteLevelRoles',
'iam-service.role-member.listRolesWithClientCountOnOrganizationLevel',
'iam-service.role-member.pagingQueryClientsByRoleIdOnOrganizationLevel',
'iam-service.role-member.pagingQueryClientsWithOrganizationLevelRoles',
'iam-service.role-member.listRolesWithClientCountOnProjectLevel',
'iam-service.role-member.pagingQueryClientsWithProjectLevelRoles',
'iam-service.role-member.pagingQueryClientsByRoleIdOnProjectLevel',
'iam-service.role-member.queryAllUsers',
'iam-service.role-member.queryAllClients',
]}
>
<Header title={<FormattedMessage id={`${this.roles.code}.header.title`} />}>
<Select
value={MemberRoleStore.currentMode}
dropdownClassName="c7n-memberrole-select-dropdown"
className="c7n-memberrole-select"
onChange={this.changeMode}
>
<Option value="user" key="user">{intl.formatMessage({ id: 'memberrole.type.user' })}</Option>
<Option value="client" key="client">{intl.formatMessage({ id: 'memberrole.client' })}</Option>
</Select>
<Permission
service={createService}
>
<Button
onClick={this.createRole}
icon="playlist_add"
>
<FormattedMessage id="add" />
</Button>
</Permission>
<Permission
service={importService}
>
<Button
icon="get_app"
style={{ display: MemberRoleStore.currentMode === 'user' ? 'inline' : 'none' }}
onClick={this.handleDownLoad}
>
<FormattedMessage id={'download.template'} />
<a id="c7n-user-download-template" href="" onClick={(event) => { event.stopPropagation(); }} download="roleAssignment.xlsx" />
</Button>
<Button
icon="file_upload"
style={{ display: MemberRoleStore.currentMode === 'user' ? 'inline' : 'none' }}
onClick={this.handleUpload}
>
<FormattedMessage id={'upload.file'} />
</Button>
</Permission>
<Permission
service={deleteService}
>
<Button
onClick={this.deleteRoleByMultiple}
icon="delete"
disabled={!(showMember ? Object.keys(selectMemberRoles) : selectRoleMemberKeys).length}
>
<FormattedMessage id="remove" />
</Button>
</Permission>
<Button
onClick={this.reload}
icon="refresh"
>
<FormattedMessage id="refresh" />
</Button>
</Header>
<Content
code={this.roles.code}
values={this.roles.values}
>
<div className="member-role-btns">
<span className="text">
<FormattedMessage id="memberrole.view" />:
</span>
<Button
className={this.getMemberRoleClass('member')}
onClick={() => {
this.showMemberTable(true);
}}
type="primary"
><FormattedMessage id="memberrole.member" /></Button>
<Button
className={this.getMemberRoleClass('role')}
onClick={() => {
this.showMemberTable(false);
}}
type="primary"
><FormattedMessage id="memberrole.role" /></Button>
</div>
{this.renderTable()}
<Sidebar
title={this.getSidebarTitle()}
visible={sidebar}
okText={selectType === 'upload' ? this.getUploadOkText() : okText}
confirmLoading={uploading || fileLoading || submitting}
cancelText={<FormattedMessage id={selectType === 'upload' ? 'close' : 'cancel'} />}
onOk={selectType === 'upload' ? this.upload : this.handleOk}
onCancel={this.closeSidebar}
>
{roleData.length && this.state.selectType !== 'upload' ? this.getSidebarContent() : null}
{this.state.selectType === 'upload' ? this.renderUpload() : null}
</Sidebar>
</Content>
</Page>
);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/LookupMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.LookupDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
* @author superlee
*/
public interface LookupMapper extends Mapper<LookupDTO> {

    /**
     * Full-text search of lookup records.
     *
     * @param lookupDTO example object whose non-null fields narrow the query
     * @param param     free-text keyword — presumably matched against code/description; confirm in the XML mapper
     * @return matching lookup records
     */
    List<LookupDTO> fulltextSearch(@Param("lookupDTO") LookupDTO lookupDTO,
                                   @Param("param") String param);

    /**
     * Select a single lookup by code with its lookup values eagerly attached.
     *
     * @param code lookup code
     * @return the lookup with values (null when absent — confirm in the XML mapper)
     */
    LookupDTO selectByCodeWithLookupValues(String code);
}
<|start_filename|>src/main/java/io/choerodon/iam/api/dto/RegistrantInfoDTO.java<|end_filename|>
package io.choerodon.iam.api.dto;
import io.choerodon.iam.infra.dto.UserDTO;
import io.swagger.annotations.ApiModelProperty;
/**
 * Information about a registrant (the registering user plus the organization
 * created for the registration and the id of its admin user).
 */
public class RegistrantInfoDTO {
    @ApiModelProperty(value = "注册人Id")
    private Long id;
    @ApiModelProperty(value = "注册人登录名")
    private String loginName;
    @ApiModelProperty(value = "注册人邮箱")
    private String email;
    @ApiModelProperty(value = "注册人用户名")
    private String realName;
    @ApiModelProperty(value = "注册组织ID")
    private Long organizationId;
    @ApiModelProperty(value = "注册组织名称")
    private String organizationName;
    @ApiModelProperty(value = "adminId")
    private Long adminId;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Long getOrganizationId() {
        return organizationId;
    }

    public void setOrganizationId(Long organizationId) {
        this.organizationId = organizationId;
    }

    public String getOrganizationName() {
        return organizationName;
    }

    public void setOrganizationName(String organizationName) {
        this.organizationName = organizationName;
    }

    public String getLoginName() {
        return loginName;
    }

    public void setLoginName(String loginName) {
        this.loginName = loginName;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getRealName() {
        return realName;
    }

    public void setRealName(String realName) {
        this.realName = realName;
    }

    public Long getAdminId() {
        return adminId;
    }

    public void setAdminId(Long adminId) {
        this.adminId = adminId;
    }

    /**
     * Copy the identity fields (id, email, loginName, realName, organizationId)
     * from the given user. Note: organizationName and adminId are NOT set here
     * and must be populated separately by the caller.
     */
    public void setUser(UserDTO userDTO) {
        this.id = userDTO.getId();
        this.email = userDTO.getEmail();
        this.loginName = userDTO.getLoginName();
        this.realName = userDTO.getRealName();
        this.organizationId = userDTO.getOrganizationId();
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/query/RoleQuery.java<|end_filename|>
package io.choerodon.iam.api.query;
/**
* 角色查询对象
*
* @author superlee
* @since 2019-04-16
*/
public class RoleQuery {
    // Fuzzy-search fields (used by the mapper's fulltextSearch / pagingQueryOrgRoles).
    private String name;
    private String code;
    private String level;
    private Boolean builtIn;
    private Boolean enabled;
    // Free-text keywords, joined into one param string by ParamUtils.arrToStr.
    private String[] params;
    // When true, the service eagerly attaches the users holding each role.
    // NOTE: the field is named isWithUser but the accessors are
    // getWithUser/setWithUser — keep both names stable, callers rely on them.
    private Boolean isWithUser;
    private Long userId;
    // Scope of the query: sourceType is the resource level, sourceId the org/project id.
    private String sourceType;
    private Long sourceId;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public String getLevel() {
        return level;
    }

    public void setLevel(String level) {
        this.level = level;
    }

    public Boolean getBuiltIn() {
        return builtIn;
    }

    public void setBuiltIn(Boolean builtIn) {
        this.builtIn = builtIn;
    }

    public Boolean getEnabled() {
        return enabled;
    }

    public void setEnabled(Boolean enabled) {
        this.enabled = enabled;
    }

    public String[] getParams() {
        return params;
    }

    public void setParams(String[] params) {
        this.params = params;
    }

    public Boolean getWithUser() {
        return isWithUser;
    }

    public void setWithUser(Boolean withUser) {
        isWithUser = withUser;
    }

    public Long getUserId() {
        return userId;
    }

    public void setUserId(Long userId) {
        this.userId = userId;
    }

    public String getSourceType() {
        return sourceType;
    }

    public void setSourceType(String sourceType) {
        this.sourceType = sourceType;
    }

    public Long getSourceId() {
        return sourceId;
    }

    public void setSourceId(Long sourceId) {
        this.sourceId = sourceId;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/enums/ApplicationType.java<|end_filename|>
package io.choerodon.iam.infra.enums;
/**
* 应用的分类
*
* @author superlee
* @since 0.15.0
*/
public enum ApplicationType {

    /** Development application. */
    DEVELOPMENT("开发应用", "normal"),

    /** Test application. */
    TEST("测试应用", "test");

    private final String value;
    private final String code;

    ApplicationType(String value, String code) {
        this.value = value;
        this.code = code;
    }

    /** @return the human-readable display name of this type */
    public String value() {
        return value;
    }

    /** @return the machine code of this type */
    public String code() {
        return code;
    }

    /**
     * Whether any application type declares the given code.
     *
     * @param code candidate code (null yields false)
     * @return true when some type's code equals the argument
     */
    public static boolean matchCode(String code) {
        for (ApplicationType candidate : values()) {
            if (candidate.code.equals(code)) {
                return true;
            }
        }
        return false;
    }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/app/service/impl/RoleMemberServiceImplSpec.groovy<|end_filename|>
package io.choerodon.iam.app.service.impl
import io.choerodon.asgard.saga.feign.SagaClient
import io.choerodon.core.oauth.DetailsHelper
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.app.service.RoleMemberService
import io.choerodon.iam.infra.asserts.UserAssertHelper
import io.choerodon.iam.infra.common.utils.excel.ExcelImportUserTask
import io.choerodon.iam.infra.dto.OrganizationDTO
import io.choerodon.iam.infra.mapper.ClientMapper
import io.choerodon.iam.infra.mapper.LabelMapper
import io.choerodon.iam.infra.mapper.MemberRoleMapper
import io.choerodon.iam.infra.mapper.OrganizationMapper
import io.choerodon.iam.infra.mapper.ProjectMapper
import io.choerodon.iam.infra.mapper.RoleMapper
import io.choerodon.iam.infra.mapper.UploadHistoryMapper
import org.apache.http.entity.ContentType
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import org.springframework.mock.web.MockMultipartFile
import org.springframework.transaction.annotation.Transactional
import org.springframework.web.multipart.MultipartFile
import spock.lang.Specification
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * Integration spec for RoleMemberServiceImpl.
 *
 * @author dengyouquan
 */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class RoleMemberServiceImplSpec extends Specification {
    @Autowired
    ExcelImportUserTask excelImportUserTask
    @Autowired
    ExcelImportUserTask.FinishFallback finishFallback
    @Autowired
    OrganizationMapper organizationMapper
    @Autowired
    ProjectMapper projectMapper
    @Autowired
    MemberRoleMapper memberRoleMapper
    @Autowired
    RoleMapper roleMapper
    @Autowired
    UserAssertHelper userAssertHelper
    SagaClient sagaClient = Mock(SagaClient)
    @Autowired
    LabelMapper labelMapper
    @Autowired
    ClientMapper clientMapper
    // Renamed from "UploadHistoryMapper": a field must not shadow its own type name.
    @Autowired
    UploadHistoryMapper uploadHistoryMapper
    RoleMemberService roleMemberService

    def setup() {
        given: "构造 roleMemberService"
        roleMemberService = new RoleMemberServiceImpl(excelImportUserTask, finishFallback,
                organizationMapper, projectMapper, memberRoleMapper, roleMapper, userAssertHelper, sagaClient,
                labelMapper, clientMapper, uploadHistoryMapper)
        DetailsHelper.setCustomUserDetails(1L, "zh_CN")
    }

    @Transactional
    def "Import2MemberRole"() {
        given: "构造请求参数"
        File excelFile = new File(this.class.getResource('/templates/roleAssignment.xlsx').toURI())
        FileInputStream fileInputStream = new FileInputStream(excelFile)
        MultipartFile multipartFile = new MockMultipartFile(excelFile.getName(),
                excelFile.getName(), ContentType.APPLICATION_OCTET_STREAM.toString(),
                fileInputStream)
        when: "调用方法"
        roleMemberService.import2MemberRole(0L, "site", multipartFile)
        then: "校验结果"
        noExceptionThrown()
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/app/service/impl/RoleServiceImpl.java<|end_filename|>
package io.choerodon.iam.app.service.impl;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import io.choerodon.asgard.saga.dto.StartInstanceDTO;
import io.choerodon.asgard.saga.feign.SagaClient;
import io.choerodon.base.domain.PageRequest;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.exception.CommonException;
import io.choerodon.core.iam.ResourceLevel;
import io.choerodon.iam.api.query.ClientRoleQuery;
import io.choerodon.iam.api.dto.RoleAssignmentSearchDTO;
import io.choerodon.iam.api.dto.payload.UserMemberEventPayload;
import io.choerodon.iam.api.query.RoleQuery;
import io.choerodon.iam.api.validator.ResourceLevelValidator;
import io.choerodon.iam.app.service.RoleService;
import io.choerodon.iam.infra.asserts.LabelAssertHelper;
import io.choerodon.iam.infra.asserts.PermissionAssertHelper;
import io.choerodon.iam.infra.asserts.RoleAssertHelper;
import io.choerodon.iam.infra.common.utils.ParamUtils;
import io.choerodon.iam.infra.dto.*;
import io.choerodon.iam.infra.exception.EmptyParamException;
import io.choerodon.iam.infra.exception.IllegalArgumentException;
import io.choerodon.iam.infra.exception.InsertException;
import io.choerodon.iam.infra.exception.UpdateExcetion;
import io.choerodon.iam.infra.mapper.*;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static io.choerodon.iam.infra.common.utils.SagaTopic.MemberRole.MEMBER_ROLE_UPDATE;
/**
* @author superlee
* @data 2018/3/27
*/
@Component
public class RoleServiceImpl implements RoleService {
// When true, role/label changes are propagated to devops via saga events.
@Value("${choerodon.devops.message:false}")
private boolean devopsMessage;
// Mappers and assert helpers, injected through the constructor below.
private ClientMapper clientMapper;
private RoleMapper roleMapper;
private RoleAssertHelper roleAssertHelper;
private LabelAssertHelper labelAssertHelper;
private RoleLabelMapper roleLabelMapper;
private LabelMapper labelMapper;
private UserMapper userMapper;
private RolePermissionMapper rolePermissionMapper;
private PermissionAssertHelper permissionAssertHelper;
private SagaClient sagaClient;
// Shared JSON serializer for saga payloads (ObjectMapper is thread-safe once configured).
private final ObjectMapper mapper = new ObjectMapper();
// Constructor injection of all collaborators (no field injection).
public RoleServiceImpl(ClientMapper clientMapper,
                       RoleMapper roleMapper,
                       RoleAssertHelper roleAssertHelper,
                       LabelAssertHelper labelAssertHelper,
                       RoleLabelMapper roleLabelMapper,
                       PermissionAssertHelper permissionAssertHelper,
                       LabelMapper labelMapper,
                       SagaClient sagaClient, UserMapper userMapper,
                       RolePermissionMapper rolePermissionMapper) {
    this.clientMapper = clientMapper;
    this.roleMapper = roleMapper;
    this.roleAssertHelper = roleAssertHelper;
    this.labelAssertHelper = labelAssertHelper;
    this.roleLabelMapper = roleLabelMapper;
    this.permissionAssertHelper = permissionAssertHelper;
    this.labelMapper = labelMapper;
    this.sagaClient = sagaClient;
    this.userMapper = userMapper;
    this.rolePermissionMapper = rolePermissionMapper;
}
/**
 * Full-text page query of roles; when the query asks for it, eagerly attaches
 * the users holding each role at the requested resource level.
 *
 * @param pageRequest page/size
 * @param roleQuery   filters (level null is treated as site level)
 * @return one page of roles, optionally with their users populated
 */
@Override
public PageInfo<RoleDTO> pagingSearch(PageRequest pageRequest, RoleQuery roleQuery) {
    // Boolean.TRUE.equals is null-safe; the previous "!= null && == true"
    // unboxed the Boolean twice.
    boolean isWithUser = Boolean.TRUE.equals(roleQuery.getWithUser());
    final String level = roleQuery.getLevel();
    final Long sourceId = roleQuery.getSourceId();
    PageInfo<RoleDTO> roles =
            PageHelper
                    .startPage(pageRequest.getPage(), pageRequest.getSize())
                    .doSelectPageInfo(() -> roleMapper.fulltextSearch(roleQuery, ParamUtils.arrToStr(roleQuery.getParams())));
    if (isWithUser) {
        roles.getList().forEach(role -> {
            Long roleId = role.getId();
            List<UserDTO> users = new ArrayList<>();
            // A null level falls back to site level.
            if (level == null || ResourceType.isSite(level)) {
                users = userMapper.selectUsersFromMemberRoleByOptions(roleId, "user", 0L,
                        ResourceLevel.SITE.value(), null, null);
            }
            if (ResourceType.isOrganization(level)) {
                users = userMapper.selectUsersFromMemberRoleByOptions(roleId, "user",
                        sourceId, ResourceLevel.ORGANIZATION.value(), null, null);
            }
            if (ResourceType.isProject(level)) {
                users = userMapper.selectUsersFromMemberRoleByOptions(roleId, "user",
                        sourceId, ResourceLevel.PROJECT.value(), null, null);
            }
            role.setUsers(users);
        });
    }
    return roles;
}
/**
 * Page organization-level roles matching the query.
 * NOTE(review): relies on pageRequest.getSort() being non-null — confirm callers always set a sort.
 */
@Override
public PageInfo<RoleDTO> pagingQueryOrgRoles(Long orgId, PageRequest pageRequest, RoleQuery roleQuery) {
    return PageHelper
            .startPage(pageRequest.getPage(), pageRequest.getSize(), pageRequest.getSort().toSql())
            .doSelectPageInfo(() -> roleMapper.pagingQueryOrgRoles(orgId, roleQuery, ParamUtils.arrToStr(roleQuery.getParams())));
}
/**
 * Create a custom role with its permission and label relations, atomically.
 *
 * @param roleDTO role to insert; must carry at least code, level and permissions
 * @return the inserted role (id populated by MyBatis)
 * @throws InsertException when the insert does not affect exactly one row
 */
@Override
@Transactional(rollbackFor = Exception.class)
public RoleDTO create(RoleDTO roleDTO) {
    insertCheck(roleDTO);
    // Custom roles are always created enabled, modifiable and non-built-in.
    roleDTO.setBuiltIn(false);
    roleDTO.setEnabled(true);
    roleDTO.setEnableForbidden(true);
    roleDTO.setModified(true);
    roleAssertHelper.codeExisted(roleDTO.getCode());
    if (roleMapper.insertSelective(roleDTO) != 1) {
        throw new InsertException("error.role.insert");
    }
    // Maintain the role_permission table.
    insertRolePermission(roleDTO);
    // Maintain the role_label table.
    insertRoleLabel(roleDTO);
    return roleDTO;
}
/**
 * Persist the role_label rows for a freshly created role.
 * Every label must already exist and carry an id.
 */
private void insertRoleLabel(RoleDTO role) {
    List<LabelDTO> labels = role.getLabels();
    if (labels == null) {
        return;
    }
    for (LabelDTO label : labels) {
        Long labelId = label.getId();
        if (labelId == null) {
            throw new CommonException("error.label.id.null");
        }
        labelAssertHelper.labelNotExisted(labelId);
        RoleLabelDTO record = new RoleLabelDTO();
        record.setLabelId(label.getId());
        record.setRoleId(role.getId());
        roleLabelMapper.insertSelective(record);
    }
}
/**
 * Persist the role_permission rows for a freshly created role.
 * A null or empty permission list is a no-op.
 */
private void insertRolePermission(RoleDTO role) {
    List<PermissionDTO> permissions = role.getPermissions();
    if (ObjectUtils.isEmpty(permissions)) {
        return;
    }
    for (PermissionDTO permission : permissions) {
        RolePermissionDTO record = new RolePermissionDTO();
        record.setPermissionId(permission.getId());
        record.setRoleId(role.getId());
        rolePermissionMapper.insertSelective(record);
    }
}
// Pre-insert validation: resource level, code format, and non-empty permissions.
private void insertCheck(RoleDTO roleDTO) {
    validateResourceLevel(roleDTO);
    validateCode(roleDTO.getCode());
    validatePermissions(roleDTO.getPermissions());
}
/**
 * Validate the role's resource level and, when the role is built on top of
 * other roles, ensure every base role lives on the same level.
 */
private void validateResourceLevel(RoleDTO roleDTO) {
    String level = roleDTO.getResourceLevel();
    if (!ResourceType.contains(level)) {
        throw new IllegalArgumentException("error.role.illegal.level");
    }
    List<Long> roleIds = roleDTO.getRoleIds();
    if (roleIds == null) {
        return;
    }
    for (Long roleId : roleIds) {
        RoleDTO existing = roleAssertHelper.roleNotExisted(roleId);
        if (!existing.getResourceLevel().equals(level)) {
            throw new CommonException("error.roles.in.same.level");
        }
    }
}
/**
 * A role must carry at least one permission.
 * Uses ObjectUtils.isEmpty for consistency with insertRolePermission
 * (covers both null and empty).
 */
private void validatePermissions(List<PermissionDTO> permissions) {
    if (ObjectUtils.isEmpty(permissions)) {
        throw new CommonException("error.role_permission.empty");
    }
}
/**
 * Validate a role code: non-empty, at most 128 chars, and the last
 * '/'-separated segment must be lowercase alphanumeric with hyphens,
 * starting with a letter and not ending with a hyphen.
 */
private void validateCode(String code) {
    if (StringUtils.isEmpty(code)) {
        throw new CommonException("error.role.code.empty");
    }
    if (code.length() > 128) {
        throw new CommonException("error.role.code.length");
    }
    String[] segments = code.split("/");
    String tail = segments[segments.length - 1];
    if (!Pattern.matches("^[a-z]([-a-z0-9]*[a-z0-9])$", tail)) {
        throw new CommonException("error.role.code.regular.illegal");
    }
}
/**
 * Create a role whose permission set is the union of the permissions of the
 * given base roles.
 *
 * @param roleDTO role to create; roleIds lists the base roles
 * @return the created role
 */
@Override
public RoleDTO createBaseOnRoles(RoleDTO roleDTO) {
    insertCheck(roleDTO);
    List<PermissionDTO> permissionDTOS = new ArrayList<>();
    List<Long> roleIds = roleDTO.getRoleIds();
    if (roleIds != null && !roleIds.isEmpty()) {
        List<Long> permissionIds = rolePermissionMapper.queryExistingPermissionIdsByRoleIds(roleIds);
        // A sequential stream suffices for a trivial id -> DTO mapping;
        // parallelStream only added fork-join overhead here.
        permissionDTOS = permissionIds.stream().map(id -> {
            PermissionDTO permissionDTO = new PermissionDTO();
            permissionDTO.setId(id);
            return permissionDTO;
        }).collect(Collectors.toList());
    }
    roleDTO.setPermissions(permissionDTOS);
    return create(roleDTO);
}
/**
 * Update a role. Built-in roles only have their labels updated; custom roles
 * have their fields, permission relations and label relations updated.
 *
 * @param roleDTO new state; resourceLevel is never updated
 * @return the updated role
 */
@Transactional(rollbackFor = CommonException.class)
@Override
public RoleDTO update(RoleDTO roleDTO) {
    updateCheck(roleDTO);
    // An update must not change the resource level.
    roleDTO.setResourceLevel(null);
    Long id = roleDTO.getId();
    RoleDTO role1 = roleAssertHelper.roleNotExisted(id);
    // Built-in roles may not have their fields updated — only their labels.
    if (role1.getBuiltIn()) {
        updateRoleLabel(roleDTO);
        return roleDTO;
    } else {
        RoleDTO dto = updateRole(roleDTO);
        // Maintain the role_permission relations.
        updateRolePermission(dto);
        // Maintain the role_label table.
        updateRoleLabel(dto);
        return dto;
    }
}
/**
 * Persist the role's own fields, then back-fill resourceLevel from the
 * database (it was nulled out by update()) so later permission validation
 * can compare levels.
 */
private RoleDTO updateRole(RoleDTO roleDTO) {
    if (roleMapper.updateByPrimaryKeySelective(roleDTO) != 1) {
        throw new UpdateExcetion("error.role.update");
    }
    RoleDTO dto = roleMapper.selectByPrimaryKey(roleDTO);
    roleDTO.setResourceLevel(dto.getResourceLevel());
    return roleDTO;
}
/**
 * Diff the role's current permission relations against the requested set and
 * apply only the difference (insert new ones, delete removed ones).
 */
private void updateRolePermission(RoleDTO role) {
    Long roleId = role.getId();
    List<PermissionDTO> permissions = role.getPermissions();
    RolePermissionDTO rolePermissionDTO = new RolePermissionDTO();
    rolePermissionDTO.setRoleId(roleId);
    List<RolePermissionDTO> existingRolePermissions = rolePermissionMapper.select(rolePermissionDTO);
    List<Long> existingPermissionId =
            existingRolePermissions.stream().map(RolePermissionDTO::getPermissionId).collect(Collectors.toList());
    List<Long> newPermissionId =
            permissions.stream().map(PermissionDTO::getId).collect(Collectors.toList());
    // Intersection of permission ids (unchanged relations).
    List<Long> intersection = existingPermissionId.stream().filter(newPermissionId::contains).collect(Collectors.toList());
    // Permission ids to delete.
    List<Long> deleteList = existingPermissionId.stream().filter(item ->
            !intersection.contains(item)).collect(Collectors.toList());
    // Permission ids to insert.
    List<Long> insertList = newPermissionId.stream().filter(item ->
            !intersection.contains(item)).collect(Collectors.toList());
    insertList.forEach(permissionId -> {
        validate(role, permissionId);
        RolePermissionDTO rp = new RolePermissionDTO();
        rp.setRoleId(roleId);
        rp.setPermissionId(permissionId);
        if (rolePermissionMapper.insertSelective(rp) != 1) {
            throw new InsertException("error.rolePermission.insert");
        }
    });
    deleteList.forEach(permissionId -> {
        validate(role, permissionId);
        RolePermissionDTO rp = new RolePermissionDTO();
        rp.setRoleId(roleId);
        rp.setPermissionId(permissionId);
        rolePermissionMapper.delete(rp);
    });
}
/**
 * Ensure the permission exists and lives on the same resource level
 * as the role it is being attached to / detached from.
 */
private void validate(RoleDTO role, Long permissionId) {
    if (permissionId == null) {
        throw new EmptyParamException("error.permission.id.null");
    }
    PermissionDTO permission = permissionAssertHelper.permissionNotExisted(permissionId);
    boolean sameLevel = permission.getResourceLevel().equals(role.getResourceLevel());
    if (!sameLevel) {
        throw new CommonException("error.role.level.not.equals.to.permission.level");
    }
}
/**
 * Diff and apply the role's label relations, then — when devops messaging is
 * enabled and project-level users hold this role — publish a MEMBER_ROLE_UPDATE
 * saga carrying each affected user's full label set.
 */
private void updateRoleLabel(RoleDTO roleDTO) {
    RoleLabelDTO roleLabelDTO = new RoleLabelDTO();
    roleLabelDTO.setRoleId(roleDTO.getId());
    List<RoleLabelDTO> roleLabels = roleLabelMapper.select(roleLabelDTO);
    List<Long> existingLabelIds = roleLabels.stream()
            .map(RoleLabelDTO::getLabelId).collect(Collectors.toList());
    List<LabelDTO> labels = roleDTO.getLabels();
    final List<Long> newLabelIds = new ArrayList<>();
    if (!ObjectUtils.isEmpty(labels)) {
        newLabelIds.addAll(labels.stream().map(LabelDTO::getId).collect(Collectors.toList()));
    }
    // Intersection of label ids (unchanged relations).
    List<Long> intersection = existingLabelIds.stream().filter(newLabelIds::contains).collect(Collectors.toList());
    // Label ids to delete.
    List<Long> deleteList = existingLabelIds.stream().filter(item ->
            !intersection.contains(item)).collect(Collectors.toList());
    // Label ids to insert.
    List<Long> insertList = newLabelIds.stream().filter(item ->
            !intersection.contains(item)).collect(Collectors.toList());
    List<UserMemberEventPayload> userMemberEventPayloads = new ArrayList<>();
    List<UserDTO> users = userMapper.selectUsersFromMemberRoleByOptions(roleDTO.getId(), "user", null, ResourceLevel.PROJECT.value(), null, null);
    boolean sendSagaEvent = !ObjectUtils.isEmpty(users) && devopsMessage;
    doUpdateAndDelete(roleDTO, insertList, deleteList);
    if (sendSagaEvent) {
        users.forEach(user -> {
            // Labels are re-read AFTER the update so the payload reflects the new state.
            List<LabelDTO> labelList = labelMapper.selectByUserId(user.getId());
            UserMemberEventPayload payload = new UserMemberEventPayload();
            payload.setResourceId(user.getSourceId());
            payload.setUserId(user.getId());
            payload.setResourceType(ResourceLevel.PROJECT.value());
            payload.setUsername(user.getLoginName());
            Set<String> nameSet = new HashSet<>(labelList.stream().map(LabelDTO::getName).collect(Collectors.toSet()));
            payload.setRoleLabels(nameSet);
            userMemberEventPayloads.add(payload);
        });
        try {
            String input = mapper.writeValueAsString(userMemberEventPayloads);
            String refIds = userMemberEventPayloads.stream().map(t -> String.valueOf(t.getUserId())).collect(Collectors.joining(","));
            sagaClient.startSaga(MEMBER_ROLE_UPDATE, new StartInstanceDTO(input, "users", refIds));
        } catch (Exception e) {
            throw new CommonException("error.IRoleServiceImpl.update.event", e);
        }
    }
}
/**
 * Apply a label-relation diff: first insert the newly attached labels,
 * then remove the detached ones. Each id is validated before use.
 */
private void doUpdateAndDelete(RoleDTO roleDTO, List<Long> insertList, List<Long> deleteList) {
    for (Long labelId : insertList) {
        checkLabelId(labelId);
        RoleLabelDTO record = new RoleLabelDTO();
        record.setRoleId(roleDTO.getId());
        record.setLabelId(labelId);
        if (roleLabelMapper.insertSelective(record) != 1) {
            throw new CommonException("error.roleLabel.create");
        }
    }
    for (Long labelId : deleteList) {
        checkLabelId(labelId);
        RoleLabelDTO record = new RoleLabelDTO();
        record.setRoleId(roleDTO.getId());
        record.setLabelId(labelId);
        roleLabelMapper.delete(record);
    }
}
// Guard: a label relation must reference an existing, non-null label id.
private void checkLabelId(Long labelId) {
    if (labelId == null) {
        throw new CommonException("error.labelId.empty");
    }
    labelAssertHelper.labelNotExisted(labelId);
}
/**
 * Organization-scoped update: only roles owned by the organization may be
 * modified (enforced by checkRoleCanBeModified).
 */
@Override
public RoleDTO orgUpdate(RoleDTO roleDTO, Long orgId) {
    checkRoleCanBeModified(roleDTO.getId(), orgId);
    return update(roleDTO);
}
// Pre-update validation: name, level, code, optimistic-lock version, permissions.
private void updateCheck(RoleDTO roleDTO) {
    if (StringUtils.isEmpty(roleDTO.getName())) {
        throw new CommonException("error.role.name.empty");
    }
    if (roleDTO.getName().length() > 64) {
        throw new CommonException("error.role.name.size");
    }
    ResourceLevelValidator.validate(roleDTO.getResourceLevel());
    validateCode(roleDTO.getCode());
    // objectVersionNumber is required for MyBatis optimistic locking.
    if (roleDTO.getObjectVersionNumber() == null) {
        throw new CommonException("error.role.objectVersionNumber.empty");
    }
    validatePermissions(roleDTO.getPermissions());
}
/**
 * Delete a custom role together with its permission and label relations.
 * Built-in roles cannot be deleted.
 *
 * @param id role id
 * @throws CommonException when the role is built-in
 */
@Override
@Transactional(rollbackFor = Exception.class)
public void delete(Long id) {
    RoleDTO roleDTO = roleAssertHelper.roleNotExisted(id);
    // Guard first: built-in roles are protected. (The old code deleted inside
    // an if/else; the guard makes the protected path explicit, and the added
    // @Transactional makes the three deletes atomic, matching create/update.)
    if (roleDTO.getBuiltIn()) {
        throw new CommonException("error.role.not.allow.to.be.delete");
    }
    roleMapper.deleteByPrimaryKey(id);
    RolePermissionDTO rolePermission = new RolePermissionDTO();
    rolePermission.setRoleId(id);
    rolePermissionMapper.delete(rolePermission);
    RoleLabelDTO roleLabelDTO = new RoleLabelDTO();
    roleLabelDTO.setRoleId(id);
    roleLabelMapper.delete(roleLabelDTO);
}
// Fetch a role by id; roleNotExisted throws when the id is unknown.
@Override
public RoleDTO queryById(Long id) {
    return roleAssertHelper.roleNotExisted(id);
}
// Enable a role (sets enabled = true).
@Override
public RoleDTO enableRole(Long id) {
    return updateStatus(id, true);
}
/**
 * Toggle a role's enabled flag.
 *
 * @throws UpdateExcetion when the update does not affect exactly one row
 */
private RoleDTO updateStatus(Long id, boolean enabled) {
    RoleDTO dto = roleAssertHelper.roleNotExisted(id);
    dto.setEnabled(enabled);
    if (roleMapper.updateByPrimaryKeySelective(dto) != 1) {
        throw new UpdateExcetion("error.role.update.status");
    }
    return dto;
}
// Disable a role (sets enabled = false).
@Override
public RoleDTO disableRole(Long id) {
    return updateStatus(id, false);
}
// Organization-scoped enable: verifies ownership before toggling.
@Override
public RoleDTO orgEnableRole(Long roleId, Long orgId) {
    checkRoleCanBeModified(roleId, orgId);
    return enableRole(roleId);
}
// Organization-scoped disable: verifies ownership before toggling.
@Override
public RoleDTO orgDisableRole(Long roleId, Long orgId) {
    checkRoleCanBeModified(roleId, orgId);
    return disableRole(roleId);
}
// Fetch a role with its permissions and labels eagerly joined by the mapper.
@Override
public RoleDTO queryWithPermissionsAndLabels(Long id) {
    return roleMapper.selectRoleWithPermissionsAndLabels(id);
}
/**
 * Site-level roles (fuzzy name match) with the number of users holding each.
 */
@Override
public List<RoleDTO> listRolesWithUserCountOnSiteLevel(RoleAssignmentSearchDTO roleAssignmentSearchDTO) {
    List<RoleDTO> roles = roleMapper.fuzzySearchRolesByName(roleAssignmentSearchDTO.getRoleName(), ResourceLevel.SITE.value());
    String param = ParamUtils.arrToStr(roleAssignmentSearchDTO.getParam());
    for (RoleDTO role : roles) {
        Integer count = userMapper.selectUserCountFromMemberRoleByOptions(role.getId(),
                "user", 0L, ResourceLevel.SITE.value(), roleAssignmentSearchDTO, param);
        role.setUserCount(count);
    }
    return roles;
}
/**
 * Site-level roles (fuzzy name match) with the number of clients holding each.
 * Note: the client count is stored via setUserCount, mirroring the original.
 */
@Override
public List<RoleDTO> listRolesWithClientCountOnSiteLevel(ClientRoleQuery clientRoleSearchDTO) {
    List<RoleDTO> roles = roleMapper.fuzzySearchRolesByName(clientRoleSearchDTO.getRoleName(), ResourceLevel.SITE.value());
    String param = ParamUtils.arrToStr(clientRoleSearchDTO.getParam());
    for (RoleDTO role : roles) {
        Integer count = clientMapper.selectClientCountFromMemberRoleByOptions(
                role.getId(), ResourceLevel.SITE.value(), 0L, clientRoleSearchDTO, param);
        role.setUserCount(count);
    }
    return roles;
}
/**
 * Organization-level roles (fuzzy name match) with the number of users
 * holding each within the given organization.
 */
@Override
public List<RoleDTO> listRolesWithUserCountOnOrganizationLevel(RoleAssignmentSearchDTO roleAssignmentSearchDTO, Long sourceId) {
    List<RoleDTO> roles = roleMapper.fuzzySearchRolesByName(roleAssignmentSearchDTO.getRoleName(), ResourceLevel.ORGANIZATION.value());
    String param = ParamUtils.arrToStr(roleAssignmentSearchDTO.getParam());
    for (RoleDTO role : roles) {
        Integer count = userMapper.selectUserCountFromMemberRoleByOptions(role.getId(),
                "user", sourceId, ResourceLevel.ORGANIZATION.value(), roleAssignmentSearchDTO, param);
        role.setUserCount(count);
    }
    return roles;
}
/**
 * Organization-level roles (fuzzy name match) with the number of clients
 * holding each within the given organization.
 */
@Override
public List<RoleDTO> listRolesWithClientCountOnOrganizationLevel(ClientRoleQuery clientRoleSearchDTO, Long sourceId) {
    List<RoleDTO> roles = roleMapper.fuzzySearchRolesByName(clientRoleSearchDTO.getRoleName(), ResourceLevel.ORGANIZATION.value());
    String param = ParamUtils.arrToStr(clientRoleSearchDTO.getParam());
    for (RoleDTO role : roles) {
        Integer count = clientMapper.selectClientCountFromMemberRoleByOptions(
                role.getId(), ResourceLevel.ORGANIZATION.value(), sourceId, clientRoleSearchDTO, param);
        role.setUserCount(count);
    }
    return roles;
}
/**
 * Project-level roles (fuzzy name match) with the number of users holding
 * each within the given project.
 */
@Override
public List<RoleDTO> listRolesWithUserCountOnProjectLevel(RoleAssignmentSearchDTO roleAssignmentSearchDTO, Long sourceId) {
    List<RoleDTO> roles = roleMapper.fuzzySearchRolesByName(roleAssignmentSearchDTO.getRoleName(), ResourceLevel.PROJECT.value());
    String param = ParamUtils.arrToStr(roleAssignmentSearchDTO.getParam());
    for (RoleDTO role : roles) {
        Integer count = userMapper.selectUserCountFromMemberRoleByOptions(
                role.getId(), "user", sourceId, ResourceLevel.PROJECT.value(), roleAssignmentSearchDTO, param);
        role.setUserCount(count);
    }
    return roles;
}
@Override
public List<RoleDTO> listRolesWithClientCountOnProjectLevel(ClientRoleQuery clientRoleSearchDTO, Long sourceId) {
    // Fuzzy-match project-level roles by name, then attach the client count for the given project.
    String searchParam = ParamUtils.arrToStr(clientRoleSearchDTO.getParam());
    List<RoleDTO> matchedRoles =
            roleMapper.fuzzySearchRolesByName(clientRoleSearchDTO.getRoleName(), ResourceLevel.PROJECT.value());
    for (RoleDTO role : matchedRoles) {
        role.setUserCount(clientMapper.selectClientCountFromMemberRoleByOptions(
                role.getId(), ResourceLevel.PROJECT.value(), sourceId, clientRoleSearchDTO, searchParam));
    }
    return matchedRoles;
}
/**
 * Validates a role before create/update.
 * Fails fast when the role code is blank, then delegates to
 * {@link #checkCode(RoleDTO)} for the uniqueness check.
 *
 * @param role role to validate; only its code (and, transitively, id) is inspected
 * @throws CommonException "error.role.code.empty" when the code is null or empty
 */
@Override
public void check(RoleDTO role) {
    // Original used a boxed Boolean plus double negation; a direct guard is clearer.
    if (StringUtils.isEmpty(role.getCode())) {
        throw new CommonException("error.role.code.empty");
    }
    checkCode(role);
}
/**
 * Ensures the role code is unique.
 * Create path (no id yet): the code must not exist at all.
 * Update path: the code may belong to this role itself, but not to a different role.
 *
 * @param role role carrying the code to verify and, for updates, the role id
 * @throws CommonException "error.role.code.exist" when another role already owns the code
 */
private void checkCode(RoleDTO role) {
    // Original called StringUtils.isEmpty on a Long id, which is effectively just a null
    // check dressed up as a String utility; make the null check explicit instead.
    if (role.getId() == null) {
        roleAssertHelper.codeExisted(role.getCode());
        return;
    }
    RoleDTO example = new RoleDTO();
    example.setCode(role.getCode());
    RoleDTO existing = roleMapper.selectOne(example);
    // A hit is only a conflict when it belongs to a different role than the one being updated.
    if (existing != null && !role.getId().equals(existing.getId())) {
        throw new CommonException("error.role.code.exist");
    }
}
@Override
public List<Long> queryIdsByLabelNameAndLabelType(String labelName, String labelType) {
    // Look up every role carrying the label, then project the result down to role ids.
    List<RoleDTO> labeledRoles = roleMapper.selectRolesByLabelNameAndType(labelName, labelType, null);
    return labeledRoles
            .stream()
            .map(r -> r.getId())
            .collect(Collectors.toList());
}
@Override
public List<RoleDTO> selectByLabel(String label, Long organizationId) {
    // Thin wrapper: fixes the label type to "role" and scopes the lookup to one organization.
    return roleMapper.selectRolesByLabelNameAndType(label, "role", organizationId);
}
@Override
public List<RoleDTO> listRolesBySourceIdAndTypeAndUserId(String sourceType, Long sourceId, Long userId) {
    // Thin wrapper: roles a user holds within one source (e.g. an organization or project).
    return roleMapper.queryRolesInfoByUser(sourceType, sourceId, userId);
}
@Override
public RoleDTO queryByCode(String code) {
    // Role codes are unique, so a selectOne on an example with only the code set is sufficient.
    RoleDTO example = new RoleDTO();
    example.setCode(code);
    return roleMapper.selectOne(example);
}
/**
 * Verifies that a role may be modified.
 * When: invoked before an organization-level role is updated or enabled/disabled.
 * Rules: 1. the role must exist; 2. organization-level roles created at platform (site)
 * level must not be modified; 3. roles created by a different organization must not be modified.
 *
 * @param roleId id of the role being modified
 * @param orgId  id of the organization attempting the modification
 */
private void checkRoleCanBeModified(Long roleId, Long orgId) {
    RoleDTO roleDTO = roleAssertHelper.roleNotExisted(roleId);
    // Null organizationId means the role was created at site level; a different id means another org owns it.
    if (roleDTO.getOrganizationId() == null || !Objects.equals(roleDTO.getOrganizationId(), orgId)) {
        throw new CommonException("error.role.modify.check");
    }
}
}
<|start_filename|>src/main/java/io/choerodon/iam/api/controller/v1/LabelController.java<|end_filename|>
package io.choerodon.iam.api.controller.v1;
import io.choerodon.base.annotation.Permission;
import io.choerodon.base.enums.ResourceType;
import io.choerodon.core.base.BaseController;
import io.choerodon.iam.app.service.LabelService;
import io.choerodon.iam.infra.dto.LabelDTO;
import io.swagger.annotations.ApiOperation;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
* @author superlee
*/
@RestController
@RequestMapping(value = "/v1/labels")
public class LabelController extends BaseController {

    private LabelService labelService;

    public LabelController(LabelService labelService) {
        this.labelService = labelService;
    }

    /**
     * Lists labels matching the given example object at site level.
     *
     * @param label example whose non-null fields act as query conditions (e.g. type)
     * @return the matching labels
     */
    @Permission(type = ResourceType.SITE)
    @ApiOperation(value = "通过类型查询label")
    @GetMapping
    public ResponseEntity<List<LabelDTO>> listByType(LabelDTO label) {
        return new ResponseEntity<>(labelService.listByOption(label), HttpStatus.OK);
    }

    /**
     * Lists organization-level labels matching the given example object.
     * The level filter is forced to ORGANIZATION regardless of caller input.
     *
     * @param organizationId organization id from the path
     *                       (NOTE(review): not applied as a query condition here — presumably scoping is
     *                       handled by the permission annotation; confirm)
     * @param label          example whose non-null fields act as query conditions
     * @return the matching labels
     */
    @Permission(type = ResourceType.ORGANIZATION)
    @ApiOperation(value = "通过类型查询组织层label")
    @GetMapping(value = "/org/{organization_id}")
    public ResponseEntity<List<LabelDTO>> listByTypeAtOrg(@PathVariable(name = "organization_id") Long organizationId,
                                                          LabelDTO label) {
        label.setLevel(ResourceType.ORGANIZATION.value());
        return new ResponseEntity<>(labelService.listByOption(label), HttpStatus.OK);
    }
}
<|start_filename|>src/test/groovy/io/choerodon/iam/SecurityConfiguration.groovy<|end_filename|>
package io.choerodon.iam
import org.springframework.boot.test.context.TestConfiguration
import org.springframework.core.annotation.Order
import org.springframework.security.config.annotation.web.builders.HttpSecurity
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter
/**
* Created by superlee on 2018/9/7.
*/
@TestConfiguration
@Order(1)
class SecurityConfiguration extends WebSecurityConfigurerAdapter {
    /**
     * Makes the embedded h2-console reachable in tests: CSRF protection is
     * skipped for its endpoints and the X-Frame-Options header is disabled so
     * the console can render inside a frame.
     * @param http
     * @throws Exception
     */
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        http.csrf().ignoringAntMatchers("/h2-console/**")
                .and()
                .headers().frameOptions().disable()
    }
}
<|start_filename|>react/src/app/iam/dashboard/Document/index.js<|end_filename|>
import React, { Component } from 'react';
import { DashBoardNavBar } from '@choerodon/boot';
import './index.scss';
export default class Document extends Component {
render() {
return (
<div className="c7n-iam-dashboard-document">
<ul>
<li>
<a target="choerodon" href="http://choerodon.io/zh/docs/concept/choerodon-concept/">Choerodon 是什么?</a>
</li>
<li>
<a target="choerodon" href="http://choerodon.io/zh/docs/concept/platform-concept/">平台概念</a>
</li>
<li>
<a target="choerodon" href="http://choerodon.io/zh/docs/concept/choerodon-system-architecture/">系统架构</a>
</li>
<li>
<a target="choerodon" href="http://choerodon.io/zh/docs/user-guide/">用户手册</a>
</li>
<li>
<a target="choerodon" href="http://choerodon.io/zh/docs/development-guide/">开发手册</a>
</li>
</ul>
<DashBoardNavBar>
<a target="choerodon" href="http://choerodon.io/zh/docs/">转至所有文档</a>
</DashBoardNavBar>
</div>
);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/dto/RolePermissionDTO.java<|end_filename|>
package io.choerodon.iam.infra.dto;
import io.choerodon.mybatis.entity.BaseDTO;
import io.swagger.annotations.ApiModelProperty;

import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;
/**
* @author superlee
* @since 2019-04-23
*/
@Table(name = "iam_role_permission")
public class RolePermissionDTO extends BaseDTO {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@ApiModelProperty(value = "主键ID")
private Long id;
@ApiModelProperty(value = "角色ID/必填")
private Long roleId;
@ApiModelProperty(value = "权限ID/必填")
@NotEmpty(message = "error.rolePermission.permissionId.empty")
private Long permissionId;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Long getRoleId() {
return roleId;
}
public void setRoleId(Long roleId) {
this.roleId = roleId;
}
public Long getPermissionId() {
return permissionId;
}
public void setPermissionId(Long permissionId) {
this.permissionId = permissionId;
}
}
<|start_filename|>react/src/app/iam/containers/organization/organization-setting/OrganizationSetting.js<|end_filename|>
import React, { Component } from 'react';
import { inject, observer } from 'mobx-react';
import { Button, Form, Input, Modal, Select, Icon, Tabs } from 'choerodon-ui';
import { Content, Header, Page, Permission, stores } from '@choerodon/boot';
import { injectIntl, FormattedMessage } from 'react-intl';
import { withRouter } from 'react-router-dom';
import classnames from 'classnames';
import './OrganizationSetting.scss';
import OrganizationSettingStore from '../../../stores/organization/organization-setting/OrganizationSettingStore';
import '../../../common/ConfirmModal.scss';
import BasicInfoSetting from './basic-info-setting/BasicInfoSetting';
import PasswordPolicy from './password-policy/PasswordPolicy';
const { TabPane } = Tabs;
@withRouter
// @injectIntl
// @inject('AppState')
export default class OrganizationSetting extends Component {
  // Organization settings page: a tab container hosting the individual setting panels.
  // NOTE(review): the LDAP / client / work-log tabs still render placeholder content ("3").
  render() {
    return (
      <div className="c7n-organization-manager">
        <h1 className="header">组织设置</h1>
        <Tabs>
          <TabPane tab="基本信息">
            <BasicInfoSetting />
          </TabPane>
          <TabPane tab="密码策略">
            <PasswordPolicy />
          </TabPane>
          <TabPane tab="LDAP设置">3</TabPane>
          <TabPane tab="客户端">3</TabPane>
          <TabPane tab="工作日志">3</TabPane>
        </Tabs>
      </div>
    );
  }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/feign/fallback/AsgardFeignClientFallback.java<|end_filename|>
package io.choerodon.iam.infra.feign.fallback;
import org.springframework.stereotype.Component;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.infra.feign.AsgardFeignClient;
/**
* @author dengyouquan
**/
@Component
public class AsgardFeignClientFallback implements AsgardFeignClient {

    // Fallback invoked when the asgard service call fails; surfaces the failure
    // to the caller instead of silently swallowing it.
    @Override
    public void disableOrg(long orgId) {
        throw new CommonException("error.asgard.quartzTask.disableOrg");
    }

    @Override
    public void disableProj(long projectId) {
        throw new CommonException("error.asgard.quartzTask.disableProject");
    }
}
<|start_filename|>react/src/app/iam/containers/user/organization-info/OrganizationInfo.js<|end_filename|>
/**
* Created by hulingfangzi on 2018/7/2.
*/
import React, { Component } from 'react';
import { runInAction } from 'mobx';
import { inject, observer } from 'mobx-react';
import { Button, Icon, Modal, Table, Tooltip } from 'choerodon-ui';
import { FormattedMessage, injectIntl } from 'react-intl';
import { Link, withRouter } from 'react-router-dom';
import { Content, Header, Page, Permission } from '@choerodon/boot';
import './OrganizationInfo.scss';
import PermissionInfo from '../permission-info';
import MouseOverWrapper from '../../../components/mouseOverWrapper';
const intlPrefix = 'user.orginfo';
const { Sidebar } = Modal;
@withRouter
@injectIntl
@inject('AppState')
@observer
export default class ProjectInfo extends Component {
componentWillMount() {
this.loadInitData();
}
loadInitData(pagination, params) {
const { OrganizationInfoStore, AppState: { getUserInfo: { id } } } = this.props;
OrganizationInfoStore.loadData(id, pagination, params);
}
handlePageChange = (pagination, filters, sort, params) => {
this.loadInitData(pagination, params);
};
getRowKey = (record, id) => {
if ('roles' in record) {
return record.id;
} else {
return `${id}-${record.id}`;
}
};
/* 打开sidebar */
openSidebar = (record) => {
const { OrganizationInfoStore, PermissionInfoStore } = this.props;
runInAction(() => {
if (record.id !== PermissionInfoStore.role.id) {
PermissionInfoStore.clear();
PermissionInfoStore.setRole(record);
PermissionInfoStore.loadData();
}
OrganizationInfoStore.showSideBar();
});
};
// 关闭sidebar
closeSidebar = () => {
const { OrganizationInfoStore } = this.props;
OrganizationInfoStore.hideSideBar();
};
handleRefresh = () => {
const { OrganizationInfoStore, AppState: { getUserInfo: { id } } } = this.props;
OrganizationInfoStore.refresh(id);
};
getTableColumns() {
return [{
title: <FormattedMessage id={`${intlPrefix}.name`} />,
dataIndex: 'name',
key: 'name',
width: '35%',
render: (text, record) => {
let icon = '';
if ('roles' in record) {
icon = 'domain';
} else {
icon = 'person';
}
return (
<MouseOverWrapper text={text} width={0.25} className={'c7n-org-info-orgname'}>
<Icon type={icon} style={{ verticalAlign: 'text-bottom' }} />
{text}
</MouseOverWrapper>
);
},
}, {
title: <FormattedMessage id="code" />,
dataIndex: 'code',
key: 'code',
className: 'c7n-org-info-code',
width: '35%',
render: text => (
<MouseOverWrapper text={text} width={0.3}>
{text}
</MouseOverWrapper>
),
}, {
title: <FormattedMessage id="type" />,
dataIndex: 'type',
key: 'type',
width: '15%',
render: (text, record) => (
'projects' in record ? '组织' : '角色'
),
}, {
title: '',
width: '15%',
key: 'action',
align: 'right',
render: (text, record) => {
if (!('projects' in record)) {
return (
<Permission service={['iam-service.role.listPermissionById']}>
<Tooltip
title={<FormattedMessage id="detail" />}
placement="bottom"
>
<Button
shape="circle"
icon="find_in_page"
size="small"
onClick={this.openSidebar.bind(this, record)}
/>
</Tooltip>
</Permission>
);
} else {
const { id, name } = record;
return (
<Tooltip
title={<FormattedMessage id={`${intlPrefix}.organization.redirect`} values={{ name }} />}
placement="bottomRight"
>
<Link to={`/?type=organization&id=${id}&name=${encodeURIComponent(name)}`}>
<Button
shape="circle"
icon="exit_to_app"
size="small"
/>
</Link>
</Tooltip>
);
}
},
}];
}
render() {
const {
AppState: { getUserInfo: { realName: name } }, intl, PermissionInfoStore,
OrganizationInfoStore: { organizationRolesData, sidebarVisible, pagination, loading, params },
} = this.props;
let proId;
return (
<Page
service={[
'iam-service.user.listOrganizationAndRoleById',
'iam-service.role.listPermissionById',
]}
>
<Header title={<FormattedMessage id={`${intlPrefix}.header.title`} />}>
<Button
onClick={this.handleRefresh}
icon="refresh"
>
<FormattedMessage id="refresh" />
</Button>
</Header>
<Content>
<Table
loading={loading}
dataSource={organizationRolesData}
pagination={pagination}
columns={this.getTableColumns()}
filters={params}
childrenColumnName="roles"
rowKey={(record) => {
proId = this.getRowKey(record, proId);
return proId;
}}
onChange={this.handlePageChange}
filterBarPlaceholder={intl.formatMessage({ id: 'filtertable' })}
/>
<Sidebar
title={<FormattedMessage id={`${intlPrefix}.detail.header.title`} />}
visible={sidebarVisible}
onOk={this.closeSidebar}
okText={<FormattedMessage id="close" />}
okCancel={false}
>
<PermissionInfo store={PermissionInfoStore} type={intlPrefix} />
</Sidebar>
</Content>
</Page>
);
}
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/common/utils/PageUtils.java<|end_filename|>
package io.choerodon.iam.infra.common.utils;
/**
 * Pagination helper: converts a 1-based page number and a page size into the
 * zero-based row offset used by SQL LIMIT/OFFSET style queries.
 */
public class PageUtils {

    /**
     * Returns the zero-based index of the first row on the given page.
     * Page numbers below 1 are normalized to 1, so the offset is never negative.
     *
     * @param page 1-based page number (values &lt; 1 are treated as 1)
     * @param size number of rows per page
     * @return zero-based offset of the first row on the page
     */
    public static int getBegin(int page, int size) {
        // Math.max replaces the original manual ternary clamp.
        return (Math.max(page, 1) - 1) * size;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/asserts/ApplicationAssertHelper.java<|end_filename|>
package io.choerodon.iam.infra.asserts;
import org.springframework.stereotype.Component;
import org.springframework.util.ObjectUtils;
import io.choerodon.core.exception.CommonException;
import io.choerodon.iam.infra.dto.ApplicationDTO;
import io.choerodon.iam.infra.mapper.ApplicationMapper;
/**
 * Assertion helper for applications.
 *
 * @author superlee
 * @since 2019-05-13
 */
@Component
public class ApplicationAssertHelper extends AssertHelper {

    private static final String ERROR_APPLICATION_NOT_EXIST = "error.application.not.exist";
    private static final String ERROR_APPLICATION_EXIST = "error.application.exist";

    private ApplicationMapper applicationMapper;

    public ApplicationAssertHelper(ApplicationMapper applicationMapper) {
        this.applicationMapper = applicationMapper;
    }

    /**
     * Asserts that an application with the given id exists.
     *
     * @param id application id
     * @return the existing application
     * @throws CommonException "error.application.not.exist" when no row matches
     */
    public ApplicationDTO applicationNotExisted(Long id) {
        ApplicationDTO applicationDTO = applicationMapper.selectByPrimaryKey(id);
        if (ObjectUtils.isEmpty(applicationDTO)) {
            throw new CommonException(ERROR_APPLICATION_NOT_EXIST);
        }
        return applicationDTO;
    }

    /**
     * Asserts that no application with the same code/project/organization
     * combination already exists.
     *
     * @param applicationDTO application whose code, projectId and organizationId form the uniqueness key
     * @throws CommonException "error.application.exist" when a matching row is found
     */
    public void applicationExisted(ApplicationDTO applicationDTO) {
        ApplicationDTO example = new ApplicationDTO();
        example.setCode(applicationDTO.getCode());
        example.setProjectId(applicationDTO.getProjectId());
        example.setOrganizationId(applicationDTO.getOrganizationId());
        example = applicationMapper.selectOne(example);
        if (!ObjectUtils.isEmpty(example)) {
            throw new CommonException(ERROR_APPLICATION_EXIST);
        }
    }
}
<|start_filename|>react/src/app/iam/stores/dashboard/organizationStatistics/index.js<|end_filename|>
// Barrel file: re-export the dashboard component as this folder's default entry point.
export { default } from './OrganizationStatistics';
<|start_filename|>src/test/groovy/io/choerodon/iam/api/controller/v1/RoleControllerSpec.groovy<|end_filename|>
package io.choerodon.iam.api.controller.v1
import com.github.pagehelper.PageInfo
import io.choerodon.core.domain.Page
import io.choerodon.core.exception.ExceptionResponse
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.api.dto.RoleSearchDTO
import io.choerodon.iam.infra.dto.PermissionDTO
import io.choerodon.iam.infra.dto.RoleDTO
import io.choerodon.iam.infra.dto.RolePermissionDTO
import io.choerodon.iam.infra.mapper.PermissionMapper
import io.choerodon.iam.infra.mapper.RoleMapper
import io.choerodon.iam.infra.mapper.RolePermissionMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.boot.test.web.client.TestRestTemplate
import org.springframework.context.annotation.Import
import org.springframework.http.HttpEntity
import org.springframework.http.HttpMethod
import spock.lang.Shared
import spock.lang.Specification
import spock.lang.Stepwise
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
* @author dengyouquan
* */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
@Stepwise
// Integration spec for /v1/roles. Fixture data (3 permissions, 1 role, 3 role-permission
// links) is inserted once by the first setup() via the needInit flag and removed by the
// last cleanup() via needClean; @Stepwise makes the features run in declaration order.
class RoleControllerSpec extends Specification {
    private static final String BASE_PATH = "/v1/roles"
    @Autowired
    private TestRestTemplate restTemplate
    @Shared
    def roleId = 1L
    @Autowired
    private PermissionMapper permissionMapper
    @Autowired
    private RoleMapper roleMapper
    @Autowired
    RolePermissionMapper rolePermissionMapper
    // One-shot guards: insert fixtures before the first feature, clean after the last.
    @Shared
    def needInit = true
    @Shared
    def needClean = false
    @Shared
    def permissionDOList = new ArrayList<PermissionDTO>()
    @Shared
    def rolePermissionDOList = new ArrayList<RolePermissionDTO>()
    @Shared
    def roleDTO = new RoleDTO()

    def setup() {
        if (needInit) {
            given: "构造参数"
            needInit = false
            for (int i = 0; i < 3; i++) {
                PermissionDTO permissionDTO = new PermissionDTO()
                permissionDTO.setCode("iam-service.permission.get" + i)
                permissionDTO.setPath("/v1/permission/" + i)
                permissionDTO.setMethod("get")
                permissionDTO.setResourceLevel("site")
                permissionDTO.setDescription("Description" + i)
                permissionDTO.setAction("get")
                permissionDTO.setController("service" + i)
                permissionDTO.setLoginAccess(false)
                permissionDTO.setPublicAccess(false)
                permissionDTO.setServiceCode("iam-service")
                permissionDOList.add(permissionDTO)
            }
            roleDTO.setCode("role/site/default/permissioner")
            roleDTO.setName("权限管理员")
            roleDTO.setResourceLevel("site")
            when: "插入记录"
            int count = 0;
            for (PermissionDTO dto : permissionDOList) {
                permissionMapper.insert(dto)
                count++
            }
            roleDTO.setPermissions(permissionDOList)
            count += roleMapper.insert(roleDTO)
            for (int i = 0; i < 3; i++) {
                RolePermissionDTO rolePermissionDTO = new RolePermissionDTO()
                rolePermissionDTO.setPermissionId(permissionDOList.get(i).getId())
                rolePermissionDTO.setRoleId(roleDTO.getId())
                rolePermissionDOList.add(rolePermissionDTO)
            }
            for (RolePermissionDTO dto:rolePermissionDOList) {
                rolePermissionMapper.insert(dto)
                count++
            }
            then: "校验结果"
            // 3 permissions + 1 role + 3 role-permission links = 7 inserts
            count == 7
        }
    }

    def cleanup() {
        if (needClean) {
            given: ""
            def count = 0
            needClean = false
            when: "删除记录"
            for (PermissionDTO permissionDO : permissionDOList) {
                count += permissionMapper.deleteByPrimaryKey(permissionDO)
            }
            for (RolePermissionDTO rolePermissionDO : rolePermissionDOList) {
                count += rolePermissionMapper.deleteByPrimaryKey(rolePermissionDO)
            }
            count += roleMapper.deleteByPrimaryKey(roleDTO)
            then: "校验结果"
            count == 7
        }
    }

    def "List"() {
        given: "构造请求参数"
        def paramsMap = new HashMap<String, Object>()
        def roleSearchDTO = new RoleSearchDTO()
        when: "调用方法[全查询]"
        def entity = restTemplate.postForEntity(BASE_PATH + "/search", roleSearchDTO, PageInfo, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().pageNum != 0
        when: "调用方法"
        paramsMap.put("source_id", 0)
        paramsMap.put("source_type", "site")
        paramsMap.put("need_users", true)
        roleSearchDTO.setLevel("site")
        entity = restTemplate.postForEntity(BASE_PATH + "/search", roleSearchDTO, PageInfo, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().pageNum != 0
    }

    def "QueryIdsByLabelNameAndLabelType"() {
        given: "构造请求参数"
        // def roleDTO = roleMapper.selectByPrimaryKey(roleId)
        def paramsMap = new HashMap<String, Object>()
        paramsMap.put("label_name", "organization.owner")
        paramsMap.put("label_type", "role")
        when: "调用方法"
        def entity = restTemplate.getForEntity(BASE_PATH + "/id?label_name={label_name}&label_type={label_type}", List, paramsMap)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().size() != 0
    }

    def "QueryWithPermissionsAndLabels"() {
        given: "构造请求参数"
        def roleDTO = roleMapper.selectByPrimaryKey(roleId)
        when: "调用方法"
        def entity = restTemplate.getForEntity(BASE_PATH + "/{id}", RoleDTO, roleId)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getId().equals(roleDTO.getId())
        entity.getBody().getCode().equals(roleDTO.getCode())
        entity.getBody().getName().equals(roleDTO.getName())
        entity.getBody().getBuiltIn().equals(roleDTO.getBuiltIn())
        entity.getBody().getModified().equals(roleDTO.getModified())
        // entity.getBody().getAssignable().equals(roleDTO.getAssignable())
        entity.getBody().getEnableForbidden().equals(roleDTO.getEnableForbidden())
        entity.getBody().getEnabled().equals(roleDTO.getEnabled())
        entity.getBody().getResourceLevel().equals(roleDTO.getResourceLevel())
    }

    def "Create"() {
        given: "构造请求参数"
        def roleDTO = new RoleDTO()
        roleDTO.setCode("role/site/default/tester")
        roleDTO.setName("测试管理员")
        roleDTO.setResourceLevel("site")
        roleDTO.setBuiltIn(false)
        roleDTO.setModified(false)
        roleDTO.setEnabled(true)
        // roleDTO.setAssignable(false)
        roleDTO.setEnableForbidden(true)
        when: "调用方法[异常-层级不合法]"
        roleDTO.setResourceLevel("error")
        def entity = restTemplate.postForEntity(BASE_PATH, roleDTO, ExceptionResponse)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.role.illegal.level")
        when: "调用方法[异常-没有权限]"
        roleDTO.setResourceLevel("site")
        entity = restTemplate.postForEntity(BASE_PATH, roleDTO, ExceptionResponse)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.role_permission.empty")
        when: "调用方法"
        def permissionDTOList = permissionDOList
        roleDTO.setPermissions(permissionDTOList)
        entity = restTemplate.postForEntity(BASE_PATH, roleDTO, RoleDTO)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals(roleDTO.getCode())
        entity.getBody().getName().equals(roleDTO.getName())
        entity.getBody().getBuiltIn().equals(roleDTO.getBuiltIn())
        entity.getBody().getEnableForbidden().equals(roleDTO.getEnableForbidden())
        entity.getBody().getEnabled().equals(roleDTO.getEnabled())
        entity.getBody().getResourceLevel().equals(roleDTO.getResourceLevel())
        roleMapper.deleteByPrimaryKey(entity.getBody().getId())
    }

    def "CreateBaseOnRoles"() {
        given: "构造请求参数"
        def roleDTO = new RoleDTO()
        roleDTO.setCode("role/site/default/tester1")
        roleDTO.setName("测试管理员")
        roleDTO.setResourceLevel("site")
        roleDTO.setBuiltIn(false)
        roleDTO.setModified(false)
        roleDTO.setEnabled(true)
        roleDTO.setEnableForbidden(true)
        roleDTO.setRoleIds(new ArrayList<Long>())
        def permissionDTOList = permissionDOList
        roleDTO.setPermissions(permissionDTOList)
        when: "调用方法[角色id为空]"
        def entity = restTemplate.postForEntity(BASE_PATH + "/base_on_roles", roleDTO, ExceptionResponse)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.role_permission.empty")
        when: "调用方法[异常-角色层级不相同]"
        def roleIds = new ArrayList<Long>()
        roleIds.add(1L)
        roleIds.add(2L)
        roleIds.add(3L)
        roleDTO.setRoleIds(roleIds)
        entity = restTemplate.postForEntity(BASE_PATH + "/base_on_roles", roleDTO, ExceptionResponse)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.role_permission.empty")
        when: "调用方法"
        roleIds = new ArrayList<Long>()
        roleIds.add(1L)
        roleDTO.setRoleIds(roleIds)
        entity = restTemplate.postForEntity(BASE_PATH + "/base_on_roles", roleDTO, RoleDTO)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        // entity.getBody().getCode().equals(roleDTO.getCode())
        // entity.getBody().getName().equals(roleDTO.getName())
        // entity.getBody().getBuiltIn().equals(roleDTO.getBuiltIn())
        // entity.getBody().getEnableForbidden().equals(roleDTO.getEnableForbidden())
        // entity.getBody().getEnabled().equals(roleDTO.getEnabled())
        // entity.getBody().getResourceLevel().equals(roleDTO.getResourceLevel())
        // roleMapper.deleteByPrimaryKey(entity.getBody().getId())
    }

    def "Update"() {
        given: "构造请求参数"
        def roleDTO = roleDTO
        roleDTO.setDescription("update")
        roleDTO.setObjectVersionNumber(1)
        def httpEntity = new HttpEntity<Object>(roleDTO)
        when: "调用方法"
        def entity = restTemplate.exchange(BASE_PATH + "/{id}", HttpMethod.PUT, httpEntity, RoleDTO, roleDTO.getId())
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals(roleDTO.getCode())
        entity.getBody().getName().equals(roleDTO.getName())
        // entity.getBody().getResourceLevel().equals(roleDTO.getResourceLevel())
    }

    def "EnableRole"() {
        given: "构造请求参数"
        def httpEntity = new HttpEntity<Object>()
        def roleDTO = roleDTO
        when: "调用方法[角色id为空]"
        def entity = restTemplate.exchange(BASE_PATH + "/{id}/enable", HttpMethod.PUT, httpEntity, ExceptionResponse, 1000L)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.role.not.exist")
        when: "调用方法"
        entity = restTemplate.exchange(BASE_PATH + "/{id}/enable", HttpMethod.PUT, httpEntity, RoleDTO, roleDTO.getId())
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getEnabled()
    }

    def "DisableRole"() {
        given: "构造请求参数"
        def roleDTO = roleDTO
        def httpEntity = new HttpEntity<Object>()
        when: "调用方法[角色id为空]"
        def entity = restTemplate.exchange(BASE_PATH + "/{id}/disable", HttpMethod.PUT, httpEntity, ExceptionResponse, 1000L)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.role.not.exist")
        when: "调用方法"
        entity = restTemplate.exchange(BASE_PATH + "/{id}/disable", HttpMethod.PUT, httpEntity, RoleDTO, roleDTO.getId())
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        !entity.getBody().getEnabled()
    }

    def "Check"() {
        given: "构造请求参数"
        def roleDTO = new RoleDTO()
        roleDTO.setCode("")
        roleDTO.setName("测试管理员")
        roleDTO.setResourceLevel("site")
        when: "调用方法[角色code为空]"
        def entity = restTemplate.postForEntity(BASE_PATH + "/check", roleDTO, ExceptionResponse)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.role.code.empty")
        when: "调用方法[角色code存在]"
        roleDTO.setCode("role/site/default/administrator")
        entity = restTemplate.postForEntity(BASE_PATH + "/check", roleDTO, ExceptionResponse)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().getCode().equals("error.role.code.existed")
        when: "调用方法"
        roleDTO.setCode("role/site/default/checker")
        entity = restTemplate.postForEntity(BASE_PATH + "/check", roleDTO, ExceptionResponse)
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
    }

    def "ListPermissionById"() {
        when: "调用方法[角色code为空]"
        def entity = restTemplate.getForEntity(BASE_PATH + "/{id}/permissions", Page, roleDTO.getId())
        // Last feature of the @Stepwise run: arm cleanup() to remove the shared fixtures.
        needClean = true
        then: "校验结果"
        entity.statusCode.is2xxSuccessful()
        entity.getBody().size() == 0
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/api/query/ApplicationQuery.java<|end_filename|>
package io.choerodon.iam.api.query;
/**
* @author superlee
* @since 0.15.0
*/
/**
 * Query object bundling the optional filter conditions for application searches.
 * All fields are nullable; null means "do not filter on this field".
 */
public class ApplicationQuery {

    // Owning organization id.
    private Long organizationId;
    // Application name filter.
    private String name;
    // Application code filter.
    private String code;
    // Application type filter.
    private String applicationType;
    // Application category filter (see ApplicationCategory enum).
    private String applicationCategory;
    // Enabled/disabled state filter.
    private Boolean enabled;
    // Name of the project the application belongs to.
    private String projectName;
    // Free-text search parameter.
    private String param;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public String getApplicationType() {
        return applicationType;
    }

    public void setApplicationType(String applicationType) {
        this.applicationType = applicationType;
    }

    public String getApplicationCategory() {
        return applicationCategory;
    }

    public void setApplicationCategory(String applicationCategory) {
        this.applicationCategory = applicationCategory;
    }

    public Boolean getEnabled() {
        return enabled;
    }

    public void setEnabled(Boolean enabled) {
        this.enabled = enabled;
    }

    public String getProjectName() {
        return projectName;
    }

    public void setProjectName(String projectName) {
        this.projectName = projectName;
    }

    public String getParam() {
        return param;
    }

    public void setParam(String param) {
        this.param = param;
    }

    public Long getOrganizationId() {
        return organizationId;
    }

    public void setOrganizationId(Long organizationId) {
        this.organizationId = organizationId;
    }
}
<|start_filename|>src/main/java/io/choerodon/iam/infra/mapper/ApplicationExplorationMapper.java<|end_filename|>
package io.choerodon.iam.infra.mapper;
import io.choerodon.iam.infra.dto.ApplicationDTO;
import io.choerodon.iam.infra.dto.ApplicationExplorationDTO;
import io.choerodon.mybatis.common.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @author superlee
* @since 0.15.0
*/
public interface ApplicationExplorationMapper extends Mapper<ApplicationExplorationDTO> {
    /**
     * Selects all descendants (including the nodes themselves) of the
     * applications with the given ids.
     *
     * @param idSet application ids
     * @return descendant exploration rows
     */
    List<ApplicationExplorationDTO> selectDescendantByApplicationIds(@Param("idSet") Set<Long> idSet);

    /**
     * Deletes all rows of the applications with the given ids,
     * including the nodes themselves.
     *
     * @param idSet application ids
     */
    void deleteDescendantByApplicationIds(@Param("idSet") Set<Long> idSet);

    /**
     * Selects all descendants under the given path, including the node itself.
     *
     * @param path materialized path of the node
     * @return descendant exploration rows
     */
    List<ApplicationExplorationDTO> selectDescendantByPath(@Param("path") String path);

    /**
     * Selects all ancestors of the application, including the node itself.
     *
     * @param id application id
     * @return ancestor exploration rows
     */
    List<ApplicationExplorationDTO> selectAncestorByApplicationId(@Param("id") Long id);

    /**
     * Deletes all rows of the application, including the node itself.
     *
     * @param id application id
     */
    void deleteDescendantByApplicationId(@Param("id") Long id);

    /**
     * Deletes all child nodes (including the nodes themselves) of the given
     * applications under the given parent.
     *
     * @param idSet    application ids
     * @param parentId parent application id
     */
    void deleteDescendantByApplicationIdsAndParentId(@Param("idSet") Set<Long> idSet, @Param("parentId") Long parentId);

    /**
     * Selects the direct (one level down) descendants of the application.
     *
     * @param id application id
     * @return direct descendant exploration rows
     */
    List<ApplicationExplorationDTO> selectDirectDescendantByApplicationId(@Param("id") Long id);

    /**
     * Selects all nodes (including the nodes themselves) under the given
     * application nodes scoped to the given parent.
     *
     * @param idSet    application node ids
     * @param parentId parent id of those nodes
     */
    List<ApplicationExplorationDTO> selectDescendantByApplicationIdsAndParentId(@Param("idSet") HashSet<Long> idSet, @Param("parentId") Long parentId);

    /**
     * Selects all nodes (including the node itself) under the given
     * application node scoped to the given parent.
     *
     * @param id       application node id
     * @param parentId parent id of that node
     * @return matching exploration rows
     */
    List<ApplicationExplorationDTO> selectDescendantByApplicationIdAndParentId(@Param("id") Long id, @Param("parentId") Long parentId);

    /**
     * Selects applications of the given category under a combined application
     * {@link io.choerodon.iam.infra.enums.ApplicationCategory}.
     *
     * @param path     materialized path of the combined application
     * @param category application category
     * @param code     application code filter
     * @param name     application name filter
     * @return matching applications
     */
    List<ApplicationDTO> selectDescendantApplications(@Param("path") String path, @Param("category") String category,
                                                      @Param("name") String name, @Param("code") String code);

    /**
     * Selects descendants (including the node itself) with application and
     * project information joined in.
     *
     * @param path "/"+id+"/"
     * @return descendant exploration rows enriched with app/project info
     */
    List<ApplicationExplorationDTO> selectDescendants(@Param("path") String path);
}
<|start_filename|>src/test/groovy/io/choerodon/iam/app/service/impl/OrganizationUserServiceImplSpec.groovy<|end_filename|>
package io.choerodon.iam.app.service.impl
import io.choerodon.asgard.saga.dto.StartInstanceDTO
import io.choerodon.asgard.saga.feign.SagaClient
import io.choerodon.core.oauth.DetailsHelper
import io.choerodon.iam.IntegrationTestConfiguration
import io.choerodon.iam.api.validator.UserPasswordValidator
import io.choerodon.iam.app.service.OrganizationUserService
import io.choerodon.iam.app.service.SystemSettingService
import io.choerodon.iam.app.service.UserService
import io.choerodon.iam.infra.asserts.OrganizationAssertHelper
import io.choerodon.iam.infra.asserts.UserAssertHelper
import io.choerodon.iam.infra.dto.UserDTO
import io.choerodon.iam.infra.feign.OauthTokenFeignClient
import io.choerodon.iam.infra.mapper.OrganizationMapper
import io.choerodon.iam.infra.mapper.UserMapper
import io.choerodon.oauth.core.password.PasswordPolicyManager
import io.choerodon.oauth.core.password.mapper.BasePasswordPolicyMapper
import io.choerodon.oauth.core.password.record.PasswordRecord
import org.springframework.beans.BeanUtils
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import
import org.springframework.transaction.annotation.Transactional
import spock.lang.Specification
import java.lang.reflect.Field
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT
/**
 * Integration tests for OrganizationUserServiceImpl. The service under test is
 * rebuilt in setup() with a mocked SagaClient so every feature method can
 * verify that exactly one saga event is fired by the corresponding operation.
 *
 * @author dengyouquan
 */
@SpringBootTest(webEnvironment = RANDOM_PORT)
@Import(IntegrationTestConfiguration)
class OrganizationUserServiceImplSpec extends Specification {
    // Mocked collaborators: interactions with these are what the tests verify.
    private SagaClient sagaClient = Mock(SagaClient)
    private OauthTokenFeignClient oauthTokenFeignClient = Mock(OauthTokenFeignClient)
    // Intentionally never assigned: created users get a null id so the
    // database generates one.
    private Long userId
    @Autowired
    private PasswordRecord passwordRecord
    @Autowired
    private UserService userService
    @Autowired
    private PasswordPolicyManager passwordPolicyManager
    @Autowired
    private BasePasswordPolicyMapper basePasswordPolicyMapper
    @Autowired
    private UserPasswordValidator userPasswordValidator
    @Autowired
    private SystemSettingService systemSettingService
    @Autowired
    private OrganizationUserService organizationUserService
    @Autowired
    OrganizationAssertHelper organizationAssertHelper
    @Autowired
    OrganizationMapper organizationMapper
    @Autowired
    UserAssertHelper userAssertHelper
    @Autowired
    UserMapper userMapper

    def setup() {
        given: "构造organizationUserService"
        // Replace the autowired bean with one wired to the mocked SagaClient.
        organizationUserService = new OrganizationUserServiceImpl(passwordRecord, passwordPolicyManager,
                basePasswordPolicyMapper, oauthTokenFeignClient, userPasswordValidator, systemSettingService, sagaClient,
                organizationAssertHelper, organizationMapper, userAssertHelper, userMapper, userService)
        // Force the private devopsMessage flag on so saga messages are sent.
        Field field = organizationUserService.getClass().getDeclaredField("devopsMessage")
        field.setAccessible(true)
        field.set(organizationUserService, true)
        DetailsHelper.setCustomUserDetails(1L, "zh_CN");
    }

    @Transactional
    def "Create"() {
        given: "mock静态方法-ConvertHelper"
        def checkPassword = false
        def userDTO = new UserDTO()
        userDTO.setId(userId)
        userDTO.setOrganizationId(1L)
        // Fix: the password was previously assigned twice in a row; the first
        // call was a dead store and has been removed.
        userDTO.setPassword("password")
        userDTO.setLoginName("askhfuweasdsha")
        userDTO.setEmail("<EMAIL>")
        when: "调用方法"
        organizationUserService.create(userDTO, checkPassword)
        then: "校验结果"
        1 * sagaClient.startSaga(_ as String, _ as StartInstanceDTO)
    }

    @Transactional
    def "BatchCreateUsers"() {
        given: "构造请求参数"
        UserDTO user = new UserDTO()
        user.setEnabled(true)
        user.setLoginName("1")
        user.setEmail("email")
        user.setOrganizationId(1L)
        user.setLanguage("zh_CN")
        user.setTimeZone("zcc")
        user.setLastPasswordUpdatedAt(new Date())
        user.setLocked(false)
        UserDTO user1 = new UserDTO()
        BeanUtils.copyProperties(user, user1)
        user1.setLoginName("2")
        user1.setEmail("email2")
        List<UserDTO> insertUsers = new ArrayList<>()
        insertUsers << user
        insertUsers << user1
        when: "调用方法"
        organizationUserService.batchCreateUsers(insertUsers)
        then: "校验结果"
        1 * sagaClient.startSaga(_ as String, _ as StartInstanceDTO)
    }

    @Transactional
    def "Update"() {
        given: "构造请求参数"
        UserDTO dto = userMapper.selectByPrimaryKey(1)
        dto.setLoginName("ab<PASSWORD>")
        when: "调用方法"
        organizationUserService.update(dto)
        then: "校验结果"
        1 * sagaClient.startSaga(_ as String, _ as StartInstanceDTO)
    }

    @Transactional
    def "Delete"() {
        when: "调用方法"
        organizationUserService.delete(1L, 1L)
        then: "校验结果"
        1 * sagaClient.startSaga(_ as String, _ as StartInstanceDTO)
    }

    @Transactional
    def "EnableUser"() {
        when: "调用方法"
        organizationUserService.enableUser(1, 1)
        then: "校验结果"
        1 * sagaClient.startSaga(_ as String, _ as StartInstanceDTO)
    }

    @Transactional
    def "DisableUser"() {
        when: "调用方法"
        organizationUserService.disableUser(1, 1)
        then: "校验结果"
        1 * sagaClient.startSaga(_ as String, _ as StartInstanceDTO)
    }
}
| xforcecloud/iam-service |
<|start_filename|>builtin/ng/uint128.go<|end_filename|>
package ng
import (
"fmt"
"log"
"math/big"
"math/bits"
"strconv"
)
const (
	GopPackage = true // to indicate this is a Go+ package
)

const (
	// maxUint64 is the largest value representable by a uint64.
	maxUint64 = (1 << 64) - 1
	// intSize is the size in bits of int/uint on this platform (32 or 64).
	intSize = 32 << (^uint(0) >> 63)
)

//
// Gop_: Go+ object prefix
// Gop_xxx_Cast: type Gop_xxx typecast
// xxxx__N: the Nth overload function
//

// Gop_ninteger is the Go+ "untyped uint" alias used for shift counts.
type Gop_ninteger = uint

// -----------------------------------------------------------------------------

// Uint128 is an unsigned 128-bit integer stored as two uint64 halves.
type Uint128 struct {
	hi, lo uint64
}

// Uint128_Init: func uint128.init(v int) uint128
// Panics when v is negative, since uint128 cannot represent it.
func Uint128_Init__0(v int) (out Uint128) {
	if v < 0 {
		panic("TODO: can't init uint128 from a negative integer")
	}
	return Uint128{lo: uint64(v)}
}

// Uint128_Init: func bigint.init(v untyped_bigint) bigint
// Panics when v is outside the valid uint128 range.
func Uint128_Init__1(v UntypedBigint) (out Uint128) {
	out, inRange := Uint128_Cast__9(v)
	if !inRange {
		log.Panicf("value %v was not in valid uint128 range\n", v)
	}
	return
}
// Uint128_Cast: func uint128(v untyped_int) uint128
// Negative v is sign-extended; see Uint128_Cast__6.
func Uint128_Cast__0(v int) Uint128 {
	return Uint128_Cast__6(int64(v))
}

// Uint128_Cast: func uint128(v untyped_bigint) uint128
// Panics (via Uint128_Init__1) when v is out of range.
func Uint128_Cast__1(v UntypedBigint) Uint128 {
	return Uint128_Init__1(v)
}

// Uint128_Cast: func uint128(v uint64) uint128
func Uint128_Cast__2(v uint64) Uint128 {
	return Uint128{lo: v}
}

// Uint128_Cast: func uint128(v uint32) uint128
func Uint128_Cast__3(v uint32) Uint128 {
	return Uint128{lo: uint64(v)}
}

// Uint128_Cast: func uint128(v uint16) uint128
func Uint128_Cast__4(v uint16) Uint128 {
	return Uint128{lo: uint64(v)}
}

// Uint128_Cast: func uint128(v uint8) uint128
func Uint128_Cast__5(v uint8) Uint128 {
	return Uint128{lo: uint64(v)}
}

// Uint128_Cast: func uint128(v int64) uint128
// Negative values wrap (two's complement): hi is filled with 1-bits,
// mirroring Go's own int64 -> uint64 conversion semantics.
func Uint128_Cast__6(v int64) (out Uint128) {
	if v < 0 {
		return Uint128{hi: maxUint64, lo: uint64(v)}
	}
	return Uint128{lo: uint64(v)}
}

// Uint128_Cast: func uint128(v int64) (uint128, bool)
// inRange is false (and out is zero) when v is negative.
func Uint128_Cast__7(v int64) (out Uint128, inRange bool) {
	if v < 0 {
		return
	}
	return Uint128{lo: uint64(v)}, true
}

// Uint128_Cast: func uint128(v *big.Int) uint128
// Out-of-range values are silently clamped; use Uint128_Cast__9 to detect.
func Uint128_Cast__8(v *big.Int) Uint128 {
	out, _ := Uint128_Cast__9(v)
	return out
}

// Uint128_Cast: func uint128(v *big.Int) (uint128, bool)
// Converts via v's raw words. inRange is false for negative values
// (out stays zero) and for values wider than 128 bits (out saturates
// at the maximum uint128). The word layout depends on the platform
// word size (32 or 64 bits).
func Uint128_Cast__9(v *big.Int) (out Uint128, inRange bool) {
	if v.Sign() < 0 {
		return out, false
	}
	words := v.Bits()
	switch intSize {
	case 64:
		lw := len(words)
		switch lw {
		case 0:
			return Uint128{}, true
		case 1:
			return Uint128{lo: uint64(words[0])}, true
		case 2:
			return Uint128{hi: uint64(words[1]), lo: uint64(words[0])}, true
		default:
			return Uint128{hi: maxUint64, lo: maxUint64}, false
		}
	case 32:
		lw := len(words)
		switch lw {
		case 0:
			return Uint128{}, true
		case 1:
			return Uint128{lo: uint64(words[0])}, true
		case 2:
			return Uint128{lo: (uint64(words[1]) << 32) | (uint64(words[0]))}, true
		case 3:
			return Uint128{hi: uint64(words[2]), lo: (uint64(words[1]) << 32) | (uint64(words[0]))}, true
		case 4:
			return Uint128{
				hi: (uint64(words[3]) << 32) | (uint64(words[2])),
				lo: (uint64(words[1]) << 32) | (uint64(words[0])),
			}, true
		default:
			return Uint128{hi: maxUint64, lo: maxUint64}, false
		}
	default:
		panic("unsupported bit size")
	}
}

// Uint128_Cast: func uint128() uint128
func Uint128_Cast__a() Uint128 {
	return Uint128{}
}

// Uint128_Cast: func uint128(v uint) uint128
func Uint128_Cast__b(v uint) Uint128 {
	return Uint128{lo: uint64(v)}
}

// Uint128_Cast: func uint128(hi, lo uint64) uint128
func Uint128_Cast__c(hi, lo uint64) Uint128 {
	return Uint128{hi: hi, lo: lo}
}

// Gop_Rcast: func float64(v uint128) float64
// Values above 2^53 lose precision, as with any integer -> float64 conversion.
func (u Uint128) Gop_Rcast__0() float64 {
	if u.hi == 0 {
		return float64(u.lo)
	}
	return (float64(u.hi) * (1 << 64)) + float64(u.lo)
}

// Gop_Rcast: func uint64(v uint128) uint64
// Truncates to the low 64 bits.
func (u Uint128) Gop_Rcast__1() uint64 {
	return u.lo
}

// Gop_Rcast: func uint64(v uint128) (uint64, bool)
// inRange is false when the value does not fit in a uint64.
func (u Uint128) Gop_Rcast__2() (out uint64, inRange bool) {
	return u.lo, u.hi == 0
}

// Gop_Rcast: func int64(v uint128) int64
// Truncates to the low 64 bits and reinterprets them as signed.
func (u Uint128) Gop_Rcast__3() int64 {
	return int64(u.lo)
}

// Gop_Rcast: func int64(v uint128) (int64, bool)
// inRange is false when the value does not fit in an int64.
func (u Uint128) Gop_Rcast__4() (out int64, inRange bool) {
	return int64(u.lo), u.hi == 0 && u.lo <= maxInt64
}
// -----------------------------------------------------------------------------
// IsZero reports whether u is zero.
func (u Uint128) IsZero() bool {
	return (u.hi | u.lo) == 0
}
// Scan implements fmt.Scanner, reading a base-10 uint128 token.
func (u *Uint128) Scan(state fmt.ScanState, verb rune) (err error) {
	t, err := state.Token(true, nil)
	if err != nil {
		return
	}
	v, err := ParseUint128(string(t), 10)
	if err == nil {
		*u = v
	}
	return
}

// Format implements fmt.Formatter by delegating to math/big.
func (u Uint128) Format(s fmt.State, c rune) {
	// TODO: not so good
	u.BigInt().Format(s, c)
}

// String returns the base-10 representation of u.
func (u Uint128) String() string {
	return u.Text(10)
}

// Text returns the representation of u in the given base.
// Values that fit in a uint64 take a fast path that avoids big.Int.
func (u Uint128) Text(base int) string {
	if u.hi == 0 {
		return strconv.FormatUint(u.lo, base)
	}
	// TODO: not so good
	return u.BigInt().Text(base)
}

// BigInt returns u as a newly allocated *big.Int.
func (u Uint128) BigInt() *big.Int {
	var v big.Int
	u.ToBigInt(&v)
	return &v
}

// ToBigInt stores u's value into b, reusing b's existing word storage
// when possible to avoid allocation. The word layout depends on the
// platform word size; the default branch is a portable fallback.
func (u Uint128) ToBigInt(b *big.Int) {
	switch intSize {
	case 64:
		bits := b.Bits()
		ln := len(bits)
		if len(bits) < 2 {
			bits = append(bits, make([]big.Word, 2-ln)...)
		}
		bits = bits[:2]
		bits[0] = big.Word(u.lo)
		bits[1] = big.Word(u.hi)
		b.SetBits(bits)
	case 32:
		bits := b.Bits()
		ln := len(bits)
		if len(bits) < 4 {
			bits = append(bits, make([]big.Word, 4-ln)...)
		}
		bits = bits[:4]
		bits[0] = big.Word(u.lo & 0xFFFFFFFF)
		bits[1] = big.Word(u.lo >> 32)
		bits[2] = big.Word(u.hi & 0xFFFFFFFF)
		bits[3] = big.Word(u.hi >> 32)
		b.SetBits(bits)
	default:
		if u.hi > 0 {
			b.SetUint64(u.hi)
			b.Lsh(b, 64)
		}
		var lo big.Int
		lo.SetUint64(u.lo)
		b.Add(b, &lo)
	}
}
// Bit returns the value of the i'th bit of u, that is (u>>i)&1.
// The bit index must satisfy 0 <= i < 128; otherwise Bit panics.
func (u Uint128) Bit(i int) uint {
	if i < 0 || i >= 128 {
		panic("bit out of range")
	}
	if i < 64 {
		return uint((u.lo >> uint(i)) & 1)
	}
	return uint((u.hi >> uint(i-64)) & 1)
}
// SetBit returns a copy of u with the i'th bit set to b (0 or 1).
// It panics when i is outside [0, 128) or when b is neither 0 nor 1.
func (u Uint128) SetBit(i int, b uint) (out Uint128) {
	if i < 0 || i >= 128 {
		panic("bit out of range")
	}
	if b > 1 {
		panic("bit value not 0 or 1")
	}
	if i >= 64 {
		mask := uint64(1) << uint(i-64)
		if b == 0 {
			u.hi &^= mask
		} else {
			u.hi |= mask
		}
	} else {
		mask := uint64(1) << uint(i)
		if b == 0 {
			u.lo &^= mask
		} else {
			u.lo |= mask
		}
	}
	return u
}
// LeadingZeros returns the number of leading zero bits in u; 128 for u == 0.
func (u Uint128) LeadingZeros() int {
	if u.hi == 0 {
		return bits.LeadingZeros64(u.lo) + 64
	} else {
		return bits.LeadingZeros64(u.hi)
	}
}

// TrailingZeros returns the number of trailing zero bits in u; 128 for u == 0.
func (u Uint128) TrailingZeros() int {
	if u.lo == 0 {
		return bits.TrailingZeros64(u.hi) + 64
	} else {
		return bits.TrailingZeros64(u.lo)
	}
}

// BitLen returns the length of the absolute value of u in bits. The bit length of 0 is 0.
func (u Uint128) BitLen() int {
	if u.hi > 0 {
		return bits.Len64(u.hi) + 64
	}
	return bits.Len64(u.lo)
}

// OnesCount returns the number of one bits ("population count") in u.
func (u Uint128) OnesCount() int {
	return bits.OnesCount64(u.hi) + bits.OnesCount64(u.lo)
}

// Reverse returns u with all 128 bits in reversed order
// (the halves are swapped and each half bit-reversed).
func (u Uint128) Reverse() Uint128 {
	return Uint128{hi: bits.Reverse64(u.lo), lo: bits.Reverse64(u.hi)}
}

// ReverseBytes returns u with all 16 bytes in reversed order.
func (u Uint128) ReverseBytes() Uint128 {
	return Uint128{hi: bits.ReverseBytes64(u.lo), lo: bits.ReverseBytes64(u.hi)}
}
// Cmp compares 'u' to 'n' and returns:
//
//	< 0 if u < n
//	  0 if u == n
//	> 0 if u > n
//
// The specific value returned by Cmp is undefined, but it is guaranteed to
// satisfy the above constraints.
//
func (u Uint128) Cmp__1(n Uint128) int {
	if u.hi == n.hi {
		if u.lo > n.lo {
			return 1
		} else if u.lo < n.lo {
			return -1
		}
	} else {
		if u.hi > n.hi {
			return 1
		} else if u.hi < n.hi {
			return -1
		}
	}
	return 0
}

// Cmp__0 compares u to a uint64 with the same contract as Cmp__1.
func (u Uint128) Cmp__0(n uint64) int {
	if u.hi > 0 || u.lo > n {
		return 1
	} else if u.lo < n {
		return -1
	}
	return 0
}

// Gop_Dup returns a copy of u (Go+ duplication hook).
func (u Uint128) Gop_Dup() (v Uint128) {
	return u
}

// Gop_Inc increments u in place, wrapping around at 2^128.
func (u *Uint128) Gop_Inc() {
	u.lo++
	if u.lo == 0 {
		u.hi++
	}
}

// Gop_Dec decrements u in place, wrapping around below zero.
func (u *Uint128) Gop_Dec() {
	if u.lo == 0 {
		u.hi--
	}
	u.lo--
}
// Compound-assignment operator implementations for Go+. Each mutates the
// receiver in place by delegating to the corresponding value operation.

// Gop_AddAssign: func (a *uint128) += (b uint128)
func (u *Uint128) Gop_AddAssign(b Uint128) {
	*u = u.Gop_Add__1(b)
}

// Gop_SubAssign: func (a *uint128) -= (b uint128)
func (u *Uint128) Gop_SubAssign(b Uint128) {
	*u = u.Gop_Sub__1(b)
}

// Gop_MulAssign: func (a *uint128) *= (b uint128)
func (u *Uint128) Gop_MulAssign(b Uint128) {
	*u = u.Gop_Mul__1(b)
}

// Gop_QuoAssign: func (a *uint128) /= (b uint128) {
func (u *Uint128) Gop_QuoAssign(b Uint128) {
	*u = u.Gop_Quo__1(b)
}

// Gop_RemAssign: func (a *uint128) %= (b uint128)
func (u *Uint128) Gop_RemAssign(b Uint128) {
	*u = u.Gop_Rem__1(b)
}

// Gop_OrAssign: func (a *uint128) |= (b uint128)
func (u *Uint128) Gop_OrAssign(b Uint128) {
	*u = u.Gop_Or__1(b)
}

// Gop_XorAssign: func (a *uint128) ^= (b uint128)
func (u *Uint128) Gop_XorAssign(b Uint128) {
	*u = u.Gop_Xor__1(b)
}

// Gop_AndAssign: func (a *uint128) &= (b uint128)
func (u *Uint128) Gop_AndAssign(b Uint128) {
	*u = u.Gop_And__1(b)
}

// Gop_AndNotAssign: func (a *uint128) &^= (b uint128)
func (u *Uint128) Gop_AndNotAssign(b Uint128) {
	*u = u.Gop_AndNot(b)
}

// Gop_LshAssign: func (a *uint128) <<= (n untyped_uint)
func (u *Uint128) Gop_LshAssign(n Gop_ninteger) {
	*u = u.Gop_Lsh(n)
}

// Gop_RshAssign: func (a *uint128) >>= (n untyped_uint)
func (u *Uint128) Gop_RshAssign(n Gop_ninteger) {
	*u = u.Gop_Rsh(n)
}
// Gop_Add__1: uint128 + uint128, wrapping modulo 2^128.
func (u Uint128) Gop_Add__1(n Uint128) (v Uint128) {
	var carry uint64
	v.lo, carry = bits.Add64(u.lo, n.lo, 0)
	v.hi, _ = bits.Add64(u.hi, n.hi, carry)
	return v
}

// Gop_Add__0: uint128 + uint64, wrapping modulo 2^128.
func (u Uint128) Gop_Add__0(n uint64) (v Uint128) {
	var carry uint64
	v.lo, carry = bits.Add64(u.lo, n, 0)
	v.hi = u.hi + carry
	return v
}

// Gop_Sub__1: uint128 - uint128, wrapping modulo 2^128.
func (u Uint128) Gop_Sub__1(n Uint128) (v Uint128) {
	var borrowed uint64
	v.lo, borrowed = bits.Sub64(u.lo, n.lo, 0)
	v.hi, _ = bits.Sub64(u.hi, n.hi, borrowed)
	return v
}

// Gop_Sub__0: uint128 - uint64, wrapping modulo 2^128.
func (u Uint128) Gop_Sub__0(n uint64) (v Uint128) {
	var borrowed uint64
	v.lo, borrowed = bits.Sub64(u.lo, n, 0)
	v.hi = u.hi - borrowed
	return v
}

// Gop_EQ__1: uint128 == uint128.
func (u Uint128) Gop_EQ__1(n Uint128) bool {
	return u.hi == n.hi && u.lo == n.lo
}

// Gop_EQ__0: uint128 == uint64.
func (u Uint128) Gop_EQ__0(n uint64) bool {
	return u.hi == 0 && u.lo == n
}

// Gop_GT__1: uint128 > uint128.
func (u Uint128) Gop_GT__1(n Uint128) bool {
	return u.hi > n.hi || (u.hi == n.hi && u.lo > n.lo)
}

// Gop_GT__0: uint128 > uint64.
func (u Uint128) Gop_GT__0(n uint64) bool {
	return u.hi > 0 || u.lo > n
}

// Gop_GE__1: uint128 >= uint128.
func (u Uint128) Gop_GE__1(n Uint128) bool {
	return u.hi > n.hi || (u.hi == n.hi && u.lo >= n.lo)
}

// Gop_GE__0: uint128 >= uint64.
func (u Uint128) Gop_GE__0(n uint64) bool {
	return u.hi > 0 || u.lo >= n
}

// Gop_LT__1: uint128 < uint128.
func (u Uint128) Gop_LT__1(n Uint128) bool {
	return u.hi < n.hi || (u.hi == n.hi && u.lo < n.lo)
}

// Gop_LT__0: uint128 < uint64.
func (u Uint128) Gop_LT__0(n uint64) bool {
	return u.hi == 0 && u.lo < n
}

// Gop_LE__1: uint128 <= uint128.
func (u Uint128) Gop_LE__1(n Uint128) bool {
	return u.hi < n.hi || (u.hi == n.hi && u.lo <= n.lo)
}

// Gop_LE__0: uint128 <= uint64.
func (u Uint128) Gop_LE__0(n uint64) bool {
	return u.hi == 0 && u.lo <= n
}

// Gop_And__1: uint128 & uint128.
func (u Uint128) Gop_And__1(n Uint128) Uint128 {
	u.hi &= n.hi
	u.lo &= n.lo
	return u
}

// Gop_And__0: uint128 & uint64 (the high half is necessarily cleared).
func (u Uint128) Gop_And__0(n uint64) Uint128 {
	return Uint128{lo: u.lo & n}
}

// Gop_AndNot: uint128 &^ uint128 (bit clear).
func (u Uint128) Gop_AndNot(n Uint128) Uint128 {
	u.hi &^= n.hi
	u.lo &^= n.lo
	return u
}

// Gop_Not: ^uint128 (bitwise complement).
func (u Uint128) Gop_Not() Uint128 {
	return Uint128{hi: ^u.hi, lo: ^u.lo}
}

// Gop_Or__1: uint128 | uint128.
func (u Uint128) Gop_Or__1(n Uint128) Uint128 {
	u.hi |= n.hi
	u.lo |= n.lo
	return u
}

// Gop_Or__0: uint128 | uint64.
func (u Uint128) Gop_Or__0(n uint64) Uint128 {
	u.lo |= n
	return u
}

// Gop_Xor__1: uint128 ^ uint128.
func (u Uint128) Gop_Xor__1(v Uint128) Uint128 {
	u.hi ^= v.hi
	u.lo ^= v.lo
	return u
}

// Gop_Xor__0: uint128 ^ uint64.
func (u Uint128) Gop_Xor__0(v uint64) Uint128 {
	u.lo ^= v
	return u
}
// Gop_Lsh returns u << n. This relies on Go's defined shift semantics
// (a shift count >= the operand width yields 0, unlike C), so n == 0
// and n >= 128 are both handled correctly without extra branches.
func (u Uint128) Gop_Lsh(n Gop_ninteger) Uint128 {
	if n < 64 {
		u.hi = (u.hi << n) | (u.lo >> (64 - n))
		u.lo <<= n
	} else {
		u.hi = u.lo << (n - 64)
		u.lo = 0
	}
	return u
}

// Gop_Rsh returns u >> n (logical shift; vacated bits are zero).
// As with Gop_Lsh, Go's shift semantics make n == 0 and n >= 128 safe.
func (u Uint128) Gop_Rsh(n Gop_ninteger) Uint128 {
	if n < 64 {
		u.lo = (u.lo >> n) | (u.hi << (64 - n))
		u.hi >>= n
	} else {
		u.lo = u.hi >> (n - 64)
		u.hi = 0
	}
	return u
}
// Gop_Mul__1: uint128 * uint128, truncated modulo 2^128.
// The cross terms u.hi*n.lo and u.lo*n.hi only affect the high half;
// u.hi*n.hi would overflow entirely and is dropped.
func (u Uint128) Gop_Mul__1(n Uint128) Uint128 {
	hi, lo := bits.Mul64(u.lo, n.lo)
	hi += u.hi*n.lo + u.lo*n.hi
	return Uint128{hi, lo}
}

// Gop_Mul__0: uint128 * uint64, truncated modulo 2^128.
func (u Uint128) Gop_Mul__0(n uint64) (dest Uint128) {
	dest.hi, dest.lo = bits.Mul64(u.lo, n)
	dest.hi += u.hi * n
	return dest
}
const (
	// divAlgoLeading0Spill is the leading-zero-difference threshold that
	// selects between the Hacker's Delight long division (quorem128by128)
	// and the bit-by-bit binary division (quo128bin/quorem128bin).
	divAlgoLeading0Spill = 16
)

// Gop_Quo__1 returns u / by, panicking on division by zero.
// Fast paths handle 64-bit operands, power-of-two divisors and the
// trivial orderings before falling back to the full algorithms.
func (u Uint128) Gop_Quo__1(by Uint128) (q Uint128) {
	if by.lo == 0 && by.hi == 0 {
		panic("division by zero")
	}
	if u.hi|by.hi == 0 {
		q.lo = u.lo / by.lo // by.lo != 0 here: a fully-zero divisor was rejected above
		return q
	}
	var byLoLeading0, byHiLeading0, byLeading0 uint
	if by.hi == 0 {
		byLoLeading0, byHiLeading0 = uint(bits.LeadingZeros64(by.lo)), 64
		byLeading0 = byLoLeading0 + 64
	} else {
		byHiLeading0 = uint(bits.LeadingZeros64(by.hi))
		byLeading0 = byHiLeading0
	}
	if byLeading0 == 127 { // divisor is 1
		return u
	}
	byTrailing0 := uint(by.TrailingZeros())
	if (byLeading0 + byTrailing0) == 127 { // divisor is a power of two
		return u.Gop_Rsh(byTrailing0)
	}
	if cmp := u.Cmp__1(by); cmp < 0 {
		return q // it's 100% remainder
	} else if cmp == 0 {
		q.lo = 1 // dividend and divisor are the same
		return q
	}
	uLeading0 := uint(u.LeadingZeros())
	if byLeading0-uLeading0 > divAlgoLeading0Spill {
		q, _ = quorem128by128(u, by, byHiLeading0, byLoLeading0)
		return q
	} else {
		return quo128bin(u, by, uLeading0, byLeading0)
	}
}

// Gop_Quo__0 returns u / by for a 64-bit divisor, using a two-step
// bits.Div64 when the high half is not already smaller than by.
// bits.Div64 itself panics when by == 0.
func (u Uint128) Gop_Quo__0(by uint64) (q Uint128) {
	if u.hi < by {
		q.lo, _ = bits.Div64(u.hi, u.lo, by)
	} else {
		q.hi = u.hi / by
		q.lo, _ = bits.Div64(u.hi%by, u.lo, by)
	}
	return q
}
// QuoRem__1 returns both quotient and remainder of u / by, panicking on
// division by zero. The fast paths mirror Gop_Quo__1; for a power-of-two
// divisor the remainder is obtained by masking with (by - 1).
func (u Uint128) QuoRem__1(by Uint128) (q, r Uint128) {
	if by.lo == 0 && by.hi == 0 {
		panic("division by zero")
	}
	if u.hi|by.hi == 0 {
		q.lo = u.lo / by.lo
		r.lo = u.lo % by.lo
		return q, r
	}
	var byLoLeading0, byHiLeading0, byLeading0 uint
	if by.hi == 0 {
		byLoLeading0, byHiLeading0 = uint(bits.LeadingZeros64(by.lo)), 64
		byLeading0 = byLoLeading0 + 64
	} else {
		byHiLeading0 = uint(bits.LeadingZeros64(by.hi))
		byLeading0 = byHiLeading0
	}
	if byLeading0 == 127 { // divisor is 1
		return u, r
	}
	byTrailing0 := uint(by.TrailingZeros())
	if (byLeading0 + byTrailing0) == 127 { // divisor is a power of two
		q = u.Gop_Rsh(byTrailing0)
		by.Gop_Dec() // by - 1 is the remainder mask
		r = by.Gop_And__1(u)
		return
	}
	if cmp := u.Cmp__1(by); cmp < 0 {
		return q, u // it's 100% remainder
	} else if cmp == 0 {
		q.lo = 1 // dividend and divisor are the same
		return q, r
	}
	uLeading0 := uint(u.LeadingZeros())
	if byLeading0-uLeading0 > divAlgoLeading0Spill {
		return quorem128by128(u, by, byHiLeading0, byLoLeading0)
	} else {
		return quorem128bin(u, by, uLeading0, byLeading0)
	}
}

// QuoRem__0 returns quotient and remainder for a 64-bit divisor.
// bits.Div64 panics when by == 0.
func (u Uint128) QuoRem__0(by uint64) (q, r Uint128) {
	if u.hi < by {
		q.lo, r.lo = bits.Div64(u.hi, u.lo, by)
	} else {
		q.hi, r.lo = bits.Div64(0, u.hi, by)
		q.lo, r.lo = bits.Div64(r.lo, u.lo, by)
	}
	return q, r
}

// Gop_Rem: func (a uint128) % (b uint128) uint128
func (u Uint128) Gop_Rem__1(by Uint128) (r Uint128) {
	// TODO: inline only the needed bits
	_, r = u.QuoRem__1(by)
	return r
}

// Gop_Rem__0 returns u % by for a 64-bit divisor.
func (u Uint128) Gop_Rem__0(by uint64) (r Uint128) {
	// https://github.com/golang/go/issues/28970
	// if u.hi < by {
	//	_, r.lo = bits.Rem64(u.hi, u.lo, by)
	// } else {
	//	_, r.lo = bits.Rem64(bits.Rem64(0, u.hi, by), u.lo, by)
	// }
	if u.hi < by {
		_, r.lo = bits.Div64(u.hi, u.lo, by)
	} else {
		_, r.lo = bits.Div64(0, u.hi, by)
		_, r.lo = bits.Div64(r.lo, u.lo, by)
	}
	return r
}
// Hacker's delight 9-4, divlu:
// quo128by64 divides the 128-bit value (u1:u0) by the 64-bit divisor v,
// returning only the 64-bit quotient. vLeading0 is v's leading-zero
// count; v is normalized (shifted left) so its top bit is set, then the
// quotient is built from two 32-bit digit estimates (q1, q0), each
// corrected by at most a couple of decrements in the again loops.
// The caller must guarantee the quotient fits in 64 bits.
func quo128by64(u1, u0, v uint64, vLeading0 uint) (q uint64) {
	var b uint64 = 1 << 32
	var un1, un0, vn1, vn0, q1, q0, un32, un21, un10, rhat, vs, left, right uint64

	vs = v << vLeading0
	vn1 = vs >> 32
	vn0 = vs & 0xffffffff

	if vLeading0 > 0 {
		un32 = (u1 << vLeading0) | (u0 >> (64 - vLeading0))
		un10 = u0 << vLeading0
	} else {
		un32 = u1
		un10 = u0
	}

	un1 = un10 >> 32
	un0 = un10 & 0xffffffff

	q1 = un32 / vn1
	rhat = un32 % vn1

	left = q1 * vn0
	right = (rhat << 32) | un1

again1:
	if (q1 >= b) || (left > right) {
		q1--
		rhat += vn1
		if rhat < b {
			left -= vn0
			right = (rhat << 32) | un1
			goto again1
		}
	}

	un21 = (un32 << 32) + (un1 - (q1 * vs))

	q0 = un21 / vn1
	rhat = un21 % vn1

	left = q0 * vn0
	right = (rhat << 32) | un0

again2:
	if (q0 >= b) || (left > right) {
		q0--
		rhat += vn1
		if rhat < b {
			left -= vn0
			right = (rhat << 32) | un0
			goto again2
		}
	}

	return (q1 << 32) | q0
}
// Hacker's delight 9-4, divlu:
// quorem128by64 is quo128by64 extended to also return the remainder:
// the final expression denormalizes it by shifting back right by
// vLeading0. The same quotient-digit estimate/correct structure applies.
func quorem128by64(u1, u0, v uint64, vLeading0 uint) (q, r uint64) {
	var b uint64 = 1 << 32
	var un1, un0, vn1, vn0, q1, q0, un32, un21, un10, rhat, left, right uint64

	v <<= vLeading0
	vn1 = v >> 32
	vn0 = v & 0xffffffff

	if vLeading0 > 0 {
		un32 = (u1 << vLeading0) | (u0 >> (64 - vLeading0))
		un10 = u0 << vLeading0
	} else {
		un32 = u1
		un10 = u0
	}

	un1 = un10 >> 32
	un0 = un10 & 0xffffffff

	q1 = un32 / vn1
	rhat = un32 % vn1

	left = q1 * vn0
	right = (rhat << 32) + un1

again1:
	if (q1 >= b) || (left > right) {
		q1--
		rhat += vn1
		if rhat < b {
			left -= vn0
			right = (rhat << 32) | un1
			goto again1
		}
	}

	un21 = (un32 << 32) + (un1 - (q1 * v))

	q0 = un21 / vn1
	rhat = un21 % vn1

	left = q0 * vn0
	right = (rhat << 32) | un0

again2:
	if (q0 >= b) || (left > right) {
		q0--
		rhat += vn1
		if rhat < b {
			left -= vn0
			right = (rhat << 32) | un0
			goto again2
		}
	}

	return (q1 << 32) | q0, ((un21 << 32) + (un0 - (q0 * v))) >> vLeading0
}
// quorem128by128 computes m / v and m % v for a general 128-bit divisor.
// vHiLeading0 and vLoLeading0 are the leading-zero counts of v.hi and
// v.lo as computed by the caller. When v.hi == 0 the problem reduces to
// 128/64 division; otherwise an approximate quotient is computed from the
// top 64 bits of the normalized operands (Hacker's Delight 9-5) and then
// corrected by at most one step.
//
// Note: in the v.hi != 0 branch the callers only ever set vHiLeading0
// (vLoLeading0 is left at 0 in that path), and v1.hi is already
// normalized by the Gop_Lsh(vHiLeading0) shift — so passing vLoLeading0
// (== 0) to quo128by64 is correct, even though it looks asymmetric.
func quorem128by128(m, v Uint128, vHiLeading0, vLoLeading0 uint) (q, r Uint128) {
	if v.hi == 0 {
		if m.hi < v.lo {
			// Quotient fits in 64 bits: one 128/64 division suffices.
			q.lo, r.lo = quorem128by64(m.hi, m.lo, v.lo, vLoLeading0)
			return q, r
		} else {
			// Divide the high half first, then the combined remainder.
			q.hi = m.hi / v.lo
			r.hi = m.hi % v.lo
			q.lo, r.lo = quorem128by64(r.hi, m.lo, v.lo, vLoLeading0)
			r.hi = 0
			return q, r
		}
	} else {
		v1 := v.Gop_Lsh(vHiLeading0) // normalize divisor: top bit of v1.hi set
		u1 := m.Gop_Rsh(1)
		var q1 Uint128
		q1.lo = quo128by64(u1.hi, u1.lo, v1.hi, vLoLeading0)
		q1 = q1.Gop_Rsh(63 - vHiLeading0)
		if q1.hi|q1.lo != 0 {
			q1.Gop_Dec() // estimate may be one too large
		}
		q = q1
		q1 = q1.Gop_Mul__1(v)
		r = m.Gop_Sub__1(q1)
		if r.Cmp__1(v) >= 0 { // correct the off-by-one estimate
			q.Gop_Inc()
			r = r.Gop_Sub__1(v)
		}
		return q, r
	}
}
// quorem128bin is shift-and-subtract (binary long) division: the divisor
// is aligned with the dividend's most significant bit, then one quotient
// bit is produced per iteration. Used when the operands' magnitudes are
// close, so few iterations are needed.
func quorem128bin(u, by Uint128, uLeading0, byLeading0 uint) (q, r Uint128) {
	shift := int(byLeading0 - uLeading0)
	by = by.Gop_Lsh(uint(shift))

	for {
		// q << 1
		q.hi = (q.hi << 1) | (q.lo >> 63)
		q.lo = q.lo << 1

		// performance tweak: simulate greater than or equal by hand-inlining "not less than".
		if u.hi > by.hi || (u.hi == by.hi && u.lo >= by.lo) {
			u = u.Gop_Sub__1(by)
			q.lo |= 1
		}

		// by >> 1
		by.lo = (by.lo >> 1) | (by.hi << 63)
		by.hi = by.hi >> 1

		if shift <= 0 {
			break
		}
		shift--
	}

	r = u
	return q, r
}

// quo128bin is quorem128bin without tracking the remainder.
func quo128bin(u, by Uint128, uLeading0, byLeading0 uint) (q Uint128) {
	shift := int(byLeading0 - uLeading0)
	by = by.Gop_Lsh(uint(shift))

	for {
		// q << 1
		q.hi = (q.hi << 1) | (q.lo >> 63)
		q.lo = q.lo << 1

		// u >= by
		if u.hi > by.hi || (u.hi == by.hi && u.lo >= by.lo) {
			u = u.Gop_Sub__1(by)
			q.lo |= 1
		}

		// by >> 1 (the original comment said "q >> 1", which was wrong)
		by.lo = (by.lo >> 1) | (by.hi << 63)
		by.hi = by.hi >> 1

		if shift <= 0 {
			break
		}
		shift--
	}

	return q
}
// -----------------------------------------------------------------------------
// ParseUint128 parses s in the given base into a Uint128. It returns an
// error when s is not a valid integer in that base or when the value is
// outside the uint128 range (out is then the clamped value from
// Uint128_Cast__9).
func ParseUint128(s string, base int) (out Uint128, err error) {
	b, ok := new(big.Int).SetString(s, base)
	if !ok {
		err = fmt.Errorf("invalid uint128 string: %q", s)
		return
	}
	out, inRange := Uint128_Cast__9(b)
	if !inRange {
		err = fmt.Errorf("string %q was not in valid uint128 range", s)
	}
	return
}

// FormatUint128 returns the string representation of i in the given base.
func FormatUint128(i Uint128, base int) string {
	return i.Text(base)
}
// -----------------------------------------------------------------------------
<|start_filename|>cmd/internal/build/build.go<|end_filename|>
/*
* Copyright (c) 2021 The GoPlus Authors (goplus.org). All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Package build implements the ``gop build'' command.
package build
import (
"fmt"
"os"
"os/exec"
"github.com/qiniu/x/log"
"github.com/goplus/gop/cl"
"github.com/goplus/gop/cmd/internal/base"
"github.com/goplus/gop/x/gopproj"
"github.com/goplus/gop/x/gopprojs"
"github.com/goplus/gox"
)
// gop build

// Cmd describes the ``gop build'' command for the command dispatcher.
var Cmd = &base.Command{
	UsageLine: "gop build [-v -o output] [packages]",
	Short:     "Build Go+ files",
}

// Command-line flags, registered on Cmd's own flag set. Note that the
// package-level name `flag` deliberately shadows the stdlib flag package
// within this file.
var (
	flagVerbose = flag.Bool("v", false, "print verbose information.")
	flagOutput  = flag.String("o", "a.out", "gop build output file.")
	flag        = &Cmd.Flag
)

func init() {
	Cmd.Run = runCmd
}
// runCmd parses command-line flags, enables verbose compiler tracing when
// -v is given, and builds the packages named by the remaining arguments
// (defaulting to the current directory).
func runCmd(_ *base.Command, args []string) {
	err := flag.Parse(args)
	if err != nil {
		log.Fatalln("parse input arguments failed:", err)
	}
	if *flagVerbose {
		gox.SetDebug(gox.DbgFlagAll &^ gox.DbgFlagComments)
		cl.SetDebug(cl.DbgFlagAll)
		cl.SetDisableRecover(true)
	}
	args = flag.Args()
	if len(args) == 0 {
		args = []string{"."}
	}
	gopBuild(args)
}
// gopBuild opens the Go+ project named by args, then shells out to the
// underlying `go build` command with stdio passed through. If the child
// process fails, this process exits with the child's exit code.
func gopBuild(args []string) {
	proj, args, err := gopprojs.ParseOne(args...)
	if err != nil {
		log.Fatalln(err)
	}
	flags := 0
	ctx, goProj, err := gopproj.OpenProject(flags, proj)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	goProj.BuildArgs = []string{"-o", *flagOutput}
	cmd := ctx.GoCommand("build", goProj)
	cmd.Stdin = os.Stdin
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	cmd.Env = os.Environ()
	err = cmd.Run()
	if err != nil {
		switch e := err.(type) {
		case *exec.ExitError:
			// Propagate the child's exit status.
			os.Exit(e.ExitCode())
		default:
			log.Fatalln(err)
		}
	}
}
// -----------------------------------------------------------------------------
<|start_filename|>builtin/ng/int128.go<|end_filename|>
package ng
import (
"fmt"
"log"
"math/big"
"math/bits"
)
const (
	// signBit masks the sign bit in the high 64-bit half.
	signBit = 0x8000000000000000
	// maxInt64 is the largest int64 value.
	maxInt64 = 1<<63 - 1
)

var (
	big1       = new(big.Int).SetUint64(1)
	bigMaxU128 *big.Int
)

// init precomputes 2^128 - 1, used by Int128.ToBigInt to convert
// negative two's-complement values.
func init() {
	bigMaxU128 = new(big.Int).Lsh(big1, 128)
	bigMaxU128.Sub(bigMaxU128, big1) // 340282366920938463463374607431768211455
}

// -----------------------------------------------------------------------------

// Int128 is a signed 128-bit integer in two's-complement representation,
// stored as two uint64 halves.
type Int128 struct {
	hi uint64
	lo uint64
}

// Int128_Init: func int128.init(v int) int128
func Int128_Init__0(v int) (out Int128) {
	return Int128_Cast__2(int64(v))
}

// Int128_Init: func int128.init(v untyped_bigint) int128
// Panics when v is outside the valid int128 range.
func Int128_Init__1(v UntypedBigint) (out Int128) {
	return Int128_Cast__1(v)
}
// Int128_Cast: func int128(v int) int128
func Int128_Cast__0(v int) Int128 {
	return Int128_Cast__2(int64(v))
}

// Int128_Cast: func int128(v untyped_bigint) int128
// Panics (via Int128_Init__1) when v is out of range.
func Int128_Cast__1(v UntypedBigint) (out Int128) {
	out, inRange := Int128_Cast__9(v)
	if !inRange {
		log.Panicf("value %v was not in valid int128 range\n", v)
	}
	return
}

// Int128_Cast: func int128(v int64) int128
// Negative values are sign-extended: hi is filled with 1-bits.
func Int128_Cast__2(v int64) (out Int128) {
	var hi uint64
	if v < 0 {
		hi = maxUint64
	}
	return Int128{hi: hi, lo: uint64(v)}
}

// Int128_Cast: func int128(v uint64) int128
func Int128_Cast__3(v uint64) Int128 {
	return Int128{lo: v}
}

// Int128_Cast: func int128(v int32) int128
func Int128_Cast__4(v int32) Int128 {
	return Int128_Cast__2(int64(v))
}

// Int128_Cast: func int128(v int16) int128
func Int128_Cast__5(v int16) Int128 {
	return Int128_Cast__2(int64(v))
}

// Int128_Cast: func int128(v int8) int128
func Int128_Cast__6(v int8) Int128 {
	return Int128_Cast__2(int64(v))
}

// Int128_Cast: func int128(v uint128) int128
// Reinterprets the bits; values >= 2^127 become negative.
func Int128_Cast__7(v Uint128) (out Int128) {
	return Int128{hi: v.hi, lo: v.lo}
}

// Int128_Cast: func int128(v *big.Int) int128
// Out-of-range values are silently clamped; use Int128_Cast__9 to detect.
func Int128_Cast__8(v *big.Int) Int128 {
	out, _ := Int128_Cast__9(v)
	return out
}
// Int128_Cast__9 converts a *big.Int to Int128, reporting whether the
// value was representable. The magnitude is first collected into a
// Uint128 from v's raw words, then negated for negative inputs.
// Out-of-range values saturate at the minimum/maximum int128.
func Int128_Cast__9(v *big.Int) (out Int128, inRange bool) {
	neg := v.Sign() < 0
	words := v.Bits()

	var u Uint128
	inRange = true
	switch intSize {
	case 64:
		lw := len(words)
		switch lw {
		case 0:
		case 1:
			u.lo = uint64(words[0])
		case 2:
			u.hi = uint64(words[1])
			u.lo = uint64(words[0])
		default:
			u, inRange = Uint128{hi: maxUint64, lo: maxUint64}, false
		}
	case 32:
		lw := len(words)
		switch lw {
		case 0:
		case 1:
			u.lo = uint64(words[0])
		case 2:
			u.lo = (uint64(words[1]) << 32) | (uint64(words[0]))
		case 3:
			u.hi = uint64(words[2])
			u.lo = (uint64(words[1]) << 32) | (uint64(words[0]))
		case 4:
			u.hi = (uint64(words[3]) << 32) | (uint64(words[2]))
			u.lo = (uint64(words[1]) << 32) | (uint64(words[0]))
		default:
			u, inRange = Uint128{hi: maxUint64, lo: maxUint64}, false
		}
	default:
		panic("unsupported bit size")
	}

	if neg {
		// Magnitudes above 2^127 cannot be negated into an int128.
		if cmp := u.Cmp__1(Uint128{hi: 0x8000000000000000, lo: 0}); cmp > 0 {
			out, inRange = Int128{hi: 0x8000000000000000, lo: 0}, false
		} else {
			out = Int128{hi: u.hi, lo: u.lo}.Gop_Neg()
		}
	} else {
		// Magnitudes above 2^127-1 exceed the positive int128 range.
		if cmp := u.Cmp__1(Uint128{hi: maxInt64, lo: maxUint64}); cmp > 0 {
			out, inRange = Int128{hi: maxInt64, lo: maxUint64}, false
		} else {
			out = Int128{hi: u.hi, lo: u.lo}
		}
	}
	return
}
// Int128_Cast: func int128() int128
func Int128_Cast__a() Int128 {
	return Int128{}
}

// Gop_Rcast: func uint128(v int128) uint128
// Reinterprets the bits; negative values wrap to large uint128 values.
func (i Int128) Gop_Rcast__0() Uint128 {
	return Uint128{lo: i.lo, hi: i.hi}
}

// Gop_Rcast: func uint128(v int128) (uint128, bool)
// inRange is false when i is negative.
func (i Int128) Gop_Rcast__1() (out Uint128, inRange bool) {
	return Uint128{lo: i.lo, hi: i.hi}, i.hi&signBit == 0
}

// Gop_Rcast: func int64(v int128) int64
// Truncates to 64 bits, preserving the two's-complement interpretation.
func (i Int128) Gop_Rcast__2() int64 {
	if i.hi&signBit == 0 {
		return int64(i.lo)
	}
	return -int64(^(i.lo - 1))
}

// Gop_Rcast: func int64(v int128) (int64, bool)
// inRange is false when the value does not fit in an int64.
func (i Int128) Gop_Rcast__3() (out int64, inRange bool) {
	if i.hi&signBit == 0 {
		return int64(i.lo), i.hi == 0 && i.lo <= maxInt64
	}
	return -int64(^(i.lo - 1)), i.hi == maxUint64 && i.lo >= 0x8000000000000000
}

// Gop_Rcast: func uint64(v int128) uint64
// Truncates to the low 64 bits.
func (i Int128) Gop_Rcast__4() uint64 {
	return i.lo
}

// Gop_Rcast: func uint64(v int128) (uint64, bool)
// inRange is false when the value does not fit in a uint64.
func (i Int128) Gop_Rcast__5() (out uint64, inRange bool) {
	return i.lo, i.hi == 0
}
// -----------------------------------------------------------------------------
// IsZero reports whether i is zero.
func (i Int128) IsZero() bool {
	return i.lo == 0 && i.hi == 0
}

// Scan implements fmt.Scanner, reading a base-10 int128 token.
func (i *Int128) Scan(state fmt.ScanState, verb rune) (err error) {
	t, err := state.Token(true, nil)
	if err != nil {
		return
	}
	v, err := ParseInt128(string(t), 10)
	if err == nil {
		*i = v
	}
	return
}

// Format implements fmt.Formatter by delegating to math/big.
func (i Int128) Format(s fmt.State, c rune) {
	// TODO: not so good
	i.BigInt().Format(s, c)
}

// String returns the base-10 representation of i.
func (i Int128) String() string {
	return i.Text(10)
}

// Text returns the representation of i in the given base.
func (i Int128) Text(base int) string {
	// TODO: not so good
	return i.BigInt().Text(base)
}

// BigInt returns i as a newly allocated *big.Int.
func (i Int128) BigInt() *big.Int {
	var v big.Int
	i.ToBigInt(&v)
	return &v
}
// ToBigInt stores i's value into b. The raw halves are first assembled as
// an unsigned magnitude; negative values are then converted out of two's
// complement via XOR with 2^128-1 (bigMaxU128), +1, and negation.
func (i Int128) ToBigInt(b *big.Int) {
	neg := i.hi&signBit != 0
	if i.hi > 0 {
		b.SetUint64(i.hi)
		b.Lsh(b, 64)
	}
	var lo big.Int
	lo.SetUint64(i.lo)
	b.Add(b, &lo)
	if neg {
		b.Xor(b, bigMaxU128).Add(b, big1).Neg(b)
	}
}
// Sign returns -1 when i is negative, 0 when i is zero, and 1 when i is
// positive.
func (i Int128) Sign() int {
	switch {
	case i.lo == 0 && i.hi == 0:
		return 0
	case i.hi&signBit != 0:
		return -1
	default:
		return 1
	}
}
// Gop_Inc increments i in place, wrapping around at the int128 maximum.
func (i *Int128) Gop_Inc() {
	i.lo++
	if i.lo == 0 {
		i.hi++
	}
}

// Gop_Dec decrements i in place, wrapping around at the int128 minimum.
func (i *Int128) Gop_Dec() {
	if i.lo == 0 {
		i.hi--
	}
	i.lo--
}

// Compound-assignment operator implementations for Go+. Each mutates the
// receiver in place by delegating to the corresponding value operation.

// Gop_AddAssign: func (a *int128) += (b int128)
func (i *Int128) Gop_AddAssign(b Int128) {
	*i = i.Gop_Add__1(b)
}

// Gop_SubAssign: func (a *int128) -= (b int128)
func (i *Int128) Gop_SubAssign(b Int128) {
	*i = i.Gop_Sub__1(b)
}

// Gop_MulAssign: func (a *int128) *= (b int128)
func (i *Int128) Gop_MulAssign(b Int128) {
	*i = i.Gop_Mul__1(b)
}

// Gop_QuoAssign: func (a *int128) /= (b int128) {
func (i *Int128) Gop_QuoAssign(b Int128) {
	*i = i.Gop_Quo__1(b)
}

// Gop_RemAssign: func (a *int128) %= (b int128)
func (i *Int128) Gop_RemAssign(b Int128) {
	*i = i.Gop_Rem__1(b)
}

// Gop_OrAssign: func (a *int128) |= (b int128)
func (i *Int128) Gop_OrAssign(b Int128) {
	*i = i.Gop_Or(b)
}

// Gop_XorAssign: func (a *int128) ^= (b int128)
func (i *Int128) Gop_XorAssign(b Int128) {
	*i = i.Gop_Xor(b)
}

// Gop_AndAssign: func (a *int128) &= (b int128)
func (i *Int128) Gop_AndAssign(b Int128) {
	*i = i.Gop_And(b)
}

// Gop_AndNotAssign: func (a *int128) &^= (b int128)
func (i *Int128) Gop_AndNotAssign(b Int128) {
	*i = i.Gop_AndNot(b)
}

// Gop_LshAssign: func (a *int128) <<= (n untyped_uint)
func (i *Int128) Gop_LshAssign(n Gop_ninteger) {
	*i = i.Gop_Lsh(n)
}

// Gop_RshAssign: func (a *int128) >>= (n untyped_uint)
func (i *Int128) Gop_RshAssign(n Gop_ninteger) {
	*i = i.Gop_Rsh(n)
}
// Gop_Add__1: int128 + int128, wrapping on overflow (two's complement).
func (i Int128) Gop_Add__1(n Int128) (v Int128) {
	var carry uint64
	v.lo, carry = bits.Add64(i.lo, n.lo, 0)
	v.hi, _ = bits.Add64(i.hi, n.hi, carry)
	return v
}

// Gop_Add__0: int128 + int64. For negative n, adding maxUint64 to the
// high half sign-extends n into 128 bits before the addition.
func (i Int128) Gop_Add__0(n int64) (v Int128) {
	var carry uint64
	v.lo, carry = bits.Add64(i.lo, uint64(n), 0)
	if n < 0 {
		v.hi = i.hi + maxUint64 + carry
	} else {
		v.hi = i.hi + carry
	}
	return v
}

// Gop_Sub__1: int128 - int128, wrapping on overflow (two's complement).
func (i Int128) Gop_Sub__1(n Int128) (v Int128) {
	var borrowed uint64
	v.lo, borrowed = bits.Sub64(i.lo, n.lo, 0)
	v.hi, _ = bits.Sub64(i.hi, n.hi, borrowed)
	return v
}

// Gop_Sub__0: int128 - int64. For negative n, subtracting maxUint64 from
// the high half undoes the implicit sign extension of uint64(n).
func (i Int128) Gop_Sub__0(n int64) (v Int128) {
	var borrowed uint64
	if n < 0 {
		v.lo, borrowed = bits.Sub64(i.lo, uint64(n), 0)
		v.hi = i.hi - maxUint64 - borrowed
	} else {
		v.lo, borrowed = bits.Sub64(i.lo, uint64(n), 0)
		v.hi = i.hi - borrowed
	}
	return v
}
// Gop_Neg returns -i using two's-complement negation (^i + 1).
// Zero negates to zero; MinInt128 overflows back to itself, matching
// Go's behavior for fixed-width signed integers.
func (i Int128) Gop_Neg() (v Int128) {
	if i.lo == 0 && i.hi == 0 {
		return
	}
	if i.hi&signBit == 0 {
		// non-negative input: invert both words, then add 1 via (^lo)+1
		v.hi = ^i.hi
		v.lo = (^i.lo) + 1
	} else {
		// negative input: equivalent form ^(lo-1) avoids double-carry
		v.hi = ^i.hi
		v.lo = ^(i.lo - 1)
	}
	if v.lo == 0 { // handle overflow
		v.hi++
	}
	return v
}

// Gop_Dup returns a copy of i (the value receiver is already a copy).
func (i Int128) Gop_Dup() (v Int128) {
	return i
}
// Abs returns the absolute value of i as a signed integer.
// Note: Abs of MinInt128 overflows and returns MinInt128 itself.
func (i Int128) Abs__0() Int128 {
	if i.hi&signBit != 0 {
		// two's-complement negate in place
		i.hi = ^i.hi
		i.lo = ^(i.lo - 1)
		if i.lo == 0 { // handle carry
			i.hi++
		}
	}
	return i
}

// Abs__1 additionally reports whether the result is in range: it is false
// only for MinInt128 (0x8000...0000), whose magnitude is not representable.
func (i Int128) Abs__1() (ret Int128, inRange bool) {
	return i.Abs__0(), i.hi != 0x8000000000000000 || i.lo != 0
}

// AbsU returns the absolute value of i as an unsigned integer. All
// values of i are representable using this function, but the type is
// changed.
func (i Int128) AbsU() Uint128 {
	// MinInt128 is its own two's-complement negation; return its magnitude
	// directly as an unsigned value.
	if i.hi == 0x8000000000000000 && i.lo == 0 {
		return Uint128{hi: 0x8000000000000000}
	}
	if i.hi&signBit != 0 {
		i.hi = ^i.hi
		i.lo = ^(i.lo - 1)
		if i.lo == 0 { // handle carry
			i.hi++
		}
	}
	return Uint128{hi: i.hi, lo: i.lo}
}
// Cmp compares i to n and returns:
//
//	< 0 if i < n
//	  0 if i == n
//	> 0 if i > n
//
// The specific value returned by Cmp is undefined, but it is guaranteed to
// satisfy the above constraints.
func (i Int128) Cmp__1(n Int128) int {
	if i.hi == n.hi && i.lo == n.lo {
		return 0
	} else if i.hi&signBit == n.hi&signBit {
		// same sign: unsigned comparison of the words orders correctly
		if i.hi > n.hi || (i.hi == n.hi && i.lo > n.lo) {
			return 1
		}
	} else if i.hi&signBit == 0 {
		// different signs: the non-negative operand is greater
		return 1
	}
	return -1
}

// Cmp64 compares 'i' to 64-bit int 'n' and returns:
//
//	< 0 if i < n
//	  0 if i == n
//	> 0 if i > n
//
// The specific value returned by Cmp is undefined, but it is guaranteed to
// satisfy the above constraints.
func (i Int128) Cmp__0(n int64) int {
	var nhi uint64
	var nlo = uint64(n)
	if n < 0 {
		nhi = maxUint64 // sign-extend n into the high word
	}
	if i.hi == nhi && i.lo == nlo {
		return 0
	} else if i.hi&signBit == nhi&signBit {
		if i.hi > nhi || (i.hi == nhi && i.lo > nlo) {
			return 1
		}
	} else if i.hi&signBit == 0 {
		return 1
	}
	return -1
}
// Gop_EQ__1 reports whether i == n.
func (i Int128) Gop_EQ__1(n Int128) bool {
	return i.hi == n.hi && i.lo == n.lo
}

// Gop_EQ__0 reports whether i equals the sign-extended 64-bit n.
func (i Int128) Gop_EQ__0(n int64) bool {
	var nhi uint64
	var nlo = uint64(n)
	if n < 0 {
		nhi = maxUint64 // sign extension of n
	}
	return i.hi == nhi && i.lo == nlo
}
// Gop_GT__1 reports whether i > n (signed). With equal sign bits the
// words compare correctly as unsigned; with differing signs the
// non-negative operand is the greater one.
func (i Int128) Gop_GT__1(n Int128) bool {
	if i.hi&signBit == n.hi&signBit {
		return i.hi > n.hi || (i.hi == n.hi && i.lo > n.lo)
	} else if i.hi&signBit == 0 {
		return true
	}
	return false
}

// Gop_GT__0 reports whether i > n for a sign-extended 64-bit n.
func (i Int128) Gop_GT__0(n int64) bool {
	var nhi uint64
	var nlo = uint64(n)
	if n < 0 {
		nhi = maxUint64
	}
	if i.hi&signBit == nhi&signBit {
		return i.hi > nhi || (i.hi == nhi && i.lo > nlo)
	} else if i.hi&signBit == 0 {
		return true
	}
	return false
}
// Gop_GE__1 reports whether i >= n (signed): equality first, then the
// same sign-aware ordering as Gop_GT__1.
func (i Int128) Gop_GE__1(n Int128) bool {
	if i.hi == n.hi && i.lo == n.lo {
		return true
	}
	if i.hi&signBit == n.hi&signBit {
		return i.hi > n.hi || (i.hi == n.hi && i.lo > n.lo)
	} else if i.hi&signBit == 0 {
		return true
	}
	return false
}

// Gop_GE__0 reports whether i >= n for a sign-extended 64-bit n.
func (i Int128) Gop_GE__0(n int64) bool {
	var nhi uint64
	var nlo = uint64(n)
	if n < 0 {
		nhi = maxUint64
	}
	if i.hi == nhi && i.lo == nlo {
		return true
	}
	if i.hi&signBit == nhi&signBit {
		return i.hi > nhi || (i.hi == nhi && i.lo > nlo)
	} else if i.hi&signBit == 0 {
		return true
	}
	return false
}
// Gop_LT__1 reports whether i < n (signed). With differing signs the
// negative operand is the lesser one.
func (i Int128) Gop_LT__1(n Int128) bool {
	if i.hi&signBit == n.hi&signBit {
		return i.hi < n.hi || (i.hi == n.hi && i.lo < n.lo)
	} else if i.hi&signBit != 0 {
		return true
	}
	return false
}

// Gop_LT__0 reports whether i < n for a sign-extended 64-bit n.
func (i Int128) Gop_LT__0(n int64) bool {
	var nhi uint64
	var nlo = uint64(n)
	if n < 0 {
		nhi = maxUint64
	}
	if i.hi&signBit == nhi&signBit {
		return i.hi < nhi || (i.hi == nhi && i.lo < nlo)
	} else if i.hi&signBit != 0 {
		return true
	}
	return false
}
// Gop_LE__1 reports whether i <= n (signed): equality first, then the
// same sign-aware ordering as Gop_LT__1.
func (i Int128) Gop_LE__1(n Int128) bool {
	if i.hi == n.hi && i.lo == n.lo {
		return true
	}
	if i.hi&signBit == n.hi&signBit {
		return i.hi < n.hi || (i.hi == n.hi && i.lo < n.lo)
	} else if i.hi&signBit != 0 {
		return true
	}
	return false
}

// Gop_LE__0 reports whether i <= n for a sign-extended 64-bit n.
func (i Int128) Gop_LE__0(n int64) bool {
	var nhi uint64
	var nlo = uint64(n)
	if n < 0 {
		nhi = maxUint64
	}
	if i.hi == nhi && i.lo == nlo {
		return true
	}
	if i.hi&signBit == nhi&signBit {
		return i.hi < nhi || (i.hi == nhi && i.lo < nlo)
	} else if i.hi&signBit != 0 {
		return true
	}
	return false
}
// Gop_And returns the bitwise AND of i and n.
func (i Int128) Gop_And(n Int128) Int128 {
	return Int128{hi: i.hi & n.hi, lo: i.lo & n.lo}
}
// Gop_AndNot returns the bit-clear (AND NOT) of i and n.
func (i Int128) Gop_AndNot(n Int128) Int128 {
	return Int128{hi: i.hi &^ n.hi, lo: i.lo &^ n.lo}
}
// Gop_Not returns the bitwise complement of i.
func (i Int128) Gop_Not() Int128 {
	i.hi = ^i.hi
	i.lo = ^i.lo
	return i
}
// Gop_Or returns the bitwise OR of i and n.
func (i Int128) Gop_Or(n Int128) Int128 {
	return Int128{hi: i.hi | n.hi, lo: i.lo | n.lo}
}
// Gop_Xor returns the bitwise XOR of i and v.
func (i Int128) Gop_Xor(v Int128) Int128 {
	return Int128{hi: i.hi ^ v.hi, lo: i.lo ^ v.lo}
}
// Gop_Lsh returns i << n. Bits shifted past bit 127 are discarded.
// Relies on Go's defined semantics for shift counts >= the operand
// width (the result is 0), e.g. `i.lo >> (64 - n)` when n == 0.
func (i Int128) Gop_Lsh(n Gop_ninteger) Int128 {
	if n < 64 {
		i.hi = (i.hi << n) | (i.lo >> (64 - n))
		i.lo <<= n
	} else {
		// the whole low word moves into (or past) the high word
		i.hi = i.lo << (n - 64)
		i.lo = 0
	}
	return i
}
// Gop_Rsh returns i >> n as an arithmetic (sign-preserving) shift,
// matching Go's semantics for signed integers.
//
// Fix: for n > 64 the original filled the low word with a LOGICAL shift
// of the high word (`i.lo = i.hi >> (n - 64)`), so negative values lost
// their sign extension (e.g. (-1) >> 65 produced lo = 0x7FFF... instead
// of all ones). The low word must be filled from an arithmetic shift of
// the (signed) high word. Go defines shift counts >= the operand width
// (int64 sign-fills, uint64 yields 0), so both branches are total.
func (i Int128) Gop_Rsh(n Gop_ninteger) Int128 {
	if n < 64 {
		// low word receives bits falling out of the high word
		i.lo = (i.lo >> n) | (i.hi << (64 - n))
	} else {
		// low word comes entirely from the high word, sign-extended
		i.lo = uint64(int64(i.hi) >> (n - 64))
	}
	i.hi = uint64(int64(i.hi) >> n) // arithmetic shift keeps the sign
	return i
}
// Mul returns the product of two I128s.
//
// Overflow should wrap around, as per the Go spec.
//
// The low 128 bits of the full product are kept: bits.Mul64 produces the
// low-word partial product, and the cross terms contribute only to the
// high word (their own overflow is discarded by wrapping).
func (i Int128) Gop_Mul__1(n Int128) (dest Int128) {
	hi, lo := bits.Mul64(i.lo, n.lo)
	hi += i.hi*n.lo + i.lo*n.hi
	return Int128{hi, lo}
}
// Gop_Mul__0 returns i * n for a 64-bit signed n, sign-extending n to
// 128 bits (nhi == maxUint64 acts as -1 mod 2^64) before the same
// wrap-around multiply as Gop_Mul__1.
func (i Int128) Gop_Mul__0(n int64) Int128 {
	nlo := uint64(n)
	var nhi uint64
	if n < 0 {
		nhi = maxUint64
	}
	hi, lo := bits.Mul64(i.lo, nlo)
	hi += i.hi*nlo + i.lo*nhi
	return Int128{hi, lo}
}
// QuoRem returns the quotient q and remainder r for y != 0. If y == 0, a
// division-by-zero run-time panic occurs.
//
// QuoRem implements T-division and modulus (like Go):
//
//	q = x/y with the result truncated to zero
//	r = x - y*q
//
// U128 does not support big.Int.DivMod()-style Euclidean division.
//
// Note: dividing MinI128 by -1 will overflow, returning MinI128, as
// per the Go spec (https://golang.org/ref/spec#Integer_operators):
//
//	The one exception to this rule is that if the dividend x is the most
//	negative value for the int type of x, the quotient q = x / -1 is equal to x
//	(and r = 0) due to two's-complement integer overflow.
func (i Int128) QuoRem__1(by Int128) (q, r Int128) {
	// Work on magnitudes in the unsigned domain, then restore signs:
	// the quotient's sign is the XOR of the operand signs, the
	// remainder takes the dividend's sign (truncated division).
	qSign, rSign := 1, 1
	if i.Gop_LT__0(0) {
		qSign, rSign = -1, -1
		i = i.Gop_Neg()
	}
	if by.Gop_LT__0(0) {
		qSign = -qSign
		by = by.Gop_Neg()
	}
	qu, ru := i.Gop_Rcast__0().QuoRem__1(by.Gop_Rcast__0())
	q, r = Int128_Cast__7(qu), Int128_Cast__7(ru)
	if qSign < 0 {
		q = q.Gop_Neg()
	}
	if rSign < 0 {
		r = r.Gop_Neg()
	}
	return q, r
}
// QuoRem__0 is QuoRem for a 64-bit signed divisor: magnitude long
// division via bits.Div64, signs restored per truncated division
// (quotient sign = XOR of operand signs, remainder follows dividend).
// Panics if by == 0 (bits.Div64 division by zero).
func (i Int128) QuoRem__0(by int64) (q, r Int128) {
	ineg := i.hi&signBit != 0
	if ineg {
		i = i.Gop_Neg()
	}
	byneg := by < 0
	if byneg {
		by = -by
	}
	n := uint64(by)
	if i.hi < n {
		// single 128/64 step; precondition i.hi < n keeps Div64 from panicking
		q.lo, r.lo = bits.Div64(i.hi, i.lo, n)
	} else {
		// two-step schoolbook division: high word first, remainder feeds low word
		q.hi, r.lo = bits.Div64(0, i.hi, n)
		q.lo, r.lo = bits.Div64(r.lo, i.lo, n)
	}
	if ineg != byneg {
		q = q.Gop_Neg()
	}
	if ineg {
		r = r.Gop_Neg()
	}
	return q, r
}
// Quo returns the quotient x/y for y != 0. If y == 0, a division-by-zero
// run-time panic occurs. Quo implements truncated division (like Go); see
// QuoRem for more details.
func (i Int128) Gop_Quo__1(by Int128) (q Int128) {
	// Divide magnitudes unsigned, then negate when operand signs differ.
	qSign := 1
	if i.Gop_LT__0(0) {
		qSign = -1
		i = i.Gop_Neg()
	}
	if by.Gop_LT__0(0) {
		qSign = -qSign
		by = by.Gop_Neg()
	}
	qu := i.Gop_Rcast__0().Gop_Quo__1(by.Gop_Rcast__0())
	q = Int128_Cast__7(qu)
	if qSign < 0 {
		q = q.Gop_Neg()
	}
	return q
}
// Gop_Quo__0 returns i / by for a 64-bit signed divisor (truncated
// division). Same magnitude long division as QuoRem__0 with the
// remainder discarded. Panics if by == 0.
func (i Int128) Gop_Quo__0(by int64) (q Int128) {
	ineg := i.hi&signBit != 0
	if ineg {
		i = i.Gop_Neg()
	}
	byneg := by < 0
	if byneg {
		by = -by
	}
	n := uint64(by)
	if i.hi < n {
		q.lo, _ = bits.Div64(i.hi, i.lo, n)
	} else {
		var rlo uint64
		q.hi, rlo = bits.Div64(0, i.hi, n)
		q.lo, _ = bits.Div64(rlo, i.lo, n)
	}
	if ineg != byneg {
		q = q.Gop_Neg()
	}
	return q
}
// Gop_Rem returns the remainder of x%y for y != 0. If y == 0, a division-by-zero
// run-time panic occurs. Gop_Rem implements truncated modulus (like Go); see
// QuoRem for more details.
func (i Int128) Gop_Rem__1(by Int128) (r Int128) {
	_, r = i.QuoRem__1(by)
	return r
}
// Gop_Rem__0 returns i % by for a 64-bit signed divisor (truncated
// modulus: result takes the dividend's sign). Panics if by == 0.
func (i Int128) Gop_Rem__0(by int64) (r Int128) {
	ineg := i.hi&signBit != 0
	if ineg {
		i = i.Gop_Neg()
	}
	if by < 0 {
		by = -by // divisor sign does not affect the remainder
	}
	n := uint64(by)
	if i.hi < n {
		_, r.lo = bits.Div64(i.hi, i.lo, n)
	} else {
		_, r.lo = bits.Div64(0, i.hi, n)
		_, r.lo = bits.Div64(r.lo, i.lo, n)
	}
	if ineg {
		r = r.Gop_Neg()
	}
	return r
}
// -----------------------------------------------------------------------------
// ParseInt128 parses s in the given base as a 128-bit signed integer.
// It returns an error when s is not a valid big.Int string or the value
// falls outside the int128 range.
func ParseInt128(s string, base int) (out Int128, err error) {
	v, ok := new(big.Int).SetString(s, base)
	if !ok {
		err = fmt.Errorf("invalid int128 string: %q", s)
		return
	}
	var inRange bool
	if out, inRange = Int128_Cast__9(v); !inRange {
		err = fmt.Errorf("string %q was not in valid int128 range", s)
	}
	return
}
// FormatInt128 returns the string representation of i in the given base.
func FormatInt128(i Int128, base int) string {
	return i.Text(base)
}
// -----------------------------------------------------------------------------
<|start_filename|>cl/expr.go<|end_filename|>
/*
* Copyright (c) 2021 The GoPlus Authors (goplus.org). All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cl
import (
goast "go/ast"
gotoken "go/token"
"go/types"
"log"
"math/big"
"reflect"
"strconv"
"strings"
"github.com/goplus/gop/ast"
"github.com/goplus/gop/token"
"github.com/goplus/gox"
)
/*-----------------------------------------------------------------------------
Name context:
- varVal (ident)
- varRef = expr (identLHS)
- pkgRef.member (selectorExpr)
- pkgRef.member = expr (selectorExprLHS)
- pkgRef.fn(args) (callExpr)
- fn(args) (callExpr)
- spx.fn(args) (callExpr)
- this.member (classMember)
- this.method(args) (classMember)
Name lookup:
- local variables
- $recv members (only in class files)
- package globals (variables, constants, types, imported packages etc.)
- $spx package exports (only in class files)
- $universe package exports (including builtins)
// ---------------------------------------------------------------------------*/
const (
	clIdentAutoCall     = 1 << iota // treat a no-arg method/alias as an auto property call
	clIdentAllowBuiltin             // identifier may resolve to a builtin outside a call
	clIdentLHS                      // identifier is an assignment target
	clIdentSelectorExpr             // identifier is the X of a selector (may be a package)
	clIdentGoto                     // identifier is a goto label
	clCallWithTwoValue              // call used in a two-value context (v, ok := ...)
)
// compileIdent resolves a bare identifier and pushes either a value or an
// assignable reference onto the code builder, following the lookup order
// documented above (locals, class-file receiver members, package globals,
// imported packages, dot-imports, universe/builtins).
// It returns a non-nil *gox.PkgRef only when the identifier names an
// imported package and clIdentSelectorExpr is set; otherwise nil.
func compileIdent(ctx *blockCtx, ident *ast.Ident, flags int) *gox.PkgRef {
	// fvalue: the identifier is consumed as a value (selector base or RHS)
	fvalue := (flags&clIdentSelectorExpr) != 0 || (flags&clIdentLHS) == 0
	name := ident.Name
	if name == "_" {
		if fvalue {
			panic(ctx.newCodeError(ident.Pos(), "cannot use _ as value"))
		}
		ctx.cb.VarRef(nil) // blank identifier as assignment target
		return nil
	}
	scope := ctx.pkg.Types.Scope()
	at, o := ctx.cb.Scope().LookupParent(name, token.NoPos)
	if o != nil {
		if at != scope && at != types.Universe { // local object
			goto find
		}
	}
	if ctx.isClass { // in a Go+ class file
		if fn := ctx.cb.Func(); fn != nil {
			sig := fn.Ancestor().Type().(*types.Signature)
			if recv := sig.Recv(); recv != nil {
				ctx.cb.Val(recv)
				if compileMember(ctx, ident, name, flags) == nil { // class member object
					return nil
				}
				// not a member: pop the receiver pushed above
				ctx.cb.InternalStack().PopN(1)
			}
		}
	}
	// global object
	if ctx.loadSymbol(name) {
		o, at = scope.Lookup(name), scope
	}
	if o != nil && at != types.Universe {
		goto find
	}
	// pkgRef object
	if (flags & clIdentSelectorExpr) != 0 {
		if pr, ok := ctx.findImport(name); ok {
			return pr
		}
	}
	// object from import . "xxx"
	if compilePkgRef(ctx, nil, ident, flags) {
		return nil
	}
	// universe object
	if obj := ctx.pkg.Builtin().TryRef(name); obj != nil {
		// NOTE(review): the guard inspects `o` (the earlier lookup result),
		// not `obj` — presumably intentional, but confirm it should not be
		// isBuiltin(obj).
		if (flags&clIdentAllowBuiltin) == 0 && isBuiltin(o) && !strings.HasPrefix(o.Name(), "print") {
			panic(ctx.newCodeErrorf(ident.Pos(), "use of builtin %s not in function call", name))
		}
		o = obj
	} else if o == nil {
		if (clIdentGoto & flags) != 0 {
			l := ident.Obj.Data.(*ast.Ident)
			panic(ctx.newCodeErrorf(l.Pos(), "label %v is not defined", l.Name))
		}
		panic(ctx.newCodeErrorf(ident.Pos(), "undefined: %s", name))
	}
find:
	if fvalue {
		ctx.cb.Val(o, ident)
	} else {
		ctx.cb.VarRef(o, ident)
	}
	return nil
}
func isBuiltin(o types.Object) bool {
if _, ok := o.(*types.Builtin); ok {
return ok
}
return false
}
// compileMember emits a member access on the value currently atop the
// builder stack, choosing the gox member mode from flags: a reference for
// LHS use, an auto property for auto-call contexts, otherwise a method
// alias. Returns the lookup error (nil on success).
func compileMember(ctx *blockCtx, v ast.Node, name string, flags int) error {
	var mflag gox.MemberFlag
	switch {
	case (flags & clIdentLHS) != 0:
		mflag = gox.MemberFlagRef
	case (flags & clIdentAutoCall) != 0:
		mflag = gox.MemberFlagAutoProperty
	default:
		mflag = gox.MemberFlagMethodAlias
	}
	_, err := ctx.cb.Member(name, mflag, v)
	return err
}
// compileExprLHS compiles an expression used as an assignment target.
// Only identifiers, index, selector and dereference expressions are
// assignable; anything else is a compiler bug.
func compileExprLHS(ctx *blockCtx, expr ast.Expr) {
	switch v := expr.(type) {
	case *ast.Ident:
		compileIdent(ctx, v, clIdentLHS)
	case *ast.IndexExpr:
		compileIndexExprLHS(ctx, v)
	case *ast.SelectorExpr:
		compileSelectorExprLHS(ctx, v)
	case *ast.StarExpr:
		compileStarExprLHS(ctx, v)
	default:
		log.Panicln("compileExpr failed: unknown -", reflect.TypeOf(v))
	}
}
// compileExpr compiles any value expression onto the code builder.
// The optional twoValue flag is forwarded to forms that can produce a
// second result (calls, index/map lookup, type assertions,
// comprehensions, unary expressions).
func compileExpr(ctx *blockCtx, expr ast.Expr, twoValue ...bool) {
	switch v := expr.(type) {
	case *ast.Ident:
		compileIdent(ctx, v, clIdentAutoCall)
	case *ast.BasicLit:
		compileBasicLit(ctx, v)
	case *ast.CallExpr:
		flags := 0
		if twoValue != nil && twoValue[0] {
			flags = clCallWithTwoValue
		}
		compileCallExpr(ctx, v, flags)
	case *ast.SelectorExpr:
		compileSelectorExpr(ctx, v, clIdentAutoCall)
	case *ast.BinaryExpr:
		compileBinaryExpr(ctx, v)
	case *ast.UnaryExpr:
		compileUnaryExpr(ctx, v, twoValue != nil && twoValue[0])
	case *ast.FuncLit:
		compileFuncLit(ctx, v)
	case *ast.CompositeLit:
		compileCompositeLit(ctx, v, nil, false)
	case *ast.SliceLit:
		compileSliceLit(ctx, v)
	case *ast.RangeExpr:
		compileRangeExpr(ctx, v)
	case *ast.IndexExpr:
		compileIndexExpr(ctx, v, twoValue != nil && twoValue[0])
	case *ast.SliceExpr:
		compileSliceExpr(ctx, v)
	case *ast.StarExpr:
		compileStarExpr(ctx, v)
	case *ast.ArrayType:
		ctx.cb.Typ(toArrayType(ctx, v), v)
	case *ast.MapType:
		ctx.cb.Typ(toMapType(ctx, v), v)
	case *ast.StructType:
		ctx.cb.Typ(toStructType(ctx, v), v)
	case *ast.ChanType:
		ctx.cb.Typ(toChanType(ctx, v), v)
	case *ast.InterfaceType:
		ctx.cb.Typ(toInterfaceType(ctx, v), v)
	case *ast.ComprehensionExpr:
		compileComprehensionExpr(ctx, v, twoValue != nil && twoValue[0])
	case *ast.TypeAssertExpr:
		compileTypeAssertExpr(ctx, v, twoValue != nil && twoValue[0])
	case *ast.ParenExpr:
		compileExpr(ctx, v.X, twoValue...) // parentheses are transparent
	case *ast.ErrWrapExpr:
		compileErrWrapExpr(ctx, v)
	case *ast.FuncType:
		ctx.cb.Typ(toFuncType(ctx, v, nil), v)
	case *ast.Ellipsis:
		panic("compileEllipsis: ast.Ellipsis unexpected")
	case *ast.KeyValueExpr:
		panic("compileExpr: ast.KeyValueExpr unexpected")
	default:
		log.Panicln("compileExpr failed: unknown -", reflect.TypeOf(v))
	}
}
// compileExprOrNone compiles expr, or pushes a "none" placeholder when
// expr is absent (e.g. an omitted slice bound).
func compileExprOrNone(ctx *blockCtx, expr ast.Expr) {
	if expr == nil {
		ctx.cb.None()
		return
	}
	compileExpr(ctx, expr)
}
// compileUnaryExpr compiles the operand then applies the unary operator;
// twoValue is forwarded to gox (used by two-result unary forms).
func compileUnaryExpr(ctx *blockCtx, v *ast.UnaryExpr, twoValue bool) {
	compileExpr(ctx, v.X)
	ctx.cb.UnaryOp(gotoken.Token(v.Op), twoValue)
}

// compileBinaryExpr compiles both operands (left first) then applies the
// binary operator.
func compileBinaryExpr(ctx *blockCtx, v *ast.BinaryExpr) {
	compileExpr(ctx, v.X)
	compileExpr(ctx, v.Y)
	ctx.cb.BinaryOp(gotoken.Token(v.Op), v)
}
// compileIndexExprLHS compiles `x[i] = ...`: pushes x and i, then emits
// an index reference.
func compileIndexExprLHS(ctx *blockCtx, v *ast.IndexExpr) {
	compileExpr(ctx, v.X)
	compileExpr(ctx, v.Index)
	ctx.cb.IndexRef(1, v)
}

func compileStarExprLHS(ctx *blockCtx, v *ast.StarExpr) { // *x = ...
	compileExpr(ctx, v.X)
	ctx.cb.ElemRef()
}

func compileStarExpr(ctx *blockCtx, v *ast.StarExpr) { // ... = *x
	compileExpr(ctx, v.X)
	ctx.cb.Star()
}
// compileTypeAssertExpr compiles `x.(T)`; twoValue selects the
// `v, ok :=` form. The bare `x.(type)` form is only legal inside a type
// switch and is rejected here.
func compileTypeAssertExpr(ctx *blockCtx, v *ast.TypeAssertExpr, twoValue bool) {
	compileExpr(ctx, v.X)
	if v.Type == nil {
		panic("TODO: x.(type) is only used in type switch")
	}
	typ := toType(ctx, v.Type)
	ctx.cb.TypeAssert(typ, twoValue, v)
}

func compileIndexExpr(ctx *blockCtx, v *ast.IndexExpr, twoValue bool) { // x[i]
	compileExpr(ctx, v.X)
	compileExpr(ctx, v.Index)
	ctx.cb.Index(1, twoValue, v)
}

// compileSliceExpr pushes x and each bound (None for omitted bounds);
// the Max bound is pushed only for the 3-index form x[i:j:k].
func compileSliceExpr(ctx *blockCtx, v *ast.SliceExpr) { // x[i:j:k]
	compileExpr(ctx, v.X)
	compileExprOrNone(ctx, v.Low)
	compileExprOrNone(ctx, v.High)
	if v.Slice3 {
		compileExprOrNone(ctx, v.Max)
	}
	ctx.cb.Slice(v.Slice3, v)
}
// compileSelectorExprLHS compiles `x.sel = ...`. When x resolves to an
// imported package the target is the package member; otherwise it is a
// member reference on the compiled value of x.
func compileSelectorExprLHS(ctx *blockCtx, v *ast.SelectorExpr) {
	switch x := v.X.(type) {
	case *ast.Ident:
		if at := compileIdent(ctx, x, clIdentLHS|clIdentSelectorExpr); at != nil {
			ctx.cb.VarRef(at.Ref(v.Sel.Name))
			return
		}
	default:
		compileExpr(ctx, v.X)
	}
	ctx.cb.MemberRef(v.Sel.Name, v)
}

// compileSelectorExpr compiles `x.sel` as a value. A package-qualified
// name is resolved via compilePkgRef; a failed package lookup reports
// "undefined" for exported names and "unexported" otherwise. Any other
// base expression falls through to a member access.
func compileSelectorExpr(ctx *blockCtx, v *ast.SelectorExpr, flags int) {
	switch x := v.X.(type) {
	case *ast.Ident:
		if at := compileIdent(ctx, x, flags|clIdentSelectorExpr); at != nil {
			if compilePkgRef(ctx, at, v.Sel, flags) {
				return
			}
			if token.IsExported(v.Sel.Name) {
				panic(ctx.newCodeErrorf(x.Pos(), "undefined: %s.%s", x.Name, v.Sel.Name))
			}
			panic(ctx.newCodeErrorf(x.Pos(), "cannot refer to unexported name %s.%s", x.Name, v.Sel.Name))
		}
	default:
		compileExpr(ctx, v.X)
	}
	if err := compileMember(ctx, v, v.Sel.Name, flags); err != nil {
		panic(err)
	}
}
// pkgRef looks up name in package at. A lowercase name is retried with
// its first ASCII letter capitalized; if that resolves to a function the
// lookup succeeds as an alias (enabling Go+ auto-property calls).
// alias reports whether the capitalized form was used.
func pkgRef(at *gox.PkgRef, name string) (o types.Object, alias bool) {
	if c := name[0]; c >= 'a' && c <= 'z' {
		name = string(rune(c)+('A'-'a')) + name[1:]
		if v := at.TryRef(name); v != nil && gox.IsFunc(v.Type()) {
			return v, true
		}
		return
	}
	return at.TryRef(name), false
}
// lookupPkgRef resolves x in pkg when pkg is non-nil; otherwise it scans
// every dot-imported package in ctx.lookups, panicking if the name is
// declared in more than one (pkg then holds the previous match, `at` the
// conflicting one).
func lookupPkgRef(ctx *blockCtx, pkg *gox.PkgRef, x *ast.Ident) (o types.Object, alias bool) {
	if pkg != nil {
		return pkgRef(pkg, x.Name)
	}
	for _, at := range ctx.lookups {
		if o2, alias2 := pkgRef(at, x.Name); o2 != nil {
			if o != nil {
				panic(ctx.newCodeErrorf(
					x.Pos(), "confliction: %s declared both in \"%s\" and \"%s\"",
					x.Name, at.Types.Path(), pkg.Types.Path()))
			}
			pkg, o, alias = at, o2, alias2
		}
	}
	return
}
// compilePkgRef emits a reference to package member x (from `at`, or any
// dot-import when at is nil). Reports whether the name resolved.
// An aliased function in auto-call position becomes a zero-arg call
// (auto property) — unless its signature disallows it, in which case the
// lookup is treated as a miss.
func compilePkgRef(ctx *blockCtx, at *gox.PkgRef, x *ast.Ident, flags int) bool {
	if v, alias := lookupPkgRef(ctx, at, x); v != nil {
		cb := ctx.cb
		if (flags & clIdentLHS) != 0 {
			cb.VarRef(v, x)
		} else {
			autoprop := alias && (flags&clIdentAutoCall) != 0
			if autoprop && !gox.HasAutoProperty(v.Type()) {
				return false
			}
			cb.Val(v, x)
			if autoprop {
				cb.Call(0)
			}
		}
		return true
	}
	return false
}
// fnType lazily caches a callee's parameter layout so lambda and
// composite-literal arguments can be checked against the expected type.
type fnType struct {
	params   *types.Tuple // callee parameters
	n1       int          // count of fixed (non-variadic) parameters
	variadic bool         // last parameter is ...T
	inited   bool         // initWith has run
}

// arg returns the expected type of argument index i. For the variadic
// tail it returns the slice type when the call uses `...`, else the
// element type. Returns nil past the known parameters.
func (p *fnType) arg(i int, ellipsis bool) types.Type {
	if i < p.n1 {
		return p.params.At(i).Type()
	}
	if p.variadic {
		t := p.params.At(p.n1).Type()
		if ellipsis {
			return t
		}
		return t.(*types.Slice).Elem()
	}
	return nil
}

// init captures params/variadic from a signature; n1 excludes the
// variadic slot.
func (p *fnType) init(t *types.Signature) {
	p.params, p.variadic = t.Params(), t.Variadic()
	p.n1 = p.params.Len()
	if p.variadic {
		p.n1--
	}
}

// initWith initializes at most once from fnt, if gox can extract a
// matching signature for argument idx with nin inputs.
func (p *fnType) initWith(fnt types.Type, idx, nin int) {
	if p.inited {
		return
	}
	p.inited = true
	if t := gox.CheckSignature(fnt, idx, nin); t != nil {
		p.init(t)
	}
}
// compileCallExpr compiles fn(args...). The callee is pushed first; its
// type (top of stack) drives lambda/composite-literal argument typing.
// clCallWithTwoValue and a trailing `...` are translated to gox
// instruction flags.
func compileCallExpr(ctx *blockCtx, v *ast.CallExpr, inFlags int) {
	switch fn := v.Fun.(type) {
	case *ast.Ident:
		compileIdent(ctx, fn, clIdentAllowBuiltin|inFlags)
	case *ast.SelectorExpr:
		compileSelectorExpr(ctx, fn, 0)
	default:
		compileExpr(ctx, fn)
	}
	var fn fnType
	var fnt = ctx.cb.Get(-1).Type
	var flags gox.InstrFlags
	var ellipsis = v.Ellipsis != gotoken.NoPos
	if ellipsis {
		flags = gox.InstrFlagEllipsis
	}
	if (inFlags & clCallWithTwoValue) != 0 {
		flags |= gox.InstrFlagTwoValue
	}
	for i, arg := range v.Args {
		switch expr := arg.(type) {
		case *ast.LambdaExpr:
			// lambdas need the expected signature before they can compile
			fn.initWith(fnt, i, len(expr.Lhs))
			sig := checkLambdaFuncType(ctx, expr, fn.arg(i, true), clLambaArgument, v.Fun)
			compileLambdaExpr(ctx, expr, sig)
		case *ast.LambdaExpr2:
			fn.initWith(fnt, i, len(expr.Lhs))
			sig := checkLambdaFuncType(ctx, expr, fn.arg(i, true), clLambaArgument, v.Fun)
			compileLambdaExpr2(ctx, expr, sig)
		case *ast.CompositeLit:
			// untyped composite literals borrow the parameter's type
			fn.initWith(fnt, i, -1)
			compileCompositeLit(ctx, expr, fn.arg(i, ellipsis), true)
		default:
			compileExpr(ctx, arg)
		}
	}
	ctx.cb.CallWith(len(v.Args), flags, v)
}
// clLambaFlag names the syntactic position of a lambda literal; used only
// in error messages. (The "Lamba" spelling is preserved — it is part of
// existing identifiers.)
type clLambaFlag string

const (
	clLambaAssign   clLambaFlag = "assignment"
	clLambaField    clLambaFlag = "field value"
	clLambaArgument clLambaFlag = "argument"
)

// check lambda func type
// checkLambdaFuncType unwraps ftyp (through named types) to a signature
// whose result count matches the lambda's return expressions, or panics
// with a code error locating the offending lambda.
func checkLambdaFuncType(ctx *blockCtx, lambda ast.Expr, ftyp types.Type, flag clLambaFlag, toNode ast.Node) *types.Signature {
	typ := ftyp
retry:
	switch t := typ.(type) {
	case *types.Signature:
		if l, ok := lambda.(*ast.LambdaExpr); ok {
			if len(l.Rhs) != t.Results().Len() {
				break // result-count mismatch: fall through to the error
			}
		}
		return t
	case *types.Named:
		typ = t.Underlying()
		goto retry
	}
	src, _ := ctx.LoadExpr(toNode)
	err := ctx.newCodeErrorf(lambda.Pos(), "cannot use lambda literal as type %v in %v to %v", ftyp, flag, src)
	panic(err)
}
// compileLambda dispatches to the expression-bodied (LambdaExpr) or
// block-bodied (LambdaExpr2) compiler; other node kinds are silently
// ignored (callers have already validated the node).
func compileLambda(ctx *blockCtx, lambda ast.Expr, sig *types.Signature) {
	switch expr := lambda.(type) {
	case *ast.LambdaExpr:
		compileLambdaExpr(ctx, expr, sig)
	case *ast.LambdaExpr2:
		compileLambdaExpr2(ctx, expr, sig)
	}
}
// makeLambdaParams builds the parameter tuple for a lambda from its
// bound names lhs, taking each type from the expected tuple `in`.
// Panics with a too few/many-arguments code error when the counts differ;
// returns nil for a zero-parameter lambda.
func makeLambdaParams(ctx *blockCtx, pos token.Pos, lhs []*ast.Ident, in *types.Tuple) *types.Tuple {
	pkg := ctx.pkg
	n := len(lhs)
	if nin := in.Len(); n != nin {
		fewOrMany := "few"
		if n > nin {
			fewOrMany = "many"
		}
		has := make([]string, n)
		for i, v := range lhs {
			has[i] = v.Name
		}
		panic(ctx.newCodeErrorf(
			pos, "too %s arguments in lambda expression\n\thave (%s)\n\twant %v", fewOrMany, strings.Join(has, ", "), in))
	}
	if n == 0 {
		return nil
	}
	params := make([]*types.Var, n)
	for i, name := range lhs {
		params[i] = pkg.NewParam(name.Pos(), name.Name, in.At(i).Type())
	}
	return types.NewTuple(params...)
}
// makeLambdaResults builds an unnamed result tuple mirroring the types of
// `out`; returns nil when there are no results.
func makeLambdaResults(pkg *gox.Package, out *types.Tuple) *types.Tuple {
	n := out.Len()
	if n == 0 {
		return nil
	}
	vars := make([]*types.Var, 0, n)
	for idx := 0; idx < n; idx++ {
		vars = append(vars, pkg.NewParam(token.NoPos, "", out.At(idx).Type()))
	}
	return types.NewTuple(vars...)
}
// compileLambdaExpr compiles an expression-bodied lambda `(a, b) => expr`
// as a closure that evaluates and returns its Rhs expressions.
func compileLambdaExpr(ctx *blockCtx, v *ast.LambdaExpr, sig *types.Signature) {
	pkg := ctx.pkg
	params := makeLambdaParams(ctx, v.Pos(), v.Lhs, sig.Params())
	results := makeLambdaResults(pkg, sig.Results())
	ctx.cb.NewClosure(params, results, false).BodyStart(pkg)
	for _, v := range v.Rhs {
		compileExpr(ctx, v)
	}
	ctx.cb.Return(len(v.Rhs)).End()
}

// compileLambdaExpr2 compiles a block-bodied lambda `(a, b) => { ... }`;
// the body is loaded like a regular function body, and comments are
// restored afterwards.
func compileLambdaExpr2(ctx *blockCtx, v *ast.LambdaExpr2, sig *types.Signature) {
	pkg := ctx.pkg
	params := makeLambdaParams(ctx, v.Pos(), v.Lhs, sig.Params())
	results := makeLambdaResults(pkg, sig.Results())
	comments, once := ctx.cb.BackupComments()
	fn := ctx.cb.NewClosure(params, results, false)
	loadFuncBody(ctx, fn, v.Body)
	ctx.cb.SetComments(comments, once)
}

// compileFuncLit compiles an explicit func literal with its declared
// signature.
func compileFuncLit(ctx *blockCtx, v *ast.FuncLit) {
	cb := ctx.cb
	comments, once := cb.BackupComments()
	sig := toFuncType(ctx, v.Type, nil)
	fn := cb.NewClosureWith(sig)
	if body := v.Body; body != nil {
		loadFuncBody(ctx, fn, body)
		cb.SetComments(comments, once)
	}
}
// compileBasicLit compiles a literal. Go+ rational literals (`123r`)
// become untyped big.Int constants; everything else is forwarded as a
// go/ast basic literal.
func compileBasicLit(ctx *blockCtx, v *ast.BasicLit) {
	if v.Kind == token.RAT {
		val := v.Value
		bi, _ := new(big.Int).SetString(val[:len(val)-1], 10) // remove r suffix
		ctx.cb.UntypedBigInt(bi, v)
		return
	}
	ctx.cb.Val(&goast.BasicLit{Kind: gotoken.Token(v.Kind), Value: v.Value}, v)
}
// Composite-literal element kinds: plain values vs key:value pairs.
const (
	compositeLitVal    = 0
	compositeLitKeyVal = 1
)

// checkCompositeLitElts reports compositeLitKeyVal when any element is a
// key:value pair, otherwise compositeLitVal. (ctx is currently unused but
// kept for signature symmetry with the other compile helpers.)
func checkCompositeLitElts(ctx *blockCtx, elts []ast.Expr) (kind int) {
	for _, elt := range elts {
		if _, ok := elt.(*ast.KeyValueExpr); ok {
			return compositeLitKeyVal
		}
	}
	return compositeLitVal
}
// compileCompositeLitElts pushes every element of a composite literal.
// Untyped nested composite literals inherit the expected key/element
// type; in key:value mode, value-only elements get a None placeholder
// where the key would be.
func compileCompositeLitElts(ctx *blockCtx, elts []ast.Expr, kind int, expected *kvType) {
	for _, elt := range elts {
		if kv, ok := elt.(*ast.KeyValueExpr); ok {
			if key, ok := kv.Key.(*ast.CompositeLit); ok && key.Type == nil {
				compileCompositeLit(ctx, key, expected.Key(), false)
			} else {
				compileExpr(ctx, kv.Key)
			}
			if val, ok := kv.Value.(*ast.CompositeLit); ok && val.Type == nil {
				compileCompositeLit(ctx, val, expected.Elem(), false)
			} else {
				compileExpr(ctx, kv.Value)
			}
		} else {
			if kind == compositeLitKeyVal {
				ctx.cb.None() // placeholder key keeps the key/value pairing
			}
			if val, ok := elt.(*ast.CompositeLit); ok && val.Type == nil {
				compileCompositeLit(ctx, val, expected.Elem(), false)
			} else {
				compileExpr(ctx, elt)
			}
		}
	}
}
// compileStructLitInKeyVal compiles a struct literal written with field
// names. For each element it pushes the field index then the value
// (hence len(elts)<<1 operands for StructLit); lambda values are typed
// from the field's declared type.
func compileStructLitInKeyVal(ctx *blockCtx, elts []ast.Expr, t *types.Struct, typ types.Type) {
	for _, elt := range elts {
		kv := elt.(*ast.KeyValueExpr)
		name := kv.Key.(*ast.Ident).Name
		idx := lookupField(t, name)
		if idx >= 0 {
			ctx.cb.Val(idx)
		} else {
			log.Panicln("TODO: struct member not found -", name)
		}
		switch expr := kv.Value.(type) {
		case *ast.LambdaExpr, *ast.LambdaExpr2:
			sig := checkLambdaFuncType(ctx, expr, t.Field(idx).Type(), clLambaField, kv.Key)
			compileLambda(ctx, expr, sig)
		default:
			compileExpr(ctx, kv.Value)
		}
	}
	ctx.cb.StructLit(typ, len(elts)<<1, true)
}
func lookupField(t *types.Struct, name string) int {
for i, n := 0, t.NumFields(); i < n; i++ {
if fld := t.Field(i); fld.Name() == name {
return i
}
}
return -1
}
// kvType lazily derives the key/element types a composite literal's
// elements should have, from the literal's underlying type.
type kvType struct {
	underlying types.Type
	key, val   types.Type
	cached     bool // key/val computed
}

// required computes key/val once: slices and arrays are keyed by int,
// maps by their own key type. Unknown underlyings leave key/val nil.
func (p *kvType) required() *kvType {
	if !p.cached {
		p.cached = true
		switch t := p.underlying.(type) {
		case *types.Slice:
			p.key, p.val = types.Typ[types.Int], t.Elem()
		case *types.Array:
			p.key, p.val = types.Typ[types.Int], t.Elem()
		case *types.Map:
			p.key, p.val = t.Key(), t.Elem()
		}
	}
	return p
}

// Key returns the expected key type (nil when unknown).
func (p *kvType) Key() types.Type {
	return p.required().key
}

// Elem returns the expected element type (nil when unknown).
func (p *kvType) Elem() types.Type {
	return p.required().val
}
// getUnderlying returns typ's underlying type, forcing a deferred named
// type to load first when its underlying is not yet resolved.
func getUnderlying(ctx *blockCtx, typ types.Type) types.Type {
	u := typ.Underlying()
	if u == nil {
		if t, ok := typ.(*types.Named); ok {
			ctx.loadNamed(ctx.pkg, t)
			u = t.Underlying()
		}
	}
	return u
}
// compileCompositeLit compiles a composite literal. The type comes from
// the literal itself or, when untyped, from `expected` (unwrapping one
// pointer level, which adds a trailing & operation). With onlyStruct set
// (call-argument position), an untyped literal only borrows the expected
// type when it is a struct written in key:value form. A fully untyped
// {k: v} literal becomes a map literal.
func compileCompositeLit(ctx *blockCtx, v *ast.CompositeLit, expected types.Type, onlyStruct bool) {
	var hasPtr bool
	var typ, underlying types.Type
	var kind = checkCompositeLitElts(ctx, v.Elts)
	if v.Type != nil {
		typ = toType(ctx, v.Type)
		underlying = getUnderlying(ctx, typ)
	} else if expected != nil {
		if t, ok := expected.(*types.Pointer); ok {
			expected, hasPtr = t.Elem(), true
		}
		if onlyStruct {
			if kind == compositeLitKeyVal {
				t := getUnderlying(ctx, expected)
				if _, ok := t.(*types.Struct); ok { // can't omit non-struct type
					typ, underlying = expected, t
				}
			}
		} else {
			typ, underlying = expected, getUnderlying(ctx, expected)
		}
	}
	if t, ok := underlying.(*types.Struct); ok && kind == compositeLitKeyVal {
		compileStructLitInKeyVal(ctx, v.Elts, t, typ)
		if hasPtr {
			ctx.cb.UnaryOp(gotoken.AND)
		}
		return
	}
	compileCompositeLitElts(ctx, v.Elts, kind, &kvType{underlying: underlying})
	n := len(v.Elts)
	if typ == nil {
		if kind == compositeLitVal && n > 0 {
			panic("TODO: mapLit should be in {key: val, ...} form")
		}
		ctx.cb.MapLit(nil, n<<1)
		return
	}
	// n<<kind: key:value mode pushed two operands per element
	switch underlying.(type) {
	case *types.Slice:
		ctx.cb.SliceLit(typ, n<<kind, kind == compositeLitKeyVal)
	case *types.Array:
		ctx.cb.ArrayLit(typ, n<<kind, kind == compositeLitKeyVal)
	case *types.Map:
		ctx.cb.MapLit(typ, n<<1)
	case *types.Struct:
		ctx.cb.StructLit(typ, n, false)
	default:
		log.Panicln("compileCompositeLit: unknown type -", reflect.TypeOf(underlying))
	}
	if hasPtr {
		ctx.cb.UnaryOp(gotoken.AND)
	}
}
// compileSliceLit compiles a Go+ slice literal [a, b, c] with an
// inferred element type.
func compileSliceLit(ctx *blockCtx, v *ast.SliceLit) {
	for _, element := range v.Elts {
		compileExpr(ctx, element)
	}
	ctx.cb.SliceLit(nil, len(v.Elts))
}
// compileRangeExpr compiles `first:last:step` into a call to the builtin
// newRange(first, last, step); a missing first defaults to 0 and a
// missing step to 1.
func compileRangeExpr(ctx *blockCtx, v *ast.RangeExpr) {
	pkg, cb := ctx.pkg, ctx.cb
	cb.Val(pkg.Builtin().Ref("newRange"))
	if v.First == nil {
		ctx.cb.Val(0, v)
	} else {
		compileExpr(ctx, v.First)
	}
	compileExpr(ctx, v.Last)
	if v.Expr3 == nil {
		ctx.cb.Val(1, v)
	} else {
		compileExpr(ctx, v.Expr3)
	}
	cb.Call(3)
}
// Comprehension flavors, selected by the surrounding bracket and element
// shape.
const (
	comprehensionInvalid = iota
	comprehensionList    // [expr for ...]
	comprehensionMap     // {k: v for ...}
	comprehensionSelect  // {expr for ...} / {for ...} — first match
)

// comprehensionKind classifies v by its opening token: [ is a list;
// { is a map when the element is key:value, otherwise a select.
func comprehensionKind(v *ast.ComprehensionExpr) int {
	switch v.Tok {
	case token.LBRACK: // [
		return comprehensionList
	case token.LBRACE: // {
		if _, ok := v.Elt.(*ast.KeyValueExpr); ok {
			return comprehensionMap
		}
		return comprehensionSelect
	}
	panic("TODO: invalid comprehensionExpr")
}
// [expr for k, v <- container, cond]
// {for k, v <- container, cond}
// {expr for k, v <- container, cond}
// {kexpr: vexpr for k, v <- container, cond}
//
// compileComprehensionExpr lowers a comprehension to an immediately
// invoked closure: nested for-range loops (one per `for` clause, with an
// optional `if` for its condition) around a body that appends to
// _gop_ret (list), assigns into _gop_ret (map), or returns early
// (select). twoValue adds a boolean "found" result for select form.
func compileComprehensionExpr(ctx *blockCtx, v *ast.ComprehensionExpr, twoValue bool) {
	kind := comprehensionKind(v)
	pkg, cb := ctx.pkg, ctx.cb
	var results *types.Tuple
	var ret *gox.Param
	if v.Elt == nil {
		// `{for ...}` — existence test: closure returns only a bool
		boolean := pkg.NewParam(token.NoPos, "_gop_ok", types.Typ[types.Bool])
		results = types.NewTuple(boolean)
	} else {
		ret = pkg.NewAutoParam("_gop_ret")
		if kind == comprehensionSelect && twoValue {
			boolean := pkg.NewParam(token.NoPos, "_gop_ok", types.Typ[types.Bool])
			results = types.NewTuple(ret, boolean)
		} else {
			results = types.NewTuple(ret)
		}
	}
	cb.NewClosure(nil, results, false).BodyStart(pkg)
	if kind == comprehensionMap {
		// maps must be initialized before indexed assignment
		cb.VarRef(ret).ZeroLit(ret.Type()).Assign(1)
	}
	end := 0 // number of open blocks (loops + ifs) to close at the end
	for i := len(v.Fors) - 1; i >= 0; i-- {
		names := make([]string, 0, 2)
		forStmt := v.Fors[i]
		if forStmt.Key != nil {
			names = append(names, forStmt.Key.Name)
		} else {
			names = append(names, "_")
		}
		names = append(names, forStmt.Value.Name)
		cb.ForRange(names...)
		compileExpr(ctx, forStmt.X)
		cb.RangeAssignThen(forStmt.TokPos)
		if forStmt.Cond != nil {
			cb.If()
			if forStmt.Init != nil {
				compileStmt(ctx, forStmt.Init)
			}
			compileExpr(ctx, forStmt.Cond)
			cb.Then()
			end++
		}
		end++
	}
	switch kind {
	case comprehensionList:
		// _gop_ret = append(_gop_ret, elt)
		cb.VarRef(ret)
		cb.Val(pkg.Builtin().Ref("append"))
		cb.Val(ret)
		compileExpr(ctx, v.Elt)
		cb.Call(2).Assign(1)
	case comprehensionMap:
		// _gop_ret[key] = val
		cb.Val(ret)
		kv := v.Elt.(*ast.KeyValueExpr)
		compileExpr(ctx, kv.Key)
		cb.IndexRef(1)
		compileExpr(ctx, kv.Value)
		cb.Assign(1)
	default:
		if v.Elt == nil {
			// return true
			cb.Val(true)
			cb.Return(1)
		} else {
			// return elt, true
			compileExpr(ctx, v.Elt)
			n := 1
			if twoValue {
				cb.Val(true)
				n++
			}
			cb.Return(n)
		}
	}
	for i := 0; i < end; i++ {
		cb.End()
	}
	cb.Return(0).End().Call(0) // close closure and invoke it immediately
}
var (
	// tyError is the predeclared `error` interface type.
	tyError = types.Universe.Lookup("error").Type()
)
// compileErrWrapExpr lowers the Go+ error-wrap forms expr!, expr? and
// expr?:default. The wrapped call's results are captured into
// _gop_ret* plus _gop_err; when _gop_err != nil the expression panics
// (`!`), returns the error (`?`), or yields the default (`?:`).
// `!` and `?:` compile to an invoked closure; plain `?` inlines into the
// enclosing (error-returning) function and is rejected at global scope.
func compileErrWrapExpr(ctx *blockCtx, v *ast.ErrWrapExpr) {
	pkg, cb := ctx.pkg, ctx.cb
	useClosure := v.Tok == token.NOT || v.Default != nil
	if !useClosure && (cb.Scope().Parent() == types.Universe) {
		panic("TODO: can't use expr? in global")
	}
	compileExpr(ctx, v.X)
	x := cb.InternalStack().Pop()
	n := 0
	results, ok := x.Type.(*types.Tuple)
	if ok {
		n = results.Len() - 1 // value results, excluding the trailing error
	}
	var ret []*types.Var
	if n > 0 {
		// results are named _gop_ret, _gop_ret2, _gop_ret3, ...
		i, retName := 0, "_gop_ret"
		// NOTE(review): assigning []*gox.Param to a []*types.Var variable —
		// compiles only because gox.Param is (presumably) an alias of
		// types.Var; confirm against the gox package.
		ret = make([]*gox.Param, n)
		for {
			ret[i] = pkg.NewAutoParam(retName)
			i++
			if i >= n {
				break
			}
			retName = "_gop_ret" + strconv.Itoa(i+1)
		}
	}
	sig := types.NewSignature(nil, nil, types.NewTuple(ret...), false)
	if useClosure {
		cb.NewClosureWith(sig).BodyStart(pkg)
	} else {
		cb.CallInlineClosureStart(sig, 0, false)
	}
	cb.NewVar(tyError, "_gop_err")
	err := cb.Scope().Lookup("_gop_err")
	// _gop_ret*, _gop_err = <wrapped expression>
	for _, retVar := range ret {
		cb.VarRef(retVar)
	}
	cb.VarRef(err)
	cb.InternalStack().Push(x)
	cb.Assign(n+1, 1)
	cb.If().Val(err).CompareNil(gotoken.NEQ).Then()
	if v.Tok == token.NOT { // expr!
		cb.Val(pkg.Builtin().Ref("panic")).Val(err).Call(1).EndStmt() // TODO: wrap err
	} else if v.Default == nil { // expr?
		cb.Val(err).ReturnErr(true) // TODO: wrap err & return err
	} else { // expr?:val
		compileExpr(ctx, v.Default)
		cb.Return(1)
	}
	cb.End().Return(0).End()
	if useClosure {
		cb.Call(0)
	}
}
// -----------------------------------------------------------------------------
| JessonChan/gop |
<|start_filename|>src/worldgen.coffee<|end_filename|>
# Block "source" categories produced by the terrain functions.
# NOTHING must stay 0: getBlock relies on its falsiness.
NOTHING = 0
FLYING_ROCK = 1
GROUND = 2
# Per-chunk cache of block-source values (NOTHING / FLYING_ROCK / GROUND)
# so neighbor lookups during block resolution don't recompute noise.
class GeneratorState
  constructor: (chunkSize) ->
    @chunkSize = chunkSize
    @cacheSizeX = @chunkSize
    # one extra Y layer so callers can peek at the block directly above
    @cacheSizeY = @chunkSize + 1
    @cacheSizeZ = @chunkSize
    @blockSourceCache = new Uint8Array @cacheSizeX * @cacheSizeY * @cacheSizeZ

  # Fills the cache for the chunk whose world origin is (offX, offY, offZ),
  # querying gen.getBlockSource for every cached cell.
  init: (gen, offX, offY, offZ) ->
    {cacheSizeX, cacheSizeY, cacheSizeZ} = this
    @offX = offX
    @offY = offY
    @offZ = offZ
    sc = @blockSourceCache
    for cz in [0...cacheSizeZ] by 1
      for cy in [0...cacheSizeY] by 1
        for cx in [0...cacheSizeX] by 1
          source = gen.getBlockSource offX + cx, offY + cy, offZ + cz
          sc[cx + cy * cacheSizeX + cz * cacheSizeX * cacheSizeY] = source

  # Returns the cached source value at chunk-local (cx, cy, cz).
  getBlockSource: (cx, cy, cz) ->
    @blockSourceCache[cx + cy * @cacheSizeX + cz * @cacheSizeX * @cacheSizeY]
class WorldGeneratorProcess extends webglmc.Process
  # seed: integer seed for the Perlin generator driving all terrain noise.
  constructor: (seed) ->
    @perlin = new webglmc.PerlinGenerator seed
    @cachedState = null
    @cachedChunk = null
    @waterLevel = 16  # world Y below which empty space is filled with water
isFlyingRock: (x, y, z) ->
nx = x * 0.01
ny = y * 0.01
nz = z * 0.01
heightOff = @perlin.simpleNoise2D(nx * 3.0, nz * 3.0) * 0.2
mx = (nx % 1.0 + 1.0) % 1.0
my = ((ny + heightOff) % 1.4 + 1.4) % 1.4
mz = (nz % 1.0 + 1.0) % 1.0
# falloff from the top
if my > 0.9
return false
if my > 0.8
plateauFalloff = 1.0 - (my - 0.8) * 10
else
plateauFalloff = 1.0
# falloff from the center
a = (mx - 0.5) * 1.5
b = (my - 1.0) * 0.8
c = (mz - 0.5) * 1.5
centerFalloff = 0.1 / (a * a + b * b + c * c)
noise = @perlin.noise3D nx, ny * 0.5, nz, 4
density = noise * centerFalloff * plateauFalloff
density > 0.1
getGroundHeight: (x, z) ->
nx = x * 0.01
nz = z * 0.01
noise = @perlin.noise2D(nx, nz, 3) * 0.5 + 0.5
noise * 30
getGrassVariation: (x, y, z) ->
nx = x * 1.2
ny = y * 1.4
nz = z * 1.1
noise = @perlin.simpleNoise3D(nx, ny, nz) * 0.5 + 0.5
variation = Math.floor(noise * 4) + 1
webglmc.BLOCK_TYPES["grass0#{variation}"]
getRockVariation: (x, y, z) ->
nx = 0.3 + x * 1.1
ny = 0.4 + y * 1.1
nz = 0.5 + z * 1.05
noise = @perlin.simpleNoise3D(nx, ny, nz) * 0.5 + 0.5
noise = Math.floor(noise * 3)
if noise > 0.4
return webglmc.BLOCK_TYPES.rock01
return webglmc.BLOCK_TYPES.rock02
getBlockSource: (x, y, z) ->
# Ground level blocks
if y < this.getGroundHeight x, z
return GROUND
# Flying rocks
if this.isFlyingRock x, y, z
return FLYING_ROCK
NOTHING
getBlock: (state, cx, cy, cz) ->
x = state.offX + cx
y = state.offY + cy
z = state.offZ + cz
blockSource = state.getBlockSource cx, cy, cz
if !blockSource
if y < @waterLevel
return webglmc.BLOCK_TYPES.water
return webglmc.BLOCK_TYPES.air
if blockSource == FLYING_ROCK
if !state.getBlockSource cx, cy + 1, cz
return this.getGrassVariation x, y, z
return this.getRockVariation x, y, z
if blockSource == GROUND
if y < @waterLevel - 4
return webglmc.BLOCK_TYPES.stone
if @waterLevel - 1 <= y <= @waterLevel + 1
return webglmc.BLOCK_TYPES.sand
return this.getGrassVariation x, y, z
getGeneratorState: (offX, offY, offZ, chunkSize) ->
if !@cachedState || @cachedState.chunkSize != chunkSize
@cachedState = new GeneratorState chunkSize
@cachedState.init this, offX, offY, offZ
@cachedState
getChunkArray: (chunkSize) ->
dim = chunkSize * chunkSize * chunkSize
if !@cachedChunk || @cachedChunk.length != dim
@cachedChunk = new Uint8Array dim
@cachedChunk
generateChunk: (def) ->
{chunkSize, x, y, z} = def
offX = x * chunkSize
offY = y * chunkSize
offZ = z * chunkSize
# Since generateChunk is not reentrant and JavaScript does not
# support multithreading we can savely keep them around. These
# functions will cache them in the background so that we do not
# need any memory allocations during world generation
state = this.getGeneratorState offX, offY, offZ, chunkSize
chunk = this.getChunkArray chunkSize
for cz in [0...chunkSize] by 1
for cy in [0...chunkSize] by 1
for cx in [0...chunkSize] by 1
blockID = this.getBlock state, cx, cy, cz
chunk[cx + cy * chunkSize + cz * chunkSize * chunkSize] = blockID
this.notifyParent x: x, y: y, z: z, chunk: chunk
# Main-thread facade: fans chunk-generation requests out to a pool of
# WorldGeneratorProcess workers and feeds results back into the world.
class WorldGenerator
  constructor: (world) ->
    @world = world
    # Worker count is tunable via the ?workers= URL parameter (default 4).
    numberOfWorkers = parseInt webglmc.getRuntimeParameter 'workers', 4
    # Spawn a few workers for the actual world generation.
    @manager = new webglmc.ProcessManager numberOfWorkers,
      process: 'webglmc.WorldGeneratorProcess'
      args: [world.seed]
      onNotification: (data) =>
        this.processGeneratedChunk data.x, data.y, data.z, data.chunk
    @manager.addStatusDisplay('Worldgen worker load')

  # Asks the least-loaded worker to generate the chunk at (x, y, z).
  generateChunk: (x, y, z) ->
    @manager.getWorker().generateChunk
      x: x
      y: y
      z: z
      chunkSize: @world.chunkSize

  # Worker callback: hands a finished chunk to the world.
  processGeneratedChunk: (x, y, z, chunk) ->
    @world.setRequestedChunk x, y, z, chunk
# Export public API on the shared webglmc namespace (`self` works in both
# the main window and worker contexts).
public = self.webglmc ?= {}
public.WorldGenerator = WorldGenerator
public.WorldGeneratorProcess = WorldGeneratorProcess
<|start_filename|>src/debugpanel.coffee<|end_filename|>
# A single key/value row inside the debug panel table.
class Display
  constructor: (panel, name) ->
    @panel = panel
    # Keep a reference to the row so setVisible() can show/hide it later.
    @row = $('<tr></tr>').appendTo(panel.element)
    @keyElement = $('<th></th>').text(name).appendTo(@row)
    @valueElement = $('<td></td>').appendTo(@row)
    this.setText('')

  # Shows or hides the whole row.  This previously toggled `@element`,
  # which was never assigned, so visibility changes always failed.
  setVisible: (value) ->
    if value
      @row.show()
    else
      @row.hide()

  # Sets the displayed value text.
  setText: (value) ->
    @valueElement.text(value)

  # Returns the currently displayed value text.
  getText: ->
    @valueElement.text()
# On-screen table of named debug readouts, appended to the document body.
class DebugPanel
  constructor: ->
    @displays = {}
    @element = $('<table id=debugpanel></table>').appendTo('body')

  # Returns the display row for `name`, creating it on first use.
  addDisplay: (name) ->
    unless @displays[name]?
      @displays[name] = new Display this, name
    @displays[name]
# Lazily-parsed query-string parameters (?key=value&...), cached module-wide.
parameters = null

# Returns the value of the URL query parameter `key`, or `def` when absent.
getRuntimeParameter = (key, def = null) ->
  unless parameters?
    parameters = {}
    pairs = window.location.search.substr(1).split '&'
    for pair in pairs
      [name, value] = pair.split '=', 2
      parameters[name] = value
  parameters[key] ? def
# Times `callback` once and publishes the elapsed wall-clock time on the
# debug panel under "bench [<benchName>]".
bench = (benchName, callback) ->
  name = "bench [#{benchName}]"
  display = webglmc.debugPanel.addDisplay name
  now = Date.now()
  callback()
  # Date.now() already measures milliseconds; the old code divided by 1000
  # (yielding seconds) while still labelling the value "ms".
  display.setText "#{Date.now() - now}ms"
# Export public API on the shared webglmc namespace.
public = self.webglmc ?= {}
public.DebugPanel = DebugPanel
public.getRuntimeParameter = getRuntimeParameter
public.bench = bench
<|start_filename|>src/game.coffee<|end_filename|>
# Maps keyboard key codes (event.which) to abstract game actions; the Game
# class keeps a boolean per action and the handlers below flip them.
keyMapping =
  65: 'strafeLeft'      # A
  68: 'strafeRight'     # D
  87: 'moveForward'     # W
  83: 'moveBackward'    # S
  38: 'lookUp'          # Arrow Up
  40: 'lookDown'        # Arrow Down
  37: 'lookLeft'        # Arrow Left
  39: 'lookRight'       # Arrow Right
  69: 'putBlock'        # E
  81: 'removeBlock'     # Q
# Top-level game object: owns the camera, world and post-processor, handles
# keyboard input and drives the per-frame update/render cycle.
class Game
  constructor: ->
    # One boolean per action in keyMapping; true while the key is held.
    @actions = {}
    for code, action of keyMapping
      @actions[action] = false

  # Creates camera, world and post-processing pipeline.  Must run only after
  # the resource manager has finished loading (see run()).
  initGame: ->
    {engine} = webglmc
    @cam = new webglmc.Camera
    @cam.position = vec3.create([-20.0, 18.0, -20.0])
    @cam.lookAt vec3.create([-0.5, 18.0, 0.5])
    @world = new webglmc.World
    @currentSelection = null
    @processor = new webglmc.Processor webglmc.resmgr.resources['shaders/postprocess']

  # Binds global key handlers.
  initEventHandlers: ->
    $(window)
      .bind 'keydown', (event) =>
        this.onKeyDown event
      .bind 'keyup', (event) =>
        this.onKeyUp event

  # Marks the action as active; put/remove block act immediately on the
  # currently picked block.  Returns false to stop default browser handling.
  onKeyDown: (event) ->
    action = keyMapping[event.which]
    if action?
      @actions[action] = true
    if @currentSelection
      s = @currentSelection
      if action == 'removeBlock'
        # Block id 0 == empty.
        @world.setBlock s.x, s.y, s.z, 0
      else if action == 'putBlock'
        # nx/ny/nz is the neighbour cell on the hit face.
        @world.setBlock s.nx, s.ny, s.nz, webglmc.BLOCK_TYPES.stone
    false

  # Clears the action flag.  Returns false to stop default browser handling.
  onKeyUp: (event) ->
    action = keyMapping[event.which]
    if action?
      @actions[action] = false
    false

  # Waits for all resources, then boots the game and enters the main loop.
  run: ->
    webglmc.resmgr.wait =>
      this.initGame()
      this.initEventHandlers()
      this.mainloop()

  mainloop: ->
    webglmc.engine.mainloop (dt) =>
      this.updateGame dt
      this.render()

  # Applies held movement/look actions scaled by the frame delta, then
  # refreshes the camera, world and the block under the crosshair.
  updateGame: (dt) ->
    if @actions.moveForward
      @cam.moveForward dt * 10
    if @actions.moveBackward
      @cam.moveBackward dt * 10
    if @actions.strafeLeft
      @cam.strafeLeft dt * 10
    if @actions.strafeRight
      @cam.strafeRight dt * 10
    if @actions.lookUp
      @cam.rotateScreenY -dt * 0.5
    if @actions.lookDown
      @cam.rotateScreenY dt * 0.5
    if @actions.lookLeft
      @cam.rotateScreenX -dt * 0.5
    if @actions.lookRight
      @cam.rotateScreenX dt * 0.5
    @cam.apply()
    @world.update dt
    @world.requestMissingChunks()
    @currentSelection = @world.pickCloseBlockAtScreenCenter()

  # Renders the world (plus selection highlight) through the post-processor.
  render: ->
    {gl} = webglmc.engine
    webglmc.clear()
    @processor.push()
    @world.draw()
    if @currentSelection
      s = @currentSelection
      @world.drawBlockHighlight s.x, s.y, s.z, s.hit.side
    @processor.pop()
# Wires up the global singletons (debug panel, engine, resource manager,
# game) in dependency order and starts the game.
initEngineAndGame = (selector, debug) ->
  canvas = $(selector)[0]
  webglmc.debugPanel = new webglmc.DebugPanel()
  webglmc.engine = new webglmc.Engine(canvas, debug)
  webglmc.resmgr = webglmc.makeDefaultResourceManager()
  webglmc.game = new Game
  webglmc.game.run()

# Boot once the DOM is ready; ?debug=1 enables the WebGL debug context.
$(document).ready ->
  debug = webglmc.getRuntimeParameter('debug') == '1'
  initEngineAndGame '#viewport', debug
# Export slots for the global singletons; filled in by initEngineAndGame.
public = self.webglmc ?= {}
public.game = null
public.debugPanel = null
public.resmgr = null
public.engine = null
<|start_filename|>src/engine.coffee<|end_filename|>
# Cross-browser requestAnimationFrame (vendor prefixes for older browsers).
requestAnimationFrame = (
  window.requestAnimationFrame ||
  window.webkitRequestAnimationFrame ||
  window.mozRequestAnimationFrame ||
  window.oRequestAnimationFrame ||
  window.msRequestAnimationFrame)
# Creates a WebGL context for `canvas`, alerting the user and returning null
# when context creation throws.  With `debug` set the context is wrapped by
# WebGLDebugUtils so every GL error logs details and aborts rendering.
makeGLContext = (canvas, debug, options) ->
  try
    ctx = canvas.getContext('webgl', options) ||
      canvas.getContext('experimental-webgl', options)
  catch e
    alert "Error: This browser does not support WebGL"
    return null
  if debug
    ctx = WebGLDebugUtils.makeDebugContext ctx, (err, funcName, args) ->
      # Copy the arguments object into a real array for readable logging.
      args = (x for x in args)
      errorStr = WebGLDebugUtils.glEnumToString(err)
      console.error "WebGL Error: func=#{funcName} args=", args, " error=", errorStr
      console.trace?()
      throw "Aborting rendering after critical WebGL error"
  ctx
# Clears the color and depth buffers, optionally with an explicit RGBA
# clear color (defaults to opaque white).
clear = (color = webglmc.floatColorFromHex '#ffffff') ->
  {gl} = webglmc.engine
  [r, g, b, a] = color
  # Components are passed individually (no splats) because of a Chrome bug.
  gl.clearColor r, g, b, a
  gl.clear gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT
# Core render engine: owns the GL context, the model/view/projection matrix
# stacks, derived-matrix caches and the requestAnimationFrame main loop.
class Engine
  constructor: (canvas, debug = false) ->
    @debug = debug
    @canvas = canvas
    # Spinner shown while background work (e.g. worker calls) is pending.
    @throbber = $('<img src=assets/throbber.gif id=throbber>')
      .appendTo('body')
      .hide()
    @throbberLevel = 0
    @frameTimeDisplay = webglmc.debugPanel.addDisplay 'Frame time'
    @gl = makeGLContext canvas, @debug,
      antialias: false
    @width = @canvas.width
    @height = @canvas.height
    @aspect = @canvas.width / @canvas.height
    @gl.enable @gl.DEPTH_TEST
    @gl.depthFunc @gl.LEQUAL
    @gl.enable @gl.CULL_FACE
    @gl.cullFace @gl.BACK
    @gl.enable @gl.BLEND
    @gl.blendFunc @gl.SRC_ALPHA, @gl.ONE_MINUS_SRC_ALPHA
    @model = new MatrixStack
    @view = new MatrixStack
    @projection = new MatrixStack
    # Bumped on every matrix change; shaders compare it to skip redundant
    # uniform uploads (see flushUniforms).
    @_uniformVersion = 0
    this.markMVPDirty()
    console.debug 'Render canvas =', @canvas
    console.debug 'WebGL context =', @gl

  # Reference-counted throbber: shown on first push, hidden on last pop.
  pushThrobber: ->
    if @throbberLevel++ == 0
      @throbber.fadeIn()

  popThrobber: ->
    if --@throbberLevel == 0
      @throbber.fadeOut()

  # Invalidates every cached derived matrix; called by MatrixStack mutators.
  markMVPDirty: ->
    @_uniformVersion++
    @_frustum = null
    @_mvp = null
    @_modelView = null
    @_normal = null
    @_iview = null
    @_ivp = null

  getModelView: ->
    @_modelView ?= mat4.multiply @view.top, @model.top

  getModelViewProjection: ->
    @_mvp ?= mat4.multiply @projection.top, this.getModelView(), mat4.create()

  # NOTE(review): uses `=` rather than `?=`, so the normal matrix is
  # recomputed on every call despite the @_normal cache slot — confirm
  # whether caching was intended here.
  getNormal: ->
    @_normal = mat4.toInverseMat3 @model.top

  getCurrentFrustum: ->
    @_frustum ?= new webglmc.Frustum this.getModelViewProjection()

  getInverseView: ->
    @_iview ?= mat4.inverse @view.top, mat4.create()

  # NOTE(review): viewproj is multiplied out before the cache check, so the
  # multiply runs even on cache hits; only the inversion is cached.
  getInverseViewProjection: ->
    viewproj = mat4.multiply @projection.top, @view.top, mat4.create()
    @_ivp ?= mat4.inverse viewproj

  # Camera world position = translation column of the inverse view matrix.
  getCameraPos: ->
    iview = this.getInverseView()
    vec3.create [iview[12], iview[13], iview[14]]

  # Forward vector = negated third row of the view matrix.
  getForward: ->
    vec3.create [-@view.top[2], -@view.top[6], -@view.top[10]]

  # Uploads all matrix uniforms to the active shader, skipping the upload
  # when the shader already saw the current uniform version.
  flushUniforms: ->
    shader = webglmc.Shader.top()
    if shader._uniformVersion == @_uniformVersion
      return
    shader.uniform2f "uViewportSize", @width, @height
    shader.uniformMatrix4fv "uModelMatrix", @model.top
    shader.uniformMatrix4fv "uViewMatrix", @view.top
    shader.uniformMatrix4fv "uModelViewMatrix", this.getModelView()
    shader.uniformMatrix4fv "uProjectionMatrix", @projection.top
    shader.uniformMatrix4fv "uModelViewProjectionMatrix", this.getModelViewProjection()
    shader.uniformMatrix3fv "uNormalMatrix", this.getNormal()
    shader._uniformVersion = @_uniformVersion

  # Drives `iterate(dtSeconds)` once per animation frame and publishes the
  # frame time on the debug panel.
  # NOTE(review): the first dt mixes Date.now() with the rAF timestamp,
  # which on some browsers uses a different epoch (performance.now) —
  # confirm the initial frame delta is acceptable.
  mainloop: (iterate) ->
    lastTimestamp = Date.now()
    step = (timestamp) =>
      dt = timestamp - lastTimestamp
      @frameTimeDisplay.setText dt + 'ms'
      iterate dt / 1000
      lastTimestamp = timestamp
      requestAnimationFrame step
    requestAnimationFrame step
# A classic OpenGL-style matrix stack.  Every mutator invalidates the
# engine's cached derived matrices via markMVPDirty().
class MatrixStack
  constructor: ->
    @top = mat4.identity()
    @stack = []

  # Replaces the top matrix.
  set: (value) ->
    @top = value
    webglmc.engine.markMVPDirty()

  identity: ->
    this.set mat4.identity()

  # Multiplies the top matrix in place by `mat`.
  multiply: (mat) ->
    mat4.multiply @top, mat
    webglmc.engine.markMVPDirty()

  translate: (vector) ->
    mat4.translate @top, vector
    webglmc.engine.markMVPDirty()

  rotate: (angle, axis) ->
    mat4.rotate @top, angle, axis
    webglmc.engine.markMVPDirty()

  scale: (vector) ->
    mat4.scale @top, vector
    webglmc.engine.markMVPDirty()

  # NOTE(review): the branches look inverted — with no argument this pushes
  # mat4.create(null) (not a copy of the current top), and with an argument
  # it pushes a copy of `mat` without replacing the top or marking the MVP
  # dirty.  Verify intended semantics against callers before changing.
  push: (mat = null) ->
    if !mat
      @stack.push mat4.create mat
      @top = mat4.create mat
    else
      @stack.push mat4.create mat
    null

  # Restores the most recently pushed matrix as the new top.
  pop: ->
    @top = @stack.pop()
    webglmc.engine.markMVPDirty()
# Export public API on the shared webglmc namespace.
public = self.webglmc ?= {}
public.Engine = Engine
public.clear = clear
<|start_filename|>src/process.coffee<|end_filename|>
# Detect execution context: on the main thread (window exists) worker
# scripts live under compiled/; inside a worker they are siblings, and the
# message-handling glue at the bottom of this file must be installed.
if window?
  workerBase = 'compiled/'
  startWorkerSupport = false
else
  workerBase = './'
  startWorkerSupport = true
# Resolves a dotted global name (e.g. "webglmc.WorldGeneratorProcess") to
# the object it refers to, starting from the global `self`.
findClass = (name) ->
  obj = self
  for segment in name.split '.'
    obj = obj[segment]
  obj
# Spawns a web worker running process.js, initializes it with the given
# process class and constructor args, and returns a ProcessProxy whose
# method calls are forwarded to the worker.  'notify' messages invoke
# options.onNotification; 'console' messages are re-logged locally with a
# colored prefix so worker logs are visible in the main console.
startProcess = (options) ->
  args = options.args ? []
  callback = options.onNotification
  worker = new Worker workerBase + 'process.js'
  worker.addEventListener 'message', (event) =>
    {data} = event
    if data.type == 'notify'
      if callback?
        callback data.value, data.done
    else if data.type == 'console'
      console[data.level]("%c[#{options.process}]: ",
        'background: #D4F2F3; color: #133C3D', data.args...)
  worker.addEventListener 'error', (event) =>
    console.error 'Error in worker: ', event.message
  console.log "Starting process #{options.process} as worker args=", args
  worker.postMessage cmd: '__init__', worker: options.process, args: args
  return new ProcessProxy worker, options.process, options.onBeforeCall
# Client-side stand-in for a worker process: mirrors every prototype method
# of the process class (except constructor/run/notifyParent) with a stub
# that posts the call (name + args) to the worker instead of running it.
class ProcessProxy
  constructor: (worker, processClass, onBeforeCall) ->
    @_worker = worker
    for key, callable of findClass(processClass).prototype
      if key in ['constructor', 'run', 'notifyParent']
        continue
      # `do` captures the current `key` per iteration (loop-closure idiom).
      do (key) =>
        this[key] = (args...) ->
          onBeforeCall?(key, args)
          this._worker.postMessage cmd: key, args: args
          # Calls are fire-and-forget; results arrive via notifications.
          undefined
# Base class for code running inside a worker.  Subclasses override run()
# and call notifyParent() to stream results back to the main thread.
class Process
  # Posts a notification to the parent; `done` marks the request finished
  # (used by ProcessManager for load accounting).
  notifyParent: (value, done = true) ->
    postMessage type: 'notify', value: value, done: done

  # Optional startup hook; invoked once after the worker is initialized.
  run: ->
# Pool of identical worker processes with per-worker load tracking, an
# optional debug-panel readout and least-loaded dispatch.
class ProcessManager
  constructor: (workers, options) ->
    @workers = []
    @display = null
    @onNotification = options.onNotification
    # Outstanding request count per worker index.
    @load = {}
    for n in [0...workers]
      this.addWorker options

  # Spawns one worker, wrapping the user callbacks with throbber and load
  # bookkeeping.
  addWorker: (options) ->
    num = @workers.length
    @load[num] = 0
    @workers.push startProcess
      process: options.process
      args: options.args
      onBeforeCall: (name, args) =>
        this.updateDisplay()
        webglmc.engine.pushThrobber()
        options.onBeforeCall?(name, args)
        @load[num] += 1
      onNotification: (data, done) =>
        this.handleWorkerResult num, data, done

  # Returns the worker with the fewest outstanding requests.
  # (Object keys make `num` a string here; array indexing still works.)
  getWorker: ->
    workers = ([load, num] for num, load of @load)
    workers.sort (a, b) ->
      a[0] - b[0]
    @workers[workers[0][1]]

  # Worker notification hook: settles load accounting and forwards the data.
  handleWorkerResult: (num, data, done) ->
    webglmc.engine.popThrobber()
    if done
      @load[num] -= 1
    this.updateDisplay()
    this.onNotification data

  # Refreshes the per-worker load readout (no-op without a display).
  updateDisplay: ->
    if !@display
      return
    pieces = []
    for num, load of @load
      pieces.push "w(#{num}) = #{load}"
    @display.setText pieces.join(', ')

  # Attaches a debug-panel display showing the load of every worker.
  addStatusDisplay: (name) ->
    @display = webglmc.debugPanel.addDisplay name
    this.updateDisplay()
# Export public API on the shared webglmc namespace.
public = self.webglmc ?= {}
public.Process = Process
public.ProcessManager = ProcessManager
public.startProcess = startProcess
# Worker-side bootstrap: only runs when this script is loaded inside a web
# worker (see the window? check at the top of the file).
if startWorkerSupport
  importScripts '../lib/gl-matrix.js', 'perlin.js', 'world.js', 'worldgen.js'

  # The process instance, created on the '__init__' message; commands that
  # arrive before initialization are buffered in commandQueue.
  instance = null
  commandQueue = []

  # Workers have no console of their own here: forward log calls to the
  # parent thread as 'console' messages (re-emitted by startProcess).
  makeLogger = (level) ->
    (args...) -> postMessage type: 'console', level: level, args: args

  this.console =
    log: makeLogger 'log'
    debug: makeLogger 'debug'
    warn: makeLogger 'warn'
    error: makeLogger 'error'

  # Schedules instance.run() and replays any commands queued before init.
  # NOTE(review): commandQueue is not cleared after replay — harmless if
  # kickOff runs once, but verify.
  kickOff = ->
    setTimeout((-> instance.run()), 0)
    for [cmd, args] in commandQueue
      instance[cmd](args...)

  # Dispatches a command to the instance, queueing it when not yet created.
  # NOTE(review): when the method is missing this logs an error but still
  # attempts the call, which will throw — confirm whether it should return
  # early instead.
  executeCommand = (cmd, args) ->
    if instance
      if !instance[cmd]?
        console.error 'Tried to call unexisting callback name=', cmd
      instance[cmd](args...)
    else
      commandQueue.push [cmd, args]

  self.addEventListener 'message', (event) ->
    {data} = event
    if data.cmd == '__init__'
      cls = findClass data.worker
      instance = new cls data.args...
      console.log 'Started up args=', data.args
      kickOff()
    else if instance
      executeCommand data.cmd, data.args
<|start_filename|>src/perlin.coffee<|end_filename|>
# Ken Perlin's canonical permutation table: a fixed permutation of 0..255.
# The previous copy had "253, 9, 98" where the reference table reads
# "253, 19, 98" — duplicating 9 and dropping 19 — so it was no longer a
# permutation, which skews the gradient hashing.
defaultPermutationTable = [
  151, 160, 137, 91, 90, 15, 131, 13, 201, 95, 96, 53, 194, 233, 7, 225,
  140, 36, 103, 30, 69, 142, 8, 99, 37, 240, 21, 10, 23, 190, 6, 148,
  247, 120, 234, 75, 0, 26, 197, 62, 94, 252, 219, 203, 117, 35, 11, 32,
  57, 177, 33, 88, 237, 149, 56, 87, 174, 20, 125, 136, 171, 168, 68,
  175, 74, 165, 71, 134, 139, 48, 27, 166, 77, 146, 158, 231, 83, 111,
  229, 122, 60, 211, 133, 230, 220, 105, 92, 41, 55, 46, 245, 40, 244,
  102, 143, 54, 65, 25, 63, 161, 1, 216, 80, 73, 209, 76, 132, 187, 208,
  89, 18, 169, 200, 196, 135, 130, 116, 188, 159, 86, 164, 100, 109,
  198, 173, 186, 3, 64, 52, 217, 226, 250, 124, 123, 5, 202, 38, 147,
  118, 126, 255, 82, 85, 212, 207, 206, 59, 227, 47, 16, 58, 17, 182,
  189, 28, 42, 223, 183, 170, 213, 119, 248, 152, 2, 44, 154, 163, 70,
  221, 153, 101, 155, 167, 43, 172, 9, 129, 22, 39, 253, 19, 98, 108,
  110, 79, 113, 224, 232, 178, 185, 112, 104, 218, 246, 97, 228, 251,
  34, 242, 193, 238, 210, 144, 12, 191, 179, 162, 241, 81, 51, 145, 235,
  249, 14, 239, 107, 49, 192, 214, 31, 181, 199, 106, 157, 184, 84, 204,
  176, 115, 121, 50, 45, 127, 4, 150, 254, 138, 236, 205, 93, 222, 114,
  67, 29, 24, 72, 243, 141, 128, 195, 78, 66, 215, 61, 156, 180
]
# Simplex-noise gradient set: the 12 cube-edge directions plus 4 repeats to
# pad the list to 16 vectors.  Every component is -1/0/1, so storing the
# float literals in an Int8Array is lossless.
gradientVectors = new Int8Array [
  1.0, 1.0, 0.0
  -1.0, 1.0, 0.0
  1.0, -1.0, 0.0
  -1.0, -1.0, 0.0
  1.0, 0.0, 1.0
  -1.0, 0.0, 1.0
  1.0, 0.0, -1.0
  -1.0, 0.0, -1.0
  0.0, 1.0, 1.0
  0.0, -1.0, 1.0
  0.0, 1.0, -1.0
  0.0, -1.0, -1.0
  1.0, 1.0, 0.0
  0.0, -1.0, 1.0
  -1.0, 1.0, 0.0
  0.0, -1.0, -1.0
]
# Skew/unskew factors for 2D and 3D simplex noise (standard constants).
F2 = (0.5 * (Math.sqrt(3.0) - 1.0))
G2 = ((3.0 - Math.sqrt(3.0)) / 6.0)
F3 = (1.0 / 3.0)
G3 = (1.0 / 6.0)
# Largest unsigned 32-bit value; normalizes the PRNG output below.
RAND_MAX = Math.pow(2, 32) - 1
# Small multiply-with-carry PRNG.  Returns a closure producing a
# deterministic float sequence for a given seed (normalized by RAND_MAX).
fastRandom = (seed) ->
  hi = seed
  lo = 521288629
  ->
    hi = 36969 * (hi & 65535) + (hi >> 16)
    lo = 18000 * (lo & 65535) + (lo >> 16)
    ((hi << 16) + lo) / RAND_MAX
# In-place, seed-deterministic Fisher-Yates shuffle of `table`.
# The loop previously started at `table.length` — one past the last valid
# index.  On a typed array that first iteration read `undefined` and wrote
# it back (coerced to 0), corrupting one random entry per shuffle.  Iterate
# from the last index down to 1 instead.
randomizeTable = (table, seed) ->
  random = fastRandom seed
  for i in [table.length - 1...0]
    j = Math.floor random() * (i + 1)
    [table[i], table[j]] = [table[j], table[i]]
# Seeded 2D/3D simplex-noise generator (Ken Perlin's simplex noise with a
# seed-shuffled permutation table), plus fractal (multi-octave) variants.
class PerlinGenerator
  constructor: (seed) ->
    @seed = parseInt seed
    # Private copy of the canonical table, shuffled by the seed.
    @permutationTable = new Uint8Array defaultPermutationTable
    @period = @permutationTable.length
    randomizeTable @permutationTable, @seed

  # Single-octave 2D simplex noise; output is roughly in [-1, 1].
  simpleNoise2D: (x, y) ->
    noise = 0.0
    pt = @permutationTable
    gv = gradientVectors
    p = @period
    # Skew input space to locate the containing simplex cell.
    s = (x + y) * F2
    i = Math.floor(x + s)
    j = Math.floor(y + s)
    t = (i + j) * G2
    # Unskewed distances from the cell origin.
    x0 = x - (i - t)
    y0 = y - (j - t)
    # Pick the upper or lower triangle of the cell.
    if x0 > y0
      i1 = 1; j1 = 0
    else
      i1 = 0; j1 = 1
    # Offsets of the remaining two corners.
    x1 = x0 - i1 + G2
    y1 = y0 - j1 + G2
    x2 = x0 + G2 * 2.0 - 1.0
    y2 = y0 + G2 * 2.0 - 1.0
    # Hash the three corners into gradient indices (positive modulo).
    ii = (i % p + p) % p
    jj = (j % p + p) % p
    gi0 = pt[(ii + pt[jj % p]) % p] % 12
    gi1 = pt[(ii + i1 + pt[(jj + j1) % p]) % p] % 12
    gi2 = pt[(ii + 1 + pt[(jj + 1) % p]) % p] % 12
    # Accumulate each corner's contribution, attenuated radially.
    tt = 0.5 - x0 * x0 - y0 * y0
    if tt > 0.0
      gv0 = gv[gi0 * 3]
      gv1 = gv[gi0 * 3 + 1]
      noise += tt * tt * tt * tt * (gv0 * x0 + gv1 * y0)
    tt = 0.5 - x1 * x1 - y1 * y1
    if tt > 0.0
      gv0 = gv[gi1 * 3]
      gv1 = gv[gi1 * 3 + 1]
      noise += tt * tt * tt * tt * (gv0 * x1 + gv1 * y1)
    tt = 0.5 - x2 * x2 - y2 * y2
    if tt > 0.0
      gv0 = gv[gi2 * 3]
      gv1 = gv[gi2 * 3 + 1]
      noise += tt * tt * tt * tt * (gv0 * x2 + gv1 * y2)
    # Scale factor from the reference implementation.
    noise * 70.0

  # Single-octave 3D simplex noise; output is roughly in [-1, 1].
  simpleNoise3D: (x, y, z) ->
    noise = 0.0
    pt = @permutationTable
    gv = gradientVectors
    p = @period
    # Skew input space to locate the containing simplex cell.
    s = (x + y + z) * F3
    i = Math.floor x + s
    j = Math.floor y + s
    k = Math.floor z + s
    t = (i + j + k) * G3
    x0 = x - (i - t)
    y0 = y - (j - t)
    z0 = z - (k - t)
    # Rank the coordinates to pick which of the six tetrahedra we are in.
    if x0 >= y0
      if y0 >= z0
        i1 = 1; j1 = 0; k1 = 0; i2 = 1; j2 = 1; k2 = 0
      else if (x0 >= z0)
        i1 = 1; j1 = 0; k1 = 0; i2 = 1; j2 = 0; k2 = 1
      else
        i1 = 0; j1 = 0; k1 = 1; i2 = 1; j2 = 0; k2 = 1
    else
      if (y0 < z0)
        i1 = 0; j1 = 0; k1 = 1; i2 = 0; j2 = 1; k2 = 1
      else if (x0 < z0)
        i1 = 0; j1 = 1; k1 = 0; i2 = 0; j2 = 1; k2 = 1
      else
        i1 = 0; j1 = 1; k1 = 0; i2 = 1; j2 = 1; k2 = 0
    # Offsets of the remaining three corners.
    x1 = x0 - i1 + G3
    y1 = y0 - j1 + G3
    z1 = z0 - k1 + G3
    x2 = x0 - i2 + 2.0 * G3
    y2 = y0 - j2 + 2.0 * G3
    z2 = z0 - k2 + 2.0 * G3
    x3 = x0 - 1.0 + 3.0 * G3
    y3 = y0 - 1.0 + 3.0 * G3
    z3 = z0 - 1.0 + 3.0 * G3
    # Hash the four corners into gradient indices (positive modulo).
    ii = (i % p + p) % p
    jj = (j % p + p) % p
    kk = (k % p + p) % p
    gi0 = pt[(ii + pt[(jj + pt[kk % p]) % p]) % p] % 12
    gi1 = pt[(ii + i1 + pt[(jj + j1 + pt[(kk + k1) % p]) % p]) % p] % 12
    gi2 = pt[(ii + i2 + pt[(jj + j2 + pt[(kk + k2) % p]) % p]) % p] % 12
    gi3 = pt[(ii + 1 + pt[(jj + 1 + pt[(kk + 1) % p]) % p]) % p] % 12
    # Accumulate each corner's contribution, attenuated radially.
    tt = 0.6 - x0 * x0 - y0 * y0 - z0 * z0
    if tt > 0.0
      gv0 = gv[gi0 * 3]
      gv1 = gv[gi0 * 3 + 1]
      gv2 = gv[gi0 * 3 + 2]
      noise += tt * tt * tt * tt * (gv0 * x0 + gv1 * y0 + gv2 * z0)
    tt = 0.6 - x1 * x1 - y1 * y1 - z1 * z1
    if tt > 0.0
      gv0 = gv[gi1 * 3]
      gv1 = gv[gi1 * 3 + 1]
      gv2 = gv[gi1 * 3 + 2]
      noise += tt * tt * tt * tt * (gv0 * x1 + gv1 * y1 + gv2 * z1)
    tt = 0.6 - x2 * x2 - y2 * y2 - z2 * z2
    if tt > 0.0
      gv0 = gv[gi2 * 3]
      gv1 = gv[gi2 * 3 + 1]
      gv2 = gv[gi2 * 3 + 2]
      noise += tt * tt * tt * tt * (gv0 * x2 + gv1 * y2 + gv2 * z2)
    tt = 0.6 - x3 * x3 - y3 * y3 - z3 * z3
    if tt > 0.0
      gv0 = gv[gi3 * 3]
      gv1 = gv[gi3 * 3 + 1]
      gv2 = gv[gi3 * 3 + 2]
      noise += tt * tt * tt * tt * (gv0 * x3 + gv1 * y3 + gv2 * z3)
    # Scale factor from the reference implementation.
    noise * 32.0

  # Fractal 2D noise: sums `octaves` octaves with doubling frequency and
  # amplitude 1/freq.
  noise2D: (x, y, octaves = 1) ->
    total = 0.0
    freq = 1.0
    i = 0
    while i < octaves
      total += this.simpleNoise2D(x * freq, y * freq) / freq
      freq *= 2.0
      i++
    total

  # Fractal 3D noise: sums `octaves` octaves with doubling frequency and
  # amplitude 1/freq.
  noise3D: (x, y, z, octaves = 1) ->
    total = 0.0
    freq = 1.0
    i = 0
    while i < octaves
      total += this.simpleNoise3D(x * freq, y * freq, z * freq) / freq
      freq *= 2.0
      i++
    total
# Export public API on the shared webglmc namespace.
public = self.webglmc ?= {}
public.PerlinGenerator = PerlinGenerator
<|start_filename|>src/atlas.coffee<|end_filename|>
# A finished texture atlas: the packed texture plus the named sub-rectangle
# slices produced by AtlasBuilder.makeAtlas().
class Atlas
  constructor: (texture, slices) ->
    @texture = texture
    @slices = slices

  # Releases the underlying texture; safe to call more than once.
  destroy: ->
    return unless @texture
    @texture.destroy()
    @texture = null
# One node of the binary texture-atlas packing tree.  A node is either a
# leaf rectangle (free or in use) or has been split into two children.
class AtlasNode
  constructor: (x, y, width, height) ->
    @x = x
    @y = y
    @width = width
    @height = height
    @left = null
    @right = null
    @inUse = false

  # Tries to reserve a width x height rectangle in this subtree.  Returns
  # the leaf node that now holds it, or null when it does not fit.
  insertChild: (width, height) ->
    # Inner node: delegate to the children.
    if @left?
      return @left.insertChild(width, height) or
        @right.insertChild(width, height)
    # Leaf: reject when occupied or too small.
    return null if @inUse or width > @width or height > @height
    # Exact fit: claim this leaf.
    if width == @width and height == @height
      @inUse = true
      return this
    # Split along the axis with more slack, then recurse into the child
    # that hugs the requested rectangle (always the left one).
    if @width - width > @height - height
      @left = new AtlasNode @x, @y, width, @height
      @right = new AtlasNode @x + width, @y, @width - width, @height
    else
      @left = new AtlasNode @x, @y, @width, height
      @right = new AtlasNode @x, @y + height, @width, @height - height
    @left.insertChild width, height
# Packs images into a single canvas using a binary tree (AtlasNode) and
# turns the result into an Atlas texture with named slices.
class AtlasBuilder
  constructor: (width, height, options = {}) ->
    @canvas = $('<canvas></canvas>')
      .attr('width', width)
      .attr('height', height)[0]
    @ctx = @canvas.getContext('2d')
    # Extra empty pixels around each image.
    @padding = options.padding ? 0
    # When set, each image is drawn as a 3x3 tile grid so texture filtering
    # at the slice edges samples the image itself instead of neighbours.
    @gridAdd = options.gridAdd ? false
    @slices = {}
    @root = new AtlasNode 0, 0, width, height

  # Blits `img` at (x, y); with gridAdd it is repeated in a 3x3 grid.
  drawOnCanvas: (x, y, img, gridAdd) ->
    times = if gridAdd then 3 else 1
    for ry in [0...times]
      posy = y + (ry * img.height)
      for rx in [0...times]
        posx = x + (rx * img.width)
        @ctx.drawImage img, 0, 0, img.width, img.height,
          posx, posy, img.width, img.height

  # Packs `img` under `key`.  Returns false when the atlas is full.  The
  # recorded slice points at the center tile (when gridAdd) inside the
  # padding.
  add: (key, img, gridAdd = @gridAdd) ->
    width = img.width + @padding * 2
    height = img.height + @padding * 2
    if gridAdd
      width *= 3
      height *= 3
    node = @root.insertChild width, height
    if !node
      return false
    this.drawOnCanvas node.x, node.y, img, gridAdd
    @slices[key] =
      x: node.x + (if gridAdd then img.width else 0) + @padding
      y: node.y + (if gridAdd then img.height else 0) + @padding
      width: img.width
      height: img.height
    true

  # Uploads the canvas as a texture and converts the recorded slices
  # (flipping Y from canvas space to GL texture space).
  makeAtlas: (options = {}) ->
    texture = webglmc.Texture.fromImage @canvas, options
    slices = {}
    for key, def of @slices
      slices[key] = texture.slice def.x, texture.height - def.y - def.height,
        def.width, def.height
    new Atlas texture, slices
# Export public API on the shared webglmc namespace.
public = self.webglmc ?= {}
public.AtlasBuilder = AtlasBuilder
<|start_filename|>src/ray.coffee<|end_filename|>
# Result of a ray intersection test.
class RaycastHit
  # distance: parametric distance along the ray to the hit point.
  # side: which AABB face was hit ('left', 'right', 'bottom', 'top',
  # 'far', 'near' or 'inside'), or null when not applicable.
  constructor: (distance, side = null) ->
    @side = side
    @distance = distance
# A ray (origin + normalized direction) with static constructors and an
# axis-aligned bounding-box intersection test.
class Ray
  constructor: (origin, direction) ->
    @origin = origin
    @direction = vec3.normalize direction

  # Builds a ray starting at `origin` and pointing towards `otherPoint`.
  # The subtraction previously computed origin - otherPoint (gl-matrix
  # subtract is first-minus-second), which pointed the ray away from the
  # target point.
  this.betweenTwoPoints = (origin, otherPoint) ->
    direction = vec3.subtract otherPoint, origin, vec3.create()
    return new Ray origin, direction

  # Builds a picking ray through screen pixel (x, y), starting on the near
  # plane and pointing along the camera's forward vector.  Returns null
  # when the inverse view-projection is unavailable or degenerate (w == 0).
  this.fromScreenSpaceNearToFar = (x, y) ->
    {engine} = webglmc
    ivp = engine.getInverseViewProjection()
    if !ivp
      return null
    # Pixel -> normalized device coordinates (flip Y).
    vec = vec4.create()
    vec[0] = x * 2.0 / engine.width - 1.0
    vec[1] = (engine.height - y) * 2.0 / engine.height - 1.0
    vec[2] = 0.0
    vec[3] = 1.0
    origin = mat4.multiplyVec4 ivp, vec, vec4.create()
    if !origin[3]
      return null
    new Ray vec4.toVec3(origin), engine.getForward()

  # Tests this ray against an AABB ({vec1: min, vec2: max}).  Returns the
  # nearest RaycastHit (with the face name) or null.  When checkInside is
  # set and the origin lies inside the box, reports an 'inside' hit at
  # distance 0.  The third origin-inside comparison previously read
  # @origin[0] > vec1[2], comparing the X coordinate against the box's Z
  # bound; it now correctly uses @origin[2].
  intersectsAABB: (aabb, checkInside = true) ->
    {vec1, vec2} = aabb
    lowt = 0.0
    didHit = false
    sideHit = null

    if checkInside &&
        @origin[0] > vec1[0] && @origin[1] > vec1[1] && @origin[2] > vec1[2] &&
        @origin[0] < vec2[0] && @origin[1] < vec2[1] && @origin[2] < vec2[2]
      return new RaycastHit 0, 'inside'

    # Slab test for one face: s is the axis of the face plane, sa/sb the
    # other two axes used to confirm the hit lies within the face bounds.
    checkHit = (vec, s, sa, sb, side) =>
      if vec == vec1
        cond = @origin[s] <= vec[s] && @direction[s] > 0.0
      else
        cond = @origin[s] >= vec[s] && @direction[s] < 0.0
      if !cond
        return
      t = (vec[s] - @origin[s]) / @direction[s]
      if t >= 0.0
        hit = vec3.scale @direction, t, vec3.create()
        hit = vec3.add hit, @origin
        if (!didHit || t < lowt) &&
            hit[sa] >= vec1[sa] && hit[sa] <= vec2[sa] &&
            hit[sb] >= vec1[sb] && hit[sb] <= vec2[sb]
          didHit = true
          lowt = t
          sideHit = side

    checkHit vec1, 0, 1, 2, 'left'
    checkHit vec2, 0, 1, 2, 'right'
    checkHit vec1, 1, 0, 2, 'bottom'
    checkHit vec2, 1, 0, 2, 'top'
    checkHit vec1, 2, 0, 1, 'far'
    checkHit vec2, 2, 0, 1, 'near'

    if didHit then new RaycastHit(lowt, sideHit) else null
# Export public API on the shared webglmc namespace.
public = self.webglmc ?= {}
public.Ray = Ray
public.RaycastHit = RaycastHit
<|start_filename|>src/resmgr.coffee<|end_filename|>
# Turns a relative URL into an absolute one by prefixing the directory part
# of the document base URI; http(s)/file URLs pass through unchanged.
forceAbsolute = (url) ->
  if !/^(https?|file):\/\//.test url
    url = document.baseURI.match(/^(.*)\//)[0] + url
  url
# Asynchronous resource loader for images, textures and shaders.  Resources
# are keyed by "<type>/<absolute filename>" and may additionally be aliased
# by a short name.  Two string interpolations in this class had been
# corrupted to the literal text "#(unknown)", which (a) collapsed every
# resource key of one type to the same string, breaking caching and
# callback routing, and (b) ruined the guessType error message; both are
# restored to interpolate the filename.
class ResourceManager
  constructor: ->
    @resourceDefs = {}
    @callbacks = {}
    @resources = {}
    @loaded = 0
    @total = 0

  # Registers a resource for loading.  `def.type` is guessed from the file
  # extension unless given explicitly; `callback` fires once the resource
  # is available (immediately when it is already loaded).
  add: (shortName, filename, def = {}, callback = null) ->
    typeSource = 'explicit'
    if !def.type?
      def.type = this.guessType filename
      typeSource = 'extension'
    def.shortName = shortName
    def.filename = forceAbsolute(filename)
    def.key = "#{def.type}/#{def.filename}"
    console.debug "Requesting resource '#{webglmc.autoShortenFilename def.filename}' [type=#{def.type}, from=#{typeSource}]"
    # Fast path: already loaded — register the alias and call back now.
    # NOTE(review): the alias is stored in @resourceDefs here while
    # triggerLoading stores loaded objects in @resources[shortName];
    # confirm whether @resources was intended.
    if callback && @resources[def.key]?
      if shortName && !@resourceDefs[shortName]?
        @resourceDefs[shortName] = @resources[def.key]
      return callback(@resources[def.key])
    @resourceDefs[def.key] = def
    delete @resources[def.key]
    @total++
    if callback?
      (@callbacks[def.key] ?= []).push callback
    this.triggerLoading def.key

  # Registers many resources; each entry is an argument list for add().
  addFromList: (resources) ->
    for args in resources
      this.add.apply this, args

  # Maps a filename extension to a loader type.
  guessType: (filename) ->
    return 'image' if /\.(png|gif|jpe?g)$/.test filename
    return 'texture' if /\.texture$/.test filename
    return 'shader' if /\.glsl$/.test filename
    console.error "Could not guess type from resource #{filename}"

  # Invokes `callback` once all currently registered resources are loaded
  # (immediately when nothing is pending).
  wait: (callback) ->
    if this.doneLoading()
      callback()
    else
      (@callbacks.__all__ ?= []).push callback

  # Kicks off the type-specific loader for `key` and fans the result out to
  # the registered callbacks and waiters.
  triggerLoading: (key) ->
    def = @resourceDefs[key]
    this.loaders[def.type] this, def, (obj) =>
      if def.shortName?
        @resources[def.shortName] = obj
      @resources[key] = obj
      callbacks = @callbacks[key]
      delete @callbacks[key]
      @loaded++
      if callbacks
        for callback in callbacks
          callback(obj)
      if this.doneLoading()
        this.notifyWaiters()

  doneLoading: ->
    @loaded >= @total

  # Fires and clears all wait() callbacks.
  notifyWaiters: ->
    callbacks = @callbacks.__all__ || []
    delete @callbacks.__all__
    for callback in callbacks
      callback()

  # Loader implementations, one per resource type; each calls `callback`
  # with the loaded object.
  loaders:
    image: (mgr, def, callback) ->
      rv = new Image()
      rv.onload = =>
        console.debug "Loaded image from '#{webglmc.autoShortenFilename def.filename}' [dim=#{rv.width}x#{rv.height}] ->", rv
        callback rv
      rv.src = def.filename

    shader: (mgr, def, callback) ->
      webglmc.loadShader def.filename, (shader) ->
        callback shader

    # A ".texture" resource loads its backing image (def.image, or the
    # filename with the .texture suffix stripped) and wraps it.
    texture: (mgr, def, callback) ->
      imageFilename = def.image
      if !imageFilename
        imageFilename = def.filename.match(/^(.*)\.texture$/)[1]
      mgr.add null, imageFilename, {}, (image) =>
        callback webglmc.Texture.fromImage(image, def)
# Export public API on the shared webglmc namespace.
public = self.webglmc ?= {}
public.ResourceManager = ResourceManager
| rendergather/webgl-meincraft |
<|start_filename|>include/evfibers/eio.h<|end_filename|>
/********************************************************************
Copyright 2013 <NAME> <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
********************************************************************/
#ifndef _FBR_EIO_H_
#define _FBR_EIO_H_
/**
* @file evfibers/eio.h
* This file contains API for libeio fiber wrappers.
*
* Wrapper functions are not documented as they clone the libeio prototypes and
* their documenting would result in useless copy'n'paste here. libeio
* documentation can be used as a reference on this functions. The only
* difference is that first argument in the wrappers is always fiber context,
* and eio_cb and data pointer are passed internally, and so are not present in
* the prototypes.
*/
#ifdef __cplusplus
extern "C" {
#endif
#include <evfibers/config.h>
#ifndef FBR_EIO_ENABLED
# error "This build of libevfibers lacks support for libeio"
#endif
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <sys/statvfs.h>
#ifdef FBR_USE_EMBEDDED_EIO
#include <evfibers/libeio_embedded.h>
#else
#include <eio.h>
#endif
#include <evfibers/fiber.h>
/**
* eio custom callback function type.
*/
typedef eio_ssize_t (*fbr_eio_custom_func_t)(void *data);
/**
* eio event.
*
* This event struct can represent an eio event.
* @see fbr_ev_upcast
* @see fbr_ev_wait
*/
struct fbr_ev_eio {
	eio_req *req; /*!< the libeio request itself */
	fbr_eio_custom_func_t custom_func; /*!< user function to run via fbr_eio_custom() */
	void *custom_arg; /*!< opaque argument handed to custom_func */
	struct fbr_ev_base ev_base; /*!< common event base, used with fbr_ev_wait() */
};
/**
* Initializer for eio event.
*
* This functions properly initializes fbr_ev_eio struct. You should not do
* it manually.
* @see fbr_ev_eio
* @see fbr_ev_wait
*/
void fbr_ev_eio_init(FBR_P_ struct fbr_ev_eio *ev, eio_req *req);
/**
* Initialization routine for libeio fiber wrapper.
*
* This functions initializes libeio and sets up the necessary glue code to
* interact with libev (and in turn libevfibers).
*
* Must be called only once, uses EV_DEFAULT event loop internally, but any
* fiber scheduler can interact with libeio independently.
* @see fbr_ev_eio
* @see fbr_ev_wait
*/
void fbr_eio_init();
/* Path-based operations. */
int fbr_eio_open(FBR_P_ const char *path, int flags, mode_t mode, int pri);
int fbr_eio_truncate(FBR_P_ const char *path, off_t offset, int pri);
int fbr_eio_chown(FBR_P_ const char *path, uid_t uid, gid_t gid, int pri);
int fbr_eio_chmod(FBR_P_ const char *path, mode_t mode, int pri);
int fbr_eio_mkdir(FBR_P_ const char *path, mode_t mode, int pri);
int fbr_eio_rmdir(FBR_P_ const char *path, int pri);
int fbr_eio_unlink(FBR_P_ const char *path, int pri);
int fbr_eio_utime(FBR_P_ const char *path, eio_tstamp atime, eio_tstamp mtime,
		int pri);
int fbr_eio_mknod(FBR_P_ const char *path, mode_t mode, dev_t dev, int pri);
int fbr_eio_link(FBR_P_ const char *path, const char *new_path, int pri);
int fbr_eio_symlink(FBR_P_ const char *path, const char *new_path, int pri);
int fbr_eio_rename(FBR_P_ const char *path, const char *new_path, int pri);
int fbr_eio_mlock(FBR_P_ void *addr, size_t length, int pri);

/* Descriptor-based operations. */
int fbr_eio_close(FBR_P_ int fd, int pri);
int fbr_eio_sync(FBR_P_ int pri);
int fbr_eio_fsync(FBR_P_ int fd, int pri);
int fbr_eio_fdatasync(FBR_P_ int fd, int pri);
int fbr_eio_futime(FBR_P_ int fd, eio_tstamp atime, eio_tstamp mtime, int pri);
int fbr_eio_ftruncate(FBR_P_ int fd, off_t offset, int pri);
int fbr_eio_fchmod(FBR_P_ int fd, mode_t mode, int pri);
int fbr_eio_fchown(FBR_P_ int fd, uid_t uid, gid_t gid, int pri);
int fbr_eio_dup2(FBR_P_ int fd, int fd2, int pri);
ssize_t fbr_eio_seek(FBR_P_ int fd, off_t offset, int whence, int pri);
ssize_t fbr_eio_read(FBR_P_ int fd, void *buf, size_t length, off_t offset,
		int pri);
ssize_t fbr_eio_write(FBR_P_ int fd, void *buf, size_t length, off_t offset,
		int pri);
int fbr_eio_mlockall(FBR_P_ int flags, int pri);
int fbr_eio_msync(FBR_P_ void *addr, size_t length, int flags, int pri);
int fbr_eio_readlink(FBR_P_ const char *path, char *buf, size_t size, int pri);
int fbr_eio_realpath(FBR_P_ const char *path, char *buf, size_t size, int pri);

/* stat family. */
int fbr_eio_stat(FBR_P_ const char *path, EIO_STRUCT_STAT *statdata, int pri);
int fbr_eio_lstat(FBR_P_ const char *path, EIO_STRUCT_STAT *statdata, int pri);
int fbr_eio_fstat(FBR_P_ int fd, EIO_STRUCT_STAT *statdata, int pri);
int fbr_eio_statvfs(FBR_P_ const char *path, EIO_STRUCT_STATVFS *statdata,
		int pri);
int fbr_eio_fstatvfs(FBR_P_ int fd, EIO_STRUCT_STATVFS *statdata, int pri);

/* Miscellaneous.  Note: fbr_eio_readahead was previously declared twice;
 * the redundant duplicate declaration has been removed. */
int fbr_eio_readahead(FBR_P_ int fd, off_t offset, size_t length, int pri);
int fbr_eio_sendfile(FBR_P_ int out_fd, int in_fd, off_t in_offset,
		size_t length, int pri);
int fbr_eio_syncfs(FBR_P_ int fd, int pri);
int fbr_eio_sync_file_range(FBR_P_ int fd, off_t offset, size_t nbytes,
		unsigned int flags, int pri);
int fbr_eio_fallocate(FBR_P_ int fd, int mode, off_t offset, off_t len,
		int pri);
eio_ssize_t fbr_eio_custom(FBR_P_ fbr_eio_custom_func_t func, void *data,
		int pri);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>bench/condvar.c<|end_filename|>
/********************************************************************
Copyright 2013 <NAME> <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
********************************************************************/
#include <stdio.h>
#include <string.h>
#include <limits.h>
#include <sys/stat.h>
#include <unistd.h>
#include <ev.h>
#include <errno.h>
#include <evfibers_private/fiber.h>
/* Shared state for the condvar ping-pong benchmark: two fibers bounce
 * control back and forth via (mutex1, cond1) and (mutex2, cond2), while
 * a third fiber samples the round-trip counter once per second. */
struct fiber_arg {
	struct fbr_mutex mutex1;	/* protects cond1 waits */
	struct fbr_cond_var cond1;	/* signalled by fiber1 */
	int cond1_set;			/* predicate for cond1 (avoids lost wakeups) */
	struct fbr_mutex mutex2;	/* protects cond2 waits */
	struct fbr_cond_var cond2;	/* signalled by fiber2 */
	int cond2_set;			/* predicate for cond2 */
	size_t count;			/* total ping-pong iterations */
};
/* "Ping" half of the benchmark: announce cond1, then block until the
 * peer fiber raises cond2_set, counting one completed round trip. */
static void cond_fiber1(FBR_P_ void *_arg)
{
	struct fiber_arg *ctx = _arg;

	for (;;) {
		ctx->cond1_set = 1;
		fbr_cond_signal(FBR_A_ &ctx->cond1);
		/* The predicate loop guards against spurious wakeups. */
		while (!ctx->cond2_set) {
			fbr_mutex_lock(FBR_A_ &ctx->mutex2);
			fbr_cond_wait(FBR_A_ &ctx->cond2, &ctx->mutex2);
			fbr_mutex_unlock(FBR_A_ &ctx->mutex2);
		}
		ctx->cond2_set = 0;
		ctx->count++;
	}
}
/* "Pong" half of the benchmark: mirror image of cond_fiber1, signalling
 * cond2 and waiting on cond1. */
static void cond_fiber2(FBR_P_ void *_arg)
{
	struct fiber_arg *ctx = _arg;

	for (;;) {
		ctx->cond2_set = 1;
		fbr_cond_signal(FBR_A_ &ctx->cond2);
		/* The predicate loop guards against spurious wakeups. */
		while (!ctx->cond1_set) {
			fbr_mutex_lock(FBR_A_ &ctx->mutex1);
			fbr_cond_wait(FBR_A_ &ctx->cond1, &ctx->mutex1);
			fbr_mutex_unlock(FBR_A_ &ctx->mutex1);
		}
		ctx->cond1_set = 0;
		ctx->count++;
	}
}
/* Once per second, print the number of ping-pong iterations completed in
 * the last second; after max_samples samples, ask libev to stop the loop. */
static void stats_fiber(FBR_P_ void *_arg)
{
	struct fiber_arg *arg = _arg;
	size_t last;
	size_t diff;
	int count = 0;
	int max_samples = 100;
	for (;;) {
		last = arg->count;
		fbr_sleep(FBR_A_ 1.0);
		diff = arg->count - last;
		/* diff is size_t (unsigned): %zu is the correct conversion;
		 * the previous %zd is for signed ssize_t. */
		printf("%zu\n", diff);
		if (count++ > max_samples) {
			ev_break(fctx->__p->loop, EVBREAK_ALL);
		}
	}
}
/* Benchmark driver: set up the shared state, start the two ping-pong
 * fibers plus the stats fiber, run the libev loop until stats_fiber
 * breaks it, then tear everything down. */
int main()
{
	struct fbr_context context;
	fbr_id_t fiber1, fiber2, fiber_stats;
	int retval;
	(void)retval;	/* only used inside asserts; silences NDEBUG builds */
	struct fiber_arg arg = {
		.count = 0
	};
	fbr_init(&context, EV_DEFAULT);
	fbr_mutex_init(&context, &arg.mutex1);
	fbr_mutex_init(&context, &arg.mutex2);
	fbr_cond_init(&context, &arg.cond1);
	fbr_cond_init(&context, &arg.cond2);
	fiber1 = fbr_create(&context, "fiber1", cond_fiber1, &arg, 0);
	assert(!fbr_id_isnull(fiber1));
	/* fbr_transfer starts the fiber; it runs until its first yield. */
	retval = fbr_transfer(&context, fiber1);
	assert(0 == retval);
	fiber2 = fbr_create(&context, "fiber2", cond_fiber2, &arg, 0);
	assert(!fbr_id_isnull(fiber2));
	retval = fbr_transfer(&context, fiber2);
	assert(0 == retval);
	fiber_stats = fbr_create(&context, "fiber_stats", stats_fiber, &arg, 0);
	assert(!fbr_id_isnull(fiber_stats));
	retval = fbr_transfer(&context, fiber_stats);
	assert(0 == retval);
	ev_run(EV_DEFAULT, 0);
	fbr_cond_destroy(&context, &arg.cond1);
	fbr_cond_destroy(&context, &arg.cond2);
	fbr_mutex_destroy(&context, &arg.mutex1);
	fbr_mutex_destroy(&context, &arg.mutex2);
	fbr_destroy(&context);
	return 0;
}
<|start_filename|>cmake/FindLibVrb.cmake<|end_filename|>
# Locate the vrb (virtual ring buffer) library.
# The LIBVRB_DIR environment variable may point at a custom install prefix.
find_path(LIBVRB_INCLUDE_DIR vrb.h
	HINTS $ENV{LIBVRB_DIR}
	PATH_SUFFIXES include
	PATHS /usr/local /usr
	)
find_library(LIBVRB_LIBRARY
	NAMES vrb
	HINTS $ENV{LIBVRB_DIR}
	PATH_SUFFIXES lib
	PATHS /usr/local /usr
	)
# Sets LIBVRB_FOUND and reports/fails per the standard find_package protocol.
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(LibVrb DEFAULT_MSG LIBVRB_LIBRARY LIBVRB_INCLUDE_DIR)
mark_as_advanced(LIBVRB_INCLUDE_DIR LIBVRB_LIBRARY)
<|start_filename|>src/fiber.c<|end_filename|>
/********************************************************************
Copyright 2013 <NAME> <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
********************************************************************/
#include <evfibers/config.h>
#include <sys/mman.h>
#include <fcntl.h>
#include <libgen.h>
#include <assert.h>
#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <strings.h>
#include <err.h>
#ifdef HAVE_VALGRIND_H
#include <valgrind/valgrind.h>
#else
#define RUNNING_ON_VALGRIND (0)
#define VALGRIND_STACK_REGISTER(a,b) (void)0
#endif
#ifdef FBR_EIO_ENABLED
#include <evfibers/eio.h>
#endif
#include <evfibers_private/fiber.h>
/* Removal-safe BSD queue(3) iteration macros, provided for libcs whose
 * <sys/queue.h> lacks them: the successor is cached before the loop body
 * runs, so the current element may be unlinked/freed safely. */
#ifndef LIST_FOREACH_SAFE
#define LIST_FOREACH_SAFE(var, head, field, next_var)              \
	for ((var) = ((head)->lh_first);                           \
		(var) && ((next_var) = ((var)->field.le_next), 1); \
		(var) = (next_var))
#endif
#ifndef TAILQ_FOREACH_SAFE
#define TAILQ_FOREACH_SAFE(var, head, field, next_var)                 \
	for ((var) = ((head)->tqh_first);                              \
		(var) ? ({ (next_var) = ((var)->field.tqe_next); 1; }) \
		: 0;                                                   \
		(var) = (next_var))
#endif
/* Assert that we are currently executing in the root fiber (i.e. called
 * from a libev callback, not from inside a user fiber). */
#define ENSURE_ROOT_FIBER do {                                   \
	assert(fctx->__p->sp->fiber == &fctx->__p->root);        \
} while (0)
/* The fiber at the top of the call stack, and its packed id. */
#define CURRENT_FIBER (fctx->__p->sp->fiber)
#define CURRENT_FIBER_ID (fbr_id_pack(CURRENT_FIBER))
/* True when the caller one level down the stack is the root fiber. */
#define CALLED_BY_ROOT ((fctx->__p->sp - 1)->fiber == &fctx->__p->root)
/* Unpack an id into *ptr, returning `value` (with f_errno set) on failure. */
#define unpack_transfer_errno(value, ptr, id)       \
	do {                                        \
		if (-1 == fbr_id_unpack(fctx, ptr, id)) \
			return (value);             \
	} while (0)
/* Return helpers that keep fctx->f_errno coherent on every exit path. */
#define return_success(value)                \
	do {                                 \
		fctx->f_errno = FBR_SUCCESS; \
		return (value);              \
	} while (0)
#define return_error(value, code)        \
	do {                             \
		fctx->f_errno = (code);  \
		return (value);          \
	} while (0)
/* The null fiber id: generation 0, no fiber pointer. */
const fbr_id_t FBR_ID_NULL = {0, NULL};
/* Default mkstemp() template for fbr_buffer backing files (tmpfs). */
static const char default_buffer_pattern[] = "/dev/shm/fbr_buffer.XXXXXXXXX";
static fbr_id_t fbr_id_pack(struct fbr_fiber *fiber)
{
return (struct fbr_id_s){.g = fiber->id, .p = fiber};
}
/* Validate an id and optionally return its fiber pointer.
 * Returns 0 on success; -1 with f_errno = FBR_ENOFIBER when the fiber
 * has since been reclaimed (generation mismatch).
 * NOTE(review): id.p is dereferenced without a NULL check — presumably
 * callers never pass FBR_ID_NULL here; confirm before relying on it. */
static int fbr_id_unpack(FBR_P_ struct fbr_fiber **ptr, fbr_id_t id)
{
	struct fbr_fiber *fiber = id.p;
	if (fiber->id != id.g)
		return_error(-1, FBR_ENOFIBER);
	if (ptr)
		*ptr = id.p;
	return 0;
}
/* libev async callback that drains the pending-fibers queue from the
 * root fiber.  Exactly one fiber is transferred per invocation; the
 * async is re-armed first so remaining entries run on later loop turns. */
static void pending_async_cb(EV_P_ ev_async *w, _unused_ int revents)
{
	struct fbr_context *fctx;
	struct fbr_id_tailq_i *item;
	fctx = (struct fbr_context *)w->data;
	int retval;
	ENSURE_ROOT_FIBER;
	if (TAILQ_EMPTY(&fctx->__p->pending_fibers)) {
		/* Queue drained: park the watcher until new work arrives. */
		ev_async_stop(EV_A_ &fctx->__p->pending_async);
		return;
	}
	item = TAILQ_FIRST(&fctx->__p->pending_fibers);
	assert(item->head == &fctx->__p->pending_fibers);
	/* item shall be removed from the queue by a destructor, which shall be
	 * set by the procedure demanding delayed execution. Destructor
	 * guarantees removal upon the reclaim of fiber. */
	ev_async_send(EV_A_ &fctx->__p->pending_async);
	retval = fbr_transfer(FBR_A_ item->id);
	if (-1 == retval && FBR_ENOFIBER != fctx->f_errno) {
		/* A reclaimed fiber is expected here; anything else is not. */
		fbr_log_e(FBR_A_ "libevfibers: unexpected error trying to call"
				" a fiber by id: %s",
				fbr_strerror(FBR_A_ fctx->f_errno));
	}
}
/* Allocate `size` bytes tracked by fiber `in`: a mem_pool header is
 * prepended and linked into the fiber's pool list so the memory is
 * released automatically when the fiber is reclaimed.  Aborts on OOM. */
static void *allocate_in_fiber(FBR_P_ size_t size, struct fbr_fiber *in)
{
	struct mem_pool *pool_entry;
	pool_entry = malloc(size + sizeof(struct mem_pool));
	if (NULL == pool_entry) {
		fbr_log_e(FBR_A_ "libevfibers: unable to allocate %zu bytes\n",
				size + sizeof(struct mem_pool));
		abort();
	}
	/* Self-pointer doubles as a canary checked by fbr_free_in_fiber(). */
	pool_entry->ptr = pool_entry;
	pool_entry->destructor = NULL;
	pool_entry->destructor_context = NULL;
	LIST_INSERT_HEAD(&in->pool, pool_entry, entries);
	/* User memory starts right after the header. */
	return pool_entry + 1;
}
/* Default logger: writes "timestamp LEVEL fiber-name message" to stdout
 * (stderr for errors), dropping messages above the logger's threshold. */
static void stdio_logger(FBR_P_ struct fbr_logger *logger,
		enum fbr_log_level level, const char *format, va_list ap)
{
	struct fbr_fiber *self;
	FILE *out;
	char *label;
	ev_tstamp now;

	if (level > logger->level)
		return;
	self = CURRENT_FIBER;

	if (FBR_LOG_ERROR == level) {
		label = "ERROR";
		out = stderr;
	} else if (FBR_LOG_WARNING == level) {
		label = "WARNING";
		out = stdout;
	} else if (FBR_LOG_NOTICE == level) {
		label = "NOTICE";
		out = stdout;
	} else if (FBR_LOG_INFO == level) {
		label = "INFO";
		out = stdout;
	} else if (FBR_LOG_DEBUG == level) {
		label = "DEBUG";
		out = stdout;
	} else {
		label = "?????";
		out = stdout;
	}

	now = ev_now(fctx->__p->loop);
	fprintf(out, "%.6f %-7s %-16s ", now, label, self->name);
	vfprintf(out, format, ap);
	fprintf(out, "\n");
}
/* Initialise a fiber context bound to the given libev loop: sets up the
 * root pseudo-fiber, the default stdio logger (NOTICE level), the
 * pending-fibers async watcher, and the fiber-local key bitmap.
 * NOTE(review): the malloc() result is not checked — OOM here would
 * dereference NULL below. */
void fbr_init(FBR_P_ struct ev_loop *loop)
{
	struct fbr_fiber *root;
	struct fbr_logger *logger;
	char *buffer_pattern;
	fctx->__p = malloc(sizeof(struct fbr_context_private));
	LIST_INIT(&fctx->__p->reclaimed);
	LIST_INIT(&fctx->__p->root.children);
	LIST_INIT(&fctx->__p->root.pool);
	TAILQ_INIT(&fctx->__p->root.destructors);
	TAILQ_INIT(&fctx->__p->pending_fibers);
	root = &fctx->__p->root;
	strncpy(root->name, "root", FBR_MAX_FIBER_NAME - 1);
	fctx->__p->last_id = 0;
	root->id = fctx->__p->last_id++;
	/* NULL coro: the root context is captured lazily on first transfer. */
	coro_create(&root->ctx, NULL, NULL, NULL, 0);
	logger = allocate_in_fiber(FBR_A_ sizeof(struct fbr_logger), root);
	logger->logv = stdio_logger;
	logger->level = FBR_LOG_NOTICE;
	fctx->logger = logger;
	fctx->__p->sp = fctx->__p->stack;
	fctx->__p->sp->fiber = root;
	/* Temporarily enable backtraces so the root stack entry gets trace
	 * info, then restore the default (disabled) below. */
	fctx->__p->backtraces_enabled = 1;
	fill_trace_info(FBR_A_ &fctx->__p->sp->tinfo);
	fctx->__p->loop = loop;
	fctx->__p->pending_async.data = fctx;
	fctx->__p->backtraces_enabled = 0;
	/* All fiber-local keys start out free (bit set == free). */
	memset(&fctx->__p->key_free_mask, 0xFF,
			sizeof(fctx->__p->key_free_mask));
	ev_async_init(&fctx->__p->pending_async, pending_async_cb);
	buffer_pattern = getenv("FBR_BUFFER_FILE_PATTERN");
	if (buffer_pattern)
		fctx->__p->buffer_file_pattern = buffer_pattern;
	else
		fctx->__p->buffer_file_pattern = default_buffer_pattern;
}
/* Translate a library error code into a static, human-readable string. */
const char *fbr_strerror(_unused_ FBR_P_ enum fbr_error_code code)
{
	if (FBR_SUCCESS == code)
		return "Success";
	if (FBR_EINVAL == code)
		return "Invalid argument";
	if (FBR_ENOFIBER == code)
		return "No such fiber";
	if (FBR_ESYSTEM == code)
		return "System error, consult system errno";
	if (FBR_EBUFFERMMAP == code)
		return "Failed to mmap two adjacent regions";
	if (FBR_ENOKEY == code)
		return "Fiber-local key does not exist";
	if (FBR_EPROTOBUF == code)
		return "Protobuf unpacking error";
	if (FBR_EBUFFERNOSPACE == code)
		return "Not enough space in the buffer";
	if (FBR_EEIO == code)
		return "libeio request error";
	return "Unknown error";
}
/* Variadic convenience wrappers that forward to the context's logger at
 * a fixed severity.  Whether anything is emitted depends on the logger's
 * configured level. */

/* Log at ERROR severity. */
void fbr_log_e(FBR_P_ const char *format, ...)
{
	va_list ap;
	va_start(ap, format);
	(*fctx->logger->logv)(FBR_A_ fctx->logger, FBR_LOG_ERROR, format, ap);
	va_end(ap);
}
/* Log at WARNING severity. */
void fbr_log_w(FBR_P_ const char *format, ...)
{
	va_list ap;
	va_start(ap, format);
	(*fctx->logger->logv)(FBR_A_ fctx->logger, FBR_LOG_WARNING, format, ap);
	va_end(ap);
}
/* Log at NOTICE severity. */
void fbr_log_n(FBR_P_ const char *format, ...)
{
	va_list ap;
	va_start(ap, format);
	(*fctx->logger->logv)(FBR_A_ fctx->logger, FBR_LOG_NOTICE, format, ap);
	va_end(ap);
}
/* Log at INFO severity. */
void fbr_log_i(FBR_P_ const char *format, ...)
{
	va_list ap;
	va_start(ap, format);
	(*fctx->logger->logv)(FBR_A_ fctx->logger, FBR_LOG_INFO, format, ap);
	va_end(ap);
}
/* Log at DEBUG severity. */
void fbr_log_d(FBR_P_ const char *format, ...)
{
	va_list ap;
	va_start(ap, format);
	(*fctx->logger->logv)(FBR_A_ fctx->logger, FBR_LOG_DEBUG, format, ap);
	va_end(ap);
}
/* Initialise a wait-queue item to reference `fiber`, with no event
 * attached yet. */
void id_tailq_i_set(_unused_ FBR_P_
		struct fbr_id_tailq_i *item,
		struct fbr_fiber *fiber)
{
	item->id = fbr_id_pack(fiber);
	item->ev = NULL;
}
/* Reclaim every direct child of `fiber`.
 * fbr_reclaim() unlinks the child from this children list (and relinks
 * it into the reclaimed list) via fiber_cleanup(), so a plain
 * LIST_FOREACH would read the next pointer of an already-unlinked node.
 * LIST_FOREACH_SAFE caches the successor before the body runs. */
static void reclaim_children(FBR_P_ struct fbr_fiber *fiber)
{
	struct fbr_fiber *f, *x;
	LIST_FOREACH_SAFE(f, &fiber->children, entries.children, x) {
		fbr_reclaim(FBR_A_ fbr_id_pack(f));
	}
}
static void fbr_free_in_fiber(_unused_ FBR_P_ _unused_ struct fbr_fiber *fiber,
void *ptr, int destructor);
/* Tear down a fiber context: reclaim all fibers, run destructors for the
 * root fiber's pooled allocations, free the stacks of reclaimed fibers,
 * and finally free the private context itself. */
void fbr_destroy(FBR_P)
{
	struct fbr_fiber *fiber, *x;
	struct mem_pool *p, *x2;
	/* Reclaiming children moves them onto the reclaimed list below. */
	reclaim_children(FBR_A_ &fctx->__p->root);
	LIST_FOREACH_SAFE(p, &fctx->__p->root.pool, entries, x2) {
		/* p + 1 is the user pointer; 1 == run the destructor. */
		fbr_free_in_fiber(FBR_A_ &fctx->__p->root, p + 1, 1);
	}
	LIST_FOREACH_SAFE(fiber, &fctx->__p->reclaimed, entries.reclaimed, x) {
		free(fiber->stack);
		free(fiber);
	}
	free(fctx->__p);
}
/* Toggle capture of backtrace information (normalised to 0/1). */
void fbr_enable_backtraces(FBR_P_ int enabled)
{
	fctx->__p->backtraces_enabled = enabled ? 1 : 0;
}
/* Cancel a prepared-but-unarrived event: running its destructor unlinks
 * the item from whatever wait queue it sits on (see item_dtor). */
static void cancel_ev(_unused_ FBR_P_ struct fbr_ev_base *ev)
{
	fbr_destructor_remove(FBR_A_ &ev->item.dtor, 1 /* call it */);
}
/* Mark an event as arrived for a fiber blocked in fbr_ev_wait(): both the
 * per-fiber flag (wakes the wait loop) and the per-event flag are set. */
static void post_ev(_unused_ FBR_P_ struct fbr_fiber *fiber,
		struct fbr_ev_base *ev)
{
	assert(NULL != fiber->ev.waiting);
	fiber->ev.arrived = 1;
	ev->arrived = 1;
}
/* This callback shouldn't be called if the watcher has been stopped
 * properly; it is installed by finish_ev() as a tripwire so a watcher
 * that fires after its fbr_ev_wait() completed aborts loudly instead of
 * corrupting state. */
static void ev_abort_cb(_unused_ EV_P_ ev_watcher *w, _unused_ int event)
{
	(void)event;
	/* Newline-terminate the diagnostic so it is not fused with
	 * subsequent output before abort(). */
	fprintf(stderr, "libevfibers: libev callback called for pending "
			"watcher (%p), which is no longer being awaited via "
			"fbr_ev_wait()\n", w);
	abort();
}
/* Generic libev watcher callback: recover the fbr event from w->data,
 * mark it arrived and transfer control to the waiting fiber. */
static void ev_watcher_cb(_unused_ EV_P_ ev_watcher *w, _unused_ int event)
{
	struct fbr_fiber *fiber;
	struct fbr_ev_watcher *ev = w->data;
	struct fbr_context *fctx = ev->ev_base.fctx;
	int retval;
	ENSURE_ROOT_FIBER;
	retval = fbr_id_unpack(FBR_A_ &fiber, ev->ev_base.id);
	if (-1 == retval) {
		/* The waiter should have stopped this watcher before being
		 * reclaimed; a stale id here is a library invariant breach. */
		fbr_log_e(FBR_A_ "libevfibers: fiber is about to be called by"
				" the watcher callback, but it's id is not valid: %s",
				fbr_strerror(FBR_A_ fctx->f_errno));
		abort();
	}
	post_ev(FBR_A_ fiber, &ev->ev_base);
	retval = fbr_transfer(FBR_A_ fbr_id_pack(fiber));
	assert(0 == retval);
}
/* Free memory obtained from allocate_in_fiber(): validate the header
 * canary, unlink from the fiber's pool and optionally run the entry's
 * destructor.  NULL is a no-op, mirroring free(). */
static void fbr_free_in_fiber(_unused_ FBR_P_ _unused_ struct fbr_fiber *fiber,
		void *ptr, int destructor)
{
	struct mem_pool *pool_entry = NULL;
	if (NULL == ptr)
		return;
	/* The header sits immediately before the user pointer. */
	pool_entry = (struct mem_pool *)ptr - 1;
	if (pool_entry->ptr != pool_entry) {
		fbr_log_e(FBR_A_ "libevfibers: address %p does not look like "
				"fiber memory pool entry", ptr);
		/* Under valgrind, let it report the bad access instead. */
		if (!RUNNING_ON_VALGRIND)
			abort();
	}
	LIST_REMOVE(pool_entry, entries);
	if (destructor && pool_entry->destructor)
		pool_entry->destructor(FBR_A_ ptr, pool_entry->destructor_context);
	free(pool_entry);
}
/* Release everything a fiber owns on reclaim: unlink it from its parent's
 * children list, run its registered destructors, then free its memory
 * pool (with per-entry destructors). */
static void fiber_cleanup(FBR_P_ struct fbr_fiber *fiber)
{
	struct mem_pool *p, *x;
	struct fbr_destructor *dtor;
	/* coro_destroy(&fiber->ctx); */
	LIST_REMOVE(fiber, entries.children);
	TAILQ_FOREACH(dtor, &fiber->destructors, entries) {
		dtor->func(FBR_A_ dtor->arg);
	}
	LIST_FOREACH_SAFE(p, &fiber->pool, entries, x) {
		fbr_free_in_fiber(FBR_A_ fiber, p + 1, 1);
	}
}
/* Remove every stack entry that references `fiber` from the call stack,
 * compacting the remaining entries downward.
 * NOTE(review): after a removal the loop still advances `sp`, so two
 * consecutive entries for the same fiber would leave the second one in
 * place — presumably a fiber never occupies adjacent slots; confirm. */
static void filter_fiber_stack(FBR_P_ struct fbr_fiber *fiber)
{
	struct fbr_stack_item *sp;
	for (sp = fctx->__p->stack; sp < fctx->__p->sp; sp++) {
		if (sp->fiber == fiber) {
			/* Shift entries sp+1..top down by one slot. */
			memmove(sp, sp + 1, (fctx->__p->sp - sp) * sizeof(*sp));
			fctx->__p->sp--;
		}
	}
}
/* Actually reclaim a fiber: record where it was reclaimed, reclaim its
 * children, run cleanup, bump its generation (invalidating outstanding
 * ids), move it to the reclaimed list and purge it from the call stack.
 * If the fiber reclaims itself, control never returns here: it yields
 * away for good. */
static int do_reclaim(FBR_P_ struct fbr_fiber *fiber)
{
#if 0
	struct fbr_fiber *f;
#endif
	fill_trace_info(FBR_A_ &fiber->reclaim_tinfo);
	reclaim_children(FBR_A_ fiber);
	fiber_cleanup(FBR_A_ fiber);
	/* New generation: every previously packed id for this fiber now
	 * fails fbr_id_unpack(). */
	fiber->id = fctx->__p->last_id++;
#if 0
	LIST_FOREACH(f, &fctx->__p->reclaimed, entries.reclaimed) {
		assert(f != fiber);
	}
#endif
	LIST_INSERT_HEAD(&fctx->__p->reclaimed, fiber, entries.reclaimed);
	filter_fiber_stack(FBR_A_ fiber);
	if (CURRENT_FIBER == fiber)
		fbr_yield(FBR_A);
	return_success(0);
}
/* Reclaim the fiber identified by `id`.  If the fiber is inside a
 * no-reclaim section, block on its reclaim_cond until the section ends
 * (setting want_reclaim so the fiber can cooperate).  Returns 0 if the
 * fiber is reclaimed (or already gone), -1 with f_errno on a bad id.
 * Fix: the early return inside the wait loop previously left the local
 * mutex locked and undestroyed. */
int fbr_reclaim(FBR_P_ fbr_id_t id)
{
	struct fbr_fiber *fiber;
	struct fbr_mutex mutex;
	int retval;
	unpack_transfer_errno(-1, &fiber, id);
	fbr_mutex_init(FBR_A_ &mutex);
	fbr_mutex_lock(FBR_A_ &mutex);
	while (fiber->no_reclaim > 0) {
		fiber->want_reclaim = 1;
		assert("Attempt to reclaim self while no_reclaim is set would"
				" block forever" && fiber != CURRENT_FIBER);
		if (-1 == fbr_id_unpack(FBR_A_ NULL, id) &&
				FBR_ENOFIBER == fctx->f_errno) {
			/* Fiber vanished while we waited: release the local
			 * mutex before returning. */
			fbr_mutex_unlock(FBR_A_ &mutex);
			fbr_mutex_destroy(FBR_A_ &mutex);
			return_success(0);
		}
		retval = fbr_cond_wait(FBR_A_ &fiber->reclaim_cond, &mutex);
		assert(0 == retval);
		(void)retval;
	}
	fbr_mutex_unlock(FBR_A_ &mutex);
	fbr_mutex_destroy(FBR_A_ &mutex);
	if (-1 == fbr_id_unpack(FBR_A_ NULL, id) &&
			FBR_ENOFIBER == fctx->f_errno)
		return_success(0);
	return do_reclaim(FBR_A_ fiber);
}
/* Leave one level of a no-reclaim section (counterpart of
 * fbr_set_noreclaim).  When the outermost level is left, wake every
 * fiber blocked in fbr_reclaim() on this fiber.
 * NOTE(review): no_reclaim is decremented without an underflow check —
 * presumably callers pair set_noreclaim/set_reclaim strictly; confirm. */
int fbr_set_reclaim(FBR_P_ fbr_id_t id)
{
	struct fbr_fiber *fiber;
	unpack_transfer_errno(-1, &fiber, id);
	fiber->no_reclaim--;
	if (0 == fiber->no_reclaim)
		fbr_cond_broadcast(FBR_A_ &fiber->reclaim_cond);
	return_success(0);
}
/* Enter a no-reclaim section for the fiber (nestable: a counter, not a
 * flag).  While the counter is positive, fbr_reclaim() on it blocks. */
int fbr_set_noreclaim(FBR_P_ fbr_id_t id)
{
	struct fbr_fiber *fiber;
	unpack_transfer_errno(-1, &fiber, id);
	fiber->no_reclaim++;
	return_success(0);
}
/* Report whether someone has requested this fiber's reclamation while it
 * was inside a no-reclaim section.  Returns 0/1, or -1 with f_errno on a
 * bad id.  Fix: the no-reclaim branch used a bare `return 0`, leaving a
 * stale f_errno behind; return_success keeps the error state coherent,
 * consistent with every other accessor in this file. */
int fbr_want_reclaim(FBR_P_ fbr_id_t id)
{
	struct fbr_fiber *fiber;
	unpack_transfer_errno(-1, &fiber, id);
	if (fiber->no_reclaim > 0)
		/* If we're in noreclaim block of any depth, always return 0 */
		return_success(0);
	return_success(fiber->want_reclaim);
}
/* Return 1 if the fiber behind `id` has been reclaimed, 0 if it is
 * still live (i.e. the id still unpacks successfully). */
int fbr_is_reclaimed(_unused_ FBR_P_ fbr_id_t id)
{
	return 0 != fbr_id_unpack(FBR_A_ NULL, id);
}
/* Return the id of the currently executing fiber. */
fbr_id_t fbr_self(FBR_P)
{
	return CURRENT_FIBER_ID;
}
static int do_reclaim(FBR_P_ struct fbr_fiber *fiber);
/* Entry trampoline for every fiber: run the user function, then reclaim
 * the fiber when it returns.  do_reclaim() yields away for a
 * self-reclaiming fiber, so control must never reach the final yield —
 * hence the assert(NULL) tripwire. */
static void call_wrapper(FBR_P)
{
	int retval;
	struct fbr_fiber *fiber = CURRENT_FIBER;
	fiber->func(FBR_A_ fiber->func_arg);
	retval = do_reclaim(FBR_A_ fiber);
	assert(0 == retval);
	(void)retval;
	fbr_yield(FBR_A);
	assert(NULL);
}
/* Outcome of prepare_ev(): proceed and wait (OK), the event is already
 * satisfied (ARRIVED), or the request is invalid (EINVAL). */
enum ev_action_hint {
	EV_AH_OK = 0,
	EV_AH_ARRIVED,
	EV_AH_EINVAL
};
/* Destructor for a wait-queue item: unlink it from its queue, if it is
 * still enqueued (head is NULL once removed). */
static void item_dtor(_unused_ FBR_P_ void *arg)
{
	struct fbr_id_tailq_i *i = arg;
	if (NULL == i->head)
		return;
	TAILQ_REMOVE(i->head, i, entries);
}
/* Arm a single event before the fiber goes to sleep in fbr_ev_wait().
 * Registers a destructor so the wait-queue item is unlinked even if the
 * fiber is reclaimed mid-wait.  Returns an ev_action_hint (see enum). */
static enum ev_action_hint prepare_ev(FBR_P_ struct fbr_ev_base *ev)
{
	struct fbr_ev_watcher *e_watcher;
	struct fbr_ev_mutex *e_mutex;
	struct fbr_ev_cond_var *e_cond;
	struct fbr_id_tailq_i *item = &ev->item;
	ev->arrived = 0;
	ev->item.dtor.func = item_dtor;
	ev->item.dtor.arg = item;
	fbr_destructor_add(FBR_A_ &ev->item.dtor);
	switch (ev->type) {
	case FBR_EV_WATCHER:
		e_watcher = fbr_ev_upcast(ev, fbr_ev_watcher);
		/* The caller must have started the watcher already. */
		if (!ev_is_active(e_watcher->w)) {
			fbr_destructor_remove(FBR_A_ &ev->item.dtor,
					0 /* call it */);
			return EV_AH_EINVAL;
		}
		e_watcher->w->data = e_watcher;
		ev_set_cb(e_watcher->w, ev_watcher_cb);
		break;
	case FBR_EV_MUTEX:
		e_mutex = fbr_ev_upcast(ev, fbr_ev_mutex);
		/* Uncontended mutex: take it immediately, no wait needed. */
		if (fbr_id_isnull(e_mutex->mutex->locked_by)) {
			e_mutex->mutex->locked_by = CURRENT_FIBER_ID;
			return EV_AH_ARRIVED;
		}
		id_tailq_i_set(FBR_A_ item, CURRENT_FIBER);
		item->ev = ev;
		ev->data = item;
		TAILQ_INSERT_TAIL(&e_mutex->mutex->pending, item, entries);
		item->head = &e_mutex->mutex->pending;
		break;
	case FBR_EV_COND_VAR:
		e_cond = fbr_ev_upcast(ev, fbr_ev_cond_var);
		/* Waiting on a condvar requires holding the mutex. */
		if (e_cond->mutex && fbr_id_isnull(e_cond->mutex->locked_by)) {
			fbr_destructor_remove(FBR_A_ &ev->item.dtor,
					0 /* call it */);
			return EV_AH_EINVAL;
		}
		id_tailq_i_set(FBR_A_ item, CURRENT_FIBER);
		item->ev = ev;
		ev->data = item;
		TAILQ_INSERT_TAIL(&e_cond->cond->waiting, item, entries);
		item->head = &e_cond->cond->waiting;
		/* Release the mutex for the duration of the wait; it is
		 * re-acquired in finish_ev(). */
		if (e_cond->mutex)
			fbr_mutex_unlock(FBR_A_ e_cond->mutex);
		break;
	case FBR_EV_EIO:
#ifdef FBR_EIO_ENABLED
		/* NOP */
#else
		fbr_log_e(FBR_A_ "libevfibers: libeio support is not compiled");
		abort();
#endif
		break;
	}
	return EV_AH_OK;
}
/* Complete an event after it has arrived: drop the cleanup destructor,
 * re-acquire the condvar mutex, and install the abort tripwire callback
 * on watchers so a stray late firing is caught loudly. */
static void finish_ev(FBR_P_ struct fbr_ev_base *ev)
{
	struct fbr_ev_cond_var *e_cond;
	struct fbr_ev_watcher *e_watcher;
	fbr_destructor_remove(FBR_A_ &ev->item.dtor, 1 /* call it */);
	switch (ev->type) {
	case FBR_EV_COND_VAR:
		e_cond = fbr_ev_upcast(ev, fbr_ev_cond_var);
		/* Mirror of the unlock in prepare_ev(). */
		if (e_cond->mutex)
			fbr_mutex_lock(FBR_A_ e_cond->mutex);
		break;
	case FBR_EV_WATCHER:
		e_watcher = fbr_ev_upcast(ev, fbr_ev_watcher);
		ev_set_cb(e_watcher->w, ev_abort_cb);
		break;
	case FBR_EV_MUTEX:
		/* NOP */
		break;
	case FBR_EV_EIO:
#ifdef FBR_EIO_ENABLED
		/* NOP */
#else
		fbr_log_e(FBR_A_ "libevfibers: libeio support is not compiled");
		abort();
#endif
		break;
	}
}
/* Destructor that stops a timeout ev_timer if the owning fiber is
 * reclaimed before the timer is stopped normally. */
static void watcher_timer_dtor(_unused_ FBR_P_ void *_arg)
{
	struct ev_timer *w = _arg;
	ev_timer_stop(fctx->__p->loop, w);
}
/* Like fbr_ev_wait(), but with a timeout: a one-shot timer watcher is
 * appended to a stack copy of the NULL-terminated event array.  Returns
 * the number of user events that arrived (the timer itself is excluded),
 * or a negative value on error. */
int fbr_ev_wait_to(FBR_P_ struct fbr_ev_base *events[], ev_tstamp timeout)
{
	size_t size;
	ev_timer timer;
	struct fbr_ev_watcher watcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
	struct fbr_ev_base **new_events;
	struct fbr_ev_base **ev_pptr;
	int n_events;
	ev_timer_init(&timer, NULL, timeout, 0.);
	ev_timer_start(fctx->__p->loop, &timer);
	fbr_ev_watcher_init(FBR_A_ &watcher,
			(struct ev_watcher *)&timer);
	/* Ensure the timer is stopped even if this fiber is reclaimed. */
	dtor.func = watcher_timer_dtor;
	dtor.arg = &timer;
	fbr_destructor_add(FBR_A_ &dtor);
	size = 0;
	for (ev_pptr = events; NULL != *ev_pptr; ev_pptr++)
		size++;
	/* alloca is safe here: the array lives only for this call frame. */
	new_events = alloca((size + 2) * sizeof(void *));
	memcpy(new_events, events, size * sizeof(void *));
	new_events[size] = &watcher.ev_base;
	new_events[size + 1] = NULL;
	n_events = fbr_ev_wait(FBR_A_ new_events);
	fbr_destructor_remove(FBR_A_ &dtor, 1 /* Call it? */);
	if (n_events < 0)
		return n_events;
	/* Don't count the internal timeout watcher among the results. */
	if (watcher.ev_base.arrived)
		n_events--;
	return n_events;
}
/* Block the current fiber until at least one event in the
 * NULL-terminated array arrives.  Returns the number of arrived events,
 * or -1 with f_errno = FBR_EINVAL if any event fails preparation. */
int fbr_ev_wait(FBR_P_ struct fbr_ev_base *events[])
{
	struct fbr_fiber *fiber = CURRENT_FIBER;
	enum ev_action_hint hint;
	int num = 0;
	int i;
	fiber->ev.arrived = 0;
	fiber->ev.waiting = events;
	for (i = 0; NULL != events[i]; i++) {
		hint = prepare_ev(FBR_A_ events[i]);
		switch (hint) {
		case EV_AH_OK:
			break;
		case EV_AH_ARRIVED:
			/* Already satisfied (e.g. uncontended mutex). */
			fiber->ev.arrived = 1;
			events[i]->arrived = 1;
			break;
		case EV_AH_EINVAL:
			return_error(-1, FBR_EINVAL);
		}
	}
	/* Sleep until a watcher/signal posts at least one event. */
	while (0 == fiber->ev.arrived)
		fbr_yield(FBR_A);
	for (i = 0; NULL != events[i]; i++) {
		if (events[i]->arrived) {
			num++;
			finish_ev(FBR_A_ events[i]);
		} else
			cancel_ev(FBR_A_ events[i]);
	}
	return_success(num);
}
/* Block the current fiber on a single event.  Returns 0 once the event
 * arrives, or -1 with f_errno = FBR_EINVAL if preparation fails. */
int fbr_ev_wait_one(FBR_P_ struct fbr_ev_base *one)
{
	struct fbr_fiber *fiber = CURRENT_FIBER;
	enum ev_action_hint hint;
	struct fbr_ev_base *events[] = {one, NULL};
	fiber->ev.arrived = 0;
	fiber->ev.waiting = events;
	hint = prepare_ev(FBR_A_ one);
	switch (hint) {
	case EV_AH_OK:
		break;
	case EV_AH_ARRIVED:
		/* Already satisfied: skip the sleep entirely. */
		goto finish;
	case EV_AH_EINVAL:
		return_error(-1, FBR_EINVAL);
	}
	while (0 == fiber->ev.arrived)
		fbr_yield(FBR_A);
finish:
	finish_ev(FBR_A_ one);
	return 0;
}
/* Wait on a single event with a timeout.  Returns 0 if the event arrived
 * in time, otherwise -1 with errno = ETIMEDOUT.
 * NOTE(review): a failure inside fbr_ev_wait (FBR_EINVAL) is also
 * reported as ETIMEDOUT here — confirm whether that conflation is
 * intentional. */
int fbr_ev_wait_one_wto(FBR_P_ struct fbr_ev_base *one, ev_tstamp timeout)
{
	int n_events;
	struct fbr_ev_base *events[] = {one, NULL, NULL};
	ev_timer timer;
	struct fbr_ev_watcher twatcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
	ev_timer_init(&timer, NULL, timeout, 0.);
	ev_timer_start(fctx->__p->loop, &timer);
	fbr_ev_watcher_init(FBR_A_ &twatcher,
			(struct ev_watcher *)&timer);
	/* Ensure the timer is stopped even if this fiber is reclaimed. */
	dtor.func = watcher_timer_dtor;
	dtor.arg = &timer;
	fbr_destructor_add(FBR_A_ &dtor);
	events[1] = &twatcher.ev_base;
	n_events = fbr_ev_wait(FBR_A_ events);
	fbr_destructor_remove(FBR_A_ &dtor, 1 /* Call it? */);
	if (n_events > 0 && events[0]->arrived)
		return 0;
	errno = ETIMEDOUT;
	return -1;
}
/* Transfer control to the fiber behind `to`, pushing it onto the call
 * stack.  Returns 0 when the callee yields back, or -1 with f_errno if
 * the id is stale. */
int fbr_transfer(FBR_P_ fbr_id_t to)
{
	struct fbr_fiber *callee;
	struct fbr_fiber *caller = fctx->__p->sp->fiber;
	unpack_transfer_errno(-1, &callee, to);
	fctx->__p->sp++;
	fctx->__p->sp->fiber = callee;
	fill_trace_info(FBR_A_ &fctx->__p->sp->tinfo);
	/* Execution resumes here when the callee yields back to us. */
	coro_transfer(&caller->ctx, &callee->ctx);
	return_success(0);
}
/* Yield control back to the fiber one level down the call stack
 * (ultimately the root fiber / event loop).  Must not be called from the
 * root fiber itself. */
void fbr_yield(FBR_P)
{
	struct fbr_fiber *callee;
	struct fbr_fiber *caller;
	assert("Attempt to yield in a root fiber" &&
			fctx->__p->sp->fiber != &fctx->__p->root);
	callee = fctx->__p->sp->fiber;
	caller = (--fctx->__p->sp)->fiber;
	coro_transfer(&callee->ctx, &caller->ctx);
}
/* Switch a file descriptor to non-blocking mode via a read-modify-write
 * of its status flags.  Returns 0 on success, -1 with
 * f_errno = FBR_ESYSTEM (consult errno) on failure. */
int fbr_fd_nonblock(FBR_P_ int fd)
{
	int fl;
	fl = fcntl(fd, F_GETFL, 0);
	if (-1 == fl)
		return_error(-1, FBR_ESYSTEM);
	if (-1 == fcntl(fd, F_SETFL, fl | O_NONBLOCK))
		return_error(-1, FBR_ESYSTEM);
	return_success(0);
}
/* Zero an event descriptor, then stamp it with its type, the current
 * fiber's id and the owning context. */
static void ev_base_init(FBR_P_ struct fbr_ev_base *ev,
		enum fbr_ev_type type)
{
	memset(ev, 0x00, sizeof(*ev));
	ev->fctx = fctx;
	ev->id = CURRENT_FIBER_ID;
	ev->type = type;
}
/* Initialise a watcher event wrapping the given libev watcher. */
void fbr_ev_watcher_init(FBR_P_ struct fbr_ev_watcher *ev, ev_watcher *w)
{
	ev_base_init(FBR_A_ &ev->ev_base, FBR_EV_WATCHER);
	ev->w = w;
}
/* Destructor that stops an ev_io watcher if the owning fiber is
 * reclaimed while blocked on I/O. */
static void watcher_io_dtor(_unused_ FBR_P_ void *_arg)
{
	struct ev_io *w = _arg;
	ev_io_stop(fctx->__p->loop, w);
}
/* Fiber-blocking connect(2): kick off a (presumably non-blocking)
 * connect, wait for the socket to become writable, then fetch the final
 * status via SO_ERROR.  Returns 0 on success, -1 with errno set. */
int fbr_connect(FBR_P_ int sockfd, const struct sockaddr *addr,
		socklen_t addrlen) {
	ev_io io;
	struct fbr_ev_watcher watcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
	int r;
	socklen_t len;
	r = connect(sockfd, addr, addrlen);
	/* Immediate failure other than "in progress" is final. */
	if ((-1 == r) && (EINPROGRESS != errno))
		return -1;
	ev_io_init(&io, NULL, sockfd, EV_WRITE);
	ev_io_start(fctx->__p->loop, &io);
	dtor.func = watcher_io_dtor;
	dtor.arg = &io;
	fbr_destructor_add(FBR_A_ &dtor);
	fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&io);
	fbr_ev_wait_one(FBR_A_ &watcher.ev_base);
	/* Writability signals completion; SO_ERROR holds the outcome. */
	len = sizeof(r);
	if (-1 == getsockopt(sockfd, SOL_SOCKET, SO_ERROR, (void *)&r, &len)) {
		r = -1;
	} else if ( 0 != r ) {
		errno = r;
		r = -1;
	}
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	ev_io_stop(fctx->__p->loop, &io);
	return r;
}
/* fbr_connect() with a timeout.  Returns 0 on success; -1 with errno set
 * (ETIMEDOUT when the deadline expired before the socket became
 * writable). */
int fbr_connect_wto(FBR_P_ int sockfd, const struct sockaddr *addr,
		socklen_t addrlen, ev_tstamp timeout) {
	ev_io io;
	struct fbr_ev_watcher watcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
	int r, rc;
	socklen_t len;
	r = connect(sockfd, addr, addrlen);
	/* Immediate failure other than "in progress" is final. */
	if ((-1 == r) && (EINPROGRESS != errno))
		return -1;
	ev_io_init(&io, NULL, sockfd, EV_WRITE);
	ev_io_start(fctx->__p->loop, &io);
	dtor.func = watcher_io_dtor;
	dtor.arg = &io;
	fbr_destructor_add(FBR_A_ &dtor);
	fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&io);
	rc = fbr_ev_wait_one_wto(FBR_A_ &watcher.ev_base, timeout);
	if (0 == rc) {
		/* Completed in time: fetch the result via SO_ERROR. */
		len = sizeof(r);
		if (-1 == getsockopt(sockfd, SOL_SOCKET, SO_ERROR, (void *)&r, &len)) {
			r = -1;
		} else if ( 0 != r ) {
			errno = r;
			r = -1;
		}
	} else {
		r = -1;
		errno = ETIMEDOUT;
	}
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	ev_io_stop(fctx->__p->loop, &io);
	return r;
}
/* Fiber-blocking read(2): wait until the descriptor is readable, then
 * perform a single read, retrying only on EINTR.  Returns the read(2)
 * result. */
ssize_t fbr_read(FBR_P_ int fd, void *buf, size_t count)
{
	ssize_t r;
	ev_io io;
	struct fbr_ev_watcher watcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
	ev_io_init(&io, NULL, fd, EV_READ);
	ev_io_start(fctx->__p->loop, &io);
	/* Stop the watcher if this fiber is reclaimed mid-wait. */
	dtor.func = watcher_io_dtor;
	dtor.arg = &io;
	fbr_destructor_add(FBR_A_ &dtor);
	fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&io);
	fbr_ev_wait_one(FBR_A_ &watcher.ev_base);
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	do {
		r = read(fd, buf, count);
	} while (-1 == r && EINTR == errno);
	ev_io_stop(fctx->__p->loop, &io);
	return r;
}
/* fbr_read() with a timeout.  On timeout no read is attempted and 0 is
 * returned (note: indistinguishable from EOF for this call). */
ssize_t fbr_read_wto(FBR_P_ int fd, void *buf, size_t count, ev_tstamp timeout)
{
	ssize_t r = 0;
	ev_io io;
	struct fbr_ev_watcher watcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
	int rc = 0;
	ev_io_init(&io, NULL, fd, EV_READ);
	ev_io_start(fctx->__p->loop, &io);
	dtor.func = watcher_io_dtor;
	dtor.arg = &io;
	fbr_destructor_add(FBR_A_ &dtor);
	fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&io);
	rc = fbr_ev_wait_one_wto(FBR_A_ &watcher.ev_base, timeout);
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	if (0 == rc) {
		do {
			r = read(fd, buf, count);
		} while (-1 == r && EINTR == errno);
	}
	ev_io_stop(fctx->__p->loop, &io);
	return r;
}
/* Read exactly `count` bytes unless EOF intervenes: loop over readiness
 * waits and read(2) calls, retrying on EINTR and re-waiting on EAGAIN.
 * Returns the number of bytes read (possibly short at EOF) or -1 on a
 * hard error. */
ssize_t fbr_read_all(FBR_P_ int fd, void *buf, size_t count)
{
	ssize_t r;
	size_t done = 0;
	ev_io io;
	struct fbr_ev_watcher watcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
	ev_io_init(&io, NULL, fd, EV_READ);
	ev_io_start(fctx->__p->loop, &io);
	dtor.func = watcher_io_dtor;
	dtor.arg = &io;
	fbr_destructor_add(FBR_A_ &dtor);
	fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&io);
	while (count != done) {
next:
		fbr_ev_wait_one(FBR_A_ &watcher.ev_base);
		for (;;) {
			r = read(fd, buf + done, count - done);
			if (-1 == r) {
				switch (errno) {
				case EINTR:
					continue;
				case EAGAIN:
					/* Spurious readiness: wait again. */
					goto next;
				default:
					goto error;
				}
			}
			break;
		}
		if (0 == r)
			/* EOF: return the short count. */
			break;
		done += r;
	}
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	ev_io_stop(fctx->__p->loop, &io);
	return (ssize_t)done;
error:
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	ev_io_stop(fctx->__p->loop, &io);
	return -1;
}
/* fbr_read_all() with an overall deadline.  Returns the number of bytes
 * read (short at EOF), or -1 on error — with errno = ETIMEDOUT when the
 * deadline expired (fix: previously the timeout path left errno
 * untouched, unlike fbr_write_all_wto()). */
ssize_t fbr_read_all_wto(FBR_P_ int fd, void *buf, size_t count, ev_tstamp timeout)
{
	ssize_t r;
	size_t done = 0;
	ev_io io;
	struct fbr_ev_watcher watcher, twatcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
	struct fbr_destructor dtor2 = FBR_DESTRUCTOR_INITIALIZER;
	struct fbr_ev_base *events[] = {NULL, NULL, NULL};
	ev_timer timer;
	ev_io_init(&io, NULL, fd, EV_READ);
	ev_io_start(fctx->__p->loop, &io);
	dtor.func = watcher_io_dtor;
	dtor.arg = &io;
	fbr_destructor_add(FBR_A_ &dtor);
	fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&io);
	events[0] = &watcher.ev_base;
	/* One-shot deadline timer shares the wait with the io watcher. */
	ev_timer_init(&timer, NULL, timeout, 0.);
	ev_timer_start(fctx->__p->loop, &timer);
	fbr_ev_watcher_init(FBR_A_ &twatcher,
			(struct ev_watcher *)&timer);
	dtor2.func = watcher_timer_dtor;
	dtor2.arg = &timer;
	fbr_destructor_add(FBR_A_ &dtor2);
	events[1] = &twatcher.ev_base;
	while (count != done) {
next:
		fbr_ev_wait(FBR_A_ events);
		if (events[1]->arrived) {
			/* Deadline expired: report it, matching
			 * fbr_write_all_wto(). */
			errno = ETIMEDOUT;
			goto error;
		}
		for (;;) {
			r = read(fd, buf + done, count - done);
			if (-1 == r) {
				switch (errno) {
				case EINTR:
					continue;
				case EAGAIN:
					/* Spurious readiness: wait again. */
					goto next;
				default:
					goto error;
				}
			}
			break;
		}
		if (0 == r)
			/* EOF: return the short count. */
			break;
		done += r;
	}
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	fbr_destructor_remove(FBR_A_ &dtor2, 0 /* Call it? */);
	ev_timer_stop(fctx->__p->loop, &timer);
	ev_io_stop(fctx->__p->loop, &io);
	return (ssize_t)done;
error:
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	fbr_destructor_remove(FBR_A_ &dtor2, 0 /* Call it? */);
	ev_timer_stop(fctx->__p->loop, &timer);
	ev_io_stop(fctx->__p->loop, &io);
	return -1;
}
/* Read a line (up to and including '\n') into `buffer`, one byte at a
 * time via fbr_read(), truncating to n-1 bytes and always
 * NUL-terminating.  Returns the number of bytes stored, 0 on immediate
 * EOF, or -1 with errno set (EINVAL for a bad buffer/size). */
ssize_t fbr_readline(FBR_P_ int fd, void *buffer, size_t n)
{
	ssize_t rc;
	size_t stored;
	char *out;
	char ch;
	if (n <= 0 || buffer == NULL) {
		errno = EINVAL;
		return -1;
	}
	out = buffer;
	stored = 0;
	for (;;) {
		rc = fbr_read(FBR_A_ fd, &ch, 1);
		if (-1 == rc) {
			if (EINTR == errno)
				continue;
			return -1;
		}
		if (0 == rc) {
			/* EOF: empty line means "nothing read at all". */
			if (0 == stored)
				return 0;
			break;
		}
		/* Keep one slot free for the terminating NUL; excess
		 * bytes are consumed but dropped. */
		if (stored < n - 1) {
			stored++;
			*out++ = ch;
		}
		if ('\n' == ch)
			break;
	}
	*out = '\0';
	return stored;
}
/* Fiber-blocking write(2): wait until the descriptor is writable, then
 * perform a single write, retrying only on EINTR.  Returns the write(2)
 * result. */
ssize_t fbr_write(FBR_P_ int fd, const void *buf, size_t count)
{
	ssize_t r;
	ev_io io;
	struct fbr_ev_watcher watcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
	ev_io_init(&io, NULL, fd, EV_WRITE);
	ev_io_start(fctx->__p->loop, &io);
	/* Stop the watcher if this fiber is reclaimed mid-wait. */
	dtor.func = watcher_io_dtor;
	dtor.arg = &io;
	fbr_destructor_add(FBR_A_ &dtor);
	fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&io);
	fbr_ev_wait_one(FBR_A_ &watcher.ev_base);
	do {
		r = write(fd, buf, count);
	} while (-1 == r && EINTR == errno);
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	ev_io_stop(fctx->__p->loop, &io);
	return r;
}
/* fbr_write() with a timeout.  On timeout no write is attempted and 0 is
 * returned (note: indistinguishable from a zero-byte write). */
ssize_t fbr_write_wto(FBR_P_ int fd, const void *buf, size_t count, ev_tstamp timeout)
{
	ssize_t r = 0;
	int rc;
	ev_io io;
	struct fbr_ev_watcher watcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
	ev_io_init(&io, NULL, fd, EV_WRITE);
	ev_io_start(fctx->__p->loop, &io);
	dtor.func = watcher_io_dtor;
	dtor.arg = &io;
	fbr_destructor_add(FBR_A_ &dtor);
	fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&io);
	rc = fbr_ev_wait_one_wto(FBR_A_ &watcher.ev_base, timeout);
	if (0 == rc) {
		do {
			r = write(fd, buf, count);
		} while (-1 == r && EINTR == errno);
	}
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	ev_io_stop(fctx->__p->loop, &io);
	return r;
}
/* Write exactly `count` bytes: loop over readiness waits and write(2)
 * calls, retrying on EINTR and re-waiting on EAGAIN.  Returns `count` on
 * success or -1 on a hard error. */
ssize_t fbr_write_all(FBR_P_ int fd, const void *buf, size_t count)
{
	ssize_t r;
	size_t done = 0;
	ev_io io;
	struct fbr_ev_watcher watcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
	ev_io_init(&io, NULL, fd, EV_WRITE);
	ev_io_start(fctx->__p->loop, &io);
	dtor.func = watcher_io_dtor;
	dtor.arg = &io;
	fbr_destructor_add(FBR_A_ &dtor);
	fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&io);
	while (count != done) {
next:
		fbr_ev_wait_one(FBR_A_ &watcher.ev_base);
		for (;;) {
			r = write(fd, buf + done, count - done);
			if (-1 == r) {
				switch (errno) {
				case EINTR:
					continue;
				case EAGAIN:
					/* Spurious writability: wait again. */
					goto next;
				default:
					goto error;
				}
			}
			break;
		}
		done += r;
	}
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	ev_io_stop(fctx->__p->loop, &io);
	return (ssize_t)done;
error:
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	ev_io_stop(fctx->__p->loop, &io);
	return -1;
}
/* fbr_write_all() with an overall deadline.  Returns `count` on success,
 * or -1 on error with errno = ETIMEDOUT when the deadline expired. */
ssize_t fbr_write_all_wto(FBR_P_ int fd, const void *buf, size_t count, ev_tstamp timeout)
{
	ssize_t r;
	size_t done = 0;
	ev_io io;
	struct fbr_ev_watcher watcher, twatcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
	struct fbr_destructor dtor2 = FBR_DESTRUCTOR_INITIALIZER;
	struct fbr_ev_base *events[] = {NULL, NULL, NULL};
	ev_timer timer;
	ev_io_init(&io, NULL, fd, EV_WRITE);
	ev_io_start(fctx->__p->loop, &io);
	dtor.func = watcher_io_dtor;
	dtor.arg = &io;
	fbr_destructor_add(FBR_A_ &dtor);
	fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&io);
	events[0] = &watcher.ev_base;
	/* One-shot deadline timer shares the wait with the io watcher. */
	ev_timer_init(&timer, NULL, timeout, 0.);
	ev_timer_start(fctx->__p->loop, &timer);
	fbr_ev_watcher_init(FBR_A_ &twatcher,
			(struct ev_watcher *)&timer);
	dtor2.func = watcher_timer_dtor;
	dtor2.arg = &timer;
	fbr_destructor_add(FBR_A_ &dtor2);
	events[1] = &twatcher.ev_base;
	while (count != done) {
next:
		fbr_ev_wait(FBR_A_ events);
		if (events[1]->arrived) {
			/* Deadline expired before the write completed. */
			errno = ETIMEDOUT;
			goto error;
		}
		for (;;) {
			r = write(fd, buf + done, count - done);
			if (-1 == r) {
				switch (errno) {
				case EINTR:
					continue;
				case EAGAIN:
					/* Spurious writability: wait again. */
					goto next;
				default:
					goto error;
				}
			}
			break;
		}
		done += r;
	}
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	fbr_destructor_remove(FBR_A_ &dtor2, 0 /* Call it? */);
	ev_timer_stop(fctx->__p->loop, &timer);
	ev_io_stop(fctx->__p->loop, &io);
	return (ssize_t)done;
error:
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	fbr_destructor_remove(FBR_A_ &dtor2, 0 /* Call it? */);
	ev_timer_stop(fctx->__p->loop, &timer);
	ev_io_stop(fctx->__p->loop, &io);
	return -1;
}
/* Fiber-friendly recvfrom(): park the fiber until sockfd is readable, then
 * perform the recvfrom. The syscall is retried on EINTR for consistency
 * with fbr_accept()/fbr_write(), which already do so; previously a signal
 * delivered between readiness and the syscall surfaced as a spurious -1.
 * Returns whatever recvfrom() returns.
 */
ssize_t fbr_recvfrom(FBR_P_ int sockfd, void *buf, size_t len, int flags,
		struct sockaddr *src_addr, socklen_t *addrlen)
{
	ssize_t r;
	ev_io io;
	struct fbr_ev_watcher watcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;

	ev_io_init(&io, NULL, sockfd, EV_READ);
	ev_io_start(fctx->__p->loop, &io);
	/* Ensure the watcher is stopped even if the fiber is reclaimed
	 * while blocked. */
	dtor.func = watcher_io_dtor;
	dtor.arg = &io;
	fbr_destructor_add(FBR_A_ &dtor);
	fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&io);
	fbr_ev_wait_one(FBR_A_ &watcher.ev_base);
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	ev_io_stop(fctx->__p->loop, &io);
	do {
		r = recvfrom(sockfd, buf, len, flags, src_addr, addrlen);
	} while (-1 == r && EINTR == errno);
	return r;
}
/* Fiber-friendly recv(): park the fiber until sockfd is readable, then
 * perform the recv, retrying on EINTR for consistency with
 * fbr_accept()/fbr_write(). Returns whatever recv() returns.
 */
ssize_t fbr_recv(FBR_P_ int sockfd, void *buf, size_t len, int flags)
{
	ssize_t r;
	ev_io io;
	struct fbr_ev_watcher watcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;

	ev_io_init(&io, NULL, sockfd, EV_READ);
	ev_io_start(fctx->__p->loop, &io);
	dtor.func = watcher_io_dtor;
	dtor.arg = &io;
	fbr_destructor_add(FBR_A_ &dtor);
	fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&io);
	fbr_ev_wait_one(FBR_A_ &watcher.ev_base);
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	ev_io_stop(fctx->__p->loop, &io);
	do {
		r = recv(sockfd, buf, len, flags);
	} while (-1 == r && EINTR == errno);
	return r;
}
/* Fiber-friendly sendto(): park the fiber until sockfd is writable, then
 * perform the sendto, retrying on EINTR for consistency with
 * fbr_accept()/fbr_write(). Returns whatever sendto() returns.
 */
ssize_t fbr_sendto(FBR_P_ int sockfd, const void *buf, size_t len, int flags,
		const struct sockaddr *dest_addr, socklen_t addrlen)
{
	ssize_t r;
	ev_io io;
	struct fbr_ev_watcher watcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;

	ev_io_init(&io, NULL, sockfd, EV_WRITE);
	ev_io_start(fctx->__p->loop, &io);
	dtor.func = watcher_io_dtor;
	dtor.arg = &io;
	fbr_destructor_add(FBR_A_ &dtor);
	fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&io);
	fbr_ev_wait_one(FBR_A_ &watcher.ev_base);
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	ev_io_stop(fctx->__p->loop, &io);
	do {
		r = sendto(sockfd, buf, len, flags, dest_addr, addrlen);
	} while (-1 == r && EINTR == errno);
	return r;
}
/* Fiber-friendly send(): park the fiber until sockfd is writable, then
 * perform the send, retrying on EINTR for consistency with
 * fbr_accept()/fbr_write(). Returns whatever send() returns.
 */
ssize_t fbr_send(FBR_P_ int sockfd, const void *buf, size_t len, int flags)
{
	ssize_t r;
	ev_io io;
	struct fbr_ev_watcher watcher;
	struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;

	ev_io_init(&io, NULL, sockfd, EV_WRITE);
	ev_io_start(fctx->__p->loop, &io);
	dtor.func = watcher_io_dtor;
	dtor.arg = &io;
	fbr_destructor_add(FBR_A_ &dtor);
	fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&io);
	fbr_ev_wait_one(FBR_A_ &watcher.ev_base);
	fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
	ev_io_stop(fctx->__p->loop, &io);
	do {
		r = send(sockfd, buf, len, flags);
	} while (-1 == r && EINTR == errno);
	return r;
}
/* Fiber-friendly accept(): park the fiber until sockfd is readable (i.e. a
 * connection is pending), then accept it, retrying on EINTR.
 * Returns the new descriptor or -1 with errno set.
 */
int fbr_accept(FBR_P_ int sockfd, struct sockaddr *addr, socklen_t *addrlen)
{
int r;
ev_io io;
struct fbr_ev_watcher watcher;
struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
ev_io_init(&io, NULL, sockfd, EV_READ);
ev_io_start(fctx->__p->loop, &io);
dtor.func = watcher_io_dtor;
dtor.arg = &io;
fbr_destructor_add(FBR_A_ &dtor);
fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&io);
fbr_ev_wait_one(FBR_A_ &watcher.ev_base);
do {
r = accept(sockfd, addr, addrlen);
} while (-1 == r && EINTR == errno);
fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
ev_io_stop(fctx->__p->loop, &io);
return r;
}
/* Suspend the current fiber for `seconds` using a one-shot ev_timer.
 * Returns the time remaining if the fiber was woken before the deadline,
 * clamped to 0 otherwise (mirrors sleep(3) semantics).
 */
ev_tstamp fbr_sleep(FBR_P_ ev_tstamp seconds)
{
ev_timer timer;
struct fbr_ev_watcher watcher;
struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
ev_tstamp expected = ev_now(fctx->__p->loop) + seconds;
ev_timer_init(&timer, NULL, seconds, 0.);
ev_timer_start(fctx->__p->loop, &timer);
dtor.func = watcher_timer_dtor;
dtor.arg = &timer;
fbr_destructor_add(FBR_A_ &dtor);
fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&timer);
fbr_ev_wait_one(FBR_A_ &watcher.ev_base);
fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
ev_timer_stop(fctx->__p->loop, &timer);
/* max() is presumably a project-local macro -- not std::max. */
return max(0., expected - ev_now(fctx->__p->loop));
}
static void watcher_async_dtor(FBR_P_ void *_arg)
{
struct ev_async *w = _arg;
ev_async_stop(fctx->__p->loop, w);
}
/* Block the current fiber until the given ev_async watcher is triggered.
 * The caller owns `w`; it is expected to have been started already -- this
 * function only waits on it and stops it afterwards.
 */
void fbr_async_wait(FBR_P_ ev_async *w)
{
struct fbr_ev_watcher watcher;
struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
dtor.func = watcher_async_dtor;
dtor.arg = w;
fbr_destructor_add(FBR_A_ &dtor);
fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)w);
fbr_ev_wait_one(FBR_A_ &watcher.ev_base);
fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
ev_async_stop(fctx->__p->loop, w);
return;
}
/* Return the system page size in bytes, querying sysconf(_SC_PAGESIZE)
 * once and caching the result for subsequent calls. Aborts the process if
 * the query fails, since nothing downstream can work without it.
 */
static unsigned get_page_size()
{
	static unsigned cached;
	long res;

	if (cached > 0)
		return cached;
	res = sysconf(_SC_PAGESIZE);
	if (res < 0) {
		fprintf(stderr, "libevfibers: sysconf(_SC_PAGESIZE): %s",
				strerror(errno));
		abort();
	}
	cached = (unsigned)res;
	return cached;
}
/* Round `size` up to the nearest multiple of the system page size.
 * An exact multiple (including 0) is returned unchanged.
 */
static size_t round_up_to_page_size(size_t size)
{
	size_t page = get_page_size();
	size_t partial = size % page;

	if (0 == partial)
		return size;
	return size + (page - partial);
}
/* Create a new fiber running `func(arg)` with the given stack size (0 means
 * FBR_STACK_SIZE, rounded up to a whole number of pages). Reclaimed fibers
 * are recycled from a free list; otherwise a new one is allocated.
 * Returns the packed fiber id; the fiber does not run until transferred to.
 *
 * Fixes vs. previous revision: the fiber-struct malloc() is now checked
 * (the stack malloc() already was), and the name buffer is explicitly
 * NUL-terminated -- strncpy() does not terminate when the source fills the
 * limit, and a recycled fiber's buffer may hold non-zero bytes.
 */
fbr_id_t fbr_create(FBR_P_ const char *name, fbr_fiber_func_t func, void *arg,
		size_t stack_size)
{
	struct fbr_fiber *fiber;
	if (!LIST_EMPTY(&fctx->__p->reclaimed)) {
		fiber = LIST_FIRST(&fctx->__p->reclaimed);
		LIST_REMOVE(fiber, entries.reclaimed);
	} else {
		fiber = malloc(sizeof(struct fbr_fiber));
		if (NULL == fiber)
			err(EXIT_FAILURE, "malloc failed");
		memset(fiber, 0x00, sizeof(struct fbr_fiber));
		if (0 == stack_size)
			stack_size = FBR_STACK_SIZE;
		stack_size = round_up_to_page_size(stack_size);
		fiber->stack = malloc(stack_size);
		if (NULL == fiber->stack)
			err(EXIT_FAILURE, "malloc failed");
		fiber->stack_size = stack_size;
		/* Lets valgrind track this memory as a stack. */
		(void)VALGRIND_STACK_REGISTER(fiber->stack, fiber->stack +
				stack_size);
		fbr_cond_init(FBR_A_ &fiber->reclaim_cond);
		fiber->id = fctx->__p->last_id++;
	}
	coro_create(&fiber->ctx, (coro_func)call_wrapper, FBR_A, fiber->stack,
			fiber->stack_size);
	LIST_INIT(&fiber->children);
	LIST_INIT(&fiber->pool);
	TAILQ_INIT(&fiber->destructors);
	strncpy(fiber->name, name, FBR_MAX_FIBER_NAME - 1);
	fiber->name[FBR_MAX_FIBER_NAME - 1] = '\0';
	fiber->func = func;
	fiber->func_arg = arg;
	/* New fibers are children of the creator. */
	LIST_INSERT_HEAD(&CURRENT_FIBER->children, fiber, entries.children);
	fiber->parent = CURRENT_FIBER;
	fiber->no_reclaim = 0;
	fiber->want_reclaim = 0;
	return fbr_id_pack(fiber);
}
/* Re-parent the CURRENT fiber under `parent_id` (or under the root fiber
 * when parent_id is FBR_ID_NULL). Returns 0, or -1 if parent_id is stale.
 */
int fbr_disown(FBR_P_ fbr_id_t parent_id)
{
struct fbr_fiber *fiber, *parent;
if (!fbr_id_isnull(parent_id))
unpack_transfer_errno(-1, &parent, parent_id);
else
parent = &fctx->__p->root;
fiber = CURRENT_FIBER;
LIST_REMOVE(fiber, entries.children);
LIST_INSERT_HEAD(&parent->children, fiber, entries.children);
fiber->parent = parent;
return_success(0);
}
/* Return the id of the current fiber's parent, or FBR_ID_NULL if the
 * parent is the root fiber. */
fbr_id_t fbr_parent(FBR_P)
{
struct fbr_fiber *fiber = CURRENT_FIBER;
if (fiber->parent == &fctx->__p->root)
return FBR_ID_NULL;
return fbr_id_pack(fiber->parent);
}
/* Deprecated: zeroed allocation from the fiber-local pool.
 * NOTE(review): nmemb * size may overflow before allocation -- callers
 * should migrate off this API anyway. */
void *fbr_calloc(FBR_P_ unsigned int nmemb, size_t size)
{
void *ptr;
fprintf(stderr, "libevfibers: fbr_calloc is deprecated\n");
ptr = allocate_in_fiber(FBR_A_ nmemb * size, CURRENT_FIBER);
memset(ptr, 0x00, nmemb * size);
return ptr;
}
/* Deprecated: allocation from the fiber-local pool; freed automatically
 * when the fiber is reclaimed. */
void *fbr_alloc(FBR_P_ size_t size)
{
fprintf(stderr, "libevfibers: fbr_alloc is deprecated\n");
return allocate_in_fiber(FBR_A_ size, CURRENT_FIBER);
}
/* Deprecated: attach a destructor to a pointer obtained from fbr_alloc().
 * Relies on the mem_pool header living immediately before the user
 * pointer, hence the `- 1` pointer arithmetic. */
void fbr_alloc_set_destructor(_unused_ FBR_P_ void *ptr,
fbr_alloc_destructor_func_t func, void *context)
{
struct mem_pool *pool_entry;
fprintf(stderr, "libevfibers:"
" fbr_alloc_set_destructor is deprecated\n");
pool_entry = (struct mem_pool *)ptr - 1;
pool_entry->destructor = func;
pool_entry->destructor_context = context;
}
/* Deprecated: free a fiber-pool allocation, invoking its destructor
 * (final argument 1 = call destructor). */
void fbr_free(FBR_P_ void *ptr)
{
fprintf(stderr, "libevfibers: fbr_free is deprecated\n");
fbr_free_in_fiber(FBR_A_ CURRENT_FIBER, ptr, 1);
}
/* Deprecated: free a fiber-pool allocation WITHOUT invoking its
 * destructor (final argument 0 = skip destructor). */
void fbr_free_nd(FBR_P_ void *ptr)
{
fprintf(stderr, "libevfibers: fbr_free_nd is deprecated\n");
fbr_free_in_fiber(FBR_A_ CURRENT_FIBER, ptr, 0);
}
/* Dump the fiber call stack (most recent first) through the supplied log
 * function, one framed section per fiber transfer. */
void fbr_dump_stack(FBR_P_ fbr_logutil_func_t log)
{
struct fbr_stack_item *ptr = fctx->__p->sp;
(*log)(FBR_A_ "%s", "Fiber call stack:");
(*log)(FBR_A_ "%s", "-------------------------------");
/* Walk from the current stack pointer down to the base. */
while (ptr >= fctx->__p->stack) {
(*log)(FBR_A_ "fiber_call: %p\t%s",
ptr->fiber,
ptr->fiber->name);
print_trace_info(FBR_A_ &ptr->tinfo, log);
(*log)(FBR_A_ "%s", "-------------------------------");
ptr--;
}
}
/* Queue a single fiber for deferred transfer: append it to the pending
 * list and poke the pending_async watcher so the root loop picks it up.
 * The async watcher is (re)started only on the empty->non-empty edge. */
static void transfer_later(FBR_P_ struct fbr_id_tailq_i *item)
{
int was_empty;
was_empty = TAILQ_EMPTY(&fctx->__p->pending_fibers);
TAILQ_INSERT_TAIL(&fctx->__p->pending_fibers, item, entries);
item->head = &fctx->__p->pending_fibers;
/* The second condition is always true after the insert above; kept for
 * symmetry with transfer_later_tailq(). */
if (was_empty && !TAILQ_EMPTY(&fctx->__p->pending_fibers)) {
ev_async_start(fctx->__p->loop, &fctx->__p->pending_async);
}
ev_async_send(fctx->__p->loop, &fctx->__p->pending_async);
}
/* Queue a whole tailq of fibers for deferred transfer: retarget each
 * item's head pointer, splice the list onto pending_fibers, and poke the
 * pending_async watcher. The source tailq is left empty (TAILQ_CONCAT). */
static void transfer_later_tailq(FBR_P_ struct fbr_id_tailq *tailq)
{
int was_empty;
struct fbr_id_tailq_i *item;
TAILQ_FOREACH(item, tailq, entries) {
item->head = &fctx->__p->pending_fibers;
}
was_empty = TAILQ_EMPTY(&fctx->__p->pending_fibers);
TAILQ_CONCAT(&fctx->__p->pending_fibers, tailq, entries);
/* Guard matters here: the concatenated tailq may have been empty. */
if (was_empty && !TAILQ_EMPTY(&fctx->__p->pending_fibers)) {
ev_async_start(fctx->__p->loop, &fctx->__p->pending_async);
}
ev_async_send(fctx->__p->loop, &fctx->__p->pending_async);
}
/* Initialize an event descriptor of type FBR_EV_MUTEX bound to `mutex`. */
void fbr_ev_mutex_init(FBR_P_ struct fbr_ev_mutex *ev,
struct fbr_mutex *mutex)
{
ev_base_init(FBR_A_ &ev->ev_base, FBR_EV_MUTEX);
ev->mutex = mutex;
}
/* Initialize a fiber mutex: unlocked, with an empty waiter queue. */
void fbr_mutex_init(_unused_ FBR_P_ struct fbr_mutex *mutex)
{
mutex->locked_by = FBR_ID_NULL;
TAILQ_INIT(&mutex->pending);
}
/* Acquire the mutex, blocking the current fiber if needed. The mutex is
 * non-recursive: locking it twice from the same fiber is asserted against.
 * The actual acquisition happens inside the event wait machinery, hence
 * the post-condition assert. */
void fbr_mutex_lock(FBR_P_ struct fbr_mutex *mutex)
{
struct fbr_ev_mutex ev;
assert(!fbr_id_eq(mutex->locked_by, CURRENT_FIBER_ID) &&
"Mutex is already locked by current fiber");
fbr_ev_mutex_init(FBR_A_ &ev, mutex);
fbr_ev_wait_one(FBR_A_ &ev.ev_base);
assert(fbr_id_eq(mutex->locked_by, CURRENT_FIBER_ID));
}
/* Try to acquire the mutex without blocking.
 * Returns 1 on success, 0 if it is already held. */
int fbr_mutex_trylock(FBR_P_ struct fbr_mutex *mutex)
{
if (fbr_id_isnull(mutex->locked_by)) {
mutex->locked_by = CURRENT_FIBER_ID;
return 1;
}
return 0;
}
/* Release the mutex (must be held by the current fiber). Ownership is
 * handed directly to the first pending waiter that still refers to a live
 * fiber; dead waiters are dequeued and skipped. If no live waiter remains
 * the mutex simply becomes free.
 *
 * Fix vs. previous revision: if every queued waiter was dead, the loop
 * exhausted the tailq leaving item == NULL, and the subsequent
 * item->id dereference was undefined behavior.
 */
void fbr_mutex_unlock(FBR_P_ struct fbr_mutex *mutex)
{
	struct fbr_id_tailq_i *item, *x;
	struct fbr_fiber *fiber = NULL;
	assert(fbr_id_eq(mutex->locked_by, CURRENT_FIBER_ID) &&
			"Can't unlock the mutex, locked by another fiber");
	if (TAILQ_EMPTY(&mutex->pending)) {
		mutex->locked_by = FBR_ID_NULL;
		return;
	}
	TAILQ_FOREACH_SAFE(item, &mutex->pending, entries, x) {
		assert(item->head == &mutex->pending);
		TAILQ_REMOVE(&mutex->pending, item, entries);
		if (-1 == fbr_id_unpack(FBR_A_ &fiber, item->id)) {
			fbr_log_e(FBR_A_ "libevfibers: unexpected error trying"
					" to find a fiber by id: %s",
					fbr_strerror(FBR_A_ fctx->f_errno));
			continue;
		}
		break;
	}
	if (NULL == item) {
		/* All pending waiters were dead fibers; nobody to wake. */
		mutex->locked_by = FBR_ID_NULL;
		return;
	}
	mutex->locked_by = item->id;
	assert(!fbr_id_isnull(mutex->locked_by));
	post_ev(FBR_A_ fiber, item->ev);
	transfer_later(FBR_A_ item);
}
/* Destroy a fiber mutex. */
void fbr_mutex_destroy(_unused_ FBR_P_ _unused_ struct fbr_mutex *mutex)
{
/* Since mutex is stack allocated now, this effectively turns into a
 * NOOP. But we might consider adding some cleanup in the future.
 */
}
/* Initialize an event descriptor of type FBR_EV_COND_VAR bound to the
 * given condition variable and (possibly NULL) mutex. */
void fbr_ev_cond_var_init(FBR_P_ struct fbr_ev_cond_var *ev,
struct fbr_cond_var *cond, struct fbr_mutex *mutex)
{
ev_base_init(FBR_A_ &ev->ev_base, FBR_EV_COND_VAR);
ev->cond = cond;
ev->mutex = mutex;
}
/* Initialize a fiber condition variable with no associated mutex and an
 * empty waiter queue. */
void fbr_cond_init(_unused_ FBR_P_ struct fbr_cond_var *cond)
{
cond->mutex = NULL;
TAILQ_INIT(&cond->waiting);
}
/* Destroy a fiber condition variable. */
void fbr_cond_destroy(_unused_ FBR_P_ _unused_ struct fbr_cond_var *cond)
{
/* Since condvar is stack allocated now, this effectively turns into a
 * NOOP. But we might consider adding some cleanup in the future.
 */
}
/* Wait on the condition variable. If `mutex` is non-NULL it must be held
 * by the caller (FBR_EINVAL otherwise); the event machinery releases and
 * re-acquires it around the wait. Returns 0 on wakeup, -1 on error. */
int fbr_cond_wait(FBR_P_ struct fbr_cond_var *cond, struct fbr_mutex *mutex)
{
struct fbr_ev_cond_var ev;
if (mutex && fbr_id_isnull(mutex->locked_by))
return_error(-1, FBR_EINVAL);
fbr_ev_cond_var_init(FBR_A_ &ev, cond, mutex);
fbr_ev_wait_one(FBR_A_ &ev.ev_base);
return_success(0);
}
/* Wake every fiber waiting on the condition variable. Dead waiters are
 * tolerated (FBR_ENOFIBER) and simply skipped; the entire waiting list is
 * then handed to the deferred-transfer queue. */
void fbr_cond_broadcast(FBR_P_ struct fbr_cond_var *cond)
{
struct fbr_id_tailq_i *item;
struct fbr_fiber *fiber;
if (TAILQ_EMPTY(&cond->waiting))
return;
TAILQ_FOREACH(item, &cond->waiting, entries) {
if(-1 == fbr_id_unpack(FBR_A_ &fiber, item->id)) {
assert(FBR_ENOFIBER == fctx->f_errno);
continue;
}
post_ev(FBR_A_ fiber, item->ev);
}
transfer_later_tailq(FBR_A_ &cond->waiting);
}
/* Wake the first fiber waiting on the condition variable, if any.
 * NOTE(review): if the first waiter's fiber is dead this returns without
 * waking anyone (and leaves the dead entry queued) -- confirm intended. */
void fbr_cond_signal(FBR_P_ struct fbr_cond_var *cond)
{
struct fbr_id_tailq_i *item;
struct fbr_fiber *fiber;
if (TAILQ_EMPTY(&cond->waiting))
return;
item = TAILQ_FIRST(&cond->waiting);
if(-1 == fbr_id_unpack(FBR_A_ &fiber, item->id)) {
assert(FBR_ENOFIBER == fctx->f_errno);
return;
}
post_ev(FBR_A_ fiber, item->ev);
assert(item->head == &cond->waiting);
TAILQ_REMOVE(&cond->waiting, item, entries);
transfer_later(FBR_A_ item);
}
/* Initialize a virtual ring buffer: reserve a PROT_NONE region of
 * 2*size + 2 guard pages, then map the same file-backed pages twice,
 * back to back, so the ring's contents are always virtually contiguous.
 * The backing file comes from mkstemp(file_pattern) and is unlinked
 * immediately. Returns 0 on success, -1 on failure (all resources freed).
 *
 * Fix vs. previous revision: mkstemp() failure is -1, not <= 0 -- file
 * descriptor 0 is valid and was both rejected at creation and leaked on
 * the error path.
 */
int fbr_vrb_init(struct fbr_vrb *vrb, size_t size, const char *file_pattern)
{
	int fd = -1;
	size_t sz = get_page_size();
	size = (size ? round_up_to_page_size(size) : sz);
	void *ptr = MAP_FAILED;
	char *temp_name = NULL;
	mode_t old_umask;
	const mode_t secure_umask = 077;

	temp_name = strdup(file_pattern);
	if (!temp_name)
		return -1;

	/* Reserve the whole window (two copies + guard page on each side)
	 * as inaccessible memory; the data mappings are placed inside it. */
	vrb->mem_ptr_size = size * 2 + sz * 2;
	vrb->mem_ptr = mmap(NULL, vrb->mem_ptr_size, PROT_NONE,
			FBR_MAP_ANON_FLAG | MAP_PRIVATE, -1, 0);
	if (MAP_FAILED == vrb->mem_ptr)
		goto error;
	vrb->lower_ptr = vrb->mem_ptr + sz;
	vrb->upper_ptr = vrb->lower_ptr + size;
	vrb->ptr_size = size;
	vrb->data_ptr = vrb->lower_ptr;
	vrb->space_ptr = vrb->lower_ptr;

	/* Create the backing file with a restrictive umask, then unlink it
	 * so it disappears when the fd is closed. */
	old_umask = umask(0);
	umask(secure_umask);
	fd = mkstemp(temp_name);
	umask(old_umask);
	if (0 > fd)
		goto error;
	if (0 > unlink(temp_name))
		goto error;
	free(temp_name);
	temp_name = NULL;
	if (0 > ftruncate(fd, size))
		goto error;

	/* Map the same pages at both halves of the window. */
	ptr = mmap(vrb->lower_ptr, vrb->ptr_size, PROT_READ | PROT_WRITE,
			MAP_FIXED | MAP_SHARED, fd, 0);
	if (MAP_FAILED == ptr)
		goto error;
	if (ptr != vrb->lower_ptr)
		goto error;
	ptr = mmap(vrb->upper_ptr, vrb->ptr_size, PROT_READ | PROT_WRITE,
			MAP_FIXED | MAP_SHARED, fd, 0);
	if (MAP_FAILED == ptr)
		goto error;
	if (ptr != vrb->upper_ptr)
		goto error;
	close(fd);
	return 0;

error:
	if (MAP_FAILED != ptr)
		munmap(ptr, size);
	if (0 <= fd)
		close(fd);
	if (vrb->mem_ptr)
		munmap(vrb->mem_ptr, vrb->mem_ptr_size);
	if (temp_name)
		free(temp_name);
	return -1;
}
/* Initialize a fiber buffer on top of a virtual ring buffer of `size`
 * bytes, with its reader/writer synchronization primitives.
 * Returns 0, or -1 with FBR_EBUFFERMMAP if the vrb setup fails. */
int fbr_buffer_init(FBR_P_ struct fbr_buffer *buffer, size_t size)
{
int rv;
rv = fbr_vrb_init(&buffer->vrb, size, fctx->__p->buffer_file_pattern);
if (rv)
return_error(-1, FBR_EBUFFERMMAP);
buffer->prepared_bytes = 0;
buffer->waiting_bytes = 0;
fbr_cond_init(FBR_A_ &buffer->committed_cond);
fbr_cond_init(FBR_A_ &buffer->bytes_freed_cond);
fbr_mutex_init(FBR_A_ &buffer->write_mutex);
fbr_mutex_init(FBR_A_ &buffer->read_mutex);
return_success(0);
}
/* Tear down a virtual ring buffer: unmap both data mappings, then the
 * enclosing reservation. */
void fbr_vrb_destroy(struct fbr_vrb *vrb)
{
munmap(vrb->upper_ptr, vrb->ptr_size);
munmap(vrb->lower_ptr, vrb->ptr_size);
munmap(vrb->mem_ptr, vrb->mem_ptr_size);
}
/* Destroy a fiber buffer and its synchronization primitives. */
void fbr_buffer_destroy(FBR_P_ struct fbr_buffer *buffer)
{
fbr_vrb_destroy(&buffer->vrb);
fbr_mutex_destroy(FBR_A_ &buffer->read_mutex);
fbr_mutex_destroy(FBR_A_ &buffer->write_mutex);
fbr_cond_destroy(FBR_A_ &buffer->committed_cond);
fbr_cond_destroy(FBR_A_ &buffer->bytes_freed_cond);
}
/* Reserve `size` bytes of contiguous write space in the buffer, blocking
 * until any in-flight prepare is committed/aborted and enough space is
 * free. Holds write_mutex until the matching commit/abort. Returns a
 * pointer to the space, or NULL with FBR_EINVAL if size exceeds capacity. */
void *fbr_buffer_alloc_prepare(FBR_P_ struct fbr_buffer *buffer, size_t size)
{
if (size > fbr_buffer_size(FBR_A_ buffer))
return_error(NULL, FBR_EINVAL);
fbr_mutex_lock(FBR_A_ &buffer->write_mutex);
/* Only one prepared-but-uncommitted region may exist at a time. */
while (buffer->prepared_bytes > 0)
fbr_cond_wait(FBR_A_ &buffer->committed_cond,
&buffer->write_mutex);
assert(0 == buffer->prepared_bytes);
buffer->prepared_bytes = size;
while (fbr_buffer_free_bytes(FBR_A_ buffer) < size)
fbr_cond_wait(FBR_A_ &buffer->bytes_freed_cond,
&buffer->write_mutex);
return fbr_buffer_space_ptr(FBR_A_ buffer);
}
/* Publish the previously prepared bytes to readers and release the
 * write side. Pairs with fbr_buffer_alloc_prepare(). */
void fbr_buffer_alloc_commit(FBR_P_ struct fbr_buffer *buffer)
{
fbr_vrb_give(&buffer->vrb, buffer->prepared_bytes);
buffer->prepared_bytes = 0;
fbr_cond_signal(FBR_A_ &buffer->committed_cond);
fbr_mutex_unlock(FBR_A_ &buffer->write_mutex);
}
/* Discard the previously prepared bytes without publishing them and
 * release the write side. Pairs with fbr_buffer_alloc_prepare(). */
void fbr_buffer_alloc_abort(FBR_P_ struct fbr_buffer *buffer)
{
buffer->prepared_bytes = 0;
fbr_cond_signal(FBR_A_ &buffer->committed_cond);
fbr_mutex_unlock(FBR_A_ &buffer->write_mutex);
}
/* Block until at least `size` committed bytes are readable, then return a
 * pointer to them. Holds read_mutex until read_advance/read_discard.
 * Returns NULL with FBR_EINVAL if size exceeds buffer capacity. */
void *fbr_buffer_read_address(FBR_P_ struct fbr_buffer *buffer, size_t size)
{
int retval;
if (size > fbr_buffer_size(FBR_A_ buffer))
return_error(NULL, FBR_EINVAL);
fbr_mutex_lock(FBR_A_ &buffer->read_mutex);
while (fbr_buffer_bytes(FBR_A_ buffer) < size) {
retval = fbr_cond_wait(FBR_A_ &buffer->committed_cond,
&buffer->read_mutex);
assert(0 == retval);
(void)retval;
}
/* Remember the claimed size for the matching read_advance(). */
buffer->waiting_bytes = size;
return_success(fbr_buffer_data_ptr(FBR_A_ buffer));
}
/* Consume the bytes claimed by the last read_address() call, wake writers
 * waiting for space, and release the read side. */
void fbr_buffer_read_advance(FBR_P_ struct fbr_buffer *buffer)
{
fbr_vrb_take(&buffer->vrb, buffer->waiting_bytes);
fbr_cond_signal(FBR_A_ &buffer->bytes_freed_cond);
fbr_mutex_unlock(FBR_A_ &buffer->read_mutex);
}
/* Abandon the last read_address() claim without consuming anything and
 * release the read side. */
void fbr_buffer_read_discard(FBR_P_ struct fbr_buffer *buffer)
{
fbr_mutex_unlock(FBR_A_ &buffer->read_mutex);
}
/* Resize the underlying virtual ring buffer, excluding readers and
 * writers for the duration (both mutexes held, read first then write).
 * Returns 0, or -1 with FBR_EBUFFERMMAP on failure. */
int fbr_buffer_resize(FBR_P_ struct fbr_buffer *buffer, size_t size)
{
int rv;
fbr_mutex_lock(FBR_A_ &buffer->read_mutex);
fbr_mutex_lock(FBR_A_ &buffer->write_mutex);
rv = fbr_vrb_resize(&buffer->vrb, size, fctx->__p->buffer_file_pattern);
fbr_mutex_unlock(FBR_A_ &buffer->write_mutex);
fbr_mutex_unlock(FBR_A_ &buffer->read_mutex);
if (rv)
return_error(-1, FBR_EBUFFERMMAP);
return_success(0);
}
/* Create a message queue holding up to `size` pointers, implemented as a
 * circular buffer (one slot is kept unused to distinguish full from
 * empty). Returns the queue, or NULL on allocation failure -- previously
 * both calloc() results were used unchecked.
 */
struct fbr_mq *fbr_mq_create(FBR_P_ size_t size, int flags)
{
	struct fbr_mq *mq;

	mq = calloc(1, sizeof(*mq));
	if (NULL == mq)
		return NULL;
	mq->fctx = fctx;
	mq->max = size + 1; /* One element is always unused */
	mq->rb = calloc(mq->max, sizeof(void *));
	if (NULL == mq->rb) {
		free(mq);
		return NULL;
	}
	mq->flags = flags;

	fbr_cond_init(FBR_A_ &mq->bytes_available_cond);
	fbr_cond_init(FBR_A_ &mq->bytes_freed_cond);
	return mq;
}
/* Drop all queued messages (slots are zeroed, head/tail reset).
 * When wake_up_writers is non-zero, one blocked writer is woken.
 * NOTE(review): signals bytes_available_cond, the condition readers wait
 * on -- confirm that is the intended condvar for waking writers. */
void fbr_mq_clear(struct fbr_mq *mq, int wake_up_writers)
{
memset(mq->rb, 0x00, mq->max * sizeof(void *));
mq->head = 0;
mq->tail = 0;
if (wake_up_writers)
fbr_cond_signal(mq->fctx, &mq->bytes_available_cond);
}
/* Push a message, blocking the fiber while the ring is full (head+1 would
 * collide with tail), then wake one waiting reader. */
void fbr_mq_push(struct fbr_mq *mq, void *obj)
{
unsigned next;
while ((next = ((mq->head + 1) % mq->max )) == mq->tail)
fbr_cond_wait(mq->fctx, &mq->bytes_freed_cond, NULL);
mq->rb[mq->head] = obj;
mq->head = next;
fbr_cond_signal(mq->fctx, &mq->bytes_available_cond);
}
/* Non-blocking push. Returns 0 on success, -1 if the ring is full. */
int fbr_mq_try_push(struct fbr_mq *mq, void *obj)
{
unsigned next = mq->head + 1;
if (next >= mq->max)
next = 0;
/* Circular buffer is full */
if (next == mq->tail)
return -1;
mq->rb[mq->head] = obj;
mq->head = next;
fbr_cond_signal(mq->fctx, &mq->bytes_available_cond);
return 0;
}
/* Block the fiber until the ring has room for at least one push.
 * Does not reserve the slot -- a subsequent try_push may still race. */
void fbr_mq_wait_push(struct fbr_mq *mq)
{
while (((mq->head + 1) % mq->max) == mq->tail)
fbr_cond_wait(mq->fctx, &mq->bytes_freed_cond, NULL);
}
/* Dequeue the tail element (caller must have checked non-emptiness),
 * clear its slot, advance tail with wraparound, and wake one writer. */
static void *mq_do_pop(struct fbr_mq *mq)
{
void *obj;
unsigned next;
obj = mq->rb[mq->tail];
mq->rb[mq->tail] = NULL;
next = mq->tail + 1;
if (next >= mq->max)
next = 0;
mq->tail = next;
fbr_cond_signal(mq->fctx, &mq->bytes_freed_cond);
return obj;
}
/* Pop a message, blocking the fiber while the ring is empty. */
void *fbr_mq_pop(struct fbr_mq *mq)
{
/* if the head isn't ahead of the tail, we don't have any elements */
while (mq->head == mq->tail)
fbr_cond_wait(mq->fctx, &mq->bytes_available_cond, NULL);
return mq_do_pop(mq);
}
/* Non-blocking pop into *obj. Returns 0 on success, -1 if empty. */
int fbr_mq_try_pop(struct fbr_mq *mq, void **obj)
{
/* if the head isn't ahead of the tail, we don't have any elements */
if (mq->head == mq->tail)
return -1;
*obj = mq_do_pop(mq);
return 0;
}
/* Block the fiber until at least one message is queued.
 * Does not consume it -- a subsequent try_pop may still race. */
void fbr_mq_wait_pop(struct fbr_mq *mq)
{
/* if the head isn't ahead of the tail, we don't have any elements */
while (mq->head == mq->tail)
fbr_cond_wait(mq->fctx, &mq->bytes_available_cond, NULL);
}
/* Destroy a message queue created by fbr_mq_create(). Any queued
 * pointers are dropped, not freed -- the caller owns them. */
void fbr_mq_destroy(struct fbr_mq *mq)
{
fbr_cond_destroy(mq->fctx, &mq->bytes_freed_cond);
fbr_cond_destroy(mq->fctx, &mq->bytes_available_cond);
free(mq->rb);
free(mq);
}
/* Return the opaque user-data pointer of the fiber identified by `id`,
 * or NULL (with f_errno set) if the id is stale. */
void *fbr_get_user_data(FBR_P_ fbr_id_t id)
{
struct fbr_fiber *fiber;
unpack_transfer_errno(NULL, &fiber, id);
return_success(fiber->user_data);
}
/* Attach an opaque user-data pointer to the fiber identified by `id`.
 * Returns 0, or -1 if the id is stale. */
int fbr_set_user_data(FBR_P_ fbr_id_t id, void *data)
{
struct fbr_fiber *fiber;
unpack_transfer_errno(-1, &fiber, id);
fiber->user_data = data;
return_success(0);
}
/* Register a destructor to run when the CURRENT fiber is reclaimed.
 * Destructors run in registration order (tail insert). */
void fbr_destructor_add(FBR_P_ struct fbr_destructor *dtor)
{
struct fbr_fiber *fiber = CURRENT_FIBER;
TAILQ_INSERT_TAIL(&fiber->destructors, dtor, entries);
dtor->active = 1;
}
/* Unregister a destructor from the CURRENT fiber; a no-op if it is not
 * active. When `call` is non-zero the destructor is invoked once before
 * being deactivated. */
void fbr_destructor_remove(FBR_P_ struct fbr_destructor *dtor,
int call)
{
struct fbr_fiber *fiber = CURRENT_FIBER;
if (0 == dtor->active)
return;
TAILQ_REMOVE(&fiber->destructors, dtor, entries);
if (call)
dtor->func(FBR_A_ dtor->arg);
dtor->active = 0;
}
/* Find-first-set on a 64-bit value: returns the 1-based index of the
 * least significant set bit, or 0 if val is 0. */
static inline int wrap_ffsll(uint64_t val)
{
/* TODO: Add some check for the existance of this builtin */
return __builtin_ffsll(val);
}
/* Return non-zero if `key` is currently registered (its bit is cleared in
 * the free mask). Uses 1ULL to match register_key(): key_free_mask is
 * 64-bit and "1 << key" is undefined for key >= 31 on 32-bit int. */
static inline int is_key_registered(FBR_P_ fbr_key_t key)
{
	return 0 == (fctx->__p->key_free_mask & (1ULL << key));
}
/* Mark `key` as in use by clearing its bit in the 64-bit free mask. */
static inline void register_key(FBR_P_ fbr_key_t key)
{
fctx->__p->key_free_mask &= ~(1ULL << key);
}
/* Mark `key` as free again by setting its bit in the free mask. Uses 1ULL
 * to match register_key(): key_free_mask is 64-bit and "1 << key" is
 * undefined for key >= 31 on 32-bit int. */
static inline void unregister_key(FBR_P_ fbr_key_t key)
{
	fctx->__p->key_free_mask |= (1ULL << key);
}
/* Allocate the lowest free fiber-local-storage key.
 * NOTE(review): exhaustion (no free key) is only caught by the assert --
 * in NDEBUG builds a full mask would yield key == (fbr_key_t)-1. */
int fbr_key_create(FBR_P_ fbr_key_t *key_ptr)
{
fbr_key_t key = wrap_ffsll(fctx->__p->key_free_mask) - 1;
assert(key < FBR_MAX_KEY);
register_key(FBR_A_ key);
*key_ptr = key;
return_success(0);
}
/* Release a fiber-local-storage key. Returns 0, or -1 with FBR_ENOKEY if
 * the key was never registered. */
int fbr_key_delete(FBR_P_ fbr_key_t key)
{
if (!is_key_registered(FBR_A_ key))
return_error(-1, FBR_ENOKEY);
unregister_key(FBR_A_ key);
return_success(0);
}
/* Store `value` under `key` in the given fiber's local storage.
 * Returns 0; -1 if the fiber id is stale or the key is unregistered. */
int fbr_key_set(FBR_P_ fbr_id_t id, fbr_key_t key, void *value)
{
struct fbr_fiber *fiber;
unpack_transfer_errno(-1, &fiber, id);
if (!is_key_registered(FBR_A_ key))
return_error(-1, FBR_ENOKEY);
fiber->key_data[key] = value;
return_success(0);
}
/* Fetch the value stored under `key` in the given fiber's local storage.
 * Returns NULL (with f_errno) if the id is stale or key unregistered --
 * indistinguishable from a legitimately stored NULL. */
void *fbr_key_get(FBR_P_ fbr_id_t id, fbr_key_t key)
{
struct fbr_fiber *fiber;
unpack_transfer_errno(NULL, &fiber, id);
if (!is_key_registered(FBR_A_ key))
return_error(NULL, FBR_ENOKEY);
return fiber->key_data[key];
}
/* Return the name of the fiber identified by `id` (pointer into the
 * fiber's own buffer), or NULL if the id is stale. */
const char *fbr_get_name(FBR_P_ fbr_id_t id)
{
struct fbr_fiber *fiber;
unpack_transfer_errno(NULL, &fiber, id);
return_success(fiber->name);
}
/* Rename the fiber identified by `id`, truncating to
 * FBR_MAX_FIBER_NAME - 1 characters. Returns 0, or -1 if the id is stale.
 * Fix vs. previous revision: strncpy() does not NUL-terminate when the
 * source fills the limit, leaving a previously longer name's tail bytes
 * in place -- terminate explicitly.
 */
int fbr_set_name(FBR_P_ fbr_id_t id, const char *name)
{
	struct fbr_fiber *fiber;
	unpack_transfer_errno(-1, &fiber, id);
	strncpy(fiber->name, name, FBR_MAX_FIBER_NAME - 1);
	fiber->name[FBR_MAX_FIBER_NAME - 1] = '\0';
	return_success(0);
}
/* Create a pipe, returning the read end in *r and the write end in *w.
 * Returns 0, or -1 with FBR_ESYSTEM (errno preserved) on failure. */
static int make_pipe(FBR_P_ int *r, int*w)
{
int fds[2];
int retval;
retval = pipe(fds);
if (-1 == retval)
return_error(-1, FBR_ESYSTEM);
*r = fds[0];
*w = fds[1];
return_success(0);
}
/* Spawn `filename` with argv/envp, optionally chdir'ing to working_dir
 * first. For each non-NULL of stdin_w_ptr/stdout_r_ptr/stderr_r_ptr a
 * pipe is wired to the child's corresponding stream and the parent's end
 * (made non-blocking) is returned through the pointer; NULL pointers get
 * the child's stream redirected to /dev/null instead.
 * Returns the child's pid, or -1 with FBR_ESYSTEM (all fds cleaned up).
 *
 * Fix vs. previous revision: after each `devnull = open(...)` the code
 * tested the stale `retval` instead of `devnull`, so open() failures went
 * undetected and dup2(-1, ...) would be attempted.
 */
pid_t fbr_popen3(FBR_P_ const char *filename, char *const argv[],
		char *const envp[], const char *working_dir,
		int *stdin_w_ptr, int *stdout_r_ptr, int *stderr_r_ptr)
{
	pid_t pid;
	int stdin_r = -1, stdin_w = -1;
	int stdout_r = -1, stdout_w = -1;
	int stderr_r = -1, stderr_w = -1;
	int devnull = -1;
	int retval;

	/* Create only the pipes the caller asked for. */
	retval = (stdin_w_ptr ? make_pipe(FBR_A_ &stdin_r, &stdin_w) : 0);
	if (retval)
		goto error;
	retval = (stdout_r_ptr ? make_pipe(FBR_A_ &stdout_r, &stdout_w) : 0);
	if (retval)
		goto error;
	retval = (stderr_r_ptr ? make_pipe(FBR_A_ &stderr_r, &stderr_w) : 0);
	if (retval)
		goto error;

	pid = fork();
	if (-1 == pid)
		goto error;
	if (0 == pid) {
		/* Child: detach from the inherited event loop before exec. */
		ev_break(EV_DEFAULT, EVBREAK_ALL);
		if (stdin_w_ptr) {
			retval = close(stdin_w);
			if (-1 == retval)
				goto error;
			retval = dup2(stdin_r, STDIN_FILENO);
			if (-1 == retval)
				goto error;
		} else {
			devnull = open("/dev/null", O_RDONLY);
			if (-1 == devnull)
				goto error;
			retval = dup2(devnull, STDIN_FILENO);
			if (-1 == retval)
				goto error;
			retval = close(devnull);
			if (-1 == retval)
				goto error;
		}

		if (stdout_r_ptr) {
			retval = close(stdout_r);
			if (-1 == retval)
				goto error;
			retval = dup2(stdout_w, STDOUT_FILENO);
			if (-1 == retval)
				goto error;
		} else {
			devnull = open("/dev/null", O_WRONLY);
			if (-1 == devnull)
				goto error;
			retval = dup2(devnull, STDOUT_FILENO);
			if (-1 == retval)
				goto error;
			retval = close(devnull);
			if (-1 == retval)
				goto error;
		}

		if (stderr_r_ptr) {
			retval = close(stderr_r);
			if (-1 == retval)
				goto error;
			retval = dup2(stderr_w, STDERR_FILENO);
			if (-1 == retval)
				goto error;
		} else {
			devnull = open("/dev/null", O_WRONLY);
			if (-1 == devnull)
				goto error;
			retval = dup2(devnull, STDERR_FILENO);
			if (-1 == retval)
				goto error;
			retval = close(devnull);
			if (-1 == retval)
				goto error;
		}

		if (working_dir) {
			retval = chdir(working_dir);
			if (-1 == retval)
				goto error;
		}

		retval = execve(filename, argv, envp);
		if (-1 == retval)
			goto error;
		errx(EXIT_FAILURE, "execve failed without error code");
	}

	/* Parent: close the child's ends, make ours non-blocking. */
	if (stdin_w_ptr) {
		retval = close(stdin_r);
		if (-1 == retval)
			goto error;
		retval = fbr_fd_nonblock(FBR_A_ stdin_w);
		if (retval)
			goto error;
	}
	if (stdout_r_ptr) {
		retval = close(stdout_w);
		if (-1 == retval)
			goto error;
		retval = fbr_fd_nonblock(FBR_A_ stdout_r);
		if (retval)
			goto error;
	}
	if (stderr_r_ptr) {
		retval = close(stderr_w);
		if (-1 == retval)
			goto error;
		retval = fbr_fd_nonblock(FBR_A_ stderr_r);
		if (retval)
			goto error;
	}

	fbr_log_d(FBR_A_ "child pid %d has been launched", pid);
	if (stdin_w_ptr)
		*stdin_w_ptr = stdin_w;
	if (stdout_r_ptr)
		*stdout_r_ptr = stdout_r;
	if (stderr_r_ptr)
		*stderr_r_ptr = stderr_r;
	return pid;

error:
	if (0 <= devnull)
		close(devnull);
	if (0 <= stdin_r)
		close(stdin_r);
	if (0 <= stdin_w)
		close(stdin_w);
	if (0 <= stdout_r)
		close(stdout_r);
	if (0 <= stdout_w)
		close(stdout_w);
	if (0 <= stderr_r)
		close(stderr_r);
	if (0 <= stderr_w)
		close(stderr_w);
	return_error(-1, FBR_ESYSTEM);
}
/* Fiber destructor: stop the ev_child watcher if the waiting fiber is
 * reclaimed before the child exits. */
static void watcher_child_dtor(_unused_ FBR_P_ void *_arg)
{
struct ev_child *w = _arg;
ev_child_stop(fctx->__p->loop, w);
}
/* Block the fiber until the child with the given pid exits; libev reaps
 * it internally. Returns the child's raw wait status (rstatus). */
int fbr_waitpid(FBR_P_ pid_t pid)
{
struct ev_child child;
struct fbr_ev_watcher watcher;
struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER;
/* trace=0: report only termination, not stop/continue. */
ev_child_init(&child, NULL, pid, 0.);
ev_child_start(fctx->__p->loop, &child);
dtor.func = watcher_child_dtor;
dtor.arg = &child;
fbr_destructor_add(FBR_A_ &dtor);
fbr_ev_watcher_init(FBR_A_ &watcher, (ev_watcher *)&child);
fbr_ev_wait_one(FBR_A_ &watcher.ev_base);
fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */);
ev_child_stop(fctx->__p->loop, &child);
return_success(child.rstatus);
}
/* Fork-and-exec `filename` (optionally chdir'ing first) and block the
 * fiber until it exits, without redirecting any stdio.
 * Returns the child's wait status, or -1 with FBR_ESYSTEM if fork fails. */
int fbr_system(FBR_P_ const char *filename, char *const argv[],
char *const envp[], const char *working_dir)
{
pid_t pid;
int retval;
pid = fork();
if (-1 == pid)
return_error(-1, FBR_ESYSTEM);
if (0 == pid) {
/* Child */
ev_break(EV_DEFAULT, EVBREAK_ALL);
if (working_dir) {
retval = chdir(working_dir);
if (-1 == retval)
err(EXIT_FAILURE, "chdir");
}
retval = execve(filename, argv, envp);
if (-1 == retval)
err(EXIT_FAILURE, "execve");
errx(EXIT_FAILURE, "execve failed without error code");
}
/* Parent */
fbr_log_d(FBR_A_ "child pid %d has been launched", pid);
return fbr_waitpid(FBR_A_ pid);
}
#ifdef FBR_EIO_ENABLED
/* Glue between libeio's thread pool and the libev loop: eio wakes the
 * loop via an ev_async; leftover results are drained via an ev_idle. */
static struct ev_loop *eio_loop;
static ev_idle repeat_watcher;
static ev_async ready_watcher;
/* idle watcher callback, only used when eio_poll */
/* didn't handle all results in one call */
static void repeat(EV_P_ ev_idle *w, _unused_ int revents)
{
if (eio_poll () != -1)
ev_idle_stop(EV_A_ w);
}
/* eio has some results, process them */
static void ready(EV_P_ _unused_ ev_async *w, _unused_ int revents)
{
/* eio_poll() == -1 means more results remain; keep draining from the
 * idle watcher so the loop stays responsive. */
if (eio_poll() == -1)
ev_idle_start(EV_A_ &repeat_watcher);
}
/* wake up the event loop */
static void want_poll()
{
ev_async_send(eio_loop, &ready_watcher);
}
/* One-time global initialization of the libeio bridge on the default
 * loop. Calling it twice aborts. The ev_unref keeps the always-on
 * ready_watcher from holding the loop alive by itself. */
void fbr_eio_init()
{
if (NULL != eio_loop) {
fprintf(stderr, "libevfibers: fbr_eio_init called twice");
abort();
}
eio_loop = EV_DEFAULT;
ev_idle_init(&repeat_watcher, repeat);
ev_async_init(&ready_watcher, ready);
ev_async_start(eio_loop, &ready_watcher);
ev_unref(eio_loop);
eio_init(want_poll, 0);
}
/* Initialize an event descriptor of type FBR_EV_EIO bound to `req`. */
void fbr_ev_eio_init(FBR_P_ struct fbr_ev_eio *ev, eio_req *req)
{
ev_base_init(FBR_A_ &ev->ev_base, FBR_EV_EIO);
ev->req = req;
}
/* Fiber destructor: cancel the outstanding eio request if the waiting
 * fiber is reclaimed before it completes. */
static void eio_req_dtor(_unused_ FBR_P_ void *_arg)
{
eio_req *req = _arg;
eio_cancel(req);
}
/* libeio completion callback: runs in the root fiber, posts the event to
 * the fiber that issued the request and transfers control to it.
 * Cancelled requests (fiber already reclaimed) are ignored; a live
 * request pointing at a dead fiber id is a fatal inconsistency. */
static int fiber_eio_cb(eio_req *req)
{
struct fbr_fiber *fiber;
struct fbr_ev_eio *ev = req->data;
/* Recover fctx from the event base -- this callback has no FBR_P_. */
struct fbr_context *fctx = ev->ev_base.fctx;
int retval;
ENSURE_ROOT_FIBER;
/* Balances the ev_ref taken in FBR_EIO_PREP. */
ev_unref(eio_loop);
if (EIO_CANCELLED(req))
return 0;
retval = fbr_id_unpack(FBR_A_ &fiber, ev->ev_base.id);
if (-1 == retval) {
fbr_log_e(FBR_A_ "libevfibers: fiber is about to be called by"
" the eio callback, but it's id is not valid: %s",
fbr_strerror(FBR_A_ fctx->f_errno));
abort();
}
post_ev(FBR_A_ fiber, &ev->ev_base);
retval = fbr_transfer(FBR_A_ fbr_id_pack(fiber));
assert(0 == retval);
return 0;
}
/* Shared boilerplate for the fbr_eio_* wrappers below.
 * PREP declares the locals and takes an ev_ref so the loop stays alive
 * while the request is in flight (released in fiber_eio_cb). */
#define FBR_EIO_PREP \
eio_req *req; \
struct fbr_ev_eio e_eio; \
int retval; \
struct fbr_destructor dtor = FBR_DESTRUCTOR_INITIALIZER; \
ev_ref(eio_loop);
/* WAIT blocks the fiber until the request completes, guarding it with a
 * cancel-on-reclaim destructor. A NULL req means submission failed. */
#define FBR_EIO_WAIT \
if (NULL == req) { \
ev_unref(eio_loop); \
return_error(-1, FBR_EEIO); \
} \
dtor.func = eio_req_dtor; \
dtor.arg = req; \
fbr_destructor_add(FBR_A_ &dtor); \
fbr_ev_eio_init(FBR_A_ &e_eio, req); \
retval = fbr_ev_wait_one(FBR_A_ &e_eio.ev_base); \
fbr_destructor_remove(FBR_A_ &dtor, 0 /* Call it? */); \
if (retval) \
return retval;
/* RESULT_CHECK maps a negative eio result to -1/FBR_ESYSTEM with errno. */
#define FBR_EIO_RESULT_CHECK \
if (0 > req->result) { \
errno = req->errorno; \
return_error(-1, FBR_ESYSTEM); \
}
#define FBR_EIO_RESULT_RET \
FBR_EIO_RESULT_CHECK \
return req->result;
/* Path-based fiber wrappers over libeio: each submits the request to the
 * eio thread pool, parks the calling fiber until completion (see the
 * FBR_EIO_* macros above), and returns the request result, mapping
 * failures to -1/FBR_ESYSTEM with errno set from the request. */
int fbr_eio_open(FBR_P_ const char *path, int flags, mode_t mode, int pri)
{
FBR_EIO_PREP;
req = eio_open(path, flags, mode, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
int fbr_eio_truncate(FBR_P_ const char *path, off_t offset, int pri)
{
FBR_EIO_PREP;
req = eio_truncate(path, offset, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
int fbr_eio_chown(FBR_P_ const char *path, uid_t uid, gid_t gid, int pri)
{
FBR_EIO_PREP;
req = eio_chown(path, uid, gid, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
int fbr_eio_chmod(FBR_P_ const char *path, mode_t mode, int pri)
{
FBR_EIO_PREP;
req = eio_chmod(path, mode, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
int fbr_eio_mkdir(FBR_P_ const char *path, mode_t mode, int pri)
{
FBR_EIO_PREP;
req = eio_mkdir(path, mode, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
int fbr_eio_rmdir(FBR_P_ const char *path, int pri)
{
FBR_EIO_PREP;
req = eio_rmdir(path, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
int fbr_eio_unlink(FBR_P_ const char *path, int pri)
{
FBR_EIO_PREP;
req = eio_unlink(path, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
int fbr_eio_utime(FBR_P_ const char *path, eio_tstamp atime, eio_tstamp mtime,
int pri)
{
FBR_EIO_PREP;
req = eio_utime(path, atime, mtime, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
int fbr_eio_mknod(FBR_P_ const char *path, mode_t mode, dev_t dev, int pri)
{
FBR_EIO_PREP;
req = eio_mknod(path, mode, dev, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
int fbr_eio_link(FBR_P_ const char *path, const char *new_path, int pri)
{
FBR_EIO_PREP;
req = eio_link(path, new_path, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
int fbr_eio_symlink(FBR_P_ const char *path, const char *new_path, int pri)
{
FBR_EIO_PREP;
req = eio_symlink(path, new_path, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
int fbr_eio_rename(FBR_P_ const char *path, const char *new_path, int pri)
{
FBR_EIO_PREP;
req = eio_rename(path, new_path, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/*
 * Fiber-blocking wrappers around libeio descriptor/memory operations.
 * Same FBR_EIO_PREP / eio_* / FBR_EIO_WAIT / FBR_EIO_RESULT_RET pattern as
 * the path-based wrappers above; only the submitted request type differs.
 */

/* Fiber-blocking wrapper around eio_mlock (cf. mlock(2)). */
int fbr_eio_mlock(FBR_P_ void *addr, size_t length, int pri)
{
FBR_EIO_PREP;
req = eio_mlock(addr, length, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_close (cf. close(2)). */
int fbr_eio_close(FBR_P_ int fd, int pri)
{
FBR_EIO_PREP;
req = eio_close(fd, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_sync (cf. sync(2)). */
int fbr_eio_sync(FBR_P_ int pri)
{
FBR_EIO_PREP;
req = eio_sync(pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_fsync (cf. fsync(2)). */
int fbr_eio_fsync(FBR_P_ int fd, int pri)
{
FBR_EIO_PREP;
req = eio_fsync(fd, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_fdatasync (cf. fdatasync(2)). */
int fbr_eio_fdatasync(FBR_P_ int fd, int pri)
{
FBR_EIO_PREP;
req = eio_fdatasync(fd, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_futime (cf. futimes(2)). */
int fbr_eio_futime(FBR_P_ int fd, eio_tstamp atime, eio_tstamp mtime, int pri)
{
FBR_EIO_PREP;
req = eio_futime(fd, atime, mtime, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_ftruncate (cf. ftruncate(2)). */
int fbr_eio_ftruncate(FBR_P_ int fd, off_t offset, int pri)
{
FBR_EIO_PREP;
req = eio_ftruncate(fd, offset, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_fchmod (cf. fchmod(2)). */
int fbr_eio_fchmod(FBR_P_ int fd, mode_t mode, int pri)
{
FBR_EIO_PREP;
req = eio_fchmod(fd, mode, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_fchown (cf. fchown(2)). */
int fbr_eio_fchown(FBR_P_ int fd, uid_t uid, gid_t gid, int pri)
{
FBR_EIO_PREP;
req = eio_fchown(fd, uid, gid, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_dup2 (cf. dup2(2)). */
int fbr_eio_dup2(FBR_P_ int fd, int fd2, int pri)
{
FBR_EIO_PREP;
req = eio_dup2(fd, fd2, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/*
 * Fiber-blocking wrapper around eio_seek (cf. lseek(2)).
 * Unlike the other wrappers this returns the resulting file offset
 * (req->offs) instead of req->result. NOTE(review): FBR_EIO_RESULT_CHECK
 * presumably returns early on a failed request -- confirm against the
 * macro definition, which is outside this chunk.
 */
ssize_t fbr_eio_seek(FBR_P_ int fd, off_t offset, int whence, int pri)
{
FBR_EIO_PREP;
req = eio_seek(fd, offset, whence, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_CHECK;
return req->offs;
}
/* Fiber-blocking wrapper around eio_read (cf. pread(2)); reads into buf. */
ssize_t fbr_eio_read(FBR_P_ int fd, void *buf, size_t length, off_t offset,
int pri)
{
FBR_EIO_PREP;
req = eio_read(fd, buf, length, offset, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_write (cf. pwrite(2)); writes from buf. */
ssize_t fbr_eio_write(FBR_P_ int fd, void *buf, size_t length, off_t offset,
int pri)
{
FBR_EIO_PREP;
req = eio_write(fd, buf, length, offset, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_mlockall (cf. mlockall(2)). */
int fbr_eio_mlockall(FBR_P_ int flags, int pri)
{
FBR_EIO_PREP;
req = eio_mlockall(flags, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_msync (cf. msync(2)). */
int fbr_eio_msync(FBR_P_ void *addr, size_t length, int flags, int pri)
{
FBR_EIO_PREP;
req = eio_msync(addr, length, flags, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/*
 * Fiber-blocking wrapper around eio_readlink (cf. readlink(2)).
 * libeio stores the link target in req->ptr2; up to min(size, result)
 * bytes are copied into the caller's buf and the full result length is
 * returned. NOTE(review): like readlink(2), buf is NOT NUL-terminated
 * when the target fills (or exceeds) the buffer; `min` is presumably a
 * helper macro defined elsewhere in this file -- confirm.
 */
int fbr_eio_readlink(FBR_P_ const char *path, char *buf, size_t size, int pri)
{
FBR_EIO_PREP;
req = eio_readlink(path, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_CHECK;
strncpy(buf, req->ptr2, min(size, (size_t)req->result));
return req->result;
}
/*
 * Fiber-blocking wrapper around eio_realpath (cf. realpath(3)).
 * Same copy-out convention as fbr_eio_readlink above: at most size bytes
 * are copied and buf may not be NUL-terminated on truncation.
 */
int fbr_eio_realpath(FBR_P_ const char *path, char *buf, size_t size, int pri)
{
FBR_EIO_PREP;
req = eio_realpath(path, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_CHECK;
strncpy(buf, req->ptr2, min(size, (size_t)req->result));
return req->result;
}
/*
 * Fiber-blocking stat-family wrappers. libeio places the resulting stat
 * structure in req->ptr2; on success it is copied out into the caller's
 * statdata and the request result is returned.
 */

/* Fiber-blocking wrapper around eio_stat (cf. stat(2)). */
int fbr_eio_stat(FBR_P_ const char *path, EIO_STRUCT_STAT *statdata, int pri)
{
EIO_STRUCT_STAT *st;
FBR_EIO_PREP;
req = eio_stat(path, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_CHECK;
st = (EIO_STRUCT_STAT *)req->ptr2;
memcpy(statdata, st, sizeof(*st));
return req->result;
}
/* Fiber-blocking wrapper around eio_lstat (cf. lstat(2)); does not follow
 * symlinks. */
int fbr_eio_lstat(FBR_P_ const char *path, EIO_STRUCT_STAT *statdata, int pri)
{
EIO_STRUCT_STAT *st;
FBR_EIO_PREP;
req = eio_lstat(path, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_CHECK;
st = (EIO_STRUCT_STAT *)req->ptr2;
memcpy(statdata, st, sizeof(*st));
return req->result;
}
/* Fiber-blocking wrapper around eio_fstat (cf. fstat(2)). */
int fbr_eio_fstat(FBR_P_ int fd, EIO_STRUCT_STAT *statdata, int pri)
{
EIO_STRUCT_STAT *st;
FBR_EIO_PREP;
req = eio_fstat(fd, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_CHECK;
st = (EIO_STRUCT_STAT *)req->ptr2;
memcpy(statdata, st, sizeof(*st));
return req->result;
}
/* Fiber-blocking wrapper around eio_statvfs (cf. statvfs(3)). */
int fbr_eio_statvfs(FBR_P_ const char *path, EIO_STRUCT_STATVFS *statdata,
int pri)
{
EIO_STRUCT_STATVFS *st;
FBR_EIO_PREP;
req = eio_statvfs(path, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_CHECK;
st = (EIO_STRUCT_STATVFS *)req->ptr2;
memcpy(statdata, st, sizeof(*st));
return req->result;
}
/* Fiber-blocking wrapper around eio_fstatvfs (cf. fstatvfs(3)). */
int fbr_eio_fstatvfs(FBR_P_ int fd, EIO_STRUCT_STATVFS *statdata, int pri)
{
EIO_STRUCT_STATVFS *st;
FBR_EIO_PREP;
req = eio_fstatvfs(fd, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_CHECK;
st = (EIO_STRUCT_STATVFS *)req->ptr2;
memcpy(statdata, st, sizeof(*st));
return req->result;
}
/* Fiber-blocking wrapper around eio_sendfile (cf. sendfile(2)): copies
 * length bytes from in_fd starting at in_offset into out_fd. */
int fbr_eio_sendfile(FBR_P_ int out_fd, int in_fd, off_t in_offset,
size_t length, int pri)
{
FBR_EIO_PREP;
req = eio_sendfile(out_fd, in_fd, in_offset, length, pri, fiber_eio_cb,
&e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_readahead (cf. readahead(2)). */
int fbr_eio_readahead(FBR_P_ int fd, off_t offset, size_t length, int pri)
{
FBR_EIO_PREP;
req = eio_readahead(fd, offset, length, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_syncfs (cf. syncfs(2)). */
int fbr_eio_syncfs(FBR_P_ int fd, int pri)
{
FBR_EIO_PREP;
req = eio_syncfs(fd, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_sync_file_range
 * (cf. sync_file_range(2)). */
int fbr_eio_sync_file_range(FBR_P_ int fd, off_t offset, size_t nbytes,
unsigned int flags, int pri)
{
FBR_EIO_PREP;
req = eio_sync_file_range(fd, offset, nbytes, flags, pri, fiber_eio_cb,
&e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/* Fiber-blocking wrapper around eio_fallocate (cf. fallocate(2)). */
int fbr_eio_fallocate(FBR_P_ int fd, int mode, off_t offset, off_t len, int pri)
{
FBR_EIO_PREP;
req = eio_fallocate(fd, mode, offset, len, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
/*
 * Trampoline for eio_custom requests: invokes the user-supplied function
 * and stores its return value as the request result.
 * NOTE(review): eio_custom execute callbacks run on a libeio worker
 * thread, not in fiber context, so the user function must be thread-safe
 * and must not call fbr_* APIs -- confirm against libeio documentation.
 */
static void custom_execute_cb(eio_req *req)
{
struct fbr_ev_eio *ev = req->data;
req->result = ev->custom_func(ev->custom_arg);
}
/*
 * Runs a user-supplied function asynchronously via libeio while the
 * calling fiber blocks; returns the function's return value. The function
 * pointer and its argument are stashed in the event struct so that
 * custom_execute_cb can retrieve them through req->data.
 */
eio_ssize_t fbr_eio_custom(FBR_P_ fbr_eio_custom_func_t func, void *data,
int pri)
{
FBR_EIO_PREP;
e_eio.custom_func = func;
e_eio.custom_arg = data;
req = eio_custom(custom_execute_cb, pri, fiber_eio_cb, &e_eio);
FBR_EIO_WAIT;
FBR_EIO_RESULT_RET;
}
#else
/*
 * Stub used when libeio support was not compiled in: any attempt to
 * initialize the eio subsystem logs an error and aborts the process.
 */
void fbr_eio_init(FBR_PU)
{
fbr_log_e(FBR_A_ "libevfibers: libeio support is not compiled");
abort();
}
#endif
<|start_filename|>include/evfibers/fiber.h<|end_filename|>
/********************************************************************
Copyright 2013 <NAME> <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
********************************************************************/
/** \mainpage About libevfibers
*
* \section intro_sec Introduction
*
* libevfibers is a small C fiber library that uses libev based event loop and
* libcoro based coroutine context switching. As libcoro alone is barely enough
* to do something useful, this project aims at building a complete fiber API
* around it while leveraging libev's high performance and flexibility.
*
* You may ask why yet another fiber library, there are GNU Pth, State threads,
* etc. When I was looking at their API, I found it being too restrictive: you
* cannot use other event loop. For GNU Pth it's solely select based
* implementation, as for state threads --- they provide several
* implementations including poll, epoll, select though event loop is hidden
* underneath the public API and is not usable directly. I found another
* approach more sensible, namely: just put fiber layer on top of well-known
* and robust event loop implementation. Marc Lehmann already provided all the
* necessary to do the job: event loop library libev with coroutine library
* libcoro.
*
* So what's so cool about fibers? Fibers are user-space threads. User-space
* means that context switching from one fiber to an other fiber takes no
* effort from the kernel. There are different ways to achieve this, but it's
* not relevant here since libcoro already does all the dirty job. At top level
* you have a set of functions that execute on private stacks that do not
* intersect. Whenever such function is going to do some blocking operation,
* i.e. socket read, it calls fiber library wrapper, that asks event loop to
* transfer execution to this function whenever some data arrives, then it
* yields execution to other fiber. From the function's point of view it runs
* in exclusive mode and blocks on all operations, but really other such
* functions execute while this one is waiting. Typically most of them are
* waiting for something and event loop dispatches the events.
*
* This approach helps a lot. Imagine that you have some function that requires
* 3 events. In classic asynchronous model you will have to arrange your
* function in 3 callbacks and register them in the event loop. On the other
* hand having one function waiting for 3 events in ``blocking'' fashion is
* both more readable and maintainable.
*
* Then why use event loop when you have fancy callback-less fiber wrappers?
* Sometimes you just need a function that will set a flag in some object when
* a timer times out. Creating a fiber solely for this simple task is a bit
* awkward.
*
* libevfibers allows you to use fiber style wrappers for blocking operations
* as well as fall back to usual event loop style programming when you need it.
*
* \section install_sec Installation
*
* \subsection requirements_ssec Requirements
*
* To build this documentation properly you need to have
* [doxygen](http://www.stack.nl/~dimitri/doxygen) version >= 1.8 since it used
* markdown.
*
* To build libevfibers you need the following packages:
* - [cmake](http://www.cmake.org)
*
* CMake is a build system used to assemble this project.
* - [libev](http://software.schmorp.de/pkg/libev.html) development files
*
* Well-known and robust event loop.
* - [valgrind](http://valgrind.org) development files
*
* libevfibers makes use of client requests in valgrind to register stacks.
* - [Check](http://check.sourceforge.net) unit testing framework
*
* Strictly it's not a requirement, but you better run unit tests before
* installation.
*
* You don't need libcoro installed as it's part of source tree and will build
* along with libevfibers.
*
* As far as runtime dependencies concerned, the following is required:
* - [libev](http://software.schmorp.de/pkg/libev.html) runtime files
*
* For debian-based distributions users (i.e. Ubuntu) you can use the following
* command to install all the dependencies:
*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.sh}
* sudo apt-get install cmake libev-dev valgrind check
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*
* \subsection building_ssec Building
*
* Once you have all required packages installed you may proceed with building.
* Roughly it's done as follows:
*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.sh}
* git clone https://code.google.com/p/libevfibers
* cd libevfibers/
* mkdir build
* cd build/
* cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo ..
* make
* sudo make install
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* \subsection building_deb_ssec Building debian package
* If you are running debian-based distribution, it will be more useful to
* build a debian package and install it.
*
* The following actions will bring you there:
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.sh}
* git clone https://code.google.com/p/libevfibers
* cd libevfibers/
* dpkg-buildpackage
* sudo dpkg -i ../libevfibers?_*_*.deb ../libevfibers-dev_*_*.deb
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*
* \section contributors_sec Contributors
* libevfibers was written and designed by <NAME>.
*
* <NAME> contributed some patches, a lot of criticism and ideas.
*/
#ifndef _FBR_FIBER_H_
#define _FBR_FIBER_H_
/**
* @file evfibers/fiber.h
* This file contains all client-visible API functions for working with fibers.
*/
#ifdef __cplusplus
extern "C" {
#endif
#include <unistd.h>
#include <stdarg.h>
#include <stddef.h>
#include <string.h>
#include <stdint.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/queue.h>
#include <assert.h>
#include <ev.h>
#include <evfibers/config.h>
/**
* Maximum allowed level of fbr_transfer nesting within fibers.
*/
#define FBR_CALL_STACK_SIZE 16
/**
* Default stack size for a fiber of 64 KB.
*/
#define FBR_STACK_SIZE (64 * 1024) /* 64 KB */
/**
* @def fbr_assert
* Fiber version of classic assert.
*/
#ifdef NDEBUG
#define fbr_assert(context, expr) ((void)(0))
#else
#define fbr_assert(context, expr) \
do { \
__typeof__(expr) ex = (expr); \
if (ex) \
(void)(0); \
else { \
fbr_dump_stack(context, fbr_log_e); \
__assert_fail(__STRING(expr), __FILE__, __LINE__, __ASSERT_FUNCTION); \
} \
} while (0)
#endif
/**
* Just for convenience we have container_of macro here.
*
* Nothing specific. You can find the same one in the linux kernel tree.
*/
#define fbr_container_of(ptr, type, member) ({ \
const typeof( ((type *)0)->member ) *__mptr = (ptr); \
(type *)( (char *)__mptr - offsetof(type,member) ); \
})
struct fbr_context_private;
struct fbr_logger;
struct fbr_id_s {
uint64_t g;
void *p;
} __attribute__((packed));
/**
* Fiber ID type.
*
* For you it's just an opaque type.
*/
typedef struct fbr_id_s fbr_id_t;
extern const fbr_id_t FBR_ID_NULL;
static inline int fbr_id_eq(fbr_id_t a, fbr_id_t b)
{
	/* Ids match only when both the pointer and the generation agree. */
	if (a.p != b.p)
		return 0;
	return a.g == b.g;
}
static inline int fbr_id_isnull(fbr_id_t a)
{
	/* Null id means both fields equal those of FBR_ID_NULL. */
	return a.p == FBR_ID_NULL.p && a.g == FBR_ID_NULL.g;
}
/**
* Error codes used within the library.
*
* These constants are returned via f_errno member of fbr_context struct.
* @see fbr_context
* @see fbr_strerror
*/
enum fbr_error_code {
FBR_SUCCESS = 0,
FBR_EINVAL,
FBR_ENOFIBER,
FBR_ESYSTEM,
FBR_EBUFFERMMAP,
FBR_ENOKEY,
FBR_EPROTOBUF,
FBR_EBUFFERNOSPACE,
FBR_EEIO,
};
/**
* Library context structure, should be initialized before any other library
* calls will be performed.
* @see fbr_init
* @see fbr_destroy
* @see fbr_strerror
*/
struct fbr_context {
struct fbr_context_private *__p; /*!< pointer to internal context
structure */
enum fbr_error_code f_errno; /*!< context wide error code */
struct fbr_logger *logger; /*!< current logger */
};
/**
* Utility macro for context parameter used in function prototypes.
*/
#define FBR_P struct fbr_context *fctx
/**
* Same as FBR_P but with unused attribute.
*/
#define FBR_PU __attribute__((unused)) FBR_P
/**
* Same as FBR_P, but with comma afterwards for use in functions that accept
* more than one parameter (the first of which is the context pointer).
*/
#define FBR_P_ FBR_P,
/**
* Same as FBR_P_ but unused attribute.
*/
#define FBR_PU_ __attribute__((unused)) FBR_P_
/**
* Utility macro for context parameter passing when calling fbr_* functions.
*/
#define FBR_A fctx
/**
* Same as FBR_A, but with comma afterwards for invocations of functions that
* require more than one parameter (the first of which is the context pointer).
*/
#define FBR_A_ FBR_A,
/**
* Fiber's ``main'' function type.
* Fiber main function takes only one parameter --- the context. If you need to
* pass more context information, you shall embed fbr_context into any
* structure of your choice and calculate the base pointer using container_of
* macro.
* @see FBR_P
* @see fbr_context
*/
typedef void (*fbr_fiber_func_t)(FBR_P_ void *_arg);
/**
* (DEPRECATED) Destructor function type for the memory allocated in a fiber.
* @param [in] ptr memory pointer for memory to be destroyed
* @param [in] context user data pointer passed via fbr_alloc_set_destructor
*
* One can attach a destructor to a piece of memory allocated in a fiber. It
* will be called whenever memory is freed with original pointer allocated
* along with a user context pointer passed to it.
* @see fbr_alloc
* @see fbr_free
* @see fbr_alloc_set_destructor
*/
typedef void (*fbr_alloc_destructor_func_t)(FBR_P_ void *ptr, void *context);
/**
* Logging levels.
* @see fbr_logger
* @see fbr_context
*/
enum fbr_log_level {
FBR_LOG_ERROR = 0,
FBR_LOG_WARNING,
FBR_LOG_NOTICE,
FBR_LOG_INFO,
FBR_LOG_DEBUG
};
struct fbr_logger;
/**
* Logger function type.
* @param [in] logger currently configured logger
* @param [in] level log level of message
* @param [in] format printf-compatible format string
* @param [in] ap variadic argument list
* This function should log the message if log level suits the one configured
* in a non-blocking manner (i.e. it should not synchronously write it to
* disk).
* @see fbr_logger
* @see fbr_log_func_t
*/
typedef void (*fbr_log_func_t)(FBR_P_ struct fbr_logger *logger,
enum fbr_log_level level, const char *format, va_list ap);
/**
* Logger utility function type.
* @param [in] format printf-compatible format string
*
* This function wraps logger function invocation.
* @see fbr_logger
* @see fbr_log_func_t
*/
typedef void (*fbr_logutil_func_t)(FBR_P_ const char *format, ...);
/**
* Logger structure.
* @see fbr_logger
* @see fbr_context
*/
struct fbr_logger {
fbr_log_func_t logv; /*!< Function pointer that represents the logger */
enum fbr_log_level level; /*!< Current log level */
void *data; /*!< User data pointer */
};
/**
* Convenient function to test if certain log level will actually be logged.
*
* Useful when you need to perform some processing before logging. Wrapping
* your processing in ``if'' statement based on this macros' result can perform
* the processing only if its result will get logged.
*/
static inline int fbr_need_log(FBR_P_ enum fbr_log_level level)
{
	/* A message is logged when its level does not exceed the
	 * currently configured one. */
	enum fbr_log_level current = fctx->logger->level;
	return current >= level;
}
/**
* Convenient function to set current log level.
*/
static inline void fbr_set_log_level(FBR_P_ enum fbr_log_level desired_level)
{
	/* Update the level on the currently installed logger. */
	struct fbr_logger *logger = fctx->logger;
	logger->level = desired_level;
}
/**
* Type of events supported by the library.
* @see fbr_ev_wait
*/
enum fbr_ev_type {
FBR_EV_WATCHER = 1, /*!< libev watcher event */
FBR_EV_MUTEX, /*!< fbr_mutex event */
FBR_EV_COND_VAR, /*!< fbr_cond_var event */
FBR_EV_EIO, /*!< libeio event */
};
struct fbr_ev_base;
/**
* Destructor function type.
* @param [in] arg user-defined data argument
*
* This function gets called when containing fiber dies or destructor is
* removed with call flag set to 1.
* @see fbr_destructor
* @see fbr_log_func_t
*/
typedef void (*fbr_destructor_func_t)(FBR_P_ void *arg);
/**
* Destructor structure.
*
* This structure holds information required for destruction. As it's defined
* in public interface, it may be used as stack-allocatable destructor (it's
* used internally the same way).
*
* Stack-allocated destructor might be useful if one has some resource (e.g.
* file descriptor), which needs to be destructed in some way, and it's
* lifespan continues across several fbr_* calls. While being in some library
* call, a fiber may be reclaimed, but its stack remains intact until
* reclaimed. Destructor is called before the stack becomes dangerous to use
* and guarantees resource destruction.
*
* User is supposed to fill in the func and arg fields.
* @see fbr_destructor_func_t
* @see fbr_destructor_add
* @see fbr_destructor_remove
*/
struct fbr_destructor {
fbr_destructor_func_t func; /*!< destructor function */
void *arg; /*!< destructor function argument (optional) */
TAILQ_ENTRY(fbr_destructor) entries; //Private
int active; //Private
};
#define FBR_DESTRUCTOR_INITIALIZER { \
.func = NULL, \
.arg = NULL, \
.active = 0, \
};
struct fbr_id_tailq;
struct fbr_id_tailq_i {
/* Private structure */
fbr_id_t id;
struct fbr_ev_base *ev;
TAILQ_ENTRY(fbr_id_tailq_i) entries;
struct fbr_destructor dtor;
struct fbr_id_tailq *head;
};
TAILQ_HEAD(fbr_id_tailq, fbr_id_tailq_i);
/**
* Base struct for all events.
*
* All other event structures ``inherit'' this one by inclusion of it as
* ev_base member.
* @see fbr_ev_upcast
* @see fbr_ev_wait
*/
struct fbr_ev_base {
enum fbr_ev_type type; /*!< type of the event */
fbr_id_t id; /*!< id of a fiber that is waiting for this event */
int arrived; /*!< flag indicating that this event has arrived */
struct fbr_context *fctx; //Private
void *data; //Private
struct fbr_id_tailq_i item; //Private
};
/**
* Convenience macro to save some typing.
*
* Allows you to cast fbr_ev_base to some other event struct via
* fbr_container_of magic.
* @see fbr_container_of
* @see fbr_ev_base
*/
#define fbr_ev_upcast(ptr, type_no_struct) \
fbr_container_of(ptr, struct type_no_struct, ev_base)
/**
* libev watcher event.
*
* This event struct can represent any libev watcher which should be
* initialized and started. You can safely pass NULL as a callback for the
* watcher since the library sets up it's own callback.
* @see fbr_ev_upcast
* @see fbr_ev_wait
*/
struct fbr_ev_watcher {
ev_watcher *w; /*!< libev watcher */
struct fbr_ev_base ev_base;
};
/**
* fbr_mutex event.
*
* This event struct can represent mutex acquisition waiting.
* @see fbr_ev_upcast
* @see fbr_ev_wait
*/
struct fbr_ev_mutex {
struct fbr_mutex *mutex; /*!< mutex we're interested in */
struct fbr_ev_base ev_base;
};
/**
* fbr_cond_var event.
*
* This event struct can represent conditional variable waiting.
* @see fbr_ev_upcast
* @see fbr_ev_wait
*/
struct fbr_ev_cond_var {
struct fbr_cond_var *cond; /*!< conditional variable we're interested
in */
struct fbr_mutex *mutex; /*!< mutex to protect conditional variable*/
struct fbr_ev_base ev_base;
};
/**
* Mutex structure.
*
* This structure represent a mutex.
* @see fbr_mutex_init
* @see fbr_mutex_destroy
*/
struct fbr_mutex {
fbr_id_t locked_by;
struct fbr_id_tailq pending;
TAILQ_ENTRY(fbr_mutex) entries;
};
/**
* Conditional variable structure.
*
* This structure represent a conditional variable.
* @see fbr_mutex_init
* @see fbr_mutex_destroy
*/
struct fbr_cond_var {
struct fbr_mutex *mutex;
struct fbr_id_tailq waiting;
};
/**
* Virtual ring buffer implementation.
*
* Low-level data structure implemented using memory mappings of the same
* physical pages to provide an efficient, zero-copy-overhead ring buffer
* implementation.
* @see fbr_vrb_init
* @see fbr_vrb_destroy
*/
struct fbr_vrb {
void *mem_ptr;
size_t mem_ptr_size;
void *lower_ptr;
void *upper_ptr;
size_t ptr_size;
void *data_ptr;
void *space_ptr;
};
/**
* Inter-fiber communication pipe.
*
* This structure represent a communication pipe between two (or more) fibers.
*
* Higher level wrapper around fbr_vrb.
* @see fbr_buffer_init
* @see fbr_buffer_destroy
* @see struct fbr_vrb
*/
struct fbr_buffer {
struct fbr_vrb vrb;
size_t prepared_bytes;
size_t waiting_bytes;
struct fbr_cond_var committed_cond;
struct fbr_mutex write_mutex;
struct fbr_cond_var bytes_freed_cond;
struct fbr_mutex read_mutex;
};
struct fbr_mq;
/**
* Fiber-local data key.
*
* @see fbr_key_create
* @see fbr_key_delete
* @see fbr_key_get_data
* @see fbr_key_set_data
*/
typedef unsigned int fbr_key_t;
/**
* Maximum number of fiber-local keys allowed.
*/
#define FBR_MAX_KEY 64
/**
* Adds destructor to fiber list.
* @param [in] dtor destructor to register
*
* This function registers a destructor. User must guarantee that destructor
* object stays alive until fiber is reclaimed or destructor is removed,
* whichever comes first.
* @see fbr_destructor
*/
void fbr_destructor_add(FBR_P_ struct fbr_destructor *dtor);
/**
* Removes destructor from fiber list.
* @param [in] dtor destructor to unregister
* @param [in] call flag indicating if destructor needs to be called
*
* This function unregisters a destructor. User may specify a flag for
* destructor function to be called.
* @see fbr_destructor
*/
void fbr_destructor_remove(FBR_P_ struct fbr_destructor *dtor,
int call);
/**
* Initializes a destructor.
* @param [in] dtor destructor to initialize
*
* This function should be called before a newly allocated destructor is used.
* Alternatively you may use FBR_DESTRUCTOR_INITIALIZER macro as a initializing
* value upon declaration.
* @see fbr_destructor
*/
static inline void fbr_destructor_init(struct fbr_destructor *dtor)
{
	/* Zero the whole structure: clears func/arg and marks the
	 * destructor as inactive. */
	memset(dtor, 0, sizeof(struct fbr_destructor));
}
/**
* Initializer for libev watcher event.
*
* This functions properly initializes fbr_ev_watcher struct. You should not do
* it manually.
* @see fbr_ev_watcher
* @see fbr_ev_wait
*/
void fbr_ev_watcher_init(FBR_P_ struct fbr_ev_watcher *ev, ev_watcher *w);
/**
* Initializer for mutex event.
*
* This functions properly initializes fbr_ev_mutex struct. You should not do
* it manually.
* @see fbr_ev_mutex
* @see fbr_ev_wait
*/
void fbr_ev_mutex_init(FBR_P_ struct fbr_ev_mutex *ev,
struct fbr_mutex *mutex);
/**
* Initializer for conditional variable event.
*
* This functions properly initializes fbr_ev_cond_var struct. You should not do
* it manually.
* @see fbr_ev_cond_var
* @see fbr_ev_wait
*/
void fbr_ev_cond_var_init(FBR_P_ struct fbr_ev_cond_var *ev,
struct fbr_cond_var *cond, struct fbr_mutex *mutex);
/**
* Event awaiting function (one event only wrapper).
* @param [in] one the event base pointer of the event to wait for
* @returns 0 on success, -1 upon error
*
* This functions wraps fbr_ev_wait passing only one event to it.
* @see fbr_ev_base
* @see fbr_ev_wait
*/
int fbr_ev_wait_one(FBR_P_ struct fbr_ev_base *one);
/**
* Event awaiting function (generic one).
* @param [in] events array of event base pointers
* @returns the number of events arrived or -1 upon error
*
* This function waits until any event from events array arrives. Only one
* event can arrive at a time. It returns a pointer to the same event that was
* passed in events array.
* @see fbr_ev_base
* @see fbr_ev_wait_one
*/
int fbr_ev_wait(FBR_P_ struct fbr_ev_base *events[]);
/**
* Event awaiting function with timeout.
* @param [in] events array of event base pointers
* @param [in] timeout in seconds to wait for the events
* @returns the number of events arrived or -1 upon error
*
* This function is a convenient wrapper around fbr_ev_wait, it just creates a
* timer watcher and makes new events array with the timer watcher included.
* Timer event is not counted in the number of returned events.
* @see fbr_ev_wait
*/
int fbr_ev_wait_to(FBR_P_ struct fbr_ev_base *events[], ev_tstamp timeout);
/**
* Transfer of fiber context to another fiber.
* @param [in] to callee id
* @returns 0 on success, -1 on failure with f_errno set.
*
* This function transfers the execution context to other fiber. It returns as
* soon as the called fiber yields. In case of error it returns immediately.
* @see fbr_yield
*/
int fbr_transfer(FBR_P_ fbr_id_t to);
/**
* Initializes the library context.
* @param [in] fctx pointer to the user allocated fbr_context.
* @param [in] loop pointer to the user supplied libev loop.
*
* It's user's responsibility to allocate fbr_context structure and create and
* run the libev event loop.
* @see fbr_context
* @see fbr_destroy
*/
void fbr_init(struct fbr_context *fctx, struct ev_loop *loop);
/**
* Destroys the library context.
* All created fibers are reclaimed and all of the memory is freed. Stopping
* the event loop is user's responsibility.
* @see fbr_context
* @see fbr_init
* @see fbr_reclaim
*/
void fbr_destroy(FBR_P);
/**
* Enables/Disables backtrace capturing.
* @param [in] enabled are backtraces enabled?
*
* The library tries to capture backtraces at certain points which may help
* when debugging obscure problems. For example it captures the backtrace
* whenever a fiber is reclaimed and when one tries to call it dumps out the
* backtrace showing where was it reclaimed. But these cost quite a bit of cpu
* and are disabled by default.
*/
void fbr_enable_backtraces(FBR_P, int enabled);
/**
* Analog of strerror but for the library errno.
* @param [in] code Error code to describe
* @see fbr_context
* @see fbr_error_code
*/
const char *fbr_strerror(FBR_P_ enum fbr_error_code code);
/**
* Utility log wrapper.
*
* Wraps logv function of type fbr_log_func_t located in fbr_logger with log
* level of FBR_LOG_ERROR. Follows printf semantics of format string and
* variadic argument list.
* @see fbr_context
* @see fbr_logger
* @see fbr_log_func_t
* @see fbr_logutil_func_t
*/
void fbr_log_e(FBR_P_ const char *format, ...)
__attribute__ ((format (printf, 2, 3)));
/**
* Utility log wrapper.
*
* Wraps logv function of type fbr_log_func_t located in fbr_logger with log
* level of FBR_LOG_WARNING. Follows printf semantics of format string and
* variadic argument list.
* @see fbr_context
* @see fbr_logger
* @see fbr_log_func_t
* @see fbr_logutil_func_t
*/
void fbr_log_w(FBR_P_ const char *format, ...)
__attribute__ ((format (printf, 2, 3)));
/**
* Utility log wrapper.
*
* Wraps logv function of type fbr_log_func_t located in fbr_logger with log
* level of FBR_LOG_NOTICE. Follows printf semantics of format string and
* variadic argument list.
* @see fbr_context
* @see fbr_logger
* @see fbr_log_func_t
* @see fbr_logutil_func_t
*/
void fbr_log_n(FBR_P_ const char *format, ...)
__attribute__ ((format (printf, 2, 3)));
/**
* Utility log wrapper.
*
* Wraps logv function of type fbr_log_func_t located in fbr_logger with log
* level of FBR_LOG_INFO. Follows printf semantics of format string and
* variadic argument list.
* @see fbr_context
* @see fbr_logger
* @see fbr_log_func_t
* @see fbr_logutil_func_t
*/
void fbr_log_i(FBR_P_ const char *format, ...)
__attribute__ ((format (printf, 2, 3)));
/**
* Utility log wrapper.
*
* Wraps logv function of type fbr_log_func_t located in fbr_logger with log
* level of FBR_LOG_DEBUG. Follows printf semantics of format string and
* variadic argument list.
* @see fbr_context
* @see fbr_logger
* @see fbr_log_func_t
* @see fbr_logutil_func_t
*/
void fbr_log_d(FBR_P_ const char *format, ...)
__attribute__ ((format (printf, 2, 3)));
/**
* Maximum length of fiber's name.
*/
#define FBR_MAX_FIBER_NAME 64
/**
* Creates a new fiber.
* @param [in] name fiber name, used for identification it
* backtraces, etc.
* @param [in] func function used as a fiber's ``main''.
* @param [in] stack_size stack size (0 for default).
* @param [in] arg user supplied argument to a fiber.
* @return Pointer to the created fiber.
*
* The created fiber is not running in any shape or form, it's just created and
* is ready to be launched.
*
* Stack is anonymously mmaped so it should not occupy all the required space
* straight away. Adjust stack size only when you know what you are doing!
*
* Allocated stacks are registered as stacks via valgrind client request
* mechanism, so it's generally valgrind friendly and should not cause any
* noise.
*
* Fibers are organized in a tree. Child nodes are attached to a parent
* whenever the parent is creating them. This tree is used primarily for
* automatic reclaim of child fibers.
* @see fbr_reclaim
* @see fbr_disown
* @see fbr_parent
*/
fbr_id_t fbr_create(FBR_P_ const char *name, fbr_fiber_func_t func, void *arg,
size_t stack_size);
/**
* Retrieve a name of the fiber.
* @param [in] id identificator of a fiber
* @return pointer to character buffer or NULL on error
*
* The name is located in the statically allocated buffer of size
* FBR_MAX_FIBER_NAME.
*
* Don't try to free it!
*
* @see fbr_create
* @see fbr_set_name
*/
const char *fbr_get_name(FBR_P_ fbr_id_t id);
/**
* Sets a name for the fiber.
* @param [in] id identificator of a fiber
* @param [in] name new name for a fiber
* @return 0 on success, -1 on error.
*
* The name is located in the statically allocated buffer of size
* FBR_MAX_FIBER_NAME. If your name does not fit, it will be trimmed.
*
* @see fbr_get_name
*/
int fbr_set_name(FBR_P_ fbr_id_t id, const char *name);
/**
* Changes parent of current fiber.
* @param [in] parent new parent fiber
* @returns -1 on error with f_errno set, 0 upon success
*
* This function allows you to change a fiber's parent. You need to pass a valid
* id or 0 to indicate the root fiber.
*
* This might be useful when some fiber A creates another fiber B that should
* survive its parent being reclaimed, or vice versa, some fiber A needs to be
* reclaimed with fiber B albeit B is not A's parent.
*
* Root fiber is reclaimed only when library context is destroyed.
* @see fbr_create
* @see fbr_destroy
*/
int fbr_disown(FBR_P_ fbr_id_t parent);
/**
* Find out current fiber's parent.
* @returns current fiber's parent
*
* This function allows you to find out what fiber is considered to be parent
* for the current one.
* @see fbr_create
* @see fbr_disown
*/
fbr_id_t fbr_parent(FBR_P);
/**
* Reclaims a fiber.
* @param [in] fiber fiber pointer
* @returns -1 on error with f_errno set, 0 upon success
*
* Fibers are never destroyed, but reclaimed. Reclamation frees some resources
* like call lists and memory pools immediately while keeping fiber structure
* itself and its stack as is. Reclaimed fiber is prepended to the reclaimed
* fiber list and will be served as a new one whenever next fbr_create is
* called. Fiber is prepended because it is warm in terms of cpu cache and its
* use might be faster than any other fiber in the list.
*
* When you have some reclaimed fibers in the list, reclaiming and creating are
* generally cheap operations.
*
* This function takes action immediately unless the target fiber has blocked
* the reclaim by fbr_set_noreclaim (in this case fbr_reclaim will block and
* wait for fbr_set_reclaim to be called).
*/
int fbr_reclaim(FBR_P_ fbr_id_t fiber);
/**
* Allows a fiber to be reclaimed.
* @param [in] fiber fiber pointer
* @returns -1 on error with f_errno set, 0 upon success
*
* The opposite of fbr_set_noreclaim. Enables the fiber to be reclaimed and
* returns the control flow to the caller (i.e. the fiber will not be
* immediately reclaimed).
* fbr_want_reclaim can be used to check if reclaim of the fiber was requested
* so as to cooperate nicely and return from the fiber or call fbr_reclaim on
* fbr_self.
*
* @see fbr_want_reclaim
*/
int fbr_set_reclaim(FBR_P_ fbr_id_t fiber);
/**
* Blocks a fiber from being reclaimed.
* @param [in] fiber fiber pointer
* @returns -1 on error with f_errno set, 0 upon success
*
* After this call the fiber will not be reclaimed until explicitly allowed by
* fbr_set_reclaim. Nesting of such blocks is allowed as long as
* fbr_set_reclaim and fbr_set_noreclaim are called the same number of times.
*
* @see fbr_set_reclaim
*/
int fbr_set_noreclaim(FBR_P_ fbr_id_t fiber);
/**
* Checks if reclaim requests are pending.
* @param [in] fiber fiber pointer
* @returns -1 on error with f_errno set, 0 if no pending reclaim requests, 1
* if there are pending requests
*
* This function can be used to check if there are pending reclaim requests. It
 * can return 1 only when the fiber is out of any (nested) no reclaim blocks.
*
* @see fbr_set_noreclaim
*/
int fbr_want_reclaim(FBR_P_ fbr_id_t fiber);
/**
* Tests if given fiber is reclaimed.
* @param [in] fiber fiber pointer
* @return 1 if fiber is reclaimed, 0 otherwise
*/
int fbr_is_reclaimed(FBR_P_ fbr_id_t fiber);
/**
* Returns id of current fiber.
* @return fbr_id_t of current fiber being executed.
*/
fbr_id_t fbr_self(FBR_P);
/**
* Fiber-local key creation.
*
 * This creates a new unique key and stores it in key.
*/
int fbr_key_create(FBR_P_ fbr_key_t *key);
/**
* Fiber-local key deletion.
* This explicitly destroys a key.
*/
int fbr_key_delete(FBR_P_ fbr_key_t key);
/**
* Sets fiber-local key data.
* This stores a value under a key.
*/
int fbr_key_set(FBR_P_ fbr_id_t id, fbr_key_t key, void *value);
/**
* Gets fiber-local key data.
* This retrieves the value under a key.
*/
void *fbr_key_get(FBR_P_ fbr_id_t id, fbr_key_t key);
/**
* Yields execution to other fiber.
*
* When a fiber is waiting for some incoming event --- it should yield. This
* will pop current fiber from the fiber stack and transfer the execution
* context to the next fiber from the stack making that fiber a new current
* one.
*
* It loops through all fibers subscribed to specified multicast group id.
* @see fbr_transfer
*/
void fbr_yield(FBR_P);
/**
* Yields execution to other fiber returning the execution at the next event
* loop run.
*
* Useful inside of some busy loop with lots of iterations to play nicely with
* other fibers which might start starving on the execution time.
* @see fbr_yield
* @see fbr_transfer
*/
void fbr_cooperate(FBR_P);
/**
* (DEPRECATED) Allocates memory in current fiber's pool.
* @param [in] size size of the requested memory block
* @return allocated memory chunk
*
* When a fiber is reclaimed, this memory will be freed. Prior to that a
* destructor will be called if any specified.
* @see fbr_calloc
* @see fbr_alloc_set_destructor
* @see fbr_alloc_destructor_func_t
* @see fbr_free
*/
void *fbr_alloc(FBR_P_ size_t size);
/**
* (DEPRECATED) Sets destructor for a memory chunk.
* @param [in] ptr address of a memory chunk
* @param [in] func destructor function
* @param [in] context user supplied context pointer
*
* Setting new destructor simply changes it without calling old one or queueing
* them.
*
* You can allocate 0 sized memory chunk and never free it just for the purpose
* of calling destructor with some context when fiber is reclaimed. This way
* you can for example close some file descriptors or do some other required
* cleanup.
* @see fbr_alloc
* @see fbr_free
*/
void fbr_alloc_set_destructor(FBR_P_ void *ptr, fbr_alloc_destructor_func_t func,
void *context);
/**
* (DEPRECATED) Allocates a set of initialized objects in fiber's pool.
* @param [in] nmemb number of members
* @param [in] size size of a single member
* @return zero-filled allocated memory chunk
*
* Same as fbr_alloc called with nmemb multiplied by size.
* @see fbr_alloc
* @see fbr_free
*/
void *fbr_calloc(FBR_P_ unsigned int nmemb, size_t size);
/**
* (DEPRECATED) Explicitly frees allocated memory chunk.
* @param [in] ptr chunk address
*
* Explicitly frees a fiber pool chunk calling the destructor if any.
* @see fbr_alloc
* @see fbr_calloc
* @see fbr_alloc_set_destructor
*/
void fbr_free(FBR_P_ void *ptr);
/**
* (DEPRECATED) Explicitly frees allocated memory chunk.
* @param [in] ptr chunk address
*
* Explicitly frees a fiber pool chunk without calling the destructor.
* @see fbr_alloc
* @see fbr_calloc
* @see fbr_alloc_set_destructor
*/
void fbr_free_nd(FBR_P_ void *ptr);
/**
* Utility function to make file descriptor non-blocking.
* @param [in] fd file descriptor to make non-blocking
* @returns -1 on error with f_errno set, 0 upon success
*
 * In case of failure FBR_ESYSTEM is set as f_errno and the user should consult
* system errno for details.
*
*/
int fbr_fd_nonblock(FBR_P_ int fd);
/**
* Fiber friendly connect wrapper.
* @param [in] sockfd - socket file descriptor
* @param [in] addr - pointer to struct sockaddr, containing connection details
 * @param [in] addrlen length of struct sockaddr
* @return zero on success, -1 in case of error and errno set
*
 * Connect wrapper that connects the socket referred to by the file
 * descriptor sockfd to the address specified by addr. The calling fiber will
 * be blocked until sockfd is connected or an error occurs.
*
* Possible errno values are described in connect man page. The only special case
* is EINPROGRESS which is handled internally.
*/
int fbr_connect(FBR_P_ int sockfd, const struct sockaddr *addr,
socklen_t addrlen);
/**
* Fiber friendly connect wrapper with timeout.
* @param [in] sockfd - socket file descriptor
* @param [in] addr - pointer to struct sockaddr, containing connection details
 * @param [in] addrlen length of struct sockaddr
* @param [in] timeout in seconds to wait for events
* @return zero on success, -1 in case of error and errno set
*
 * Connect wrapper that connects the socket referred to by the file
 * descriptor sockfd to the address specified by addr. The calling fiber will
 * be blocked until sockfd is connected or either timeout or an error occurs.
*
 * Possible errno values are described in connect man page. There are special cases:
* EINPROGRESS is handled internally; ETIMEDOUT is returned in case of timeout.
*/
int fbr_connect_wto(FBR_P_ int sockfd, const struct sockaddr *addr,
socklen_t addrlen, ev_tstamp timeout);
/**
* Fiber friendly libc read wrapper.
* @param [in] fd file descriptor to read from
* @param [in] buf pointer to some user-allocated buffer
* @param [in] count maximum number of bytes to read
* @return number of bytes read on success, -1 in case of error and errno set
*
* Attempts to read up to count bytes from file descriptor fd into the buffer
* starting at buf. Calling fiber will be blocked until something arrives at
* fd.
*
* Possible errno values are described in read man page.
*
* @see fbr_read_all
* @see fbr_readline
*/
ssize_t fbr_read(FBR_P_ int fd, void *buf, size_t count);
/**
* Fiber friendly libc read wrapper with timeout.
* @param [in] fd file descriptor to read from
* @param [in] buf pointer to some user-allocated buffer
* @param [in] count maximum number of bytes to read
* @param [in] timeout in seconds to wait for events
* @return number of bytes read on success, -1 in case of error and errno set
*
* Attempts to read up to count bytes from file descriptor fd into the buffer
* starting at buf. Calling fiber will be blocked until something arrives at
* fd or timeout occurs.
*
* Possible errno values are described in read man page. The only special case
* is ETIMEDOUT, which is returned to the caller when timeout occurs.
*
* @see fbr_read_all
* @see fbr_readline
*/
ssize_t fbr_read_wto(FBR_P_ int fd, void *buf, size_t count, ev_tstamp timeout);
/**
* Even more fiber friendly libc read wrapper.
* @param [in] fd file descriptor to read from
* @param [in] buf pointer to some user-allocated buffer
* @param [in] count desired number of bytes to read
* @return number of bytes read on success, -1 in case of error and errno set
*
* Attempts to read exactly count bytes from file descriptor fd into the buffer
* starting at buf. Calling fiber will be blocked until the required amount of
 * data or EOF arrive at fd. If the latter occurs too early, the returned
 * number of bytes will be less than required.
*
* Possible errno values are described in read man page.
*
* @see fbr_read
 * @see fbr_readline
*/
ssize_t fbr_read_all(FBR_P_ int fd, void *buf, size_t count);
/**
* Even more fiber friendly libc read wrapper with timeout.
* @param [in] fd file descriptor to read from
* @param [in] buf pointer to some user-allocated buffer
* @param [in] count desired number of bytes to read
* @param [in] timeout in seconds to wait for events
* @return number of bytes read on success, -1 in case of error and errno set
*
* Attempts to read exactly count bytes from file descriptor fd into the buffer
* starting at buf. Calling fiber will be blocked for timeout number of seconds, or
 * the required amount of data arrives, or EOF is received at fd. If the latter
 * occurs too early, the returned number of bytes will be less than required.
*
* Possible errno values are described in read man page. Errno is set to ETIMEDOUT
* when timeout occurs.
*
* @see fbr_read
* @see fbr_readline
*/
ssize_t fbr_read_all_wto(FBR_P_ int fd, void *buf, size_t count, ev_tstamp timeout);
/**
* Utility function to read a line.
* @param [in] fd file descriptor to read from
* @param [in] buffer pointer to some user-allocated buffer
* @param [in] n maximum number of bytes to read
* @return number of bytes read on success, -1 in case of error and errno set
*
* Attempts to read at most count bytes from file descriptor fd into the buffer
* starting at buf, but stops if newline is encountered. Calling fiber will be
* blocked until the required amount of data, EOF or newline arrive at fd.
*
* Possible errno values are described in read man page.
*
* @see fbr_read
* @see fbr_read_all
*/
ssize_t fbr_readline(FBR_P_ int fd, void *buffer, size_t n);
/**
* Fiber friendly libc write wrapper.
* @param [in] fd file descriptor to write to
* @param [in] buf pointer to some user-allocated buffer
* @param [in] count maximum number of bytes to write
* @return number of bytes written on success, -1 in case of error and errno set
*
* Attempts to write up to count bytes to file descriptor fd from the buffer
* starting at buf. Calling fiber will be blocked until the data is written.
*
* Possible errno values are described in write man page.
*
* @see fbr_write_all
*/
ssize_t fbr_write(FBR_P_ int fd, const void *buf, size_t count);
/**
* Fiber friendly libc write wrapper with timeout.
* @param [in] fd file descriptor to write to
* @param [in] buf pointer to some user-allocated buffer
* @param [in] count maximum number of bytes to write
* @param [in] timeout in seconds to wait for events
* @return number of bytes written on success, -1 in case of error and errno set
*
* Attempts to write up to count bytes to file descriptor fd from the buffer
* starting at buf. Calling fiber will be blocked until the data is written
* or timeout occurs.
*
* Possible errno values are described in write man page.
* ETIMEDOUT is returned in case of timeout.
*
* @see fbr_write_all_wto
*/
ssize_t fbr_write_wto(FBR_P_ int fd, const void *buf, size_t count, ev_tstamp timeout);
/**
* Even more fiber friendly libc write wrapper.
* @param [in] fd file descriptor to write to
* @param [in] buf pointer to some user-allocated buffer
* @param [in] count desired number of bytes to write
* @return number of bytes read on success, -1 in case of error and errno set
*
* Attempts to write exactly count bytes to file descriptor fd from the buffer
* starting at buf. Calling fiber will be blocked until the required amount of
* data is written to fd.
*
* Possible errno values are described in write man page.
*
* @see fbr_write
*/
ssize_t fbr_write_all(FBR_P_ int fd, const void *buf, size_t count);
/**
* Even more fiber friendly libc write wrapper with timeout.
* @param [in] fd file descriptor to write to
* @param [in] buf pointer to some user-allocated buffer
* @param [in] count desired number of bytes to write
* @param [in] timeout in seconds to wait for events
* @return number of bytes read on success, -1 in case of error and errno set
*
* Attempts to write exactly count bytes to file descriptor fd from the buffer
* starting at buf. Calling fiber will be blocked until the required amount of
* data is written to fd or timeout occurs.
*
* Possible errno values are described in write man page.
*
* @see fbr_write_wto
*/
ssize_t fbr_write_all_wto(FBR_P_ int fd, const void *buf, size_t count, ev_tstamp timeout);
/**
* Fiber friendly libc recvfrom wrapper.
* @param [in] sockfd file descriptor to read from
* @param [in] buf pointer to some user-allocated buffer
* @param [in] len maximum number of bytes to read
* @param [in] flags just flags, see man recvfrom for details
* @param [in] src_addr source address
* @param [in] addrlen size of src_addr
* @return number of bytes read on success, -1 in case of error and errno set
*
* This function is used to receive messages from a socket.
*
* Possible errno values are described in recvfrom man page.
*
*/
ssize_t fbr_recvfrom(FBR_P_ int sockfd, void *buf, size_t len, int flags,
struct sockaddr *src_addr, socklen_t *addrlen);
/**
* Fiber friendly libc recv wrapper.
* @param [in] sockfd file descriptor to read from
* @param [in] buf pointer to some user-allocated buffer
* @param [in] len maximum number of bytes to read
* @param [in] flags just flags, see man recvfrom for details
* @return number of bytes read on success, -1 in case of error and errno set
*
* This function is used to receive messages from a socket.
*
* Possible errno values are described in recv man page.
*/
ssize_t fbr_recv(FBR_P_ int sockfd, void *buf, size_t len, int flags);
/**
* Fiber friendly libc sendto wrapper.
* @param [in] sockfd file descriptor to write to
* @param [in] buf pointer to some user-allocated buffer
* @param [in] len maximum number of bytes to write
* @param [in] flags just flags, see man sendto for details
* @param [in] dest_addr destination address
* @param [in] addrlen size of dest_addr
* @return number of bytes written on success, -1 in case of error and errno set
*
* This function is used to send messages to a socket.
*
* Possible errno values are described in sendto man page.
*
*/
ssize_t fbr_sendto(FBR_P_ int sockfd, const void *buf, size_t len, int flags, const
struct sockaddr *dest_addr, socklen_t addrlen);
/**
*
* Fiber friendly libc send wrapper.
* @param [in] sockfd file descriptor to write to
* @param [in] buf pointer to some user-allocated buffer
* @param [in] len maximum number of bytes to write
* @param [in] flags just flags, see man sendto for details
* @return number of bytes written on success, -1 in case of error and errno set
*
* This function is used to send messages to a socket.
*/
ssize_t fbr_send(FBR_P_ int sockfd, const void *buf, size_t len, int flags);
/**
* Fiber friendly libc accept wrapper.
* @param [in] sockfd file descriptor to accept on
* @param [in] addr client address
* @param [in] addrlen size of addr
* @return client socket fd on success, -1 in case of error and errno set
*
* This function is used to accept a connection on a listening socket.
*
* Possible errno values are described in accept man page.
*
*/
int fbr_accept(FBR_P_ int sockfd, struct sockaddr *addr, socklen_t *addrlen);
/**
* Puts current fiber to sleep.
* @param [in] seconds maximum number of seconds to sleep
* @return number of seconds actually being asleep
*
* This function is used to put current fiber into sleep. It will wake up after
* the desired time has passed or earlier if some other fiber has called it.
*/
ev_tstamp fbr_sleep(FBR_P_ ev_tstamp seconds);
/**
* Waits for an async libev event.
* @param [in] w ev_async watcher (initialized by caller)
*
* This function will cause the calling fiber to wait until an
* ev_async_send() has been triggered on the specified ev_async watcher.
*/
void fbr_async_wait(FBR_P_ ev_async *w);
/**
* Prints fiber call stack to stderr.
*
* useful while debugging obscure fiber call problems.
*/
void fbr_dump_stack(FBR_P_ fbr_logutil_func_t log);
/**
* Initializes a mutex.
* @param [in] mutex a mutex structure to initialize
*
* Mutexes are helpful when your fiber has a critical code section including
* several fbr_* calls. In this case execution of multiple copies of your fiber
* may get mixed up.
*
* @see fbr_mutex_lock
* @see fbr_mutex_trylock
* @see fbr_mutex_unlock
* @see fbr_mutex_destroy
*/
void fbr_mutex_init(FBR_P_ struct fbr_mutex *mutex);
/**
* Locks a mutex.
* @param [in] mutex pointer to a mutex
*
* Attempts to lock a mutex. If mutex is already locked then the calling fiber
* is suspended until the mutex is eventually freed.
*
* @see fbr_mutex_init
* @see fbr_mutex_trylock
* @see fbr_mutex_unlock
* @see fbr_mutex_destroy
*/
void fbr_mutex_lock(FBR_P_ struct fbr_mutex *mutex);
/**
* Tries to locks a mutex.
* @param [in] mutex pointer to a mutex
* @return 1 if lock was successful, 0 otherwise
*
 * Attempts to lock a mutex. Returns immediately regardless of whether the
 * lock was acquired or not.
*
* @see fbr_mutex_init
* @see fbr_mutex_lock
* @see fbr_mutex_unlock
* @see fbr_mutex_destroy
*/
int fbr_mutex_trylock(FBR_P_ struct fbr_mutex *mutex);
/**
* Unlocks a mutex.
* @param [in] mutex pointer to a mutex
*
* Unlocks the given mutex. An other fiber that is waiting for it (if any) will
* be called upon next libev loop iteration.
*
* @see fbr_mutex_init
* @see fbr_mutex_lock
* @see fbr_mutex_trylock
* @see fbr_mutex_destroy
*/
void fbr_mutex_unlock(FBR_P_ struct fbr_mutex *mutex);
/**
* Destroys a mutex.
* @param [in] mutex pointer to mutex
*
* Frees used resources. It does not unlock the mutex.
*
* @see fbr_mutex_init
* @see fbr_mutex_lock
* @see fbr_mutex_unlock
* @see fbr_mutex_trylock
*/
void fbr_mutex_destroy(FBR_P_ struct fbr_mutex *mutex);
/**
* Initializes a conditional variable.
*
* Conditional variable is useful primitive for fiber synchronisation. A set of
* fibers may be waiting until certain condition is met. Another fiber can
* trigger this condition for one or all waiting fibers.
*
* @see fbr_cond_destroy
* @see fbr_cond_wait
* @see fbr_cond_broadcast
* @see fbr_cond_signal
*/
void fbr_cond_init(FBR_P_ struct fbr_cond_var *cond);
/**
* Destroys a conditional variable.
*
* This just frees used resources. No signals are sent to waiting fibers.
*
* @see fbr_cond_init
* @see fbr_cond_wait
* @see fbr_cond_broadcast
* @see fbr_cond_signal
*/
void fbr_cond_destroy(FBR_P_ struct fbr_cond_var *cond);
/**
* Waits until condition is met.
*
* Current fiber is suspended until a signal is sent via fbr_cond_signal or
* fbr_cond_broadcast to the corresponding conditional variable.
*
* A mutex must be acquired by the calling fiber prior to waiting for a
* condition. Internally mutex is released and reacquired again before
* returning. Upon successful return calling fiber will hold the mutex.
*
* @see fbr_cond_init
* @see fbr_cond_destroy
* @see fbr_cond_broadcast
* @see fbr_cond_signal
*/
int fbr_cond_wait(FBR_P_ struct fbr_cond_var *cond, struct fbr_mutex *mutex);
/**
* Broadcasts a signal to all fibers waiting for condition.
*
* All fibers waiting for a condition will be added to run queue (and will
* eventually be run, one per event loop iteration).
*
* @see fbr_cond_init
* @see fbr_cond_destroy
* @see fbr_cond_wait
* @see fbr_cond_signal
*/
void fbr_cond_broadcast(FBR_P_ struct fbr_cond_var *cond);
/**
* Signals to first fiber waiting for a condition.
*
* Exactly one fiber (first one) waiting for a condition will be added to run
* queue (and will eventually be run, one per event loop iteration).
*
* @see fbr_cond_init
* @see fbr_cond_destroy
* @see fbr_cond_wait
* @see fbr_cond_signal
*/
void fbr_cond_signal(FBR_P_ struct fbr_cond_var *cond);
/**
* Initializes memory mappings.
* @param [in] vrb a pointer to fbr_vrb
* @param [in] size length of the data
 * @param [in] file_pattern file name pattern for underlying mmap storage
 * @returns 0 on success, -1 on error.
*
* This function mmaps adjacent virtual memory regions of required size which
* correspond to the same physical memory region. Also it adds two page-sized
* regions on the left and on the right with PROT_NONE access as a guards.
*
* It does mmaps on the same file, which is unlinked and closed afterwards, so
* it will not pollute file descriptor space of a process and the filesystem.
*
* @see struct fbr_vrb
* @see fbr_vrb_destroy
*/
int fbr_vrb_init(struct fbr_vrb *vrb, size_t size, const char *file_pattern);
/**
* Destroys mappings.
* @param [in] vrb a pointer to fbr_vrb
*
* This function unmaps all the mappings done by fbr_vrb_init.
*
* @see fbr_vrb_init
*/
void fbr_vrb_destroy(struct fbr_vrb *vrb);
/**
* Retrieves data length.
* @param [in] vrb a pointer to fbr_vrb
* @returns length of the data area.
*
* @see struct fbr_vrb
*/
static inline size_t fbr_vrb_data_len(struct fbr_vrb *vrb)
{
	/* The data region is the span between data_ptr and space_ptr. */
	char *data_begin = (char *) vrb->data_ptr;
	char *data_end = (char *) vrb->space_ptr;
	return data_end - data_begin;
}
/**
* Retrieves space length.
* @param [in] vrb a pointer to fbr_vrb
* @returns length of the space area.
*
* @see struct fbr_vrb
*/
static inline size_t fbr_vrb_space_len(struct fbr_vrb *vrb)
{
	/* Free space runs from space_ptr to the end of the data window,
	 * which is ptr_size bytes past data_ptr. */
	char *window_end = (char *) vrb->data_ptr + vrb->ptr_size;
	return window_end - (char *) vrb->space_ptr;
}
/**
* Retrieves total buffer capacity.
* @param [in] vrb a pointer to fbr_vrb
* @returns maximum length of data this vrb can hold.
*
* @see struct fbr_vrb
*/
static inline size_t fbr_vrb_capacity(struct fbr_vrb *vrb)
{
	/* Capacity equals the size of one of the mirrored mappings. */
	size_t capacity = vrb->ptr_size;
	return capacity;
}
/**
* Retrieves space area pointer.
* @param [in] vrb a pointer to fbr_vrb
* @returns pointer to the start of space area.
*
* @see struct fbr_vrb
*/
static inline void *fbr_vrb_space_ptr(struct fbr_vrb *vrb)
{
	/* Hand out the raw pointer to the start of the free space area. */
	void *space = vrb->space_ptr;
	return space;
}
/**
* Retrieves data area pointer.
* @param [in] vrb a pointer to fbr_vrb
* @returns pointer to the start of data area.
*
* @see struct fbr_vrb
*/
static inline void *fbr_vrb_data_ptr(struct fbr_vrb *vrb)
{
	/* Hand out the raw pointer to the start of the data area. */
	void *data = vrb->data_ptr;
	return data;
}
/**
* Give data to a vrb.
* @param [in] vrb a pointer to fbr_vrb
* @param [in] size length of the data
 * @returns 0 on success, -1 on error.
*
 * This function marks size bytes of space area as data starting from the
 * beginning of space area. It's your responsibility to fill that area with
 * the actual data.
*
* @see struct fbr_vrb
* @see fbr_vrb_take
*/
static inline int fbr_vrb_give(struct fbr_vrb *vrb, size_t size)
{
	/* Refuse to convert more bytes than the space area currently holds. */
	size_t available = fbr_vrb_space_len(vrb);
	if (available < size)
		return -1;
	/* Advancing space_ptr marks the preceding bytes as data. */
	vrb->space_ptr += size;
	return 0;
}
/**
* Take data from a vrb.
* @param [in] vrb a pointer to fbr_vrb
* @param [in] size length of the data
 * @returns 0 on success, -1 on error.
*
* This function marks size bytes of data area as space starting from the
* beginning of data area. It's your responsibility to drop any references to
* the region as it might be overwritten later.
*
* @see struct fbr_vrb
* @see fbr_vrb_give
*/
static inline int fbr_vrb_take(struct fbr_vrb *vrb, size_t size)
{
	/* Refuse to consume more bytes than the data area currently holds. */
	size_t stored = fbr_vrb_data_len(vrb);
	if (stored < size)
		return -1;
	/* Advancing data_ptr releases the preceding bytes back to space. */
	vrb->data_ptr += size;
	/* If data_ptr crossed into the upper mirror, shift both pointers
	 * back by one mapping size; the mirrored mapping makes the
	 * content identical at either position. */
	if (vrb->data_ptr >= vrb->upper_ptr) {
		vrb->data_ptr -= vrb->ptr_size;
		vrb->space_ptr -= vrb->ptr_size;
	}
	return 0;
}
/**
* Resets a vrb.
* @param [in] vrb a pointer to fbr_vrb
*
* This function resets data and space pointers to their initial value,
* efficiently marking vrb as empty.
*
* @see struct fbr_vrb
*/
static inline void fbr_vrb_reset(struct fbr_vrb *vrb)
{
	/* Both pointers at the lower mapping boundary means "empty". */
	vrb->space_ptr = vrb->lower_ptr;
	vrb->data_ptr = vrb->lower_ptr;
}
/* Private function */
int fbr_vrb_resize_do(struct fbr_vrb *vrb, size_t new_size,
const char *file_pattern);
/**
* Resizes a vrb.
* @param [in] vrb a pointer to fbr_vrb
* @param [in] new_size new length of the data
 * @param [in] file_pattern file name pattern for underlying mmap storage
 * @returns 0 on success, -1 on error.
*
* This function does new mappings and copies the data over. Old mappings will
* be destroyed and all pointers to old data will be invalid after this
* operation.
*
* @see struct fbr_vrb
* @see fbr_vrb_init
*/
static inline int fbr_vrb_resize(struct fbr_vrb *vrb, size_t new_size,
const char *file_pattern)
{
struct fbr_vrb tmp;
int rv;
if (fbr_vrb_capacity(vrb) >= new_size)
return 0;
memcpy(&tmp, vrb, sizeof(tmp));
rv = fbr_vrb_init(vrb, new_size, file_pattern);
if (rv) {
memcpy(vrb, &tmp, sizeof(tmp));
return rv;
}
memcpy(fbr_vrb_space_ptr(vrb), fbr_vrb_data_ptr(&tmp),
fbr_vrb_data_len(&tmp));
fbr_vrb_give(vrb, fbr_vrb_data_len(&tmp));
fbr_vrb_destroy(&tmp);
return 0;
}
/**
* Initializes a circular buffer with pipe semantics.
* @param [in] buffer fbr_buffer structure to initialize
* @param [in] size size hint for the buffer
 * @returns 0 on success, -1 upon failure with f_errno set.
*
* This allocates a buffer with pipe semantics: you can write into it and later
* read what you have written. The buffer will occupy size rounded up to page
* size in physical memory, while occupying twice this size in virtual process
* memory due to usage of two mirrored adjacent mmaps.
*/
int fbr_buffer_init(FBR_P_ struct fbr_buffer *buffer, size_t size);
/**
* Amount of bytes filled with data.
* @param [in] buffer a pointer to fbr_buffer
* @returns number of bytes written to the buffer
*
* This function can be used to check if fbr_buffer_read_address will block.
* @see fbr_buffer_free_bytes
*/
static inline size_t fbr_buffer_bytes(FBR_PU_ struct fbr_buffer *buffer)
{
	/* Delegate to the underlying virtual ring buffer. */
	struct fbr_vrb *vrb = &buffer->vrb;
	return fbr_vrb_data_len(vrb);
}
/**
* Amount of free bytes.
* @param [in] buffer a pointer to fbr_buffer
* @returns number of free bytes in the buffer
*
* This function can be used to check if fbr_buffer_alloc_prepare will block.
* @see fbr_buffer_bytes
*/
static inline size_t fbr_buffer_free_bytes(FBR_PU_ struct fbr_buffer *buffer)
{
	/* Delegate to the underlying virtual ring buffer. */
	struct fbr_vrb *vrb = &buffer->vrb;
	return fbr_vrb_space_len(vrb);
}
/**
* Total capacity of a buffer.
* @param [in] buffer a pointer to fbr_buffer
* @returns maximum number of bytes the buffer may contain.
*
* This function may return total capacity larger that originally requested due
* to size being rounded up to be a multiple of page size.
* @see fbr_buffer_bytes
* @see fbr_buffer_free_bytes
*/
static inline size_t fbr_buffer_size(FBR_PU_ struct fbr_buffer *buffer)
{
	/* Delegate to the underlying virtual ring buffer. */
	struct fbr_vrb *vrb = &buffer->vrb;
	return fbr_vrb_capacity(vrb);
}
/**
* Pointer to the start of ``space'' memory area.
* @param [in] buffer a pointer to fbr_buffer
* @returns pointer to space memory region start.
*
* This function returns a pointer to the start of free memory area inside the
* buffer.
*
* You may use this function in case you have a fbr_buffer, that is not used as
* an inter-fiber communication mechanism but only as a local circular data
* buffer.
*
* Mixing space_ptr/data_ptr/give/take API with mutex-protected transactional
* API might lead to corruption and is not recommended unless you know what you
* are doing.
* @see fbr_buffer_data_ptr
* @see fbr_buffer_give
* @see fbr_buffer_take
*/
static inline void *fbr_buffer_space_ptr(FBR_PU_ struct fbr_buffer *buffer)
{
	/* Delegate to the underlying virtual ring buffer. */
	struct fbr_vrb *vrb = &buffer->vrb;
	return fbr_vrb_space_ptr(vrb);
}
/**
* Pointer to the start of ``data'' memory area.
* @param [in] buffer a pointer to fbr_buffer
* @returns pointer to data memory region start.
*
* This function returns a pointer to the start of data memory area inside the
* buffer.
*
* You may use this function in case you have a fbr_buffer, that is not used as
* an inter-fiber communication mechanism but only as a local circular data
* buffer.
*
* Mixing space_ptr/data_ptr/give/take API with mutex-protected transactional
* API might lead to corruption and is not recommended unless you know what you
* are doing.
* @see fbr_buffer_data_ptr
* @see fbr_buffer_give
* @see fbr_buffer_take
*/
static inline void *fbr_buffer_data_ptr(FBR_PU_ struct fbr_buffer *buffer)
{
	/* Delegate to the underlying virtual ring buffer. */
	struct fbr_vrb *vrb = &buffer->vrb;
	return fbr_vrb_data_ptr(vrb);
}
/**
* Resets a buffer.
* @param [in] buffer a pointer to fbr_buffer
*
 * This function resets space and data pointers of the buffer to its start,
* which results in whole buffer being marked as space.
*
* This function does not affect the state of mutexes and conditional
 * variables, so using it while the buffer is in use by multiple fibers is not
 * safe.
*
* @see fbr_buffer_init
* @see fbr_buffer_destroy
*/
static inline void fbr_buffer_reset(FBR_PU_ struct fbr_buffer *buffer)
{
	/* Resetting the underlying vrb marks the whole buffer as free space. */
	struct fbr_vrb *vrb = &buffer->vrb;
	fbr_vrb_reset(vrb);
}
/**
* Destroys a circular buffer.
* @param [in] buffer a pointer to fbr_buffer to free
*
* This unmaps all mmaped memory for the buffer. It does not do any fancy stuff
* like waiting until buffer is empty etc., it just frees it.
*/
void fbr_buffer_destroy(FBR_P_ struct fbr_buffer *buffer);
/**
* Prepares a chunk of memory to be committed to buffer.
* @param [in] buffer a pointer to fbr_buffer
* @param [in] size required size
* @returns pointer to memory reserved for commit.
*
* This function reserves a chunk of memory (or waits until there is one
* available, blocking current fiber) and returns pointer to it.
*
* A fiber trying to reserve a chunk of memory after some other fiber already
* reserved it leads to the former fiber being blocked until the latter one
* commits or aborts.
* @see fbr_buffer_alloc_commit
* @see fbr_buffer_alloc_abort
*/
void *fbr_buffer_alloc_prepare(FBR_P_ struct fbr_buffer *buffer, size_t size);
/**
* Commits a chunk of memory to the buffer.
* @param [in] buffer a pointer to fbr_buffer
*
* This function commits a chunk of memory previously reserved.
* @see fbr_buffer_alloc_prepare
* @see fbr_buffer_alloc_abort
*/
void fbr_buffer_alloc_commit(FBR_P_ struct fbr_buffer *buffer);
/**
* Aborts a chunk of memory in the buffer.
* @param [in] buffer a pointer to fbr_buffer
*
* This function aborts prepared chunk of memory previously reserved. It will
 * not be committed and the next fiber may reuse it for its own purposes.
* @see fbr_buffer_alloc_prepare
* @see fbr_buffer_alloc_commit
*/
void fbr_buffer_alloc_abort(FBR_P_ struct fbr_buffer *buffer);
/**
 * Reserves a chunk of memory in the buffer for reading.
* @param [in] buffer a pointer to fbr_buffer
* @param [in] size number of bytes required
* @returns read address containing size bytes
*
* This function reserves (or waits till data is available, blocking current
* fiber) a chunk of memory for reading. While a chunk of memory is reserved
* for reading no other fiber can read from this buffer blocking until current
* read is advanced or discarded.
* @see fbr_buffer_read_advance
* @see fbr_buffer_read_discard
*/
void *fbr_buffer_read_address(FBR_P_ struct fbr_buffer *buffer, size_t size);
/**
* Confirms a read of chunk of memory in the buffer.
* @param [in] buffer a pointer to fbr_buffer
*
* This function confirms that bytes obtained with fbr_buffer_read_address are
* read and no other fiber will be able to read them.
* @see fbr_buffer_read_address
* @see fbr_buffer_read_discard
*/
void fbr_buffer_read_advance(FBR_P_ struct fbr_buffer *buffer);
/**
* Discards a read of chunk of memory in the buffer.
* @param [in] buffer a pointer to fbr_buffer
*
* This function discards bytes obtained with fbr_buffer_read_address. Next
* fiber trying to read something from a buffer may obtain those bytes.
* @see fbr_buffer_read_address
* @see fbr_buffer_read_advance
*/
void fbr_buffer_read_discard(FBR_P_ struct fbr_buffer *buffer);
/**
* Resizes the buffer.
* @param [in] buffer a pointer to fbr_buffer
* @param [in] size a new buffer length
* @returns 0 on success, -1 on error.
*
* This function allocates new memory mapping of sufficient size and copies the
* content of a buffer into it. Old mapping is destroyed.
*
* This operation is expensive and involves several syscalls, so it is
 * beneficial to allocate a buffer of sufficient size from the start.
*
* This function acquires both read and write mutex, and may block until read
* or write operation has finished.
* @see fbr_buffer_reset
*/
int fbr_buffer_resize(FBR_P_ struct fbr_buffer *buffer, size_t size);
/**
* Helper function, returning read conditional variable.
* @param [in] buffer a pointer to fbr_buffer
* @returns read conditional variable
*/
static inline struct fbr_cond_var *fbr_buffer_cond_read(FBR_PU_
		struct fbr_buffer *buffer)
{
	/* Signalled when a writer commits bytes into the buffer; exposed
	 * so callers can combine it in custom waits. */
	return &buffer->committed_cond;
}
/**
* Helper function, returning write conditional variable.
* @param [in] buffer a pointer to fbr_buffer
* @returns write conditional variable
*/
static inline struct fbr_cond_var *fbr_buffer_cond_write(FBR_PU_
		struct fbr_buffer *buffer)
{
	/* Signalled when a reader frees bytes in the buffer; exposed so
	 * callers can combine it in custom waits. */
	return &buffer->bytes_freed_cond;
}
/**
* Helper function, which waits until read is possible.
* @param [in] buffer a pointer to fbr_buffer
* @param [in] size required read size
* @returns 1 to be while friendly
*
* This function is useful when you need to wait for data to arrive on a buffer
* in a while loop.
*/
static inline int fbr_buffer_wait_read(FBR_P_ struct fbr_buffer *buffer,
		size_t size)
{
	/* Block the calling fiber until at least `size' bytes can be
	 * read from the buffer. */
	struct fbr_mutex wait_mutex;
	int rc;
	/* The condition-variable API requires a locked mutex; a local
	 * throw-away one is sufficient here. */
	fbr_mutex_init(FBR_A_ &wait_mutex);
	fbr_mutex_lock(FBR_A_ &wait_mutex);
	for (;;) {
		if (fbr_buffer_bytes(FBR_A_ buffer) >= size)
			break;
		rc = fbr_cond_wait(FBR_A_ &buffer->committed_cond,
				&wait_mutex);
		assert(0 == rc);
		(void)rc;
	}
	fbr_mutex_unlock(FBR_A_ &wait_mutex);
	fbr_mutex_destroy(FBR_A_ &wait_mutex);
	/* Always 1 so the call is convenient as a `while' condition. */
	return 1;
}
/**
 * Helper function, which tests if read is possible.
* @param [in] buffer a pointer to fbr_buffer
* @param [in] size required read size
* @returns 1 if read is possible, 0 otherwise
*
* This function is useful when you need to test if you can read some data from
* the buffer without blocking.
*/
static inline int fbr_buffer_can_read(FBR_P_ struct fbr_buffer *buffer,
		size_t size)
{
	/* Non-blocking check: true when at least `size' bytes are
	 * already committed and readable. */
	return fbr_buffer_bytes(FBR_A_ buffer) >= size;
}
/**
* Helper function, which waits until write is possible.
* @param [in] buffer a pointer to fbr_buffer
* @param [in] size required write size
* @returns 1 to be while friendly
*
* This function is useful when you need to wait for free space on a buffer
* in a while loop.
*/
static inline int fbr_buffer_wait_write(FBR_P_ struct fbr_buffer *buffer,
		size_t size)
{
	/* Block the calling fiber until the buffer has at least `size'
	 * bytes of free space. */
	struct fbr_mutex wait_mutex;
	int rc;
	/* The condition-variable API requires a locked mutex; a local
	 * throw-away one is sufficient here. */
	fbr_mutex_init(FBR_A_ &wait_mutex);
	fbr_mutex_lock(FBR_A_ &wait_mutex);
	for (;;) {
		if (fbr_buffer_free_bytes(FBR_A_ buffer) >= size)
			break;
		rc = fbr_cond_wait(FBR_A_ &buffer->bytes_freed_cond,
				&wait_mutex);
		assert(0 == rc);
		(void)rc;
	}
	fbr_mutex_unlock(FBR_A_ &wait_mutex);
	fbr_mutex_destroy(FBR_A_ &wait_mutex);
	/* Always 1 so the call is convenient as a `while' condition. */
	return 1;
}
/**
 * Helper function, which tests if write is possible.
* @param [in] buffer a pointer to fbr_buffer
* @param [in] size required write size
* @returns 1 if write is possible, 0 otherwise
*
* This function is useful when you need to test if you can write some data to
* the buffer without blocking.
*/
static inline int fbr_buffer_can_write(FBR_P_ struct fbr_buffer *buffer,
		size_t size)
{
	/* Non-blocking check: true when at least `size' bytes of free
	 * space are currently available. */
	return fbr_buffer_free_bytes(FBR_A_ buffer) >= size;
}
struct fbr_mq *fbr_mq_create(FBR_P_ size_t size, int flags);
void fbr_mq_push(struct fbr_mq *mq, void *obj);
int fbr_mq_try_push(struct fbr_mq *mq, void *obj);
void fbr_mq_wait_push(struct fbr_mq *mq);
void *fbr_mq_pop(struct fbr_mq *mq);
int fbr_mq_try_pop(struct fbr_mq *mq, void **obj);
void fbr_mq_wait_pop(struct fbr_mq *mq);
void fbr_mq_clear(struct fbr_mq *mq, int wake_up_writers);
void fbr_mq_destroy(struct fbr_mq *mq);
/**
* Gets fiber user data pointer.
* @param [in] id fiber id
* @returns user data pointer on success, NULL on failure with f_errno set
*
* This function allows you to retrieve user data pointer.
* @see fbr_set_user_data
*/
void *fbr_get_user_data(FBR_P_ fbr_id_t id);
/**
* Sets fiber user data pointer.
* @param [in] id fiber id
* @param [in] data pointer to user data
* @returns 0 on success, -1 upon failure with f_errno set
*
* This function allows you to extend fiber with some user structure.
* @see fbr_get_user_data
*/
int fbr_set_user_data(FBR_P_ fbr_id_t id, void *data);
/**
* Implementation of popen() with redirection of all descriptors --- stdin,
* stdout and stderr.
* @param [in] filename as in execve(2)
* @param [in] argv as in execve(2)
* @param [in] envp as in execve(2)
* @param [in] working_dir if not NULL, child process will be launched with
* working directory set to working_dir
* @param [in] stdin_w_ptr if not NULL, a new write-only file descriptor mapped
* to child STDIN will be written at the provided address
* @param [in] stdout_r_ptr if not NULL, a new read-only file descriptor mapped
* to child STDOUT will be written at the provided address
* @param [in] stderr_r_ptr if not NULL, a new read-only file descriptor mapped
* to child STDERR will be written at the provided address
* @returns the pid of the launched process or -1 upon error
*
 * If any of the file descriptor pointers is NULL, fbr_popen3 will use a read or
* write file descriptor for /dev/null instead.
*/
pid_t fbr_popen3(FBR_P_ const char *filename, char *const argv[],
char *const envp[], const char *working_dir,
int *stdin_w_ptr, int *stdout_r_ptr, int *stderr_r_ptr);
/**
* Convenience wrapper for fbr_popen3() without stderr redirection.
* @param [in] filename as in execve(2)
* @param [in] argv as in execve(2)
* @param [in] envp as in execve(2)
* @param [in] working_dir if not NULL, child process will be launched with
* working directory set to working_dir
* @param [in] stdin_w_ptr if not NULL, a new write-only file descriptor mapped
* to child STDIN will be written at the provided address
* @param [in] stdout_r_ptr if not NULL, a new read-only file descriptor mapped
* to child STDOUT will be written at the provided address
* @returns the pid of the launched process or -1 upon error
*
 * If any of the file descriptor pointers is NULL, fbr_popen3 will use a read or
* write file descriptor for /dev/null instead.
*
* @see fbr_popen3
*/
static inline pid_t fbr_popen2(FBR_P_ const char *filename, char *const argv[],
		char *const envp[], const char *working_dir, int *stdin_w_ptr,
		int *stdout_r_ptr)
{
	/* NULL stderr pointer: fbr_popen3 redirects child stderr to
	 * /dev/null (see its documentation above). */
	return fbr_popen3(FBR_A_ filename, argv, envp, working_dir, stdin_w_ptr,
			stdout_r_ptr, NULL);
}
/**
* Convenience wrapper for fbr_popen3() without any descriptors being
* redirected.
* @param [in] filename as in execve(2)
* @param [in] argv as in execve(2)
* @param [in] envp as in execve(2)
* @param [in] working_dir if not NULL, child process will be launched with
* working directory set to working_dir
* @returns the pid of the launched process or -1 upon error
*
* @see fbr_popen3
*/
static inline pid_t fbr_popen0(FBR_P_ const char *filename, char *const argv[],
		char *const envp[], const char *working_dir)
{
	/* All descriptor pointers NULL: fbr_popen3 maps child stdin,
	 * stdout and stderr to /dev/null. */
	return fbr_popen3(FBR_A_ filename, argv, envp, working_dir, NULL, NULL,
			NULL);
}
/**
* Waits for child process to finish.
* @param [in] pid is the PID of the process to wait for
* @returns the process exit/trace status caused by rpid (see your systems
* waitpid and sys/wait.h documentation for details)
*
* This function is basically a fiber wrapper for ev_child watcher. It's worth
* reading the libev documentation for ev_child to fully understand the
* limitations.
*/
int fbr_waitpid(FBR_P_ pid_t pid);
int fbr_system(FBR_P_ const char *filename, char *const argv[],
char *const envp[], const char *working_dir);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>cmake/FindLibEio.cmake<|end_filename|>
# FindLibEio.cmake - locates the libeio asynchronous I/O library.
#
# Honours the LIBEIO_DIR environment variable as an extra search hint and
# defines on success:
#   LIBEIO_INCLUDE_DIR - directory containing eio.h
#   LIBEIO_LIBRARY     - path to the eio (or ev-eio) library
#   LIBEIO_FOUND       - set by find_package_handle_standard_args
#
# Fix: check_library_exists() is provided by the CheckLibraryExists
# module; it was previously used without this include, which only worked
# if some other module happened to pull it in first.
include(CheckLibraryExists)
find_path(LIBEIO_INCLUDE_DIR eio.h
	HINTS $ENV{LIBEIO_DIR}
	PATH_SUFFIXES include
	PATHS /usr/local /usr
	)
find_library(LIBEIO_LIBRARY
	NAMES ev-eio eio
	HINTS $ENV{LIBEIO_DIR}
	PATH_SUFFIXES lib
	PATHS /usr/local /usr
	)
# Only sanity-check the symbol when a library was actually found;
# otherwise fall through so find_package_handle_standard_args reports
# the standard "not found" diagnostics instead of a confusing
# FATAL_ERROR about a NOTFOUND path.
if (LIBEIO_LIBRARY)
	check_library_exists(${LIBEIO_LIBRARY} eio_custom "" EIO_CUSTOM_IS_PRESENT)
	if (NOT EIO_CUSTOM_IS_PRESENT)
		message(FATAL_ERROR "symbol eio_custom is not found in ${LIBEIO_LIBRARY}")
	endif()
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(LibEio DEFAULT_MSG LIBEIO_LIBRARY LIBEIO_INCLUDE_DIR)
mark_as_advanced(LIBEIO_INCLUDE_DIR LIBEIO_LIBRARY)
<|start_filename|>include/evfibers_private/fiber.h<|end_filename|>
/********************************************************************
Copyright 2013 <NAME> <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
********************************************************************/
#ifndef _FBR_FIBER_PRIVATE_H_
#define _FBR_FIBER_PRIVATE_H_
#include <stdarg.h>
#include <stdlib.h>
#include <unistd.h>
#include <stdint.h>
#include <sys/queue.h>
#include <evfibers/fiber.h>
#include <evfibers_private/trace.h>
#include <coro.h>
#define max(a,b) ({ \
const typeof(a) __tmp_a = (a); \
const typeof(b) __tmp_b = (b); \
__tmp_a > __tmp_b ? __tmp_a : __tmp_b; \
})
#define min(a,b) ({ \
const typeof(a) __tmp_a = (a); \
const typeof(b) __tmp_b = (b); \
__tmp_a < __tmp_b ? __tmp_a : __tmp_b; \
})
#define _unused_ __attribute__((unused))
/* Fallbacks for the "safe" BSD sys/queue.h iterators (they permit the
 * current element to be unlinked during iteration); not every libc
 * ships them. */
#ifndef LIST_FOREACH_SAFE
#define LIST_FOREACH_SAFE(var, head, field, next_var) \
	for ((var) = ((head)->lh_first); \
	    (var) && ((next_var) = ((var)->field.le_next), 1); \
	    (var) = (next_var))
#endif
#ifndef TAILQ_FOREACH_SAFE
#define TAILQ_FOREACH_SAFE(var, head, field, next_var) \
	for ((var) = ((head)->tqh_first); \
	    (var) ? ({ (next_var) = ((var)->field.tqe_next); 1; }) \
	    : 0; \
	    (var) = (next_var))
#endif
/* Asserts that we are currently executing inside the root fiber. */
#define ENSURE_ROOT_FIBER do { \
	assert(fctx->__p->sp->fiber == &fctx->__p->root); \
} while (0)
/* Fiber at the top of the transfer stack, and its packed id. */
#define CURRENT_FIBER (fctx->__p->sp->fiber)
#define CURRENT_FIBER_ID (fbr_id_pack(CURRENT_FIBER))
/* True when the frame one level down the transfer stack is the root. */
#define CALLED_BY_ROOT ((fctx->__p->sp - 1)->fiber == &fctx->__p->root)
/* Resolves `id' into a fiber pointer stored via `ptr'; on failure
 * returns `value' from the enclosing function (f_errno presumably set
 * by fbr_id_unpack -- confirm in fiber.c). */
#define unpack_transfer_errno(value, ptr, id) \
	do { \
		if (-1 == fbr_id_unpack(fctx, ptr, id)) \
			return (value); \
	} while (0)
/* Sets f_errno to FBR_SUCCESS / the given code, then returns `value'
 * from the enclosing function. */
#define return_success(value) \
	do { \
		fctx->f_errno = FBR_SUCCESS; \
		return (value); \
	} while (0)
#define return_error(value, code) \
	do { \
		fctx->f_errno = (code); \
		return (value); \
	} while (0)
/*
 * NOTE: a second, byte-identical copy of the LIST_FOREACH_SAFE /
 * TAILQ_FOREACH_SAFE fallbacks and of the ENSURE_ROOT_FIBER,
 * CURRENT_FIBER, CURRENT_FIBER_ID, CALLED_BY_ROOT,
 * unpack_transfer_errno, return_success and return_error macros used to
 * live here -- an apparent copy/paste slip.  Identical redefinitions
 * are legal C and were therefore harmless, but they were pure noise and
 * a maintenance hazard, so the duplicates have been removed.  The
 * canonical definitions appear once, earlier in this header.
 */
/* One tracked allocation in a fiber's memory pool. */
struct mem_pool {
	void *ptr;	/* the allocated memory block */
	/* optional cleanup callback -- presumably invoked when the block
	 * is freed or the owning fiber is reclaimed; confirm in fiber.c */
	fbr_alloc_destructor_func_t destructor;
	void *destructor_context;	/* opaque argument for destructor */
	LIST_ENTRY(mem_pool) entries;	/* link in the fiber's pool list */
};
LIST_HEAD(mem_pool_list, mem_pool);
TAILQ_HEAD(fiber_destructor_tailq, fbr_destructor);
LIST_HEAD(fiber_list, fbr_fiber);
/* Internal control block for a single fiber. */
struct fbr_fiber {
	uint64_t id;	/* unique id, packed into fbr_id_t handles */
	char name[FBR_MAX_FIBER_NAME];	/* human-readable name (debugging) */
	fbr_fiber_func_t func;	/* fiber entry point */
	void *func_arg;	/* opaque argument handed to func */
	coro_context ctx;	/* libcoro context used for stack switching */
	char *stack;	/* base of the fiber's private stack */
	size_t stack_size;	/* stack size in bytes */
	struct {
		/* events this fiber is waiting on -- NOTE(review): exact
		 * protocol lives in fiber.c; confirm there */
		struct fbr_ev_base **waiting;
		int arrived;	/* presumably flags/counts arrived events */
	} ev;
	struct trace_info reclaim_tinfo;	/* presumably the backtrace
						 * captured at reclaim time */
	struct fiber_list children;	/* fibers spawned by this one */
	struct fbr_fiber *parent;	/* spawning fiber, if any */
	struct mem_pool_list pool;	/* per-fiber tracked allocations */
	struct {
		LIST_ENTRY(fbr_fiber) reclaimed;	/* link in reclaimed list */
		LIST_ENTRY(fbr_fiber) children;	/* link in parent's children */
	} entries;
	struct fiber_destructor_tailq destructors;	/* presumably run on
							 * reclaim */
	void *user_data;	/* see fbr_get_user_data/fbr_set_user_data */
	void *key_data[FBR_MAX_KEY];	/* per-fiber key/value slots */
	int no_reclaim;	/* non-zero: fiber protected from reclaim */
	int want_reclaim;	/* non-zero: reclaim requested meanwhile */
	struct fbr_cond_var reclaim_cond;	/* presumably signalled when
						 * reclaim becomes possible */
};
TAILQ_HEAD(mutex_tailq, fbr_mutex);
/* One frame of the fiber transfer stack (see fbr_context_private.sp). */
struct fbr_stack_item {
	struct fbr_fiber *fiber;	/* fiber executing at this frame */
	struct trace_info tinfo;	/* trace captured for diagnostics */
};
/* Private (implementation-hidden) part of struct fbr_context. */
struct fbr_context_private {
	/* stack of nested fiber transfers; sp points at the current top
	 * (see the CURRENT_FIBER / CALLED_BY_ROOT macros above) */
	struct fbr_stack_item stack[FBR_CALL_STACK_SIZE];
	struct fbr_stack_item *sp;	/* top of the transfer stack */
	struct fbr_fiber root;	/* implicit root fiber */
	struct fiber_list reclaimed;	/* reclaimed fibers */
	struct ev_async pending_async;	/* presumably wakes the loop to
					 * service pending_fibers */
	struct fbr_id_tailq pending_fibers;	/* fibers queued for wakeup */
	int backtraces_enabled;	/* non-zero: capture trace_info data */
	uint64_t last_id;	/* last fiber id handed out */
	uint64_t key_free_mask;	/* bitmask of free fbr key slots */
	const char *buffer_file_pattern;	/* presumably a template for
						 * fbr_buffer backing files */
	struct ev_loop *loop;	/* libev loop driving this context */
};
/* Fixed-capacity message queue of opaque pointers passed between
 * fibers; see the fbr_mq_* API declarations in evfibers/fiber.h. */
struct fbr_mq {
	struct fbr_context *fctx;	/* owning fiber context */
	void **rb;	/* presumably ring-buffer storage for the pointers */
	unsigned head;	/* presumably index of next element to pop */
	unsigned tail;	/* presumably index of next free push slot */
	unsigned max;	/* capacity of rb */
	int flags;	/* behaviour flags passed to fbr_mq_create */
	/* NOTE(review): names suggest byte-oriented ancestry; these
	 * appear to wake poppers/pushers respectively -- confirm in the
	 * implementation */
	struct fbr_cond_var bytes_available_cond;
	struct fbr_cond_var bytes_freed_cond;
};
#endif
<|start_filename|>cmake/FindLibEv.cmake<|end_filename|>
# FindLibEv.cmake - locates the libev event loop library.
#
# Honours the LIBEV_DIR environment variable as an extra search hint and
# defines on success:
#   LIBEV_INCLUDE_DIR - directory containing ev.h
#   LIBEV_LIBRARY     - path to the ev library
#   LIBEV_FOUND       - set by find_package_handle_standard_args
find_path(LIBEV_INCLUDE_DIR ev.h
	HINTS $ENV{LIBEV_DIR}
	PATH_SUFFIXES include
	PATHS /usr/local /usr
	)
find_library(LIBEV_LIBRARY
	NAMES ev
	HINTS $ENV{LIBEV_DIR}
	PATH_SUFFIXES lib
	PATHS /usr/local /usr
	)
include(FindPackageHandleStandardArgs)
# Handles QUIET/REQUIRED arguments and prints the standard status line.
find_package_handle_standard_args(LibEv DEFAULT_MSG LIBEV_LIBRARY LIBEV_INCLUDE_DIR)
mark_as_advanced(LIBEV_INCLUDE_DIR LIBEV_LIBRARY)
| Lupus/libevfibers |
<|start_filename|>doc/javadoc/index-files/index-6.html<|end_filename|>
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_152-release) on Tue Oct 30 23:20:43 CET 2018 -->
<title>G-Index</title>
<meta name="date" content="2018-10-30">
<link rel="stylesheet" type="text/css" href="../stylesheet.css" title="Style">
<script type="text/javascript" src="../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="G-Index";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li><a href="../overview-tree.html">Tree</a></li>
<li><a href="../deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Index</li>
<li><a href="../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="index-5.html">Prev Letter</a></li>
<li><a href="index-7.html">Next Letter</a></li>
</ul>
<ul class="navList">
<li><a href="../index.html?index-files/index-6.html" target="_top">Frames</a></li>
<li><a href="index-6.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="contentContainer"><a href="index-1.html">A</a> <a href="index-2.html">B</a> <a href="index-3.html">C</a> <a href="index-4.html">D</a> <a href="index-5.html">E</a> <a href="index-6.html">G</a> <a href="index-7.html">H</a> <a href="index-8.html">I</a> <a href="index-9.html">M</a> <a href="index-10.html">N</a> <a href="index-11.html">O</a> <a href="index-12.html">P</a> <a href="index-13.html">R</a> <a href="index-14.html">S</a> <a href="index-15.html">T</a> <a href="index-16.html">V</a> <a name="I:G">
<!-- -->
</a>
<h2 class="title">G</h2>
<dl>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#getBackground--">getBackground()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Returns the background of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getColumnCount--">getColumnCount()</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd>
<div class="block">Returns the number of columns, which are displayed by the adapter.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#getContext--">getContext()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Returns the context, which is used by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getCount--">getCount()</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#getDimAmount--">getDimAmount()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Returns the dim amount, which is used to darken the area outside the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getDividerColor--">getDividerColor()</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd>
<div class="block">Returns the color of the adapter's dividers.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#getDividerColor--">getDividerColor()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Returns the color of the dividers of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#getDragSensitivity--">getDragSensitivity()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Returns the sensitivity, which specifies the distance after which dragging has an effect on
the bottom sheet, in relation to an internal value range.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#getGridView--">getGridView()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Returns the grid view, which is contained by the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#getIcon--">getIcon()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Returns the icon of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/model/Item.html#getIcon--">getIcon()</a></span> - Method in class de.mrapp.android.bottomsheet.model.<a href="../de/mrapp/android/bottomsheet/model/Item.html" title="class in de.mrapp.android.bottomsheet.model">Item</a></dt>
<dd>
<div class="block">Returns the item's icon.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#getId-int-">getId(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Returns the id of the item, which corresponds to a specific index.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/model/AbstractItem.html#getId--">getId()</a></span> - Method in class de.mrapp.android.bottomsheet.model.<a href="../de/mrapp/android/bottomsheet/model/AbstractItem.html" title="class in de.mrapp.android.bottomsheet.model">AbstractItem</a></dt>
<dd>
<div class="block">Returns the item's id.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getItem-int-">getItem(int)</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getItemColor--">getItemColor()</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd>
<div class="block">Returns the text color of the adapter's items.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#getItemColor--">getItemColor()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Returns the color of the items of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getItemCount--">getItemCount()</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd>
<div class="block">Returns the number of items, which are contained by the adapter.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#getItemCount--">getItemCount()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Returns the number of items, which are currently contained by the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getItemId-int-">getItemId(int)</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getItemViewType-int-">getItemViewType(int)</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#getListAdapter--">getListAdapter()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Returns the adapter of the grid view, which is contained by the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getStyle--">getStyle()</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd>
<div class="block">Returns the style, which is used to display the adapter's items.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#getStyle--">getStyle()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Returns the style, which is used to display the bottom sheet's items.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#getTitle--">getTitle()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Returns the title of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/model/AbstractItem.html#getTitle--">getTitle()</a></span> - Method in class de.mrapp.android.bottomsheet.model.<a href="../de/mrapp/android/bottomsheet/model/AbstractItem.html" title="class in de.mrapp.android.bottomsheet.model">AbstractItem</a></dt>
<dd>
<div class="block">Returns the item's title.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#getTitleColor--">getTitleColor()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Returns the color of the title of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/view/DraggableView.html#getTopMargin--">getTopMargin()</a></span> - Method in class de.mrapp.android.bottomsheet.view.<a href="../de/mrapp/android/bottomsheet/view/DraggableView.html" title="class in de.mrapp.android.bottomsheet.view">DraggableView</a></dt>
<dd>
<div class="block">Returns the top margin of the view.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getView-int-android.view.View-android.view.ViewGroup-">getView(int, View, ViewGroup)</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getViewTypeCount--">getViewTypeCount()</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#getWidth--">getWidth()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Returns the width of the bottom sheet.</div>
</dd>
</dl>
<a href="index-1.html">A</a> <a href="index-2.html">B</a> <a href="index-3.html">C</a> <a href="index-4.html">D</a> <a href="index-5.html">E</a> <a href="index-6.html">G</a> <a href="index-7.html">H</a> <a href="index-8.html">I</a> <a href="index-9.html">M</a> <a href="index-10.html">N</a> <a href="index-11.html">O</a> <a href="index-12.html">P</a> <a href="index-13.html">R</a> <a href="index-14.html">S</a> <a href="index-15.html">T</a> <a href="index-16.html">V</a> </div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li><a href="../overview-tree.html">Tree</a></li>
<li><a href="../deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Index</li>
<li><a href="../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="index-5.html">Prev Letter</a></li>
<li><a href="index-7.html">Next Letter</a></li>
</ul>
<ul class="navList">
<li><a href="../index.html?index-files/index-6.html" target="_top">Frames</a></li>
<li><a href="index-6.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
<|start_filename|>doc/javadoc/de/mrapp/android/bottomsheet/BottomSheet.Builder.html<|end_filename|>
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_152-release) on Tue Oct 30 23:20:43 CET 2018 -->
<title>BottomSheet.Builder</title>
<meta name="date" content="2018-10-30">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="BottomSheet.Builder";
}
}
catch(err) {
}
//-->
var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10};
var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
var altColor = "altColor";
var rowColor = "rowColor";
var tableTab = "tableTab";
var activeTableTab = "activeTableTab";
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Style.html" title="enum in de.mrapp.android.bottomsheet"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?de/mrapp/android/bottomsheet/BottomSheet.Builder.html" target="_top">Frames</a></li>
<li><a href="BottomSheet.Builder.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li><a href="#constructor.summary">Constr</a> | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li><a href="#constructor.detail">Constr</a> | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">de.mrapp.android.bottomsheet</div>
<h2 title="Class BottomSheet.Builder" class="title">Class BottomSheet.Builder</h2>
</div>
<div class="contentContainer">
<ul class="inheritance">
<li>java.lang.Object</li>
<li>
<ul class="inheritance">
<li>de.mrapp.android.bottomsheet.BottomSheet.Builder</li>
</ul>
</li>
</ul>
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>Enclosing class:</dt>
<dd><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dd>
</dl>
<hr>
<br>
<pre>public static class <span class="typeNameLabel">BottomSheet.Builder</span>
extends java.lang.Object</pre>
<div class="block">A builder, which makes it possible to create and show bottom sheets, which are designed
according to Android 5's Material Design guidelines even on pre-Lollipop devices. Such a
bottom sheet appears at the bottom of the window and consists of a title and multiple items.
It is possible to customize the appearance of the bottom sheet or to replace its title and
items with custom views.</div>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.summary">
<!-- -->
</a>
<h3>Constructor Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
<caption><span>Constructors</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Constructor and Description</th>
</tr>
<tr class="altColor">
<td class="colOne"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#Builder-android.content.Context-">Builder</a></span>(android.content.Context context)</code>
<div class="block">Creates a new builder, which allows to create bottom sheets, which are designed according
to Android 5's Material Design guidelines even on pre-Lollipop devices.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colOne"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#Builder-android.content.Context-int-">Builder</a></span>(android.content.Context context,
int themeResourceId)</code>
<div class="block">Creates a new builder, which allows to create bottom sheets, which are designed according
to Android 5's Material Design guidelines even on pre-Lollipop devices.</div>
</td>
</tr>
</table>
</li>
</ul>
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method.summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd"> </span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd"> </span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd"> </span></span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr id="i0" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#addDivider--">addDivider</a></span>()</code>
<div class="block">Adds a new divider to the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i1" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#addDivider-java.lang.CharSequence-">addDivider</a></span>(java.lang.CharSequence title)</code>
<div class="block">Adds a new divider to the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i2" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#addDivider-int-">addDivider</a></span>(int titleId)</code>
<div class="block">Adds a new divider to the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i3" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#addItem-int-java.lang.CharSequence-">addItem</a></span>(int id,
java.lang.CharSequence title)</code>
<div class="block">Adds a new item to the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i4" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#addItem-int-java.lang.CharSequence-android.graphics.drawable.Drawable-">addItem</a></span>(int id,
java.lang.CharSequence title,
android.graphics.drawable.Drawable icon)</code>
<div class="block">Adds a new item to the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i5" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#addItem-int-int-">addItem</a></span>(int id,
int titleId)</code>
<div class="block">Adds a new item to the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i6" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#addItem-int-int-int-">addItem</a></span>(int id,
int titleId,
int iconId)</code>
<div class="block">Adds a new item to the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i7" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#create--">create</a></span>()</code>
<div class="block">Creates a bottom sheet with the arguments, which have been supplied to the builder.</div>
</td>
</tr>
<tr id="i8" class="altColor">
<td class="colFirst"><code>android.content.Context</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#getContext--">getContext</a></span>()</code>
<div class="block">Returns the context, which is used by the builder.</div>
</td>
</tr>
<tr id="i9" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#maximize--">maximize</a></span>()</code>
<div class="block">Creates a bottom sheet with the arguments, which have been supplied to the builder and
immediately maximizes it.</div>
</td>
</tr>
<tr id="i10" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setBackground-android.graphics.Bitmap-">setBackground</a></span>(android.graphics.Bitmap background)</code>
<div class="block">Sets the background of the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i11" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setBackground-int-">setBackground</a></span>(int resourceId)</code>
<div class="block">Sets the background of the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i12" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setBackgroundColor-int-">setBackgroundColor</a></span>(int color)</code>
<div class="block">Sets the background color of the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i13" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setCancelable-boolean-">setCancelable</a></span>(boolean cancelable)</code>
<div class="block">Sets whether the bottom sheet, which is created by the builder, should be cancelable or
not.</div>
</td>
</tr>
<tr id="i14" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setCustomTitle-int-">setCustomTitle</a></span>(int resourceId)</code>
<div class="block">Sets the custom view, which should be used to show the title of the bottom sheet, which
is created by the builder.</div>
</td>
</tr>
<tr id="i15" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setCustomTitle-android.view.View-">setCustomTitle</a></span>(android.view.View view)</code>
<div class="block">Sets the custom view, which should be used to show the title of the bottom sheet, which
is created by the builder.</div>
</td>
</tr>
<tr id="i16" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setDimAmount-float-">setDimAmount</a></span>(float dimAmount)</code>
<div class="block">Sets the dim amount, which should be used to darken the area outside the bottom sheet,
which is created by the builder.</div>
</td>
</tr>
<tr id="i17" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setDividerColor-int-">setDividerColor</a></span>(int color)</code>
<div class="block">Sets the color of the dividers of the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i18" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setDragSensitivity-float-">setDragSensitivity</a></span>(float dragSensitivity)</code>
<div class="block">Sets the sensitivity, which specifies the distance after which dragging has an effect on
the bottom sheet, in relation to an internal value range.</div>
</td>
</tr>
<tr id="i19" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setIcon-android.graphics.Bitmap-">setIcon</a></span>(android.graphics.Bitmap icon)</code>
<div class="block">Sets the icon of the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i20" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setIcon-int-">setIcon</a></span>(int resourceId)</code>
<div class="block">Sets the icon of the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i21" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setIconAttribute-int-">setIconAttribute</a></span>(int attributeId)</code>
<div class="block">Sets the icon of the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i22" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setIntent-android.app.Activity-android.content.Intent-">setIntent</a></span>(android.app.Activity activity,
android.content.Intent intent)</code>
<div class="block">Adds the apps, which are able to handle a specific intent, as items to the bottom sheet,
which is created by the builder.</div>
</td>
</tr>
<tr id="i23" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setItemColor-int-">setItemColor</a></span>(int color)</code>
<div class="block">Sets the color of the items of the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i24" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setItemEnabled-int-boolean-">setItemEnabled</a></span>(int index,
boolean enabled)</code>
<div class="block">Sets whether the item at a specific index should be enabled or not.</div>
</td>
</tr>
<tr id="i25" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setOnCancelListener-android.content.DialogInterface.OnCancelListener-">setOnCancelListener</a></span>(android.content.DialogInterface.OnCancelListener listener)</code>
<div class="block">Sets the listener, which should be notified, when the bottom sheet, which is created by
the builder, is canceled.</div>
</td>
</tr>
<tr id="i26" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setOnDismissListener-android.content.DialogInterface.OnDismissListener-">setOnDismissListener</a></span>(android.content.DialogInterface.OnDismissListener listener)</code>
<div class="block">Sets the listener, which should be notified, when the bottom sheet, which is created by
the builder, is dismissed for any reason.</div>
</td>
</tr>
<tr id="i27" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setOnItemClickListener-android.widget.AdapterView.OnItemClickListener-">setOnItemClickListener</a></span>(android.widget.AdapterView.OnItemClickListener listener)</code>
<div class="block">Sets the listener, which should be notified, when an item of the bottom sheet has been
clicked.</div>
</td>
</tr>
<tr id="i28" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setOnItemLongClickListener-android.widget.AdapterView.OnItemLongClickListener-">setOnItemLongClickListener</a></span>(android.widget.AdapterView.OnItemLongClickListener listener)</code>
<div class="block">Sets the listener, which should be notified, when an item of the bottom sheet has been
long-clicked.</div>
</td>
</tr>
<tr id="i29" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setOnKeyListener-android.content.DialogInterface.OnKeyListener-">setOnKeyListener</a></span>(android.content.DialogInterface.OnKeyListener listener)</code>
<div class="block">Sets the listener, which should be notified, if a key is dispatched to the bottom sheet,
which is created by the builder.</div>
</td>
</tr>
<tr id="i30" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setOnMaximizeListener-de.mrapp.android.bottomsheet.OnMaximizeListener-">setOnMaximizeListener</a></span>(<a href="../../../../de/mrapp/android/bottomsheet/OnMaximizeListener.html" title="interface in de.mrapp.android.bottomsheet">OnMaximizeListener</a> listener)</code>
<div class="block">Sets the listener, which should be notified, when the bottom sheet, which is created by
the builder, has been maximized.</div>
</td>
</tr>
<tr id="i31" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setStyle-de.mrapp.android.bottomsheet.BottomSheet.Style-">setStyle</a></span>(<a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Style.html" title="enum in de.mrapp.android.bottomsheet">BottomSheet.Style</a> style)</code>
<div class="block">Sets the style of the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i32" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setTitle-java.lang.CharSequence-">setTitle</a></span>(java.lang.CharSequence title)</code>
<div class="block">Sets the title of the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i33" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setTitle-int-">setTitle</a></span>(int resourceId)</code>
<div class="block">Sets the title of the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i34" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setTitleColor-int-">setTitleColor</a></span>(int color)</code>
<div class="block">Sets the color of the title of the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i35" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setView-int-">setView</a></span>(int resourceId)</code>
<div class="block">Sets the custom view, which should be shown by the bottom sheet, which is created by the
builder.</div>
</td>
</tr>
<tr id="i36" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setView-android.view.View-">setView</a></span>(android.view.View view)</code>
<div class="block">Sets the custom view, which should be shown by the bottom sheet, which is created by the
builder.</div>
</td>
</tr>
<tr id="i37" class="rowColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setWidth-int-">setWidth</a></span>(int width)</code>
<div class="block">Sets the width of the bottom sheet, which is created by the builder.</div>
</td>
</tr>
<tr id="i38" class="altColor">
<td class="colFirst"><code><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#show--">show</a></span>()</code>
<div class="block">Creates a bottom sheet with the arguments, which have been supplied to the builder and
immediately displays it.</div>
</td>
</tr>
</table>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
<!-- -->
</a>
<h3>Methods inherited from class java.lang.Object</h3>
<code>clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.detail">
<!-- -->
</a>
<h3>Constructor Detail</h3>
<a name="Builder-android.content.Context-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>Builder</h4>
<pre>public Builder(@NonNull
android.content.Context context)</pre>
<div class="block">Creates a new builder, which allows to create bottom sheets, which are designed according
to Android 5's Material Design guidelines even on pre-Lollipop devices.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>context</code> - The context, which should be used by the builder, as an instance of the class
<code>Context</code>. The context may not be null</dd>
</dl>
</li>
</ul>
<a name="Builder-android.content.Context-int-">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>Builder</h4>
<pre>public Builder(@NonNull
android.content.Context context,
@StyleRes
int themeResourceId)</pre>
<div class="block">Creates a new builder, which allows to create bottom sheets, which are designed according
to Android 5's Material Design guidelines even on pre-Lollipop devices.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>context</code> - The context, which should be used by the builder, as an instance of the class
<code>Context</code>. The context may not be null</dd>
<dd><code>themeResourceId</code> - The resource id of the theme, which should be used by the bottom sheet, as an
<code>Integer</code> value. The resource id must correspond to a valid theme</dd>
</dl>
</li>
</ul>
</li>
</ul>
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method.detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="getContext--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getContext</h4>
<pre>public final android.content.Context getContext()</pre>
<div class="block">Returns the context, which is used by the builder.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The context, which is used by the builder, as an instance of the class <code>Context</code></dd>
</dl>
</li>
</ul>
<a name="setCancelable-boolean-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setCancelable</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setCancelable(boolean cancelable)</pre>
<div class="block">Sets, whether the bottom sheet, which is created by the builder, should be cancelable, or
not.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>cancelable</code> - True, if the bottom sheet, which is created by the builder, should be cancelable,
false otherwise</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setStyle-de.mrapp.android.bottomsheet.BottomSheet.Style-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setStyle</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setStyle(@NonNull
<a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Style.html" title="enum in de.mrapp.android.bottomsheet">BottomSheet.Style</a> style)</pre>
<div class="block">Sets the style of the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>style</code> - The style, which should be set, as a value of the enum <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Style.html" title="enum in de.mrapp.android.bottomsheet"><code>BottomSheet.Style</code></a>. The style
may either be <code>LIST</code>, <code>LIST_COLUMNS</code> or <code>GRID</code></dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setOnItemClickListener-android.widget.AdapterView.OnItemClickListener-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setOnItemClickListener</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setOnItemClickListener(@Nullable
android.widget.AdapterView.OnItemClickListener listener)</pre>
<div class="block">Sets the listener, which should be notified, when an item of the bottom sheet has been
clicked.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>listener</code> - The listener, which should be set, as an instance of the type <code>AdapterView.OnItemClickListener</code> or null, if no listener should be notified</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setOnItemLongClickListener-android.widget.AdapterView.OnItemLongClickListener-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setOnItemLongClickListener</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setOnItemLongClickListener(@Nullable
android.widget.AdapterView.OnItemLongClickListener listener)</pre>
<div class="block">Sets the listener, which should be notified, when an item of the bottom sheet has been
long-clicked.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>listener</code> - The listener, which should be set, as an instance of the type <code>AdapterView.OnItemLongClickListener</code> or null, if no listener should be notified</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setOnMaximizeListener-de.mrapp.android.bottomsheet.OnMaximizeListener-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setOnMaximizeListener</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setOnMaximizeListener(@Nullable
<a href="../../../../de/mrapp/android/bottomsheet/OnMaximizeListener.html" title="interface in de.mrapp.android.bottomsheet">OnMaximizeListener</a> listener)</pre>
<div class="block">Sets the listener, which should be notified, when the bottom sheet, which is created by
the builder, has been maximized.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>listener</code> - The listener, which should be set, as an instance of the type <a href="../../../../de/mrapp/android/bottomsheet/OnMaximizeListener.html" title="interface in de.mrapp.android.bottomsheet"><code>OnMaximizeListener</code></a> or null, if no listener should be notified</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setOnCancelListener-android.content.DialogInterface.OnCancelListener-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setOnCancelListener</h4>
<pre>public <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setOnCancelListener(@Nullable
android.content.DialogInterface.OnCancelListener listener)</pre>
<div class="block">Sets the listener, which should be notified, when the bottom sheet, which is created by
the builder, is canceled.
<p>
If you are interested in listening for all cases where the bottom sheet is dismissed and
not just when it is canceled, see <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setOnDismissListener-android.content.DialogInterface.OnDismissListener-"><code>setOnDismissListener</code></a>.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>listener</code> - The listener, which should be set, as an instance of the type <code>OnCancelListener</code>, or null, if no listener should be set</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
<dt><span class="seeLabel">See Also:</span></dt>
<dd><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setOnDismissListener-android.content.DialogInterface.OnDismissListener-"><code>setOnDismissListener(android.content.DialogInterface.OnDismissListener)</code></a></dd>
</dl>
</li>
</ul>
<a name="setOnDismissListener-android.content.DialogInterface.OnDismissListener-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setOnDismissListener</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setOnDismissListener(@Nullable
android.content.DialogInterface.OnDismissListener listener)</pre>
<div class="block">Sets the listener, which should be notified, when the bottom sheet, which is created by
the builder, is dismissed for any reason.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>listener</code> - The listener, which should be set, as an instance of the type <code>OnDismissListener</code>, or null, if no listener should be set</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setOnKeyListener-android.content.DialogInterface.OnKeyListener-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setOnKeyListener</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setOnKeyListener(@Nullable
android.content.DialogInterface.OnKeyListener listener)</pre>
<div class="block">Sets the listener, which should be notified, if a key is dispatched to the bottom sheet,
which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>listener</code> - The listener, which should be set, as an instance of the type <code>OnKeyListener</code>, or null, if no listener should be set</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setTitleColor-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setTitleColor</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setTitleColor(int color)</pre>
<div class="block">Sets the color of the title of the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>color</code> - The color, which should be set, as an <code>Integer</code> value or -1, if no custom
color should be set</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setItemColor-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setItemColor</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setItemColor(int color)</pre>
<div class="block">Sets the color of the items of the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>color</code> - The color, which should be set, as an <code>Integer</code> value or -1, if no custom
color should be set</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setDividerColor-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setDividerColor</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setDividerColor(int color)</pre>
<div class="block">Sets the color of the dividers of the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>color</code> - The color, which should be set, as an <code>Integer</code> value or -1, if no custom
color should be set</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setBackground-android.graphics.Bitmap-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setBackground</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setBackground(@Nullable
android.graphics.Bitmap background)</pre>
<div class="block">Sets the background of the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>background</code> - The background, which should be set, as an instance of the class <code>Bitmap</code>
or null, if no custom background should be set</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setBackground-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setBackground</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setBackground(@DrawableRes
int resourceId)</pre>
<div class="block">Sets the background of the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>resourceId</code> - The resource id of the background, which should be set, as an <code>Integer</code>
value. The resource id must correspond to a valid drawable resource</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setBackgroundColor-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setBackgroundColor</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setBackgroundColor(int color)</pre>
<div class="block">Sets the background color of the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>color</code> - The background color, which should be set, as an <code>Integer</code> value or -1, if
no custom background color should be set</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setTitle-java.lang.CharSequence-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setTitle</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setTitle(@Nullable
java.lang.CharSequence title)</pre>
<div class="block">Sets the title of the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>title</code> - The title, which should be set, as an instance of the type <code>CharSequence</code>
or null, if no title should be shown</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setTitle-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setTitle</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setTitle(@StringRes
int resourceId)</pre>
<div class="block">Sets the title of the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>resourceId</code> - The resource id of the title, which should be set, as an <code>Integer</code> value.
The resource id must correspond to a valid string resource</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setIcon-android.graphics.Bitmap-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setIcon</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setIcon(@Nullable
android.graphics.Bitmap icon)</pre>
<div class="block">Sets the icon of the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>icon</code> - The icon, which should be set, as an instance of the class <code>Bitmap</code> or
null, if no icon should be shown</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setIcon-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setIcon</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setIcon(@DrawableRes
int resourceId)</pre>
<div class="block">Sets the icon of the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>resourceId</code> - The resource id of the icon, which should be set, as an <code>Integer</code> value.
The resource id must correspond to a valid drawable resource</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setIconAttribute-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setIconAttribute</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setIconAttribute(@AttrRes
int attributeId)</pre>
<div class="block">Sets the icon of the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>attributeId</code> - The id of the theme attribute, which supplies the icon, which should be set, as
an <code>Integer</code> value. The id must point to a valid drawable resource</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setView-android.view.View-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setView</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setView(@Nullable
android.view.View view)</pre>
<div class="block">Sets the custom view, which should be shown by the bottom sheet, which is created by the
builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>view</code> - The view, which should be set, as an instance of the class <code>View</code> or null,
if no custom view should be shown</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setView-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setView</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setView(@LayoutRes
int resourceId)</pre>
<div class="block">Sets the custom view, which should be shown by the bottom sheet, which is created by the
builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>resourceId</code> - The resource id of the view, which should be set, as an <code>Integer</code> value.
The resource id must correspond to a valid layout resource</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setCustomTitle-android.view.View-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setCustomTitle</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setCustomTitle(@Nullable
android.view.View view)</pre>
<div class="block">Sets the custom view, which should be used to show the title of the bottom sheet, which
is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>view</code> - The view, which should be set, as an instance of the class <code>View</code> or null,
if no custom view should be used to show the title</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setCustomTitle-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setCustomTitle</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setCustomTitle(@LayoutRes
int resourceId)</pre>
<div class="block">Sets the custom view, which should be used to show the title of the bottom sheet, which
is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>resourceId</code> - The resource id of the view, which should be set, as an <code>Integer</code> value.
The resource id must correspond to a valid layout resource</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setDragSensitivity-float-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setDragSensitivity</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setDragSensitivity(float dragSensitivity)</pre>
<div class="block">Sets the sensitivity, which specifies the distance after which dragging has an effect on
the bottom sheet, in relation to an internal value range.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>dragSensitivity</code> - The drag sensitivity, which should be set, as a <code>Float</code> value. The drag
sensitivity must be at least 0 and at maximum 1</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setDimAmount-float-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setDimAmount</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setDimAmount(float dimAmount)</pre>
<div class="block">Sets the dim amount, which should be used to darken the area outside the bottom sheet,
which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>dimAmount</code> - The dim amount, which should be set, as a <code>Float</code> value. The dim amount
must be at least 0 (fully transparent) and at maximum 1 (fully opaque)</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setWidth-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setWidth</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setWidth(int width)</pre>
<div class="block">Sets the width of the bottom sheet, which is created by the builder. The width is only
used on tablet devices or in landscape mode.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>width</code> - The width, which should be set, in pixels as an <code>Integer</code> value. The width
must be at least 1</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="addItem-int-java.lang.CharSequence-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addItem</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> addItem(int id,
@NonNull
java.lang.CharSequence title)</pre>
<div class="block">Adds a new item to the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>id</code> - The id of the item, which should be added, as an <code>Integer</code> value. The id
must be at least 0</dd>
<dd><code>title</code> - The title of the item, which should be added, as an instance of the type <code>CharSequence</code>. The title may neither be null, nor empty</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="addItem-int-java.lang.CharSequence-android.graphics.drawable.Drawable-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addItem</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> addItem(int id,
@NonNull
java.lang.CharSequence title,
@Nullable
android.graphics.drawable.Drawable icon)</pre>
<div class="block">Adds a new item to the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>id</code> - The id of the item, which should be added, as an <code>Integer</code> value. The id
must be at least 0</dd>
<dd><code>title</code> - The title of the item, which should be added, as an instance of the type <code>CharSequence</code>. The title may neither be null, nor empty</dd>
<dd><code>icon</code> - The icon of the item, which should be added, as an instance of the class <code>Drawable</code>, or null, if no icon should be used</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="addItem-int-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addItem</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> addItem(int id,
@StringRes
int titleId)</pre>
<div class="block">Adds a new item to the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>id</code> - The id of the item, which should be added, as an <code>Integer</code> value. The id
must be at least 0</dd>
<dd><code>titleId</code> - The resource id of the title of the item, which should be added, as an <code>Integer</code> value. The resource id must correspond to a valid string resource</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="addItem-int-int-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addItem</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> addItem(int id,
@StringRes
int titleId,
@DrawableRes
int iconId)</pre>
<div class="block">Adds a new item to the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>id</code> - The id of the item, which should be added, as an <code>Integer</code> value. The id
must be at least 0</dd>
<dd><code>titleId</code> - The resource id of the title of the item, which should be added, as an <code>Integer</code> value. The resource id must correspond to a valid string resource</dd>
<dd><code>iconId</code> - The resource id of the icon of the item, which should be added, as an <code>Integer</code> value. The resource id must correspond to a valid drawable resource</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="addDivider--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addDivider</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> addDivider()</pre>
<div class="block">Adds a new divider to the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="addDivider-java.lang.CharSequence-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addDivider</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> addDivider(@Nullable
java.lang.CharSequence title)</pre>
<div class="block">Adds a new divider to the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>title</code> - The title of the divider, which should be added, as an instance of the type
<code>CharSequence</code>, or null, if no title should be used</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="addDivider-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addDivider</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> addDivider(@StringRes
int titleId)</pre>
<div class="block">Adds a new divider to the bottom sheet, which is created by the builder.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>titleId</code> - The resource id of the title, which should be added, as an <code>Integer</code> value.
The resource id must correspond to a valid string resource</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setItemEnabled-int-boolean-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setItemEnabled</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setItemEnabled(int index,
boolean enabled)</pre>
<div class="block">Sets, whether the item at a specific index should be enabled, or not.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>index</code> - The index of the item as an <code>Integer</code> value</dd>
<dd><code>enabled</code> - True, if the item should be enabled, false otherwise</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="setIntent-android.app.Activity-android.content.Intent-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setIntent</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a> setIntent(@NonNull
android.app.Activity activity,
@NonNull
android.content.Intent intent)</pre>
<div class="block">Adds the apps, which are able to handle a specific intent, as items to the bottom sheet,
which is created by the builder. This causes all previously added items to be removed.
When an item is clicked, the corresponding app is started.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>activity</code> - The activity, the bottom sheet, which is created by the builder, belongs to, as
an instance of the class <code>Activity</code>. The activity may not be null</dd>
<dd><code>intent</code> - The intent as an instance of the class <code>Intent</code>. The intent may not be
null</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The builder, the method has been called upon, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet.Builder</code></a></dd>
</dl>
</li>
</ul>
<a name="create--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>create</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a> create()</pre>
<div class="block">Creates a bottom sheet with the arguments, which have been supplied to the builder.
Calling this method does not display the bottom sheet.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The bottom sheet, which has been created as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a></dd>
</dl>
</li>
</ul>
<a name="show--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>show</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a> show()</pre>
<div class="block">Creates a bottom sheet with the arguments, which have been supplied to the builder and
immediately displays it.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The bottom sheet, which has been created, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a></dd>
</dl>
</li>
</ul>
<a name="maximize--">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>maximize</h4>
<pre>public final <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a> maximize()</pre>
<div class="block">Creates a bottom sheet with the arguments, which have been supplied to the builder and
immediately maximizes it.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The bottom sheet, which has been created, as an instance of the class <a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a></dd>
</dl>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../../de/mrapp/android/bottomsheet/BottomSheet.Style.html" title="enum in de.mrapp.android.bottomsheet"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?de/mrapp/android/bottomsheet/BottomSheet.Builder.html" target="_top">Frames</a></li>
<li><a href="BottomSheet.Builder.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li><a href="#constructor.summary">Constr</a> | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li><a href="#constructor.detail">Constr</a> | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
<|start_filename|>example/src/main/java/de/mrapp/android/bottomsheet/example/PreferenceFragment.java<|end_filename|>
/*
* Copyright 2016 - 2018 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package de.mrapp.android.bottomsheet.example;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.preference.Preference;
import android.preference.Preference.OnPreferenceClickListener;
import android.preference.PreferenceManager;
import androidx.annotation.NonNull;
import androidx.core.content.ContextCompat;
import android.view.View;
import android.widget.AdapterView;
import android.widget.Toast;
import de.mrapp.android.bottomsheet.BottomSheet;
import de.mrapp.android.bottomsheet.BottomSheet.Style;
/**
* A preference fragment, which contains the example app's settings.
*
* @author <NAME>
*/
public class PreferenceFragment extends android.preference.PreferenceFragment {

    /**
     * The name of the extra, which is used to store the state of the regular bottom sheet within a
     * bundle.
     */
    private static final String BOTTOM_SHEET_STATE_EXTRA =
            PreferenceFragment.class.getSimpleName() + "::bottomSheetState";

    /**
     * The name of the extra, which is used to store the state of the custom bottom sheet within a
     * bundle.
     */
    private static final String CUSTOM_BOTTOM_SHEET_STATE_EXTRA =
            PreferenceFragment.class.getSimpleName() + "::customBottomSheetState";

    /**
     * The name of the extra, which is used to store the state of the intent bottom sheet within a
     * bundle.
     */
    private static final String INTENT_BOTTOM_SHEET_STATE_EXTRA =
            PreferenceFragment.class.getSimpleName() + "::intentBottomSheetState";

    /**
     * A regular bottom sheet containing list items.
     */
    private BottomSheet bottomSheet;

    /**
     * A bottom sheet containing a custom view.
     */
    private BottomSheet customBottomSheet;

    /**
     * A bottom sheet containing possible receivers for an intent.
     */
    private BottomSheet intentBottomSheet;

    /**
     * The toast, which is used to indicate, when a bottom sheet's item has been clicked.
     */
    private Toast toast;

    /**
     * Initializes the bottom sheets.
     */
    private void initializeBottomSheets() {
        initializeBottomSheet();
        initializeCustomBottomSheet();
        initializeIntentBottomSheet();
    }

    /**
     * Initializes the regular bottom sheet containing list items.
     */
    private void initializeBottomSheet() {
        BottomSheet.Builder builder = createBottomSheetBuilder();
        addItems(builder);
        bottomSheet = builder.create();
    }

    /**
     * Initializes the bottom sheet containing a custom view.
     */
    private void initializeCustomBottomSheet() {
        BottomSheet.Builder builder = createBottomSheetBuilder();
        builder.setView(R.layout.custom_view);
        customBottomSheet = builder.create();
    }

    /**
     * Initializes the bottom sheet containing possible receivers for an intent.
     */
    private void initializeIntentBottomSheet() {
        BottomSheet.Builder builder = createBottomSheetBuilder();
        Intent intent = new Intent();
        intent.setAction(Intent.ACTION_SEND);
        intent.putExtra(Intent.EXTRA_TEXT, "This is my text to send.");
        intent.setType("text/plain");
        builder.setIntent(getActivity(), intent);
        intentBottomSheet = builder.create();
    }

    /**
     * Initializes the preference, which allows to change the app's theme.
     */
    private void initializeThemePreference() {
        Preference themePreference = findPreference(getString(R.string.theme_preference_key));
        themePreference.setOnPreferenceChangeListener(createThemeChangeListener());
    }

    /**
     * Creates and returns a listener, which allows to adapt the app's theme, when the value of the
     * corresponding preference has been changed.
     *
     * @return The listener, which has been created, as an instance of the type {@link
     * Preference.OnPreferenceChangeListener}
     */
    private Preference.OnPreferenceChangeListener createThemeChangeListener() {
        return new Preference.OnPreferenceChangeListener() {

            @Override
            public boolean onPreferenceChange(final Preference preference, final Object newValue) {
                // Recreating the activity causes setTheme to be invoked again, which applies
                // the newly chosen theme.
                getActivity().recreate();
                return true;
            }

        };
    }

    /**
     * Initializes the preference, which allows to show a bottom sheet.
     */
    private void initializeShowBottomSheetPreference() {
        Preference showBottomSheetPreference =
                findPreference(getString(R.string.show_bottom_sheet_preference_key));
        showBottomSheetPreference.setOnPreferenceClickListener(new OnPreferenceClickListener() {

            @Override
            public boolean onPreferenceClick(final Preference preference) {
                // The bottom sheet is re-created on each click so that it reflects the
                // current preference values.
                initializeBottomSheet();
                bottomSheet.show();
                return true;
            }

        });
    }

    /**
     * Initializes the preference, which allows to show a bottom sheet with custom content.
     */
    private void initializeShowCustomBottomSheetPreference() {
        Preference showCustomBottomSheetPreference =
                findPreference(getString(R.string.show_custom_bottom_sheet_preference_key));
        showCustomBottomSheetPreference
                .setOnPreferenceClickListener(new OnPreferenceClickListener() {

                    @Override
                    public boolean onPreferenceClick(final Preference preference) {
                        initializeCustomBottomSheet();
                        customBottomSheet.show();
                        return true;
                    }

                });
    }

    /**
     * Initializes the preference, which allows to display the applications, which are suited for
     * handling an intent.
     */
    private void initializeShowIntentBottomSheetPreference() {
        Preference showIntentBottomSheetPreference =
                findPreference(getString(R.string.show_intent_bottom_sheet_preference_key));
        showIntentBottomSheetPreference
                .setOnPreferenceClickListener(new OnPreferenceClickListener() {

                    @Override
                    public boolean onPreferenceClick(Preference preference) {
                        initializeIntentBottomSheet();
                        intentBottomSheet.show();
                        return true;
                    }

                });
    }

    /**
     * Creates and returns a builder, which allows to create bottom sheets, depending on the app's
     * settings.
     *
     * @return The builder, which has been created, as an instance of the class {@link
     * BottomSheet.Builder}
     */
    private BottomSheet.Builder createBottomSheetBuilder() {
        BottomSheet.Builder builder = new BottomSheet.Builder(getActivity());
        builder.setStyle(getStyle());

        if (shouldTitleBeShown()) {
            builder.setTitle(getBottomSheetTitle());
        }

        if (shouldIconBeShown()) {
            builder.setIcon(android.R.drawable.ic_dialog_alert);
        }

        return builder;
    }

    /**
     * Adds items, depending on the app's settings, to a builder, which allows to create a bottom
     * sheet.
     *
     * @param builder
     *         The builder, which allows to create the bottom sheet, as an instance of the class
     *         {@link BottomSheet.Builder}. The builder may not be null
     */
    private void addItems(@NonNull final BottomSheet.Builder builder) {
        int dividerCount = getDividerCount();
        boolean showDividerTitle = shouldDividerTitleBeShown();
        int itemCount = getItemCount();
        boolean showIcon = shouldItemIconsBeShown();
        boolean disableItems = shouldItemsBeDisabled();
        // The index tracks the absolute position within the bottom sheet, counting both
        // items and dividers, and is used to disable items by position.
        int index = 0;

        for (int i = 0; i < dividerCount + 1; i++) {
            if (i > 0) {
                builder.addDivider(showDividerTitle ? getString(R.string.divider_title, i) : null);
                index++;
            }

            for (int j = 0; j < itemCount; j++) {
                String title = getString(R.string.item_title, i * itemCount + j + 1);
                Drawable icon;

                if (isDarkThemeSet()) {
                    icon = showIcon ? ContextCompat.getDrawable(getActivity(),
                            getStyle() == Style.GRID ? R.drawable.grid_item_dark :
                                    R.drawable.list_item_dark) : null;
                } else {
                    icon = showIcon ? ContextCompat.getDrawable(getActivity(),
                            getStyle() == Style.GRID ? R.drawable.grid_item :
                                    R.drawable.list_item) : null;
                }

                builder.addItem(i * dividerCount + j, title, icon);

                if (disableItems) {
                    builder.setItemEnabled(index, false);
                }

                index++;
            }
        }

        builder.setOnItemClickListener(createItemClickListener());
    }

    /**
     * Creates and returns a listener, which allows to show a toast, which indicates the item of a
     * bottom sheet, which has been clicked.
     *
     * @return The listener, which has been created, as an instance of the type {@link
     * AdapterView.OnItemClickListener}
     */
    private AdapterView.OnItemClickListener createItemClickListener() {
        return new AdapterView.OnItemClickListener() {

            @Override
            public void onItemClick(final AdapterView<?> parent, final View view,
                                    final int position, final long id) {
                if (toast != null) {
                    toast.cancel();
                }

                int itemCount = getItemCount();
                // Subtract the dividers preceding the clicked position to obtain the
                // 1-based item number.
                String text = getString(R.string.item_clicked_toast,
                        position - position / (itemCount + 1) + 1);
                toast = Toast.makeText(getActivity(), text, Toast.LENGTH_SHORT);
                toast.show();
            }

        };
    }

    /**
     * Returns the style, which should be used to create bottom sheets, depending on the app's
     * settings.
     *
     * @return The style, which should be used to create bottom sheets, as a value of the enum
     * {@link Style}
     */
    private Style getStyle() {
        SharedPreferences sharedPreferences =
                PreferenceManager.getDefaultSharedPreferences(getActivity());
        String key = getString(R.string.bottom_sheet_style_preference_key);
        String defaultValue = getString(R.string.bottom_sheet_style_preference_default_value);
        String style = sharedPreferences.getString(key, defaultValue);

        switch (style) {
            case "list":
                return Style.LIST;
            case "list_columns":
                return Style.LIST_COLUMNS;
            default:
                return Style.GRID;
        }
    }

    /**
     * Returns, whether the title of bottom sheets should be shown, depending on the app's settings,
     * or not.
     *
     * @return True, if the title of bottom sheets should be shown, false otherwise
     */
    private boolean shouldTitleBeShown() {
        SharedPreferences sharedPreferences =
                PreferenceManager.getDefaultSharedPreferences(getActivity());
        String key = getString(R.string.show_bottom_sheet_title_preference_key);
        boolean defaultValue =
                getResources().getBoolean(R.bool.show_bottom_sheet_title_preference_default_value);
        return sharedPreferences.getBoolean(key, defaultValue);
    }

    /**
     * Returns the title of bottom sheets, depending on the app's settings.
     *
     * @return The title of the bottom sheets
     */
    private String getBottomSheetTitle() {
        SharedPreferences sharedPreferences =
                PreferenceManager.getDefaultSharedPreferences(getActivity());
        String key = getString(R.string.bottom_sheet_title_preference_key);
        String defaultValue = getString(R.string.bottom_sheet_title_preference_default_value);
        return sharedPreferences.getString(key, defaultValue);
    }

    /**
     * Returns, whether the icon of bottom sheets should be shown, depending on the app's settings,
     * or not.
     *
     * @return True, if the icon of bottom sheets should be shown, false otherwise
     */
    private boolean shouldIconBeShown() {
        SharedPreferences sharedPreferences =
                PreferenceManager.getDefaultSharedPreferences(getActivity());
        String key = getString(R.string.show_bottom_sheet_icon_preference_key);
        boolean defaultValue =
                getResources().getBoolean(R.bool.show_bottom_sheet_icon_preference_default_value);
        return sharedPreferences.getBoolean(key, defaultValue);
    }

    /**
     * Returns the number of dividers, which should be shown, depending on the app's settings.
     *
     * @return The number of dividers, which should be shown, as an {@link Integer} value
     */
    private int getDividerCount() {
        SharedPreferences sharedPreferences =
                PreferenceManager.getDefaultSharedPreferences(getActivity());
        String key = getString(R.string.divider_count_preference_key);
        String defaultValue = getString(R.string.divider_count_preference_default_value);
        // parseInt avoids the needless boxing of Integer.valueOf followed by auto-unboxing
        return Integer.parseInt(sharedPreferences.getString(key, defaultValue));
    }

    /**
     * Returns, whether the title of dividers should be shown, depending on the app's settings, or
     * not.
     *
     * @return True, if the title of dividers should be shown, false otherwise
     */
    private boolean shouldDividerTitleBeShown() {
        SharedPreferences sharedPreferences =
                PreferenceManager.getDefaultSharedPreferences(getActivity());
        String key = getString(R.string.show_divider_title_preference_key);
        boolean defaultValue =
                getResources().getBoolean(R.bool.show_divider_title_preference_default_value);
        return sharedPreferences.getBoolean(key, defaultValue);
    }

    /**
     * Returns the number of items, which should be shown per divider, depending on the app's
     * settings.
     *
     * @return The number of items, which should be shown per divider, as an {@link Integer} value
     */
    private int getItemCount() {
        SharedPreferences sharedPreferences =
                PreferenceManager.getDefaultSharedPreferences(getActivity());
        String key = getString(R.string.item_count_preference_key);
        String defaultValue = getString(R.string.item_count_preference_default_value);
        return Integer.parseInt(sharedPreferences.getString(key, defaultValue));
    }

    /**
     * Returns, whether icons should be shown next to items, depending on the app's settings, or
     * not.
     *
     * @return True, if icons should be shown next to items, false otherwise
     */
    private boolean shouldItemIconsBeShown() {
        SharedPreferences sharedPreferences =
                PreferenceManager.getDefaultSharedPreferences(getActivity());
        String key = getString(R.string.show_item_icons_preference_key);
        boolean defaultValue =
                getResources().getBoolean(R.bool.show_item_icons_preference_default_value);
        return sharedPreferences.getBoolean(key, defaultValue);
    }

    /**
     * Returns, whether items should be disabled, depending on the app's settings, or not.
     *
     * @return True, if items should be disabled, false otherwise.
     */
    private boolean shouldItemsBeDisabled() {
        SharedPreferences sharedPreferences =
                PreferenceManager.getDefaultSharedPreferences(getActivity());
        String key = getString(R.string.disable_items_preference_key);
        boolean defaultValue =
                getResources().getBoolean(R.bool.disable_items_preference_default_value);
        return sharedPreferences.getBoolean(key, defaultValue);
    }

    /**
     * Returns, whether the app uses the dark theme, or not.
     *
     * @return True, if the app uses the dark theme, false otherwise
     */
    private boolean isDarkThemeSet() {
        SharedPreferences sharedPreferences =
                PreferenceManager.getDefaultSharedPreferences(getActivity());
        String key = getString(R.string.theme_preference_key);
        String defaultValue = getString(R.string.theme_preference_default_value);
        // Any non-zero value selects the dark theme (see MainActivity#setTheme)
        return Integer.parseInt(sharedPreferences.getString(key, defaultValue)) != 0;
    }

    @Override
    public final void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        initializeBottomSheets();
        addPreferencesFromResource(R.xml.preferences);
        initializeThemePreference();
        initializeShowBottomSheetPreference();
        initializeShowCustomBottomSheetPreference();
        initializeShowIntentBottomSheetPreference();

        if (savedInstanceState != null) {
            Bundle bottomSheetState = savedInstanceState.getBundle(BOTTOM_SHEET_STATE_EXTRA);
            Bundle customBottomSheetState =
                    savedInstanceState.getBundle(CUSTOM_BOTTOM_SHEET_STATE_EXTRA);
            Bundle intentBottomSheetState =
                    savedInstanceState.getBundle(INTENT_BOTTOM_SHEET_STATE_EXTRA);

            if (bottomSheetState != null) {
                bottomSheet.onRestoreInstanceState(bottomSheetState);
            }

            if (customBottomSheetState != null) {
                customBottomSheet.onRestoreInstanceState(customBottomSheetState);
            }

            if (intentBottomSheetState != null) {
                intentBottomSheet.onRestoreInstanceState(intentBottomSheetState);
            }
        }
    }

    @Override
    public final void onSaveInstanceState(final Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putBundle(BOTTOM_SHEET_STATE_EXTRA, bottomSheet.onSaveInstanceState());
        outState.putBundle(CUSTOM_BOTTOM_SHEET_STATE_EXTRA,
                customBottomSheet.onSaveInstanceState());
        outState.putBundle(INTENT_BOTTOM_SHEET_STATE_EXTRA,
                intentBottomSheet.onSaveInstanceState());
    }

}
<|start_filename|>example/src/main/java/de/mrapp/android/bottomsheet/example/MainActivity.java<|end_filename|>
/*
* Copyright 2016 - 2018 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package de.mrapp.android.bottomsheet.example;
import android.app.Fragment;
import android.app.FragmentTransaction;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.PreferenceManager;
import androidx.appcompat.app.AppCompatActivity;
/**
* The main activity of the example app.
*
* @author <NAME>
*/
public class MainActivity extends AppCompatActivity {

    /**
     * The tag, which is used to show the activity's fragment.
     */
    private static final String FRAGMENT_TAG = MainActivity.class.getSimpleName() + "::fragmentTag";

    /**
     * Applies the light or dark theme, depending on the app's settings, instead of the theme,
     * which is passed to this method.
     *
     * @param resourceId
     *         The resource id of the theme, which should be set, as an {@link Integer} value.
     *         The value is ignored in favor of the theme preference
     */
    @Override
    public final void setTheme(final int resourceId) {
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
        String key = getString(R.string.theme_preference_key);
        String defaultValue = getString(R.string.theme_preference_default_value);
        // parseInt avoids the needless boxing of Integer.valueOf followed by auto-unboxing
        int theme = Integer.parseInt(sharedPreferences.getString(key, defaultValue));

        if (theme != 0) {
            super.setTheme(R.style.DarkTheme);
        } else {
            super.setTheme(R.style.LightTheme);
        }
    }

    @Override
    protected final void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Only attach the fragment on the first creation; on configuration changes the
        // fragment manager restores the previously added instance.
        if (getFragmentManager().findFragmentByTag(FRAGMENT_TAG) == null) {
            Fragment fragment = Fragment.instantiate(this, PreferenceFragment.class.getName());
            FragmentTransaction transaction = getFragmentManager().beginTransaction();
            transaction.replace(R.id.fragment, fragment, FRAGMENT_TAG);
            transaction.commit();
        }
    }

}
<|start_filename|>library/src/main/java/de/mrapp/android/bottomsheet/view/DividableGridView.java<|end_filename|>
/*
* Copyright 2016 - 2018 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package de.mrapp.android.bottomsheet.view;
import android.annotation.TargetApi;
import android.content.Context;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.StyleRes;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.ViewGroup;
import android.widget.GridView;
import de.mrapp.android.bottomsheet.BottomSheet;
import de.mrapp.android.bottomsheet.R;
import de.mrapp.android.bottomsheet.adapter.DividableGridAdapter;
import de.mrapp.android.bottomsheet.model.AbstractItem;
import de.mrapp.android.bottomsheet.model.Divider;
/**
* A grid view, which allows to display the items of a {@link BottomSheet}. Its height can be
* adapted to the heights of its children, even if they have different heights.
*
* @author <NAME>
* @since 1.0.0
*/
public class DividableGridView extends GridView {
/**
* Creates a new grid view, which allows to display the items of a {@link BottomSheet}.
*
* @param context
* The context, which should be used by the view, as an instance of the class {@link
* Context}. The context may not be null
*/
public DividableGridView(@NonNull final Context context) {
super(context);
}
/**
* Creates a new grid view, which allows to display the items of a {@link BottomSheet}.
*
* @param context
* The context, which should be used by the view, as an instance of the class {@link
* Context}. The context may not be null
* @param attributeSet
* The attribute set, the view's attributes should be obtained from, as an instance of
* the type {@link AttributeSet} or null, if no attributes should be obtained
*/
public DividableGridView(@NonNull final Context context,
@Nullable final AttributeSet attributeSet) {
super(context, attributeSet);
}
/**
* Creates a new grid view, which allows to display the items of a {@link BottomSheet}.
*
* @param context
* The context, which should be used by the view, as an instance of the class {@link
* Context}. The context may not be null
* @param attributeSet
* The attribute set, the view's attributes should be obtained from, as an instance of
* the type {@link AttributeSet} or null, if no attributes should be obtained
* @param defaultStyle
* The default style to apply to this view. If 0, no style will be applied (beyond what
* is included in the theme). This may either be an attribute resource, whose value will
* be retrieved from the current theme, or an explicit style resource
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public DividableGridView(@NonNull final Context context,
@Nullable final AttributeSet attributeSet,
@StyleRes final int defaultStyle) {
super(context, attributeSet, defaultStyle);
}
/**
 * Creates a new grid view, which allows to display the items of a {@link BottomSheet}.
 *
 * @param context
 *         The context, which should be used by the view, as an instance of the class {@link
 *         Context}. The context may not be null
 * @param attributeSet
 *         The attribute set, the view's attributes should be obtained from, as an instance of
 *         the type {@link AttributeSet} or null, if no attributes should be obtained
 * @param defaultStyle
 *         The default style to apply to this view. If 0, no style will be applied (beyond what
 *         is included in the theme). This may either be an attribute resource, whose value will
 *         be retrieved from the current theme, or an explicit style resource
 * @param defaultStyleResource
 *         A resource identifier of a style resource that supplies default values for the view,
 *         used only if the default style is 0 or can not be found in the theme. Can be 0 to not
 *         look for defaults
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public DividableGridView(@NonNull final Context context,
                         @Nullable final AttributeSet attributeSet,
                         @StyleRes final int defaultStyle,
                         @StyleRes final int defaultStyleResource) {
    // Delegates to the four-argument superclass constructor, which is only
    // available as of API level 21 (hence the Lollipop annotation above).
    super(context, attributeSet, defaultStyle, defaultStyleResource);
}
/**
 * Adapts the height of the grid view to the height of its children. The total height is the
 * sum of the view's vertical padding and one row height per row of items, where the height of
 * a row depends on whether it contains a divider (with or without a title) or a regular item
 * (grid or list style). Does nothing, if no adapter has been set, yet.
 */
public void adaptHeightToChildren() {
    DividableGridAdapter adapter = (DividableGridAdapter) getAdapter();

    if (adapter == null) {
        return;
    }

    int totalHeight = getPaddingTop() + getPaddingBottom();

    // Only the first item of each row has to be inspected, because all items
    // of a row share the same height.
    for (int index = 0; index < adapter.getCount(); index += adapter.getColumnCount()) {
        AbstractItem currentItem = adapter.getItem(index);
        int dimensionResourceId;

        if (currentItem instanceof Divider) {
            // Dividers with a title are taller than plain dividers.
            dimensionResourceId = TextUtils.isEmpty(currentItem.getTitle()) ?
                    R.dimen.bottom_sheet_divider_height :
                    R.dimen.bottom_sheet_divider_title_height;
        } else {
            // Regular items use a different height depending on the sheet's style.
            dimensionResourceId = adapter.getStyle() == BottomSheet.Style.GRID ?
                    R.dimen.bottom_sheet_grid_item_size :
                    R.dimen.bottom_sheet_list_item_height;
        }

        totalHeight += getResources().getDimensionPixelSize(dimensionResourceId);
    }

    ViewGroup.LayoutParams layoutParams = getLayoutParams();
    layoutParams.height = totalHeight;
    setLayoutParams(layoutParams);
    requestLayout();
}
}
<|start_filename|>doc/javadoc/index-files/index-4.html<|end_filename|>
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_152-release) on Tue Oct 30 23:20:43 CET 2018 -->
<title>D-Index</title>
<meta name="date" content="2018-10-30">
<link rel="stylesheet" type="text/css" href="../stylesheet.css" title="Style">
<script type="text/javascript" src="../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="D-Index";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li><a href="../overview-tree.html">Tree</a></li>
<li><a href="../deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Index</li>
<li><a href="../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="index-3.html">Prev Letter</a></li>
<li><a href="index-5.html">Next Letter</a></li>
</ul>
<ul class="navList">
<li><a href="../index.html?index-files/index-4.html" target="_top">Frames</a></li>
<li><a href="index-4.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="contentContainer"><a href="index-1.html">A</a> <a href="index-2.html">B</a> <a href="index-3.html">C</a> <a href="index-4.html">D</a> <a href="index-5.html">E</a> <a href="index-6.html">G</a> <a href="index-7.html">H</a> <a href="index-8.html">I</a> <a href="index-9.html">M</a> <a href="index-10.html">N</a> <a href="index-11.html">O</a> <a href="index-12.html">P</a> <a href="index-13.html">R</a> <a href="index-14.html">S</a> <a href="index-15.html">T</a> <a href="index-16.html">V</a> <a name="I:D">
<!-- -->
</a>
<h2 class="title">D</h2>
<dl>
<dt><a href="../de/mrapp/android/bottomsheet/package-summary.html">de.mrapp.android.bottomsheet</a> - package de.mrapp.android.bottomsheet</dt>
<dd> </dd>
<dt><a href="../de/mrapp/android/bottomsheet/adapter/package-summary.html">de.mrapp.android.bottomsheet.adapter</a> - package de.mrapp.android.bottomsheet.adapter</dt>
<dd> </dd>
<dt><a href="../de/mrapp/android/bottomsheet/animation/package-summary.html">de.mrapp.android.bottomsheet.animation</a> - package de.mrapp.android.bottomsheet.animation</dt>
<dd> </dd>
<dt><a href="../de/mrapp/android/bottomsheet/model/package-summary.html">de.mrapp.android.bottomsheet.model</a> - package de.mrapp.android.bottomsheet.model</dt>
<dd> </dd>
<dt><a href="../de/mrapp/android/bottomsheet/view/package-summary.html">de.mrapp.android.bottomsheet.view</a> - package de.mrapp.android.bottomsheet.view</dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#dismiss--">dismiss()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/view/DraggableView.html#dispatchTouchEvent-android.view.MotionEvent-">dispatchTouchEvent(MotionEvent)</a></span> - Method in class de.mrapp.android.bottomsheet.view.<a href="../de/mrapp/android/bottomsheet/view/DraggableView.html" title="class in de.mrapp.android.bottomsheet.view">DraggableView</a></dt>
<dd> </dd>
<dt><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter"><span class="typeNameLink">DividableGridAdapter</span></a> - Class in <a href="../de/mrapp/android/bottomsheet/adapter/package-summary.html">de.mrapp.android.bottomsheet.adapter</a></dt>
<dd>
<div class="block">An adapter, which manages the items of a <a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a>.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#DividableGridAdapter-android.content.Context-de.mrapp.android.bottomsheet.BottomSheet.Style-int-">DividableGridAdapter(Context, BottomSheet.Style, int)</a></span> - Constructor for class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd>
<div class="block">Creates a new adapter, which manages the items of a <a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a>.</div>
</dd>
<dt><a href="../de/mrapp/android/bottomsheet/view/DividableGridView.html" title="class in de.mrapp.android.bottomsheet.view"><span class="typeNameLink">DividableGridView</span></a> - Class in <a href="../de/mrapp/android/bottomsheet/view/package-summary.html">de.mrapp.android.bottomsheet.view</a></dt>
<dd>
<div class="block">A grid view, which allows to display the items of a <a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a>.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/view/DividableGridView.html#DividableGridView-android.content.Context-">DividableGridView(Context)</a></span> - Constructor for class de.mrapp.android.bottomsheet.view.<a href="../de/mrapp/android/bottomsheet/view/DividableGridView.html" title="class in de.mrapp.android.bottomsheet.view">DividableGridView</a></dt>
<dd>
<div class="block">Creates a new grid view, which allows to display the items of a <a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a>.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/view/DividableGridView.html#DividableGridView-android.content.Context-android.util.AttributeSet-">DividableGridView(Context, AttributeSet)</a></span> - Constructor for class de.mrapp.android.bottomsheet.view.<a href="../de/mrapp/android/bottomsheet/view/DividableGridView.html" title="class in de.mrapp.android.bottomsheet.view">DividableGridView</a></dt>
<dd>
<div class="block">Creates a new grid view, which allows to display the items of a <a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a>.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/view/DividableGridView.html#DividableGridView-android.content.Context-android.util.AttributeSet-int-">DividableGridView(Context, AttributeSet, int)</a></span> - Constructor for class de.mrapp.android.bottomsheet.view.<a href="../de/mrapp/android/bottomsheet/view/DividableGridView.html" title="class in de.mrapp.android.bottomsheet.view">DividableGridView</a></dt>
<dd>
<div class="block">Creates a new grid view, which allows to display the items of a <a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a>.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/view/DividableGridView.html#DividableGridView-android.content.Context-android.util.AttributeSet-int-int-">DividableGridView(Context, AttributeSet, int, int)</a></span> - Constructor for class de.mrapp.android.bottomsheet.view.<a href="../de/mrapp/android/bottomsheet/view/DividableGridView.html" title="class in de.mrapp.android.bottomsheet.view">DividableGridView</a></dt>
<dd>
<div class="block">Creates a new grid view, which allows to display the items of a <a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a>.</div>
</dd>
<dt><a href="../de/mrapp/android/bottomsheet/model/Divider.html" title="class in de.mrapp.android.bottomsheet.model"><span class="typeNameLink">Divider</span></a> - Class in <a href="../de/mrapp/android/bottomsheet/model/package-summary.html">de.mrapp.android.bottomsheet.model</a></dt>
<dd>
<div class="block">Represents a divider, which can be shown in a bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/model/Divider.html#Divider--">Divider()</a></span> - Constructor for class de.mrapp.android.bottomsheet.model.<a href="../de/mrapp/android/bottomsheet/model/Divider.html" title="class in de.mrapp.android.bottomsheet.model">Divider</a></dt>
<dd>
<div class="block">Creates a new divider.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/model/Divider.html#DIVIDER_ID">DIVIDER_ID</a></span> - Static variable in class de.mrapp.android.bottomsheet.model.<a href="../de/mrapp/android/bottomsheet/model/Divider.html" title="class in de.mrapp.android.bottomsheet.model">Divider</a></dt>
<dd>
<div class="block">The id of dividers.</div>
</dd>
<dt><a href="../de/mrapp/android/bottomsheet/view/DraggableView.html" title="class in de.mrapp.android.bottomsheet.view"><span class="typeNameLink">DraggableView</span></a> - Class in <a href="../de/mrapp/android/bottomsheet/view/package-summary.html">de.mrapp.android.bottomsheet.view</a></dt>
<dd>
<div class="block">The root view of a <a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a>, which can be dragged by the user.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/view/DraggableView.html#DraggableView-android.content.Context-">DraggableView(Context)</a></span> - Constructor for class de.mrapp.android.bottomsheet.view.<a href="../de/mrapp/android/bottomsheet/view/DraggableView.html" title="class in de.mrapp.android.bottomsheet.view">DraggableView</a></dt>
<dd>
<div class="block">Creates a new root view of a <a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a>, which can be dragged by the user.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/view/DraggableView.html#DraggableView-android.content.Context-android.util.AttributeSet-">DraggableView(Context, AttributeSet)</a></span> - Constructor for class de.mrapp.android.bottomsheet.view.<a href="../de/mrapp/android/bottomsheet/view/DraggableView.html" title="class in de.mrapp.android.bottomsheet.view">DraggableView</a></dt>
<dd>
<div class="block">Creates a new root view of a <a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a>, which can be dragged by the user.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/view/DraggableView.html#DraggableView-android.content.Context-android.util.AttributeSet-int-">DraggableView(Context, AttributeSet, int)</a></span> - Constructor for class de.mrapp.android.bottomsheet.view.<a href="../de/mrapp/android/bottomsheet/view/DraggableView.html" title="class in de.mrapp.android.bottomsheet.view">DraggableView</a></dt>
<dd>
<div class="block">Creates a new root view of a <a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a>, which can be dragged by the user.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/view/DraggableView.html#DraggableView-android.content.Context-android.util.AttributeSet-int-int-">DraggableView(Context, AttributeSet, int, int)</a></span> - Constructor for class de.mrapp.android.bottomsheet.view.<a href="../de/mrapp/android/bottomsheet/view/DraggableView.html" title="class in de.mrapp.android.bottomsheet.view">DraggableView</a></dt>
<dd>
<div class="block">Creates a new root view of a <a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a>, which can be dragged by the user.</div>
</dd>
<dt><a href="../de/mrapp/android/bottomsheet/view/DraggableView.Callback.html" title="interface in de.mrapp.android.bottomsheet.view"><span class="typeNameLink">DraggableView.Callback</span></a> - Interface in <a href="../de/mrapp/android/bottomsheet/view/package-summary.html">de.mrapp.android.bottomsheet.view</a></dt>
<dd>
<div class="block">Defines the interface, a class, which should be notified about the view's state, must
implement.</div>
</dd>
<dt><a href="../de/mrapp/android/bottomsheet/animation/DraggableViewAnimation.html" title="class in de.mrapp.android.bottomsheet.animation"><span class="typeNameLink">DraggableViewAnimation</span></a> - Class in <a href="../de/mrapp/android/bottomsheet/animation/package-summary.html">de.mrapp.android.bottomsheet.animation</a></dt>
<dd>
<div class="block">An animation, which allows to show or hide a <a href="../de/mrapp/android/bottomsheet/view/DraggableView.html" title="class in de.mrapp.android.bottomsheet.view"><code>DraggableView</code></a>.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/animation/DraggableViewAnimation.html#DraggableViewAnimation-de.mrapp.android.bottomsheet.view.DraggableView-int-long-android.view.animation.Animation.AnimationListener-">DraggableViewAnimation(DraggableView, int, long, Animation.AnimationListener)</a></span> - Constructor for class de.mrapp.android.bottomsheet.animation.<a href="../de/mrapp/android/bottomsheet/animation/DraggableViewAnimation.html" title="class in de.mrapp.android.bottomsheet.animation">DraggableViewAnimation</a></dt>
<dd>
<div class="block">Creates a new animation, which allows to show or hide a <a href="../de/mrapp/android/bottomsheet/view/DraggableView.html" title="class in de.mrapp.android.bottomsheet.view"><code>DraggableView</code></a>.</div>
</dd>
</dl>
<a href="index-1.html">A</a> <a href="index-2.html">B</a> <a href="index-3.html">C</a> <a href="index-4.html">D</a> <a href="index-5.html">E</a> <a href="index-6.html">G</a> <a href="index-7.html">H</a> <a href="index-8.html">I</a> <a href="index-9.html">M</a> <a href="index-10.html">N</a> <a href="index-11.html">O</a> <a href="index-12.html">P</a> <a href="index-13.html">R</a> <a href="index-14.html">S</a> <a href="index-15.html">T</a> <a href="index-16.html">V</a> </div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li><a href="../overview-tree.html">Tree</a></li>
<li><a href="../deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Index</li>
<li><a href="../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="index-3.html">Prev Letter</a></li>
<li><a href="index-5.html">Next Letter</a></li>
</ul>
<ul class="navList">
<li><a href="../index.html?index-files/index-4.html" target="_top">Frames</a></li>
<li><a href="index-4.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
<|start_filename|>doc/javadoc/index-files/index-14.html<|end_filename|>
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_152-release) on Tue Oct 30 23:20:43 CET 2018 -->
<title>S-Index</title>
<meta name="date" content="2018-10-30">
<link rel="stylesheet" type="text/css" href="../stylesheet.css" title="Style">
<script type="text/javascript" src="../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="S-Index";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li><a href="../overview-tree.html">Tree</a></li>
<li><a href="../deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Index</li>
<li><a href="../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="index-13.html">Prev Letter</a></li>
<li><a href="index-15.html">Next Letter</a></li>
</ul>
<ul class="navList">
<li><a href="../index.html?index-files/index-14.html" target="_top">Frames</a></li>
<li><a href="index-14.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="contentContainer"><a href="index-1.html">A</a> <a href="index-2.html">B</a> <a href="index-3.html">C</a> <a href="index-4.html">D</a> <a href="index-5.html">E</a> <a href="index-6.html">G</a> <a href="index-7.html">H</a> <a href="index-8.html">I</a> <a href="index-9.html">M</a> <a href="index-10.html">N</a> <a href="index-11.html">O</a> <a href="index-12.html">P</a> <a href="index-13.html">R</a> <a href="index-14.html">S</a> <a href="index-15.html">T</a> <a href="index-16.html">V</a> <a name="I:S">
<!-- -->
</a>
<h2 class="title">S</h2>
<dl>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#set-int-de.mrapp.android.bottomsheet.model.AbstractItem-">set(int, AbstractItem)</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd>
<div class="block">Replaces the item at a specific index.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setBackground-android.graphics.Bitmap-">setBackground(Bitmap)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the background of the bottom sheet, which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setBackground-int-">setBackground(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the background of the bottom sheet, which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setBackground-android.graphics.Bitmap-">setBackground(Bitmap)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the background of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setBackground-int-">setBackground(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the background of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setBackgroundColor-int-">setBackgroundColor(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the background color of the bottom sheet, which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setBackgroundColor-int-">setBackgroundColor(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the background color of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/view/DraggableView.html#setCallback-de.mrapp.android.bottomsheet.view.DraggableView.Callback-">setCallback(DraggableView.Callback)</a></span> - Method in class de.mrapp.android.bottomsheet.view.<a href="../de/mrapp/android/bottomsheet/view/DraggableView.html" title="class in de.mrapp.android.bottomsheet.view">DraggableView</a></dt>
<dd>
<div class="block">Sets the callback, which should be notified about the view's state.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setCancelable-boolean-">setCancelable(boolean)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets, whether the bottom sheet, which is created by the builder, should be cancelable, or
not.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setCancelable-boolean-">setCancelable(boolean)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setCanceledOnTouchOutside-boolean-">setCanceledOnTouchOutside(boolean)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setCustomTitle-android.view.View-">setCustomTitle(View)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the custom view, which should be used to show the title of the bottom sheet, which
is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setCustomTitle-int-">setCustomTitle(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the custom view, which should be used to show the title of the bottom sheet, which
is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setCustomTitle-android.view.View-">setCustomTitle(View)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the custom view, which should be used to show the title of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setCustomTitle-int-">setCustomTitle(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the custom view, which should be used to show the title of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setDimAmount-float-">setDimAmount(float)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the dim amount, which should be used to darken the area outside the bottom sheet,
which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setDimAmount-float-">setDimAmount(float)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the dim amount, which should be used to darken the area outside the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setDivider-int-">setDivider(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Replaces the item at a specific index with a divider.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setDivider-int-java.lang.CharSequence-">setDivider(int, CharSequence)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Replaces the item at a specific index with a divider.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setDivider-int-int-">setDivider(int, int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Replaces the item at a specific index with a divider.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#setDividerColor-int-">setDividerColor(int)</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd>
<div class="block">Sets the color of the adapter's dividers.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setDividerColor-int-">setDividerColor(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the color of the dividers of the bottom sheet, which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setDividerColor-int-">setDividerColor(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the color of the dividers of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setDragSensitivity-float-">setDragSensitivity(float)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the sensitivity, which specifies the distance after which dragging has an effect on
the bottom sheet, in relation to an internal value range.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setDragSensitivity-float-">setDragSensitivity(float)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the sensitivity, which specifies the distance after which dragging has an effect on the
bottom sheet, in relation to an internal value range.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/view/DraggableView.html#setDragSensitivity-int-">setDragSensitivity(int)</a></span> - Method in class de.mrapp.android.bottomsheet.view.<a href="../de/mrapp/android/bottomsheet/view/DraggableView.html" title="class in de.mrapp.android.bottomsheet.view">DraggableView</a></dt>
<dd>
<div class="block">Sets the distance in pixels, a drag gesture must last until it is recognized.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/model/Item.html#setEnabled-boolean-">setEnabled(boolean)</a></span> - Method in class de.mrapp.android.bottomsheet.model.<a href="../de/mrapp/android/bottomsheet/model/Item.html" title="class in de.mrapp.android.bottomsheet.model">Item</a></dt>
<dd>
<div class="block">Sets, whether the item should be enabled, or not.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setIcon-android.graphics.Bitmap-">setIcon(Bitmap)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the icon of the bottom sheet, which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setIcon-int-">setIcon(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the icon of the bottom sheet, which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setIcon-android.graphics.Bitmap-">setIcon(Bitmap)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the icon of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setIcon-int-">setIcon(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the icon of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/model/Item.html#setIcon-android.graphics.drawable.Drawable-">setIcon(Drawable)</a></span> - Method in class de.mrapp.android.bottomsheet.model.<a href="../de/mrapp/android/bottomsheet/model/Item.html" title="class in de.mrapp.android.bottomsheet.model">Item</a></dt>
<dd>
<div class="block">Sets the item's icon.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/model/Item.html#setIcon-android.content.Context-int-">setIcon(Context, int)</a></span> - Method in class de.mrapp.android.bottomsheet.model.<a href="../de/mrapp/android/bottomsheet/model/Item.html" title="class in de.mrapp.android.bottomsheet.model">Item</a></dt>
<dd>
<div class="block">Sets the item's icon.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setIconAttribute-int-">setIconAttribute(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the icon of the bottom sheet, which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setIconAttribute-int-">setIconAttribute(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the icon of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setIntent-android.app.Activity-android.content.Intent-">setIntent(Activity, Intent)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Adds the apps, which are able to handle a specific intent, as items to the bottom sheet,
which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setIntent-android.app.Activity-android.content.Intent-">setIntent(Activity, Intent)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Adds the apps, which are able to handle a specific intent, as items to the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setItem-int-int-java.lang.CharSequence-">setItem(int, int, CharSequence)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Replaces the item at a specific index with another item.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setItem-int-int-java.lang.CharSequence-android.graphics.drawable.Drawable-">setItem(int, int, CharSequence, Drawable)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Replaces the item at a specific index with another item.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setItem-int-int-int-">setItem(int, int, int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Replaces the item at a specific index with another item.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setItem-int-int-int-int-">setItem(int, int, int, int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Replaces the item at a specific index with another item.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#setItemColor-int-">setItemColor(int)</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd>
<div class="block">Sets the text color of the adapter's items.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setItemColor-int-">setItemColor(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the color of the items of the bottom sheet, which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setItemColor-int-">setItemColor(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the color of the items of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#setItemEnabled-int-boolean-">setItemEnabled(int, boolean)</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd>
<div class="block">Sets, whether the item at a specific index should be enabled, or not.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setItemEnabled-int-boolean-">setItemEnabled(int, boolean)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets, whether the item at a specific index should be enabled, or not.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setItemEnabled-int-boolean-">setItemEnabled(int, boolean)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets, whether the item at a specific index should be enabled, or not.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setOnCancelListener-android.content.DialogInterface.OnCancelListener-">setOnCancelListener(DialogInterface.OnCancelListener)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the listener, which should be notified, when the bottom sheet, which is created by
the builder, is canceled.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setOnDismissListener-android.content.DialogInterface.OnDismissListener-">setOnDismissListener(DialogInterface.OnDismissListener)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the listener, which should be notified, when the bottom sheet, which is created by
the builder, is dismissed for any reason.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setOnItemClickListener-android.widget.AdapterView.OnItemClickListener-">setOnItemClickListener(AdapterView.OnItemClickListener)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the listener, which should be notified, when an item of the bottom sheet has been
clicked.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setOnItemClickListener-android.widget.AdapterView.OnItemClickListener-">setOnItemClickListener(AdapterView.OnItemClickListener)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the listener, which should be notified, when an item of the bottom sheet has been
clicked.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setOnItemLongClickListener-android.widget.AdapterView.OnItemLongClickListener-">setOnItemLongClickListener(AdapterView.OnItemLongClickListener)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the listener, which should be notified, when an item of the bottom sheet has been
long-clicked.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setOnItemLongClickListener-android.widget.AdapterView.OnItemLongClickListener-">setOnItemLongClickListener(AdapterView.OnItemLongClickListener)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the listener, which should be notified, when an item of the bottom sheet has been
long-clicked.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setOnKeyListener-android.content.DialogInterface.OnKeyListener-">setOnKeyListener(DialogInterface.OnKeyListener)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the listener, which should be notified, if a key is dispatched to the bottom sheet,
which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setOnMaximizeListener-de.mrapp.android.bottomsheet.OnMaximizeListener-">setOnMaximizeListener(OnMaximizeListener)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the listener, which should be notified, when the bottom sheet, which is created by
the builder, has been maximized.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setOnMaximizeListener-de.mrapp.android.bottomsheet.OnMaximizeListener-">setOnMaximizeListener(OnMaximizeListener)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the listener, which should be notified, when the bottom sheet has been maximized.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setOnShowListener-android.content.DialogInterface.OnShowListener-">setOnShowListener(DialogInterface.OnShowListener)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#setStyle-de.mrapp.android.bottomsheet.BottomSheet.Style-">setStyle(BottomSheet.Style)</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd>
<div class="block">Sets the style, which should be used to display the adapter's items.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setStyle-de.mrapp.android.bottomsheet.BottomSheet.Style-">setStyle(BottomSheet.Style)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the style of the bottom sheet, which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setStyle-de.mrapp.android.bottomsheet.BottomSheet.Style-">setStyle(BottomSheet.Style)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the style, which should be used to display the bottom sheet's items.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setTitle-java.lang.CharSequence-">setTitle(CharSequence)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the title of the bottom sheet, which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setTitle-int-">setTitle(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the title of the bottom sheet, which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setTitle-java.lang.CharSequence-">setTitle(CharSequence)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/model/AbstractItem.html#setTitle-java.lang.CharSequence-">setTitle(CharSequence)</a></span> - Method in class de.mrapp.android.bottomsheet.model.<a href="../de/mrapp/android/bottomsheet/model/AbstractItem.html" title="class in de.mrapp.android.bottomsheet.model">AbstractItem</a></dt>
<dd>
<div class="block">Sets the item's title.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/model/Divider.html#setTitle-android.content.Context-int-">setTitle(Context, int)</a></span> - Method in class de.mrapp.android.bottomsheet.model.<a href="../de/mrapp/android/bottomsheet/model/Divider.html" title="class in de.mrapp.android.bottomsheet.model">Divider</a></dt>
<dd>
<div class="block">Sets the divider's title.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/model/Item.html#setTitle-android.content.Context-int-">setTitle(Context, int)</a></span> - Method in class de.mrapp.android.bottomsheet.model.<a href="../de/mrapp/android/bottomsheet/model/Item.html" title="class in de.mrapp.android.bottomsheet.model">Item</a></dt>
<dd>
<div class="block">Sets the title of the item.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/model/Item.html#setTitle-java.lang.CharSequence-">setTitle(CharSequence)</a></span> - Method in class de.mrapp.android.bottomsheet.model.<a href="../de/mrapp/android/bottomsheet/model/Item.html" title="class in de.mrapp.android.bottomsheet.model">Item</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setTitleColor-int-">setTitleColor(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the color of the title of the bottom sheet, which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setTitleColor-int-">setTitleColor(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the color of the title of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setView-android.view.View-">setView(View)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the custom view, which should be shown by the bottom sheet, which is created by the
builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setView-int-">setView(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the custom view, which should be shown by the bottom sheet, which is created by the
builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setView-android.view.View-">setView(View)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the custom view, which should be shown by the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setView-int-">setView(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the custom view, which should be shown by the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#setWidth-int-">setWidth(int)</a></span> - Method in class de.mrapp.android.bottomsheet.adapter.<a href="../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter">DividableGridAdapter</a></dt>
<dd>
<div class="block">Sets the width of the bottom sheet, the items, which are displayed by the adapter, belong
to.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#setWidth-int-">setWidth(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Sets the width of the bottom sheet, which is created by the builder.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.html#setWidth-int-">setWidth(int)</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet">BottomSheet</a></dt>
<dd>
<div class="block">Sets the width of the bottom sheet.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/view/DraggableView.html#setWidth-int-">setWidth(int)</a></span> - Method in class de.mrapp.android.bottomsheet.view.<a href="../de/mrapp/android/bottomsheet/view/DraggableView.html" title="class in de.mrapp.android.bottomsheet.view">DraggableView</a></dt>
<dd>
<div class="block">Sets the width of the view.</div>
</dd>
<dt><span class="memberNameLink"><a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html#show--">show()</a></span> - Method in class de.mrapp.android.bottomsheet.<a href="../de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet">BottomSheet.Builder</a></dt>
<dd>
<div class="block">Creates a bottom sheet with the arguments, which have been supplied to the builder and
immediately displays it.</div>
</dd>
</dl>
<a href="index-1.html">A</a> <a href="index-2.html">B</a> <a href="index-3.html">C</a> <a href="index-4.html">D</a> <a href="index-5.html">E</a> <a href="index-6.html">G</a> <a href="index-7.html">H</a> <a href="index-8.html">I</a> <a href="index-9.html">M</a> <a href="index-10.html">N</a> <a href="index-11.html">O</a> <a href="index-12.html">P</a> <a href="index-13.html">R</a> <a href="index-14.html">S</a> <a href="index-15.html">T</a> <a href="index-16.html">V</a> </div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li><a href="../overview-tree.html">Tree</a></li>
<li><a href="../deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Index</li>
<li><a href="../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="index-13.html">Prev Letter</a></li>
<li><a href="index-15.html">Next Letter</a></li>
</ul>
<ul class="navList">
<li><a href="../index.html?index-files/index-14.html" target="_top">Frames</a></li>
<li><a href="index-14.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
<|start_filename|>doc/javadoc/overview-tree.html<|end_filename|>
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_152-release) on Tue Oct 30 23:20:43 CET 2018 -->
<title>Class Hierarchy</title>
<meta name="date" content="2018-10-30">
<link rel="stylesheet" type="text/css" href="stylesheet.css" title="Style">
<script type="text/javascript" src="script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Class Hierarchy";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li class="navBarCell1Rev">Tree</li>
<li><a href="deprecated-list.html">Deprecated</a></li>
<li><a href="index-files/index-1.html">Index</a></li>
<li><a href="help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="index.html?overview-tree.html" target="_top">Frames</a></li>
<li><a href="overview-tree.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h1 class="title">Hierarchy For All Packages</h1>
<span class="packageHierarchyLabel">Package Hierarchies:</span>
<ul class="horizontal">
<li><a href="de/mrapp/android/bottomsheet/package-tree.html">de.mrapp.android.bottomsheet</a>, </li>
<li><a href="de/mrapp/android/bottomsheet/adapter/package-tree.html">de.mrapp.android.bottomsheet.adapter</a>, </li>
<li><a href="de/mrapp/android/bottomsheet/animation/package-tree.html">de.mrapp.android.bottomsheet.animation</a>, </li>
<li><a href="de/mrapp/android/bottomsheet/model/package-tree.html">de.mrapp.android.bottomsheet.model</a>, </li>
<li><a href="de/mrapp/android/bottomsheet/view/package-tree.html">de.mrapp.android.bottomsheet.view</a></li>
</ul>
</div>
<div class="contentContainer">
<h2 title="Class Hierarchy">Class Hierarchy</h2>
<ul>
<li type="circle">java.lang.Object
<ul>
<li type="circle">de.mrapp.android.bottomsheet.model.<a href="de/mrapp/android/bottomsheet/model/AbstractItem.html" title="class in de.mrapp.android.bottomsheet.model"><span class="typeNameLink">AbstractItem</span></a> (implements java.lang.Cloneable, java.io.Serializable)
<ul>
<li type="circle">de.mrapp.android.bottomsheet.model.<a href="de/mrapp/android/bottomsheet/model/Divider.html" title="class in de.mrapp.android.bottomsheet.model"><span class="typeNameLink">Divider</span></a></li>
<li type="circle">de.mrapp.android.bottomsheet.model.<a href="de/mrapp/android/bottomsheet/model/Item.html" title="class in de.mrapp.android.bottomsheet.model"><span class="typeNameLink">Item</span></a></li>
</ul>
</li>
<li type="circle">android.view.animation.Animation (implements java.lang.Cloneable)
<ul>
<li type="circle">de.mrapp.android.bottomsheet.animation.<a href="de/mrapp/android/bottomsheet/animation/DraggableViewAnimation.html" title="class in de.mrapp.android.bottomsheet.animation"><span class="typeNameLink">DraggableViewAnimation</span></a></li>
</ul>
</li>
<li type="circle">android.widget.BaseAdapter (implements android.widget.ListAdapter, android.widget.SpinnerAdapter)
<ul>
<li type="circle">de.mrapp.android.bottomsheet.adapter.<a href="de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" title="class in de.mrapp.android.bottomsheet.adapter"><span class="typeNameLink">DividableGridAdapter</span></a></li>
</ul>
</li>
<li type="circle">de.mrapp.android.bottomsheet.<a href="de/mrapp/android/bottomsheet/BottomSheet.Builder.html" title="class in de.mrapp.android.bottomsheet"><span class="typeNameLink">BottomSheet.Builder</span></a></li>
<li type="circle">android.app.Dialog (implements android.content.DialogInterface, android.view.KeyEvent.Callback, android.view.View.OnCreateContextMenuListener, android.view.Window.Callback)
<ul>
<li type="circle">de.mrapp.android.bottomsheet.<a href="de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><span class="typeNameLink">BottomSheet</span></a> (implements android.content.DialogInterface, de.mrapp.android.bottomsheet.view.<a href="de/mrapp/android/bottomsheet/view/DraggableView.Callback.html" title="interface in de.mrapp.android.bottomsheet.view">DraggableView.Callback</a>)</li>
</ul>
</li>
<li type="circle">android.view.View (implements android.view.accessibility.AccessibilityEventSource, android.graphics.drawable.Drawable.Callback, android.view.KeyEvent.Callback)
<ul>
<li type="circle">android.view.ViewGroup (implements android.view.ViewManager, android.view.ViewParent)
<ul>
<li type="circle">android.widget.AdapterView<T>
<ul>
<li type="circle">android.widget.AbsListView (implements android.widget.Filter.FilterListener, android.text.TextWatcher, android.view.ViewTreeObserver.OnGlobalLayoutListener, android.view.ViewTreeObserver.OnTouchModeChangeListener)
<ul>
<li type="circle">android.widget.GridView
<ul>
<li type="circle">de.mrapp.android.bottomsheet.view.<a href="de/mrapp/android/bottomsheet/view/DividableGridView.html" title="class in de.mrapp.android.bottomsheet.view"><span class="typeNameLink">DividableGridView</span></a></li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
<li type="circle">android.widget.LinearLayout
<ul>
<li type="circle">de.mrapp.android.bottomsheet.view.<a href="de/mrapp/android/bottomsheet/view/DraggableView.html" title="class in de.mrapp.android.bottomsheet.view"><span class="typeNameLink">DraggableView</span></a> (implements android.view.ViewTreeObserver.OnGlobalLayoutListener)</li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
</ul>
<h2 title="Interface Hierarchy">Interface Hierarchy</h2>
<ul>
<li type="circle">de.mrapp.android.bottomsheet.view.<a href="de/mrapp/android/bottomsheet/view/DraggableView.Callback.html" title="interface in de.mrapp.android.bottomsheet.view"><span class="typeNameLink">DraggableView.Callback</span></a></li>
<li type="circle">de.mrapp.android.bottomsheet.<a href="de/mrapp/android/bottomsheet/OnMaximizeListener.html" title="interface in de.mrapp.android.bottomsheet"><span class="typeNameLink">OnMaximizeListener</span></a></li>
</ul>
<h2 title="Enum Hierarchy">Enum Hierarchy</h2>
<ul>
<li type="circle">java.lang.Object
<ul>
<li type="circle">java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
<ul>
<li type="circle">de.mrapp.android.bottomsheet.<a href="de/mrapp/android/bottomsheet/BottomSheet.Style.html" title="enum in de.mrapp.android.bottomsheet"><span class="typeNameLink">BottomSheet.Style</span></a></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li class="navBarCell1Rev">Tree</li>
<li><a href="deprecated-list.html">Deprecated</a></li>
<li><a href="index-files/index-1.html">Index</a></li>
<li><a href="help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="index.html?overview-tree.html" target="_top">Frames</a></li>
<li><a href="overview-tree.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
<|start_filename|>doc/javadoc/de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html<|end_filename|>
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_152-release) on Tue Oct 30 23:20:43 CET 2018 -->
<title>DividableGridAdapter</title>
<meta name="date" content="2018-10-30">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="DividableGridAdapter";
}
}
catch(err) {
}
//-->
var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10};
var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
var altColor = "altColor";
var rowColor = "rowColor";
var tableTab = "tableTab";
var activeTableTab = "activeTableTab";
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev Class</li>
<li>Next Class</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" target="_top">Frames</a></li>
<li><a href="DividableGridAdapter.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li><a href="#constructor.summary">Constr</a> | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li><a href="#constructor.detail">Constr</a> | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">de.mrapp.android.bottomsheet.adapter</div>
<h2 title="Class DividableGridAdapter" class="title">Class DividableGridAdapter</h2>
</div>
<div class="contentContainer">
<ul class="inheritance">
<li>java.lang.Object</li>
<li>
<ul class="inheritance">
<li>android.widget.BaseAdapter</li>
<li>
<ul class="inheritance">
<li>de.mrapp.android.bottomsheet.adapter.DividableGridAdapter</li>
</ul>
</li>
</ul>
</li>
</ul>
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>All Implemented Interfaces:</dt>
<dd>android.widget.Adapter, android.widget.ListAdapter, android.widget.SpinnerAdapter</dd>
</dl>
<hr>
<br>
<pre>public class <span class="typeNameLabel">DividableGridAdapter</span>
extends android.widget.BaseAdapter</pre>
<div class="block">An adapter, which manages the items of a <a href="../../../../../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a>. It allows the items to be shown in a
list or grid and supports the display of dividers.</div>
<dl>
<dt><span class="simpleTagLabel">Since:</span></dt>
<dd>1.0.0</dd>
<dt><span class="simpleTagLabel">Author:</span></dt>
<dd><NAME></dd>
</dl>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- =========== FIELD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="field.summary">
<!-- -->
</a>
<h3>Field Summary</h3>
<ul class="blockList">
<li class="blockList"><a name="fields.inherited.from.class.android.widget.Adapter">
<!-- -->
</a>
<h3>Fields inherited from interface android.widget.Adapter</h3>
<code>IGNORE_ITEM_VIEW_TYPE, NO_SELECTION</code></li>
</ul>
</li>
</ul>
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.summary">
<!-- -->
</a>
<h3>Constructor Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
<caption><span>Constructors</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Constructor and Description</th>
</tr>
<tr class="altColor">
<td class="colOne"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#DividableGridAdapter-android.content.Context-de.mrapp.android.bottomsheet.BottomSheet.Style-int-">DividableGridAdapter</a></span>(android.content.Context context,
<a href="../../../../../de/mrapp/android/bottomsheet/BottomSheet.Style.html" title="enum in de.mrapp.android.bottomsheet">BottomSheet.Style</a> style,
int width)</code>
<div class="block">Creates a new adapter, which manages the items of a <a href="../../../../../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a>.</div>
</td>
</tr>
</table>
</li>
</ul>
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method.summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd"> </span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd"> </span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd"> </span></span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr id="i0" class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#add-de.mrapp.android.bottomsheet.model.AbstractItem-">add</a></span>(<a href="../../../../../de/mrapp/android/bottomsheet/model/AbstractItem.html" title="class in de.mrapp.android.bottomsheet.model">AbstractItem</a> item)</code>
<div class="block">Adds a new item to the adapter.</div>
</td>
</tr>
<tr id="i1" class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#clear--">clear</a></span>()</code>
<div class="block">Removes all items from the adapter.</div>
</td>
</tr>
<tr id="i2" class="altColor">
<td class="colFirst"><code>boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#containsDividers--">containsDividers</a></span>()</code>
<div class="block">Returns whether the adapter contains dividers.</div>
</td>
</tr>
<tr id="i3" class="rowColor">
<td class="colFirst"><code>int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getColumnCount--">getColumnCount</a></span>()</code>
<div class="block">Returns the number of columns, which are displayed by the adapter.</div>
</td>
</tr>
<tr id="i4" class="altColor">
<td class="colFirst"><code>int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getCount--">getCount</a></span>()</code> </td>
</tr>
<tr id="i5" class="rowColor">
<td class="colFirst"><code>int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getDividerColor--">getDividerColor</a></span>()</code>
<div class="block">Returns the color of the adapter's dividers.</div>
</td>
</tr>
<tr id="i6" class="altColor">
<td class="colFirst"><code><a href="../../../../../de/mrapp/android/bottomsheet/model/AbstractItem.html" title="class in de.mrapp.android.bottomsheet.model">AbstractItem</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getItem-int-">getItem</a></span>(int position)</code> </td>
</tr>
<tr id="i7" class="rowColor">
<td class="colFirst"><code>int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getItemColor--">getItemColor</a></span>()</code>
<div class="block">Returns the text color of the adapter's items.</div>
</td>
</tr>
<tr id="i8" class="altColor">
<td class="colFirst"><code>int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getItemCount--">getItemCount</a></span>()</code>
<div class="block">Returns the number of items, which are contained by the adapter.</div>
</td>
</tr>
<tr id="i9" class="rowColor">
<td class="colFirst"><code>long</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getItemId-int-">getItemId</a></span>(int position)</code> </td>
</tr>
<tr id="i10" class="altColor">
<td class="colFirst"><code>int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getItemViewType-int-">getItemViewType</a></span>(int position)</code> </td>
</tr>
<tr id="i11" class="rowColor">
<td class="colFirst"><code><a href="../../../../../de/mrapp/android/bottomsheet/BottomSheet.Style.html" title="enum in de.mrapp.android.bottomsheet">BottomSheet.Style</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getStyle--">getStyle</a></span>()</code>
<div class="block">Returns the style, which is used to display the adapter's items.</div>
</td>
</tr>
<tr id="i12" class="altColor">
<td class="colFirst"><code>android.view.View</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getView-int-android.view.View-android.view.ViewGroup-">getView</a></span>(int position,
android.view.View convertView,
android.view.ViewGroup parent)</code> </td>
</tr>
<tr id="i13" class="rowColor">
<td class="colFirst"><code>int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#getViewTypeCount--">getViewTypeCount</a></span>()</code> </td>
</tr>
<tr id="i14" class="altColor">
<td class="colFirst"><code>boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#isEnabled-int-">isEnabled</a></span>(int position)</code> </td>
</tr>
<tr id="i15" class="rowColor">
<td class="colFirst"><code>boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#isItemEnabled-int-">isItemEnabled</a></span>(int index)</code>
<div class="block">Returns whether the item at a specific index is enabled.</div>
</td>
</tr>
<tr id="i16" class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#notifyOnChange-boolean-">notifyOnChange</a></span>(boolean notifyOnChange)</code>
<div class="block">Sets whether the <code>notifyDataSetChanged</code> method should be called automatically
when the adapter's items have been changed.</div>
</td>
</tr>
<tr id="i17" class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#remove-int-">remove</a></span>(int index)</code>
<div class="block">Removes the item at a specific index.</div>
</td>
</tr>
<tr id="i18" class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#set-int-de.mrapp.android.bottomsheet.model.AbstractItem-">set</a></span>(int index,
<a href="../../../../../de/mrapp/android/bottomsheet/model/AbstractItem.html" title="class in de.mrapp.android.bottomsheet.model">AbstractItem</a> item)</code>
<div class="block">Replaces the item at a specific index.</div>
</td>
</tr>
<tr id="i19" class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#setDividerColor-int-">setDividerColor</a></span>(int color)</code>
<div class="block">Sets the color of the adapter's dividers.</div>
</td>
</tr>
<tr id="i20" class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#setItemColor-int-">setItemColor</a></span>(int color)</code>
<div class="block">Sets the text color of the adapter's items.</div>
</td>
</tr>
<tr id="i21" class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#setItemEnabled-int-boolean-">setItemEnabled</a></span>(int index,
boolean enabled)</code>
<div class="block">Sets whether the item at a specific index should be enabled.</div>
</td>
</tr>
<tr id="i22" class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#setStyle-de.mrapp.android.bottomsheet.BottomSheet.Style-">setStyle</a></span>(<a href="../../../../../de/mrapp/android/bottomsheet/BottomSheet.Style.html" title="enum in de.mrapp.android.bottomsheet">BottomSheet.Style</a> style)</code>
<div class="block">Sets the style, which should be used to display the adapter's items.</div>
</td>
</tr>
<tr id="i23" class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html#setWidth-int-">setWidth</a></span>(int width)</code>
<div class="block">Sets the width of the bottom sheet, the items, which are displayed by the adapter, belong
to.</div>
</td>
</tr>
</table>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.android.widget.BaseAdapter">
<!-- -->
</a>
<h3>Methods inherited from class android.widget.BaseAdapter</h3>
<code>areAllItemsEnabled, getAutofillOptions, getDropDownView, hasStableIds, isEmpty, notifyDataSetChanged, notifyDataSetInvalidated, registerDataSetObserver, setAutofillOptions, unregisterDataSetObserver</code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
<!-- -->
</a>
<h3>Methods inherited from class java.lang.Object</h3>
<code>clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.detail">
<!-- -->
</a>
<h3>Constructor Detail</h3>
<a name="DividableGridAdapter-android.content.Context-de.mrapp.android.bottomsheet.BottomSheet.Style-int-">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>DividableGridAdapter</h4>
<pre>public DividableGridAdapter(@NonNull
android.content.Context context,
<a href="../../../../../de/mrapp/android/bottomsheet/BottomSheet.Style.html" title="enum in de.mrapp.android.bottomsheet">BottomSheet.Style</a> style,
int width)</pre>
<div class="block">Creates a new adapter, which manages the items of a <a href="../../../../../de/mrapp/android/bottomsheet/BottomSheet.html" title="class in de.mrapp.android.bottomsheet"><code>BottomSheet</code></a>.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>context</code> - The context, which should be used by the adapter, as an instance of the class <code>Context</code>. The context may not be null</dd>
<dd><code>style</code> - The style, which should be used to display the adapter's items, as a value of the
enum <a href="../../../../../de/mrapp/android/bottomsheet/BottomSheet.Style.html" title="enum in de.mrapp.android.bottomsheet"><code>BottomSheet.Style</code></a>. The style may either be <code>LIST</code>,
<code>LIST_COLUMNS</code> or <code>GRID</code></dd>
<dd><code>width</code> - The width of the bottom sheet, the items, which are displayed by the adapter, belong
to, as an <code>Integer</code> value</dd>
</dl>
</li>
</ul>
</li>
</ul>
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method.detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="getStyle--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getStyle</h4>
<pre>public final <a href="../../../../../de/mrapp/android/bottomsheet/BottomSheet.Style.html" title="enum in de.mrapp.android.bottomsheet">BottomSheet.Style</a> getStyle()</pre>
<div class="block">Returns the style, which is used to display the adapter's items.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The style, which is used to display the adapter's items, as a value of the enum
<a href="../../../../../de/mrapp/android/bottomsheet/BottomSheet.Style.html" title="enum in de.mrapp.android.bottomsheet"><code>BottomSheet.Style</code></a>. The style may either be <code>LIST</code>, <code>LIST_COLUMNS</code> or
<code>GRID</code></dd>
</dl>
</li>
</ul>
<a name="setStyle-de.mrapp.android.bottomsheet.BottomSheet.Style-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setStyle</h4>
<pre>public final void setStyle(@NonNull
<a href="../../../../../de/mrapp/android/bottomsheet/BottomSheet.Style.html" title="enum in de.mrapp.android.bottomsheet">BottomSheet.Style</a> style)</pre>
<div class="block">Sets the style, which should be used to display the adapter's items.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>style</code> - The style, which should be set, as a value of the enum <a href="../../../../../de/mrapp/android/bottomsheet/BottomSheet.Style.html" title="enum in de.mrapp.android.bottomsheet"><code>BottomSheet.Style</code></a>. The style may
either be <code>LIST</code>, <code>LIST_COLUMNS</code> or <code>GRID</code></dd>
</dl>
</li>
</ul>
<a name="setWidth-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setWidth</h4>
<pre>public final void setWidth(int width)</pre>
<div class="block">Sets the width of the bottom sheet, the items, which are displayed by the adapter, belong
to.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>width</code> - The width, which should be set, as an <code>Integer</code> value</dd>
</dl>
</li>
</ul>
<a name="containsDividers--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>containsDividers</h4>
<pre>public final boolean containsDividers()</pre>
<div class="block">Returns, whether the adapter contains dividers, or not.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>True, if the adapter contains dividers, false otherwise</dd>
</dl>
</li>
</ul>
<a name="getColumnCount--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getColumnCount</h4>
<pre>public final int getColumnCount()</pre>
<div class="block">Returns the number of columns, which are displayed by the adapter.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The number of columns, which are displayed by the adapter</dd>
</dl>
</li>
</ul>
<a name="getItemColor--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getItemColor</h4>
<pre>public final int getItemColor()</pre>
<div class="block">Returns the text color of the adapter's items.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The text color of the adapter's items as an <code>Integer</code> value or -1, if no custom
color has been set</dd>
</dl>
</li>
</ul>
<a name="setItemColor-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setItemColor</h4>
<pre>public final void setItemColor(int color)</pre>
<div class="block">Sets the text color of the adapter's items.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>color</code> - The text color, which should be set, as an <code>Integer</code> value or -1, if no custom
color should be set</dd>
</dl>
</li>
</ul>
<a name="getDividerColor--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getDividerColor</h4>
<pre>public final int getDividerColor()</pre>
<div class="block">Returns the color of the adapter's dividers.</div>
<dl>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>The color of the adapter's dividers as an <code>Integer</code> value or -1, if no custom
color has been set</dd>
</dl>
</li>
</ul>
<a name="setDividerColor-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setDividerColor</h4>
<pre>public final void setDividerColor(int color)</pre>
<div class="block">Sets the color of the adapter's dividers.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>color</code> - The color, which should be set, as an <code>Integer</code> value or -1, if no custom color
should be set</dd>
</dl>
</li>
</ul>
<a name="add-de.mrapp.android.bottomsheet.model.AbstractItem-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>add</h4>
<pre>public final void add(@NonNull
<a href="../../../../../de/mrapp/android/bottomsheet/model/AbstractItem.html" title="class in de.mrapp.android.bottomsheet.model">AbstractItem</a> item)</pre>
<div class="block">Adds a new item to the adapter.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>item</code> - The item, which should be added, as an instance of the class <a href="../../../../../de/mrapp/android/bottomsheet/model/AbstractItem.html" title="class in de.mrapp.android.bottomsheet.model"><code>AbstractItem</code></a>.
The item may not be null</dd>
</dl>
</li>
</ul>
<a name="set-int-de.mrapp.android.bottomsheet.model.AbstractItem-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>set</h4>
<pre>public final void set(int index,
@NonNull
<a href="../../../../../de/mrapp/android/bottomsheet/model/AbstractItem.html" title="class in de.mrapp.android.bottomsheet.model">AbstractItem</a> item)</pre>
<div class="block">Replaces the item at a specific index.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>index</code> - The index of the item, which should be replaced, as an <code>Integer</code> value</dd>
<dd><code>item</code> - The item, which should be set, as an instance of the class <a href="../../../../../de/mrapp/android/bottomsheet/model/AbstractItem.html" title="class in de.mrapp.android.bottomsheet.model"><code>AbstractItem</code></a>. The
item may not be null</dd>
</dl>
</li>
</ul>
<a name="remove-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>remove</h4>
<pre>public final void remove(int index)</pre>
<div class="block">Removes the item at a specific index.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>index</code> - The index of the item, which should be removed, as an <code>Integer</code> value</dd>
</dl>
</li>
</ul>
<a name="clear--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>clear</h4>
<pre>public final void clear()</pre>
<div class="block">Removes all items from the adapter.</div>
</li>
</ul>
<a name="getItemCount--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getItemCount</h4>
<pre>public final int getItemCount()</pre>
<div class="block">Returns the number of items, which are contained by the adapter.</div>
</li>
</ul>
<a name="isItemEnabled-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isItemEnabled</h4>
<pre>public final boolean isItemEnabled(int index)</pre>
<div class="block">Returns, whether the item at a specific index is enabled, or not.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>index</code> - The index of the item, which should be checked, as an <code>Integer</code> value</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>True, if the item is enabled, false otherwise</dd>
</dl>
</li>
</ul>
<a name="setItemEnabled-int-boolean-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>setItemEnabled</h4>
<pre>public final void setItemEnabled(int index,
boolean enabled)</pre>
<div class="block">Sets, whether the item at a specific index should be enabled, or not.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>index</code> - The index of the item as an <code>Integer</code> value</dd>
<dd><code>enabled</code> - True, if the item should be enabled, false otherwise</dd>
</dl>
</li>
</ul>
<a name="notifyOnChange-boolean-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>notifyOnChange</h4>
<pre>public final void notifyOnChange(boolean notifyOnChange)</pre>
<div class="block">Sets, whether the <code>notifyDataSetChanged</code>-method should be called automatically,
when the adapter's items have been changed, or not.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>notifyOnChange</code> - True, if the <code>notifyDataSetChanged</code>-method should be called automatically,
when the adapter's items have been changed, false otherwise</dd>
</dl>
</li>
</ul>
<a name="isEnabled-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>isEnabled</h4>
<pre>public final boolean isEnabled(int position)</pre>
<dl>
<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
<dd><code>isEnabled</code> in interface <code>android.widget.ListAdapter</code></dd>
<dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
<dd><code>isEnabled</code> in class <code>android.widget.BaseAdapter</code></dd>
</dl>
</li>
</ul>
<a name="getCount--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getCount</h4>
<pre>public final int getCount()</pre>
</li>
</ul>
<a name="getItem-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getItem</h4>
<pre>public final <a href="../../../../../de/mrapp/android/bottomsheet/model/AbstractItem.html" title="class in de.mrapp.android.bottomsheet.model">AbstractItem</a> getItem(int position)</pre>
</li>
</ul>
<a name="getItemId-int-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getItemId</h4>
<pre>public final long getItemId(int position)</pre>
</li>
</ul>
<a name="getView-int-android.view.View-android.view.ViewGroup-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getView</h4>
<pre>public final android.view.View getView(int position,
android.view.View convertView,
android.view.ViewGroup parent)</pre>
</li>
</ul>
<a name="getViewTypeCount--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>getViewTypeCount</h4>
<pre>public final int getViewTypeCount()</pre>
<dl>
<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
<dd><code>getViewTypeCount</code> in interface <code>android.widget.Adapter</code></dd>
<dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
<dd><code>getViewTypeCount</code> in class <code>android.widget.BaseAdapter</code></dd>
</dl>
</li>
</ul>
<a name="getItemViewType-int-">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>getItemViewType</h4>
<pre>public final int getItemViewType(int position)</pre>
<dl>
<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
<dd><code>getItemViewType</code> in interface <code>android.widget.Adapter</code></dd>
<dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
<dd><code>getItemViewType</code> in class <code>android.widget.BaseAdapter</code></dd>
</dl>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev Class</li>
<li>Next Class</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?de/mrapp/android/bottomsheet/adapter/DividableGridAdapter.html" target="_top">Frames</a></li>
<li><a href="DividableGridAdapter.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li><a href="#constructor.summary">Constr</a> | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li><a href="#constructor.detail">Constr</a> | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
<|start_filename|>library/src/main/java/de/mrapp/android/bottomsheet/BottomSheet.java<|end_filename|>
/*
* Copyright 2016 - 2018 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package de.mrapp.android.bottomsheet;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.Dialog;
import android.content.ComponentName;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.DecelerateInterpolator;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.AdapterView.OnItemLongClickListener;
import android.widget.FrameLayout;
import android.widget.GridView;
import android.widget.ListAdapter;
import android.widget.TextView;
import java.util.List;
import androidx.annotation.AttrRes;
import androidx.annotation.ColorInt;
import androidx.annotation.DrawableRes;
import androidx.annotation.LayoutRes;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.StringRes;
import androidx.annotation.StyleRes;
import androidx.core.content.ContextCompat;
import de.mrapp.android.bottomsheet.adapter.DividableGridAdapter;
import de.mrapp.android.bottomsheet.model.AbstractItem;
import de.mrapp.android.bottomsheet.model.Divider;
import de.mrapp.android.bottomsheet.model.Item;
import de.mrapp.android.bottomsheet.view.DividableGridView;
import de.mrapp.android.bottomsheet.view.DraggableView;
import de.mrapp.android.util.DisplayUtil;
import de.mrapp.android.util.ViewUtil;
import de.mrapp.util.Condition;
import static android.os.Build.VERSION_CODES;
import static de.mrapp.android.util.DisplayUtil.getDeviceType;
import static de.mrapp.android.util.DisplayUtil.getOrientation;
/**
* A bottom sheet, which is designed according to the Android 5's Material Design guidelines even on
* pre-Lollipop devices. Such a bottom sheet appears at the bottom of the window and consists of a
* title and multiple items. It is possible to customize the appearance of the bottom sheet or to
* replace its title and items with custom views.
* <p>
* For creating or showing such bottom sheets, the methods {@link Builder#create()} or {@link
* Builder#show()} of the builder {@link de.mrapp.android.bottomsheet.BottomSheet.Builder} can be
* used.
*
* @author <NAME>
* @since 1.0.0
*/
public class BottomSheet extends Dialog implements DialogInterface, DraggableView.Callback {
/**
* A builder, which allows to create and show bottom sheets, which are designed according to
* Android 5's Material Design guidelines even on pre-Lollipop devices. Such a bottom sheet
* appears at the bottom of the window and consists of a title and multiple items. It is
* possible to customize the appearance of the bottom sheet or to replace its title and items
* with custom views.
*/
public static class Builder {
/**
* The bottom sheet, which is created by the builder.
*/
private BottomSheet bottomSheet;
/**
 * Initializes the builder.
 *
 * @param context
 *         The context, which should be used by the builder, as an instance of the class
 *         {@link Context}. The context may not be null
 * @param themeResourceId
 *         The resource id of the theme, which should be used by the dialog, as an {@link
 *         Integer} value, or -1, if the default theme should be used
 */
private void initialize(@NonNull final Context context,
                        @StyleRes final int themeResourceId) {
    int themeId = themeResourceId;
    if (themeId == -1) {
        // No explicit theme supplied: resolve it from the bottomSheetTheme attribute of the
        // current theme, falling back to the light default theme if the attribute is unset.
        // (Previously the themeResourceId parameter was accepted but never used, so an
        // explicitly supplied theme never took effect.)
        TypedValue typedValue = new TypedValue();
        context.getTheme().resolveAttribute(R.attr.bottomSheetTheme, typedValue, true);
        int resolvedThemeId = typedValue.resourceId;
        themeId = resolvedThemeId != 0 ? resolvedThemeId : R.style.BottomSheet_Light;
    }
    bottomSheet = new BottomSheet(context, themeId);
    bottomSheet.requestWindowFeature(Window.FEATURE_NO_TITLE);
    bottomSheet.setCanceledOnTouchOutside(true);
    bottomSheet.setCancelable(true);
    bottomSheet.setContentView(createContentView(),
            new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
                    ViewGroup.LayoutParams.MATCH_PARENT));
    obtainStyledAttributes(themeId);
}
/**
 * Creates and returns the bottom sheet's view stub, which allows to inflate the bottom
 * sheet's layout when shown.
 *
 * @return The view stub, which has been created, as an instance of the class {@link View}
 */
private View createContentView() {
    // An empty FrameLayout with the well-known android.R.id.content id serves as a
    // placeholder; the actual bottom sheet layout is inflated into it later.
    FrameLayout contentView = new FrameLayout(getContext());
    contentView.setId(android.R.id.content);
    return contentView;
}
/**
 * Obtains all relevant attributes from the current theme.
 *
 * @param themeResourceId
 *         The resource id of the theme, the attributes should be obtained from, as an
 *         {@link Integer} value
 */
private void obtainStyledAttributes(@StyleRes final int themeResourceId) {
    obtainBackground(themeResourceId);
    obtainTitleColor(themeResourceId);
    obtainItemColor(themeResourceId);
    obtainDividerColor(themeResourceId);
    obtainDimAmount(themeResourceId);
    obtainDragSensitivity(themeResourceId);
}
/**
 * Obtains the background from the current theme.
 *
 * @param themeResourceId
 *         The resource id of the theme, the background should be obtained from, as an
 *         {@link Integer} value
 */
private void obtainBackground(@StyleRes final int themeResourceId) {
    TypedArray typedArray = getContext().getTheme().obtainStyledAttributes(themeResourceId,
            new int[]{R.attr.bottomSheetBackground});
    try {
        // The attribute may either reference a solid color or a drawable resource.
        int color = typedArray.getColor(0, -1);
        if (color != -1) {
            setBackgroundColor(color);
        } else {
            int resourceId = typedArray.getResourceId(0, 0);
            if (resourceId != 0) {
                setBackground(resourceId);
            }
        }
    } finally {
        // TypedArray instances are pooled by the framework and must be recycled after use.
        typedArray.recycle();
    }
}
/**
 * Obtains the title color from the current theme.
 *
 * @param themeResourceId
 *         The resource id of the theme, the title color should be obtained from, as an
 *         {@link Integer} value
 */
private void obtainTitleColor(@StyleRes final int themeResourceId) {
    TypedArray typedArray = getContext().getTheme().obtainStyledAttributes(themeResourceId,
            new int[]{R.attr.bottomSheetTitleColor});
    try {
        // -1 marks "no custom color set"; only apply the color when the attribute is present.
        int color = typedArray.getColor(0, -1);
        if (color != -1) {
            setTitleColor(color);
        }
    } finally {
        // TypedArray instances are pooled by the framework and must be recycled after use.
        typedArray.recycle();
    }
}
/**
 * Obtains the divider color from the current theme.
 *
 * @param themeResourceId
 *         The resource id of the theme, the divider color should be obtained from, as an
 *         {@link Integer} value
 */
private void obtainDividerColor(@StyleRes final int themeResourceId) {
    TypedArray typedArray = getContext().getTheme().obtainStyledAttributes(themeResourceId,
            new int[]{R.attr.bottomSheetDividerColor});
    try {
        // -1 marks "no custom color set"; only apply the color when the attribute is present.
        int color = typedArray.getColor(0, -1);
        if (color != -1) {
            setDividerColor(color);
        }
    } finally {
        // TypedArray instances are pooled by the framework and must be recycled after use.
        typedArray.recycle();
    }
}
/**
 * Obtains the item color from the current theme.
 *
 * @param themeResourceId
 *         The resource id of the theme, the item color should be obtained from, as an
 *         {@link Integer} value
 */
private void obtainItemColor(@StyleRes final int themeResourceId) {
    TypedArray typedArray = getContext().getTheme().obtainStyledAttributes(themeResourceId,
            new int[]{R.attr.bottomSheetItemColor});
    try {
        // -1 marks "no custom color set"; only apply the color when the attribute is present.
        int color = typedArray.getColor(0, -1);
        if (color != -1) {
            setItemColor(color);
        }
    } finally {
        // TypedArray instances are pooled by the framework and must be recycled after use.
        typedArray.recycle();
    }
}
/**
 * Obtains the dim amount from the current theme.
 *
 * @param themeResourceId
 *         The resource id of the theme, the dim amount should be obtained from, as an
 *         {@link Integer} value
 */
private void obtainDimAmount(@StyleRes final int themeResourceId) {
    TypedArray typedArray = getContext().getTheme().obtainStyledAttributes(themeResourceId,
            new int[]{R.attr.bottomSheetDimAmount});
    try {
        // The attribute is a fraction in [0, 1]; -1 marks "not set".
        float dimAmount = typedArray.getFraction(0, 1, 1, -1);
        if (dimAmount != -1) {
            setDimAmount(dimAmount);
        }
    } finally {
        // TypedArray instances are pooled by the framework and must be recycled after use.
        typedArray.recycle();
    }
}
/**
 * Obtains the drag sensitivity from the current theme.
 *
 * @param themeResourceId
 *         The resource id of the theme, the drag sensitivity should be obtained from, as an
 *         {@link Integer} value
 */
private void obtainDragSensitivity(@StyleRes final int themeResourceId) {
    TypedArray typedArray = getContext().getTheme().obtainStyledAttributes(themeResourceId,
            new int[]{R.attr.bottomSheetDragSensitivity});
    try {
        // The attribute is a fraction in [0, 1]; -1 marks "not set".
        float dragSensitivity = typedArray.getFraction(0, 1, 1, -1);
        if (dragSensitivity != -1) {
            setDragSensitivity(dragSensitivity);
        }
    } finally {
        // TypedArray instances are pooled by the framework and must be recycled after use.
        typedArray.recycle();
    }
}
/**
 * Creates a new builder, which allows to create bottom sheets, which are designed according
 * to Android 5's Material Design guidelines even on pre-Lollipop devices. The theme is
 * resolved from the context's current theme.
 *
 * @param context
 *         The context, which should be used by the builder, as an instance of the class
 *         {@link Context}. The context may not be null
 */
public Builder(@NonNull final Context context) {
    // -1 requests the default theme resolution (see initialize).
    this(context, -1);
}
/**
 * Creates a new builder, which allows to create bottom sheets, which are designed according
 * to Android 5's Material Design guidelines even on pre-Lollipop devices.
 *
 * @param context
 *         The context, which should be used by the builder, as an instance of the class
 *         {@link Context}. The context may not be null
 * @param themeResourceId
 *         The resource id of the theme, which should be used by the bottom sheet, as an
 *         {@link Integer} value. The resource id must correspond to a valid theme
 */
public Builder(@NonNull final Context context, @StyleRes final int themeResourceId) {
    initialize(context, themeResourceId);
}
/**
 * Returns the context, which is used by the builder.
 *
 * @return The context, which is used by the builder, as an instance of the class {@link
 * Context}
 */
public final Context getContext() {
    // The context is held by the bottom sheet, which is created eagerly in initialize.
    return bottomSheet.getContext();
}
/**
 * Sets, whether the bottom sheet, which is created by the builder, should be cancelable, or
 * not.
 *
 * @param cancelable
 *         True, if the bottom sheet, which is created by the builder, should be cancelable,
 *         false otherwise
 * @return The builder, the method has been called upon, as an instance of the class
 * {@link Builder}
 */
public final Builder setCancelable(final boolean cancelable) {
    // Fluent API: configure the bottom sheet under construction and return this builder.
    bottomSheet.setCancelable(cancelable);
    return this;
}
/**
 * Sets the style of the bottom sheet, which is created by the builder.
 *
 * @param style
 *         The style, which should be set, as a value of the enum {@link Style}. The style
 *         may either be <code>LIST</code>, <code>LIST_COLUMNS</code> or <code>GRID</code>
 * @return The builder, the method has been called upon, as an instance of the class
 * {@link Builder}
 */
public final Builder setStyle(@NonNull final Style style) {
    // Fluent API: configure the bottom sheet under construction and return this builder.
    bottomSheet.setStyle(style);
    return this;
}
/**
 * Sets the listener, which should be notified, when an item of the bottom sheet has been
 * clicked.
 *
 * @param listener
 *         The listener, which should be set, as an instance of the type {@link
 *         OnItemClickListener} or null, if no listener should be notified
 * @return The builder, the method has been called upon, as an instance of the class
 * {@link Builder}
 */
public final Builder setOnItemClickListener(@Nullable final OnItemClickListener listener) {
    // Fluent API: configure the bottom sheet under construction and return this builder.
    bottomSheet.setOnItemClickListener(listener);
    return this;
}
/**
 * Sets the listener, which should be notified, when an item of the bottom sheet has been
 * long-clicked.
 *
 * @param listener
 *         The listener, which should be set, as an instance of the type {@link
 *         OnItemLongClickListener} or null, if no listener should be notified
 * @return The builder, the method has been called upon, as an instance of the class
 * {@link Builder}
 */
public final Builder setOnItemLongClickListener(
        @Nullable final OnItemLongClickListener listener) {
    // Fluent API: configure the bottom sheet under construction and return this builder.
    bottomSheet.setOnItemLongClickListener(listener);
    return this;
}
/**
 * Sets the listener, which should be notified, when the bottom sheet, which is created by
 * the builder, has been maximized.
 *
 * @param listener
 *         The listener, which should be set, as an instance of the type {@link
 *         OnMaximizeListener} or null, if no listener should be notified
 * @return The builder, the method has been called upon, as an instance of the class
 * {@link Builder}
 */
public final Builder setOnMaximizeListener(@Nullable final OnMaximizeListener listener) {
    // Fluent API: configure the bottom sheet under construction and return this builder.
    bottomSheet.setOnMaximizeListener(listener);
    return this;
}
/**
 * Sets the listener, which should be notified, when the bottom sheet, which is created by
 * the builder, is canceled.
 * <p>
 * If you are interested in listening for all cases where the bottom sheet is dismissed and
 * not just when it is canceled, see {@link #setOnDismissListener(android.content.DialogInterface.OnDismissListener)
 * setOnDismissListener}.
 *
 * @param listener
 *         The listener, which should be set, as an instance of the type {@link
 *         OnCancelListener}, or null, if no listener should be set
 * @return The builder, the method has been called upon, as an instance of the class
 * {@link Builder}
 * @see #setOnDismissListener(android.content.DialogInterface.OnDismissListener)
 */
// NOTE(review): unlike the sibling setters this method is not declared final; confirm
// whether this is intentional before aligning it (adding final could break subclasses).
public Builder setOnCancelListener(@Nullable final OnCancelListener listener) {
    // Fluent API: configure the bottom sheet under construction and return this builder.
    bottomSheet.setOnCancelListener(listener);
    return this;
}
/**
 * Sets the listener, which should be notified, when the bottom sheet, which is created by
 * the builder, is dismissed for any reason.
 *
 * @param listener
 *         The listener, which should be set, as an instance of the type {@link
 *         OnDismissListener}, or null, if no listener should be set
 * @return The builder, the method has been called upon, as an instance of the class
 * {@link Builder}
 */
public final Builder setOnDismissListener(@Nullable final OnDismissListener listener) {
    // Fluent API: configure the bottom sheet under construction and return this builder.
    bottomSheet.setOnDismissListener(listener);
    return this;
}
/**
 * Sets the listener, which should be notified, if a key is dispatched to the bottom sheet,
 * which is created by the builder.
 *
 * @param listener
 *         The listener, which should be set, as an instance of the type {@link
 *         OnKeyListener}, or null, if no listener should be set
 * @return The builder, the method has been called upon, as an instance of the class
 * {@link Builder}
 */
public final Builder setOnKeyListener(@Nullable final OnKeyListener listener) {
    // Fluent API: configure the bottom sheet under construction and return this builder.
    bottomSheet.setOnKeyListener(listener);
    return this;
}
/**
 * Sets the color of the title of the bottom sheet, which is created by the builder.
 *
 * @param color
 *         The color, which should be set, as an {@link Integer} value or -1, if no custom
 *         color should be set
 * @return The builder, the method has been called upon, as an instance of the class
 * {@link Builder}
 */
public final Builder setTitleColor(@ColorInt final int color) {
    // Fluent API: configure the bottom sheet under construction and return this builder.
    bottomSheet.setTitleColor(color);
    return this;
}
/**
 * Sets the color of the items of the bottom sheet, which is created by the builder.
 *
 * @param color
 *         The color, which should be set, as an {@link Integer} value or -1, if no custom
 *         color should be set
 * @return The builder, the method has been called upon, as an instance of the class
 * {@link Builder}
 */
public final Builder setItemColor(@ColorInt final int color) {
    // Fluent API: configure the bottom sheet under construction and return this builder.
    bottomSheet.setItemColor(color);
    return this;
}
/**
 * Sets the color of the dividers of the bottom sheet, which is created by the builder.
 *
 * @param color
 *         The color, which should be set, as an {@link Integer} value or -1, if no custom
 *         color should be set
 * @return The builder, the method has been called upon, as an instance of the class
 * {@link Builder}
 */
public final Builder setDividerColor(@ColorInt final int color) {
    // Fluent API: configure the bottom sheet under construction and return this builder.
    bottomSheet.setDividerColor(color);
    return this;
}
/**
 * Sets the background of the bottom sheet, which is created by the builder.
 *
 * @param background
 *         The background, which should be set, as an instance of the class {@link Bitmap}
 *         or null, if no custom background should be set
 * @return The builder, the method has been called upon, as an instance of the class
 * {@link Builder}
 */
public final Builder setBackground(@Nullable final Bitmap background) {
    // Fluent API: configure the bottom sheet under construction and return this builder.
    bottomSheet.setBackground(background);
    return this;
}
/**
 * Sets the background of the bottom sheet, which is created by the builder.
 *
 * @param resourceId
 *         The resource id of the background, which should be set, as an {@link Integer}
 *         value. The resource id must correspond to a valid drawable resource
 * @return The builder, the method has been called upon, as an instance of the class
 * {@link Builder}
 */
public final Builder setBackground(@DrawableRes final int resourceId) {
    // Fluent API: configure the bottom sheet under construction and return this builder.
    bottomSheet.setBackground(resourceId);
    return this;
}
/**
 * Sets the background color of the bottom sheet, which is created by the builder.
 *
 * @param color
 *         The background color, which should be set, as an {@link Integer} value or -1, if
 *         no custom background color should be set
 * @return The builder, the method has been called upon, as an instance of the class
 * {@link Builder}
 */
public final Builder setBackgroundColor(@ColorInt final int color) {
    // Fluent API: configure the bottom sheet under construction and return this builder.
    bottomSheet.setBackgroundColor(color);
    return this;
}
/**
 * Sets the title of the bottom sheet, which is created by the builder.
 *
 * @param title
 *         The title, which should be set, as an instance of the type {@link CharSequence}
 *         or null, if no title should be shown
 * @return The builder, the method has been called upon, as an instance of the class
 * {@link Builder}
 */
public final Builder setTitle(@Nullable final CharSequence title) {
    // Fluent API: configure the bottom sheet under construction and return this builder.
    bottomSheet.setTitle(title);
    return this;
}
        /**
         * Sets the title of the bottom sheet, which is created by the builder.
         *
         * @param resourceId
         *         The resource id of the title. Must correspond to a valid string resource
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder setTitle(@StringRes final int resourceId) {
            bottomSheet.setTitle(resourceId);
            return this;
        }
        /**
         * Sets the icon of the bottom sheet, which is created by the builder.
         *
         * @param icon
         *         The icon as an instance of the class {@link Bitmap} or null, if no icon
         *         should be shown
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder setIcon(@Nullable final Bitmap icon) {
            bottomSheet.setIcon(icon);
            return this;
        }
        /**
         * Sets the icon of the bottom sheet, which is created by the builder.
         *
         * @param resourceId
         *         The resource id of the icon. Must correspond to a valid drawable resource
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder setIcon(@DrawableRes final int resourceId) {
            bottomSheet.setIcon(resourceId);
            return this;
        }
        /**
         * Sets the icon of the bottom sheet, which is created by the builder, from a theme
         * attribute.
         *
         * @param attributeId
         *         The id of the theme attribute, which supplies the icon. Must point to a
         *         valid drawable resource
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder setIconAttribute(@AttrRes final int attributeId) {
            bottomSheet.setIconAttribute(attributeId);
            return this;
        }
        /**
         * Sets the custom content view, which should be shown by the bottom sheet, which is
         * created by the builder.
         *
         * @param view
         *         The view as an instance of the class {@link View} or null, if no custom view
         *         should be shown
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder setView(@Nullable final View view) {
            bottomSheet.setView(view);
            return this;
        }
        /**
         * Sets the custom content view, which should be shown by the bottom sheet, which is
         * created by the builder.
         *
         * @param resourceId
         *         The resource id of the view. Must correspond to a valid layout resource
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder setView(@LayoutRes final int resourceId) {
            bottomSheet.setView(resourceId);
            return this;
        }
        /**
         * Sets the custom view, which should be used to show the title of the bottom sheet,
         * which is created by the builder.
         *
         * @param view
         *         The view as an instance of the class {@link View} or null, if no custom view
         *         should be used to show the title
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder setCustomTitle(@Nullable final View view) {
            bottomSheet.setCustomTitle(view);
            return this;
        }
        /**
         * Sets the custom view, which should be used to show the title of the bottom sheet,
         * which is created by the builder.
         *
         * @param resourceId
         *         The resource id of the view. Must correspond to a valid layout resource
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder setCustomTitle(@LayoutRes final int resourceId) {
            bottomSheet.setCustomTitle(resourceId);
            return this;
        }
        /**
         * Sets the sensitivity, which specifies the distance after which dragging has an
         * effect on the bottom sheet, in relation to an internal value range.
         *
         * @param dragSensitivity
         *         The drag sensitivity as a {@link Float} value. Must be at least 0 and at
         *         maximum 1
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder setDragSensitivity(final float dragSensitivity) {
            bottomSheet.setDragSensitivity(dragSensitivity);
            return this;
        }
        /**
         * Sets the dim amount, which should be used to darken the area outside the bottom
         * sheet, which is created by the builder.
         *
         * @param dimAmount
         *         The dim amount as a {@link Float} value. Must be at least 0 (fully
         *         transparent) and at maximum 1 (fully opaque)
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder setDimAmount(final float dimAmount) {
            bottomSheet.setDimAmount(dimAmount);
            return this;
        }
        /**
         * Sets the width of the bottom sheet, which is created by the builder. The width is
         * only used on tablet devices or in landscape mode.
         *
         * @param width
         *         The width in pixels. Must be at least 1
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder setWidth(final int width) {
            bottomSheet.setWidth(width);
            return this;
        }
        /**
         * Adds a new item to the bottom sheet, which is created by the builder.
         *
         * @param id
         *         The id of the item. Must be at least 0
         * @param title
         *         The title of the item as an instance of the type {@link CharSequence}. May
         *         neither be null, nor empty
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder addItem(final int id, @NonNull final CharSequence title) {
            bottomSheet.addItem(id, title);
            return this;
        }
        /**
         * Adds a new item to the bottom sheet, which is created by the builder.
         *
         * @param id
         *         The id of the item. Must be at least 0
         * @param title
         *         The title of the item as an instance of the type {@link CharSequence}. May
         *         neither be null, nor empty
         * @param icon
         *         The icon of the item as an instance of the class {@link Drawable}, or null,
         *         if no icon should be used
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder addItem(final int id, @NonNull final CharSequence title,
                                     @Nullable final Drawable icon) {
            bottomSheet.addItem(id, title, icon);
            return this;
        }
        /**
         * Adds a new item to the bottom sheet, which is created by the builder.
         *
         * @param id
         *         The id of the item. Must be at least 0
         * @param titleId
         *         The resource id of the title of the item. Must correspond to a valid string
         *         resource
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder addItem(final int id, @StringRes final int titleId) {
            bottomSheet.addItem(id, titleId);
            return this;
        }
        /**
         * Adds a new item to the bottom sheet, which is created by the builder.
         *
         * @param id
         *         The id of the item. Must be at least 0
         * @param titleId
         *         The resource id of the title of the item. Must correspond to a valid string
         *         resource
         * @param iconId
         *         The resource id of the icon of the item. Must correspond to a valid drawable
         *         resource
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder addItem(final int id, @StringRes final int titleId,
                                     @DrawableRes final int iconId) {
            bottomSheet.addItem(id, titleId, iconId);
            return this;
        }
        /**
         * Adds a new divider to the bottom sheet, which is created by the builder.
         *
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder addDivider() {
            bottomSheet.addDivider();
            return this;
        }
        /**
         * Adds a new divider to the bottom sheet, which is created by the builder.
         *
         * @param title
         *         The title of the divider as an instance of the type {@link CharSequence}, or
         *         null, if no title should be used
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder addDivider(@Nullable final CharSequence title) {
            bottomSheet.addDivider(title);
            return this;
        }
        /**
         * Adds a new divider to the bottom sheet, which is created by the builder.
         *
         * @param titleId
         *         The resource id of the divider's title. Must correspond to a valid string
         *         resource
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder addDivider(@StringRes final int titleId) {
            bottomSheet.addDivider(titleId);
            return this;
        }
        /**
         * Sets, whether the item at a specific index should be enabled, or not.
         *
         * @param index
         *         The index of the item as an {@link Integer} value
         * @param enabled
         *         True, if the item should be enabled, false otherwise
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder setItemEnabled(final int index, final boolean enabled) {
            bottomSheet.setItemEnabled(index, enabled);
            return this;
        }
        /**
         * Adds the apps, which are able to handle a specific intent, as items to the bottom
         * sheet, which is created by the builder. This causes all previously added items to be
         * removed. When an item is clicked, the corresponding app is started.
         *
         * @param activity
         *         The activity the bottom sheet belongs to, as an instance of the class
         *         {@link Activity}. May not be null
         * @param intent
         *         The intent as an instance of the class {@link Intent}. May not be null
         * @return The builder, the method has been called upon, to allow method chaining
         */
        public final Builder setIntent(@NonNull final Activity activity,
                                       @NonNull final Intent intent) {
            bottomSheet.setIntent(activity, intent);
            return this;
        }
        /**
         * Creates a bottom sheet with the arguments, which have been supplied to the builder.
         * Calling this method does not display the bottom sheet.
         *
         * @return The bottom sheet, which has been created, as an instance of the class
         *         {@link BottomSheet}
         */
        public final BottomSheet create() {
            return bottomSheet;
        }
        /**
         * Creates a bottom sheet with the arguments, which have been supplied to the builder,
         * and immediately displays it.
         *
         * @return The bottom sheet, which has been created, as an instance of the class
         *         {@link BottomSheet}
         */
        public final BottomSheet show() {
            bottomSheet.show();
            return bottomSheet;
        }
        /**
         * Creates a bottom sheet with the arguments, which have been supplied to the builder,
         * and immediately maximizes it.
         *
         * @return The bottom sheet, which has been created, as an instance of the class
         *         {@link BottomSheet}
         */
        @TargetApi(VERSION_CODES.FROYO)
        public final BottomSheet maximize() {
            bottomSheet.maximize();
            return bottomSheet;
        }
}
/**
* Contains all possible styles of a {@link BottomSheet}.
*/
public enum Style {
/**
* If the bottom sheet's items should be shown in a list.
*/
LIST,
/**
* If the bottom sheet's items should be shown as a two-columned list on tablet devices and
* in landscape mode.
*/
LIST_COLUMNS,
/**
* If the bottom sheet's items should be shown in a grid.
*/
GRID
}
    /** Bundle key, which is used to store the title of the bottom sheet. */
    private static final String TITLE_EXTRA = BottomSheet.class.getSimpleName() + "::title";

    /** Bundle key, which is used to store the bitmap of the bottom sheet's icon. */
    private static final String ICON_BITMAP_EXTRA =
            BottomSheet.class.getSimpleName() + "::iconBitmap";

    /** Bundle key, which is used to store the resource id of the bottom sheet's icon. */
    private static final String ICON_ID_EXTRA = BottomSheet.class.getSimpleName() + "::iconId";

    /** Bundle key, which is used to store the attribute id of the bottom sheet's icon. */
    private static final String ICON_ATTRIBUTE_ID_EXTRA =
            BottomSheet.class.getSimpleName() + "::iconAttributeId";

    /** Bundle key, which is used to store the color of the bottom sheet's title. */
    private static final String TITLE_COLOR_EXTRA =
            BottomSheet.class.getSimpleName() + "::titleColor";

    /** Bundle key, which is used to store the bitmap of the bottom sheet's background. */
    private static final String BACKGROUND_BITMAP_EXTRA =
            BottomSheet.class.getSimpleName() + "::backgroundBitmap";

    /** Bundle key, which is used to store the resource id of the bottom sheet's background. */
    private static final String BACKGROUND_ID_EXTRA =
            BottomSheet.class.getSimpleName() + "::backgroundId";

    /** Bundle key, which is used to store the color of the bottom sheet's background. */
    private static final String BACKGROUND_COLOR_EXTRA =
            BottomSheet.class.getSimpleName() + "::backgroundColor";

    /** Bundle key, which is used to store, whether the bottom sheet is cancelable, or not. */
    private static final String CANCELABLE_EXTRA =
            BottomSheet.class.getSimpleName() + "::cancelable";

    /**
     * Bundle key, which is used to store, whether the bottom sheet is canceled when it is
     * clicked outside, or not.
     */
    private static final String CANCELED_ON_TOUCH_OUTSIDE_EXTRA =
            BottomSheet.class.getSimpleName() + "::canceledOnTouchOutside";

    /** Bundle key, which is used to store the drag sensitivity. */
    private static final String DRAG_SENSITIVITY_EXTRA =
            BottomSheet.class.getSimpleName() + "::dragSensitivity";

    /** Bundle key, which is used to store the dim amount. */
    private static final String DIM_AMOUNT_EXTRA =
            BottomSheet.class.getSimpleName() + "::dimAmount";

    /** Bundle key, which is used to store the width of the bottom sheet. */
    private static final String WIDTH_EXTRA = BottomSheet.class.getSimpleName() + "::width";

    /**
     * The minimum value of the internal value range, which specifies after which distance
     * dragging has an effect on the bottom sheet (in pixels; see calculateDragSensitivity).
     */
    private static final int MIN_DRAG_SENSITIVITY = 10;

    /**
     * The maximum value of the internal value range, which specifies after which distance
     * dragging has an effect on the bottom sheet (in pixels; see calculateDragSensitivity).
     */
    private static final int MAX_DRAG_SENSITIVITY = 260;

    /** The root view of the bottom sheet. */
    private DraggableView rootView;

    /** The layout, which is used to show the bottom sheet's title. */
    private ViewGroup titleContainer;

    /**
     * The text view, which is used to show the bottom sheet's title, or null, if a custom
     * title view without a TextView with id android.R.id.title is used.
     */
    private TextView titleTextView;

    /** The layout, which is used to show the bottom sheet's content. */
    private ViewGroup contentContainer;

    /** The grid view, which is used to show the bottom sheet's items. */
    private GridView gridView;

    /** The adapter, which is used to manage the bottom sheet's items. */
    private DividableGridAdapter adapter;

    /** The title of the bottom sheet. */
    private CharSequence title;

    /** The icon of the bottom sheet. */
    private Drawable icon;

    /** The bitmap of the icon of the bottom sheet, or null, if the icon is not set from a bitmap. */
    private Bitmap iconBitmap;

    /** The resource id of the icon of the bottom sheet, or -1, if the icon is not set from a resource. */
    private int iconId = -1;

    /** The attribute id of the icon of the bottom sheet, or -1, if the icon is not set from an attribute. */
    private int iconAttributeId = -1;

    /** The color of the title of the bottom sheet, or -1, if no custom color is set. */
    private int titleColor = -1;

    /** The background of the bottom sheet. */
    private Drawable background;

    /** The bitmap of the background of the bottom sheet, or null, if it is not set from a bitmap. */
    private Bitmap backgroundBitmap;

    /** The resource id of the background of the bottom sheet, or -1, if it is not set from a resource. */
    private int backgroundId = -1;

    /** The color of the background of the bottom sheet, or -1, if no custom color is set. */
    private int backgroundColor = -1;

    /** True, if the bottom sheet is cancelable, false otherwise. */
    private boolean cancelable;

    /** True, if the bottom sheet is canceled, when the decor view is touched, false otherwise. */
    private boolean canceledOnTouchOutside;

    /**
     * The sensitivity, which specifies the distance after which dragging has an effect on the
     * bottom sheet, in relation to an internal value range.
     */
    private float dragSensitivity;

    /** The dim amount, which is used to darken the area outside the bottom sheet. */
    private float dimAmount;

    /** The width of the bottom sheet in pixels. */
    private int width;

    /** The custom content view of the bottom sheet, or null, if none is set. */
    private View customView;

    /** The resource id of the custom content view of the bottom sheet, or -1, if none is set. */
    private int customViewId = -1;

    /** The custom title view of the bottom sheet, or null, if none is set. */
    private View customTitleView;

    /** The resource id of the custom title view of the bottom sheet, or -1, if none is set. */
    private int customTitleViewId = -1;

    /** The listener, which is notified, when the bottom sheet has been shown. */
    private OnShowListener onShowListener;

    /** The listener, which is notified, when an item of the bottom sheet has been clicked. */
    private OnItemClickListener itemClickListener;

    /** The listener, which is notified, when an item of the bottom sheet has been long-clicked. */
    private OnItemLongClickListener itemLongClickListener;

    /** The listener, which is notified, when the bottom sheet is maximized. */
    private OnMaximizeListener maximizeListener;

    /**
     * True, if the bottom sheet should be maximized immediately after it has been shown, false
     * otherwise.
     */
    private boolean maximize;
    /**
     * Initializes the bottom sheet with its default width, a LIST-styled adapter and the
     * internal show listener (which handles deferred maximizing).
     */
    private void initialize() {
        width = getContext().getResources().getDimensionPixelSize(R.dimen.default_width);
        maximize = false;
        adapter = new DividableGridAdapter(getContext(), Style.LIST, width);
        // Register internally so the maximize-on-show behavior works; the user-supplied
        // OnShowListener is invoked from createOnShowListener().
        super.setOnShowListener(createOnShowListener());
    }
    /**
     * Creates and returns the layout params, which should be used to show the bottom sheet.
     * The dialog window is stretched to fill the whole screen.
     *
     * @return The layout params, which have been created, as an instance of the class {@link
     * android.view.WindowManager.LayoutParams}
     */
    private WindowManager.LayoutParams createLayoutParams() {
        WindowManager.LayoutParams layoutParams = getWindow().getAttributes();
        layoutParams.width = ViewGroup.LayoutParams.MATCH_PARENT;
        layoutParams.height = ViewGroup.LayoutParams.MATCH_PARENT;
        return layoutParams;
    }
    /**
     * Creates and returns the layout params, which should be used to show the bottom sheet's
     * root view. The root view is anchored to the bottom of the window, centered horizontally.
     *
     * @return The layout params, which have been created, as an instance of the class {@link
     * android.widget.FrameLayout.LayoutParams}
     */
    private FrameLayout.LayoutParams createRootViewLayoutParams() {
        FrameLayout.LayoutParams layoutParams =
                new FrameLayout.LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT,
                        FrameLayout.LayoutParams.MATCH_PARENT);
        layoutParams.gravity = Gravity.BOTTOM | Gravity.CENTER_HORIZONTAL;
        return layoutParams;
    }
    /**
     * Inflates the bottom sheet's root view and attaches it to the window's content view,
     * replacing any previous children.
     */
    private void inflateRootView() {
        ViewGroup contentView = findViewById(android.R.id.content);
        contentView.removeAllViews();
        LayoutInflater layoutInflater = LayoutInflater.from(getContext());
        rootView =
                (DraggableView) layoutInflater.inflate(R.layout.bottom_sheet, contentView, false);
        // The bottom sheet receives drag callbacks from the root view.
        rootView.setCallback(this);
        contentView.addView(rootView, createRootViewLayoutParams());
    }
/**
* Inflates the layout, which is used to show the bottom sheet's title. The layout may either be
* the default one or a custom view, if one has been set before.
*/
private void inflateTitleView() {
titleContainer = rootView.findViewById(R.id.title_container);
titleContainer.removeAllViews();
if (customTitleView != null) {
titleContainer.addView(customTitleView);
} else if (customTitleViewId != -1) {
LayoutInflater layoutInflater = LayoutInflater.from(getContext());
View view = layoutInflater.inflate(customTitleViewId, titleContainer, false);
titleContainer.addView(view);
} else {
LayoutInflater layoutInflater = LayoutInflater.from(getContext());
View view = layoutInflater.inflate(R.layout.bottom_sheet_title, titleContainer, false);
titleContainer.addView(view);
}
if (getStyle() == Style.LIST) {
int padding = getContext().getResources()
.getDimensionPixelSize(R.dimen.bottom_sheet_list_item_horizontal_padding);
titleContainer.setPadding(padding, 0, padding, 0);
} else {
int padding = getContext().getResources()
.getDimensionPixelSize(R.dimen.bottom_sheet_grid_item_horizontal_padding);
titleContainer.setPadding(padding, 0, padding, 0);
}
View titleView = titleContainer.findViewById(android.R.id.title);
titleTextView = titleView instanceof TextView ? (TextView) titleView : null;
}
    /**
     * Inflates the layout, which is used to show the bottom sheet's content. The layout may
     * either be the default grid view or a custom view, if one has been set before.
     */
    private void inflateContentView() {
        contentContainer = rootView.findViewById(R.id.content_container);
        contentContainer.removeAllViews();

        if (customView != null) {
            contentContainer.setVisibility(View.VISIBLE);
            contentContainer.addView(customView);
        } else if (customViewId != -1) {
            contentContainer.setVisibility(View.VISIBLE);
            LayoutInflater layoutInflater = LayoutInflater.from(getContext());
            View view = layoutInflater.inflate(customViewId, contentContainer, false);
            contentContainer.addView(view);
        } else {
            // Default content: a grid view showing the bottom sheet's items.
            LayoutInflater layoutInflater = LayoutInflater.from(getContext());
            View view = layoutInflater
                    .inflate(R.layout.bottom_sheet_grid_view, contentContainer, false);
            contentContainer.addView(view);
        }

        showGridView();
    }
    /**
     * Configures and shows the grid view, which is used to show the bottom sheet's items, if
     * it is present in the content container. Padding and column count depend on the bottom
     * sheet's style, device type and orientation.
     */
    private void showGridView() {
        gridView = contentContainer.findViewById(R.id.bottom_sheet_grid_view);

        if (gridView != null) {
            contentContainer.setVisibility(View.VISIBLE);

            if (getStyle() == Style.GRID) {
                int horizontalPadding = getContext().getResources()
                        .getDimensionPixelSize(R.dimen.bottom_sheet_grid_item_horizontal_padding);
                int paddingBottom = getContext().getResources()
                        .getDimensionPixelSize(R.dimen.bottom_sheet_grid_padding_bottom);
                gridView.setPadding(horizontalPadding, 0, horizontalPadding, paddingBottom);
                gridView.setNumColumns(GridView.AUTO_FIT);
                gridView.setColumnWidth(getContext().getResources()
                        .getDimensionPixelSize(R.dimen.bottom_sheet_grid_item_size));
            } else {
                int paddingBottom = getContext().getResources()
                        .getDimensionPixelSize(R.dimen.bottom_sheet_list_padding_bottom);
                gridView.setPadding(0, 0, 0, paddingBottom);
                // LIST_COLUMNS uses two columns on tablets or in landscape mode only.
                gridView.setNumColumns(getStyle() == Style.LIST_COLUMNS &&
                        (getDeviceType(getContext()) == DisplayUtil.DeviceType.TABLET ||
                                getOrientation(getContext()) == DisplayUtil.Orientation.LANDSCAPE) ?
                        2 : 1);
            }

            gridView.setOnItemClickListener(createItemClickListener());
            gridView.setOnItemLongClickListener(createItemLongClickListener());
            gridView.setAdapter(adapter);
        }
    }
    /**
     * Adapts the view, which is used to show the dialog's content, by re-inflating it, if it
     * has already been created.
     */
    private void adaptContentView() {
        if (contentContainer != null) {
            inflateContentView();
        }
    }
/**
* Adapts the root view.
*/
private void adaptRootView() {
if (rootView != null) {
if (getStyle() == Style.LIST) {
int paddingTop = getContext().getResources()
.getDimensionPixelSize(R.dimen.bottom_sheet_list_padding_top);
rootView.setPadding(0, paddingTop, 0, 0);
} else {
int paddingTop = getContext().getResources()
.getDimensionPixelSize(R.dimen.bottom_sheet_grid_padding_top);
rootView.setPadding(0, paddingTop, 0, 0);
}
}
}
    /**
     * Adapts the view, which is used to show the dialog's title, by re-inflating it and
     * re-applying title, title color and icon, if it has already been created.
     */
    private void adaptTitleView() {
        if (titleContainer != null) {
            inflateTitleView();
            adaptTitle();
            adaptTitleColor();
            adaptIcon();
        }
    }
    /**
     * Adapts the color of the bottom sheet's title, if a custom color (!= -1) has been set and
     * a title text view is available.
     */
    private void adaptTitleColor() {
        if (titleTextView != null && titleColor != -1) {
            titleTextView.setTextColor(titleColor);
        }
    }
    /**
     * Adapts the bottom sheet's title and updates the title container's visibility
     * accordingly.
     */
    private void adaptTitle() {
        if (titleTextView != null) {
            titleTextView.setText(title);
        }

        adaptTitleContainerVisibility();
    }
    /**
     * Adapts the bottom sheet's icon, which is shown to the left of the title, and updates the
     * title container's visibility accordingly.
     */
    private void adaptIcon() {
        if (titleTextView != null) {
            titleTextView.setCompoundDrawablesWithIntrinsicBounds(icon, null, null, null);
        }

        adaptTitleContainerVisibility();
    }
/**
* Adapts the visibility of the layout, which is used to show the bottom sheet's title.
*/
private void adaptTitleContainerVisibility() {
if (titleContainer != null) {
if (customTitleView == null && customTitleViewId == -1) {
titleContainer.setVisibility(
!TextUtils.isEmpty(title) || icon != null ? View.VISIBLE : View.GONE);
} else {
titleContainer.setVisibility(View.VISIBLE);
}
}
}
    /**
     * Adapts the bottom sheet's background, if one has been set and the root view has already
     * been created.
     */
    private void adaptBackground() {
        if (rootView != null && background != null) {
            ViewUtil.setBackground(rootView, background);
        }
    }
    /**
     * Adapts the bottom sheet's drag sensitivity by translating it into a pixel distance and
     * applying it to the root view, if the root view has already been created.
     */
    private void adaptDragSensitivity() {
        if (rootView != null) {
            rootView.setDragSensitivity(calculateDragSensitivity());
        }
    }
    /**
     * Adapts the width of the bottom sheet by notifying the adapter and the root view, if the
     * latter has already been created.
     */
    private void adaptWidth() {
        adapter.setWidth(width);

        if (rootView != null) {
            rootView.setWidth(width);
            rootView.requestLayout();
        }
    }
    /**
     * Adapts the height of the grid view, which is used to show the bottom sheet's items, to
     * the height of its children, if it supports doing so.
     */
    private void adaptGridViewHeight() {
        if (gridView instanceof DividableGridView) {
            ((DividableGridView) gridView).adaptHeightToChildren();
        }
    }
    /**
     * Creates and returns a listener, which allows to immediately maximize the bottom sheet
     * after it has been shown. It also forwards the event to the user-supplied {@link
     * OnShowListener}, if one has been set.
     *
     * @return The listener, which has been created, as an instance of the type {@link
     * OnShowListener}
     */
    @TargetApi(VERSION_CODES.FROYO)
    private OnShowListener createOnShowListener() {
        return new OnShowListener() {

            @Override
            public void onShow(final DialogInterface dialog) {
                if (onShowListener != null) {
                    onShowListener.onShow(dialog);
                }

                // Consume the pending maximize request, if one has been scheduled.
                if (maximize) {
                    maximize = false;
                    rootView.maximize(new DecelerateInterpolator());
                }
            }

        };
    }
/**
* Creates and returns a listener, which allows to cancel the bottom sheet, when the decor view
* is touched.
*
* @return The listener, which has been created, as an instance of the type {@link
* View.OnTouchListener}
*/
private View.OnTouchListener createCancelOnTouchListener() {
return new View.OnTouchListener() {
@Override
public boolean onTouch(final View v, final MotionEvent event) {
if (cancelable && canceledOnTouchOutside) {
cancel();
v.performClick();
return true;
}
return false;
}
};
}
/**
* Calculates and returns the distance after which dragging has an effect on the bottom sheet in
* pixels. The distance depends on the current set drag sensitivity, which corresponds to an
* internal value range.
*
* @return The distance after which dragging has an effect on the bottom sheet in pixels as an
* {@link Integer} value
*/
private int calculateDragSensitivity() {
int range = MAX_DRAG_SENSITIVITY - MIN_DRAG_SENSITIVITY;
return Math.round((1 - getDragSensitivity()) * range + MIN_DRAG_SENSITIVITY);
}
    /**
     * Creates and returns a listener, which allows to observe when the items of a bottom sheet
     * have been clicked. Clicks are ignored while the sheet is being dragged or animated, and
     * the bottom sheet is dismissed after every click.
     *
     * @return The listener, which has been created, as an instance of the type {@link
     * OnItemClickListener}
     */
    private OnItemClickListener createItemClickListener() {
        return new OnItemClickListener() {

            @Override
            public void onItemClick(final AdapterView<?> parent, final View view,
                                    final int position, final long id) {
                if (itemClickListener != null && !rootView.isDragging() &&
                        !rootView.isAnimationRunning()) {
                    int index = position;

                    // Translate the adapter position into an item index by subtracting the
                    // dividers and placeholder cells, which precede the clicked position.
                    if (adapter.containsDividers()) {
                        for (int i = position; i >= 0; i--) {
                            if (adapter.getItem(i) == null ||
                                    (adapter.getItem(i) instanceof Divider &&
                                            i % adapter.getColumnCount() > 0)) {
                                index--;
                            }
                        }
                    }

                    itemClickListener.onItemClick(parent, view, index, getId(position));
                }

                dismiss();
            }

        };
    }
    /**
     * Creates and returns a listener, which allows to observe when the items of a bottom sheet
     * have been long-clicked. Long-clicks are ignored while the sheet is being dragged or
     * animated.
     *
     * @return The listener, which has been created, as an instance of the type {@link
     * OnItemLongClickListener}
     */
    private OnItemLongClickListener createItemLongClickListener() {
        return new OnItemLongClickListener() {

            @Override
            public boolean onItemLongClick(final AdapterView<?> parent, final View view,
                                           final int position, final long id) {
                if (!rootView.isDragging() && !rootView.isAnimationRunning() &&
                        itemLongClickListener != null) {
                    int index = position;

                    // Translate the adapter position into an item index by subtracting the
                    // dividers and placeholder cells, which precede the clicked position.
                    if (adapter.containsDividers()) {
                        for (int i = position; i >= 0; i--) {
                            if (adapter.getItem(i) == null ||
                                    (adapter.getItem(i) instanceof Divider &&
                                            i % adapter.getColumnCount() > 0)) {
                                index--;
                            }
                        }
                    }

                    return itemLongClickListener
                            .onItemLongClick(parent, view, index, getId(position));
                }

                return false;
            }

        };
    }
    /**
     * Creates and returns a listener, which allows to start an app, when an item of the bottom
     * sheet has been clicked. The bottom sheet is dismissed afterwards.
     *
     * @param activity
     *         The activity, the bottom sheet belongs to, as an instance of the class {@link
     *         Activity}. The activity may not be null
     * @param intent
     *         The intent, which should be passed to the started app, as an instance of the
     *         class {@link Intent}. The intent may not be null
     * @param resolveInfos
     *         A list, which contains the resolve infos, which correspond to the apps, which
     *         are able to handle the intent, as an instance of the type {@link List} or an
     *         empty list, if no apps are able to handle the intent
     * @return The listener, which has been created, as an instance of the type {@link
     * OnItemClickListener}
     */
    private OnItemClickListener createIntentClickListener(@NonNull final Activity activity,
                                                          @NonNull final Intent intent,
                                                          @NonNull final List<ResolveInfo> resolveInfos) {
        return new OnItemClickListener() {

            @Override
            public void onItemClick(final AdapterView<?> parent, final View view,
                                    final int position, final long id) {
                // Items are expected to be in the same order as the given resolve infos.
                ActivityInfo activityInfo = resolveInfos.get(position).activityInfo;
                ComponentName componentName =
                        new ComponentName(activityInfo.applicationInfo.packageName,
                                activityInfo.name);
                intent.setFlags(
                        Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_RESET_TASK_IF_NEEDED);
                intent.setComponent(componentName);
                activity.startActivity(intent);
                dismiss();
            }

        };
    }
    /**
     * Notifies the listener, which has been registered to be notified, when the bottom sheet
     * has been maximized, about the bottom sheet being maximized.
     */
    private void notifyOnMaximize() {
        if (maximizeListener != null) {
            maximizeListener.onMaximize(this);
        }
    }
    /**
     * Creates a bottom sheet, which is designed according to Android 5's Material Design
     * guidelines even on pre-Lollipop devices.
     *
     * @param context
     *         The context, which should be used by the bottom sheet, as an instance of the
     *         class {@link Context}. The context may not be null
     * @param themeResourceId
     *         The resource id of the theme, which should be used by the bottom sheet, as an
     *         {@link Integer} value. The resource id must correspond to a valid theme
     */
    protected BottomSheet(@NonNull final Context context, @StyleRes final int themeResourceId) {
        super(context, themeResourceId);
        initialize();
    }
    /**
     * Sets the listener, which should be notified, when an item of the bottom sheet has been
     * clicked.
     *
     * @param listener
     *         The listener, which should be set, as an instance of the type {@link
     *         OnItemClickListener} or null, if no listener should be notified
     */
    public final void setOnItemClickListener(@Nullable final OnItemClickListener listener) {
        this.itemClickListener = listener;
    }
    /**
     * Sets the listener, which should be notified, when an item of the bottom sheet has been
     * long-clicked.
     *
     * @param listener
     *         The listener, which should be set, as an instance of the type {@link
     *         OnItemLongClickListener} or null, if no listener should be notified
     */
    public final void setOnItemLongClickListener(@Nullable final OnItemLongClickListener listener) {
        this.itemLongClickListener = listener;
    }
/**
* Sets the listener, which should be notified, when the bottom sheet has been maximized.
*
* @param listener
* The listener, which should be set, as an instance of the type {@link
* OnMaximizeListener} or null, if no listener should be notified
*/
public final void setOnMaximizeListener(@Nullable final OnMaximizeListener listener) {
this.maximizeListener = listener;
}
/**
* Returns the grid view, which is contained by the bottom sheet.
*
* @return The grid view, which is contained by the bottom sheet, as an instance of the class
* {@link GridView} or null, if the bottom sheet does not show any items or has not been shown
* yet
*/
public final GridView getGridView() {
return (gridView != null && gridView.getVisibility() == View.VISIBLE) ? gridView : null;
}
/**
* Returns the adapter of the grid view, which is contained by the bottom sheet.
*
* @return The adapter of the grid view, which is contained by the bottom sheet, as an instance
* of the type {@link ListAdapter}
*/
public final ListAdapter getListAdapter() {
return adapter;
}
/**
* Returns the icon of the bottom sheet.
*
* @return The icon of the bottom sheet, as an instance of the class {@link Drawable} or null,
* if no icon has been set
*/
public final Drawable getIcon() {
return icon;
}
/**
* Sets the icon of the bottom sheet.
*
* @param icon
* The icon, which should be set, as an instance of the class {@link Bitmap} or null, if
* no icon should be shown
*/
public final void setIcon(@Nullable final Bitmap icon) {
this.icon = new BitmapDrawable(getContext().getResources(), icon);
this.iconBitmap = icon;
this.iconId = -1;
this.iconAttributeId = -1;
adaptIcon();
}
/**
* Sets the icon of the bottom sheet.
*
* @param resourceId
* The resource id of the icon, which should be set, as an {@link Integer} value. The
* resource id must correspond to a valid drawable resource
*/
public final void setIcon(@DrawableRes final int resourceId) {
this.icon = ContextCompat.getDrawable(getContext(), resourceId);
this.iconBitmap = null;
this.iconId = resourceId;
this.iconAttributeId = -1;
adaptIcon();
}
/**
* Set the icon of the bottom sheet.
*
* @param attributeId
* The id of the theme attribute, which supplies the icon, which should be set, as an
* {@link Integer} value. The id must point to a valid drawable resource
*/
public final void setIconAttribute(@AttrRes final int attributeId) {
TypedArray typedArray =
getContext().getTheme().obtainStyledAttributes(new int[]{attributeId});
this.icon = typedArray.getDrawable(0);
this.iconBitmap = null;
this.iconId = -1;
this.iconAttributeId = attributeId;
adaptIcon();
}
/**
* Returns the color of the title of the bottom sheet.
*
* @return The color of the title of the bottom sheet as an {@link Integer} value or -1, if no
* custom color has been set
*/
public final int getTitleColor() {
return titleColor;
}
/**
* Sets the color of the title of the bottom sheet.
*
* @param color
* The color, which should be set, as an {@link Integer} value
*/
public final void setTitleColor(@ColorInt final int color) {
titleColor = color;
adaptTitleColor();
}
/**
* Returns the color of the items of the bottom sheet.
*
* @return The color of the items of the bottom sheet as an {@link Integer} value or -1, if no
* custom color has been set
*/
public final int getItemColor() {
return adapter.getItemColor();
}
/**
* Sets the color of the items of the bottom sheet.
*
* @param color
* The color, which should be set, as an {@link Integer} value or -1, if no custom color
* should be set
*/
public final void setItemColor(@ColorInt final int color) {
adapter.setItemColor(color);
adapter.notifyDataSetChanged();
}
/**
* Returns the color of the dividers of the bottom sheet.
*
* @return The color of the dividers of the bottom sheet as an {@link Integer} value or -1, if
* no custom color has been set
*/
public final int getDividerColor() {
return adapter.getDividerColor();
}
/**
* Sets the color of the dividers of the bottom sheet.
*
* @param color
* The color, which should be set, as an {@link Integer} value or -1, if no custom color
* should be set
*/
public final void setDividerColor(@ColorInt final int color) {
adapter.setDividerColor(color);
adapter.notifyDataSetChanged();
}
/**
* Returns the background of the bottom sheet.
*
* @return The background of the bottom sheet as an instance of the class {@link Drawable} or
* null, if no custom background has been set
*/
public final Drawable getBackground() {
return background;
}
/**
* Sets the background of the bottom sheet.
*
* @param background
* The background, which should be set, as an instance of the class {@link Bitmap} or
* null, if no custom background should be set
*/
public final void setBackground(@Nullable final Bitmap background) {
this.background = new BitmapDrawable(getContext().getResources(), background);
this.backgroundBitmap = background;
this.backgroundId = -1;
this.backgroundColor = -1;
adaptBackground();
}
/**
* Sets the background of the bottom sheet.
*
* @param resourceId
* The resource id of the background, which should be set, as an {@link Integer} value.
* The resource id must correspond to a valid drawable resource
*/
public final void setBackground(@DrawableRes final int resourceId) {
this.background = ContextCompat.getDrawable(getContext(), resourceId);
this.backgroundBitmap = null;
this.backgroundId = -1;
this.backgroundColor = -1;
adaptBackground();
}
/**
* Sets the background color of the bottom sheet.
*
* @param color
* The background color, which should be set, as an {@link Integer} value or -1, if no
* custom background color should be set
*/
public final void setBackgroundColor(@ColorInt final int color) {
this.background = new ColorDrawable(color);
this.backgroundBitmap = null;
this.backgroundId = -1;
this.backgroundColor = color;
adaptBackground();
}
/**
* Sets the custom view, which should be used to show the title of the bottom sheet.
*
* @param view
* The view, which should be set, as an instance of the class {@link View} or null, if
* no custom view should be used to show the title
*/
public final void setCustomTitle(@Nullable final View view) {
customTitleView = view;
customTitleViewId = -1;
adaptTitleView();
}
/**
* Sets the custom view, which should be used to show the title of the bottom sheet.
*
* @param resourceId
* The resource id of the view, which should be set, as an {@link Integer} value. The
* resource id must correspond to a valid layout resource
*/
public final void setCustomTitle(@LayoutRes final int resourceId) {
customTitleView = null;
customTitleViewId = resourceId;
adaptTitleView();
}
    /**
     * Sets the custom view, which should be shown by the bottom sheet.
     *
     * @param view
     *         The view, which should be set, as an instance of the class {@link View} or null, if
     *         no custom view should be shown
     */
    public final void setView(@Nullable final View view) {
        customView = view;
        customViewId = -1;
        // NOTE(review): unlike setView(int), this overload does not call
        // adaptGridViewHeight() — confirm whether that is intentional.
        adaptContentView();
    }
/**
* Sets the custom view, which should be shown by the bottom sheet.
*
* @param resourceId
* The resource id of the view, which should be set, as an {@link Integer} value. The
* resource id must correspond to a valid layout resource
*/
public final void setView(@LayoutRes final int resourceId) {
customView = null;
customViewId = resourceId;
adaptContentView();
adaptGridViewHeight();
}
/**
* Returns the sensitivity, which specifies the distance after which dragging has an effect on
* the bottom sheet, in relation to an internal value range.
*
* @return The drag sensitivity as a {@link Float} value. The drag sensitivity must be at lest 0
* and at maximum 1
*/
public final float getDragSensitivity() {
return dragSensitivity;
}
/**
* Sets the sensitivity, which specifies the distance after which dragging has an effect on the
* bottom sheet, in relation to an internal value range.
*
* @param dragSensitivity
* The drag sensitivity, which should be set, as a {@link Float} value. The drag
* sensitivity must be at lest 0 and at maximum 1
*/
public final void setDragSensitivity(final float dragSensitivity) {
Condition.INSTANCE
.ensureAtLeast(dragSensitivity, 0, "The drag sensitivity must be at least 0");
Condition.INSTANCE
.ensureAtMaximum(dragSensitivity, 1, "The drag sensitivity must be at maximum 1");
this.dragSensitivity = dragSensitivity;
adaptDragSensitivity();
}
/**
* Returns the dim amount, which is used to darken the area outside the bottom sheet.
*
* @return The dim amount, which is used to darken the area outside the bottom sheet, as a
* {@link Float} value
*/
public final float getDimAmount() {
return dimAmount;
}
/**
* Sets the dim amount, which should be used to darken the area outside the bottom sheet.
*
* @param dimAmount
* The dim amount, which should be set, as a {@link Float} value. The dim amount must be
* at least 0 (fully transparent) and at maximum 1 (fully opaque)
*/
public final void setDimAmount(final float dimAmount) {
Condition.INSTANCE.ensureAtLeast(dimAmount, 0, "The dim amount must be at least 0");
Condition.INSTANCE.ensureAtMaximum(dimAmount, 1, "The dim amount must be at maximum 1");
this.dimAmount = dimAmount;
getWindow().getAttributes().dimAmount = dimAmount;
}
/**
* Returns the width of the bottom sheet. The width is only used on tablet devices or in
* landscape mode.
*
* @return The width of the bottom sheet in pixels as an {@link Integer} value
*/
public final int getWidth() {
return width;
}
/**
* Sets the width of the bottom sheet. The width is only used on tablet devices or in landscape
* mode.
*
* @param width
* The width, which should be set, in pixels as an {@link Integer} value. The width must
* be at least 1
*/
public final void setWidth(final int width) {
Condition.INSTANCE.ensureAtLeast(width, 1, "The width must be at least 1");
this.width = width;
adaptWidth();
}
/**
* Adds a new item to the bottom sheet.
*
* @param id
* The id of the item, which should be added, as an {@link Integer} value. The id must
* be at least 0
* @param title
* The title of the item, which should be added, as an instance of the type {@link
* CharSequence}. The title may neither be null, nor empty
*/
public final void addItem(final int id, @NonNull final CharSequence title) {
Item item = new Item(id, title);
adapter.add(item);
adaptGridViewHeight();
}
/**
* Adds a new item to the bottom sheet.
*
* @param id
* The id of the item, which should be added, as an {@link Integer} value. The id must
* be at least 0
* @param title
* The title of the item, which should be added, as an instance of the type {@link
* CharSequence}. The title may neither be null, nor empty
* @param icon
* The icon of the item, which should be added, as an instance of the class {@link
* Drawable}, or null, if no item should be used
*/
public final void addItem(final int id, @NonNull final CharSequence title,
@Nullable final Drawable icon) {
Item item = new Item(id, title);
item.setIcon(icon);
adapter.add(item);
adaptGridViewHeight();
}
/**
* Adds a new item to the bottom sheet.
*
* @param id
* The id of the item, which should be added, as an {@link Integer} value. The id must
* be at least 0
* @param titleId
* The resource id of the title of the item, which should be added, as an {@link
* Integer} value. The resource id must correspond to a valid string resource
*/
public final void addItem(final int id, @StringRes final int titleId) {
Item item = new Item(getContext(), id, titleId);
adapter.add(item);
adaptGridViewHeight();
}
/**
* Adds a new item to the bottom sheet.
*
* @param id
* The id of the item, which should be added, as an {@link Integer} value. The id must
* be at least 0
* @param titleId
* The resource id of the title of the item, which should be added, as an {@link
* Integer} value. The resource id must correspond to a valid string resource
* @param iconId
* The resource id of the icon of the item, which should be added, as an {@link Integer}
* value. The resource id must correspond to a valid drawable resource
*/
public final void addItem(final int id, @StringRes final int titleId,
@DrawableRes final int iconId) {
Item item = new Item(getContext(), id, titleId);
item.setIcon(getContext(), iconId);
adapter.add(item);
adaptGridViewHeight();
}
/**
* Replaces the item at a specific index with another item.
*
* @param index
* The index of the item, which should be replaced, as an {@link Integer} value
* @param id
* The id of the item, which should be added, as an {@link Integer} value. The id must
* be at least 0
* @param title
* The title of the item, which should be added, as an instance of the type {@link
* CharSequence}. The title may neither be null, nor empty
*/
public final void setItem(final int index, final int id, @NonNull final CharSequence title) {
Item item = new Item(id, title);
adapter.set(index, item);
adaptGridViewHeight();
}
/**
* Replaces the item at a specific index with another item.
*
* @param index
* The index of the item, which should be replaced, as an {@link Integer} value
* @param id
* The id of the item, which should be added, as an {@link Integer} value. The id must
* be at least 0
* @param title
* The title of the item, which should be added, as an instance of the type {@link
* CharSequence}. The title may neither be null, nor empty
* @param icon
* The icon of the item, which should be added, as an instance of the class {@link
* Drawable}, or null, if no item should be used
*/
public final void setItem(final int index, final int id, @NonNull final CharSequence title,
@Nullable final Drawable icon) {
Item item = new Item(id, title);
item.setIcon(icon);
adapter.set(index, item);
adaptGridViewHeight();
}
/**
* Replaces the item at a specific index with another item.
*
* @param index
* The index of the item, which should be replaced, as an {@link Integer} value
* @param id
* The id of the item, which should be added, as an {@link Integer} value. The id must
* be at least 0
* @param titleId
* The resource id of the title of the item, which should be added, as an {@link
* Integer} value. The resource id must correspond to a valid string resource
*/
public final void setItem(final int index, final int id, @StringRes final int titleId) {
Item item = new Item(getContext(), id, titleId);
adapter.set(index, item);
adaptGridViewHeight();
}
/**
* Replaces the item at a specific index with another item.
*
* @param index
* The index of the item, which should be replaced, as an {@link Integer} value
* @param id
* The id of the item, which should be added, as an {@link Integer} value. The id must
* be at least 0
* @param titleId
* The resource id of the title of the item, which should be added, as an {@link
* Integer} value. The resource id must correspond to a valid string resource
* @param iconId
* The resource id of the icon of the item, which should be added, as an {@link Integer}
* value. The resource id must correspond to a valid drawable resource
*/
public final void setItem(final int index, final int id, @StringRes final int titleId,
@DrawableRes final int iconId) {
Item item = new Item(getContext(), id, titleId);
item.setIcon(getContext(), iconId);
adapter.set(index, item);
adaptGridViewHeight();
}
/**
* Adds a new divider to the bottom sheet.
*/
public final void addDivider() {
adapter.add(new Divider());
adaptGridViewHeight();
}
/**
* Adds a new divider to the bottom sheet.
*
* @param title
* The title of the divider, which should be added, as an instance of the type {@link
* CharSequence}, or null, if no title should be used
*/
public final void addDivider(@Nullable final CharSequence title) {
Divider divider = new Divider();
divider.setTitle(title);
adapter.add(divider);
adaptGridViewHeight();
}
/**
* Adds a new divider to the bottom sheet.
*
* @param titleId
* The resource id of the title of the divider, which should be added, as an {@link
* Integer} value. The resource id must correspond to a valid string resource
*/
public final void addDivider(@StringRes final int titleId) {
Divider divider = new Divider();
divider.setTitle(getContext(), titleId);
adapter.add(divider);
adaptGridViewHeight();
}
/**
* Replaces the item at a specific index with a divider.
*
* @param index
* The index of the item, which should be replaced, as an {@link Integer} value
*/
public final void setDivider(final int index) {
Divider divider = new Divider();
adapter.set(index, divider);
adaptGridViewHeight();
}
/**
* Replaces the item at a specific index with a divider.
*
* @param index
* The index of the item, which should be replaced, as an {@link Integer} value
* @param title
* The title of the divider, which should be added, as an instance of the type {@link
* CharSequence}, or null, if no title should be used
*/
public final void setDivider(final int index, @Nullable final CharSequence title) {
Divider divider = new Divider();
divider.setTitle(title);
adapter.set(index, divider);
adaptGridViewHeight();
}
/**
* Replaces the item at a specific index with a divider.
*
* @param index
* The index of the item, which should be replaced, as an {@link Integer} value
* @param titleId
* The resource id of the title of the divider, which should be added, as an {@link
* Integer} value. The resource id must correspond to a valid string resource
*/
public final void setDivider(final int index, @StringRes final int titleId) {
Divider divider = new Divider();
divider.setTitle(getContext(), titleId);
adapter.set(index, divider);
adaptGridViewHeight();
}
/**
* Removes the item with at a specific index from the bottom sheet.
*
* @param index
* The index of the item, which should be removed, as an {@link Integer} value
*/
public final void removeItem(final int index) {
adapter.remove(index);
adaptGridViewHeight();
}
/**
* Removes all items from the bottom sheet.
*/
public final void removeAllItems() {
adapter.clear();
adaptGridViewHeight();
}
/**
* Returns, whether the bottom sheet contains any items, or not.
*
* @return True, if the bottom sheet contains any items, false otherwise
*/
public final boolean isEmpty() {
return adapter.isEmpty();
}
/**
* Returns the number of items, which are currently contained by the bottom sheet.
*
* @return The number of items, which are currently contained by the bottom sheet, as an {@link
* Integer} value or -1, if the bottom sheet does not show any items or has not been shown yet
*/
public final int getItemCount() {
return adapter.getItemCount();
}
/**
* Returns the index of the item, which corresponds to a specific id.
*
* @param id
* The id of the item, whose index should be returned, as an {@link Integer} value. The
* id must be at least 0
* @return The index of the item, which corresponds to the given id, or -1, if no item, which
* corresponds to the given id, is contained by the bottom sheet
*/
public final int indexOf(final int id) {
Condition.INSTANCE.ensureAtLeast(id, 0, "The id must be at least 0");
for (int i = 0; i < getItemCount(); i++) {
AbstractItem item = adapter.getItem(i);
if (item.getId() == id) {
return i;
}
}
return -1;
}
/**
* Returns the id of the item, which corresponds to a specific index.
*
* @param index
* The index of the item, whose id should be returned, as an {@link Integer} value
* @return The id of the item, which corresponds to the given position, or
* <code>Divider#DIVIDER_ID</code>, if the item is a divider
*/
public final int getId(final int index) {
return adapter.getItem(index).getId();
}
/**
* Returns, whether the item at a specific index is enabled, or not.
*
* @param index
* The index of the item, which should be checked, as an {@link Integer} value
* @return True, if the item is enabled, false otherwise
*/
public final boolean isItemEnabled(final int index) {
return adapter.isItemEnabled(index);
}
/**
* Sets, whether the item at a specific index should be enabled, or not.
*
* @param index
* The index of the item as an {@link Integer} value
* @param enabled
* True, if the item should be enabled, false otherwise
*/
public final void setItemEnabled(final int index, final boolean enabled) {
adapter.setItemEnabled(index, enabled);
}
/**
* Adds the apps, which are able to handle a specific intent, as items to the bottom sheet. This
* causes all previously added items to be removed. When an item is clicked, the corresponding
* app is started.
*
* @param activity
* The activity, the bottom sheet belongs to, as an instance of the class {@link
* Activity}. The activity may not be null
* @param intent
* The intent as an instance of the class {@link Intent}. The intent may not be null
*/
public final void setIntent(@NonNull final Activity activity, @NonNull final Intent intent) {
Condition.INSTANCE.ensureNotNull(activity, "The activity may not be null");
Condition.INSTANCE.ensureNotNull(intent, "The intent may not be null");
removeAllItems();
PackageManager packageManager = activity.getPackageManager();
List<ResolveInfo> resolveInfos = packageManager.queryIntentActivities(intent, 0);
for (int i = 0; i < resolveInfos.size(); i++) {
ResolveInfo resolveInfo = resolveInfos.get(i);
addItem(i, resolveInfo.loadLabel(packageManager), resolveInfo.loadIcon(packageManager));
}
setOnItemClickListener(
createIntentClickListener(activity, (Intent) intent.clone(), resolveInfos));
}
/**
* Invalidates the bottom sheet. This method must be called in order to update the appearance of
* the bottom sheet, when its items have been changed.
*/
public final void invalidate() {
adapter.notifyDataSetChanged();
}
/**
* Sets, whether the bottom sheet should automatically be invalidated, when its items have been
* changed, or not.
*
* @param invalidateOnChange
* True, if the bottom sheet should automatically be invalidated, when its items have
* been changed, false otherwise
*/
public final void invalidateOnChange(final boolean invalidateOnChange) {
adapter.notifyOnChange(invalidateOnChange);
}
/**
* Returns, whether the bottom sheet is currently maximized, or not.
*
* @return True, if the bottom sheet is currently maximized, false otherwise
*/
public final boolean isMaximized() {
return rootView != null && rootView.isMaximized();
}
/**
* Maximizes the bottom sheet.
*/
@TargetApi(VERSION_CODES.FROYO)
public final void maximize() {
if (!isMaximized()) {
if (!isShowing()) {
maximize = true;
show();
} else {
rootView.maximize(new AccelerateDecelerateInterpolator());
}
}
}
/**
* Returns the style, which is used to display the bottom sheet's items.
*
* @return style The style, which is used to display the bottom sheet's items, as a value of the
* enum {@link Style}
*/
public final Style getStyle() {
return adapter.getStyle();
}
/**
* Sets the style, which should be used to display the bottom sheet's items.
*
* @param style
* The style, which should be set, as a value of the enum {@link Style}. The style may
* either be <code>LIST</code>, <code>LIST_COLUMNS</code> or <code>GRID</code>
*/
public final void setStyle(@NonNull final Style style) {
Condition.INSTANCE.ensureNotNull(style, "The style may not be null");
adapter.setStyle(style);
adaptRootView();
adaptTitleView();
adaptContentView();
adaptGridViewHeight();
}
/**
* Returns the title of the bottom sheet.
*
* @return The title of the bottom sheet as an instance of the type {@link CharSequence} or
* null, if no title has been set
*/
public final CharSequence getTitle() {
return title;
}
    @Override
    public final void setTitle(@Nullable final CharSequence title) {
        super.setTitle(title);
        // Keep a local copy for onSaveInstanceState and getTitle().
        this.title = title;
        adaptTitle();
    }
    @Override
    public final void dismiss() {
        // Plays the hide animation instead of dismissing immediately; the actual
        // super.dismiss() is invoked later in onHidden(false). No-op if not showing.
        if (isShowing()) {
            rootView.hideView(false);
        }
    }
    @Override
    public final void cancel() {
        // Plays the hide animation instead of canceling immediately; the actual
        // super.cancel() is invoked later in onHidden(true). No-op if not showing.
        if (isShowing()) {
            rootView.hideView(true);
        }
    }
    @Override
    public final void setCancelable(final boolean cancelable) {
        super.setCancelable(cancelable);
        // Mirrored locally so the flag can be persisted in onSaveInstanceState.
        this.cancelable = cancelable;
    }
    @Override
    public final void setCanceledOnTouchOutside(final boolean canceledOnTouchOutside) {
        super.setCanceledOnTouchOutside(canceledOnTouchOutside);
        // Mirrored locally so the flag can be persisted in onSaveInstanceState.
        this.canceledOnTouchOutside = canceledOnTouchOutside;
    }
    @TargetApi(VERSION_CODES.FROYO)
    @Override
    public final void setOnShowListener(@Nullable final OnShowListener listener) {
        // Intentionally does NOT delegate to super: the listener is stored and
        // invoked manually so it fires only after the show animation.
        this.onShowListener = listener;
    }
    // Callback from the draggable root view once the maximize animation finished.
    @Override
    public final void onMaximized() {
        notifyOnMaximize();
    }
    // Callback from the draggable root view once the hide animation finished;
    // completes the deferred dismiss()/cancel() by invoking the superclass method.
    @Override
    public final void onHidden(final boolean canceled) {
        if (canceled) {
            super.cancel();
        } else {
            super.dismiss();
        }
    }
    @Override
    public final void onStart() {
        super.onStart();
        getWindow().setAttributes(createLayoutParams());
        getWindow().getDecorView().setOnTouchListener(createCancelOnTouchListener());
        // Inflation must precede the adapt*() calls, which operate on the inflated views.
        inflateRootView();
        adaptRootView();
        inflateTitleView();
        inflateContentView();
        adaptTitle();
        adaptTitleColor();
        adaptIcon();
        adaptBackground();
        adaptDragSensitivity();
        adaptWidth();
        adaptGridViewHeight();
    }
    @Override
    public final void onStop() {
        super.onStop();
        // Release all view references; they are re-inflated on the next onStart().
        rootView = null;
        titleContainer = null;
        titleTextView = null;
        contentContainer = null;
        gridView = null;
    }
@NonNull
@Override
public final Bundle onSaveInstanceState() {
Bundle outState = super.onSaveInstanceState();
outState.putCharSequence(TITLE_EXTRA, title);
outState.putInt(TITLE_COLOR_EXTRA, titleColor);
outState.putBoolean(CANCELABLE_EXTRA, cancelable);
outState.putBoolean(CANCELED_ON_TOUCH_OUTSIDE_EXTRA, canceledOnTouchOutside);
outState.putFloat(DRAG_SENSITIVITY_EXTRA, dragSensitivity);
outState.putFloat(DIM_AMOUNT_EXTRA, dimAmount);
outState.putInt(WIDTH_EXTRA, width);
if (iconBitmap != null) {
outState.putParcelable(ICON_BITMAP_EXTRA, iconBitmap);
} else if (iconId != -1) {
outState.putInt(ICON_ID_EXTRA, iconId);
}
if (backgroundBitmap != null) {
outState.putParcelable(BACKGROUND_BITMAP_EXTRA, backgroundBitmap);
} else if (backgroundId != -1) {
outState.putInt(BACKGROUND_ID_EXTRA, backgroundId);
} else if (backgroundColor != -1) {
outState.putInt(BACKGROUND_COLOR_EXTRA, backgroundColor);
}
return outState;
}
    @Override
    public final void onRestoreInstanceState(@NonNull final Bundle savedInstanceState) {
        setTitle(savedInstanceState.getCharSequence(TITLE_EXTRA));
        setTitleColor(savedInstanceState.getInt(TITLE_COLOR_EXTRA));
        setCancelable(savedInstanceState.getBoolean(CANCELABLE_EXTRA));
        setCanceledOnTouchOutside(savedInstanceState.getBoolean(CANCELED_ON_TOUCH_OUTSIDE_EXTRA));
        setDragSensitivity(savedInstanceState.getFloat(DRAG_SENSITIVITY_EXTRA));
        setDimAmount(savedInstanceState.getFloat(DIM_AMOUNT_EXTRA));
        setWidth(savedInstanceState.getInt(WIDTH_EXTRA));

        // Exactly one icon representation may be present; restore with the same
        // precedence that onSaveInstanceState uses (bitmap, resource id, attribute).
        if (savedInstanceState.containsKey(ICON_BITMAP_EXTRA)) {
            setIcon((Bitmap) savedInstanceState.getParcelable(ICON_BITMAP_EXTRA));
        } else if (savedInstanceState.containsKey(ICON_ID_EXTRA)) {
            setIcon(savedInstanceState.getInt(ICON_ID_EXTRA));
        } else if (savedInstanceState.containsKey(ICON_ATTRIBUTE_ID_EXTRA)) {
            setIconAttribute(savedInstanceState.getInt(ICON_ATTRIBUTE_ID_EXTRA));
        }

        // Same precedence for the background (bitmap, resource id, color).
        if (savedInstanceState.containsKey(BACKGROUND_BITMAP_EXTRA)) {
            setBackground((Bitmap) savedInstanceState.getParcelable(BACKGROUND_BITMAP_EXTRA));
        } else if (savedInstanceState.containsKey(BACKGROUND_ID_EXTRA)) {
            setBackground(savedInstanceState.getInt(BACKGROUND_ID_EXTRA));
        } else if (savedInstanceState.containsKey(BACKGROUND_COLOR_EXTRA)) {
            setBackgroundColor(savedInstanceState.getInt(BACKGROUND_COLOR_EXTRA));
        }

        super.onRestoreInstanceState(savedInstanceState);
    }
}
<|start_filename|>library/src/main/java/de/mrapp/android/bottomsheet/view/DraggableView.java<|end_filename|>
/*
* Copyright 2016 - 2018 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package de.mrapp.android.bottomsheet.view;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Configuration;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.StyleRes;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.Animation;
import android.view.animation.Animation.AnimationListener;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.Interpolator;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import de.mrapp.android.bottomsheet.BottomSheet;
import de.mrapp.android.bottomsheet.R;
import de.mrapp.android.bottomsheet.animation.DraggableViewAnimation;
import de.mrapp.android.util.DisplayUtil.DeviceType;
import de.mrapp.android.util.gesture.DragHelper;
import static de.mrapp.android.util.DisplayUtil.getDeviceType;
/**
* The root view of a {@link BottomSheet}, which can be dragged by the user.
*
* @author <NAME>
* @since 1.0.0
*/
public class DraggableView extends LinearLayout implements ViewTreeObserver.OnGlobalLayoutListener {
/**
* Defines the interface, a class, which should be notified about the view's state, must
* implement.
*/
public interface Callback {
/**
* The method, which is invoked, when the view has been maximized.
*/
void onMaximized();
/**
* The method, which is invoked, when the view has been hidden.
*
* @param canceled
* True, if the view has been canceled, false otherwise
*/
void onHidden(boolean canceled);
}
    /**
     * The ratio between the view's height and the display's height, which is used to calculate the
     * initial height.
     */
    private static final float INITIAL_HEIGHT_RATIO = 9f / 16f;
    /**
     * The view group, which contains the view's title.
     */
    private ViewGroup titleContainer;
    /**
     * The view group, which contains the view's content.
     */
    private ViewGroup contentContainer;
    /**
     * The callback, which should be notified about the view's state.
     */
    private Callback callback;
    /**
     * An instance of the class {@link DragHelper}, which is used to recognize drag gestures.
     */
    private DragHelper dragHelper;
    /**
     * True, if the view is currently maximized, false otherwise.
     */
    private boolean maximized;
    /**
     * The view's initial top margin in pixels (-1 until measured).
     */
    private int initialMargin = -1;
    /**
     * The view's minimum top margin in pixels (-1 until measured).
     */
    private int minMargin = -1;
    /**
     * The height of the view's parent in pixels (-1 until measured).
     */
    private int parentHeight = -1;
    /**
     * The speed of the animation, which is used to show or hide the view, in pixels per
     * millisecond.
     */
    private float animationSpeed;
    /**
     * The width of the view in pixels.
     */
    private int width;
    /**
     * Initializes the view: registers for global layout events (to measure margins once the
     * view is laid out) and sets up drag gesture recognition.
     */
    private void initialize() {
        getViewTreeObserver().addOnGlobalLayoutListener(this);
        // A threshold of 0 means every movement immediately counts as a drag.
        dragHelper = new DragHelper(0);
        maximized = false;
    }
/**
* Returns, whether a touch event at a specific position targets a view, which can be scrolled
* up.
*
* @param x
* The horizontal position of the touch event in pixels as a {@link Float} value
* @param y
* The vertical position of the touch event in pixels as a {@link Float} value
* @return True, if the touch event targets a view, which can be scrolled up, false otherwise
*/
private boolean isScrollUpEvent(final float x, final float y) {
return isScrollUpEvent(x, y, contentContainer);
}
/**
* Returns, whether a touch event at a specific position targets a view, which can be scrolled
* up.
*
* @param x
* The horizontal position of the touch event in pixels as a {@link Float} value
* @param y
* The vertical position of the touch event in pixels as a {@link Float} value
* @param viewGroup
* The view group, which should be used to search for scrollable child views, as an
* instance of the class {@link ViewGroup}. The view group may not be null
* @return True, if the touch event targets a view, which can be scrolled up, false otherwise
*/
private boolean isScrollUpEvent(final float x, final float y,
@NonNull final ViewGroup viewGroup) {
int location[] = new int[2];
viewGroup.getLocationOnScreen(location);
if (x >= location[0] && x <= location[0] + viewGroup.getWidth() && y >= location[1] &&
y <= location[1] + viewGroup.getHeight()) {
for (int i = 0; i < viewGroup.getChildCount(); i++) {
View view = viewGroup.getChildAt(i);
if (view.canScrollVertically(-1)) {
return true;
} else if (view instanceof ViewGroup) {
return isScrollUpEvent(x, y, (ViewGroup) view);
}
}
}
return false;
}
    /**
     * Handles when a drag gesture is performed by the user.
     *
     * @return True, if the view has been moved by the drag gesture, false otherwise
     */
    private boolean handleDrag() {
        // A running show/hide animation takes precedence over drag gestures.
        if (!isAnimationRunning()) {
            if (dragHelper.hasThresholdBeenReached()) {
                // When maximized, the drag distance is applied absolutely; otherwise it is
                // applied relative to the initial top margin.
                int margin = Math.round(isMaximized() ? dragHelper.getDragDistance() :
                        initialMargin + dragHelper.getDragDistance());
                // Clamp the margin so the view is never dragged above its minimum top margin.
                margin = Math.max(Math.max(margin, minMargin), 0);
                setTopMargin(margin);
            }
            return true;
        }
        return false;
    }
    /**
     * Handles when a drag gesture has been ended by the user. Depending on the final position and
     * the speed of the gesture, the view is either animated off-screen (hidden) or snapped back
     * to its resting position.
     */
    private void handleRelease() {
        // Use at least the configured animation speed, even for slow flings.
        float speed = Math.max(dragHelper.getDragSpeed(), animationSpeed);
        // Hide the view, if it was dragged below its initial position, if it was flung downwards
        // fast enough, or if a maximized tablet sheet was dragged below its minimum margin.
        if (getTopMargin() > initialMargin ||
                (dragHelper.getDragSpeed() > animationSpeed && dragHelper.getDragDistance() > 0) ||
                (getDeviceType(getContext()) == DeviceType.TABLET && isMaximized() &&
                        getTopMargin() > minMargin)) {
            animateHideView(parentHeight - getTopMargin(), speed, new DecelerateInterpolator(),
                    true);
        } else {
            animateShowView(-(getTopMargin() - minMargin), speed, new DecelerateInterpolator());
        }
    }
/**
* Returns the top margin of the view.
*
* @return The top margin of the view in pixels as an {@link Integer} value
*/
public final int getTopMargin() {
FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) getLayoutParams();
return layoutParams.topMargin;
}
/**
* Set the top margin of the view.
*
* @param margin
* The top margin, which should be set, in pixels as an {@link Integer} value
*/
private void setTopMargin(final int margin) {
FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) getLayoutParams();
layoutParams.topMargin = margin;
setLayoutParams(layoutParams);
}
    /**
     * Animates the view to become shown.
     *
     * @param diff
     * The distance the view has to be vertically moved by, as an {@link Integer} value
     * @param animationSpeed
     * The speed of the animation in pixels per milliseconds as a {@link Float} value
     * @param interpolator
     * The interpolator, which should be used by the animation, as an instance of the type
     * {@link Interpolator}. The interpolator may not be null
     */
    private void animateShowView(final int diff, final float animationSpeed,
            @NonNull final Interpolator interpolator) {
        animateView(diff, animationSpeed, createAnimationListener(true, false), interpolator);
    }
    /**
     * Animates the view to become hidden.
     *
     * @param diff
     * The distance the view has to be vertically moved by, as an {@link Integer} value
     * @param animationSpeed
     * The speed of the animation in pixels per milliseconds as a {@link Float} value
     * @param interpolator
     * The interpolator, which should be used by the animation, as an instance of the type
     * {@link Interpolator}. The interpolator may not be null
     * @param cancel
     * True, if hiding the view counts as canceling the bottom sheet (forwarded to the
     * callback via {@link Callback#onHidden(boolean)}), false otherwise
     */
    private void animateHideView(final int diff, final float animationSpeed,
            @NonNull final Interpolator interpolator, final boolean cancel) {
        animateView(diff, animationSpeed, createAnimationListener(false, cancel), interpolator);
    }
    /**
     * Animates the view to become shown or hidden.
     *
     * @param diff
     * The distance the view has to be vertically moved by, as an {@link Integer} value
     * @param animationSpeed
     * The speed of the animation in pixels per millisecond as a {@link Float} value
     * @param animationListener
     * The listener, which should be notified about the animation's progress, as an instance
     * of the type {@link AnimationListener}. The listener may not be null
     * @param interpolator
     * The interpolator, which should be used by the animation, as an instance of the type
     * {@link Interpolator}. The interpolator may not be null
     */
    private void animateView(final int diff, final float animationSpeed,
            @NonNull final AnimationListener animationListener,
            @NonNull final Interpolator interpolator) {
        // The animation is suppressed while a drag gesture or another animation is in progress.
        if (!isDragging() && !isAnimationRunning()) {
            long duration = calculateAnimationDuration(diff, animationSpeed);
            Animation animation =
                    new DraggableViewAnimation(this, diff, duration, animationListener);
            animation.setInterpolator(interpolator);
            startAnimation(animation);
        }
    }
/**
* Calculates the duration of the animation, which is used to hide or show the view, depending
* on a specific distance and speed.
*
* @param diff
* The distance, the view has to be vertically moved by, as an {@link Integer} value
* @param animationSpeed
* The speed of the animation in pixels per millisecond as a {@link Float} value
* @return The duration of the animation in milliseconds as an {@link Integer} value
*/
private int calculateAnimationDuration(final int diff, final float animationSpeed) {
return Math.round(Math.abs(diff) / animationSpeed);
}
    /**
     * Creates and returns a listener, which allows to handle the end of an animation, which has
     * been used to show or hide the view.
     *
     * @param show
     * True, if the view should be shown at the end of the animation, false otherwise
     * @param cancel
     * True, if hiding the view counts as canceling the bottom sheet, false otherwise
     * @return The listener, which has been created, as an instance of the type {@link
     * AnimationListener}
     */
    private AnimationListener createAnimationListener(final boolean show, final boolean cancel) {
        return new AnimationListener() {
            @Override
            public void onAnimationStart(final Animation animation) {
            }
            @Override
            public void onAnimationEnd(final Animation animation) {
                // Clear the finished animation so isAnimationRunning() returns false again,
                // update the maximized state and notify the registered callback.
                clearAnimation();
                maximized = show;
                if (maximized) {
                    notifyOnMaximized();
                } else {
                    notifyOnHidden(cancel);
                }
            }
            @Override
            public void onAnimationRepeat(final Animation animation) {
            }
        };
    }
/**
* Notifies the callback, which should be notified about the view's state, that the view has
* been maximized.
*/
private void notifyOnMaximized() {
if (callback != null) {
callback.onMaximized();
}
}
/**
* Notifies the callback, which should be notified about the view's state, that the view has
* been hidden.
*
* @param canceled
* True, if the view has been canceled, false otherwise
*/
private void notifyOnHidden(final boolean canceled) {
if (callback != null) {
callback.onHidden(canceled);
}
}
    /**
     * Creates a new root view of a {@link BottomSheet}, which can be dragged by the user.
     *
     * @param context
     * The context, which should be used by the view, as an instance of the class {@link
     * Context}. The context may not be null
     */
    public DraggableView(@NonNull final Context context) {
        super(context);
        // All constructors delegate shared setup to initialize().
        initialize();
    }
    /**
     * Creates a new root view of a {@link BottomSheet}, which can be dragged by the user.
     *
     * @param context
     * The context, which should be used by the view, as an instance of the class {@link
     * Context}. The context may not be null
     * @param attributeSet
     * The attribute set, the view's attributes should be obtained from, as an instance of
     * the type {@link AttributeSet} or null, if no attributes should be obtained
     */
    public DraggableView(@NonNull final Context context,
            @Nullable final AttributeSet attributeSet) {
        super(context, attributeSet);
        // All constructors delegate shared setup to initialize().
        initialize();
    }
    /**
     * Creates a new root view of a {@link BottomSheet}, which can be dragged by the user.
     *
     * @param context
     * The context, which should be used by the view, as an instance of the class {@link
     * Context}. The context may not be null
     * @param attributeSet
     * The attribute set, the view's attributes should be obtained from, as an instance of
     * the type {@link AttributeSet} or null, if no attributes should be obtained
     * @param defaultStyle
     * The default style to apply to this view. If 0, no style will be applied (beyond what
     * is included in the theme). This may either be an attribute resource, whose value will
     * be retrieved from the current theme, or an explicit style resource
     */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    public DraggableView(@NonNull final Context context, @Nullable final AttributeSet attributeSet,
            @StyleRes final int defaultStyle) {
        super(context, attributeSet, defaultStyle);
        // All constructors delegate shared setup to initialize().
        initialize();
    }
    /**
     * Creates a new root view of a {@link BottomSheet}, which can be dragged by the user.
     *
     * @param context
     * The context, which should be used by the view, as an instance of the class {@link
     * Context}. The context may not be null
     * @param attributeSet
     * The attribute set, the view's attributes should be obtained from, as an instance of
     * the type {@link AttributeSet} or null, if no attributes should be obtained
     * @param defaultStyle
     * The default style to apply to this view. If 0, no style will be applied (beyond what
     * is included in the theme). This may either be an attribute resource, whose value will
     * be retrieved from the current theme, or an explicit style resource
     * @param defaultStyleResource
     * A resource identifier of a style resource that supplies default values for the view,
     * used only if the default style is 0 or can not be found in the theme. Can be 0 to not
     * look for defaults
     */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public DraggableView(@NonNull final Context context, @Nullable final AttributeSet attributeSet,
            @StyleRes final int defaultStyle,
            @StyleRes final int defaultStyleResource) {
        super(context, attributeSet, defaultStyle, defaultStyleResource);
        // All constructors delegate shared setup to initialize().
        initialize();
    }
/**
* Hides the view in an animated manner.
*
* @param cancel
* True, if the view should be canceled, false otherwise
*/
public final void hideView(final boolean cancel) {
animateHideView(parentHeight - getTopMargin(), animationSpeed,
new AccelerateDecelerateInterpolator(), cancel);
}
    /**
     * Sets the callback, which should be notified about the view's state.
     *
     * @param callback
     * The callback, which should be set, as an instance of the type {@link Callback}, or
     * null, if no callback should be notified
     */
    public final void setCallback(@Nullable final Callback callback) {
        this.callback = callback;
    }
/**
* Sets the distance in pixels, a drag gesture must last until it is recognized.
*
* @param dragSensitivity
* The distance, which should be set, in pixels as an {@link Integer} value. The value
* must be at least 0
*/
public final void setDragSensitivity(final int dragSensitivity) {
this.dragHelper = new DragHelper(dragSensitivity);
}
/**
* Sets the width of the view. The width is only used on tablet devices or in landscape mode.
*
* @param width
* The width, which should be set, in pixels as an {@link Integer} value. The width must
* be at least 1
*/
public final void setWidth(final int width) {
this.width = width;
}
    /**
     * Returns, whether a drag gesture, which moves the view, is currently performed, or not.
     *
     * @return True, if a drag gesture, which moves the view, is currently performed, false
     * otherwise
     */
    public final boolean isDragging() {
        // A gesture only counts as dragging once it has passed the sensitivity threshold and the
        // helper has not been reset (e.g. on ACTION_UP).
        return !dragHelper.isReset() && dragHelper.hasThresholdBeenReached();
    }
    /**
     * Returns, whether an animation, which moves the view, is currently running, or not.
     *
     * @return True, if an animation, which moves the view, is currently running, false otherwise
     */
    public final boolean isAnimationRunning() {
        // The finished animation is cleared in createAnimationListener's onAnimationEnd.
        return getAnimation() != null;
    }
    /**
     * Returns, whether the view is currently maximized, or not.
     *
     * @return True, if the view is currently maximized, false otherwise
     */
    public final boolean isMaximized() {
        return maximized;
    }
    /**
     * Maximizes the view in an animated manner. Does nothing, if the view is already maximized.
     *
     * @param interpolator
     * The interpolator, which should be used by the animation, as an instance of the type
     * {@link Interpolator}. The interpolator may not be null
     */
    public final void maximize(final Interpolator interpolator) {
        if (!isMaximized()) {
            animateShowView(-(getTopMargin() - minMargin), animationSpeed, interpolator);
        }
    }
    @Override
    public final boolean dispatchTouchEvent(final MotionEvent event) {
        // Intercepts touch events before they reach the children, so drag gestures on the whole
        // sheet can be recognized. Unhandled events are still dispatched to the children.
        boolean handled = false;
        switch (event.getAction()) {
            case MotionEvent.ACTION_DOWN:
                break;
            case MotionEvent.ACTION_MOVE:
                dragHelper.update(event.getRawY());
                // While maximized, upward gestures and gestures over scrollable children must
                // scroll the content instead of dragging the sheet.
                if (isMaximized() && (event.getRawY() - dragHelper.getDragStartPosition() < 0 ||
                        isScrollUpEvent(event.getRawX(), event.getRawY()))) {
                    dragHelper.reset();
                    break;
                }
                handled = handleDrag();
                break;
            case MotionEvent.ACTION_UP:
                // NOTE(review): reset() is called before hasThresholdBeenReached() is queried;
                // this presumably only marks the helper as reset while keeping the measured
                // distance (see isDragging()) — confirm against DragHelper's implementation.
                dragHelper.reset();
                if (dragHelper.hasThresholdBeenReached()) {
                    handleRelease();
                }
                break;
            default:
                break;
        }
        return handled || super.dispatchTouchEvent(event);
    }
    @Override
    public final boolean onTouchEvent(final MotionEvent event) {
        // Handles touch events, which have not been consumed by any child view. Returning true
        // on ACTION_DOWN claims the gesture, so subsequent MOVE/UP events are delivered here.
        switch (event.getAction()) {
            case MotionEvent.ACTION_DOWN:
                return true;
            case MotionEvent.ACTION_MOVE:
                dragHelper.update(event.getRawY());
                handleDrag();
                return true;
            case MotionEvent.ACTION_UP:
                dragHelper.reset();
                if (dragHelper.hasThresholdBeenReached()) {
                    handleRelease();
                }
                // Trigger the click handling for accessibility purposes.
                performClick();
                return true;
            default:
                break;
        }
        return super.onTouchEvent(event);
    }
    @Override
    public final boolean performClick() {
        // Overridden to always report the click as handled (invoked from onTouchEvent).
        super.performClick();
        return true;
    }
    @Override
    public final void onGlobalLayout() {
        // Lazily measure the parent and derive all margins and the animation speed exactly once
        // (parentHeight == -1 means this is the first layout pass).
        if (parentHeight == -1) {
            parentHeight = ((View) getParent()).getHeight();
            float initialHeight = parentHeight * INITIAL_HEIGHT_RATIO;
            int titleContainerHeight =
                    titleContainer.getVisibility() == View.VISIBLE ? titleContainer.getHeight() : 0;
            int contentContainerHeight = contentContainer.getVisibility() == View.VISIBLE ?
                    contentContainer.getHeight() : 0;
            int padding = getPaddingTop() + getPaddingBottom();
            // The minimum margin corresponds to the fully expanded sheet.
            minMargin = parentHeight - titleContainerHeight - contentContainerHeight - padding;
            initialMargin = Math.max(Math.round(parentHeight - initialHeight), minMargin);
            int animationDuration = getResources().getInteger(R.integer.animation_duration);
            animationSpeed = initialHeight / (float) animationDuration;
            setTopMargin(initialMargin);
        }
    }
    @Override
    protected final void onAttachedToWindow() {
        super.onAttachedToWindow();
        // Resolve the child containers once the view hierarchy is available.
        titleContainer = findViewById(R.id.title_container);
        contentContainer = findViewById(R.id.content_container);
    }
    @Override
    protected final void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
        // On tablets and in landscape mode the sheet uses the explicitly configured width (see
        // setWidth); otherwise the default measuring applies.
        if (getDeviceType(getContext()) == DeviceType.TABLET ||
                getResources().getConfiguration().orientation ==
                        Configuration.ORIENTATION_LANDSCAPE) {
            int measureMode = MeasureSpec.getMode(widthMeasureSpec);
            super.onMeasure(MeasureSpec.makeMeasureSpec(width, measureMode), heightMeasureSpec);
        } else {
            super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        }
    }
} | DigitalGoal-Ltd/AndroidBottomSheet |
<|start_filename|>Makefile<|end_filename|>
# Package the workflow: zip the sources into the .alfredworkflow bundle
# (-j junks directory paths, -9 maximum compression, --filesync keeps the
# archive in sync with the src/ directory).
all:
	zip -j9 --filesync Screen\ Sharing.alfredworkflow src/*
<|start_filename|>domain_info.go<|end_filename|>
package main
import (
"encoding/json"
"fmt"
"log"
)
// domainInfo mirrors the JSON response of the Yandex DNS API
// "dns/list" endpoint (see getDomainInfoURLTemplate).
type domainInfo struct {
	Domain  string `json:"domain"`
	Records []struct {
		RecordID  uint64      `json:"record_id"`
		Type      string      `json:"type"`
		Domain    string      `json:"domain"`
		Fqdn      string      `json:"fqdn"`
		TTL       uint64      `json:"ttl"`
		Subdomain string      `json:"subdomain"`
		Content   string      `json:"content"`
		Priority  interface{} `json:"priority"` // number or string depending on record type — see API docs
	} `json:"records"`
	// Success is the API status string; getDomainInfo treats "error" as failure.
	Success string `json:"success"`
	Error   string `json:"error"`
}
const (
	// getDomainInfoURLTemplate is the Yandex DNS API endpoint for listing all
	// records of a domain; the single %s placeholder receives the domain name.
	getDomainInfoURLTemplate = "https://pddimp.yandex.ru/api2/admin/dns/list?domain=%s"
)
// parseDomainInfoData decodes the raw JSON response of the Yandex DNS API into
// a domainInfo structure. The process is terminated if the payload is not
// valid JSON.
func parseDomainInfoData(data []byte) *domainInfo {
	var info domainInfo
	if err := json.Unmarshal(data, &info); err != nil {
		log.Fatalf("failed to parse response from Yandex DNS API service %v\n", err)
	}
	return &info
}
// getDomainInfo fetches the DNS record listing for the configured domain from
// the Yandex DNS API, authenticating with the configured token. Any transport
// failure or an API-level error status terminates the process via log.Fatalf.
func getDomainInfo(conf *config) *domainInfo {
	url := fmt.Sprintf(getDomainInfoURLTemplate, conf.Domain)
	body, err := getURL(url, &conf.Token)
	if err != nil {
		log.Fatalf("failed to query '%s': %s", url, err.Error())
	}
	info := parseDomainInfoData(body)
	// The API reports failures in-band: success == "error" with details in Error.
	if info.Success == "error" {
		log.Fatalf("invalid status response: %s\n", info.Error)
	}
	return info
}
<|start_filename|>get_ip.go<|end_filename|>
package main
import (
"log"
"net"
"regexp"
"strings"
)
// lookupExternalIPUrl holds a pair of HTTP endpoints that report the caller's
// external IPv4 (v4) and IPv6 (v6) address; either field may be empty.
type lookupExternalIPUrl struct {
	v4 string
	v6 string
}
// externalIPAddress is the detected external address pair; an empty field
// means the corresponding protocol's address could not be determined.
type externalIPAddress struct {
	v4 string
	v6 string
}
// lookupExternalIPUrls lists the lookup services in order of preference;
// getExternalIP tries them until one yields an address.
var lookupExternalIPUrls = []lookupExternalIPUrl{
	{
		v4: "https://v4.ifconfig.co/ip",
		v6: "https://v6.ifconfig.co/ip",
	},
	{
		// Fallback service; it has no IPv6 endpoint.
		v4: "http://myexternalip.com/raw",
	},
}
// isIPValid reports whether addr is a syntactically valid IPv4 or IPv6
// address. The empty string is considered invalid.
func isIPValid(addr string) bool {
	if addr == "" {
		return false
	}
	// net.ParseIP returns nil for anything that is not a valid address.
	return net.ParseIP(addr) != nil
}
// getIP fetches url and extracts an IP address from the response body. If
// regexp is non-nil, the first capture group of its first match is used;
// otherwise the whole (trimmed) body is taken. An address that fails
// validation is returned as the empty string with a nil error.
func getIP(url string, regexp *regexp.Regexp) (string, error) {
	var addr string
	body, err := getURL(url, nil)
	if err != nil {
		return "", err
	}
	if regexp != nil {
		result := regexp.FindAllStringSubmatch(string(body), -1)
		if len(result) > 0 && len(result[0]) > 0 {
			addr = result[0][1]
		}
	} else {
		addr = strings.Trim(string(body), " \r\n")
	}
	// Invalid responses are mapped to the empty string, not an error, so the
	// caller can simply fall through to the next lookup service.
	if !isIPValid(addr) {
		addr = ""
	}
	return addr, nil
}
// getExternalIP determines the host's external IPv4 (and, if conf.SetIPv6 is
// enabled, IPv6) address by querying the configured lookup services in order
// until one of them returns an address. The process is terminated if no
// service yields any address at all.
func getExternalIP(conf *config) *externalIPAddress {
	var IPv4, IPv6 string
	var err error
	for _, lookup := range lookupExternalIPUrls {
		IPv4, err = getIP(lookup.v4, nil)
		if err != nil {
			// Log and keep going: a failing service is not fatal.
			log.Printf("%s", err.Error())
		}
		if conf.SetIPv6 {
			IPv6, err = getIP(lookup.v6, nil)
			if err != nil {
				log.Printf("%s", err.Error())
			}
		}
		if len(IPv4) > 0 || len(IPv6) > 0 {
			break
		}
	}
	if len(IPv4) == 0 && len(IPv6) == 0 {
		log.Fatal("couldn't determine external IP address")
	}
	return &externalIPAddress{v4: IPv4, v6: IPv6}
}
<|start_filename|>http_request.go<|end_filename|>
package main
import (
"bytes"
"fmt"
"io/ioutil"
"log"
"net/http"
"net/url"
"time"
)
const (
	// userAgentHeader is the HTTP header name used to identify this client.
	userAgentHeader = "User-Agent"
	// defaultUserAgent is sent with every GET request issued by getURL.
	defaultUserAgent = "Mozilla/4.0 (compatible; MSIE 7.0; +https://github.com/thekvs/yandex-ddns)"
)
// defaultNetworkTimeout bounds every HTTP request (connect + response).
const defaultNetworkTimeout = 20 * time.Second

// client is the shared HTTP client used by getURL and postURL.
var client = &http.Client{Timeout: defaultNetworkTimeout}
func postURL(url string, token *string, values *url.Values) ([]byte, error) {
var (
req *http.Request
err error
)
if values != nil {
req, err = http.NewRequest("POST", url, bytes.NewBufferString(values.Encode()))
} else {
req, err = http.NewRequest("POST", url, nil)
}
if err != nil {
log.Fatalf("%s", err)
}
if token != nil && len(*token) > 0 {
req.Header.Add("pddToken", *token)
}
req.Header.Set("Content-Type", "application/x-www-form-urlencoded; param=value")
resp, err := client.Do(req)
if err != nil {
log.Fatalf("%s", err)
}
body, err := ioutil.ReadAll(resp.Body)
defer resp.Body.Close()
if err != nil {
log.Fatalf("%s", err)
}
return body, nil
}
// getURL issues an HTTP GET request to url with the default User-Agent and,
// if token is non-nil and non-empty, the Yandex "pddToken" authentication
// header. It returns the raw response body, or an error for transport
// failures and non-200 responses.
func getURL(url string, token *string) ([]byte, error) {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Add(userAgentHeader, defaultUserAgent)
	if token != nil && len(*token) > 0 {
		req.Header.Add("pddToken", *token)
	}
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer closeResource(resp.Body)
	// Use the named constant and fix the typo ("unexpecetd") in the error text.
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected HTTP status code: %d", resp.StatusCode)
	}
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	return body, nil
}
<|start_filename|>Makefile<|end_filename|>
# Path inside the lint container where the project is mounted.
MOUNT_POINT=/project
.PHONY: build clean lint
.DEFAULT_GOAL := build
# Build all commands from vendored deps into ./bin, stripped (-w -s).
build:
	GOBIN=$(shell pwd)/bin go install -mod=vendor -ldflags '-w -s' -v ./...
clean:
	rm -rf bin
# Run golangci-lint in Docker as the current user so bin/.cache stays writable.
lint:
	docker run --rm \
		--user `id -u`:`id -g` \
		--env GOCACHE=$(MOUNT_POINT)/bin/.cache \
		--volume `pwd`:$(MOUNT_POINT) \
		--volume $(GOPATH):/go \
		--workdir $(MOUNT_POINT) \
		golangci/golangci-lint golangci-lint run
<|start_filename|>yandex-ddns.go<|end_filename|>
package main
import (
"flag"
"fmt"
"log"
"os"
"path/filepath"
"github.com/nightlyone/lockfile"
)
// initLock creates a lock file with the given name in the system temp
// directory and tries to acquire it, so only one instance runs at a time.
func initLock(file string) (*lockfile.Lockfile, error) {
	lock, err := lockfile.New(filepath.Join(os.TempDir(), file))
	if err != nil {
		return nil, err
	}
	if err := lock.TryLock(); err != nil {
		return nil, err
	}
	return &lock, nil
}
// main parses flags, validates the configuration, determines the external IP
// address and updates the Yandex DNS records accordingly. A lock file in the
// temp directory prevents concurrent runs.
func main() {
	var (
		configFile     string
		testConfigOnly bool
	)
	flag.StringVar(&configFile, "config", "yandex-ddns.toml", "configuration file")
	flag.BoolVar(&testConfigOnly, "t", false, "only test configuration file")
	flag.Parse()
	conf := newConfigurationFromFile(configFile)
	if testConfigOnly {
		verifyConfiguration(conf)
		fmt.Println("Configuration file Ok.")
		os.Exit(0)
	}
	lock, err := initLock("yandex-ddns.lock")
	if err != nil {
		log.Fatalf("Couldn't init lock file: %v\n", err)
	}
	// NOTE(review): log.Fatalf calls os.Exit, which skips deferred calls — on a
	// fatal error below, the lock file is not unlocked and the log file is not
	// closed. Stale locks are presumably handled by lockfile.TryLock on the
	// next run; confirm.
	defer lock.Unlock()
	if conf.LogFile != "" {
		f, err := os.OpenFile(conf.LogFile, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0o666)
		if err != nil {
			log.Fatalf("error opening file: %v", err)
		}
		defer closeResource(f)
		log.SetOutput(f)
	}
	log.SetFlags(log.LstdFlags | log.Lshortfile)
	verifyConfiguration(conf)
	extIPAddr := getExternalIP(conf)
	domainInfo := getDomainInfo(conf)
	updateDomainAddress(domainInfo, extIPAddr, conf)
}
<|start_filename|>config.go<|end_filename|>
package main
import (
"io"
"log"
"os"
"github.com/BurntSushi/toml"
)
// config holds the application settings loaded from the TOML configuration
// file (see newConfigurationFromFile / verifyConfiguration for constraints).
type config struct {
	Token     string  `toml:"token"`     // Yandex DNS API token (mandatory)
	Domain    string  `toml:"domain"`    // domain to update (mandatory)
	SubDomain string  `toml:"subdomain"` // optional subdomain to restrict updates to
	LogFile   string  `toml:"logfile"`   // optional log file path; empty logs to stderr
	TTL       *uint64 `toml:"ttl,omitempty"`
	SetIPv6   bool    `toml:"set_ipv6"` // also detect and publish the IPv6 address
}
const (
	// Permissible TTL range in seconds, enforced by verifyConfiguration.
	minTTLValue = 900
	maxTTLValue = 1209600
)
// allowedPermissions is the only accepted mode for the configuration file,
// since it contains the API token (owner read/write only).
const allowedPermissions = 0o600
// isPermissionsOk reports whether the given file has exactly the allowed
// permission bits (0600). The process is terminated if the file cannot be
// stat'ed.
func isPermissionsOk(f *os.File) bool {
	info, err := f.Stat()
	if err != nil {
		log.Fatalf("error: %v\n", err)
	}
	return info.Mode().Perm() == allowedPermissions
}
// newConfigurationFromFile opens and parses the TOML configuration file at
// path. The process is terminated if the file cannot be opened, has
// permissions other than 0600 (it contains the API token), or fails to parse.
func newConfigurationFromFile(path string) *config {
	file, err := os.Open(path)
	if err != nil {
		log.Fatalf("can't open configuration file: %v\n", err)
	}
	defer closeResource(file)
	if !isPermissionsOk(file) {
		log.Fatalf("error: configuration file with sensitive information has insecure permissions\n")
	}
	conf := newConfiguration(file)
	return conf
}
// verifyConfiguration validates the mandatory fields ('token', 'domain') and,
// if a TTL is configured, checks it against the API's permissible range.
// Any violation terminates the process via log.Fatal(f).
func verifyConfiguration(conf *config) {
	if conf.Token == "" {
		log.Fatal("missed mandatory configuration parameter 'token'")
	}
	if conf.Domain == "" {
		log.Fatal("missed mandatory configuration parameter 'domain'")
	}
	if conf.TTL != nil {
		if *conf.TTL < minTTLValue || *conf.TTL > maxTTLValue {
			// Typo fix in the log message: "exeeds" -> "exceeds".
			log.Fatalf("domain TTL value (=%d) exceeds permissible range (=[%d, %d])\n",
				*conf.TTL, minTTLValue, maxTTLValue)
		}
	}
}
// newConfiguration decodes a TOML document from data into a config. The
// process is terminated if the document cannot be parsed.
func newConfiguration(data io.Reader) *config {
	var conf config
	if _, err := toml.DecodeReader(data, &conf); err != nil {
		log.Fatalf("Couldn't parse configuration file: %v", err)
	}
	return &conf
}
| monster1025/yandex-ddns-1 |
<|start_filename|>assets/js/mmenu-js/_modules/drag.js<|end_filename|>
import { touch } from './support';
var DragEngine = /** @class */ (function () {
function DragEngine(surface, area) {
this.surface = surface;
this.area = area;
// Set the mouse/touch events.
this.surface.addEventListener(touch ? 'touchstart' : 'mousedown', this.start.bind(this));
this.surface.addEventListener(touch ? 'touchend' : 'mouseup', this.stop.bind(this));
this.surface.addEventListener(touch ? 'touchleave' : 'mouseleave', this.stop.bind(this));
this.surface.addEventListener(touch ? 'touchmove' : 'mousemove', this.move.bind(this));
}
/**
* Starting the touch gesture.
*/
DragEngine.prototype.start = function (event) {
var passed = 0;
var width = this.surface.clientWidth;
var height = this.surface.clientHeight;
// Check if the gesture started below the area.top.
var top = this._getArea(this.area.top, height);
if (typeof top == 'number') {
if (event.pageY >= top) {
passed++;
}
}
// Check if the gesture started before the area.right.
var right = this._getArea(this.area.right, width);
if (typeof right == 'number') {
right = width - right;
if (event.pageX <= right) {
passed++;
}
}
// Check if the gesture started above the area.bottom.
var bottom = this._getArea(this.area.bottom, height);
if (typeof bottom == 'number') {
bottom = height - bottom;
if (event.pageY <= bottom) {
passed++;
}
}
// Check if the gesture started after the area.left.
var left = this._getArea(this.area.left, width);
if (typeof left == 'number') {
if (event.pageX >= left) {
passed++;
}
}
if (passed == 4) {
// Store the start x- and y-position.
this.startPosition = {
x: event.pageX,
y: event.pageY
};
// Set the state of the gesture to "watching".
this.state = DragEngine.state.watching;
}
};
/**
* Stopping the touch gesture.
*/
DragEngine.prototype.stop = function (event) {
// Dispatch the "dragEnd" events.
if (this.state == DragEngine.state.dragging) {
/** The event information. */
var detail = this._eventDetail();
/** The direction. */
var dragDirection = DragEngine.directionNames[this.axis][this.distance[this.axis] > 0 ? 0 : 1];
this._dispatchEvents('drag*End', detail, dragDirection);
// Dispatch the "swipe" events.
if (Math.abs(this.movement[this.axis]) > DragEngine.treshold.swipe) {
/** The direction. */
var swipeDirection = DragEngine.directionNames[this.axis][this.movement[this.axis] > 0 ? 0 : 1];
this._dispatchEvents('swipe*', detail, swipeDirection);
}
}
// Set the state of the gesture to "inactive".
this.state = DragEngine.state.inactive;
};
/**
* Doing the touch gesture.
*/
DragEngine.prototype.move = function (event) {
switch (this.state) {
case DragEngine.state.watching:
case DragEngine.state.dragging:
this.movement = {
x: event.movementX,
y: event.movementY
};
this.distance = {
x: event.pageX - this.startPosition.x,
y: event.pageY - this.startPosition.y
};
this.axis =
Math.abs(this.distance.x) > Math.abs(this.distance.y)
? 'x'
: 'y';
/** The event information. */
var detail = this._eventDetail();
/** The direction. */
var dragDirection = DragEngine.directionNames[this.axis][this.movement[this.axis] > 0 ? 0 : 1];
// Watching for the gesture to go past the treshold.
if (this.state == DragEngine.state.watching) {
if (Math.abs(this.distance[this.axis]) >
DragEngine.treshold.start) {
this._dispatchEvents('drag*Start', detail, dragDirection);
// Set the state of the gesture to "inactive".
this.state = DragEngine.state.dragging;
}
}
// Dispatch the "drag" events.
if (this.state == DragEngine.state.dragging) {
this._dispatchEvents('drag*', detail, dragDirection);
}
break;
}
};
DragEngine.prototype._eventDetail = function () {
return {
movementX: this.movement.x,
movementY: this.movement.y,
distanceX: this.distance.x -
(this.axis == 'x' ? DragEngine.treshold.start : 0),
distanceY: this.distance.y -
(this.axis == 'y' ? DragEngine.treshold.start : 0)
};
};
DragEngine.prototype._dispatchEvents = function (eventName, detail, dir) {
/** General event, e.g. "drag" */
var event = new CustomEvent(eventName.replace('*', ''), { detail: detail });
this.surface.dispatchEvent(event);
/** Axis event, e.g. "dragX" */
var axis = new CustomEvent(eventName.replace('*', this.axis.toUpperCase()), { detail: detail });
this.surface.dispatchEvent(axis);
/** Direction event, e.g. "dragLeft" */
var direction = new CustomEvent(eventName.replace('*', dir), {
detail: detail
});
this.surface.dispatchEvent(direction);
};
DragEngine.prototype._getArea = function (position, size) {
if (typeof position == 'string') {
if (position.slice(-1) == '%') {
position = parseInt(position.slice(0, -1), 10);
position = size * (position / 100);
}
}
return position;
};
DragEngine.directionNames = {
x: ['Right', 'Left'],
y: ['Down', 'Up']
};
DragEngine.treshold = {
start: 25,
swipe: 15
};
DragEngine.state = {
inactive: 0,
watching: 1,
dragging: 2
};
return DragEngine;
}());
export default DragEngine;
<|start_filename|>assets/js/mmenu-js/_modules/dragevents/dragevents.js<|end_filename|>
import * as support from './_support';
import * as options from './_defaults';
import * as settings from './_settings';
import { percentage2number } from './_helpers';
import { extend } from '../helpers';
var DragEvents = /** @class */ (function () {
/**
* Create the gestures.
* @param {HTMLElement} surface The surface for the gesture.
* @param {object} area Restriction where on the surface the gesture can be started.
* @param {object} treshold Treshold for the gestures.
*/
function DragEvents(surface, area, treshold) {
this.surface = surface;
this.area = extend(area, options.area);
this.treshold = extend(treshold, options.treshold);
// Set the mouse/touch events.
this.surface.addEventListener(support.touch ? 'touchstart' : 'mousedown', this.start.bind(this));
this.surface.addEventListener(support.touch ? 'touchend' : 'mouseup', this.stop.bind(this));
this.surface.addEventListener(support.touch ? 'touchleave' : 'mouseleave', this.stop.bind(this));
this.surface.addEventListener(support.touch ? 'touchmove' : 'mousemove', this.move.bind(this));
}
/**
* Starting the touch gesture.
*/
DragEvents.prototype.start = function (event) {
/** The widht of the surface. */
var width = this.surface.clientWidth;
/** The height of the surface. */
var height = this.surface.clientHeight;
// Check if the gesture started below the area.top.
var top = percentage2number(this.area.top, height);
if (typeof top == 'number') {
if (event.pageY < top) {
return;
}
}
// Check if the gesture started before the area.right.
var right = percentage2number(this.area.right, width);
if (typeof right == 'number') {
right = width - right;
if (event.pageX > right) {
return;
}
}
// Check if the gesture started above the area.bottom.
var bottom = percentage2number(this.area.bottom, height);
if (typeof bottom == 'number') {
bottom = height - bottom;
if (event.pageY > bottom) {
return;
}
}
// Check if the gesture started after the area.left.
var left = percentage2number(this.area.left, width);
if (typeof left == 'number') {
if (event.pageX < left) {
return;
}
}
// Store the start x- and y-position.
this.startPosition = {
x: event.pageX,
y: event.pageY
};
// Set the state of the gesture to "watching".
this.state = settings.state.watching;
};
/**
* Stopping the touch gesture.
*/
DragEvents.prototype.stop = function (event) {
// Dispatch the "dragEnd" events.
if (this.state == settings.state.dragging) {
/** The event information. */
var detail = this._eventDetail();
/** The direction. */
var dragDirection = this._dragDirection();
this._dispatchEvents('drag*End', detail, dragDirection);
// Dispatch the "swipe" events.
if (Math.abs(this.movement[this.axis]) > this.treshold.swipe) {
/** The direction. */
var swipeDirection = this._swipeDirection();
this._dispatchEvents('swipe*', detail, swipeDirection);
}
}
// Set the state of the gesture to "inactive".
this.state = settings.state.inactive;
};
/**
* Doing the touch gesture.
*/
DragEvents.prototype.move = function (event) {
switch (this.state) {
case settings.state.watching:
case settings.state.dragging:
this.movement = {
x: event.movementX,
y: event.movementY
};
this.distance = {
x: event.pageX - this.startPosition.x,
y: event.pageY - this.startPosition.y
};
this.axis =
Math.abs(this.distance.x) > Math.abs(this.distance.y)
? 'x'
: 'y';
/** The event information. */
var detail = this._eventDetail();
/** The direction. */
var dragDirection = this._dragDirection();
// Watching for the gesture to go past the treshold.
if (this.state == settings.state.watching) {
if (Math.abs(this.distance[this.axis]) > this.treshold.start) {
this._dispatchEvents('drag*Start', detail, dragDirection);
// Set the state of the gesture to "inactive".
this.state = settings.state.dragging;
}
}
// Dispatch the "drag" events.
if (this.state == settings.state.dragging) {
this._dispatchEvents('drag*Move', detail, dragDirection);
}
break;
}
};
/**
* Get the event details.
* @return {bject} The event details.
*/
DragEvents.prototype._eventDetail = function () {
return {
movementX: this.movement.x,
movementY: this.movement.y,
distanceX: this.distance.x - (this.axis == 'x' ? this.treshold.start : 0),
distanceY: this.distance.y - (this.axis == 'y' ? this.treshold.start : 0)
};
};
/**
* Dispatch the events
* @param {string} eventName The name for the events to dispatch.
* @param {object} detail The event details.
* @param {string} dir The direction of the gesture.
*/
DragEvents.prototype._dispatchEvents = function (eventName, detail, dir) {
/** General event, e.g. "drag" */
var event = new CustomEvent(eventName.replace('*', ''), { detail: detail });
this.surface.dispatchEvent(event);
/** Axis event, e.g. "dragX" */
var axis = new CustomEvent(eventName.replace('*', this.axis.toUpperCase()), { detail: detail });
this.surface.dispatchEvent(axis);
/** Direction event, e.g. "dragLeft" */
var direction = new CustomEvent(eventName.replace('*', dir), {
detail: detail
});
this.surface.dispatchEvent(direction);
};
DragEvents.prototype._dragDirection = function () {
return settings.directionNames[this.axis][this.distance[this.axis] > 0 ? 0 : 1];
};
DragEvents.prototype._swipeDirection = function () {
return settings.directionNames[this.axis][this.movement[this.axis] > 0 ? 0 : 1];
};
return DragEvents;
}());
export default DragEvents;
| S035779/golang_demo_app |
<|start_filename|>docs/source/_static/js/stem-tracks.js<|end_filename|>
// Available arguments can be found in the following pages:
// https://github.com/naomiaro/waveform-playlist/#playlist-options
// https://github.com/naomiaro/waveform-playlist/#track-options
var playlistOptions = {
  samplesPerPixel: 5000,
  waveHeight: 100,
  container: document.getElementById("collage-results"),
  timescale: true,
  state: 'cursor',
  colors: {
    waveOutlineColor: '#E0EFF1'
  },
  controls: {
    show: true, // whether or not to include the track controls
    width: 180 // width of controls in pixels
  },
  zoomLevels: [1000, 3000, 5000],
  exclSolo: true,
  isAutomaticScroll: true
};
var playlist = WaveformPlaylist.init(playlistOptions);

// Demo stems: the original mix plus the separated vocal tracks.
var stemTracks = [
  {
    "src": "../_audio/collage.mp3",
    "name": "Original Song",
    "gain": 0.5
  },
  {
    "src": "../_audio/collage_vocal_contour.mp3",
    "name": "Vocal Contour",
    "gain": 1
  },
  {
    "src": "../_audio/collage_vocal.mp3",
    "name": "Vocal",
    "gain": 1
  }
];
playlist.load(stemTracks);
<|start_filename|>Dockerfile<|end_filename|>
FROM tensorflow/tensorflow:2.5.0-gpu

WORKDIR /tmp

# Install system deps in a single layer and drop the apt lists so stale
# package indexes are not baked into the image.
RUN apt-get update && \
    apt-get install --assume-yes libsndfile1 libgl1-mesa-glx ffmpeg vim fluidsynth && \
    rm -rf /var/lib/apt/lists/*

COPY omnizart ./omnizart
COPY scripts ./scripts
COPY pyproject.toml ./
COPY poetry.lock ./
COPY README.md ./
COPY Makefile ./

RUN scripts/install.sh

# Upgrade this for avoiding mysterious import module not found 'keyrings'
RUN pip install --upgrade keyrings.alt

WORKDIR /home

# NOTE(review): this hard-codes the base image's Python version -- confirm
# the dist-packages path still matches when the tensorflow tag is bumped.
RUN mv /tmp/omnizart /usr/local/lib/python3.6/dist-packages

# Clean the build context leftovers but keep /tmp itself: the previous
# `rm -rf /tmp` removed the system temp directory (and its sticky bit),
# breaking any program that creates temp files at runtime.
RUN rm -rf /tmp/*

COPY README.md ./
CMD ["omnizart"]
<|start_filename|>docs/source/_static/css/custom.css<|end_filename|>
/* Embedded audio players: half the content width on desktop, on their own line. */
audio {
width: 50%;
display: block;
}
/* Embedded frames (video/demo): full width with a fixed default height. */
iframe {
width: 100%;
height: 316px;
}
/* Narrow screens: audio spans the full width, frames get shorter. */
@media (max-width: 600px) {
audio {
width: 100%;
display: block;
}
iframe { height: 200px; }
}
/* Wide screens: cap frame width so it does not stretch indefinitely. */
@media (min-width: 1200px) {
iframe { width: 800px; }
}
img {
width: 100%;
} | nicolasanjoran/omnizart |
<|start_filename|>.eslintrc.js<|end_filename|>
// ESLint configuration built on the Airbnb base ruleset.
module.exports = {
"extends": "airbnb-base",
// Code runs both in the browser and under Node (build tooling).
"env": {
"browser": true,
"node": true
},
"rules": {
// Always require semicolons.
"semi": [2, "always"],
// Forbid a trailing newline at end of file.
"eol-last": [2, "never"],
// Allow reassigning function parameters.
"no-param-reassign": [0],
// Allow variable shadowing.
"no-shadow": [0],
// Importing devDependencies is fine in a tooling project.
"import/no-extraneous-dependencies": ["error", {
"devDependencies": true,
"optionalDependencies": false,
"peerDependencies": false
}]
}
};
<|start_filename|>vscripts/HeroSelection.lua<|end_filename|>
--[[
Hero selection module for D2E.
This file basically just separates the functions related to hero selection from
the other functions present in D2E.
]]
--Constant parameters
--Maximum length of the picking phase, in seconds.
SELECTION_DURATION_LIMIT = 60
--Class definition (guarded so a script_reload does not wipe existing state)
if HeroSelection == nil then
HeroSelection = {}
HeroSelection.__index = HeroSelection
end
--[[
Start
Call this function from your gamemode once the gamestate changes
to pre-game to start the hero selection.
]]
function HeroSelection:Start()
	--Figure out which players have to pick
	HeroSelection.playerPicks = {}
	HeroSelection.numPickers = 0
	for pID = 0, DOTA_MAX_PLAYERS - 1 do
		if PlayerResource:IsValidPlayer( pID ) then
			--Consistency fix: reference the module table directly (was
			--`self.numPickers`); every other access in this module uses
			--HeroSelection, and this also keeps Start working if it is
			--ever invoked with dot syntax.
			HeroSelection.numPickers = HeroSelection.numPickers + 1
		end
	end
	--Start the pick timer (Tick reschedules itself by returning 1)
	HeroSelection.TimeLeft = SELECTION_DURATION_LIMIT
	Timers:CreateTimer( 0.04, HeroSelection.Tick )
	--Keep track of the number of players that have picked
	HeroSelection.playersPicked = 0
	--Listen for the pick event sent by the client UI
	HeroSelection.listener = CustomGameEventManager:RegisterListener( "hero_selected", HeroSelection.HeroSelect )
end
--[[
Tick
A tick of the pick timer.
Params:
- event {table} - A table containing PlayerID and HeroID.
]]
--One second of the pick timer: broadcast the remaining time, count down,
--and end the picking phase when the counter crosses zero.
--Returning 1 reschedules the timer for one more second; nil stops it.
function HeroSelection:Tick()
	--Send a time update to all clients while the timer is still valid
	if HeroSelection.TimeLeft >= 0 then
		CustomGameEventManager:Send_ServerToAllClients( "picking_time_update", {time = HeroSelection.TimeLeft} )
	end
	--Tick away a second of time
	HeroSelection.TimeLeft = HeroSelection.TimeLeft - 1
	if HeroSelection.TimeLeft == -1 then
		--Timer just expired: end the picking phase and stop ticking
		HeroSelection:EndPicking()
		return nil
	end
	if HeroSelection.TimeLeft >= 0 then
		return 1
	end
	return nil
end
--[[
HeroSelect
A player has selected a hero. This function is caled by the CustomGameEventManager
once a 'hero_selected' event was seen.
Params:
- event {table} - A table containing PlayerID and HeroID.
]]
--A player has selected a hero. Called by the CustomGameEventManager
--when a 'hero_selected' event arrives ('event' holds PlayerID/HeroName).
function HeroSelection:HeroSelect( event )
	--If this player has not picked yet, record the pick
	if HeroSelection.playerPicks[ event.PlayerID ] == nil then
		HeroSelection.playersPicked = HeroSelection.playersPicked + 1
		HeroSelection.playerPicks[ event.PlayerID ] = event.HeroName
		--Send a pick event to all clients
		CustomGameEventManager:Send_ServerToAllClients( "picking_player_pick",
			{ PlayerID = event.PlayerID, HeroName = event.HeroName} )
		--Picking already ended (late pick): assign the hero immediately
		if HeroSelection.TimeLeft <= 0 then
			HeroSelection:AssignHero( event.PlayerID, event.HeroName )
		end
	end
	--If everyone has picked while the timer is still running, end the
	--phase early. The `TimeLeft > 0` guard fixes a bug: without it, a
	--late pick arriving after EndPicking already ran would force another
	--Tick -> EndPicking cycle and re-assign every player's hero.
	if HeroSelection.playersPicked >= HeroSelection.numPickers and HeroSelection.TimeLeft > 0 then
		--End picking
		HeroSelection.TimeLeft = 0
		HeroSelection:Tick()
	end
end
--[[
EndPicking
The final function of hero selection which is called once the selection is done.
This function spawns the heroes for the players and signals the picking screen
to disappear.
]]
--Finish the selection phase: spawn every recorded pick and hide the UI.
function HeroSelection:EndPicking()
	--Stop listening to pick events
	--CustomGameEventManager:UnregisterListener( self.listener )
	--Hand every recorded pick over to AssignHero
	for playerID, heroName in pairs( HeroSelection.playerPicks ) do
		HeroSelection:AssignHero( playerID, heroName )
	end
	--Tell all clients to hide the picking screen
	CustomGameEventManager:Send_ServerToAllClients( "picking_done", {} )
end
--[[
AssignHero
Assign a hero to the player. Replaces the current hero of the player
with the selected hero, after it has finished precaching.
Params:
- player {integer} - The playerID of the player to assign to.
- hero {string} - The unit name of the hero to assign (e.g. 'npc_dota_hero_rubick')
]]
--Replace the player's current hero with 'hero' (unit name string),
--waiting for the asynchronous precache to finish first.
function HeroSelection:AssignHero( player, hero )
	local onPrecached = function()
		PlayerResource:ReplaceHeroWith( player, hero, 0, 0 )
	end
	PrecacheUnitByNameAsync( hero, onPrecached, player )
end
<|start_filename|>panorama/scripts/custom_game/hero_selection.js<|end_filename|>
"use strict";
/* This file contains the scripts associated with hero_selection.xml.
* This UI element provides a custom hero selection screen.
*
* By: Perry
* Date: July 2015 */
//Define variables
var playerPanels = {}; // playerID -> picking panel, filled by LoadPlayers()
var canEnter = false; // set true once the server broadcasts "picking_done"
//Subscribe to server events
GameEvents.Subscribe( "picking_done", OnPickingDone );
GameEvents.Subscribe( "picking_time_update", OnTimeUpdate );
GameEvents.Subscribe( "picking_player_pick", OnPlayerPicked );
/* Event Handlers
=========================================================================*/
/* Picking phase ended on the server: unlock the "Enter Game" button. */
function OnPickingDone( data ) {
  var enterButton = $("#EnterGameBtn");
  enterButton.RemoveClass( "disabled" );
  $("#EnterGameBtnTxt").text = "Enter Game";
  canEnter = true;
}
/* Server tick: mirror the remaining picking time in the timer label. */
function OnTimeUpdate( data ) {
  var timerLabel = $("#TimerTxt");
  timerLabel.text = data.time;
}
/* A player locked in a hero: forward to the shared pick handler. */
function OnPlayerPicked( data ) {
  var playerID = data.PlayerID;
  var heroName = data.HeroName;
  PlayerPicked( playerID, heroName );
}
/* Functionality
=========================================================================*/
/* Add an empty element for each player in the game (steam avatar plus
 * space for the hero portrait), radiant on the left, dire on the right. */
function LoadPlayers() {
  //Get the players for both teams
  var radiantPlayers = Game.GetPlayerIDsOnTeam( DOTATeam_t.DOTA_TEAM_GOODGUYS );
  var direPlayers = Game.GetPlayerIDsOnTeam( DOTATeam_t.DOTA_TEAM_BADGUYS );

  // Spawn a panel in the given column and remember it by player id.
  function addPanel( player, container, isRightSide ) {
    var panel = Modular.Spawn( "picking_player", container );
    panel.SetPlayer( player );
    if ( isRightSide ) {
      panel.SetIsRight( true );
    }
    playerPanels[player] = panel;
  }

  $.Each( radiantPlayers, function( player ) {
    addPanel( player, $("#LeftPlayers"), false );
  });
  $.Each( direPlayers, function( player ) {
    addPanel( player, $("#RightPlayers"), true );
  });
}
/* Handle a confirmed pick: show the hero on the owner's panel, mark the
 * hero button as taken for everyone, and switch the local player to the
 * hero preview screen when the pick was their own. */
function PlayerPicked( player, hero ) {
  playerPanels[player].SetHero( hero );
  // Hero buttons in the layout use the hero name as their panel id.
  $('#'+hero).AddClass( "taken" );
  if ( player == Players.GetLocalPlayer() ) {
    SwitchToHeroPreview( hero );
  }
}
/* Switch the content of the screen to show the picked hero instead of the
 * pickable heroes. */
function SwitchToHeroPreview( heroName ) {
// Build a scene panel for the picked hero from an inline layout string
// (Panorama has no parameterized layout loading for scene units).
var previewPanel = $.CreatePanel("Panel", $('#PostPickScreen'), "HeroPreview");
previewPanel.BLoadLayoutFromString('<root><Panel><DOTAScenePanel style="width: 600px; height: 600px; opacity-mask: url(\'s2r://panorama/images/masks/softedge_box_png.vtex\');" unit="'+heroName+'"/></Panel></root>', false, false );
// Keep the preview above the "Enter Game" button in the layout order.
$('#PostPickScreen').MoveChildBefore( previewPanel, $("#EnterGameBtn") );
// Hide the pick grid, reveal the post-pick screen.
$('#PickList').style.visibility = 'collapse';
$('#PostPickScreen').style.visibility = 'visible';
}
/* Called from the layout when a hero button is clicked: forward the
 * pick to the server for validation and broadcast. */
function SelectHero( heroName ) {
  var pick = { HeroName: heroName };
  GameEvents.SendCustomGameEventToServer( "hero_selected", pick );
}
/* Layout button handler: tear down the picking screen, but only once the
 * server has flagged picking as done (canEnter). */
function EnterGame() {
  if ( !canEnter ) {
    return;
  }
  $('#PickingScreen').DeleteAsync( 0.0 );
}
/* Initialisation - runs when the element is created
=========================================================================*/
(function () {
//Set panel visibility
$('#PickList').style.visibility = 'visible';
$('#PostPickScreen').style.visibility = 'collapse';
///Load player elements
LoadPlayers();
})(); | Perryvw/CustomHeroSelection |
<|start_filename|>Sumo-Logic-Tools/SumoAlerts-To-AWS-SNS/payload.json<|end_filename|>
{
"Subject" : "Sumo Logic Alert: {{NumRawResults}} results for {{SearchName}}",
"Email_Message" : "Sumo Logic has sent an alert for {{SearchName}}. \nSearch URL: {{SearchQueryUrl}}\nDescription: {{SearchDescription}}\nQuery: {{SearchQuery}}\nTime Range: {{TimeRange}}\nFire Time: {{FireTime}}\n\nNumber of Results: {{NumRawResults}}\nAggregate Results: {{AggregateResultsJson}}",
"SMS_Message" : "Sumo alert for {{SearchName}} returned {{NumRawResults}} results"
} | lghakamo-paf/sumologic-content |
<|start_filename|>docker/Dockerfile<|end_filename|>
# This Dockerfile contains two images, `builder` and `runtime`.
# `builder` contains all necessary code to build
# `runtime` is stripped down.
ARG ARCH=
ARG BUILD_DATE
ARG TAG
FROM ${ARCH}debian:bullseye-slim as builder
WORKDIR /
# This is a temporary workaround, see https://github.com/cowrie/docker-cowrie/issues/26
ENV CRYPTOGRAPHY_DONT_BUILD_RUST=1
ENV COWRIE_GROUP=cowrie \
    COWRIE_USER=cowrie \
    COWRIE_HOME=/cowrie
# Set locale to UTF-8, otherwise upstream libraries have bytes/string conversion issues
ENV LC_ALL=en_US.UTF-8 \
    LANG=en_US.UTF-8 \
    LANGUAGE=en_US.UTF-8
RUN groupadd -r ${COWRIE_GROUP} && \
    useradd -r -d ${COWRIE_HOME} -m -g ${COWRIE_GROUP} ${COWRIE_USER}
# Set up Debian prereqs
RUN export DEBIAN_FRONTEND=noninteractive; \
    apt-get update && \
    apt-get install -y \
        -o APT::Install-Suggests=false \
        -o APT::Install-Recommends=false \
      python3-pip \
      ca-certificates \
      libffi-dev \
      libssl-dev \
      python3-dev \
      python3-venv \
      python3 \
      rustc \
      cargo \
      git \
      build-essential \
      python3-virtualenv \
      libsnappy-dev && \
    rm -rf /var/lib/apt/lists/*
USER ${COWRIE_USER}
WORKDIR ${COWRIE_HOME}
# Copy requirements first to use Docker caching better
RUN mkdir -p ${COWRIE_HOME}/cowrie-git
# Fix: COPY with multiple sources requires a destination that is a
# directory AND ends with '/'; without the trailing slash the build fails.
COPY --chown=${COWRIE_USER}:${COWRIE_GROUP} requirements.txt requirements-output.txt ${COWRIE_HOME}/cowrie-git/
RUN python3 -m venv cowrie-env && \
    . cowrie-env/bin/activate && \
    pip install --no-cache-dir --upgrade pip wheel setuptools && \
    pip install --no-cache-dir --upgrade cffi && \
    pip install --no-cache-dir --upgrade -r ${COWRIE_HOME}/cowrie-git/requirements.txt && \
    pip install --no-cache-dir --upgrade -r ${COWRIE_HOME}/cowrie-git/requirements-output.txt
COPY --chown=${COWRIE_USER}:${COWRIE_GROUP} . ${COWRIE_HOME}/cowrie-git
# Runtime stage: minimal distroless Python image (no shell, no package
# manager) with the venv and sources copied in from the builder stage.
FROM gcr.io/distroless/python3-debian11 AS runtime
#FROM gcr.io/distroless/python3-debian11:debug AS runtime
LABEL org.opencontainers.image.created="${BUILD_DATE}"
LABEL org.opencontainers.image.authors="<NAME> <<EMAIL>>"
LABEL org.opencontainers.image.url="https://cowrie.org/"
LABEL org.opencontainers.image.documentation="https://cowrie.readthedocs.io"
LABEL org.opencontainers.image.source="https://github.com/cowrie/docker-cowrie"
LABEL org.opencontainers.image.version="${TAG}"
LABEL org.opencontainers.image.revision="Source control revision identifier for the packaged software."
LABEL org.opencontainers.image.vendor="Cowrie"
LABEL org.opencontainers.image.licenses="BSD-3-Clause"
LABEL org.opencontainers.image.ref.name="${TAG}"
LABEL org.opencontainers.image.title="Cowrie SSH/Telnet Honeypot"
LABEL org.opencontainers.image.description="Cowrie SSH/Telnet Honeypot"
#LABEL org.opencontainers.image.base.digest="7beb0248fd81"
LABEL org.opencontainers.image.base.name="gcr.io/distroless/python3-debian11"
ENV COWRIE_GROUP=cowrie \
COWRIE_USER=cowrie \
COWRIE_HOME=/cowrie
# Users/groups cannot be created here (no useradd in distroless); instead
# the builder's /etc/passwd is copied below so COWRIE_USER resolves.
#RUN groupadd -r ${COWRIE_GROUP} && \
# useradd -r -d ${COWRIE_HOME} -m -g ${COWRIE_GROUP} ${COWRIE_USER}
COPY --from=builder --chown=0:0 /etc/passwd /etc/passwd
#RUN export DEBIAN_FRONTEND=noninteractive; \
# apt-get update && \
# apt-get install -y \
# -o APT::Install-Suggests=false \
# -o APT::Install-Recommends=false \
# libssl1.1 \
# ca-certificates \
# libffi7 \
# procps \
# python3 \
# python3-distutils && \
# rm -rf /var/lib/apt/lists/* && \
# ln -s /usr/bin/python3 /usr/local/bin/python
COPY --from=builder --chown=${COWRIE_USER}:${COWRIE_GROUP} ${COWRIE_HOME} ${COWRIE_HOME}
# Precompile to .pyc for faster startup.
# NOTE(review): distroless images ship no /bin/sh, and shell-form RUN
# needs one -- confirm this step actually builds, or switch to exec form
# / move the compileall into the builder stage.
RUN python3 -m compileall ${COWRIE_HOME} /usr/lib/python3.9
VOLUME [ "/cowrie/cowrie-git/var", "/cowrie/cowrie-git/etc" ]
USER ${COWRIE_USER}
WORKDIR ${COWRIE_HOME}/cowrie-git
ENV PATH=${COWRIE_HOME}/cowrie-env/bin:${PATH}
ENV PYTHONPATH=${COWRIE_HOME}/cowrie-git/src
ENV PYTHONUNBUFFERED=1
# Exec-form entrypoint/cmd (no shell needed): run twistd from the venv.
ENTRYPOINT [ "/cowrie/cowrie-env/bin/python3" ]
CMD [ "/cowrie/cowrie-env/bin/twistd", "-n", "--umask=0022", "--pidfile=", "cowrie" ]
EXPOSE 2222 2223
| micheloosterhof/cowrie-dev |
<|start_filename|>app/templates/gulp/tasks/_modernizr.js<|end_filename|>
'use strict';

var gulp = require('gulp-help')(require('gulp'));
var modernizr = require('gulp-modernizr');
var config = require('./../config.js');
var build = require('./../utils/buildHelper.js');

// Generate a lean, custom Modernizr build containing only the tests the
// project's scripts and styles actually reference.
gulp.task('modernizr', 'Create modernizr lean build', function () {
  var destination = build.isBuild() ? config.modernizr.destBuild : config.modernizr.dest;
  return gulp
    .src(config.modernizr.src)
    .pipe(modernizr(config.modernizr.cfg))
    .pipe(gulp.dest(destination));
});
<|start_filename|>app/templates/scripts/modules/_utils.js<|end_filename|>
/**
 * Log a greeting to the console; `name` defaults to the generated
 * project's slug (interpolated by the Yeoman template at scaffold time).
 */
export function sayHello(name = '<%= projectNameSlug %>') { // eslint-disable-line import/prefer-default-export
console.log(`${name} says hello!`); // eslint-disable-line no-console
}
<|start_filename|>app/templates/gulp/tasks/_copy.js<|end_filename|>
'use strict';

var gulp = require('gulp-help')(require('gulp'));
var config = require('./../config.js');

// Helper: stream `src` straight into `dest` with no transforms.
function copyStream(src, dest, opts) {
  return gulp.src(src, opts).pipe(gulp.dest(dest));
}

// Copy fonts to the dist folder.
gulp.task('fonts', 'Copy fonts to `dist/`', function () {
  return copyStream(config.copyFonts.src, config.copyFonts.dest);
});

// Copy icons to the dist folder.
gulp.task('icons', 'Copy icons to `dist/`', function () {
  return copyStream(config.copyIcons.src, config.copyIcons.dest);
});

// Copy the extra root files (favicon, robots.txt, ...) from app/.
gulp.task('extras', 'Copy extras in `app/` root to `dist/`', function () {
  return copyStream(config.copyExtras.src, config.copyExtras.dest, config.copyExtras.cfg);
});

// Umbrella task running all three copies.
gulp.task('copy', 'Copy fonts and extras to `dist/` folder', ['fonts', 'extras', 'icons']);
<|start_filename|>app/templates/gulp/tasks/_watch.js<|end_filename|>
'use strict';

var gulp = require('gulp-help')(require('gulp'));
var config = require('./../config.js');
var reload = require('./browserSync.js').reload;

// Rebuild on source changes; templates and scripts also trigger a full
// browser reload (styles are injected by BrowserSync itself).
gulp.task('watch', 'Watch source files', function () {
  var watchers = [
    [config.watch.styles, ['styles']],
    [config.watch.pug, ['templates', reload]],
    [config.watch.scripts, ['scripts', reload]]
  ];
  watchers.forEach(function (pair) {
    gulp.watch(pair[0], pair[1]);
  });
});
<|start_filename|>app/templates/gulp/tasks/_templates.js<|end_filename|>
'use strict';
<% if (includeMultiLanguage) { %>var path = require('path');<% } %>
var gulp = require('gulp-help')(require('gulp'));
var pug = require('gulp-pug');
var data = require('gulp-data');
var plumber = require('gulp-plumber');
var fs = require('fs');
var extend = require('gulp-extend');
// NOTE(review): gulpif/rev/revReplace/filter appear unused in this file
// (the rev pipeline lives in rev.js) -- possibly leftovers; verify before
// removing.
var gulpif = require('gulp-if');
var rev = require('gulp-rev');
var revReplace = require('gulp-rev-replace');
var filter = require('gulp-filter');
<% if (includeMultiLanguage) { %>var merge = require('merge-stream');<% } %>
<% if (includeDataYAML) { %>var yamlMerge = require('gulp-yaml-merge');
var yaml = require('js-yaml');<% } %>
var config = require('./../config.js');
var handleError = require('./../utils/handleError.js');
var build = require('./../utils/buildHelper.js');
// Compile pug to html. The view data merged by templates:prepareData is
// re-read on every run so watch-triggered rebuilds pick up fresh data.
// In multi-language mode one stream is built per language; the primary
// language goes to the site root, others to a /<lang>/ subfolder.
gulp.task('templates', 'Compile templates', ['templates:prepareData'], function() {
var dest = build.isBuild() ? config.templates.destBuild : config.templates.dest;
<% if (!includeMultiLanguage) { %>return gulp.src(config.templates.src)
.pipe(plumber(handleError))
.pipe(data(function() {
<% if (includeDataYAML) { %>return yaml.safeLoad(fs.readFileSync(config.templatesData.dataPath, 'utf8'));
<% } else { %> return JSON.parse(fs.readFileSync(config.templatesData.dataPath));<% } %>
}))
.pipe(pug(config.templates.cfg))
.pipe(gulp.dest(dest));
<% } else { %>
var languages = config.templates.languages.list.map(function(lang) {
return gulp.src(config.templates.src)
.pipe(plumber(handleError))
.pipe(data(function() {
<% if (includeDataYAML) { %>var json = yaml.safeLoad(fs.readFileSync(config.templatesData.dataPath, 'utf8'));
<% } else { %> var json = JSON.parse(fs.readFileSync(config.templatesData.dataPath));<% } %>
json.language = lang;
json.primaryLanguage = config.templates.languages.primary;
return json;
}))
.pipe(pug(config.templates.cfg))
.pipe((config.templates.languages.primary === lang) ? gulp.dest(dest) : gulp.dest(path.join(dest, lang)));
});
return merge(languages);<% } %>
});
// Concat *.json (or *.yaml) data files into a single data file consumed
// by the templates task above.
gulp.task('templates:prepareData', 'Merge views data', function() {
return gulp.src(config.templatesData.src)
<% if (includeDataYAML) { %>.pipe(yamlMerge(config.templatesData.dataName))
<% } else { %>.pipe(extend(config.templatesData.dataName))<% } %>
.pipe(gulp.dest(config.templatesData.dest));
});
<|start_filename|>app/templates/gulp/tasks/_images.js<|end_filename|>
'use strict';

var gulp = require('gulp-help')(require('gulp'));
var imagemin = require('gulp-imagemin');
var cache = require('gulp-cache');
var gulpif = require('gulp-if');
var config = require('./../config.js');

// Wipe gulp-cache's stored imagemin results.
gulp.task('clearCache', 'Clear Imagemin cache', function (done) {
  return cache.clearAll(done);
});

// SVGs are copied verbatim; imagemin only runs on raster formats.
gulp.task('copySvg', 'Copy SVGs to `dist/`', function () {
  return gulp.src(config.images.srcSVG).pipe(gulp.dest(config.images.dest));
});

// Optimize raster images (cached, so unchanged files are skipped) and
// copy them to dist. Optimization can be disabled via config.
gulp.task('images', 'Run Imagemin optimalizations and copy to `dist/`', ['copySvg'], function () {
  var optimizeStep = gulpif(config.optimizeImages, cache(imagemin(config.images.cfg)));
  return gulp.src(config.images.src)
    .pipe(optimizeStep)
    .pipe(gulp.dest(config.images.dest));
});
<|start_filename|>app/templates/gulp/tasks/_serve.js<|end_filename|>
'use strict';
var gulp = require('gulp-help')(require('gulp'));
var runSequence = require('run-sequence');
// Serve project with watching and livereload: compile everything first,
// then start BrowserSync and the file watchers.
gulp.task('serve', 'Serve project with livereload and file watching',function (cb) {
runSequence(
['styles', 'templates', 'scripts'],
<% if (includeModernizr) { %>'modernizr',<% } %>
'browser-sync',
'watch',
cb
);
});
// Serve the production build from dist/ (no watchers).
// NOTE(review): "Bulid" typo in the help text below -- fix upstream if desired.
gulp.task('serve:dist', 'Bulid preview', function (cb) {
runSequence(
'build',
'browser-sync:dist',
cb
);
});
<|start_filename|>app/templates/gulp/_config.js<|end_filename|>
'use strict';
var path = require('path');
var modRewrite = require('connect-modrewrite');
// Default settings
// NOTE(review): `process.env.X || true` can never yield a falsy value --
// an unset var gives `true` and any set string (even "false") is truthy,
// so these flags cannot actually be disabled via the environment.
module.exports.uglifyJs = process.env.UGLIFYJS || true; // to remove .min sufix edit template manually
module.exports.minifyCss = process.env.MINIFYCSS || true; // to remove .min sufix edit template manually
module.exports.cacheBust = process.env.CACHEBUST || true;
module.exports.optimizeImages = process.env.OPTIMIZEIMAGES || true;
module.exports.lintJs = process.env.LINTJS || true;
module.exports.sourceMaps = process.env.SOURCEMAPS || true;
// Default paths
var app = 'app';
var tmp = '.tmp';
var dist = 'dist';
var nodeDir = 'node_modules';
// Default paths in app folder
var data = 'data';
var fonts = 'fonts';
var icons = 'icons';
var images = 'images';
var scripts = 'scripts';
var styles = 'styles';
var views = 'views';
<% if (includeMultiLanguage) { -%>
// Languages generated by the templates task; `primary` goes to the root.
var languages = {
list: ['en', 'de', 'sk'],
primary: 'en'
};
<% } -%>
// Rewrite rules enables removing .html extensions in development.
// This are possible routes for same test.html file:
// http://localhost:3000/test.html
// http://localhost:3000/test
var rewriteRules = [
'^/$ - [L]', // default site root handling (index.html)
'.html$ - [L]', // ignore routes ends with '.html'
'(.*)/$ $1/index.html [L]', // routes with trailing slash are directories -> rewrite to directory index.html
'\\/\[a-zA-Z0-9_\\-\@.]+\\.\[a-zA-Z0-9]+$ - [L]', // ignore files with extension (eg. .css, .js, ...)
'(.*)$ $1.html [L]' // redirect routes ends with string without trailing slash to original html
];
// Browser sync task config: dev serves .tmp + app (with node_modules
// routed in), dist serves the built output; both use the rewrite rules.
module.exports.browserSync = {
dev: {
server: {
baseDir: [tmp, app],
routes: {
'/node_modules': nodeDir
}
},
notify: false,
debugInfo: false,
host: 'localhost',
middleware: [
modRewrite(rewriteRules)
]
},
dist: {
server: {
baseDir: dist
},
notify: false,
debugInfo: false,
host: 'localhost',
middleware: [
modRewrite(rewriteRules)
]
}
};
// Build size task config: gzip-aware size reports per asset class.
module.exports.buildSize = {
srcAll: dist + '/**/*',
cfgAll: {
title: 'build',
gzip: true
},
srcCss: path.join(dist, styles, '/**/*'),
cfgCss: {
title: 'CSS',
gzip: true
},
srcJs: path.join(dist, scripts, '/**/*'),
cfgJs: {
title: 'JS',
gzip: true
},
srcImages: path.join(dist, images, '/**/*'),
cfgImages: {
title: 'Images',
gzip: false
}
};
// Clean task config
// Be carefull what you cleaning!
module.exports.clean = [tmp, dist];
// Copy fonts task config (includes Bootstrap's fonts when selected)
module.exports.copyFonts = {
src: [
path.join(app, fonts, '**/*')<% if (includeBootstrap) { %>, 'node_modules/bootstrap-sass/assets/fonts/**/*'<% } %>
],
dest: path.join(dist, fonts)
};
// Copy icons task config
module.exports.copyIcons = {
src: path.join(app, icons, '**/*'),
dest: dist + '/icons'
};
// Copy extras task config: root files except HTML; `dot` picks up
// dotfiles like .htaccess.
module.exports.copyExtras = {
src: [
app + '/*.*',
'!' + app + '/*.html'
],
dest: dist,
cfg: {
dot: true
}
};
// Deploy task config
// FTP settings are in .env file
module.exports.deploy = {
src: dist + '/**',
dev: {
root: dist,
hostname: process.env.FTP_DEV_HOSTNAME,
username: process.env.FTP_DEV_USER,
destination: process.env.FTP_DEV_DEST
},
dist: {
root: dist,
hostname: process.env.FTP_DIST_HOSTNAME,
username: process.env.FTP_DIST_USER,
destination: process.env.FTP_DIST_DEST
}
};
// Images task config: raster formats go through imagemin, SVGs are
// copied as-is by the copySvg task.
module.exports.images = {
src: path.join(app, images, '**/*.{gif,png,jpg}'),
srcSVG: path.join(app, images, '**/*.svg'),
dest: dist + '/images',
cfg: {
progressive: true,
interlaced: true,
svgoPlugins: [{cleanupIDs: false}]
}
};
// JSHint task config
module.exports.eslint = {
src: [
path.join(app, scripts,'**/*.js'),
path.join('!' + app, scripts,'plugins/**/*.js') // do not lint external plugins
]
};
<% if (includeModernizr) { -%>
// Modernizr task config: scan scripts and compiled CSS for used feature
// tests and emit a lean custom build into scripts/plugins.
module.exports.modernizr = {
src: [
path.join(app, scripts,'**/*.js'),
path.join(tmp, styles,'*.css')
],
dest: path.join(tmp, scripts, 'plugins'),
destBuild: path.join(dist, scripts, 'plugins'),
cfg: {
silent: true,
options: [
'setClasses',
'addTest',
'html5printshiv',
'testProp',
'fnBind'
],
"excludeTests": [
'hidden'
],
}
};
<% } -%>
// Cachebusting task config: hash CSS/JS filenames and rewrite the
// references inside the built HTML via rev-manifest.json.
module.exports.rev = {
srcFiles: [
path.join(dist, '**/*.css'),
path.join(dist, '**/*.js'),
],
srcHtml: path.join(dist, '**/*.html'),
manifestPath: path.join(dist, 'rev-manifest.json'),
dest: path.join(dist),
}
// User scripts task: main.js is the single rollup entry point.
module.exports.scripts = {
src: path.join(app, scripts, 'main.js'),
dest: path.join(tmp, scripts),
rollupCfg: {
format: 'iife',
moduleName: '<%= projectNameSlug %>',
},
destBuild: path.join(dist, scripts)
};
// Styles task config: Sass with node_modules on the include path so
// framework imports resolve.
module.exports.styles = {
src: path.join(app, styles, '*.scss'),
dest: path.join(tmp,styles),
destBuild: path.join(dist, styles),
sassCfg: {
includePaths: 'node_modules',
outputStyle: 'expanded'
},
autoprefixerCfg: {
browsers: ['last 2 version']
}
};
// Templates task config (pug compilation)
module.exports.templates = {
<% if (includeMultiLanguage) { %>languages: languages,<% } %>
src: path.join(app, views, '*.pug'),
dest: tmp,
destBuild: path.join(dist),
cfg: {
pretty: true,
compileDebug: true
}
};
// TemplatesData task config: per-view data files merged into one file
// that the templates task reads on each run.
module.exports.templatesData = {
src: path.join(app, views, data, '/**/*.<% if (includeDataYAML) { %>yaml<% } else { %>json<% } %>'),
dest: path.join(tmp, '/data'),
dataName: 'data.<% if (includeDataYAML) { %>yaml<% } else { %>json<% } %>',
dataPath: path.join(tmp, 'data/data.<% if (includeDataYAML) { %>yaml<% } else { %>json<% } %>')
};
// Watch task config: glob patterns observed by the watch task.
module.exports.watch = {
styles: path.join(app, styles, '/**/*.scss'),
pug: [
path.join(app, views, '/**/*.pug'),
path.join(app, views, data, '/**/*.<% if (includeDataYAML) { %>yaml<% } else { %>json<% } %>')
],
scripts: path.join(app, scripts, '/**/*.js')
};
<|start_filename|>app/templates/gulp/tasks/_browserSync.js<|end_filename|>
'use strict';
var gulp = require('gulp-help')(require('gulp'));
var browserSync = require('browser-sync');
var config = require('./../config.js');
// Dev server: serves `.tmp` + `app` with livereload (hidden from help).
gulp.task('browser-sync', false, function() {
browserSync(config.browserSync.dev);
});
// Preview server: serves the built `dist` folder (hidden from help).
gulp.task('browser-sync:dist', false, function() {
browserSync(config.browserSync.dist);
});
// Re-export BrowserSync's reload so watchers can trigger it as a task.
module.exports.reload = browserSync.reload;
<|start_filename|>app/templates/gulp/tasks/_scripts.js<|end_filename|>
'use strict';
var path = require('path');
var gulp = require('gulp-help')(require('gulp'));
var gulpif = require('gulp-if');
var eslint = require('gulp-eslint');
var rollupStream = require('rollup-stream');
var rollup = require('rollup');
var babel = require('rollup-plugin-babel');
var nodeResolve = require('rollup-plugin-node-resolve');
var commonjs = require('rollup-plugin-commonjs');
var replace = require('rollup-plugin-replace');
var uglify = require('rollup-plugin-uglify');
var source = require('vinyl-source-stream')
var buffer = require('vinyl-buffer');
var sourcemaps = require('gulp-sourcemaps');
var config = require('./../config.js');
var handleError = require('./../utils/handleError.js');
var build = require('./../utils/buildHelper.js');
// Lint .js files; becomes a no-op when config.lintJs is falsy.
gulp.task('lintjs', 'Lint js files', function () {
if (config.lintJs) {
return gulp.src(config.eslint.src)
.pipe(eslint({ignore: false})) // temp hack with ignore
.pipe(eslint.format())
.pipe(eslint.failAfterError())
.on('error', handleError);
} else {
return;
}
});
// NOTE(review): `cache` is written in the 'bundle' handler below but is
// never fed back into rollupCfg.cache, so incremental rebuilds do not
// actually benefit from it -- confirm intent.
var cache;
// Bundle main.js with rollup + babel. Source maps only in dev; uglify
// and NODE_ENV=production replacement only in build mode. The rollup
// config object from config.js is mutated in place before each run.
gulp.task('scripts', 'Compile ES6 to ES5', ['lintjs'],function () {
var dest = build.isBuild() ? config.scripts.destBuild : config.scripts.dest;
config.scripts.rollupCfg.entry = config.scripts.src;
config.scripts.rollupCfg.rollup = rollup;
config.scripts.rollupCfg.sourceMap = config.sourceMaps && !build.isBuild();
config.scripts.rollupCfg.plugins = [
nodeResolve({
jsnext: true,
browser: true,
}),
commonjs({
include: 'node_modules/**',
}),
babel({
exclude: 'node_modules/**',
}),
replace({
'process.env.NODE_ENV': JSON.stringify(build.isBuild() ? 'production' : 'development' )
}),
build.isBuild() ? uglify() : function() {},
];
return rollupStream(config.scripts.rollupCfg)
.on('bundle', function(bundle) {
cache = bundle;
})
.on('error', handleError)
.pipe(source(path.basename(config.scripts.rollupCfg.entry)))
.pipe(gulpif(config.sourceMaps && !build.isBuild(), buffer()))
.pipe(gulpif(config.sourceMaps && !build.isBuild(), sourcemaps.init({loadMaps: true})))
.pipe(gulpif(config.sourceMaps && !build.isBuild(), sourcemaps.write('.')))
.pipe(gulp.dest(dest));
});
<|start_filename|>app/templates/gulp/tasks/_rev.js<|end_filename|>
'use strict';

var gulp = require('gulp-help')(require('gulp'));
var rev = require('gulp-rev');
var revReplace = require('gulp-rev-replace');
var config = require('./../config.js');

// Step 1: fingerprint the CSS/JS files and emit rev-manifest.json
// mapping original paths to their hashed names.
gulp.task('rev:files', function () {
  return gulp.src(config.rev.srcFiles)
    .pipe(rev())
    .pipe(gulp.dest(config.rev.dest))
    .pipe(rev.manifest())
    .pipe(gulp.dest(config.rev.dest));
});

// Step 2: rewrite asset references inside the built HTML using the
// manifest produced by rev:files.
gulp.task('rev', 'Added hashes to files and rewrite file paths in HTML (Cache busting)', ['rev:files'], function () {
  var manifestStream = gulp.src(config.rev.manifestPath);
  return gulp.src(config.rev.srcHtml)
    .pipe(revReplace({ manifest: manifestStream }))
    .pipe(gulp.dest(config.rev.dest));
});
<|start_filename|>app/templates/gulp/tasks/_deploy.js<|end_filename|>
'use strict';

var gulp = require('gulp-help')(require('gulp'));
var rsync = require('gulp-rsync');
var plumber = require('gulp-plumber');
var config = require('./../config.js');
var handleError = require('./../utils/handleError.js');

// Push the built site via rsync/sftp; credentials live in `.env`.
// TODO plumber not working with this
function deployTo(target) {
  return gulp.src(config.deploy.src)
    .pipe(plumber(handleError))
    .pipe(rsync(target));
}

gulp.task('deploy', 'Deploy to development enviroment (specified in `.env`)', function () {
  return deployTo(config.deploy.dev);
});

gulp.task('deploy:prod', 'Deploy to production enviroment (specified in `.env`)', function () {
  return deployTo(config.deploy.dist);
});
<|start_filename|>test/generator-spec.js<|end_filename|>
const path = require('path');
const assert = require('yeoman-assert');
const helpers = require('yeoman-test');
const test = require('tape');
// Baseline generator answers shared by every test case below.
const defaultPrompt = {
  optIn: false,
  name: 'test of generator',
  features: [],
  includeMultiLanguage: false,
  dataFormat: 'yaml',
};
/* Assert every expectation: plain strings must exist as generated files,
 * [file, regex] pairs must exist and match the pattern. */
function testExpected(expected) {
  expected.forEach((entry) => {
    if (Array.isArray(entry)) {
      assert.fileContent(...entry);
    } else if (typeof entry === 'string') {
      assert.file(entry);
    }
  });
}
/* Run the generator in a sandbox with the given prompt answers and
 * resolve when scaffolding finishes. */
function runTest(promptAnswers) {
  const generatorPath = path.join(__dirname, '../app');
  return helpers
    .run(generatorPath)
    .withOptions({ skipInstall: true })
    .withPrompts(promptAnswers)
    .toPromise();
}
/* Log a generator failure, record it as a tape failure, end the test. */
function handleError(error, test) {
  console.log(error);
  test.error(error);
  test.end();
}
// Smoke test: default answers must scaffold the full project skeleton.
test('generator-lb:defaults', (t) => {
  const prompt = defaultPrompt;
  // Files that must exist, plus [file, regex] content expectations.
  const expected = [
    'package.json',
    ['package.json', /\"name\": \"test-of-generator\"/],
    ['package.json', /node-sass/],
    ['package.json', /gulp-sass/],
    'gulpfile.js',
    'gulp/tasks/browserSync.js',
    'gulp/tasks/clean.js',
    'gulp/tasks/default.js',
    'gulp/tasks/deploy.js',
    'gulp/tasks/images.js',
    'gulp/tasks/rev.js',
    'gulp/tasks/templates.js',
    'gulp/tasks/watch.js',
    'gulp/utils/buildHelper.js',
    'gulp/utils/handleError.js',
    'gulp/config.js',
    'gulp/tasks/build.js',
    'gulp/tasks/copy.js',
    'gulp/tasks/serve.js',
    'gulp/tasks/scripts.js',
    'gulp/tasks/styles.js',
    'readme.md',
    '.gitignore',
    '.gitattributes',
    '.env',
    '.eslintrc',
    '.eslintignore',
    '.editorconfig',
    'app/styles/main.scss',
    'app/views/index.pug',
    'app/views/layouts/_default.pug',
    'app/views/modules/_header.pug',
    'app/views/modules/_footer.pug',
    'app/views/data/index.yaml',
    'app/.htaccess',
    'app/favicon.ico',
    'app/robots.txt',
    'app/scripts/main.js'
  ];
  runTest(prompt)
    .then(() => {
      t.doesNotThrow(() => testExpected(expected), null, 'All files present');
      t.end();
    })
    .catch((err) => handleError(err, t));
});
// Bootstrap option must pull in bootstrap-sass + jQuery.
test('generator-lb:bootstrap', (t) => {
  // Copy defaultPrompt instead of mutating it: using the shared object as
  // the Object.assign TARGET leaked keys (e.g. feFramework) into the
  // prompts of every later test case, making tests order-dependent.
  const prompt = Object.assign({}, defaultPrompt, {
    features: ['includeFEFramework'],
    feFramework: 'includeBootstrap',
  });
  const expected = [
    ['package.json', /bootstrap-sass/],
    ['package.json', /\"jquery\":/],
    'app/scripts/external/jquery.js',
  ];
  runTest(prompt)
    .then(() => {
      t.doesNotThrow(() => testExpected(expected), null, 'bootstrap-sass present');
      t.end();
    })
    .catch((err) => handleError(err, t));
});
// Foundation option: expects foundation-sites, jQuery 2.x and the shim.
test('generator-lb:foundation', (t) => {
  // Merge into a fresh object — assigning onto the shared defaultPrompt
  // mutated it and leaked these answers into every later test.
  const prompt = Object.assign({}, defaultPrompt, {
    features: ['includeFEFramework'],
    feFramework: 'includeFoundation',
  });
  const expected = [
    ['package.json', /foundation-sites/],
    ['package.json', /\"jquery\": \"~2/],
    'app/scripts/external/jquery.js',
  ];
  runTest(prompt)
    .then(() => {
      t.doesNotThrow(() => testExpected(expected), null, 'foundation-sites present');
      t.end();
    })
    .catch((err) => handleError(err, t));
});
// Modernizr option: expects the gulp task and the gulp-modernizr dependency.
test('generator-lb:modernizr', (t) => {
  // Merge into a fresh object — assigning onto the shared defaultPrompt
  // mutated it and leaked these answers into every later test.
  const prompt = Object.assign({}, defaultPrompt, {
    features: ['includeModernizr']
  });
  const expected = [
    'gulp/tasks/modernizr.js',
    ['package.json', /gulp-modernizr/],
  ];
  runTest(prompt)
    .then(() => {
      t.doesNotThrow(() => testExpected(expected), null, 'modernizr present');
      t.end();
    })
    .catch((err) => handleError(err, t));
});
// jQuery 2.x option: expects the ~2 dependency range and the shim file.
test('generator-lb:jquery2', (t) => {
  // Merge into a fresh object — assigning onto the shared defaultPrompt
  // mutated it and leaked these answers into every later test.
  const prompt = Object.assign({}, defaultPrompt, {
    features: ['includejQuery'],
    jQuery: 'includejQuery2',
  });
  const expected = [
    ['package.json', /\"jquery\": \"~2/],
    'app/scripts/external/jquery.js',
  ];
  runTest(prompt)
    .then(() => {
      t.doesNotThrow(() => testExpected(expected), null, 'jQuery 2.x.x present');
      t.end();
    })
    .catch((err) => handleError(err, t));
});
// jQuery 3.x option: expects the ~3 dependency range and the shim file.
test('generator-lb:jquery3', (t) => {
  // Merge into a fresh object — assigning onto the shared defaultPrompt
  // mutated it and leaked these answers into every later test.
  const prompt = Object.assign({}, defaultPrompt, {
    features: ['includejQuery'],
    jQuery: 'includejQuery3',
  });
  const expected = [
    ['package.json', /\"jquery\": \"~3/],
    'app/scripts/external/jquery.js',
  ];
  runTest(prompt)
    .then(() => {
      t.doesNotThrow(() => testExpected(expected), null, 'jQuery 3.x.x present');
      t.end();
    })
    .catch((err) => handleError(err, t));
});
// lightingFly option: expects the lightingfly dependency in package.json.
test('generator-lb:lightingFly', (t) => {
  // Merge into a fresh object — assigning onto the shared defaultPrompt
  // mutated it and leaked these answers into every later test.
  const prompt = Object.assign({}, defaultPrompt, {
    features: ['includeLightingFly'],
  });
  const expected = [
    ['package.json', /lightingfly/],
  ];
  runTest(prompt)
    .then(() => {
      t.doesNotThrow(() => testExpected(expected), null, 'lightingFly present');
      t.end();
    })
    .catch((err) => handleError(err, t));
});
// Multi-language option: expects merge-stream and a languages config entry.
test('generator-lb:multiLanguage', (t) => {
  // Merge into a fresh object — assigning onto the shared defaultPrompt
  // mutated it and leaked these answers into every later test.
  const prompt = Object.assign({}, defaultPrompt, {
    includeMultiLanguage: true,
  });
  const expected = [
    ['package.json', /merge-stream/],
    ['gulp/config.js', /var languages/]
  ];
  runTest(prompt)
    .then(() => {
      t.doesNotThrow(() => testExpected(expected), null, 'multi-language support present');
      t.end();
    })
    .catch((err) => handleError(err, t));
});
// YAML data format: expects the sample data file to be generated as .yaml.
test('generator-lb:YAML', (t) => {
  // Merge into a fresh object — assigning onto the shared defaultPrompt
  // mutated it and leaked these answers into every later test.
  const prompt = Object.assign({}, defaultPrompt, {
    dataFormat: 'yaml',
  });
  const expected = [
    'app/views/data/index.yaml',
  ];
  runTest(prompt)
    .then(() => {
      t.doesNotThrow(() => testExpected(expected), null, 'YAML formatting present');
      t.end();
    })
    .catch((err) => handleError(err, t));
});
// JSON data format: expects the sample data file to be generated as .json.
test('generator-lb:JSON', (t) => {
  // Merge into a fresh object — assigning onto the shared defaultPrompt
  // mutated it and leaked these answers into every later test.
  const prompt = Object.assign({}, defaultPrompt, {
    dataFormat: 'json',
  });
  const expected = [
    'app/views/data/index.json',
  ];
  runTest(prompt)
    .then(() => {
      t.doesNotThrow(() => testExpected(expected), null, 'JSON formatting present');
      t.end();
    })
    .catch((err) => handleError(err, t));
});
<|start_filename|>app/templates/gulp/tasks/_build.js<|end_filename|>
'use strict';
var gulp = require('gulp-help')(require('gulp'));
var runSequence = require('run-sequence');
var size = require('gulp-size');
var notifier = require('node-notifier');
var config = require('./../config.js');
var build = require('./../utils/buildHelper.js');
// Report the size of the built CSS (hidden helper task for `buildSize`)
gulp.task('buildSize:css', false, function () {
return gulp.src(config.buildSize.srcCss)
.pipe(size(config.buildSize.cfgCss));
});
// Report the size of the built JS (hidden helper task for `buildSize`)
gulp.task('buildSize:js', false, function () {
return gulp.src(config.buildSize.srcJs)
.pipe(size(config.buildSize.cfgJs));
});
// Report the size of the built images (hidden helper task for `buildSize`)
gulp.task('buildSize:images', false, function () {
return gulp.src(config.buildSize.srcImages)
.pipe(size(config.buildSize.cfgImages));
});
// Report the total size of the `dist/` folder, after the per-type subtasks
gulp.task('buildSize', 'Determine size of `dist/` folder', ['buildSize:css', 'buildSize:js','buildSize:images'], function () {
return gulp.src(config.buildSize.srcAll)
.pipe(size(config.buildSize.cfgAll));
});
// run build in sequence - this should be implemented natively in Gulp 4
gulp.task('build', 'Build project (use with --force to force build)', function(cb) {
build.setBuild(true);
runSequence(
['clean'],
['styles', 'scripts'],
['images', 'copy', 'extras'<% if (includeModernizr) { %>, 'modernizr'<% }%>],
'templates',
'buildSize',
// when cache busting is disabled, pass a no-op so runSequence still advances
config.cacheBust ? 'rev' : function() {},
function() {
notifier.notify({
title: 'Build',
message: 'Build was successful'
});
cb();
}
);
});
<|start_filename|>app/templates/gulp/tasks/_styles.js<|end_filename|>
'use strict';
var gulp = require('gulp-help')(require('gulp'));
var gulpif = require('gulp-if');
var sass = require('gulp-sass');
var sourcemaps = require('gulp-sourcemaps');
var postcss = require('gulp-postcss');
var plumber = require('gulp-plumber');
<% if (includeBootstrap) { %>var replace = require('gulp-replace');<% } %>
var autoprefixer = require('autoprefixer');
var cssnano = require('cssnano');
var config = require('./../config.js');
var reload = require('./browserSync.js').reload;
var handleError = require('./../utils/handleError.js');
var build = require('./../utils/buildHelper.js');
// Complie scss using libsass
gulp.task('styles', 'Compile Sass to CSS', function () {
var dest = build.isBuild() ? config.styles.destBuild : config.styles.dest;
return gulp.src(config.styles.src)
<% if (includeBootstrap) { %>.pipe(replace('bootstrap-sass/assets/fonts/bootstrap/', '../fonts/'))<% } %>
.pipe(gulpif(config.sourceMaps && !build.isBuild(), sourcemaps.write('.')))
.pipe(sass(config.styles.sassCfg))
.on('error', handleError)
.pipe(postcss([
autoprefixer(config.styles.autoprefixerCfg),
build.isBuild() ? cssnano() : function() {}
]))
.pipe(gulpif(config.sourceMaps && !build.isBuild(), sourcemaps.write('.')))
.pipe(gulp.dest(dest))
.pipe(reload({stream:true}));
});
| Ad-Ok/generator-lb |
<|start_filename|>app/js/directives/sidemenutoggle.js<|end_filename|>
'use strict';
var ELHAppDirectives = angular.module('ELHAppDirectives');
// Attribute directive: clicking the decorated element toggles the off-canvas
// side menu by flipping the 'show-nav' class on the canvas, header and menu.
ELHAppDirectives.directive('sidemenuToggle', function () {
  return {
    restrict: "A",
    link: function (scope, elem, attrs) {
      $(elem).click(function () {
        toggleNav();
      });

      // The open/closed state is read from #content-canvas; each element is
      // queried once per toggle (the original re-queried the DOM for every
      // hasClass/addClass/removeClass call — up to seven lookups per click).
      function toggleNav() {
        var $canvas = $('#content-canvas');
        var $header = $('.fixed-header');
        var $menu = $('.sidemenu');
        if ($canvas.hasClass('show-nav')) {
          // Do things on Nav Close
          $canvas.removeClass('show-nav');
          $header.removeClass('show-nav');
          $menu.removeClass('show-nav');
        } else {
          // Do things on Nav Open
          $canvas.addClass('show-nav');
          $header.addClass('show-nav');
          $menu.addClass('show-nav');
        }
      }
    }
  };
});
<|start_filename|>src/transform/output.js<|end_filename|>
import { writeFileSync } from 'fs';
import { format } from 'prettier';
function output(code, target, parser = 'babel') {
try {
const formatCode = format(code, { parser });
writeFileSync(target, formatCode);
} catch {
writeFileSync(target, code);
}
}
export default output;
<|start_filename|>src/transform/ts.js<|end_filename|>
import babelTraverse from '@babel/traverse';
/**
 * Strip TypeScript-only nodes (interfaces, type aliases, type annotations,
 * type parameters and `as` casts) from the AST so plain JS can be emitted.
 * @export
 * @param {*} ast babel AST, mutated in place
 * @returns the same AST
 */
export default function(ast) {
  // These node types carry no runtime semantics — simply remove them.
  const remove = (path) => path.remove();

  babelTraverse(ast, {
    // Drop `export interface Foo {...}` / `export type Bar = ...` entirely.
    ExportNamedDeclaration(exportPath) {
      const decl = exportPath.get('declaration');
      if (
        decl &&
        (decl.isTSInterfaceDeclaration() || decl.isTSTypeAliasDeclaration())
      ) {
        exportPath.remove();
      }
    },
    TSTypeAliasDeclaration: remove,
    TSTypeParameterDeclaration: remove,
    TSInterfaceDeclaration: remove,
    TSTypeParameterInstantiation: remove,
    TSTypeAnnotation: remove,
    // `expr as T` -> `expr`
    TSAsExpression(path) {
      path.replaceWith(path.get('expression'));
    }
  });
  return ast;
}
<|start_filename|>src/doc/helper.js<|end_filename|>
const chalk = require('chalk')
// Print CLI usage, supported options and an example invocation.
module.exports = function help() {
console.log(`Usage: trans [targetPath] [options]`)
console.log(`
Options:
-v, --version output current version
-o, --output the output path for react component, default is process.cwd()/react__from__vue
-i, --ignore fileName or just RegExp => .ts$,ignoreFile.js,ignoreDir default: node_modules
-m, --module use cssModule(styles.***),default is global mode("class-name")
-t, --ts it is a typescript component
-h, --help output usage information
`)
console.log('Examples:')
console.log('')
console.log(chalk.gray(' # transform a vue component to react component.'))
console.log(' $ convert components/test.vue -o components')
console.log('')
}
<|start_filename|>lib/transform/output.js<|end_filename|>
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = void 0;
var _fs = require("fs");
var _prettier = require("prettier");
// Format `code` with prettier (parser defaults to 'babel') and write it to
// `target`; falls back to writing the unformatted code if prettier fails.
function output(code, target) {
var parser = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 'babel';
try {
var formatCode = (0, _prettier.format)(code, {
parser: parser
});
(0, _fs.writeFileSync)(target, formatCode);
} catch (_unused) {
// prettier could not handle the code — persist it as-is
(0, _fs.writeFileSync)(target, code);
}
}
var _default = output;
exports["default"] = _default;
<|start_filename|>lib/transform/sfc/generate-element.js<|end_filename|>
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _typeof3 = require("@babel/runtime/helpers/typeof");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.generateIfState = generateIfState;
exports.generateJSXElement = generateJSXElement;
exports.generateOneEle = generateOneEle;
var _typeof2 = _interopRequireDefault(require("@babel/runtime/helpers/typeof"));
var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime/helpers/slicedToArray"));
var t = _interopRequireWildcard(require("@babel/types"));
var _utils = require("../utils");
var _directives = require("./directives");
function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function _getRequireWildcardCache(nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || _typeof3(obj) !== "object" && typeof obj !== "function") { return { "default": obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj["default"] = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
// class / v-bind:class => className
// Splits `class` and `v-bind:class` off the attribute list and returns
// [remainingAttrs, jsxClassNameAttrs]. When the cssModule option is on,
// class names become styles['name'] member lookups.
function solveClass(attrs, state) {
var _attrs = [];
var hasClass = attrs.some(function (_ref) {
var key = _ref.key;
return key === 'class';
});
var hasVclass = attrs.some(function (_ref2) {
var key = _ref2.key;
return key === 'v-bind:class';
});
var varible = t.JSXIdentifier('className');
var isUseCssModule = process.options ? process.options.cssModule : true;
var attrVal;
if (hasClass && hasVclass) {
// template string combining the static class and the bound expression
var classItem = attrs.find(function (o) {
return o.key === 'class';
});
var vClassItem = attrs.find(function (o) {
return o.key === 'v-bind:class';
});
var templateElements = isUseCssModule ? [t.templateElement({
raw: '',
cooked: ''
}), t.templateElement({
raw: ' ',
cooked: ' '
}), t.templateElement({
raw: '',
cooked: ''
}, true)] : [t.templateElement({
raw: "".concat(classItem.value, " "),
cooked: "".concat(classItem.value, " ")
}), t.templateElement({
raw: '',
cooked: ''
}, true)];
var expressions = isUseCssModule ? [t.memberExpression(t.identifier('styles'), t.stringLiteral(classItem.value), true), (0, _directives.handleExpression)(state, vClassItem.value)] : [(0, _directives.handleExpression)(state, vClassItem.value)];
attrVal = t.jSXExpressionContainer(t.templateLiteral(templateElements, expressions));
} else if (hasClass) {
var _attrs$find = attrs.find(function (o) {
return o.key === 'class';
}),
value = _attrs$find.value;
attrVal = isUseCssModule ? t.jSXExpressionContainer(t.memberExpression(t.identifier('styles'), t.stringLiteral(value), true)) : t.stringLiteral(value);
} else if (hasVclass) {
var _attrs$find2 = attrs.find(function (o) {
return o.key === 'v-bind:class';
}),
_value = _attrs$find2.value;
attrVal = t.jSXExpressionContainer((0, _directives.handleExpression)(state, _value));
}
if (hasClass || hasVclass) {
_attrs.push(t.jsxAttribute(varible, attrVal));
return [attrs.filter(function (_ref3) {
var key = _ref3.key;
return key !== 'class' && key !== 'v-bind:class';
}), _attrs];
} else {
return [attrs, _attrs];
}
}
/**
 * Build the JSX attribute list for one element, translating Vue directives
 * (v-on, v-bind, v-model, v-text, v-html, ref, ...) into React equivalents.
 * @param {String} tagName element tag name
 * @param {Array} attrList attribute list of {key, value}
 * @param {Object} state collected component state
 * @param {Array} child output array for generated child nodes (v-text)
 */
function handleAttrValue(tagName, attrList, state, child) {
var _solveClass = solveClass(attrList, state),
_solveClass2 = (0, _slicedToArray2["default"])(_solveClass, 2),
attrs = _solveClass2[0],
_attrs = _solveClass2[1]; // class attribute handled separately
var hasforCycle = attrs.some(function (_ref4) {
var key = _ref4.key;
return key === 'v-for';
});
attrs.map(function (_ref5) {
var key = _ref5.key,
value = _ref5.value;
// translate each remaining attribute
if (key === 'v-show' || key === 'v-for') {// not handled here — the parent emits { exp && <dom/> } / { data.map() }
} else if (key.indexOf('v-on:') > -1) {
// event modifiers are not handled yet:
// .stop
// .prevent
// .capture
// .self
// .once
// .passive
var varible = t.JSXIdentifier((0, _directives.handleOnDirective)(key));
var attrVal = (0, _directives.handleAttribution)(state, value);
_attrs.push(t.jsxAttribute(varible, attrVal));
} else if (key.indexOf('v-bind:') > -1) {
var keys = key.replace('v-bind:', '').split('.');
var _varible2 = t.JSXIdentifier(keys[0]);
var _attrVal = t.jSXExpressionContainer((0, _directives.handleExpression)(state, value));
if (hasforCycle && key === 'v-bind:key') {
_attrVal = t.jSXExpressionContainer(t.identifier(value));
}
_attrs.push(t.jsxAttribute(_varible2, _attrVal));
if (keys.length && keys[1] === 'sync') {
// v-bind:attr.sync = xxx> // two-way binding special case
// v-bind:attr=xxx v-on:emiterName ==> emiterName={(new) => this.setState({xxx:new})
var _var = t.JSXIdentifier((0, _utils.camelName)("update:".concat(keys[0]), ':'));
var _varible = t.identifier('_new');
var _block = t.callExpression(t.memberExpression(t.thisExpression(), t.identifier('setState')), [t.objectExpression([t.objectProperty(t.identifier(keys[0]), _varible)])]);
var _val = t.jSXExpressionContainer(t.arrowFunctionExpression([_varible], _block));
_attrs.push(t.jsxAttribute(_var, _val));
}
} else if (key === 'v-model') {
// becomes value={xxx} onInput={e => this.setState({xxx: e.target.value|checked})}
var _varible3 = t.JSXIdentifier('value');
var _attrVal2 = t.jSXExpressionContainer(t.memberExpression(t.memberExpression(t.thisExpression(), t.identifier('state')), t.identifier(value)));
_attrs.push(t.jsxAttribute(_varible3, _attrVal2)); // build the onInput handler
var inputKey = t.JSXIdentifier('onInput');
var _varible4 = tagName === 'input' && attrs.some(function (_ref6) {
var key = _ref6.key,
value = _ref6.value;
return key === 'type' && value === 'checkbox';
}) ? t.identifier('e.target.checked') : t.identifier('e.target.value');
var _block2 = t.callExpression(t.memberExpression(t.thisExpression(), t.identifier('setState')), [t.objectExpression([t.objectProperty(t.identifier(value), _varible4)])]);
var _val2 = t.jSXExpressionContainer(t.arrowFunctionExpression([t.identifier('e')], _block2));
_attrs.push(t.jsxAttribute(inputKey, _val2));
} else if (key === 'v-text') {
var content = t.jsxExpressionContainer((0, _directives.handleExpression)(state, value));
child.push(content);
} else if (key === 'v-html') {
var _varible5 = t.jSXIdentifier('dangerouslySetInnerHTML');
var _attrVal3 = t.jSXExpressionContainer(t.objectExpression([t.objectProperty(t.identifier('__html'), (0, _directives.handleExpression)(state, value))]));
_attrs.push(t.jsxAttribute(_varible5, _attrVal3));
} else if (key === 'ref') {
// ref='dom' => ref={dom => this.dom = dom}
state.$refs[value] = true;
var _varible6 = t.JSXIdentifier(key);
var left = t.memberExpression(t.thisExpression(), t.identifier(value));
var right = t.identifier('_dom');
var _attrVal4 = t.jsxExpressionContainer(t.arrowFunctionExpression([t.identifier('_dom')], t.assignmentExpression('=', left, right)));
_attrs.push(t.jsxAttribute(_varible6, _attrVal4));
} else {
_attrs.push(t.jsxAttribute(t.JSXIdentifier(key), t.stringLiteral(value)));
}
});
return _attrs;
}
/* Create one JSX element for a Vue AST node and attach its attributes,
   wrapping it for v-for / v-if / v-show as needed. The returned node may
   carry a `.dom` property pointing at the raw JSX element so callers can
   keep appending children to it. */
function generateOneEle(ast, state) {
var attrs = ast.attrsList;
var isVif = !!ast.ifConditions && ast.ifConditions[0].exp;
var vShowItem = attrs.find(function (o) {
return o.key === 'v-show';
});
var vForItem = attrs.find(function (o) {
return o.key === 'v-for';
});
var child = [];
if (ast.tag === 'slot') {
// <slot> tags map to this.props.children / this.props[name]
var slotName = ast.attrsMap.name;
var _child = t.jSXExpressionContainer(t.memberExpression(t.memberExpression(t.thisExpression(), t.identifier('props')), t.identifier(slotName ? slotName : 'children')));
_child.dom = null;
return _child;
}
var openingElement = t.jsxOpeningElement(t.JSXIdentifier((0, _utils.parseName)(ast.tag)), handleAttrValue(ast.tag, attrs, state, child), false);
var closeElement = t.jsxClosingElement(t.JSXIdentifier((0, _utils.parseName)(ast.tag)));
var dom = t.jsxElement(openingElement, closeElement, child);
if (vForItem && (isVif || vShowItem)) {
// v-for combined with v-if / v-show on the same element
var exp = isVif && vShowItem ? "".concat(isVif && vShowItem.value) : isVif ? isVif : vShowItem.value;
var _child2 = (0, _directives.handleForDirective)(vForItem.value, dom, state, exp);
_child2.dom = dom;
return _child2;
} else {
if (vForItem) {
// v-for="(value, index) in list" becomes a .map() expression
var _child3 = (0, _directives.handleForDirective)(vForItem.value, dom, state);
_child3.dom = dom;
return _child3;
} else if (vShowItem) {
var _child4 = null;
if (!ast.parent) {
// root-level v-show becomes if (...) { return <dom/> }
var body = t.blockStatement([t.returnStatement(dom)]);
_child4 = t.ifStatement((0, _directives.handleExpression)(state, vShowItem.value), body);
} else {
// nested v-show becomes {condition && <dom/>}
_child4 = t.jSXExpressionContainer(t.logicalExpression('&&', (0, _directives.handleExpression)(state, vShowItem.value), dom));
}
_child4.dom = dom;
return _child4;
}
}
return dom;
}
/**
 * Recursively build an if / else-if / else statement chain from a Vue
 * ifConditions array (used at the root of the render method).
 * @param {*} origin ifConditions: [{exp, block}, ...]
 * @param {*} state
 * @param {*} i current index in the chain
 */
function generateIfState(origin, state) {
var i = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
var cur = origin[i];
if (!origin[i]) return null;
var exp = cur.exp,
block = cur.block;
var body = t.blockStatement([t.returnStatement(generateJSXElement(block, null, state, true))]);
if (!exp) {
// no expression: this branch is the final `else`
var alter = t.blockStatement([t.returnStatement(generateJSXElement(block, null, state, true))]);
return alter;
}
return t.ifStatement((0, _directives.handleExpression)(state, exp), body, generateIfState(origin, state, ++i));
}
/**
 * Recursively build a nested ternary expression from a Vue ifConditions
 * array (used where an if-statement is not allowed, i.e. inside JSX).
 * @param {*} origin ifConditions: [{exp, block}, ...]
 * @param {*} state
 * @param {*} i current index in the chain
 */
function generateConditionalExpression(origin, state) {
var i = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
var cur = origin[i];
var exp = cur.exp,
block = cur.block;
if (!exp || !origin[i + 1]) {
return generateJSXElement(block, null, state, true);
}
return t.conditionalExpression((0, _directives.handleExpression)(state, exp), generateJSXElement(block, null, state, true), generateConditionalExpression(origin, state, ++i));
}
/**
 * Build the JSX for a conditional child, depending on branch count:
 *   length 1:  { condition && <dom /> }
 *   length 2:  { condition ? <dom /> : <dom /> }
 *   length > 2: nested ternaries via generateConditionalExpression
 * @param {Array} origin ifConditions
 * @param {Object} state
 */
function generateConditionEle(origin, parent, state) {
var length = origin.length;
var cur = origin[0];
var next = origin[1];
var child = null;
if (length === 1) {
var ele = generateJSXElement(cur.block, null, state, true);
var dom = t.isJSXExpressionContainer(ele) ? ele.dom : ele;
child = t.jsxExpressionContainer( // JSX expression container
// turn into a logical && expression
t.logicalExpression('&&', (0, _directives.handleExpression)(state, cur.exp), dom));
} else if (length === 2) {
child = t.jsxExpressionContainer( // turn into a conditional (ternary) expression
t.conditionalExpression((0, _directives.handleExpression)(state, cur.exp), generateJSXElement(cur.block, null, state, true), generateJSXElement(next.block, null, state, true)));
} else {
child = t.jSXExpressionContainer(generateConditionalExpression(origin, state));
}
return child;
}
/**
 * Recursively convert a Vue template AST node into JSX, appending the
 * result to `parent` (or returning it as the new root when parent is null).
 * node type: 1 => element, 2 => interpolation expression, 3 => text
 * isNoChild: skip ifConditions handling — the condition branches reference
 * the same block, which would otherwise recurse forever.
 */
function generateJSXElement(ast, parent, state, isNoChild) {
var type = ast && ast.type;
if (type === 1) {
if (ast.ifConditions && !isNoChild) {
if (!parent) {
// root-level conditions use if (condition) { return <dom/> }
return generateIfState(ast.ifConditions, state);
} else {
// nested conditions use { condition ? <dom /> : <dom /> }
var expression = generateConditionEle(ast.ifConditions, parent, state);
parent.children.push(expression);
return parent;
}
} else {
var nextParent = generateOneEle(ast, state);
if (!parent) parent = nextParent;else parent.children.push(nextParent);
if (ast.children.length) {
var next = t.isJSXElement(nextParent) ? nextParent : nextParent.dom;
ast.children.map(function (o) {
var isNochild = o.attrsList && o.attrsList.some(function (_ref7) {
var key = _ref7.key;
return key === 'v-for';
}) && (o.attrsList.some(function (_ref8) {
var key = _ref8.key;
return key === 'v-show';
}) || o.ifConditions);
generateJSXElement(o, next, state, isNochild);
});
}
}
} else if (type === 2) {
// interpolation expression, e.g. {{ foo }}
if (parent && parent.children) {
var tokens = ast.tokens;
tokens.map(function (o) {
if (typeof o === 'string') {
parent.children.push(t.jsxText(o));
} else if ((0, _typeof2["default"])(o) === 'object' && o['@binding']) {
var container = t.jsxExpressionContainer((0, _directives.handleExpression)(state, o['@binding']));
parent.children.push(container);
}
});
}
} else if (type === 3 && ast.text.trim()) {
var _nextParent = t.jsxText(ast.text);
if (!parent) parent = _nextParent;else parent.children.push(_nextParent);
}
return parent;
}
<|start_filename|>lib/transform/transform.js<|end_filename|>
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = transform;
var _vueTemplateCompiler = require("vue-template-compiler");
var _parser = require("@babel/parser");
var _traverse = _interopRequireDefault(require("@babel/traverse"));
var _types = require("@babel/types");
var _generator = _interopRequireDefault(require("@babel/generator"));
var _prettier = require("prettier");
var _ts = _interopRequireDefault(require("./ts"));
var _sfc = _interopRequireDefault(require("./sfc"));
var _utils = require("./utils");
var _collectState = require("./collect-state");
var _vueAstHelpers = require("./vue-ast-helpers");
var _sfcAstHelpers = require("./sfc/sfc-ast-helpers");
var _reactAstHelpers = require("./react-ast-helpers");
// Babel parser plugins needed to parse SFC scripts (TypeScript, JSX,
// class properties, decorators, async generators, object rest/spread, ...)
var plugins = ['typescript', 'jsx', 'classProperties', 'trailingFunctionCommas', 'asyncFunctions', 'exponentiationOperator', 'asyncGenerators', 'objectRestSpread', ['decorators', {
decoratorsBeforeExport: true
}]];
/**
 * Transform a Vue single-file component into a React component.
 * @param {string} fileContent raw .vue source
 * @param {*} opt {isTs: source uses TypeScript, isUseCssModule: extract <style> content}
 * @returns {jsx:string, css:string} (undefined when parsing/generation throws)
 */
function transform(fileContent, opt) {
var _ref = opt || {},
_ref$isTs = _ref.isTs,
isTs = _ref$isTs === void 0 ? false : _ref$isTs,
_ref$isUseCssModule = _ref.isUseCssModule,
isUseCssModule = _ref$isUseCssModule === void 0 ? true : _ref$isUseCssModule;
var state = {
name: undefined,
data: {},
props: {},
computeds: {},
components: {},
classMethods: {},
$refs: {},
// collected template refs
vForVars: {} // variables introduced by v-for scopes
}; // Life-cycle methods relations mapping
var cycle = {
created: 'componentWillMount',
mounted: 'componentDidMount',
updated: 'componentDidUpdate',
beforeDestroy: 'componentWillUnmount',
errorCaptured: 'componentDidCatch',
render: 'render'
};
var collect = {
imports: [],
classMethods: {}
}; // split the SFC into template / script / styles
var component = formatContent(fileContent);
var result = {
jsx: '',
css: ''
};
/* solve styles */
var styles = component.styles;
var suffixName = null;
if (isUseCssModule && styles && styles[0]) {
var style = styles[0];
result.css = style.content;
}
try {
// parse the script block
var ast = (0, _parser.parse)(component.js, {
sourceType: 'module',
strictMode: false,
plugins: plugins
});
if (isTs) {
(0, _ts["default"])(ast);
}
(0, _collectState.initProps)(ast, state);
(0, _collectState.initData)(ast, state);
(0, _collectState.initComputed)(ast, state);
(0, _collectState.initComponents)(ast, state); // SFC
(0, _traverse["default"])(ast, {
ImportDeclaration: function ImportDeclaration(path) {
if (path.node.source && path.node.source.value !== 'vue') collect.imports.unshift(path.node);
},
ObjectMethod: function ObjectMethod(path) {
var name = path.node.key.name;
if (path.parentPath.parent.key && path.parentPath.parent.key.name === 'methods') {
(0, _vueAstHelpers.handleGeneralMethods)(path, collect, state, name);
} else if (cycle[name]) {
(0, _vueAstHelpers.handleCycleMethods)(path, collect, state, name, cycle[name], true);
} else {
if (name === 'data' || state.computeds[name]) {
return;
} // log(`The ${name} method maybe be not support now`);
}
}
});
var html = component.template && (0, _sfc["default"])(component.template, state); // AST for react component
var tpl = "export default class ".concat((0, _utils.parseName)(state.name), " extends Component {}");
var rast = (0, _parser.parse)(tpl, {
sourceType: 'module'
});
(0, _traverse["default"])(rast, {
Program: function Program(path) {
(0, _reactAstHelpers.genImports)(path, collect, suffixName);
},
ClassBody: function ClassBody(path) {
(0, _reactAstHelpers.genConstructor)(path, state);
(0, _reactAstHelpers.genStaticProps)(path, state);
(0, _reactAstHelpers.genClassMethods)(path, state);
(0, _sfcAstHelpers.genSFCRenderMethod)(path, state, html);
}
}); // rewrite custom component tags in render() to their imported names
(0, _traverse["default"])(rast, {
ClassMethod: function ClassMethod(path) {
if (path.node.key.name === 'render') {
path.traverse({
JSXIdentifier: function JSXIdentifier(path) {
if ((0, _types.isJSXClosingElement)(path.parent) || (0, _types.isJSXOpeningElement)(path.parent)) {
var node = path.node;
var componentName = state.components[node.name] || state.components[(0, _utils.parseComponentName)(node.name)];
if (componentName) {
path.replaceWith((0, _types.jSXIdentifier)(componentName));
path.stop();
}
}
}
});
}
}
});
var _generate = (0, _generator["default"])(rast, {
quotes: 'single',
retainLines: true
}),
code = _generate.code;
result.jsx = (0, _prettier.format)(code, {
parser: 'babel'
});
result.css = (0, _prettier.format)(result.css, {
parser: 'css'
});
return result;
} catch (error) {
(0, _utils.log)(error);
}
}
/**
 * Split a .vue file into its template, script and style blocks.
 * @param {*} source raw .vue file content
 * @returns {template, js, styles} — template/styles may be null/empty
 */
function formatContent(source) {
var res = (0, _vueTemplateCompiler.parseComponent)(source, {
pad: 'line'
});
// drop the padding comment lines added by pad:'line'
var jsCode = res.script.content.replace(/\/\/\n/g, '');
jsCode = jsCode.replace('export default Vue.extend', 'export default ');
return {
template: res.template ? res.template.content : null,
js: jsCode,
styles: res.styles
};
}
<|start_filename|>lib/transform/utils.js<|end_filename|>
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _typeof3 = require("@babel/runtime/helpers/typeof");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.camelName = camelName;
exports.genDefaultProps = genDefaultProps;
exports.genPropTypes = genPropTypes;
exports.getIdentifier = getIdentifier;
exports.getStateOrProp = getStateOrProp;
exports.log = log;
exports.parseComponentName = parseComponentName;
exports.parseName = parseName;
var _typeof2 = _interopRequireDefault(require("@babel/runtime/helpers/typeof"));
var t = _interopRequireWildcard(require("@babel/types"));
var _chalk = _interopRequireDefault(require("chalk"));
function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function _getRequireWildcardCache(nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || _typeof3(obj) !== "object" && typeof obj !== "function") { return { "default": obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj["default"] = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function camelName(name) {
var split = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '_';
var val = name.toLowerCase().split(split);
if (val.length === 1) return name;
var str = val.reduce(function (prev, next) {
var nextStr = next[0].toUpperCase() + next.substr(1);
return prev + nextStr;
});
return str;
}
function parseName(name) {
var split = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '-';
name = name || 'react-compoennt';
var val = name.toLowerCase().split(split);
if (val.length === 1) return name;
var str = val.reduce(function (prev, next) {
var nextStr = next[0].toUpperCase() + next.substr(1);
return prev + nextStr;
}, '');
return str;
}
function parseComponentName(str) {
if (str) {
var a = str.split('-').map(function (e) {
return e[0].toUpperCase() + e.substr(1);
});
return a.join('');
}
}
// Print a message to the console: red for type 'error' (the default),
// green otherwise.
function log(msg) {
var type = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'error';
if (type === 'error') {
return console.log(_chalk["default"].red(" ".concat(msg)));
}
console.log(_chalk["default"].green(msg));
}
// Return the AST identifier naming where `key` lives on the component:
// 'state' when it was collected from data, otherwise 'props'.
function getIdentifier(state, key) {
  var source = state.data[key] ? 'state' : 'props';
  return t.identifier(source);
}
function getStateOrProp(state, key) {
return state.data[key] ? 'this.state' : 'this.props';
}
// Build the `static propTypes` class property AST from collected Vue props.
function genPropTypes(props) {
var properties = [];
var keys = Object.keys(props);
for (var i = 0, l = keys.length; i < l; i++) {
var key = keys[i];
var obj = props[key];
var identifier = t.identifier(key);
var val = t.memberExpression(t.identifier('PropTypes'), t.identifier('any'));
if (obj.type === 'typesOfArray' || obj.type === 'array') {
if (obj.type === 'typesOfArray') {
// multiple allowed types -> PropTypes.oneOfType([...])
(function () {
var elements = [];
obj.value.forEach(function (val) {
elements.push(t.memberExpression(t.identifier('PropTypes'), t.identifier(val)));
});
val = t.callExpression(t.memberExpression(t.identifier('PropTypes'), t.identifier('oneOfType')), [t.arrayExpression(elements)]);
})();
} else {
val = obj.required ? t.memberExpression(t.memberExpression(t.identifier('PropTypes'), t.identifier('array')), t.identifier('isRequired')) : t.memberExpression(t.identifier('PropTypes'), t.identifier('array'));
}
} else if (obj.validator) {
// complex validators are not supported — reduce to PropTypes.oneOf
var node = t.callExpression(t.memberExpression(t.identifier('PropTypes'), t.identifier('oneOf')), [t.arrayExpression(obj.validator.elements)]);
if (obj.required) {
val = t.memberExpression(node, t.identifier('isRequired'));
} else {
val = node;
}
} else {
val = obj.required ? t.memberExpression(t.memberExpression(t.identifier('PropTypes'), t.identifier(obj.type)), t.identifier('isRequired')) : t.memberExpression(t.identifier('PropTypes'), t.identifier(obj.type));
}
properties.push(t.objectProperty(identifier, val));
} // Babel doesn't support creating a static class property directly???
return t.classProperty(t.identifier('static propTypes'), t.objectExpression(properties), null, []);
}
/**
 * Builds the `static defaultProps = { ... }` class property from the
 * collected Vue prop definitions. Props without a default value are skipped.
 * @param {Object} props map of prop name -> descriptor ({ type, value, ... })
 * @returns {Object} a babel classProperty node
 */
function genDefaultProps(props) {
  var typeOf = _typeof2["default"];
  // only props that actually declare a default take part
  var keys = Object.keys(props).filter(function (key) {
    return props[key].value !== undefined;
  });
  var properties = [];
  keys.forEach(function (key) {
    var obj = props[key];
    var val = null;
    if (obj.type === 'typesOfArray') {
      // multi-type props keep their default in `defaultValue`
      var type = typeOf(obj.defaultValue);
      if (type === 'undefined') return; // no default -> skip this prop
      var v = obj.defaultValue;
      if (type === 'number') {
        val = t.numericLiteral(Number(v));
      } else if (type === 'string') {
        val = t.stringLiteral(v);
      } else {
        val = t.booleanLiteral(v);
      }
    } else if (obj.type === 'array') {
      val = t.arrayExpression(obj.value.elements);
    } else if (obj.type === 'object') {
      val = t.objectExpression(obj.value.properties);
    } else {
      var valueType = typeOf(obj.value);
      if (valueType === 'boolean') {
        val = t.booleanLiteral(obj.value);
      } else if (valueType === 'number') {
        val = t.numericLiteral(Number(obj.value));
      } else {
        // strings and anything else fall back to a string literal
        val = t.stringLiteral(obj.value);
      }
    }
    properties.push(t.objectProperty(t.identifier(key), val));
  });
  // Babel cannot express a real static class property here, so the key is
  // the literal identifier 'static defaultProps' and is fixed up later.
  return t.classProperty(t.identifier('static defaultProps'), t.objectExpression(properties), null, []);
}
<|start_filename|>src/transform/sfc/generate-element.js<|end_filename|>
import * as t from '@babel/types';
import { parseName, camelName } from '../utils';
import { handleAttribution, handleExpression, handleOnDirective, handleForDirective } from './directives';
// class => className
/**
 * Extracts `class` / `v-bind:class` from the attribute list and merges them
 * into a single JSX `className` attribute. With CSS modules enabled
 * (process.options.cssModule, default true) static class names are looked up
 * on the imported `styles` object.
 * @param {Array} attrs  attribute list: [{ key, value }]
 * @param {Object} state transform state (used to resolve dynamic expressions)
 * @returns {[Array, Array]} [attrs minus class-related entries, generated JSX attrs]
 */
function solveClass(attrs, state) {
  const _attrs = [];
  const hasClass = attrs.some(({ key }) => key === 'class');
  const hasVclass = attrs.some(({ key }) => key === 'v-bind:class');
  const varible = t.JSXIdentifier('className');
  const isUseCssModule = process.options ? process.options.cssModule : true;
  let attrVal;
  if (hasClass && hasVclass) {
    // both static and dynamic class: build a template literal combining them,
    // e.g. {`${styles['static']} ${dynamic}`} (or `static ${dynamic}` without CSS modules)
    const classItem = attrs.find(o => o.key === 'class');
    const vClassItem = attrs.find(o => o.key === 'v-bind:class');
    const templateElements = isUseCssModule
      ? [t.templateElement({ raw: '', cooked: '' }), t.templateElement({ raw: ' ', cooked: ' ' }), t.templateElement({ raw: '', cooked: '' }, true)]
      : [
          t.templateElement({
            raw: `${classItem.value} `,
            cooked: `${classItem.value} `,
          }),
          t.templateElement({ raw: '', cooked: '' }, true),
        ];
    const expressions = isUseCssModule
      ? [t.memberExpression(t.identifier('styles'), t.stringLiteral(classItem.value), true), handleExpression(state, vClassItem.value)]
      : [handleExpression(state, vClassItem.value)];
    attrVal = t.jSXExpressionContainer(t.templateLiteral(templateElements, expressions));
  } else if (hasClass) {
    // static class only
    const { value } = attrs.find(o => o.key === 'class');
    attrVal = isUseCssModule
      ? t.jSXExpressionContainer(t.memberExpression(t.identifier('styles'), t.stringLiteral(value), true))
      : t.stringLiteral(value);
  } else if (hasVclass) {
    // dynamic class only
    const { value } = attrs.find(o => o.key === 'v-bind:class');
    attrVal = t.jSXExpressionContainer(handleExpression(state, value));
  }
  if (hasClass || hasVclass) {
    _attrs.push(t.jsxAttribute(varible, attrVal));
    return [attrs.filter(({ key }) => key !== 'class' && key !== 'v-bind:class'), _attrs];
  } else {
    return [attrs, _attrs];
  }
}
/**
 * Converts the Vue attribute list of one tag into JSX attributes, handling
 * v-on / v-bind (incl. .sync) / v-model / v-text / v-html / ref.
 * v-show and v-for are intentionally skipped here: the parent element turns
 * them into `{ exp && <dom/> }` / `{ list.map(...) }`.
 * @param {String} tagName  tag name
 * @param {Array}  attrList attribute list: [{ key, value }]
 * @param {Object} state    transform state ($refs usage is recorded here)
 * @param {Array}  child    element's child list (v-text appends content here)
 * @returns {Array} JSX attribute nodes
 */
function handleAttrValue(tagName, attrList, state, child) {
  const [attrs, _attrs] = solveClass(attrList, state); // handle class / v-bind:class
  const hasforCycle = attrs.some(({ key }) => key === 'v-for');
  attrs.map(({ key, value }) => {
    // add each attribute to the element
    if (key === 'v-show' || key === 'v-for') {
      // not handled here; the parent produces { exp && <dom/> } / { data.map() }
    } else if (key.indexOf('v-on:') > -1) {
      // event modifiers are not supported yet:
      // .stop
      // .prevent
      // .capture
      // .self
      // .once
      // .passive
      const varible = t.JSXIdentifier(handleOnDirective(key));
      const attrVal = handleAttribution(state, value);
      _attrs.push(t.jsxAttribute(varible, attrVal));
    } else if (key.indexOf('v-bind:') > -1) {
      const keys = key.replace('v-bind:', '').split('.');
      const varible = t.JSXIdentifier(keys[0]);
      let attrVal = t.jSXExpressionContainer(handleExpression(state, value));
      if (hasforCycle && key === 'v-bind:key') {
        // inside v-for the key is a loop variable, not a state/prop expression
        attrVal = t.jSXExpressionContainer(t.identifier(value));
      }
      _attrs.push(t.jsxAttribute(varible, attrVal));
      if (keys.length && keys[1] === 'sync') {
        // v-bind:attr.sync=xxx is two-way-binding sugar; additionally emit
        // update:attr => updateAttr={_new => this.setState({ xxx: _new })}
        const _var = t.JSXIdentifier(camelName(`update:${keys[0]}`, ':'));
        const _varible = t.identifier('_new');
        const _block = t.callExpression(t.memberExpression(t.thisExpression(), t.identifier('setState')), [
          t.objectExpression([t.objectProperty(t.identifier(keys[0]), _varible)]),
        ]);
        const _val = t.jSXExpressionContainer(t.arrowFunctionExpression([_varible], _block));
        _attrs.push(t.jsxAttribute(_var, _val));
      }
    } else if (key === 'v-model') {
      // becomes value={this.state.xxx} plus onInput={e => this.setState({ xxx: e.target.value|checked })}
      const varible = t.JSXIdentifier('value');
      const attrVal = t.jSXExpressionContainer(
        t.memberExpression(t.memberExpression(t.thisExpression(), t.identifier('state')), t.identifier(value))
      );
      _attrs.push(t.jsxAttribute(varible, attrVal));
      // generate the onInput handler; checkboxes read e.target.checked
      const inputKey = t.JSXIdentifier('onInput');
      const _varible =
        tagName === 'input' && attrs.some(({ key, value }) => key === 'type' && value === 'checkbox')
          ? t.identifier('e.target.checked')
          : t.identifier('e.target.value');
      const _block = t.callExpression(t.memberExpression(t.thisExpression(), t.identifier('setState')), [
        t.objectExpression([t.objectProperty(t.identifier(value), _varible)]),
      ]);
      const _val = t.jSXExpressionContainer(t.arrowFunctionExpression([t.identifier('e')], _block));
      _attrs.push(t.jsxAttribute(inputKey, _val));
    } else if (key === 'v-text') {
      // v-text becomes a child expression container
      const content = t.jsxExpressionContainer(handleExpression(state, value));
      child.push(content);
    } else if (key === 'v-html') {
      // v-html becomes dangerouslySetInnerHTML={{ __html: expr }}
      const varible = t.jSXIdentifier('dangerouslySetInnerHTML');
      const attrVal = t.jSXExpressionContainer(t.objectExpression([t.objectProperty(t.identifier('__html'), handleExpression(state, value))]));
      _attrs.push(t.jsxAttribute(varible, attrVal));
    } else if (key === 'ref') {
      // ref='dom' => ref={_dom => this.dom = _dom}
      state.$refs[value] = true;
      const varible = t.JSXIdentifier(key);
      const left = t.memberExpression(t.thisExpression(), t.identifier(value));
      const right = t.identifier('_dom');
      const attrVal = t.jsxExpressionContainer(t.arrowFunctionExpression([t.identifier('_dom')], t.assignmentExpression('=', left, right)));
      _attrs.push(t.jsxAttribute(varible, attrVal));
    } else {
      // any other attribute is copied through as a plain string attribute
      _attrs.push(t.jsxAttribute(t.JSXIdentifier(key), t.stringLiteral(value)));
    }
  });
  return _attrs;
}
/* Generates a single JSX element and attaches its static attributes. */
/**
 * Creates the JSX element for one template AST node, handling the special
 * cases `slot`, `v-for`, `v-if` and `v-show`. The raw JSX element is also
 * exposed on the returned node's `.dom` so callers can append children.
 * @param {*} ast   template AST node (type 1)
 * @param {*} state transform state
 * @returns a JSX element, expression container, or if-statement node
 */
function generateOneEle(ast, state) {
  const attrs = ast.attrsList;
  // exp string of the first v-if branch, or false when there is none
  const isVif = !!ast.ifConditions && ast.ifConditions[0].exp;
  const vShowItem = attrs.find(o => o.key === 'v-show');
  const vForItem = attrs.find(o => o.key === 'v-for');
  const child = [];
  if (ast.tag === 'slot') {
    // <slot/>: named slots map to this.props.<name>, the default slot to this.props.children
    const slotName = ast.attrsMap.name;
    const _child = t.jSXExpressionContainer(
      t.memberExpression(t.memberExpression(t.thisExpression(), t.identifier('props')), t.identifier(slotName ? slotName : 'children'))
    );
    _child.dom = null;
    return _child;
  }
  const openingElement = t.jsxOpeningElement(t.JSXIdentifier(parseName(ast.tag)), handleAttrValue(ast.tag, attrs, state, child), false);
  const closeElement = t.jsxClosingElement(t.JSXIdentifier(parseName(ast.tag)));
  const dom = t.jsxElement(openingElement, closeElement, child);
  if (vForItem && (isVif || vShowItem)) {
    // v-for together with v-if and/or v-show: guard the mapped list.
    // Bug fix: when BOTH v-if and v-show are present, join the two condition
    // strings with '&&'. The previous `${isVif && vShowItem.value}` evaluated
    // the JS && operator first and (isVif being truthy) silently dropped the
    // v-if condition from the generated guard expression.
    const exp = isVif && vShowItem ? `${isVif} && ${vShowItem.value}` : isVif ? isVif : vShowItem.value;
    const _child = handleForDirective(vForItem.value, dom, state, exp);
    _child.dom = dom;
    return _child;
  } else {
    if (vForItem) {
      // v-for="(value, index) in list" is handled as list.map(...)
      const _child = handleForDirective(vForItem.value, dom, state);
      _child.dom = dom;
      return _child;
    } else if (vShowItem) {
      let _child = null;
      if (!ast.parent) {
        // root-level v-show becomes if (exp) { return <dom/> }
        const body = t.blockStatement([t.returnStatement(dom)]);
        _child = t.ifStatement(handleExpression(state, vShowItem.value), body);
      } else {
        // nested v-show becomes { exp && <dom/> }
        _child = t.jSXExpressionContainer(t.logicalExpression('&&', handleExpression(state, vShowItem.value), dom));
      }
      _child.dom = dom;
      return _child;
    }
  }
  return dom;
}
/**
 * Recursively builds an if / else-if / else statement chain from a Vue
 * ifConditions array; each branch returns its generated JSX element.
 * @param {Array} origin ifConditions: [{ exp, block }]
 * @param {Object} state transform state
 * @param {Number} i current branch index
 * @returns an ifStatement/blockStatement node, or null past the last branch
 */
function generateIfState(origin, state, i = 0) {
  const cur = origin[i];
  if (!cur) return null;
  const { exp, block } = cur;
  // Build the branch body exactly once. The previous version generated the
  // JSX element a second time for the final v-else branch, duplicating the
  // work and any side effects element generation has on `state`.
  const body = t.blockStatement([t.returnStatement(generateJSXElement(block, null, state, true))]);
  if (!exp) {
    // v-else: a plain block with no condition terminates the chain
    return body;
  }
  return t.ifStatement(handleExpression(state, exp), body, generateIfState(origin, state, ++i));
}
/**
 * Recursively turns a Vue ifConditions array into nested ternary
 * (conditional) expressions: cond1 ? <a/> : cond2 ? <b/> : <c/>.
 * @param {*} origin ifConditions: [{ exp, block }]
 * @param {*} state  transform state
 * @param {*} i      current branch index
 */
function generateConditionalExpression(origin, state, i = 0) {
  const { exp, block } = origin[i];
  // the last branch (v-else, or simply the final condition) is emitted bare
  if (!exp || !origin[i + 1]) {
    return generateJSXElement(block, null, state, true);
  }
  const test = handleExpression(state, exp);
  const consequent = generateJSXElement(block, null, state, true);
  const alternate = generateConditionalExpression(origin, state, ++i);
  return t.conditionalExpression(test, consequent, alternate);
}
/**
 * Wraps a Vue ifConditions list in the matching JSX expression container:
 *   1 branch   -> { condition && <dom/> }
 *   2 branches -> { condition ? <a/> : <b/> }
 *   3+         -> nested ternaries via generateConditionalExpression
 * @param {Array}  origin ifConditions: [{ exp, block }]
 * @param {Object} parent (unused; kept for call-site compatibility)
 * @param {Object} state  transform state
 */
function generateConditionEle(origin, parent, state) {
  const [cur, next] = origin;
  if (origin.length === 1) {
    // single condition: logical && expression
    const ele = generateJSXElement(cur.block, null, state, true);
    const dom = t.isJSXExpressionContainer(ele) ? ele.dom : ele;
    return t.jsxExpressionContainer(t.logicalExpression('&&', handleExpression(state, cur.exp), dom));
  }
  if (origin.length === 2) {
    // if/else pair: ternary expression
    const test = handleExpression(state, cur.exp);
    const consequent = generateJSXElement(cur.block, null, state, true);
    const alternate = generateJSXElement(next.block, null, state, true);
    return t.jsxExpressionContainer(t.conditionalExpression(test, consequent, alternate));
  }
  return t.jSXExpressionContainer(generateConditionalExpression(origin, state));
}
/**
 * Recursively converts a template AST node into JSX, appending into `parent`.
 * Node types: 1 => element, 2 => interpolation expression, 3 => text.
 * @param {*} ast    current template AST node
 * @param {*} parent JSX parent node to append into (null at the root)
 * @param {*} state  transform state
 * @param {boolean} isNoChild skip ifConditions handling for this node;
 *        needed because an ifConditions block references the node itself,
 *        which would otherwise recurse forever
 * @returns the (possibly newly created) parent node
 */
function generateJSXElement(ast, parent, state, isNoChild) {
  const type = ast && ast.type;
  if (type === 1) {
    if (ast.ifConditions && !isNoChild) {
      if (!parent) {
        // root-level conditions become if (condition) { return <dom/> } chains
        return generateIfState(ast.ifConditions, state);
      } else {
        // nested conditions become { condition ? <dom /> : <dom /> }
        const expression = generateConditionEle(ast.ifConditions, parent, state);
        parent.children.push(expression);
        return parent;
      }
    } else {
      const nextParent = generateOneEle(ast, state);
      if (!parent) parent = nextParent;
      else parent.children.push(nextParent);
      if (ast.children.length) {
        // children are appended to the raw JSX element (`.dom` when the node
        // is wrapped in an expression container or if statement)
        const next = t.isJSXElement(nextParent) ? nextParent : nextParent.dom;
        ast.children.map(o => {
          // a child that combines v-for with v-show/v-if was already fully
          // handled by generateOneEle, so skip its condition handling
          const isNochild =
            o.attrsList && o.attrsList.some(({ key }) => key === 'v-for') && (o.attrsList.some(({ key }) => key === 'v-show') || o.ifConditions);
          generateJSXElement(o, next, state, isNochild);
        });
      }
    }
  } else if (type === 2) {
    // interpolation: mixed text and {expression} tokens
    if (parent && parent.children) {
      const tokens = ast.tokens;
      tokens.map(o => {
        if (typeof o === 'string') {
          parent.children.push(t.jsxText(o));
        } else if (typeof o === 'object' && o['@binding']) {
          const container = t.jsxExpressionContainer(handleExpression(state, o['@binding']));
          parent.children.push(container);
        }
      });
    }
  } else if (type === 3 && ast.text.trim()) {
    // plain non-empty text node
    const nextParent = t.jsxText(ast.text);
    if (!parent) parent = nextParent;
    else parent.children.push(nextParent);
  }
  return parent;
}
export { generateOneEle, generateIfState, generateJSXElement };
<|start_filename|>src/transform/transform.js<|end_filename|>
import { parseComponent } from 'vue-template-compiler';
import { parse } from '@babel/parser';
import babelTraverse from '@babel/traverse';
import { isJSXClosingElement, isJSXOpeningElement, jSXIdentifier } from '@babel/types';
import generate from '@babel/generator';
import { format } from 'prettier';
import transformTS from './ts';
import transfromTemplate from './sfc';
import { parseName, log, parseComponentName } from './utils';
import { initProps, initData, initComputed, initComponents } from './collect-state';
import { handleCycleMethods, handleGeneralMethods } from './vue-ast-helpers';
import { genSFCRenderMethod } from './sfc/sfc-ast-helpers';
import { genImports, genConstructor, genStaticProps, genClassMethods } from './react-ast-helpers';
// Babel parser plugins required to read the Vue component's script block
// (TypeScript, JSX, class properties, async/await, rest/spread, decorators).
const plugins = [
  'typescript',
  'jsx',
  'classProperties',
  'trailingFunctionCommas',
  'asyncFunctions',
  'exponentiationOperator',
  'asyncGenerators',
  'objectRestSpread',
  [
    'decorators',
    {
      decoratorsBeforeExport: true,
    },
  ],
];
/**
 * Transforms a Vue SFC source string into a React component string.
 * @param {string} fileContent raw .vue file content
 * @param {Object} [opt] { isTs: source uses TypeScript,
 *                         isUseCssModule: include the style content }
 * @returns {{ jsx: string, css: string }|undefined} formatted output, or
 *          undefined when parsing/transforming throws (the error is logged)
 */
export default function transform(fileContent, opt) {
  const { isTs = false, isUseCssModule = true } = opt || {};
  // per-component transform state, filled by the collect-state helpers
  const state = {
    name: undefined,
    data: {},
    props: {},
    computeds: {},
    components: {},
    classMethods: {},
    $refs: {}, // collected ref names
    vForVars: {}, // variables introduced by v-for
  };
  // Life-cycle methods relations mapping
  const cycle = {
    created: 'componentWillMount',
    mounted: 'componentDidMount',
    updated: 'componentDidUpdate',
    beforeDestroy: 'componentWillUnmount',
    errorCaptured: 'componentDidCatch',
    render: 'render',
  };
  const collect = {
    imports: [],
    classMethods: {},
  };
  // split the SFC into template / script / styles
  const component = formatContent(fileContent);
  const result = { jsx: '', css: '' };
  /* solve styles */
  const styles = component.styles;
  // css import suffix passed to genImports (stays null here: css is returned, not written)
  let suffixName = null;
  if (isUseCssModule && styles && styles[0]) {
    const style = styles[0];
    result.css = style.content;
  }
  try {
    // parse the script block
    let ast = parse(component.js, {
      sourceType: 'module',
      strictMode: false,
      plugins,
    });
    if (isTs) {
      transformTS(ast);
    }
    initProps(ast, state);
    initData(ast, state);
    initComputed(ast, state);
    initComponents(ast, state); // SFC
    babelTraverse(ast, {
      ImportDeclaration(path) {
        // keep every import except the vue one
        if (path.node.source && path.node.source.value !== 'vue') collect.imports.unshift(path.node);
      },
      ObjectMethod(path) {
        const name = path.node.key.name;
        if (path.parentPath.parent.key && path.parentPath.parent.key.name === 'methods') {
          handleGeneralMethods(path, collect, state, name);
        } else if (cycle[name]) {
          handleCycleMethods(path, collect, state, name, cycle[name], true);
        } else {
          if (name === 'data' || state.computeds[name]) {
            return;
          }
          // log(`The ${name} method maybe be not support now`);
        }
      },
    });
    const html = component.template && transfromTemplate(component.template, state);
    // // AST for react component
    const tpl = `export default class ${parseName(state.name)} extends Component {}`;
    const rast = parse(tpl, {
      sourceType: 'module',
    });
    babelTraverse(rast, {
      Program(path) {
        genImports(path, collect, suffixName);
      },
      ClassBody(path) {
        genConstructor(path, state);
        genStaticProps(path, state);
        genClassMethods(path, state);
        genSFCRenderMethod(path, state, html);
      },
    });
    // rewrite registered component tags to their imported React names
    babelTraverse(rast, {
      ClassMethod(path) {
        if (path.node.key.name === 'render') {
          path.traverse({
            JSXIdentifier(path) {
              if (isJSXClosingElement(path.parent) || isJSXOpeningElement(path.parent)) {
                const node = path.node;
                const componentName = state.components[node.name] || state.components[parseComponentName(node.name)];
                if (componentName) {
                  path.replaceWith(jSXIdentifier(componentName));
                  path.stop();
                }
              }
            },
          });
        }
      },
    });
    const { code } = generate(rast, {
      quotes: 'single',
      retainLines: true,
    });
    result.jsx = format(code, { parser: 'babel' });
    result.css = format(result.css, { parser: 'css' });
    return result;
  } catch (error) {
    log(error);
  }
}
/**
 * Splits a .vue single-file component into its parts.
 * @param {string} source raw .vue file content
 * @returns {{ template: string|null, js: string, styles: Array }}
 */
function formatContent(source) {
  const res = parseComponent(source, { pad: 'line' });
  // strip the blank comment lines `pad: 'line'` inserts, then unwrap
  // Vue.extend so the default export is a plain options object
  const jsCode = res.script.content.replace(/\/\/\n/g, '').replace('export default Vue.extend', 'export default ');
  return {
    template: res.template ? res.template.content : null,
    js: jsCode,
    styles: res.styles,
  };
}
<|start_filename|>src/transform/index.js<|end_filename|>
/**
* @babel/parser通过该模块来解析我们的代码生成AST抽象语法树;
* @babel/traverse通过该模块对AST节点进行递归遍历;
* @babel/types通过该模块对具体的AST节点进行进行增、删、改、查;
* @babel/generator通过该模块可以将修改后的AST生成新的代码;
*/
import { existsSync, statSync, readFileSync, readdirSync, mkdirSync, copyFileSync } from 'fs';
import rimraf from 'rimraf';
import { parse } from '@babel/parser';
import babelTraverse from '@babel/traverse';
import generate from '@babel/generator';
import { parseComponent } from 'vue-template-compiler';
import { isJSXClosingElement, isJSXOpeningElement, jSXIdentifier } from '@babel/types';
import { parseName, log, parseComponentName } from './utils';
import transformTS from './ts';
import transfromTemplate from './sfc';
import { initProps, initData, initComputed, initComponents } from './collect-state';
import { genImports, genConstructor, genStaticProps, genClassMethods } from './react-ast-helpers';
import { handleCycleMethods, handleGeneralMethods } from './vue-ast-helpers';
import { genSFCRenderMethod } from './sfc/sfc-ast-helpers';
import outputFile from './output';
// Babel parser plugins required to read the source files
// (TypeScript, JSX, class properties, async/await, rest/spread, decorators).
const plugins = [
  'typescript',
  'jsx',
  'classProperties',
  'trailingFunctionCommas',
  'asyncFunctions',
  'exponentiationOperator',
  'asyncGenerators',
  'objectRestSpread',
  [
    'decorators',
    {
      decoratorsBeforeExport: true,
    },
  ],
];
/**
 * Maps a <style> block's lang attribute to the output file extension.
 * @param {string} lang style language ('stylus' | 'sass' | 'less' | ...)
 * @returns {string} file suffix, defaulting to 'css'
 */
function getSuffix(lang) {
  if (lang === 'stylus') return 'styl';
  if (lang === 'sass') return 'sass';
  if (lang === 'less') return 'less';
  return 'css';
}
/**
 * CLI entry: transforms a single .vue file or a whole directory tree into
 * React components, reporting any files that failed at the end.
 * @param {string} input  source file or directory path
 * @param {string} output target path ('.js' is appended for single files)
 * @param {Object} options { isTs, extra: path patterns to skip }
 */
function transform(input, output, options) {
  const failedList = [];
  const { isTs, extra } = options;
  if (!existsSync(input)) {
    log('未找到有效转译文件源,请重试');
    process.exit();
  }
  if (statSync(input).isFile()) output = output + '.js';
  // if (existsSync(output)) {
  //   log('当前路径存在同名文件!,请重试');
  //   process.exit();
  // }
  if (statSync(input).isFile()) {
    // single file
    solveSingleFile(input, output, { isTs }, failedList);
  } else if (statSync(input).isDirectory()) {
    // directory: node_modules is always excluded
    transformDir(input, output, { isTs, extra: extra.concat('node_modules') }, failedList);
  }
  if (failedList.length) {
    console.log('\n Transform failed list:');
    failedList.map(o => log(`  ${o}`));
  } else {
    log(`\n Transform completed!!\n`, 'success');
  }
}
/**
 * Parses a .vue single-file component into template / script / styles.
 * @param {string} source raw .vue file content
 * @returns {{ template: string|null, js: string, styles: Array }}
 */
function formatContent(source) {
  const parsed = parseComponent(source, { pad: 'line' });
  // drop the padding comment lines, then unwrap Vue.extend(...)
  let jsCode = parsed.script.content.replace(/\/\/\n/g, '');
  jsCode = jsCode.replace('export default Vue.extend', 'export default ');
  const template = parsed.template ? parsed.template.content : null;
  return { template, js: jsCode, styles: parsed.styles };
}
/**
 * Recursively mirrors a directory tree, transforming each file via
 * solveSingleFile. Paths matching any pattern in `extra` are skipped.
 * The output directory is created first when missing, then re-entered.
 * @param {string} input  source directory
 * @param {string} output target directory
 * @param {Object} options { isTs, extra }
 * @param {Array} failedList accumulator for files that failed to transform
 */
function transformDir(input, output, options = {}, failedList) {
  const { isTs, extra } = options;
  const reg = new RegExp(extra.join('|'));
  if (reg.test(input)) return;
  if (existsSync(output)) {
    const files = readdirSync(input);
    files.forEach(file => {
      const from = input + '/' + file;
      const to = output + '/' + file;
      const temp = statSync(from);
      if (reg.test(from)) return;
      if (temp.isDirectory()) {
        transformDir(from, to, { isTs, extra }, failedList);
      } else if (temp.isFile()) {
        console.log(`  Transforming ${from.replace(process.cwd(), '')}`);
        solveSingleFile(from, to, { isTs }, failedList);
      }
    });
  } else {
    // create the target directory, then process the same input again
    mkdirSync(output);
    transformDir(input, output, { isTs, extra }, failedList);
  }
}
/**
 * Transforms one source file:
 *  - .vue files are converted to a React component (plus a css file when
 *    CSS modules are enabled);
 *  - .ts files (with isTs) are stripped of types and written as .js;
 *  - everything else is copied verbatim.
 * On failure the partial outputs are removed and the file is recorded in
 * `failedList`.
 * @param {string} from source file path
 * @param {string} to   target file path
 * @param {Object} opt  { isTs }
 * @param {Array} failedList accumulator for failed file paths
 */
function solveSingleFile(from, to, opt, failedList) {
  // per-component transform state, filled by the collect-state helpers
  const state = {
    name: undefined,
    data: {},
    props: {},
    computeds: {},
    components: {},
    classMethods: {},
    $refs: {}, // collected ref names
    vForVars: {}, // variables introduced by v-for
  };
  // Life-cycle methods relations mapping
  const cycle = {
    created: 'componentWillMount',
    mounted: 'componentDidMount',
    updated: 'componentDidUpdate',
    beforeDestroy: 'componentWillUnmount',
    errorCaptured: 'componentDidCatch',
    render: 'render',
  };
  const collect = {
    imports: [],
    classMethods: {},
  };
  // decide how to handle the file by extension
  const { isTs } = opt;
  const isVue = /\.vue$/.test(from);
  const isTsFile = /\.ts$/.test(from);
  if (!isVue) {
    if (isTsFile && isTs) {
      // plain .ts file: strip the type annotations and emit .js
      let ast = parse(readFileSync(from).toString(), {
        sourceType: 'module',
        strictMode: false,
        plugins,
      });
      transformTS(ast);
      const { code } = generate(ast, {
        quotes: 'single',
        retainLines: true,
      });
      outputFile(
        code,
        to.replace(/(.*).ts$/, (match, o) => o + '.js')
      );
      return;
    } else {
      // any other file is copied through untouched
      copyFileSync(from, to);
      return;
    }
  }
  let fileContent = readFileSync(from);
  const component = formatContent(fileContent.toString());
  /* solve styles */
  const styles = component.styles;
  let suffixName = null;
  let cssRoute = null;
  const isUseCssModule = process.options.cssModule;
  if (isUseCssModule && styles && styles[0]) {
    // write the first style block next to the component as index.<suffix>
    const style = styles[0];
    const route = to.split('/');
    route.pop();
    const cssFileName = route.join('/');
    const suffix = getSuffix(style.attrs.lang);
    suffixName = `index.${suffix}`;
    cssRoute = `${cssFileName}/${suffixName}`;
    outputFile(style.content, cssRoute, 'css'); // sass/less formatting supported
  }
  try {
    // parse the script block
    let ast = parse(component.js, {
      sourceType: 'module',
      strictMode: false,
      plugins,
    });
    if (isTs) {
      transformTS(ast);
    }
    initProps(ast, state);
    initData(ast, state);
    initComputed(ast, state);
    initComponents(ast, state); // SFC
    babelTraverse(ast, {
      ImportDeclaration(path) {
        // keep every import except the vue one
        if (path.node.source && path.node.source.value !== 'vue') collect.imports.unshift(path.node);
      },
      ObjectMethod(path) {
        const name = path.node.key.name;
        if (path.parentPath.parent.key && path.parentPath.parent.key.name === 'methods') {
          handleGeneralMethods(path, collect, state, name);
        } else if (cycle[name]) {
          handleCycleMethods(path, collect, state, name, cycle[name], isVue);
        } else {
          if (name === 'data' || state.computeds[name]) {
            return;
          }
          // log(`The ${name} method maybe be not support now`);
        }
      },
    });
    const html = component.template && transfromTemplate(component.template, state);
    // // AST for react component
    const tpl = `export default class ${parseName(state.name)} extends Component {}`;
    const rast = parse(tpl, {
      sourceType: 'module',
    });
    babelTraverse(rast, {
      Program(path) {
        genImports(path, collect, suffixName);
      },
      ClassBody(path) {
        genConstructor(path, state);
        genStaticProps(path, state);
        genClassMethods(path, state);
        genSFCRenderMethod(path, state, html);
      },
    });
    // rewrite registered component tags to their imported React names
    babelTraverse(rast, {
      ClassMethod(path) {
        if (path.node.key.name === 'render') {
          path.traverse({
            JSXIdentifier(path) {
              if (isJSXClosingElement(path.parent) || isJSXOpeningElement(path.parent)) {
                const node = path.node;
                const componentName = state.components[node.name] || state.components[parseComponentName(node.name)];
                if (componentName) {
                  path.replaceWith(jSXIdentifier(componentName));
                  path.stop();
                }
              }
            },
          });
        }
      },
    });
    const { code } = generate(rast, {
      quotes: 'single',
      retainLines: true,
    });
    outputFile(
      code,
      to.replace(/(.*).vue$/, (match, o) => o + '.js')
    );
  } catch (error) {
    // clean up partial outputs and record the failure
    log(error);
    failedList.push(from.replace(process.cwd(), ''));
    rimraf.sync(to);
    rimraf.sync(cssRoute);
  }
}
export default transform;
<|start_filename|>lib/transform/sfc/event-map.js<|end_filename|>
"use strict";
// Maps native DOM event names (as used by Vue's `v-on:<event>` / `@<event>`)
// to the equivalent React synthetic-event prop names, e.g. click -> onClick.
module.exports = {
  'click': 'onClick',
  'dblclick': 'onDoubleClick',
  'abort': 'onAbort',
  'change': 'onChange',
  'input': 'onInput',
  'error': 'onError',
  'focus': 'onFocus',
  'blur': 'onBlur',
  'keydown': 'onKeyDown',
  'keyup': 'onKeyUp',
  'keypress': 'onKeyPress',
  'load': 'onLoad',
  'mousedown': 'onMouseDown',
  'mouseup': 'onMouseUp',
  'mousemove': 'onMouseMove',
  'mouseenter': 'onMouseEnter',
  'mouseleave': 'onMouseLeave',
  'mouseout': 'onMouseOut',
  'mouseover': 'onMouseOver',
  'reset': 'onReset',
  'resize': 'onResize',
  'select': 'onSelect',
  'submit': 'onSubmit',
  'unload': 'onUnload',
  'drag': 'onDrag',
  'dragend': 'onDragEnd',
  'dragenter': 'onDragEnter',
  'dragexit': 'onDragExit',
  'dragleave': 'onDragLeave',
  'dragover': 'onDragOver',
  'dragstart': 'onDragStart',
  'drop': 'onDrop',
  'touchstart': 'onTouchStart',
  'touchend': 'onTouchEnd',
  'touchcancel': 'onTouchCancel',
  'touchmove': 'onTouchMove'
};
<|start_filename|>src/transform/sfc/index.js<|end_filename|>
import { compile } from 'vue-template-compiler';
import * as t from '@babel/types';
import { generateJSXElement } from './generate-element';
/**
 * Normalizes attribute names on a template AST node, in place:
 *   ':x' -> 'v-bind:x', '@x' -> 'v-on:x', class stays as-is.
 * v-if / v-else-if / v-else are dropped from attrsList (they are handled
 * through ifConditions instead). Recurses into ifConditions blocks and
 * children.
 * @param {*} ast template AST node
 * @param {boolean} isNochild when true, ignore ifConditions on this node
 */
function flatName(ast, isNochild = false) {
  const hasConditions = ast.ifConditions && ast.ifConditions.length && !isNochild;
  if (hasConditions) {
    ast.ifConditions.forEach(({ block }) => flatName(block, true));
    return;
  }
  if (!ast || ast.type !== 1) return;
  const attrObj = ast.attrsMap;
  const attrsList = [];
  for (const name of Object.keys(attrObj)) {
    // conditions are represented via ifConditions, not attributes
    if (name === 'v-if' || name === 'v-else-if' || name === 'v-else') continue;
    let key = name;
    if (/^:/.test(name)) {
      // unify shorthand binding into v-bind
      key = name.replace(/^:/, 'v-bind:');
    } else if (/^@/.test(name)) {
      key = name.replace(/^@/, 'v-on:');
    }
    attrsList.push({ key, value: attrObj[name] });
  }
  ast.attrsList = attrsList;
  if (ast.children) {
    ast.children.forEach(o => flatName(o));
  }
}
/**
 * Compiles a Vue template string into the body of a React render method.
 * Returns a JSX return statement, or a statement node (e.g. an if-chain)
 * when the root element is conditional.
 * @param {string} template the <template> markup
 * @param {Object} state    transform state
 */
export default function transfromTemplate(template, state) {
  const { ast } = compile(template);
  // normalize shorthand names first, e.g. '@click' -> 'v-on:click'
  flatName(ast);
  const argument = generateJSXElement(ast, null, state);
  return t.isJSXElement(argument) ? t.returnStatement(argument) : argument;
}
<|start_filename|>lib/transform/react-ast-helpers.js<|end_filename|>
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.genClassMethods = genClassMethods;
exports.genConstructor = genConstructor;
exports.genImports = genImports;
exports.genStaticProps = genStaticProps;
var t = require('@babel/types');
var chalk = require('chalk');
var _require = require('./utils'),
genDefaultProps = _require.genDefaultProps,
genPropTypes = _require.genPropTypes;
/**
 * Prepends all collected import declarations (plus the React import and,
 * optionally, a CSS-module import) to the program body.
 * Note: unshifting one by one reverses `collect.imports`, so the React
 * import pushed last ends up first in the emitted file.
 * @param {Object} path          babel path of the Program node
 * @param {Object} collect       accumulator holding `imports`
 * @param {string} cssSuffixName css file name to import as `styles`, if any
 */
function genImports(path, collect, cssSuffixName) {
  var body = path.node.body;
  var reactSpecifiers = [t.importDefaultSpecifier(t.identifier('React')), t.importSpecifier(t.identifier('Component'), t.identifier('Component'))];
  if (cssSuffixName) {
    var cssImport = t.importDeclaration([t.importDefaultSpecifier(t.identifier('styles'))], t.stringLiteral("./".concat(cssSuffixName)));
    collect.imports.unshift(cssImport);
  }
  collect.imports.push(t.importDeclaration(reactSpecifiers, t.stringLiteral('react')));
  collect.imports.forEach(function (node) {
    body.unshift(node);
  });
}
/**
 * Appends a `constructor(props)` to the class body. It calls super(props),
 * replays the statements of the Vue data() function, and turns its return
 * object into `this.state = {...}`; data fields initialized from props are
 * rewritten to read from the `props` argument.
 * @param {Object} path  babel path of the ClassBody node
 * @param {Object} state transform state (data['_statements'] holds data() body)
 */
function genConstructor(path, state) {
  var nodeLists = path.node.body;
  // constructor always starts with super(props)
  var blocks = [t.expressionStatement(t.callExpression(t["super"](), [t.identifier('props')]))];
  if (state.data['_statements']) {
    state.data['_statements'].forEach(function (node) {
      if (t.isReturnStatement(node)) {
        var props = node.argument.properties; // supports init data property with props property
        props.forEach(function (n) {
          if (t.isMemberExpression(n.value)) {
            // this.xxx in data() becomes props.xxx in the constructor
            n.value = t.memberExpression(t.identifier('props'), t.identifier(n.value.property.name));
          }
        });
        // the returned object literal becomes this.state = {...}
        blocks.push(t.expressionStatement(t.assignmentExpression('=', t.memberExpression(t.thisExpression(), t.identifier('state')), node.argument)));
      } else {
        // any other statement of data() is replayed verbatim
        blocks.push(node);
      }
    });
  }
  var ctro = t.classMethod('constructor', t.identifier('constructor'), [t.identifier('props')], t.blockStatement(blocks));
  nodeLists.push(ctro);
}
/**
 * Appends the `static defaultProps` class property when the component
 * declares any props. (propTypes generation is currently disabled.)
 * @param {Object} path  babel path of the ClassBody node
 * @param {Object} state transform state
 */
function genStaticProps(path, state) {
  var hasProps = Object.keys(state.props).length > 0;
  if (!hasProps) return;
  // nodeLists.push(genPropTypes(props)); // propTypes generation disabled
  path.node.body.push(genDefaultProps(state.props));
}
/**
 * Appends every collected class-method node to the class body, in
 * insertion order.
 * @param {Object} path  babel path of the ClassBody node
 * @param {Object} state transform state holding classMethods by name
 */
function genClassMethods(path, state) {
  var body = path.node.body;
  var methods = state.classMethods;
  Object.keys(methods).forEach(function (key) {
    body.push(methods[key]);
  });
}
<|start_filename|>src/transform/vue-ast-helpers.js<|end_filename|>
const t = require('@babel/types');
const { log, getIdentifier, getStateOrProp, camelName } = require('./utils');
/**
 * Babel visitor applied to the body of every Vue method while it is being
 * converted into a React class method. It rewrites:
 *   - `this.x = v`           => `this.setState({ x: v })`
 *   - `this.$emit('a:b', d)` => `this.props.aB(d)`
 *   - `this.x`               => `this.state.x` / `this.props.x`
 *   - `this.$refs.x`         => `this.x`
 * The traversal state is expected to provide `{ state }` (transform state).
 */
const nestedMethodsVisitor = {
  // VariableDeclaration(path) {
  //   const declarations = path.node.declarations;
  //   declarations.forEach(d => {
  //     if (t.isMemberExpression(d.init)) {
  //       const key = d.init.property.name;
  //       d.init.object = t.memberExpression(
  //         t.thisExpression(),
  //         getIdentifier(this.state, key)
  //       );
  //     }
  //   });
  // },
  ExpressionStatement(path) {
    const expression = path.node.expression;
    if (
      t.isAssignmentExpression(expression) &&
      t.isThisExpression(expression.left.object)
    ) {
      // rewrite `this.<prop> = value` into `this.setState({ <prop>: value })`
      const right = expression.right;
      const leftNode = expression.left.property;
      let key = leftNode;
      if (t.isTemplateLiteral(leftNode)) {
        // a template-literal key must be hoisted into a const first
        key = t.identifier(`TemplateLiteral_${+new Date()}`);
        const declarator = t.variableDeclarator(key, leftNode);
        const declaration = t.variableDeclaration('const', [declarator]);
        path.parent.body.unshift(declaration);
      }
      path.node.expression = t.callExpression(
        t.memberExpression(t.thisExpression(), t.identifier('setState')),
        [
          t.objectExpression([
            t.objectProperty(key, right, t.isExpression(key))
          ])
        ]
      );
    }
    if (
      t.isCallExpression(expression) &&
      t.isThisExpression(expression.callee.object) &&
      expression.callee.property.name === '$emit'
    ) {
      // this.$emit('xxx',data) => this.props.xxx(data)
      path.traverse({
        CallExpression(memPath) {
          const args = memPath.node.arguments;
          if (!t.isStringLiteral(args[0])) {
            // only literal event names can be mapped to a prop name
            log(`this.$emit(${args[0].name}, ${args[1].name}) :`);
            log('    expected string type but got ' + args[0].type);
            return;
          }
          const property = t.isStringLiteral(args[0])
            ? t.identifier(camelName(args[0].value, ':'))
            : args[0];
          memPath.replaceWith(
            t.callExpression(
              t.memberExpression(
                t.memberExpression(t.thisExpression(), t.identifier('props')),
                property
              ),
              args[1] ? [args[1]] : []
            )
          );
          memPath.stop();
        }
      });
    }
  },
  MemberExpression(path) {
    const node = path.node;
    if (t.isThisExpression(node.object)) {
      const key = node.property.name;
      // this.x becomes this.state.x / this.props.x (state/props/$refs pass through)
      if (key !== 'state' && key !== 'props' && key !== '$refs') {
        const replaceStr = getStateOrProp(this.state, key);
        path
          .get('object') // get the `ThisExpression`
          .replaceWithSourceString(replaceStr);
        path.stop();
      }
    }
    if (
      t.isMemberExpression(node.object) &&
      node.object.property.name === '$refs'
    ) {
      // this.$refs.x becomes this.x
      path
        .get('object') // get the `ThisExpression`
        .replaceWithSourceString('this');
      path.stop();
    }
  }
};
/**
 * Converts a Vue option/lifecycle method into a React class property
 * holding an arrow function, rewriting `this.x` access and `$emit` calls
 * via nestedMethodsVisitor on the way.
 * @param {Object} path  babel path of the Vue ObjectMethod
 * @param {Object} state transform state
 * @param {string} name  target React method name
 * @returns a classProperty node
 */
function createClassMethod(path, state, name) {
  const body = path.node.body;
  let params = path.node.params;
  if (name === 'componentDidCatch') {
    // React's componentDidCatch has a fixed (error, info) signature
    params = [t.identifier('error'), t.identifier('info')];
  }
  // Removed the unused `blocks` accumulator that used to be threaded through
  // the traversal state; the visitor only ever reads `state`.
  path.traverse(nestedMethodsVisitor, { state });
  return t.classProperty(
    t.identifier(name),
    t.arrowFunctionExpression(params, t.blockStatement(body.body))
  );
}
/**
 * Rewrites a `this` expression inside render: data/prop keys become
 * `this.state.x` / `this.props.x`; any other key is assumed to be a
 * computed value and the whole member expression becomes a bare identifier.
 */
function replaceThisExpression(path, key, state) {
  const isStateOrProp = Boolean(state.data[key] || state.props[key]);
  if (!isStateOrProp) {
    // computed: render declares it as a local variable with the same name
    path.parentPath.replaceWith(t.identifier(key));
    path.stop();
    return;
  }
  path.replaceWith(
    t.memberExpression(t.thisExpression(), getIdentifier(state, key))
  );
  path.stop();
}
/**
 * Converts a user-written Vue render() into a React render class method:
 * rewrites `this.*` references, `class` -> `className`, and
 * `domPropsInnerHTML` -> `dangerouslySetInnerHTML`, then prepends the
 * statements of every computed property so their locals are in scope.
 * @param {Object} path  babel path of the Vue render ObjectMethod
 * @param {Object} state transform state
 * @param {string} name  method name ('render')
 * @returns a classMethod node
 */
function createRenderMethod(path, state, name) {
  if (path.node.params.length) {
    // Vue render receives createElement/h; React render takes no params
    log(`
      Maybe you will call $createElement or h method in your render, but react does not support it.
      And it's maybe cause some unknown error in transforming
      `);
  }
  path.traverse({
    ThisExpression(thisPath) {
      // only rewrite `this.x` in positions where a plain expression is valid
      const parentNode = thisPath.parentPath.parentPath.parent;
      const isValid =
        t.isExpressionStatement(parentNode) ||
        t.isVariableDeclaration(parentNode) ||
        t.isBlockStatement(parentNode) ||
        t.isJSXElement(parentNode) ||
        t.isCallExpression(parentNode) ||
        (t.isJSXAttribute(parentNode) &&
          !parentNode.name.name.startsWith('on'));
      if (isValid) {
        // prop
        const key = thisPath.parent.property.name;
        replaceThisExpression(thisPath, key, state);
      }
    },
    JSXAttribute(attrPath) {
      const attrNode = attrPath.node;
      if (attrNode.name.name === 'class') {
        // class -> className
        attrPath.replaceWith(
          t.jSXAttribute(t.jSXIdentifier('className'), attrNode.value)
        );
      }
      if (attrNode.name.name === 'domPropsInnerHTML') {
        // domPropsInnerHTML -> dangerouslySetInnerHTML={{ __html: ... }}
        const v = attrNode.value;
        if (t.isLiteral(v)) {
          attrPath.replaceWith(
            t.jSXAttribute(
              t.jSXIdentifier('dangerouslySetInnerHTML'),
              t.jSXExpressionContainer(
                t.objectExpression([
                  t.objectProperty(t.identifier('__html'), attrNode.value)
                ])
              )
            )
          );
        } else if (t.isJSXExpressionContainer(v)) {
          const expression = v.expression;
          if (t.isMemberExpression(expression)) {
            // rewrite this.* inside the html expression as well
            attrPath.traverse({
              ThisExpression(thisPath) {
                const key = thisPath.parent.property.name;
                replaceThisExpression(thisPath, key, state);
              }
            });
          }
          attrPath.replaceWith(
            t.jSXAttribute(
              t.jSXIdentifier('dangerouslySetInnerHTML'),
              t.jSXExpressionContainer(
                t.objectExpression([
                  t.objectProperty(t.identifier('__html'), expression)
                ])
              )
            )
          );
        }
      }
    }
  });
  let blocks = [];
  // prepend the statements of each computed property so render can use them
  const computedProps = Object.keys(state.computeds);
  if (computedProps.length) {
    computedProps.forEach(prop => {
      const v = state.computeds[prop];
      blocks = blocks.concat(v['_statements']);
    });
  }
  blocks = blocks.concat(path.node.body.body);
  return t.classMethod(
    'method',
    t.identifier(name),
    [],
    t.blockStatement(blocks)
  );
}
/**
 * Registers a Vue lifecycle method on the collected React class under
 * `cycleName`. `render` is handled specially, and skipped entirely for
 * SFC-style output where the render method is generated elsewhere.
 */
export function handleCycleMethods(path, collect, state, name, cycleName, isSFC) {
  if (name !== 'render') {
    collect.classMethods[cycleName] = createClassMethod(path, state, cycleName);
    return;
  }
  if (isSFC) {
    return;
  }
  collect.classMethods[cycleName] = createRenderMethod(path, state, name);
}
/**
 * Converts an ordinary Vue method and records it both on the output class
 * and in the shared transform state (so later passes can resolve it).
 */
export function handleGeneralMethods(path, collect, state, name) {
  const method = createClassMethod(path, state, name);
  collect.classMethods[name] = method;
  state.classMethods[name] = method;
}
<|start_filename|>src/index.js<|end_filename|>
import { dirname, resolve } from 'path';
import helper from './doc/helper';
import chalk from 'chalk';
// Fix: `transform` was called below but never imported, so running the CLI
// with a target path threw a ReferenceError.
import transform from './transform';
process.env.HOME_DIR = dirname(require.resolve('../package'));
const nodeVersion = process.versions.node;
const versions = nodeVersion.split('.');
const major = versions[0];
const minor = versions[1];
// Fix: the old `major * 10 + minor < 100` check mis-ranked versions
// (e.g. 9.12 -> 102, which passed); compare the major version directly.
if (Number(major) < 10) {
  console.log(`Node version must >= 10.0, but got ${major}.${minor}`);
  process.exit(1);
}
const updater = require('update-notifier');
const pkg = require('../package.json');
// Check npm for a newer published version at most once a week.
const notifier = updater({ pkg, updateCheckInterval: 1000 * 60 * 60 * 24 * 7 });
if (notifier.update && notifier.update.latest !== pkg.version) {
  // A newer release exists: colour the bump type and prompt to upgrade.
  const old = notifier.update.current;
  const latest = notifier.update.latest;
  let type = notifier.update.type;
  switch (type) {
    case 'major':
      type = chalk.red(type);
      break;
    case 'minor':
      type = chalk.yellow(type);
      break;
    case 'patch':
      type = chalk.green(type);
      break;
    default:
      break;
  }
  notifier.notify({
    message: `New ${type} version of ${pkg.name} available! ${chalk.red(old)} -> ${chalk.green(latest)}\nRun ${chalk.green(
      `npm install -g ${pkg.name}`
    )} to update!`,
  });
}
// CLI argument parsing: `trans <target> [options]`.
const command = process.argv[2];
const args = process.argv.slice(3);
const version = pkg.version;
const outputIndex = args.findIndex(o => o === '-o' || o === '--output');
const extraIndex = args.findIndex(o => o === '-i' || o === '--ignore');
const isTs = args.includes('-t') || args.includes('--ts');
const cssModule = args.includes('-m') || args.includes('--module'); // emit css-module lookups (styles[...]) instead of global class names
switch (command) {
  case '-v':
  case '--version':
    console.log(version);
    break;
  case '-h':
  case '--help':
    helper();
    break;
  default:
    if (!command) helper();
    else {
      const input = resolve(process.cwd(), command);
      const output =
        outputIndex > -1 && args[outputIndex + 1] ? resolve(process.cwd(), args[outputIndex + 1]) : resolve(process.cwd(), 'react__from__vue');
      const extra = extraIndex > -1 && args[extraIndex + 1] ? args[extraIndex + 1].split(',') : [];
      const options = {
        isTs,
        cssModule,
        extra,
      };
      process.options = options;
      transform(input, output, options);
    }
    break;
}
<|start_filename|>src/transform/utils.js<|end_filename|>
import * as t from '@babel/types';
import chalk from 'chalk';
/**
 * Converts a delimited name to camelCase, e.g.
 * `camelName('update:value', ':')` -> `updateValue`.
 * Names without the delimiter are returned unchanged (original casing kept).
 *
 * @param {string} name  source name
 * @param {string} split delimiter to split on (default `_`)
 * @returns {string} camelCased name
 */
export function camelName(name, split = '_') {
  const parts = name.toLowerCase().split(split);
  if (parts.length === 1) return name;
  // Skip empty segments (doubled/trailing delimiters) instead of crashing
  // on `undefined[0].toUpperCase()` as the previous version did.
  return parts.reduce((prev, next) =>
    next ? prev + next[0].toUpperCase() + next.substr(1) : prev
  );
}
/**
 * Converts a delimited name to PascalCase, e.g. `parseName('to-do')` ->
 * `ToDo`. Falls back to the default component name for empty input.
 * Names without the delimiter are returned unchanged.
 *
 * @param {string} name  source name (default `react-component`)
 * @param {string} split delimiter to split on (default `-`)
 * @returns {string} PascalCased name
 */
export function parseName(name, split = '-') {
  // Fix: the fallback name was misspelled (`react-compoennt`).
  name = name || 'react-component';
  const parts = name.toLowerCase().split(split);
  if (parts.length === 1) return name;
  // Seeded with '' so every segment (including the first) is capitalised;
  // empty segments are skipped instead of crashing.
  return parts.reduce(
    (prev, next) =>
      next ? prev + next[0].toUpperCase() + next.substr(1) : prev,
    ''
  );
}
/**
 * Turns a kebab-case tag name into a PascalCase component name
 * (`to-do` -> `ToDo`). Returns `undefined` for a falsy input.
 */
export function parseComponentName(str) {
  if (!str) return;
  let result = '';
  for (const part of str.split('-')) {
    result += part[0].toUpperCase() + part.substr(1);
  }
  return result;
}
/**
 * Prints a coloured console message: red (with a leading space) for errors,
 * green otherwise.
 */
export function log(msg, type = 'error') {
  const isError = type === 'error';
  const text = isError ? chalk.red(` ${msg}`) : chalk.green(msg);
  return console.log(text);
}
/**
 * Picks the owning object for `key`: `state` when it was declared in the
 * component's data, otherwise `props`.
 */
export function getIdentifier(state, key) {
  if (state.data[key]) {
    return t.identifier('state');
  }
  return t.identifier('props');
}
/**
 * Source-string counterpart of `getIdentifier`: yields `this.state` for data
 * members and `this.props` for everything else.
 */
export function getStateOrProp(state, key) {
  if (state.data[key]) return 'this.state';
  return 'this.props';
}
/**
 * Builds a `static propTypes` class property from the collected Vue props.
 * Type mapping: `typesOfArray` -> PropTypes.oneOfType([...]),
 * `array` -> PropTypes.array, validator -> PropTypes.oneOf([...]),
 * everything else -> PropTypes.<type>; `required` appends `.isRequired`.
 */
export function genPropTypes(props) {
  const properties = [];
  const keys = Object.keys(props);
  for (let i = 0, l = keys.length; i < l; i++) {
    const key = keys[i];
    const obj = props[key];
    const identifier = t.identifier(key);
    // Fallback: untyped props become `PropTypes.any`.
    let val = t.memberExpression(
      t.identifier('PropTypes'),
      t.identifier('any')
    );
    if (obj.type === 'typesOfArray' || obj.type === 'array') {
      if (obj.type === 'typesOfArray') {
        // `type: [Number, String]` -> `PropTypes.oneOfType([...])`.
        const elements = [];
        obj.value.forEach(val => {
          elements.push(
            t.memberExpression(t.identifier('PropTypes'), t.identifier(val))
          );
        });
        val = t.callExpression(
          t.memberExpression(
            t.identifier('PropTypes'),
            t.identifier('oneOfType')
          ),
          [t.arrayExpression(elements)]
        );
      } else {
        val = obj.required
          ? t.memberExpression(
              t.memberExpression(
                t.identifier('PropTypes'),
                t.identifier('array')
              ),
              t.identifier('isRequired')
            )
          : t.memberExpression(
              t.identifier('PropTypes'),
              t.identifier('array')
            );
      }
    } else if (obj.validator) {
      // Complex validators can break the transform — only array-literal
      // `oneOf` membership checks are carried over.
      const node = t.callExpression(
        t.memberExpression(t.identifier('PropTypes'), t.identifier('oneOf')),
        [t.arrayExpression(obj.validator.elements)]
      );
      if (obj.required) {
        val = t.memberExpression(node, t.identifier('isRequired'));
      } else {
        val = node;
      }
    } else {
      val = obj.required
        ? t.memberExpression(
            t.memberExpression(
              t.identifier('PropTypes'),
              t.identifier(obj.type)
            ),
            t.identifier('isRequired')
          )
        : t.memberExpression(t.identifier('PropTypes'), t.identifier(obj.type));
    }
    properties.push(t.objectProperty(identifier, val));
  }
  // Babel can't express a `static` class property directly here, so the
  // modifier is smuggled in via the identifier name ("static propTypes").
  return t.classProperty(
    t.identifier('static propTypes'),
    t.objectExpression(properties),
    null,
    []
  );
}
/**
 * Builds a `static defaultProps` class property from the collected Vue props.
 * Only props with a defined default `value` are emitted. Defaults are mapped
 * to the matching literal/array/object AST node.
 */
export function genDefaultProps(props) {
  const properties = [];
  const keys = Object.keys(props).filter(key => props[key].value !== undefined);
  for (let i = 0, l = keys.length; i < l; i++) {
    const key = keys[i];
    const obj = props[key];
    const identifier = t.identifier(key);
    let val = null;
    if (obj.type === 'typesOfArray') {
      // Multi-type prop: emit its scalar default, if any.
      // NOTE(review): non-number/string defaults fall through to
      // booleanLiteral — confirm multi-type props only carry scalar defaults.
      const type = typeof obj.defaultValue;
      if (type !== 'undefined') {
        const v = obj.defaultValue;
        val =
          type === 'number'
            ? t.numericLiteral(Number(v))
            : type === 'string'
            ? t.stringLiteral(v)
            : t.booleanLiteral(v);
      } else {
        // No default recorded — skip this prop entirely.
        continue;
      }
    } else if (obj.type === 'array') {
      val = t.arrayExpression(obj.value.elements);
    } else if (obj.type === 'object') {
      val = t.objectExpression(obj.value.properties);
    } else {
      switch (typeof obj.value) {
        case 'string':
          val = t.stringLiteral(obj.value);
          break;
        case 'boolean':
          val = t.booleanLiteral(obj.value);
          break;
        case 'number':
          val = t.numericLiteral(Number(obj.value));
          break;
        default:
          // Fallback: stringify anything else.
          val = t.stringLiteral(obj.value);
      }
    }
    properties.push(t.objectProperty(identifier, val));
  }
  // Babel can't express a `static` class property directly here, so the
  // modifier is smuggled in via the identifier name ("static defaultProps").
  return t.classProperty(
    t.identifier('static defaultProps'),
    t.objectExpression(properties),
    null,
    []
  );
}
<|start_filename|>src/debug.js<|end_filename|>
/* node demo */
// require('@babel/register');
// const resolve = require('path').resolve;
// const trans = require('./transform').default;
// const input = resolve(process.cwd(), 'demo/demo.vue');
// const output = resolve(process.cwd(), 'demo/react');
// const options = {
//   isTs: true,
//   cssModule: false
// };
// process.options = options;
// trans(input, output, { isTs: true });
/* browser demo */
// Ad-hoc manual test: reads the demo SFC, runs the string-based transform
// and prints the resulting JSX and CSS to the console.
require('@babel/register');
const fs = require('fs');
const trans = require('./transform/transform').default;
const resolve = require('path').resolve;
const input = resolve(process.cwd(), 'demo/demo.vue');
const content = fs.readFileSync(input);
const res = trans(content.toString());
console.log(res.jsx)
console.log(res.css)
<|start_filename|>src/transform/collect-state.js<|end_filename|>
const babelTraverse = require('@babel/traverse').default;
const t = require('@babel/types');
const { log } = require('./utils');
const collectVueProps = require('./vue-props');
const collectVueComputed = require('./vue-computed');
/**
* Collect vue component state(data prop, props prop & computed prop)
* Don't support watch prop of vue component
*/
/**
 * Collects the component name and `props` declarations from the default
 * export into the transform state. Also validates that the module contains
 * exactly one `export default` declaration, aborting the process otherwise.
 *
 * @param {*} ast   parsed component module
 * @param {*} state shared transform state (mutated)
 */
export function initProps(ast, state) {
  babelTraverse(ast, {
    Program(path) {
      const nodeLists = path.node.body;
      let count = 0;
      for (let i = 0; i < nodeLists.length; i++) {
        const node = nodeLists[i];
        if (t.isExportDefaultDeclaration(node)) {
          count++;
        }
      }
      if (count > 1 || !count) {
        // Fix typos in the diagnostic ("hava" -> "have", "youe" -> "your").
        const msg = !count ? 'Must have one' : 'Only one';
        log(`${msg} export default declaration in your vue component file`);
        process.exit();
      }
    },
    ObjectProperty(path) {
      const parent = path.parentPath.parent;
      const name = path.node.key.name;
      if (parent) {
        if (name === 'name') {
          // `name: 'my-comp'` -> component name (string literals only).
          if (t.isStringLiteral(path.node.value)) {
            state.name = path.node.value.value;
          } else {
            log(`The value of name prop should be a string literal.`);
          }
        } else if (name === 'props') {
          collectVueProps(path, state);
          path.stop();
        }
      }
    }
  });
}
/**
 * Collects the component's `data()` members into the transform state:
 * stores the raw method statements under `_statements` and maps every
 * returned property name to its value node.
 *
 * @param {*} ast   parsed component module
 * @param {*} state shared transform state (mutated)
 */
export function initData(ast, state) {
  babelTraverse(ast, {
    ObjectMethod(path) {
      // Only the `data()` method declared directly on the default export.
      const parent = path.parentPath.parent;
      const name = path.node.key.name;
      if (parent && t.isExportDefaultDeclaration(parent)) {
        if (name === 'data') {
          const body = path.node.body.body;
          state.data['_statements'] = [].concat(body);
          // Fix: initialise as an array — the previous `{}` placeholder has
          // no `forEach` and crashed when `data()` had no return statement.
          let propNodes = [];
          body.forEach(node => {
            if (t.isReturnStatement(node)) {
              propNodes = node.argument.properties;
            }
          });
          propNodes.forEach(propNode => {
            state.data[propNode.key.name] = propNode.value;
          });
          path.stop();
        }
      }
    }
  });
}
/**
 * Finds the component's `computed` block and delegates its collection to
 * `collectVueComputed`.
 */
export function initComputed(ast, state) {
  babelTraverse(ast, {
    ObjectProperty(path) {
      const owner = path.parentPath.parent;
      if (!owner) return;
      if (path.node.key.name !== 'computed') return;
      collectVueComputed(path, state);
      path.stop();
    }
  });
}
/**
 * Records locally registered child components (`components: { ToDo }`) as a
 * name -> identifier-name map on the transform state.
 */
export function initComponents(ast, state) {
  babelTraverse(ast, {
    ObjectProperty(path) {
      const owner = path.parentPath.parent;
      const isComponentsBlock =
        owner &&
        t.isExportDefaultDeclaration(owner) &&
        path.node.key.name === 'components';
      if (!isComponentsBlock) return;
      for (const prop of path.node.value.properties) {
        state.components[prop.key.name] = prop.value.name;
      }
      path.stop();
    }
  });
}
<|start_filename|>lib/debug.js<|end_filename|>
"use strict";

// NOTE(review): this file looks like the Babel-compiled counterpart of
// src/debug.js — prefer regenerating from source over editing by hand.
/* node demo */
// require('@babel/register');
// const resolve = require('path').resolve;
// const trans = require('./transform').default;
// const input = resolve(process.cwd(), 'demo/demo.vue');
// const output = resolve(process.cwd(), 'demo/react');
// const options = {
//   isTs: true,
//   cssModule: false
// };
// process.options = options;
// trans(input, output, { isTs: true });
/* browser demo */
// Reads the demo SFC, runs the string-based transform and prints the result.
require('@babel/register');
var fs = require('fs');
var trans = require('./transform/transform')["default"];
var resolve = require('path').resolve;
var input = resolve(process.cwd(), 'demo/demo.vue');
var content = fs.readFileSync(input);
var res = trans(content.toString());
console.log(res.jsx);
console.log(res.css);
<|start_filename|>src/mian.js<|end_filename|>
// Library entry point: re-exports the core string-based transform.
// NOTE(review): the filename appears to be misspelled (`mian.js` vs
// `main.js`) — confirm against package.json before renaming.
import transform from './transform/transform';
export default transform;
<|start_filename|>demo/react.js<|end_filename|>
import React, { Component } from "react";
import ToDo from "./todo";
import "./your.less";
export default class TestSfc extends Component {
constructor(props) {
super(props);
const now = Date.now();
this.state = {
list: [1, 2, 3],
html: "<div>1111<span>222</span>333<p>ssssss</p></div>",
error: false,
checked: false,
time: now
};
}
static defaultProps = { msg: "hello, sfc" };
clickMethod = () => {
console.log("click method");
};
testMethod = () => {
console.log("call test");
};
render() {
const test = () => {
console.log("from computed", this.props.msg);
return `${this.state.time}: ${this.state.html}`;
};
return (
<div className="wrap">
<div calss="wrap-tit">time: {this.state.time}</div>
{this.state.error ? (
<p>some error happend</p>
) : (
<p className="name">your msg: {this.props.msg}</p>
)}
{this.props.msg && <p className="shown">test v-show</p>}
<p onClick={this.clickMethod}>test v-on</p>
<img src={this.props.imageSrc}></img>
<ul className="test-list">
{this.state.list.map((value, index) => (
<li className="list-item" key={index}>
<div>{value}</div>
<span>{this.props.msg}</span>
</li>
))}
</ul>
<input
value={this.state.text}
onInput={e => this.setState({ text: e.target.value })}
></input>
<input
type="checkbox"
value={this.state.checked}
onInput={e => this.setState({ checked: e.target.checked })}
></input>
<span>{this.props.text}</span>
<div dangerouslySetInnerHTML={{ __html: this.state.html }}></div>
<ToDo msg={this.props.msg} list={this.state.list}></ToDo>
{this.props.msg}
</div>
);
}
}
<|start_filename|>lib/transform/sfc/directives.js<|end_filename|>
"use strict";

// NOTE(review): this file looks like Babel-compiled output of
// src/transform/sfc/directives.js — prefer editing the source and
// regenerating rather than patching this artifact by hand.
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.handleAttribution = handleAttribution;
exports.handleExpression = handleExpression;
exports.handleForDirective = handleForDirective;
exports.handleOnDirective = handleOnDirective;
var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime/helpers/slicedToArray"));
var t = require('@babel/types');
var _require = require('../utils'),
  getIdentifier = _require.getIdentifier;
var eventMap = require('./event-map');
// Operator tables. NOTE(review): `LOGINCAL_EXPRESSION` mirrors the
// (misspelled) constant in the source file and neither table appears to be
// referenced in this module — verify before renaming or removing.
var LOGINCAL_EXPRESSION = ['||', '&&', '??'];
var BINARY_EXPRESSION = ['+', '-', '/', '%', '*', '**', '&', ',', '>>', '>>>', '<<', '^', '==', '===', '!=', '!==', 'in', 'instanceof', '>', '<', '>=', '<='];
/**
 * Resolves identifiers inside a raw template expression to their React
 * sources: $refs -> `this.*`, computed -> `name()`, v-for locals -> as-is,
 * data -> `this.state.*`, anything else -> `this.props.*`.
 * e.g. 'state.a+state.b+a.b.c' => each dotted path rewritten per its root.
 */
function handleExpression(state, value) {
  var realVar = '';
  if (value[0] === '{' && value[value.length - 1] === '}') {
    // The expression is itself an object literal: only rewrite the values
    // (the part after each `:`), leaving the keys untouched.
    realVar = value.replace(/:(.+?),?/g, function (word) {
      return word.replace(/[a-zA-Z\$_]+(\w+)?((\.[a-zA-Z\$_]+(\w+))+)?/g, function (match) {
        var index = word.indexOf(match);
        var split = match.split('.');
        var _val = split[0];
        if (split.length === 1 && (word[index - 1] === "'" && word[index + match.length] === "'" || word[index - 1] === '"' && word[index + match.length] === '"')) {
          // Probably a quoted string literal; template literals and similar
          // may slip through this heuristic.
          return match;
        } else if (state.$refs[_val]) return "this.".concat(match);else if (state.computeds[_val]) return "".concat(match, "()");else if (state.vForVars[_val]) return match;else if (state.data[_val]) return "this.state.".concat(match);else return "this.props.".concat(match);
      });
    });
  } else {
    realVar = value.replace(/[a-zA-Z\$_]+(\w+)?((\.[a-zA-Z\$_]+(\w+))+)?/g, function (match) {
      var index = value.indexOf(match);
      var split = match.split('.');
      var _val = split[0];
      if (split.length === 1 && (value[index - 1] === "'" && value[index + match.length] === "'" || value[index - 1] === '"' && value[index + match.length] === '"')) {
        // Probably a quoted string literal; the heuristic is best-effort.
        return match;
      } else if (state.$refs[_val]) return "this.".concat(match);else if (state.computeds[_val]) return "".concat(match, "()");else if (state.data[_val]) return "this.state.".concat(match);else if (state.vForVars[_val]) return match;else return "this.props.".concat(match);
    });
  }
  return t.identifier(realVar);
}
/**
 * Resolves a dynamic attribute value to the right JSX expression depending
 * on where the identifier is defined: computed -> bare identifier,
 * data/props -> `this.state.x` / `this.props.x`, class method -> `this.x`;
 * unknown names fall back to a plain string literal.
 *
 * @param {*} state collected state/props/computeds/classMethods
 * @param {*} value raw attribute value string
 */
function handleAttribution(state, value) {
  var variable = null;
  var key = value.split('.')[0]; // `value` may be a dotted path like `a.b`
  if (state.computeds[key]) {
    variable = t.identifier(value);
  } else if (state.data[key] || state.props[key]) {
    variable = t.memberExpression(t.memberExpression(t.thisExpression(), getIdentifier(state, value)), t.identifier(value));
  } else if (state.classMethods[key]) {
    variable = t.memberExpression(t.thisExpression(), t.identifier(value));
  } else if (!variable) {
    // Unknown identifier: keep the raw text as a static string attribute.
    return t.stringLiteral(value);
  }
  return t.jSXExpressionContainer(variable);
}
/**
 * Rewrites `v-for="(value, index) in list"` into a JSX `.map()` call.
 * When `showIfExp` (a companion v-if/v-show condition) is given, the mapped
 * body returns the element only when the condition holds.
 *
 * @param {*} value raw v-for expression
 * @param {*} dom   JSX element to repeat
 * @param {*} state collected transform state
 */
function handleForDirective(value, dom, state, showIfExp) {
  var _value$split = value.split(/\s+?(in|of)\s+?/),
    _value$split2 = (0, _slicedToArray2["default"])(_value$split, 3),
    left = _value$split2[0],
    inOrof = _value$split2[1],
    right = _value$split2[2];
  var _left$replace$replace = left.replace('(', '').replace(')', '').split(','),
    _left$replace$replace2 = (0, _slicedToArray2["default"])(_left$replace$replace, 2),
    item = _left$replace$replace2[0],
    index = _left$replace$replace2[1];
  // Register the loop variables so nested expression handling leaves them
  // untouched instead of prefixing this.state/this.props.
  state.vForVars[item.trim()] = true;
  state.vForVars[index.trim()] = true;
  var member = handleExpression(state, right.trim());
  var body = !showIfExp ? dom : t.blockStatement([t.ifStatement(handleExpression(state, showIfExp), t.blockStatement([t.returnStatement(dom)]))]);
  var child = t.jSXExpressionContainer(t.callExpression(t.memberExpression(member, t.identifier('map')), [t.arrowFunctionExpression([t.identifier(item.trim()), t.identifier(index.trim())], body)]));
  return child;
}
/**
 * Maps a Vue event directive key (`@click`, `v-on:change.native`) onto the
 * corresponding React handler name via the `eventMap` table; unknown events
 * keep their name with `:x` camelised to `X`.
 */
function handleOnDirective(key) {
  // Strip the `@` / `v-on:` prefix and any modifiers (`.native`, `.stop`…).
  var name = key.replace(/^(@|v-on:)/, '').split('.')[0];
  var eventName = eventMap[name];
  if (!eventName) {
    // log(`Not support event name:${name}`);
    return name.replace(/:(\w)/g, function (match, letter) {
      return letter.toUpperCase();
    });
  }
  return eventName;
}
<|start_filename|>src/transform/sfc/sfc-ast-helpers.js<|end_filename|>
import * as t from '@babel/types';
/**
 * Appends a `render()` class method to the generated component class.
 * Computed-property statements are inlined at the top of the render body,
 * followed by the (optional) return statement produced from the template.
 */
export function genSFCRenderMethod(path, state, argument) {
  let statements = [];
  // Hoist each computed property's statements into the render body.
  for (const prop of Object.keys(state.computeds)) {
    statements = statements.concat(state.computeds[prop]['_statements']);
  }
  if (argument) statements = statements.concat(argument);
  const render = t.classMethod(
    'method',
    t.identifier('render'),
    [],
    t.blockStatement(statements)
  );
  path.node.body.push(render);
}
<|start_filename|>src/transform/sfc/directives.js<|end_filename|>
const t = require('@babel/types');
const { getIdentifier } = require('../utils');
const eventMap = require('./event-map');
// Operator tables. NOTE(review): `LOGINCAL_EXPRESSION` looks misspelled
// (should be "LOGICAL") and neither table appears to be referenced in this
// module — confirm before renaming or removing.
const LOGINCAL_EXPRESSION = ['||', '&&', '??'];
const BINARY_EXPRESSION = [
  '+',
  '-',
  '/',
  '%',
  '*',
  '**',
  '&',
  ',',
  '>>',
  '>>>',
  '<<',
  '^',
  '==',
  '===',
  '!=',
  '!==',
  'in',
  'instanceof',
  '>',
  '<',
  '>=',
  '<='
];
/**
 * Resolves identifiers inside a raw template expression to their React
 * sources: $refs -> `this.*`, computed -> `name()`, v-for locals -> as-is,
 * data -> `this.state.*`, anything else -> `this.props.*`.
 * e.g. 'state.a+state.b+a.b.c' => each dotted path rewritten per its root.
 */
export function handleExpression(state, value) {
  let realVar = '';
  if (value[0] === '{' && value[value.length - 1] === '}') {
    // The expression is itself an object literal: only rewrite the values
    // (the part after each `:`), leaving the keys untouched.
    realVar = value.replace(/:(.+?),?/g, word => {
      return word.replace(
        /[a-zA-Z\$_]+(\w+)?((\.[a-zA-Z\$_]+(\w+))+)?/g,
        match => {
          const index = word.indexOf(match);
          const split = match.split('.');
          const _val = split[0];
          if (
            split.length === 1 &&
            ((word[index - 1] === "'" && word[index + match.length] === "'") ||
              (word[index - 1] === '"' && word[index + match.length] === '"'))
          ) {
            // Probably a quoted string literal; template literals and
            // similar may slip through this best-effort heuristic.
            return match;
          } else if (state.$refs[_val]) return `this.${match}`;
          else if (state.computeds[_val]) return `${match}()`;
          else if (state.vForVars[_val]) return match;
          else if (state.data[_val]) return `this.state.${match}`;
          else return `this.props.${match}`;
        }
      );
    });
  } else {
    realVar = value.replace(
      /[a-zA-Z\$_]+(\w+)?((\.[a-zA-Z\$_]+(\w+))+)?/g,
      match => {
        const index = value.indexOf(match);
        const split = match.split('.');
        const _val = split[0];
        if (
          split.length === 1 &&
          ((value[index - 1] === "'" && value[index + match.length] === "'") ||
            (value[index - 1] === '"' && value[index + match.length] === '"'))
        ) {
          // Probably a quoted string literal; the heuristic is best-effort.
          return match;
        } else if (state.$refs[_val]) return `this.${match}`;
        else if (state.computeds[_val]) return `${match}()`;
        else if (state.data[_val]) return `this.state.${match}`;
        else if (state.vForVars[_val]) return match;
        else return `this.props.${match}`;
      }
    );
  }
  return t.identifier(realVar);
}
/**
 * Resolves a dynamic attribute value (e.g. `:src="imageSrc"`) to the right
 * JSX expression depending on where the identifier is defined:
 * computed -> bare identifier, data/props -> `this.state.x`/`this.props.x`,
 * class method -> `this.x`; unknown names become a plain string literal.
 *
 * @param {*} state collected state/props/computeds/classMethods
 * @param {*} value raw attribute value string
 */
export function handleAttribution(state, value) {
  const head = value.split('.')[0]; // `value` may be a dotted path like `a.b`
  if (state.computeds[head]) {
    return t.jSXExpressionContainer(t.identifier(value));
  }
  if (state.data[head] || state.props[head]) {
    const member = t.memberExpression(
      t.memberExpression(t.thisExpression(), getIdentifier(state, value)),
      t.identifier(value)
    );
    return t.jSXExpressionContainer(member);
  }
  if (state.classMethods[head]) {
    return t.jSXExpressionContainer(
      t.memberExpression(t.thisExpression(), t.identifier(value))
    );
  }
  // Unknown identifier: keep the raw text as a static string attribute.
  return t.stringLiteral(value);
}
/**
 * Rewrites `v-for="(value, index) in list"` into a JSX `.map()` call.
 * When `showIfExp` (a companion v-if/v-show condition) is given, the mapped
 * body returns the element only when the condition holds.
 *
 * @param {*} value raw v-for expression
 * @param {*} dom   JSX element to repeat
 * @param {*} state collected transform state
 */
export function handleForDirective(value, dom, state, showIfExp) {
  const [left, , right] = value.split(/\s+?(in|of)\s+?/);
  const [item, index] = left.replace('(', '').replace(')', '').split(',');
  // Register the loop variables so nested expression handling leaves them
  // untouched instead of prefixing this.state/this.props.
  state.vForVars[item.trim()] = true;
  state.vForVars[index.trim()] = true;
  const collection = handleExpression(state, right.trim());
  let body = dom;
  if (showIfExp) {
    body = t.blockStatement([
      t.ifStatement(
        handleExpression(state, showIfExp),
        t.blockStatement([t.returnStatement(dom)])
      )
    ]);
  }
  const mapper = t.arrowFunctionExpression(
    [t.identifier(item.trim()), t.identifier(index.trim())],
    body
  );
  return t.jSXExpressionContainer(
    t.callExpression(t.memberExpression(collection, t.identifier('map')), [
      mapper
    ])
  );
}
/**
 * Maps a Vue event directive key (`@click`, `v-on:change.native`) onto the
 * corresponding React handler name via the `eventMap` table; unknown events
 * keep their name with `:x` camelised to `X`.
 */
export function handleOnDirective(key) {
  // Strip the `@` / `v-on:` prefix and any modifiers (`.native`, `.stop`…).
  const raw = key.replace(/^(@|v-on:)/, '').split('.')[0];
  const mapped = eventMap[raw];
  if (mapped) return mapped;
  // log(`Not support event name:${raw}`);
  return raw.replace(/:(\w)/g, (whole, letter) => letter.toUpperCase());
}
<|start_filename|>src/transform/vue-props.js<|end_filename|>
const t = require('@babel/types');
const chalk = require('chalk');
const { log } = require('./utils');
// Visitor for object-form prop declarations
// (`propKey: { type, default, required, validator }`).
// The collector state and the current prop name arrive via `this` — the
// state object passed as the second argument to `path.traverse`.
const nestedPropsVisitor = {
  ObjectProperty(path) {
    const parentKey = path.parentPath.parent.key;
    // Only look at properties belonging to the prop currently collected.
    if (parentKey && parentKey.name === this.childKey) {
      const key = path.node.key;
      const node = path.node.value;
      if (key.name === 'type') {
        if (t.isIdentifier(node)) {
          // `type: Number` -> 'number'
          this.state.props[this.childKey].type = node.name.toLowerCase();
        } else if (t.isArrayExpression(node)) {
          const elements = [];
          node.elements.forEach(n => {
            elements.push(n.name.toLowerCase());
          });
          if (!elements.length) {
            log(
              `Providing a type for the ${this.childKey} prop is a good practice.`
            );
          }
          /**
           * supports following syntax:
           * propKey: { type: [Number, String], default: 0}
           */
          this.state.props[this.childKey].type =
            elements.length > 1
              ? 'typesOfArray'
              : elements[0]
              ? elements[0].toLowerCase()
              : elements;
          this.state.props[this.childKey].value =
            elements.length > 1
              ? elements
              : elements[0]
              ? elements[0]
              : elements;
        } else {
          log(
            `The type in ${this.childKey} prop only supports identifier or array expression, eg: Boolean, [String]`
          );
        }
      }
      // Literal `default` / `required` entries.
      if (t.isLiteral(node)) {
        if (key.name === 'default') {
          // Multi-type props keep their default separately (defaultValue)
          // because `value` already stores the list of types.
          if (this.state.props[this.childKey].type === 'typesOfArray') {
            this.state.props[this.childKey].defaultValue = node.value;
          } else {
            this.state.props[this.childKey].value = node.value;
          }
        }
        if (key.name === 'required') {
          this.state.props[this.childKey].required = node.value;
        }
      }
    }
  },
  ArrowFunctionExpression(path) {
    // Arrow-function `default` factories: `default: () => [...]` or
    // `default: () => { return {...} }`.
    const parentKey = path.parentPath.parentPath.parent.key;
    if (parentKey && parentKey.name === this.childKey) {
      const body = path.node.body;
      if (t.isArrayExpression(body)) {
        // Array
        this.state.props[this.childKey].value = body;
      } else if (t.isBlockStatement(body)) {
        // Object/Block array
        const childNodes = body.body;
        if (childNodes.length === 1 && t.isReturnStatement(childNodes[0])) {
          this.state.props[this.childKey].value = childNodes[0].argument;
        }
      }
      // validator: only array-literal membership checks are captured; the
      // second traverse argument rebinds `this` inside ArrayExpression.
      if (path.parent.key && path.parent.key.name === 'validator') {
        path.traverse(
          {
            ArrayExpression(path) {
              this.state.props[this.childKey].validator = path.node;
            }
          },
          { state: this.state, childKey: this.childKey }
        );
      }
    }
  }
};
module.exports = function collectVueProps(path, state) {
const childs = path.node.value.properties;
const parentKey = path.node.key.name; // props;
if (childs.length) {
path.traverse({
ObjectProperty(propPath) {
const parentNode = propPath.parentPath.parent;
if (parentNode.key && parentNode.key.name === parentKey) {
const childNode = propPath.node;
const childKey = childNode.key.name;
const childVal = childNode.value;
if (!state.props[childKey]) {
if (t.isArrayExpression(childVal)) {
const elements = [];
childVal.elements.forEach(node => {
elements.push(node.name.toLowerCase());
});
state.props[childKey] = {
type:
elements.length > 1
? 'typesOfArray'
: elements[0]
? elements[0].toLowerCase()
: elements,
value:
elements.length > 1
? elements
: elements[0]
? elements[0]
: elements,
required: false,
validator: false
};
} else if (t.isObjectExpression(childVal)) {
state.props[childKey] = {
type: '',
value: undefined,
required: false,
validator: false
};
path.traverse(nestedPropsVisitor, { state, childKey });
} else if (t.isIdentifier(childVal)) {
// supports propKey: type
state.props[childKey] = {
type: childVal.name.toLowerCase(),
value: undefined,
required: false,
validator: false
};
} else {
log(
`Not supports expression for the ${this.childKey} prop in props.`
);
}
}
}
}
});
}
};
<|start_filename|>lib/transform/vue-ast-helpers.js<|end_filename|>
"use strict";

// NOTE(review): this file looks like Babel-compiled output of
// src/transform/vue-ast-helpers.js — prefer editing the source and
// regenerating rather than patching this artifact by hand.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.handleCycleMethods = handleCycleMethods;
exports.handleGeneralMethods = handleGeneralMethods;
var t = require('@babel/types');
var _require = require('./utils'),
  log = _require.log,
  getIdentifier = _require.getIdentifier,
  getStateOrProp = _require.getStateOrProp,
  camelName = _require.camelName;
// Visitor applied to Vue method bodies: rewrites `this.x = v` into
// `this.setState({...})`, `this.$emit(...)` into prop callbacks, and other
// `this.*` accesses into `this.state.*` / `this.props.*`.
var nestedMethodsVisitor = {
  // VariableDeclaration(path) {
  //   const declarations = path.node.declarations;
  //   declarations.forEach(d => {
  //     if (t.isMemberExpression(d.init)) {
  //       const key = d.init.property.name;
  //       d.init.object = t.memberExpression(
  //         t.thisExpression(),
  //         getIdentifier(this.state, key)
  //       );
  //     }
  //   });
  // },
  ExpressionStatement: function ExpressionStatement(path) {
    var expression = path.node.expression;
    if (t.isAssignmentExpression(expression) && t.isThisExpression(expression.left.object)) {
      // `this[prop] = value` => `this.setState({ prop: value })`
      var right = expression.right;
      var leftNode = expression.left.property;
      var key = leftNode;
      if (t.isTemplateLiteral(leftNode)) {
        // Template-literal keys are hoisted into a const so setState can
        // use a computed property name.
        key = t.identifier("TemplateLiteral_".concat(+new Date()));
        var declarator = t.variableDeclarator(key, leftNode);
        var declaration = t.variableDeclaration('const', [declarator]);
        path.parent.body.unshift(declaration);
      }
      path.node.expression = t.callExpression(t.memberExpression(t.thisExpression(), t.identifier('setState')), [t.objectExpression([t.objectProperty(key, right, t.isExpression(key))])]);
    }
    if (t.isCallExpression(expression) && t.isThisExpression(expression.callee.object) && expression.callee.property.name === '$emit') {
      // this.$emit('xxx',data) => this.props.xxx(data)
      path.traverse({
        CallExpression: function CallExpression(memPath) {
          var args = memPath.node.arguments;
          if (!t.isStringLiteral(args[0])) {
            log("this.$emit(".concat(args[0].name, ", ").concat(args[1].name, ") :"));
            log(' expected string type but got ' + args[0].type);
            return;
          }
          var property = t.isStringLiteral(args[0]) ? t.identifier(camelName(args[0].value, ':')) : args[0];
          memPath.replaceWith(t.callExpression(t.memberExpression(t.memberExpression(t.thisExpression(), t.identifier('props')), property), args[1] ? [args[1]] : []));
          memPath.stop();
        }
      });
    }
  },
  MemberExpression: function MemberExpression(path) {
    var node = path.node;
    if (t.isThisExpression(node.object)) {
      var key = node.property.name;
      // Leave already-correct `this.state/props/$refs` accesses untouched.
      if (key !== 'state' && key !== 'props' && key !== '$refs') {
        var replaceStr = getStateOrProp(this.state, key);
        path.get('object') // replace the `ThisExpression` node
        .replaceWithSourceString(replaceStr);
        path.stop();
      }
    }
    if (t.isMemberExpression(node.object) && node.object.property.name === '$refs') {
      // `this.$refs.x` -> `this.x`
      path.get('object') // replace the `this.$refs` object node
      .replaceWithSourceString('this');
      path.stop();
    }
  }
};
/**
 * Builds a React class property holding an arrow function from a Vue method.
 * `componentDidCatch` always receives React's `(error, info)` signature.
 */
function createClassMethod(path, state, name) {
  var body = path.node.body;
  var params = path.node.params;
  var blocks = [];
  if (name === 'componentDidCatch') {
    params = [t.identifier('error'), t.identifier('info')];
  }
  // Rewrite `this.*` accesses and `$emit` calls inside the method body.
  path.traverse(nestedMethodsVisitor, {
    blocks: blocks,
    state: state
  });
  return t.classProperty(t.identifier(name), t.arrowFunctionExpression(params, t.blockStatement(body.body)));
}
/**
 * Replaces a `this` expression according to where `key` is defined:
 * state/props members become `this.state` / `this.props`, computed values
 * (hoisted into the render body) become a bare identifier.
 */
function replaceThisExpression(path, key, state) {
  if (state.data[key] || state.props[key]) {
    path.replaceWith(t.memberExpression(t.thisExpression(), getIdentifier(state, key)));
  } else {
    // from computed
    path.parentPath.replaceWith(t.identifier(key));
  }
  path.stop();
}
/**
 * Builds the React `render()` class method from a Vue render function:
 * rewrites `this.*` references, converts `class` -> `className` and
 * `domPropsInnerHTML` -> `dangerouslySetInnerHTML`, and prepends the
 * hoisted computed-property statements.
 */
function createRenderMethod(path, state, name) {
  if (path.node.params.length) {
    // React's render takes no parameters; warn about `h`/`$createElement`.
    log("\n    Maybe you will call $createElement or h method in your render, but react does not support it.\n    And it's maybe cause some unknown error in transforming\n    ");
  }
  path.traverse({
    ThisExpression: function ThisExpression(thisPath) {
      // Only rewrite `this.x` in plain-expression positions; `on*` handler
      // attributes are left alone.
      var parentNode = thisPath.parentPath.parentPath.parent;
      var isValid = t.isExpressionStatement(parentNode) || t.isVariableDeclaration(parentNode) || t.isBlockStatement(parentNode) || t.isJSXElement(parentNode) || t.isCallExpression(parentNode) || t.isJSXAttribute(parentNode) && !parentNode.name.name.startsWith('on');
      if (isValid) {
        // prop
        var key = thisPath.parent.property.name;
        replaceThisExpression(thisPath, key, state);
      }
    },
    JSXAttribute: function JSXAttribute(attrPath) {
      var attrNode = attrPath.node;
      // `class` is a reserved word in JSX — rename to `className`.
      if (attrNode.name.name === 'class') {
        attrPath.replaceWith(t.jSXAttribute(t.jSXIdentifier('className'), attrNode.value));
      }
      // `domPropsInnerHTML` maps to dangerouslySetInnerHTML={{ __html }}.
      if (attrNode.name.name === 'domPropsInnerHTML') {
        var v = attrNode.value;
        if (t.isLiteral(v)) {
          attrPath.replaceWith(t.jSXAttribute(t.jSXIdentifier('dangerouslySetInnerHTML'), t.jSXExpressionContainer(t.objectExpression([t.objectProperty(t.identifier('__html'), attrNode.value)]))));
        } else if (t.isJSXExpressionContainer(v)) {
          var expression = v.expression;
          if (t.isMemberExpression(expression)) {
            // Resolve `this.*` inside the html expression before wrapping.
            attrPath.traverse({
              ThisExpression: function ThisExpression(thisPath) {
                var key = thisPath.parent.property.name;
                replaceThisExpression(thisPath, key, state);
              }
            });
          }
          attrPath.replaceWith(t.jSXAttribute(t.jSXIdentifier('dangerouslySetInnerHTML'), t.jSXExpressionContainer(t.objectExpression([t.objectProperty(t.identifier('__html'), expression)]))));
        }
      }
    }
  });
  var blocks = []; // computed props
  var computedProps = Object.keys(state.computeds);
  if (computedProps.length) {
    computedProps.forEach(function (prop) {
      var v = state.computeds[prop];
      blocks = blocks.concat(v['_statements']);
    });
  }
  blocks = blocks.concat(path.node.body.body);
  return t.classMethod('method', t.identifier(name), [], t.blockStatement(blocks));
}
/**
 * Map a Vue lifecycle method onto its React equivalent.
 * `render` is special-cased: SFC components skip it entirely, class
 * components get a generated render method; every other hook is copied
 * under its React name (`cycleName`).
 */
function handleCycleMethods(path, collect, state, name, cycleName, isSFC) {
  if (name !== 'render') {
    collect.classMethods[cycleName] = createClassMethod(path, state, cycleName);
    return;
  }
  if (isSFC) {
    return;
  }
  collect.classMethods[cycleName] = createRenderMethod(path, state, name);
}
/**
 * Register a method from the Vue `methods` block on both the collected
 * React class methods and the shared transform state.
 */
function handleGeneralMethods(path, collect, state, name) {
  var method = createClassMethod(path, state, name);
  state.classMethods[name] = method;
  collect.classMethods[name] = method;
}
<|start_filename|>lib/doc/helper.js<|end_filename|>
"use strict";
var chalk = require('chalk');
module.exports = function help() {
console.log("Usage: trans [targetPath] [options]");
console.log("\n Options:\n -v, --version output current version\n -o, --output the output path for react component, default is process.cwd()/react__from__vue\n -i, --ignore fileName or just RegExp => .ts$,ignoreFile.js,ignoreDir default: node_modules\n -m, --module use cssModule(styles.***),default is global mode(\"class-name\")\n -t, --ts it is a typescript component\n -h, --help output usage information\n ");
console.log('Examples:');
console.log('');
console.log(chalk.gray(' # transform a vue component to react component.'));
console.log(' $ convert components/test.vue -o components');
console.log('');
};
<|start_filename|>src/transform/vue-computed.js<|end_filename|>
const t = require('@babel/types');
const { getIdentifier, log } = require('./utils');
// Babel visitor applied inside each computed-property body.  Rewrites
// `this.xxx` member lookups through getIdentifier(state, key) so the emitted
// React code reads from this.state / this.props instead.  Note: `this.state`
// below is the transform state passed as babel traversal state, not React
// component state.
const nestedMethodsVisitor = {
VariableDeclaration(path) {
const declarations = path.node.declarations;
declarations.forEach(d => {
if (t.isMemberExpression(d.init)) {
// `const a = this.foo` -> rebase the lookup onto state/props
const key = d.init.property.name;
d.init.object = t.memberExpression(
t.thisExpression(),
getIdentifier(this.state, key)
);
}
});
},
ExpressionStatement(path) {
const expression = path.node.expression;
// Only rewrite `this` inside calls whose callee is NOT already a
// `this.*` method (those are handled elsewhere).
if (
t.isCallExpression(expression) &&
!t.isThisExpression(expression.callee.object)
) {
path.traverse(
{
ThisExpression(memPath) {
const key = memPath.parent.property.name;
memPath.replaceWith(
t.memberExpression(
t.thisExpression(),
getIdentifier(this.state, key)
)
);
// one replacement per statement is enough; stop the sub-traversal
memPath.stop();
}
},
{ state: this.state }
);
}
if (t.isAssignmentExpression(expression)) {
// return log(`Don't do assignment in ${this.key} computed prop`);
}
},
ReturnStatement(path) {
// Same `this.xxx` rewrite for the computed prop's return expression.
path.traverse(
{
ThisExpression(memPath) {
const key = memPath.parent.property.name;
memPath.replaceWith(
t.memberExpression(
t.thisExpression(),
getIdentifier(this.state, key)
)
);
memPath.stop();
}
},
{ state: this.state }
);
}
};
/**
 * Collect the Vue `computed` block into state.computeds.
 * Each computed method becomes `const <name> = (<params>) => { ...body }`
 * (stored under `_statements`) with `this.*` references already rewritten
 * by nestedMethodsVisitor.
 * @param {NodePath} path ObjectProperty path of the `computed` node
 * @param {Object} state  shared transform state
 */
module.exports = function collectVueComputed(path, state) {
const childs = path.node.value.properties;
const parentKey = path.node.key.name; // computed;
if (childs.length) {
path.traverse({
ObjectMethod(propPath) {
const key = propPath.node.key.name;
// first definition wins; skip duplicates
if (!state.computeds[key]) {
propPath.traverse(nestedMethodsVisitor, { state, key });
const varNode = t.variableDeclaration('const', [
t.variableDeclarator(
t.identifier(key),
t.arrowFunctionExpression(
propPath.node.params,
propPath.node.body
)
)
]);
state.computeds[key] = {
_statements: [varNode]
};
}
}
});
}
};
<|start_filename|>lib/transform/index.js<|end_filename|>
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = void 0;
var _fs = require("fs");
var _rimraf = _interopRequireDefault(require("rimraf"));
var _parser = require("@babel/parser");
var _traverse = _interopRequireDefault(require("@babel/traverse"));
var _generator = _interopRequireDefault(require("@babel/generator"));
var _vueTemplateCompiler = require("vue-template-compiler");
var _types = require("@babel/types");
var _utils = require("./utils");
var _ts = _interopRequireDefault(require("./ts"));
var _sfc = _interopRequireDefault(require("./sfc"));
var _collectState = require("./collect-state");
var _reactAstHelpers = require("./react-ast-helpers");
var _vueAstHelpers = require("./vue-ast-helpers");
var _sfcAstHelpers = require("./sfc/sfc-ast-helpers");
var _output = _interopRequireDefault(require("./output"));
/**
* @babel/parser通过该模块来解析我们的代码生成AST抽象语法树;
* @babel/traverse通过该模块对AST节点进行递归遍历;
* @babel/types通过该模块对具体的AST节点进行进行增、删、改、查;
* @babel/generator通过该模块可以将修改后的AST生成新的代码;
*/
var plugins = ['typescript', 'jsx', 'classProperties', 'trailingFunctionCommas', 'asyncFunctions', 'exponentiationOperator', 'asyncGenerators', 'objectRestSpread', ['decorators', {
decoratorsBeforeExport: true
}]];
/**
 * Map a `<style lang="...">` attribute to the emitted file suffix.
 * Unknown or absent languages fall back to plain CSS.
 * @param {string|undefined} lang style-block language attribute
 * @returns {string} file extension without the dot
 */
function getSuffix(lang) {
  switch (lang) {
    case 'stylus':
      return 'styl';
    case 'sass':
      return 'sass';
    case 'scss':
      // Vue SFCs commonly use lang="scss"; previously this fell through to
      // 'css', writing SCSS syntax into a .css file.
      return 'scss';
    case 'less':
      return 'less';
    default:
      return 'css';
  }
}
/**
* transform
* @param {string:path} input
* @param {string:path} output
* @param {json} options
*/
function transform(input, output, options) {
// source paths whose conversion threw; reported at the end
var failedList = [];
var isTs = options.isTs,
extra = options.extra;
if (!(0, _fs.existsSync)(input)) {
// "no valid source file found, please retry"
(0, _utils.log)('未找到有效转译文件源,请重试');
process.exit();
}
// single-file input: the output path gets a .js suffix appended
if ((0, _fs.statSync)(input).isFile()) output = output + '.js'; // if (existsSync(output)) {
// log('当前路径存在同名文件!,请重试');
// process.exit();
// }
if ((0, _fs.statSync)(input).isFile()) {
// single file
solveSingleFile(input, output, {
isTs: isTs
}, failedList);
} else if ((0, _fs.statSync)(input).isDirectory()) {
// directory: walk recursively, always ignoring node_modules
transformDir(input, output, {
isTs: isTs,
extra: extra.concat('node_modules')
}, failedList);
}
if (failedList.length) {
console.log('\n Transform failed list:');
failedList.map(function (o) {
return (0, _utils.log)(" ".concat(o));
});
} else {
(0, _utils.log)("\n Transform completed!!\n", 'success');
}
}
/**
* 解析vue文件
* @param {*} source
* @returns
*/
// Split a .vue SFC source string into { template, js, styles }.
// NOTE(review): res.script is dereferenced unconditionally — assumes the SFC
// always has a <script> block; confirm against callers.
function formatContent(source) {
var res = (0, _vueTemplateCompiler.parseComponent)(source, {
pad: 'line'
});
// strip the `//` line padding added by pad:'line', then unwrap Vue.extend
var jsCode = res.script.content.replace(/\/\/\n/g, '');
jsCode = jsCode.replace('export default Vue.extend', 'export default ');
return {
template: res.template ? res.template.content : null,
js: jsCode,
styles: res.styles
};
}
// Recursively convert a directory tree. `extra` holds ignore patterns
// (joined into one RegExp); matching paths are skipped entirely.
// NOTE(review): if mkdirSync throws (e.g. permissions) the recursive retry
// at the bottom never runs — the error propagates to the caller.
function transformDir(input, output) {
var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
var failedList = arguments.length > 3 ? arguments[3] : undefined;
var isTs = options.isTs,
extra = options.extra;
var reg = new RegExp(extra.join('|'));
if (reg.test(input)) return;
if ((0, _fs.existsSync)(output)) {
var files = (0, _fs.readdirSync)(input);
files.forEach(function (file) {
var from = input + '/' + file;
var to = output + '/' + file;
var temp = (0, _fs.statSync)(from);
if (reg.test(from)) return;
if (temp.isDirectory()) {
transformDir(from, to, {
isTs: isTs,
extra: extra
}, failedList);
} else if (temp.isFile()) {
console.log(" Transforming ".concat(from.replace(process.cwd(), '')));
solveSingleFile(from, to, {
isTs: isTs
}, failedList);
}
});
} else {
// output dir missing: create it, then re-enter with the same arguments
(0, _fs.mkdirSync)(output);
transformDir(input, output, {
isTs: isTs,
extra: extra
}, failedList);
}
}
// Convert one file. Non-.vue files are either de-typed (.ts with --ts) or
// copied verbatim; .vue files are parsed, their state collected, and a React
// class component is generated. Failures are appended to failedList and any
// partial output is removed.
function solveSingleFile(from, to, opt, failedList) {
// per-file transform state shared by all collect/gen helpers
var state = {
name: undefined,
data: {},
props: {},
computeds: {},
components: {},
classMethods: {},
$refs: {},
// stores template refs
vForVars: {} // stores variables introduced by v-for
}; // Life-cycle methods relations mapping
var cycle = {
created: 'componentWillMount',
mounted: 'componentDidMount',
updated: 'componentDidUpdate',
beforeDestroy: 'componentWillUnmount',
errorCaptured: 'componentDidCatch',
render: 'render'
};
var collect = {
imports: [],
classMethods: {}
}; // read the source file
var isTs = opt.isTs;
var isVue = /\.vue$/.test(from);
var isTsFile = /\.ts$/.test(from);
if (!isVue) {
if (isTsFile && isTs) {
// plain .ts file: parse, strip type annotations, emit as .js
var ast = (0, _parser.parse)((0, _fs.readFileSync)(from).toString(), {
sourceType: 'module',
strictMode: false,
plugins: plugins
});
(0, _ts["default"])(ast);
var _generate = (0, _generator["default"])(ast, {
quotes: 'single',
retainLines: true
}),
code = _generate.code;
(0, _output["default"])(code, to.replace(/(.*).ts$/, function (match, o) {
return o + '.js';
}));
return;
} else {
// anything else is copied through untouched
(0, _fs.copyFileSync)(from, to);
return;
}
}
var fileContent = (0, _fs.readFileSync)(from);
var component = formatContent(fileContent.toString());
/* solve styles */
var styles = component.styles;
var suffixName = null;
var cssRoute = null;
var isUseCssModule = process.options.cssModule;
// only the first <style> block is emitted, and only in css-module mode
if (isUseCssModule && styles && styles[0]) {
var style = styles[0];
var route = to.split('/');
route.pop();
var cssFileName = route.join('/');
var suffix = getSuffix(style.attrs.lang);
suffixName = "index.".concat(suffix);
cssRoute = "".concat(cssFileName, "/").concat(suffixName);
(0, _output["default"])(style.content, cssRoute, 'css'); // sass/less formats supported
}
try {
// parse the <script> block
var _ast = (0, _parser.parse)(component.js, {
sourceType: 'module',
strictMode: false,
plugins: plugins
});
if (isTs) {
(0, _ts["default"])(_ast);
}
// collect name/props/data/computed/components into `state`
(0, _collectState.initProps)(_ast, state);
(0, _collectState.initData)(_ast, state);
(0, _collectState.initComputed)(_ast, state);
(0, _collectState.initComponents)(_ast, state); // SFC
(0, _traverse["default"])(_ast, {
ImportDeclaration: function ImportDeclaration(path) {
// keep every import except `vue` itself for re-emission
if (path.node.source && path.node.source.value !== 'vue') collect.imports.unshift(path.node);
},
ObjectMethod: function ObjectMethod(path) {
var name = path.node.key.name;
if (path.parentPath.parent.key && path.parentPath.parent.key.name === 'methods') {
(0, _vueAstHelpers.handleGeneralMethods)(path, collect, state, name);
} else if (cycle[name]) {
(0, _vueAstHelpers.handleCycleMethods)(path, collect, state, name, cycle[name], isVue);
} else {
if (name === 'data' || state.computeds[name]) {
return;
} // log(`The ${name} method maybe be not support now`);
}
}
});
var html = component.template && (0, _sfc["default"])(component.template, state); // // AST for react component
var tpl = "export default class ".concat((0, _utils.parseName)(state.name), " extends Component {}");
var rast = (0, _parser.parse)(tpl, {
sourceType: 'module'
});
// populate the empty React class shell
(0, _traverse["default"])(rast, {
Program: function Program(path) {
(0, _reactAstHelpers.genImports)(path, collect, suffixName);
},
ClassBody: function ClassBody(path) {
(0, _reactAstHelpers.genConstructor)(path, state);
(0, _reactAstHelpers.genStaticProps)(path, state);
(0, _reactAstHelpers.genClassMethods)(path, state);
(0, _sfcAstHelpers.genSFCRenderMethod)(path, state, html);
}
}); // map Vue component tags inside render() to their imported React names
(0, _traverse["default"])(rast, {
ClassMethod: function ClassMethod(path) {
if (path.node.key.name === 'render') {
path.traverse({
JSXIdentifier: function JSXIdentifier(path) {
if ((0, _types.isJSXClosingElement)(path.parent) || (0, _types.isJSXOpeningElement)(path.parent)) {
var node = path.node;
var componentName = state.components[node.name] || state.components[(0, _utils.parseComponentName)(node.name)];
if (componentName) {
path.replaceWith((0, _types.jSXIdentifier)(componentName));
path.stop();
}
}
}
});
}
}
});
var _generate2 = (0, _generator["default"])(rast, {
quotes: 'single',
retainLines: true
}),
_code = _generate2.code;
(0, _output["default"])(_code, to.replace(/(.*).vue$/, function (match, o) {
return o + '.js';
}));
} catch (error) {
// record the failure and clean up any partially-written output
(0, _utils.log)(error);
failedList.push(from.replace(process.cwd(), ''));
_rimraf["default"].sync(to);
_rimraf["default"].sync(cssRoute);
}
}
// CommonJS/ESM interop: expose the transform entry point as the default export.
var _default = transform;
exports["default"] = _default;
<|start_filename|>lib/transform/collect-state.js<|end_filename|>
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.initComponents = initComponents;
exports.initComputed = initComputed;
exports.initData = initData;
exports.initProps = initProps;
var babelTraverse = require('@babel/traverse')["default"];
var t = require('@babel/types');
var _require = require('./utils'),
log = _require.log;
var collectVueProps = require('./vue-props');
var collectVueComputed = require('./vue-computed');
/**
* Collect vue component state(data prop, props prop & computed prop)
* Don't support watch prop of vue component
*/
/**
 * Validate the module shape (exactly one `export default`) and collect the
 * `name` and `props` declarations from the Vue component into `state`.
 * Exits the process when the export-default invariant is violated.
 */
function initProps(ast, state) {
  babelTraverse(ast, {
    Program: function Program(path) {
      // A convertible component file must contain exactly one export default.
      var nodeLists = path.node.body;
      var count = 0;
      for (var i = 0; i < nodeLists.length; i++) {
        var node = nodeLists[i];
        if (t.isExportDefaultDeclaration(node)) {
          count++;
        }
      }
      if (count > 1 || !count) {
        // BUGFIX: corrected user-facing message typos ("hava" -> "have",
        // "youe" -> "your").
        var msg = !count ? 'Must have one' : 'Only one';
        log("".concat(msg, " export default declaration in your vue component file"));
        process.exit();
      }
    },
    ObjectProperty: function ObjectProperty(path) {
      var parent = path.parentPath.parent;
      var name = path.node.key.name;
      if (parent) {
        if (name === 'name') {
          if (t.isStringLiteral(path.node.value)) {
            state.name = path.node.value.value;
          } else {
            log("The value of name prop should be a string literal.");
          }
        } else if (name === 'props') {
          collectVueProps(path, state);
          path.stop();
        }
      }
    }
  });
}
/**
 * Collect the component's `data()` method into state.data: the raw body
 * statements under `_statements`, plus one entry per property of the
 * returned object literal.
 */
function initData(ast, state) {
  babelTraverse(ast, {
    ObjectMethod: function ObjectMethod(path) {
      var parent = path.parentPath.parent;
      var name = path.node.key.name;
      if (parent && t.isExportDefaultDeclaration(parent)) {
        if (name === 'data') {
          var body = path.node.body.body;
          state.data['_statements'] = [].concat(body);
          // BUGFIX: was initialized to `{}` — plain objects have no
          // forEach, so a data() without a return statement crashed.
          // With an array the loop below is simply a no-op.
          var propNodes = [];
          body.forEach(function (node) {
            if (t.isReturnStatement(node)) {
              propNodes = node.argument.properties;
            }
          });
          propNodes.forEach(function (propNode) {
            state.data[propNode.key.name] = propNode.value;
          });
          path.stop();
        }
      }
    }
  });
}
/**
 * Find the `computed` property of the component and delegate its
 * collection to collectVueComputed; stops traversing once found.
 */
function initComputed(ast, state) {
  babelTraverse(ast, {
    ObjectProperty: function ObjectProperty(path) {
      if (!path.parentPath.parent) return;
      if (path.node.key.name !== 'computed') return;
      collectVueComputed(path, state);
      path.stop();
    }
  });
}
// Collect the `components` mapping of the default export into
// state.components (tag name -> imported identifier name).
// NOTE(review): assumes every entry's value is a plain identifier
// (`components: { Foo }`); other value forms would store undefined — confirm.
function initComponents(ast, state) {
babelTraverse(ast, {
ObjectProperty: function ObjectProperty(path) {
var parent = path.parentPath.parent;
var name = path.node.key.name;
if (parent && t.isExportDefaultDeclaration(parent)) {
if (name === 'components') {
// collectVueComputed(path, state);
var props = path.node.value.properties;
props.forEach(function (prop) {
state.components[prop.key.name] = prop.value.name;
});
path.stop();
}
}
}
});
}
;
<|start_filename|>lib/transform/vue-computed.js<|end_filename|>
"use strict";
var t = require('@babel/types');
var _require = require('./utils'),
getIdentifier = _require.getIdentifier,
log = _require.log;
// Transpiled twin of src/transform/vue-computed.js's visitor: rewrites
// `this.xxx` inside computed bodies via getIdentifier(state, key).
// `this.state` is the babel traversal state, not component state.
var nestedMethodsVisitor = {
VariableDeclaration: function VariableDeclaration(path) {
var _this = this;
var declarations = path.node.declarations;
declarations.forEach(function (d) {
if (t.isMemberExpression(d.init)) {
// `var a = this.foo` -> rebase onto state/props
var key = d.init.property.name;
d.init.object = t.memberExpression(t.thisExpression(), getIdentifier(_this.state, key));
}
});
},
ExpressionStatement: function ExpressionStatement(path) {
var expression = path.node.expression;
// only calls whose callee is not already `this.*`
if (t.isCallExpression(expression) && !t.isThisExpression(expression.callee.object)) {
path.traverse({
ThisExpression: function ThisExpression(memPath) {
var key = memPath.parent.property.name;
memPath.replaceWith(t.memberExpression(t.thisExpression(), getIdentifier(this.state, key)));
memPath.stop();
}
}, {
state: this.state
});
}
if (t.isAssignmentExpression(expression)) {// return log(`Don't do assignment in ${this.key} computed prop`);
}
},
ReturnStatement: function ReturnStatement(path) {
// same rewrite for the computed prop's return expression
path.traverse({
ThisExpression: function ThisExpression(memPath) {
var key = memPath.parent.property.name;
memPath.replaceWith(t.memberExpression(t.thisExpression(), getIdentifier(this.state, key)));
memPath.stop();
}
}, {
state: this.state
});
}
};
// Collect the Vue `computed` block into state.computeds; each computed
// method becomes a `const <name> = (...) => {...}` declaration stored under
// `_statements`, with `this.*` already rewritten by nestedMethodsVisitor.
module.exports = function collectVueComputed(path, state) {
var childs = path.node.value.properties;
var parentKey = path.node.key.name; // computed;
if (childs.length) {
path.traverse({
ObjectMethod: function ObjectMethod(propPath) {
var key = propPath.node.key.name;
// first definition wins; skip duplicates
if (!state.computeds[key]) {
propPath.traverse(nestedMethodsVisitor, {
state: state,
key: key
});
var varNode = t.variableDeclaration('const', [t.variableDeclarator(t.identifier(key), t.arrowFunctionExpression(propPath.node.params, propPath.node.body))]);
state.computeds[key] = {
_statements: [varNode]
};
}
}
});
}
};
<|start_filename|>lib/index.js<|end_filename|>
"use strict";
// CLI entry point: version gate, update notification, then argv dispatch.
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _path = require("path");
var _helper = _interopRequireDefault(require("./doc/helper"));
var _chalk = _interopRequireDefault(require("chalk"));
process.env.HOME_DIR = (0, _path.dirname)(require.resolve('../package'));
var nodeVersion = process.versions.node;
var versions = nodeVersion.split('.');
var major = versions[0];
var minor = versions[1];
// NOTE(review): `major * 10 + minor` is a flawed check — e.g. Node 9.11
// yields 101 and passes the ">= 10.0" gate; a proper (major, minor) tuple
// comparison would be safer. Confirm before changing.
if (major * 10 + minor * 1 < 100) {
console.log("Node version must >= 10.0, but got ".concat(major, ".").concat(minor));
process.exit(1);
}
var updater = require('update-notifier');
var pkg = require('../package.json');
var notifier = updater({
pkg: pkg,
updateCheckInterval: 1000 * 60 * 60 * 24 * 7
});
if (notifier.update && notifier.update.latest !== pkg.version) {
// a newer version exists: colorize the bump type and prompt to update
var old = notifier.update.current;
var latest = notifier.update.latest;
var type = notifier.update.type;
switch (type) {
case 'major':
type = _chalk["default"].red(type);
break;
case 'minor':
type = _chalk["default"].yellow(type);
break;
case 'patch':
type = _chalk["default"].green(type);
break;
default:
break;
}
notifier.notify({
message: "New ".concat(type, " version of ").concat(pkg.name, " available! ").concat(_chalk["default"].red(old), " -> ").concat(_chalk["default"].green(latest), "\nRun ").concat(_chalk["default"].green("npm install -g ".concat(pkg.name)), " to update!")
});
}
var command = process.argv[2];
var args = process.argv.slice(3);
var version = pkg.version;
var outputIndex = args.findIndex(function (o) {
return o === '-o' || o === '--output';
});
var extraIndex = args.findIndex(function (o) {
return o === '-i' || o === '--ignore';
});
var isTs = args.includes('-t') || args.includes('--ts');
var cssModule = args.includes('-m') || args.includes('--module'); // whether to emit css-modules styles[...]
switch (command) {
case '-v':
case '--version':
console.log(version);
break;
case '-h':
case '--help':
(0, _helper["default"])();
break;
default:
if (!command) (0, _helper["default"])();else {
var input = (0, _path.resolve)(process.cwd(), command);
var output = outputIndex > -1 && args[outputIndex + 1] ? (0, _path.resolve)(process.cwd(), args[outputIndex + 1]) : (0, _path.resolve)(process.cwd(), 'react__from__vue');
var extra = extraIndex > -1 && args[extraIndex + 1] ? args[extraIndex + 1].split(',') : [];
var options = {
isTs: isTs,
cssModule: cssModule,
extra: extra
};
// options are also exposed globally for modules that read process.options
process.options = options;
// NOTE(review): `transform` is not required anywhere in this chunk —
// presumably imported in the full file (./transform); confirm, else this
// throws ReferenceError at runtime.
transform(input, output, options);
}
break;
}
<|start_filename|>lib/transform/vue-props.js<|end_filename|>
"use strict";
var t = require('@babel/types');
var chalk = require('chalk');
var _require = require('./utils'),
log = _require.log;
// Visitor run over an object-form prop declaration
// (`propKey: { type, default, required, validator }`). Invoked with
// traversal state { state, childKey }, so `this.childKey` names the prop
// currently being collected; results land in state.props[childKey].
var nestedPropsVisitor = {
ObjectProperty: function ObjectProperty(path) {
// only look at properties directly under the prop being collected
var parentKey = path.parentPath.parent.key;
if (parentKey && parentKey.name === this.childKey) {
var key = path.node.key;
var node = path.node.value;
if (key.name === 'type') {
if (t.isIdentifier(node)) {
this.state.props[this.childKey].type = node.name.toLowerCase();
} else if (t.isArrayExpression(node)) {
var elements = [];
node.elements.forEach(function (n) {
elements.push(n.name.toLowerCase());
});
if (!elements.length) {
log("Providing a type for the ".concat(this.childKey, " prop is a good practice."));
}
/**
* supports following syntax:
* propKey: { type: [Number, String], default: 0}
*/
this.state.props[this.childKey].type = elements.length > 1 ? 'typesOfArray' : elements[0] ? elements[0].toLowerCase() : elements;
this.state.props[this.childKey].value = elements.length > 1 ? elements : elements[0] ? elements[0] : elements;
} else {
log("The type in ".concat(this.childKey, " prop only supports identifier or array expression, eg: Boolean, [String]"));
}
}
// literal `default` / `required` values
if (t.isLiteral(node)) {
if (key.name === 'default') {
if (this.state.props[this.childKey].type === 'typesOfArray') {
this.state.props[this.childKey].defaultValue = node.value;
} else {
this.state.props[this.childKey].value = node.value;
}
}
if (key.name === 'required') {
this.state.props[this.childKey].required = node.value;
}
}
}
},
// arrow-function `default` factories and `validator` functions
ArrowFunctionExpression: function ArrowFunctionExpression(path) {
var parentKey = path.parentPath.parentPath.parent.key;
if (parentKey && parentKey.name === this.childKey) {
var body = path.node.body;
if (t.isArrayExpression(body)) {
// Array
this.state.props[this.childKey].value = body;
} else if (t.isBlockStatement(body)) {
// Object/Block array
var childNodes = body.body;
if (childNodes.length === 1 && t.isReturnStatement(childNodes[0])) {
this.state.props[this.childKey].value = childNodes[0].argument;
}
} // validator
if (path.parent.key && path.parent.key.name === 'validator') {
path.traverse({
ArrayExpression: function ArrayExpression(path) {
this.state.props[this.childKey].validator = path.node;
}
}, {
state: this.state,
childKey: this.childKey
});
}
}
}
};
module.exports = function collectVueProps(path, state) {
var childs = path.node.value.properties;
var parentKey = path.node.key.name; // props;
if (childs.length) {
path.traverse({
ObjectProperty: function ObjectProperty(propPath) {
var parentNode = propPath.parentPath.parent;
if (parentNode.key && parentNode.key.name === parentKey) {
var childNode = propPath.node;
var childKey = childNode.key.name;
var childVal = childNode.value;
if (!state.props[childKey]) {
if (t.isArrayExpression(childVal)) {
var elements = [];
childVal.elements.forEach(function (node) {
elements.push(node.name.toLowerCase());
});
state.props[childKey] = {
type: elements.length > 1 ? 'typesOfArray' : elements[0] ? elements[0].toLowerCase() : elements,
value: elements.length > 1 ? elements : elements[0] ? elements[0] : elements,
required: false,
validator: false
};
} else if (t.isObjectExpression(childVal)) {
state.props[childKey] = {
type: '',
value: undefined,
required: false,
validator: false
};
path.traverse(nestedPropsVisitor, {
state: state,
childKey: childKey
});
} else if (t.isIdentifier(childVal)) {
// supports propKey: type
state.props[childKey] = {
type: childVal.name.toLowerCase(),
value: undefined,
required: false,
validator: false
};
} else {
log("Not supports expression for the ".concat(this.childKey, " prop in props."));
}
}
}
}
});
}
};
<|start_filename|>lib/transform/ts.js<|end_filename|>
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = _default;
var _traverse = _interopRequireDefault(require("@babel/traverse"));
/**
* 用于去除ts类型检测
* @export
* @param {*} ast
* @returns
*/
/**
 * Strip TypeScript-only constructs from an AST so plain JS can be emitted:
 * exported/standalone interfaces and type aliases, type parameters,
 * annotations, and `as` casts.
 * @param {*} ast babel AST (mutated in place)
 * @returns the same ast
 */
function _default(ast) {
  // shared handler: just delete the node
  var drop = function drop(path) {
    path.remove();
  };
  (0, _traverse["default"])(ast, {
    // `export interface ... ` / `export type ...` — remove the whole export
    ExportNamedDeclaration: function ExportNamedDeclaration(exportPath) {
      var declaration = exportPath.get('declaration');
      if (declaration && (declaration.isTSInterfaceDeclaration() || declaration.isTSTypeAliasDeclaration())) {
        exportPath.remove();
      }
    },
    TSTypeAliasDeclaration: drop,
    TSTypeParameterDeclaration: drop,
    TSInterfaceDeclaration: drop,
    TSTypeParameterInstantiation: drop,
    TSTypeAnnotation: drop,
    // `expr as T` -> `expr`
    TSAsExpression: function TSAsExpression(path) {
      path.replaceWith(path.get('expression'));
    }
  });
  return ast;
}
<|start_filename|>lib/transform/sfc/index.js<|end_filename|>
"use strict";
var _typeof = require("@babel/runtime/helpers/typeof");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = transfromTemplate;
var _vueTemplateCompiler = require("vue-template-compiler");
var t = _interopRequireWildcard(require("@babel/types"));
var _generateElement = require("./generate-element");
// Babel-generated interop helper: lazily creates the two WeakMap caches
// (node-interop vs babel mode) used by _interopRequireWildcard.
function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function _getRequireWildcardCache(nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
// Babel-generated interop helper: wraps a CommonJS module so `import * as t`
// works — copies own properties (preserving accessors) and caches the result.
function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || _typeof(obj) !== "object" && typeof obj !== "function") { return { "default": obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj["default"] = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
/**
* 保证名称一致 '@click' => v-on:click :text => v-bind:text
* class ==> className
* @param {*} ast
*/
function flatName(ast) {
var isNochild = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
// v-if/v-else-if/v-else chains: normalize each condition block once,
// without recursing into it a second time
if (ast.ifConditions && ast.ifConditions.length && !isNochild) {
ast.ifConditions.map(function (_ref) {
var block = _ref.block;
return flatName(block, true);
});
} else if (ast && ast.type === 1) {
// type 1 = element node
var attrsList = [];
var attrObj = ast.attrsMap;
Object.keys(attrObj).map(function (o) {
var key = o;
// conditional directives are handled via ifConditions above
if (key === 'v-if' || key === 'v-else-if' || key === 'v-else') return;else if (/^:/.test(o)) {
// normalize to v-bind
key = o.replace(/^:/, 'v-bind:');
} else if (/^@/.test(o)) key = o.replace(/^@/, 'v-on:');
attrsList.push({
key: key,
value: attrObj[o]
});
});
ast.attrsList = attrsList;
if (!ast.children) return;
// recurse into child nodes
ast.children.map(function (o) {
return flatName(o);
});
}
}
// Compile the SFC <template> into a React render body: parse with
// vue-template-compiler, normalize attribute names, then build a JSX tree.
// NOTE(review): `transfromTemplate` (sic) is the exported name — keep the
// typo for API compatibility.
function transfromTemplate(template, state) {
var ast = (0, _vueTemplateCompiler.compile)(template).ast;
flatName(ast); // normalize names, e.g. @click => v-on:click
var argument = (0, _generateElement.generateJSXElement)(ast, null, state);
// wrap a top-level JSX element in `return (...)` for the render method
if (t.isJSXElement(argument)) {
argument = t.returnStatement(argument);
}
return argument;
} | zhaozeq/vue-to-react
<|start_filename|>go-fuzz/sys_windows.go<|end_filename|>
// Copyright 2015 go-fuzz project authors. All rights reserved.
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
package main
import (
"fmt"
"log"
"os"
"os/exec"
"reflect"
"syscall"
"unsafe"
)
// lowerProcessPrio is a no-op on Windows for now; other platforms lower the
// fuzzing process priority here so workers don't starve the system.
func lowerProcessPrio() {
// TODO: implement me
}
// Mapping is a shared-memory region used to communicate with the fuzzed
// subprocess: the file-mapping handle plus the address it is mapped at.
type Mapping struct {
mapping syscall.Handle
addr uintptr
}
// createMapping maps `size` bytes of shared memory and returns the mapping
// together with a byte slice over it. Fatal on any failure.
// NOTE(review): `name` is opened and immediately closed; the mapping itself
// is pagefile-backed (InvalidHandle), not backed by that file — confirm the
// open is only an existence/permission check.
func createMapping(name string, size int) (*Mapping, []byte) {
f, err := os.OpenFile(name, os.O_RDWR, 0)
if err != nil {
log.Fatalf("failed to open comm file: %v", err)
}
defer f.Close()
mapping, err := syscall.CreateFileMapping(syscall.InvalidHandle, nil, syscall.PAGE_READWRITE, 0, uint32(size), nil)
if err != nil {
log.Fatalf("failed to create file mapping: %v", err)
}
const FILE_MAP_ALL_ACCESS = 0xF001F
addr, err := syscall.MapViewOfFile(mapping, FILE_MAP_ALL_ACCESS, 0, 0, uintptr(size))
if err != nil {
log.Fatalf("failed to mmap comm file: %v", err)
}
// Build a []byte header over the raw mapped address.
hdr := reflect.SliceHeader{addr, size, size}
mem := *(*[]byte)(unsafe.Pointer(&hdr))
mem[0] = 1 // test access
return &Mapping{mapping, addr}, mem
}
// destroy unmaps the view and closes the underlying mapping handle.
func (m *Mapping) destroy() {
syscall.UnmapViewOfFile(m.addr)
syscall.CloseHandle(m.mapping)
}
// setupCommMapping marks the comm mapping and the two pipe handles as
// inheritable and publishes their handle/fd values to the child process via
// GO_FUZZ_* environment variables.
func setupCommMapping(cmd *exec.Cmd, comm *Mapping, rOut, wIn *os.File) {
syscall.SetHandleInformation(syscall.Handle(comm.mapping), syscall.HANDLE_FLAG_INHERIT, 1)
syscall.SetHandleInformation(syscall.Handle(rOut.Fd()), syscall.HANDLE_FLAG_INHERIT, 1)
syscall.SetHandleInformation(syscall.Handle(wIn.Fd()), syscall.HANDLE_FLAG_INHERIT, 1)
cmd.Env = append(cmd.Env, fmt.Sprintf("GO_FUZZ_COMM_FD=%v", comm.mapping))
cmd.Env = append(cmd.Env, fmt.Sprintf("GO_FUZZ_IN_FD=%v", rOut.Fd()))
cmd.Env = append(cmd.Env, fmt.Sprintf("GO_FUZZ_OUT_FD=%v", wIn.Fd()))
}
<|start_filename|>test/vendor/non.existent.com/foo/foo.go<|end_filename|>
// Copyright 2016 go-fuzz project authors. All rights reserved.
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
package foo
// Foo is an intentionally empty exported symbol; presumably the vendored
// package (test/vendor/non.existent.com/foo) only needs to exist for the
// vendoring test — confirm.
func Foo() {
}
<|start_filename|>slides/regexp.go<|end_filename|>
// Prevent go install ./... from complaining about different packages in the same dir.
// +build
package regexp
import (
"regexp"
)
// START OMIT
// FuzzRegexp interprets the input as: one flag byte ("longest" match mode),
// then a regular expression (first half), then a subject string (the rest),
// and exercises Compile/Longest/FindAll on them. Always reports 0.
func FuzzRegexp(data []byte) int {
	if len(data) < 3 {
		return 0
	}
	useLongest := data[0]%2 != 0 // first byte as "longest" flag
	pattern := data[1 : len(data)/2]
	subject := data[len(data)/2:]
	re, err := regexp.Compile(string(pattern))
	if err != nil {
		return 0
	}
	if useLongest {
		re.Longest()
	}
	re.FindAll(subject, -1)
	return 0
}
// END OMIT
<|start_filename|>go-fuzz/exectype_string.go<|end_filename|>
// Code generated by "stringer -type execType -trimprefix exec"; DO NOT EDIT.
package main
import "strconv"
// Names of all execType values packed into one string, sliced by the index
// table below. Generated by stringer — regenerate rather than hand-editing.
const _execType_name = "BootstrapCorpusMinimizeInputMinimizeCrasherTriageInputFuzzVersifierSmashSonarSonarHintTotalCount"
var _execType_index = [...]uint8{0, 9, 15, 28, 43, 54, 58, 67, 72, 77, 86, 91, 96}
// String returns the name of the execType; out-of-range values render as
// "execType(N)".
func (i execType) String() string {
if i >= execType(len(_execType_index)-1) {
return "execType(" + strconv.FormatInt(int64(i), 10) + ")"
}
return _execType_name[_execType_index[i]:_execType_index[i+1]]
}
<|start_filename|>go-fuzz/vendor/github.com/stephens2424/writerset/writerset.go<|end_filename|>
// Package writerset implements a mechanism to add and remove writers from a construct
// similar to io.MultiWriter.
package writerset
import (
"io"
"net/http"
"sync"
)
// ErrPartialWrite encapsulates an error from a WriterSet.
type ErrPartialWrite struct {
Writer io.Writer // the writer that failed (it is removed from the set)
Err error // underlying write error
Expected, Wrote int // bytes requested vs bytes actually written
}
// Error returns the error string from the underlying error, satisfying the
// error interface.
func (e ErrPartialWrite) Error() string {
return e.Err.Error()
}
// WriterSet wraps multiple writers like io.MultiWriter, but such that individual
// writers are easy to add or remove as necessary.
type WriterSet struct {
m map[io.Writer]chan error // writer -> error channel handed out by Add
mu sync.Mutex // guards m
}
// New initializes a new empty writer set.
func New() *WriterSet {
	ws := &WriterSet{}
	ws.m = make(map[io.Writer]chan error)
	return ws
}
// Add ensures w is in the set and returns the channel on which a failed
// write to w will be reported. Adding an existing writer returns its
// original channel.
func (ws *WriterSet) Add(w io.Writer) <-chan error {
	ws.mu.Lock()
	defer ws.mu.Unlock()
	if existing, ok := ws.m[w]; ok {
		return existing
	}
	errCh := make(chan error, 1)
	ws.m[w] = errCh
	return errCh
}
// Contains determines if w is in the set.
func (ws *WriterSet) Contains(w io.Writer) bool {
	ws.mu.Lock()
	_, found := ws.m[w]
	ws.mu.Unlock()
	return found
}
// Remove ensures w is not in the set.
func (ws *WriterSet) Remove(w io.Writer) {
	ws.mu.Lock()
	delete(ws.m, w)
	ws.mu.Unlock()
}
// Write writes data to each underlying writer. If an error occurs on an underlying writer,
// that writer is removed from the set. The error will be wrapped as an ErrPartialWrite and
// sent on the channel created when the writer was added.
// Note: Write itself always reports success (len(b), nil) so one failing
// writer cannot abort the others; failures are only visible via the
// per-writer channel, which is closed after the error is delivered.
func (ws *WriterSet) Write(b []byte) (int, error) {
ws.mu.Lock()
defer ws.mu.Unlock()
for w, c := range ws.m {
bs, err := w.Write(b)
if err != nil {
// channel has capacity 1, so this send never blocks
c <- ErrPartialWrite{
Err: err,
Wrote: bs,
Expected: len(b),
Writer: w,
}
close(c)
// deleting during range is safe in Go
delete(ws.m, w)
}
}
return len(b), nil
}
// Flush implements http.Flusher by flushing every underlying writer that is
// itself an http.Flusher.
func (ws *WriterSet) Flush() {
	ws.mu.Lock()
	defer ws.mu.Unlock()
	for w := range ws.m {
		if flusher, ok := w.(http.Flusher); ok {
			flusher.Flush()
		}
	}
}
<|start_filename|>slides/crash.go<|end_filename|>
// Prevent go install ./... from complaining about different packages in the same dir.
// +build
package n
// NOTE(review): R and S form a mutually recursive interface/alias pair;
// given the file name (slides/crash.go) this is presumably a deliberate
// compiler/tool-crashing example — keep as-is.
type R interface{ S }
type S = interface{ R }
<|start_filename|>go-fuzz/compare_amd64.go<|end_filename|>
// Copyright 2015 go-fuzz project authors. All rights reserved.
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
package main
// compareCoverBody compares two coverage-counter buffers, dispatching to
// the AVX2 implementation when the CPU supports it (hasAVX2 is set by the
// init in cpu_amd64.go) and to SSE2 otherwise. The comparison semantics
// live entirely in compare_amd64.s — presumably it reports whether cur
// contains coverage that base lacks; TODO confirm against the assembly.
// Both slices must be non-empty: &base[0] and &cur[0] panic on empty input.
func compareCoverBody(base, cur []byte) bool {
	if hasAVX2 {
		return compareCoverBodyAVX2(&base[0], &cur[0])
	}
	return compareCoverBodySSE2(&base[0], &cur[0])
}
func compareCoverBodySSE2(base, cur *byte) bool // in compare_amd64.s
func compareCoverBodyAVX2(base, cur *byte) bool // in compare_amd64.s
<|start_filename|>go-fuzz/cpu_amd64.go<|end_filename|>
// Copyright 2015 go-fuzz project authors. All rights reserved.
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
package main
// Adapted from GOROOT/src/internal/cpu/cpu_x86.go.
// hasAVX2 is written once by init below and read by compareCoverBody.
var hasAVX2 bool
// cpuid and xgetbv wrap the CPUID and XGETBV instructions; their bodies
// are in assembly (presumably an accompanying cpu_amd64.s — TODO confirm).
func cpuid(eaxArg, ecxArg uint32) (eax, ebx, ecx, edx uint32)
func xgetbv() (eax, edx uint32)
const (
	// ecx bits
	cpuid_OSXSAVE = 1 << 27
	// ebx bits
	cpuid_AVX2 = 1 << 5
)
// init detects AVX2 support at startup. Two conditions must hold:
// the CPU must advertise AVX2 (CPUID leaf 7, EBX bit 5), and the OS must
// have enabled saving of XMM/YMM register state (OSXSAVE set, then XGETBV
// bits 1 and 2), otherwise AVX instructions would fault.
func init() {
	_, _, ecx1, _ := cpuid(1, 0)
	hasOSXSAVE := cpuBitIsSet(ecx1, cpuid_OSXSAVE)
	osSupportsAVX := false
	// For XGETBV, OSXSAVE bit is required and sufficient.
	if hasOSXSAVE {
		eax, _ := xgetbv()
		// Check if XMM and YMM registers have OS support.
		osSupportsAVX = cpuBitIsSet(eax, 1<<1) && cpuBitIsSet(eax, 1<<2)
	}
	_, ebx7, _, _ := cpuid(7, 0)
	hasAVX2 = cpuBitIsSet(ebx7, cpuid_AVX2) && osSupportsAVX
}
// cpuBitIsSet reports whether any bit of mask is set in word.
func cpuBitIsSet(word uint32, mask uint32) bool {
	if word&mask == 0 {
		return false
	}
	return true
}
<|start_filename|>test/internal/test/test.go<|end_filename|>
// Copyright 2016 go-fuzz project authors. All rights reserved.
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
// Test for fuzzing of internal packages.
package test
// Fuzz is the go-fuzz entry point. It always returns 0 (input neither
// favored nor discarded): this package only exists to verify that
// internal packages can be fuzzed at all.
func Fuzz(data []byte) int {
	return 0
}
<|start_filename|>go-fuzz/versifier/versifier_test.go<|end_filename|>
// Copyright 2015 go-fuzz project authors. All rights reserved.
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
package versifier
import (
"os"
"testing"
)
// dump builds a verse from the given input and prints it to stdout.
func dump(data string) {
	BuildVerse(nil, []byte(data)).Print(os.Stdout)
}
// TestNumber smoke-tests verse construction on numeric literals
// (negative, hex, and scientific-notation forms).
func TestNumber(t *testing.T) {
	dump(`abc -10 def 0xab1 0x123 1e10 asd 1e2 22e-78 -11e72`)
}
// TestList1 smoke-tests verse construction on a JSON-like key/value object.
func TestList1(t *testing.T) {
	dump(`{"f1": "v1", "f2": "v2", "f3": "v3"}`)
}
// TestList2 smoke-tests verse construction on a comma-separated number list.
func TestList2(t *testing.T) {
	dump(`1,2.0,3e3`)
}
// TestBracket smoke-tests verse construction on bracketed and
// parenthesized groups, both empty and non-empty.
func TestBracket(t *testing.T) {
	dump(`[] [afal] ( ) (afaf)`)
}
// TestKeyValue smoke-tests verse construction on key=value and
// colon-separated pairs, including a leading-separator edge case.
func TestKeyValue(t *testing.T) {
	dump(`a=1 a=b 2 (aa=bb) a bb:cc:dd,a=b,c=d,e=f`)
	dump(`:a`)
}
<|start_filename|>go-fuzz/worker_test.go<|end_filename|>
package main
import (
"encoding/binary"
"math"
"testing"
)
// TestIncrementDecrement checks that increment/decrement treat a 2-byte
// buffer as a little-endian uint16 and add/subtract exactly one.
// Decrement's wraparound is covered at i == 0 (0-1 == 0xFFFF in uint16
// arithmetic), but the loop bound (i < MaxUint16) means increment's
// wraparound (0xFFFF -> 0) is never exercised — possibly deliberate;
// TODO confirm against increment's intended semantics.
func TestIncrementDecrement(t *testing.T) {
	b := make([]byte, 2)
	for i := 0; i < math.MaxUint16; i++ {
		u := uint16(i)
		binary.LittleEndian.PutUint16(b, u)
		b1 := increment(b)
		u1 := binary.LittleEndian.Uint16(b1)
		if u+1 != u1 {
			t.Fatalf("increment(%d) = %d, want %d", u, u1, u+1)
		}
		b1 = decrement(b)
		u1 = binary.LittleEndian.Uint16(b1)
		if u-1 != u1 {
			t.Fatalf("decrement(%d) = %d, want %d", u, u1, u-1)
		}
	}
}
<|start_filename|>slides/gob.go<|end_filename|>
// Prevent go install ./... from complaining about different packages in the same dir.
// +build
package main
import (
"bytes"
"encoding/gob"
"encoding/hex"
)
// START OMIT
// data is a hex-encoded gob stream fed to the decoder in main. Presumably
// a go-fuzz-discovered input that misbehaves when decoded into X — TODO
// confirm against the corresponding slide. Do not reformat: the payload
// must stay byte-exact.
const data = "4dffb503010102303001ff30000109010130010800010130010800010130" +
	"01ffb80001014a01ffb60001014b01ff860001013001ff860001013001ff" +
	"860001013001ff860001013001ffb80000001eff850401010e3030303030" +
	"30303030303030303001ff3000010c0104000016ffb70201010830303030" +
	"3030303001ff3000010c000030ffb6040405fcff00303030303030303030" +
	"303030303030303030303030303030303030303030303030303030303030" +
	"303030303030303030303030303030303030303030303030303030303030" +
	"30303030303030"
// X is the decode target: a self-referential struct (J points back to X)
// plus a map field, giving gob a recursive type definition to resolve.
type X struct {
	J *X
	K map[string]int
}
// main decodes the hex payload and runs it through the gob decoder.
// Both errors are deliberately ignored — the demo's point is the
// decoder's behavior on hostile input, not the result.
func main() {
	raw, _ := hex.DecodeString(data)
	gob.NewDecoder(bytes.NewReader(raw)).Decode(new(X))
}
// END OMIT
| disconnect3d/go-fuzz |